-- cgit v0.12 From 6093a125ee900738a1840afb6980ec426cacad68 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Mon, 10 Feb 2014 19:17:46 +0100 Subject: Issue #20505: Use even shorter sleep in test_timeout_rounding() to make the test more reliable (= fail more often on Windows with HPET enabled). --- Lib/test/test_asyncio/test_events.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index e6a1c97..2a01b1c 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -1176,13 +1176,15 @@ class EventLoopTestsMixin: loop = self.loop yield from asyncio.sleep(1e-2, loop=loop) yield from asyncio.sleep(1e-4, loop=loop) + yield from asyncio.sleep(1e-6, loop=loop) + yield from asyncio.sleep(1e-8, loop=loop) self.loop.run_until_complete(wait()) - # The ideal number of call is 6, but on some platforms, the selector + # The ideal number of call is 10, but on some platforms, the selector # may sleep at little bit less than timeout depending on the resolution - # of the clock used by the kernel. Tolerate 2 useless calls on these + # of the clock used by the kernel. Tolerate 5 useless calls on these # platforms. - self.assertLessEqual(self.loop._run_once_counter, 8, + self.assertLessEqual(self.loop._run_once_counter, 15, {'time_info': time.get_clock_info('time'), 'monotonic_info': time.get_clock_info('monotonic'), 'selector': self.loop._selector.__class__.__name__}) -- cgit v0.12 From b38b5c43c710da8fd6034d1dab631c5b020652bf Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Mon, 10 Feb 2014 22:11:21 +0100 Subject: merge with 3.3 --- .hgtags | 2 ++ Doc/tools/sphinxext/susp-ignored.csv | 1 + Lib/idlelib/NEWS.txt | 1 - Lib/smtplib.py | 5 ++++- Lib/test/mock_socket.py | 9 +++++++-- Lib/test/test_smtplib.py | 30 +++++++++++++++++++++++++++++- 6 files changed, 43 insertions(+), 5 deletions(-) diff --git a/.hgtags b/.hgtags index 979e8ad..2d5c42b 100644 --- a/.hgtags +++ b/.hgtags @@ -118,6 +118,8 @@ d047928ae3f6314a13b6137051315453d0ae89b6 v3.3.2 fd53c500f8b80f54f3ecedec9da2e8c7e52a6888 v3.3.3rc1 d32442c0e60dfbd71234e807d3d1dedd227495a9 v3.3.3rc2 c3896275c0f61b2510a6c7e6c458a750359a91b8 v3.3.3 +fa92f5f940c6c0d839d7f0611e4b717606504a3c v3.3.4rc1 +7ff62415e4263c432c8acf6e424224209211eadb v3.3.4 46535f65e7f3bcdcf176f36d34bc1fed719ffd2b v3.4.0a1 9265a2168e2cb2a84785d8717792acc661e6b692 v3.4.0a2 dd9cdf90a5073510877e9dd5112f8e6cf20d5e89 v3.4.0a3 diff --git a/Doc/tools/sphinxext/susp-ignored.csv b/Doc/tools/sphinxext/susp-ignored.csv index 1769023..f9d6bbf 100644 --- a/Doc/tools/sphinxext/susp-ignored.csv +++ b/Doc/tools/sphinxext/susp-ignored.csv @@ -282,3 +282,4 @@ whatsnew/changelog,,:PythonCmd,"With Tk < 8.5 _tkinter.c:PythonCmd() raised Unic whatsnew/changelog,,::,": Fix FTP tests for IPv6, bind to ""::1"" instead of ""localhost""." whatsnew/changelog,,::,": Use ""127.0.0.1"" or ""::1"" instead of ""localhost"" as much as" whatsnew/changelog,,:password,user:password +whatsnew/changelog,,:gz,w:gz diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index 478c183..953a38d 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -875,4 +875,3 @@ What's New in IDLEfork 0.9 Alpha 1? -------------------------------------------------------------------- Refer to HISTORY.txt for additional information on earlier releases. 
-------------------------------------------------------------------- - diff --git a/Lib/smtplib.py b/Lib/smtplib.py index 796b866..66b5879 100755 --- a/Lib/smtplib.py +++ b/Lib/smtplib.py @@ -62,6 +62,7 @@ SMTP_PORT = 25 SMTP_SSL_PORT = 465 CRLF = "\r\n" bCRLF = b"\r\n" +_MAXLINE = 8192 # more than 8 times larger than RFC 821, 4.5.3 OLDSTYLE_AUTH = re.compile(r"auth=(.*)", re.I) @@ -365,7 +366,7 @@ class SMTP: self.file = self.sock.makefile('rb') while 1: try: - line = self.file.readline() + line = self.file.readline(_MAXLINE + 1) except OSError as e: self.close() raise SMTPServerDisconnected("Connection unexpectedly closed: " @@ -375,6 +376,8 @@ class SMTP: raise SMTPServerDisconnected("Connection unexpectedly closed") if self.debuglevel > 0: print('reply:', repr(line), file=stderr) + if len(line) > _MAXLINE: + raise SMTPResponseException(500, "Line too long.") resp.append(line[4:].strip(b' \t\r\n')) code = line[:3] # Check that the error code is syntactically correct. diff --git a/Lib/test/mock_socket.py b/Lib/test/mock_socket.py index 8ef0ec8..e36724f 100644 --- a/Lib/test/mock_socket.py +++ b/Lib/test/mock_socket.py @@ -21,8 +21,13 @@ class MockFile: """ def __init__(self, lines): self.lines = lines - def readline(self): - return self.lines.pop(0) + b'\r\n' + def readline(self, limit=-1): + result = self.lines.pop(0) + b'\r\n' + if limit >= 0: + # Re-insert the line, removing the \r\n we added. + self.lines.insert(0, result[limit:-2]) + result = result[:limit] + return result def close(self): pass diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py index 39bf7ae..3f7b9b4 100644 --- a/Lib/test/test_smtplib.py +++ b/Lib/test/test_smtplib.py @@ -563,6 +563,33 @@ class BadHELOServerTests(unittest.TestCase): HOST, self.port, 'localhost', 3) +@unittest.skipUnless(threading, 'Threading required for this test.') +class TooLongLineTests(unittest.TestCase): + respdata = b'250 OK' + (b'.' * smtplib._MAXLINE * 2) + b'\n' + + def setUp(self): + self.old_stdout = sys.stdout + self.output = io.StringIO() + sys.stdout = self.output + + self.evt = threading.Event() + self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.sock.settimeout(15) + self.port = support.bind_port(self.sock) + servargs = (self.evt, self.respdata, self.sock) + threading.Thread(target=server, args=servargs).start() + self.evt.wait() + self.evt.clear() + + def tearDown(self): + self.evt.wait() + sys.stdout = self.old_stdout + + def testLineTooLong(self): + self.assertRaises(smtplib.SMTPResponseException, smtplib.SMTP, + HOST, self.port, 'localhost', 3) + + sim_users = {'Mr.A@somewhere.com':'John A', 'Ms.B@xn--fo-fka.com':'Sally B', 'Mrs.C@somewhereesle.com':'Ruth C', @@ -888,7 +915,8 @@ class SMTPSimTests(unittest.TestCase): def test_main(verbose=None): support.run_unittest(GeneralTests, DebuggingServerTests, NonConnectingTests, - BadHELOServerTests, SMTPSimTests) + BadHELOServerTests, SMTPSimTests, + TooLongLineTests) if __name__ == '__main__': test_main() -- cgit v0.12 From ed1654fa3ea1c05c7397334b09396675f3c3aeaf Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Mon, 10 Feb 2014 23:42:32 +0100 Subject: Issue #20505: BaseEventLoop uses again the resolution of the clock to decide if scheduled tasks should be executed or not. 
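The change below makes the event loop treat a scheduled callback as ready when its deadline falls within one clock resolution of the current time, so a timer is not left pending merely because the clock cannot measure the last fraction of a tick. A rough sketch of that comparison, assuming a heap of (when, callback) pairs and using illustrative names rather than the real BaseEventLoop internals:

    # Sketch only: pop callbacks whose deadline is within one clock
    # resolution of "now"; mirrors the comparison added in this patch.
    import heapq
    import time

    clock_resolution = time.get_clock_info('monotonic').resolution
    scheduled = []  # heap of (when, callback) pairs

    def pop_ready(now):
        end_time = now + clock_resolution
        ready = []
        while scheduled and scheduled[0][0] < end_time:
            when, callback = heapq.heappop(scheduled)
            ready.append(callback)
        return ready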
--- Lib/asyncio/base_events.py | 5 +++-- Lib/test/test_asyncio/test_events.py | 12 ++++++------ 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 4207a7e..377ea21 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -96,6 +96,7 @@ class BaseEventLoop(events.AbstractEventLoop): self._default_executor = None self._internal_fds = 0 self._running = False + self._clock_resolution = time.get_clock_info('monotonic').resolution def _make_socket_transport(self, sock, protocol, waiter=None, *, extra=None, server=None): @@ -643,10 +644,10 @@ class BaseEventLoop(events.AbstractEventLoop): self._process_events(event_list) # Handle 'later' callbacks that are ready. - now = self.time() + end_time = self.time() + self._clock_resolution while self._scheduled: handle = self._scheduled[0] - if handle._when > now: + if handle._when >= end_time: break handle = heapq.heappop(self._scheduled) self._ready.append(handle) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index 2a01b1c..3f99da4 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -1178,15 +1178,15 @@ class EventLoopTestsMixin: yield from asyncio.sleep(1e-4, loop=loop) yield from asyncio.sleep(1e-6, loop=loop) yield from asyncio.sleep(1e-8, loop=loop) + yield from asyncio.sleep(1e-10, loop=loop) self.loop.run_until_complete(wait()) - # The ideal number of call is 10, but on some platforms, the selector + # The ideal number of call is 12, but on some platforms, the selector # may sleep at little bit less than timeout depending on the resolution - # of the clock used by the kernel. Tolerate 5 useless calls on these - # platforms. - self.assertLessEqual(self.loop._run_once_counter, 15, - {'time_info': time.get_clock_info('time'), - 'monotonic_info': time.get_clock_info('monotonic'), + # of the clock used by the kernel. Tolerate a few useless calls on + # these platforms. 
+ self.assertLessEqual(self.loop._run_once_counter, 20, + {'clock_resolution': self.loop._clock_resolution, 'selector': self.loop._selector.__class__.__name__}) -- cgit v0.12 From 7b467db4d3f76600d14b39bacbe9bc4b1b10698e Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 11 Feb 2014 09:03:47 +0100 Subject: Issue #20505: Fix TestLoop, set the clock resolution --- Lib/asyncio/test_utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/Lib/asyncio/test_utils.py b/Lib/asyncio/test_utils.py index 71d69cf..7c8e1dc 100644 --- a/Lib/asyncio/test_utils.py +++ b/Lib/asyncio/test_utils.py @@ -191,6 +191,7 @@ class TestLoop(base_events.BaseEventLoop): self._gen = gen() next(self._gen) self._time = 0 + self._clock_resolution = 1e-9 self._timers = [] self._selector = TestSelector() -- cgit v0.12 From 8425bf817f03949e0f77b9158a6b4d2d22157039 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 11 Feb 2014 10:08:08 +0100 Subject: Issue #20505: Improve debug info in asyncio event loop --- Lib/asyncio/base_events.py | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 377ea21..e9cb934 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -634,12 +634,25 @@ class BaseEventLoop(events.AbstractEventLoop): else: logger.log(level, 'poll took %.3f seconds', t1-t0) else: - t0 = self.time() + t0_monotonic = time.monotonic() + t0 = time.perf_counter() event_list = self._selector.select(timeout) - dt = self.time() - t0 - if not event_list and timeout and dt < timeout: - print("asyncio: selector.select(%.3f ms) took %.3f ms" - % (timeout*1e3, dt*1e3), + dt = time.perf_counter() - t0 + dt_monotonic = time.monotonic() - t0_monotonic + if not event_list and timeout: # and dt < timeout: + selector = self._selector.__class__.__name__ + if (selector.startswith(("Poll", "Epoll", "Iocp")) + or timeout > 1e-3 or dt > 1e-3): + unit, factor = "ms", 1e3 + else: + unit, factor = "us", 1e6 + print("asyncio: %s.select(%.3f %s) took %.3f %s" + " (monotonic: %.3f %s, clock res: %.3f %s)" + % (self._selector.__class__.__name__, + timeout * factor, unit, + dt * factor, unit, + dt_monotonic * factor, unit, + self._clock_resolution * factor, unit), file=sys.__stderr__, flush=True) self._process_events(event_list) -- cgit v0.12 From 7c4bd39a1fd4b69f47ade08fe4732b2ec80f8596 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 11 Feb 2014 10:10:41 +0100 Subject: Issue #20505: Oops, only print debug info if selector.select(timeout) took less than timeout --- Lib/asyncio/base_events.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index e9cb934..0200d35 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -639,7 +639,7 @@ class BaseEventLoop(events.AbstractEventLoop): event_list = self._selector.select(timeout) dt = time.perf_counter() - t0 dt_monotonic = time.monotonic() - t0_monotonic - if not event_list and timeout: # and dt < timeout: + if not event_list and timeout and dt < timeout: selector = self._selector.__class__.__name__ if (selector.startswith(("Poll", "Epoll", "Iocp")) or timeout > 1e-3 or dt > 1e-3): -- cgit v0.12 From 262a458b8a603ed11103bd196f1374a39cee0295 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 11 Feb 2014 10:26:53 +0100 Subject: Issue #20505: use also the monotonic time to decide if asyncio debug traces should be printed --- Lib/asyncio/base_events.py | 7 ++++--- 1 file 
changed, 4 insertions(+), 3 deletions(-) diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 0200d35..48b3ee3 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -639,15 +639,16 @@ class BaseEventLoop(events.AbstractEventLoop): event_list = self._selector.select(timeout) dt = time.perf_counter() - t0 dt_monotonic = time.monotonic() - t0_monotonic - if not event_list and timeout and dt < timeout: + if (not event_list and timeout + and (dt < timeout or dt_monotonic < timeout)): selector = self._selector.__class__.__name__ if (selector.startswith(("Poll", "Epoll", "Iocp")) or timeout > 1e-3 or dt > 1e-3): unit, factor = "ms", 1e3 else: unit, factor = "us", 1e6 - print("asyncio: %s.select(%.3f %s) took %.3f %s" - " (monotonic: %.3f %s, clock res: %.3f %s)" + print("asyncio: %s.select(%.4f %s) took %.3f %s" + " (monotonic=%.3f %s, clock res=%.3f %s)" % (self._selector.__class__.__name__, timeout * factor, unit, dt * factor, unit, -- cgit v0.12 From 9af4a246f9b87d859a6f72e2a10b40614ad07030 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 11 Feb 2014 11:34:30 +0100 Subject: asyncio, Tulip issue 126: call_soon(), call_soon_threadsafe(), call_later(), call_at() and run_in_executor() now raise a TypeError if the callback is a coroutine function. --- Lib/asyncio/base_events.py | 6 ++++++ Lib/asyncio/test_utils.py | 5 ++++- Lib/test/test_asyncio/test_base_events.py | 18 ++++++++++++++++++ Lib/test/test_asyncio/test_proactor_events.py | 2 +- Lib/test/test_asyncio/test_selector_events.py | 9 +++++---- Lib/test/test_asyncio/test_tasks.py | 12 +++++------- 6 files changed, 39 insertions(+), 13 deletions(-) diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 48b3ee3..4b7b161 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -227,6 +227,8 @@ class BaseEventLoop(events.AbstractEventLoop): def call_at(self, when, callback, *args): """Like call_later(), but uses an absolute time.""" + if tasks.iscoroutinefunction(callback): + raise TypeError("coroutines cannot be used with call_at()") timer = events.TimerHandle(when, callback, args) heapq.heappush(self._scheduled, timer) return timer @@ -241,6 +243,8 @@ class BaseEventLoop(events.AbstractEventLoop): Any positional arguments after the callback will be passed to the callback when it is called. 
""" + if tasks.iscoroutinefunction(callback): + raise TypeError("coroutines cannot be used with call_soon()") handle = events.Handle(callback, args) self._ready.append(handle) return handle @@ -252,6 +256,8 @@ class BaseEventLoop(events.AbstractEventLoop): return handle def run_in_executor(self, executor, callback, *args): + if tasks.iscoroutinefunction(callback): + raise TypeError("coroutines cannot be used with run_in_executor()") if isinstance(callback, events.Handle): assert not args assert not isinstance(callback, events.TimerHandle) diff --git a/Lib/asyncio/test_utils.py b/Lib/asyncio/test_utils.py index 7c8e1dc..deab7c3 100644 --- a/Lib/asyncio/test_utils.py +++ b/Lib/asyncio/test_utils.py @@ -135,7 +135,7 @@ def make_test_protocol(base): if name.startswith('__') and name.endswith('__'): # skip magic names continue - dct[name] = unittest.mock.Mock(return_value=None) + dct[name] = MockCallback(return_value=None) return type('TestProtocol', (base,) + base.__bases__, dct)() @@ -274,3 +274,6 @@ class TestLoop(base_events.BaseEventLoop): def _write_to_self(self): pass + +def MockCallback(**kwargs): + return unittest.mock.Mock(spec=['__call__'], **kwargs) diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py index 5b05684..c6950ab 100644 --- a/Lib/test/test_asyncio/test_base_events.py +++ b/Lib/test/test_asyncio/test_base_events.py @@ -567,6 +567,7 @@ class BaseEventLoopWithSelectorTests(unittest.TestCase): m_socket.getaddrinfo.return_value = [ (2, 1, 6, '', ('127.0.0.1', 10100))] + m_socket.getaddrinfo._is_coroutine = False m_sock = m_socket.socket.return_value = unittest.mock.Mock() m_sock.bind.side_effect = Err @@ -577,6 +578,7 @@ class BaseEventLoopWithSelectorTests(unittest.TestCase): @unittest.mock.patch('asyncio.base_events.socket') def test_create_datagram_endpoint_no_addrinfo(self, m_socket): m_socket.getaddrinfo.return_value = [] + m_socket.getaddrinfo._is_coroutine = False coro = self.loop.create_datagram_endpoint( MyDatagramProto, local_addr=('localhost', 0)) @@ -681,6 +683,22 @@ class BaseEventLoopWithSelectorTests(unittest.TestCase): unittest.mock.ANY, MyProto, sock, None, None) + def test_call_coroutine(self): + @asyncio.coroutine + def coroutine_function(): + pass + + with self.assertRaises(TypeError): + self.loop.call_soon(coroutine_function) + with self.assertRaises(TypeError): + self.loop.call_soon_threadsafe(coroutine_function) + with self.assertRaises(TypeError): + self.loop.call_later(60, coroutine_function) + with self.assertRaises(TypeError): + self.loop.call_at(self.loop.time() + 60, coroutine_function) + with self.assertRaises(TypeError): + self.loop.run_in_executor(None, coroutine_function) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_asyncio/test_proactor_events.py b/Lib/test/test_asyncio/test_proactor_events.py index 9964f42..6bea1a3 100644 --- a/Lib/test/test_asyncio/test_proactor_events.py +++ b/Lib/test/test_asyncio/test_proactor_events.py @@ -402,7 +402,7 @@ class BaseProactorEventLoopTests(unittest.TestCase): NotImplementedError, BaseProactorEventLoop, self.proactor) def test_make_socket_transport(self): - tr = self.loop._make_socket_transport(self.sock, unittest.mock.Mock()) + tr = self.loop._make_socket_transport(self.sock, asyncio.Protocol()) self.assertIsInstance(tr, _ProactorSocketTransport) def test_loop_self_reading(self): diff --git a/Lib/test/test_asyncio/test_selector_events.py b/Lib/test/test_asyncio/test_selector_events.py index ad0b0be..855a895 100644 --- 
a/Lib/test/test_asyncio/test_selector_events.py +++ b/Lib/test/test_asyncio/test_selector_events.py @@ -44,8 +44,8 @@ class BaseSelectorEventLoopTests(unittest.TestCase): def test_make_socket_transport(self): m = unittest.mock.Mock() self.loop.add_reader = unittest.mock.Mock() - self.assertIsInstance( - self.loop._make_socket_transport(m, m), _SelectorSocketTransport) + transport = self.loop._make_socket_transport(m, asyncio.Protocol()) + self.assertIsInstance(transport, _SelectorSocketTransport) @unittest.skipIf(ssl is None, 'No ssl module') def test_make_ssl_transport(self): @@ -54,8 +54,9 @@ class BaseSelectorEventLoopTests(unittest.TestCase): self.loop.add_writer = unittest.mock.Mock() self.loop.remove_reader = unittest.mock.Mock() self.loop.remove_writer = unittest.mock.Mock() - self.assertIsInstance( - self.loop._make_ssl_transport(m, m, m, m), _SelectorSslTransport) + waiter = asyncio.Future(loop=self.loop) + transport = self.loop._make_ssl_transport(m, asyncio.Protocol(), m, waiter) + self.assertIsInstance(transport, _SelectorSslTransport) @unittest.mock.patch('asyncio.selector_events.ssl', None) def test_make_ssl_transport_without_ssl_error(self): diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 9abdfa5..29bdaf5 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -2,8 +2,6 @@ import gc import unittest -import unittest.mock -from unittest.mock import Mock import asyncio from asyncio import test_utils @@ -1358,7 +1356,7 @@ class GatherTestsBase: def _check_success(self, **kwargs): a, b, c = [asyncio.Future(loop=self.one_loop) for i in range(3)] fut = asyncio.gather(*self.wrap_futures(a, b, c), **kwargs) - cb = Mock() + cb = test_utils.MockCallback() fut.add_done_callback(cb) b.set_result(1) a.set_result(2) @@ -1380,7 +1378,7 @@ class GatherTestsBase: def test_one_exception(self): a, b, c, d, e = [asyncio.Future(loop=self.one_loop) for i in range(5)] fut = asyncio.gather(*self.wrap_futures(a, b, c, d, e)) - cb = Mock() + cb = test_utils.MockCallback() fut.add_done_callback(cb) exc = ZeroDivisionError() a.set_result(1) @@ -1399,7 +1397,7 @@ class GatherTestsBase: a, b, c, d = [asyncio.Future(loop=self.one_loop) for i in range(4)] fut = asyncio.gather(*self.wrap_futures(a, b, c, d), return_exceptions=True) - cb = Mock() + cb = test_utils.MockCallback() fut.add_done_callback(cb) exc = ZeroDivisionError() exc2 = RuntimeError() @@ -1460,7 +1458,7 @@ class FutureGatherTests(GatherTestsBase, unittest.TestCase): def test_one_cancellation(self): a, b, c, d, e = [asyncio.Future(loop=self.one_loop) for i in range(5)] fut = asyncio.gather(a, b, c, d, e) - cb = Mock() + cb = test_utils.MockCallback() fut.add_done_callback(cb) a.set_result(1) b.cancel() @@ -1479,7 +1477,7 @@ class FutureGatherTests(GatherTestsBase, unittest.TestCase): a, b, c, d, e, f = [asyncio.Future(loop=self.one_loop) for i in range(6)] fut = asyncio.gather(a, b, c, d, e, f, return_exceptions=True) - cb = Mock() + cb = test_utils.MockCallback() fut.add_done_callback(cb) a.set_result(1) zde = ZeroDivisionError() -- cgit v0.12 From 20e0743a5603479c56f32209b77a8bfac87baf53 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 11 Feb 2014 11:44:56 +0100 Subject: asyncio, Tulip issue 130: Add more checks on subprocess_exec/subprocess_shell parameters --- Lib/asyncio/base_events.py | 12 +++++-- Lib/asyncio/subprocess.py | 5 +-- Lib/test/test_asyncio/test_base_events.py | 54 +++++++++++++++++++++++++++++-- 3 files changed, 64 insertions(+), 7 
deletions(-) diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 4b7b161..5765f47 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -558,7 +558,7 @@ class BaseEventLoop(events.AbstractEventLoop): stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=False, shell=True, bufsize=0, **kwargs): - if not isinstance(cmd, str): + if not isinstance(cmd, (bytes, str)): raise ValueError("cmd must be a string") if universal_newlines: raise ValueError("universal_newlines must be False") @@ -572,7 +572,7 @@ class BaseEventLoop(events.AbstractEventLoop): return transport, protocol @tasks.coroutine - def subprocess_exec(self, protocol_factory, *args, stdin=subprocess.PIPE, + def subprocess_exec(self, protocol_factory, program, *args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=False, shell=False, bufsize=0, **kwargs): @@ -582,9 +582,15 @@ class BaseEventLoop(events.AbstractEventLoop): raise ValueError("shell must be False") if bufsize != 0: raise ValueError("bufsize must be 0") + popen_args = (program,) + args + for arg in popen_args: + if not isinstance(arg, (str, bytes)): + raise TypeError("program arguments must be " + "a bytes or text string, not %s" + % type(arg).__name__) protocol = protocol_factory() transport = yield from self._make_subprocess_transport( - protocol, args, False, stdin, stdout, stderr, bufsize, **kwargs) + protocol, popen_args, False, stdin, stdout, stderr, bufsize, **kwargs) return transport, protocol def _add_callback(self, handle): diff --git a/Lib/asyncio/subprocess.py b/Lib/asyncio/subprocess.py index 848d64f..8d1a407 100644 --- a/Lib/asyncio/subprocess.py +++ b/Lib/asyncio/subprocess.py @@ -180,7 +180,7 @@ def create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None, return Process(transport, protocol, loop) @tasks.coroutine -def create_subprocess_exec(*args, stdin=None, stdout=None, stderr=None, +def create_subprocess_exec(program, *args, stdin=None, stdout=None, stderr=None, loop=None, limit=streams._DEFAULT_LIMIT, **kwds): if loop is None: loop = events.get_event_loop() @@ -188,7 +188,8 @@ def create_subprocess_exec(*args, stdin=None, stdout=None, stderr=None, loop=loop) transport, protocol = yield from loop.subprocess_exec( protocol_factory, - *args, stdin=stdin, stdout=stdout, + program, *args, + stdin=stdin, stdout=stdout, stderr=stderr, **kwds) yield from protocol.waiter return Process(transport, protocol, loop) diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py index c6950ab..94e2d59 100644 --- a/Lib/test/test_asyncio/test_base_events.py +++ b/Lib/test/test_asyncio/test_base_events.py @@ -3,6 +3,7 @@ import errno import logging import socket +import sys import time import unittest import unittest.mock @@ -234,8 +235,57 @@ class BaseEventLoopTests(unittest.TestCase): self.assertEqual([handle], list(self.loop._ready)) def test_run_until_complete_type_error(self): - self.assertRaises( - TypeError, self.loop.run_until_complete, 'blah') + self.assertRaises(TypeError, + self.loop.run_until_complete, 'blah') + + def test_subprocess_exec_invalid_args(self): + args = [sys.executable, '-c', 'pass'] + + # missing program parameter (empty args) + self.assertRaises(TypeError, + self.loop.run_until_complete, self.loop.subprocess_exec, + asyncio.SubprocessProtocol) + + # exepected multiple arguments, not a list + self.assertRaises(TypeError, + self.loop.run_until_complete, self.loop.subprocess_exec, + 
asyncio.SubprocessProtocol, args) + + # program arguments must be strings, not int + self.assertRaises(TypeError, + self.loop.run_until_complete, self.loop.subprocess_exec, + asyncio.SubprocessProtocol, sys.executable, 123) + + # universal_newlines, shell, bufsize must not be set + self.assertRaises(TypeError, + self.loop.run_until_complete, self.loop.subprocess_exec, + asyncio.SubprocessProtocol, *args, universal_newlines=True) + self.assertRaises(TypeError, + self.loop.run_until_complete, self.loop.subprocess_exec, + asyncio.SubprocessProtocol, *args, shell=True) + self.assertRaises(TypeError, + self.loop.run_until_complete, self.loop.subprocess_exec, + asyncio.SubprocessProtocol, *args, bufsize=4096) + + def test_subprocess_shell_invalid_args(self): + # exepected a string, not an int or a list + self.assertRaises(TypeError, + self.loop.run_until_complete, self.loop.subprocess_shell, + asyncio.SubprocessProtocol, 123) + self.assertRaises(TypeError, + self.loop.run_until_complete, self.loop.subprocess_shell, + asyncio.SubprocessProtocol, [sys.executable, '-c', 'pass']) + + # universal_newlines, shell, bufsize must not be set + self.assertRaises(TypeError, + self.loop.run_until_complete, self.loop.subprocess_shell, + asyncio.SubprocessProtocol, 'exit 0', universal_newlines=True) + self.assertRaises(TypeError, + self.loop.run_until_complete, self.loop.subprocess_shell, + asyncio.SubprocessProtocol, 'exit 0', shell=True) + self.assertRaises(TypeError, + self.loop.run_until_complete, self.loop.subprocess_shell, + asyncio.SubprocessProtocol, 'exit 0', bufsize=4096) class MyProto(asyncio.Protocol): -- cgit v0.12 From 208556c51ff5598070ff64f0a741628152846933 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 11 Feb 2014 11:54:08 +0100 Subject: asyncio, Tulip issue 131: as_completed() and wait() now raises a TypeError if the list of futures is not a list but a Future, Task or coroutine object --- Lib/asyncio/tasks.py | 4 ++++ Lib/test/test_asyncio/test_tasks.py | 26 ++++++++++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index 5ad0652..81a125f 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -358,6 +358,8 @@ def wait(fs, *, loop=None, timeout=None, return_when=ALL_COMPLETED): Note: This does not raise TimeoutError! Futures that aren't done when the timeout occurs are returned in the second set. """ + if isinstance(fs, futures.Future) or iscoroutine(fs): + raise TypeError("expect a list of futures, not %s" % type(fs).__name__) if not fs: raise ValueError('Set of coroutines/Futures is empty.') @@ -474,6 +476,8 @@ def as_completed(fs, *, loop=None, timeout=None): Note: The futures 'f' are not necessarily members of fs. 
""" + if isinstance(fs, futures.Future) or iscoroutine(fs): + raise TypeError("expect a list of futures, not %s" % type(fs).__name__) loop = loop if loop is not None else events.get_event_loop() deadline = None if timeout is None else loop.time() + timeout todo = {async(f, loop=loop) for f in set(fs)} diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 29bdaf5..6847de0 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -7,6 +7,11 @@ import asyncio from asyncio import test_utils +@asyncio.coroutine +def coroutine_function(): + pass + + class Dummy: def __repr__(self): @@ -1338,6 +1343,27 @@ class TaskTests(unittest.TestCase): child2.set_result(2) test_utils.run_briefly(self.loop) + def test_as_completed_invalid_args(self): + fut = asyncio.Future(loop=self.loop) + + # as_completed() expects a list of futures, not a future instance + self.assertRaises(TypeError, self.loop.run_until_complete, + asyncio.as_completed(fut, loop=self.loop)) + self.assertRaises(TypeError, self.loop.run_until_complete, + asyncio.as_completed(coroutine_function(), loop=self.loop)) + + def test_wait_invalid_args(self): + fut = asyncio.Future(loop=self.loop) + + # wait() expects a list of futures, not a future instance + self.assertRaises(TypeError, self.loop.run_until_complete, + asyncio.wait(fut, loop=self.loop)) + self.assertRaises(TypeError, self.loop.run_until_complete, + asyncio.wait(coroutine_function(), loop=self.loop)) + + # wait() expects at least a future + self.assertRaises(ValueError, self.loop.run_until_complete, + asyncio.wait([], loop=self.loop)) class GatherTestsBase: -- cgit v0.12 From 5d95afa99d6bc6da9aecee8ceb7f8ee43521eb85 Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Tue, 11 Feb 2014 10:19:12 -0500 Subject: merge 3.3 (#20594) --- Misc/NEWS | 2 ++ Modules/posixmodule.c | 8 ++++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS index c9dce27..4c0d3a5 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -13,6 +13,8 @@ Core and Builtins Library ------- +- Issue #20594: Avoid name clash with the libc function posix_close. + What's New in Python 3.4.0 release candidate 1? =============================================== diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 71abc98..6cbc78f 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -7763,8 +7763,12 @@ PyDoc_STRVAR(posix_close__doc__, "close(fd)\n\n\ Close a file descriptor (for low level IO)."); +/* +The underscore at end of function name avoids a name clash with the libc +function posix_close. 
+*/ static PyObject * -posix_close(PyObject *self, PyObject *args) +posix_close_(PyObject *self, PyObject *args) { int fd, res; if (!PyArg_ParseTuple(args, "i:close", &fd)) @@ -11422,7 +11426,7 @@ static PyMethodDef posix_methods[] = { {"open", (PyCFunction)posix_open,\ METH_VARARGS | METH_KEYWORDS, posix_open__doc__}, - {"close", posix_close, METH_VARARGS, posix_close__doc__}, + {"close", posix_close_, METH_VARARGS, posix_close__doc__}, {"closerange", posix_closerange, METH_VARARGS, posix_closerange__doc__}, {"device_encoding", device_encoding, METH_VARARGS, device_encoding__doc__}, {"dup", posix_dup, METH_VARARGS, posix_dup__doc__}, -- cgit v0.12 From 613960bee8bbd0d08ca6d082ba7c2d47249a6289 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 11 Feb 2014 17:53:47 +0100 Subject: Issue #20505: Remove debug code --- Lib/asyncio/base_events.py | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 5765f47..7d12042 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -646,27 +646,7 @@ class BaseEventLoop(events.AbstractEventLoop): else: logger.log(level, 'poll took %.3f seconds', t1-t0) else: - t0_monotonic = time.monotonic() - t0 = time.perf_counter() event_list = self._selector.select(timeout) - dt = time.perf_counter() - t0 - dt_monotonic = time.monotonic() - t0_monotonic - if (not event_list and timeout - and (dt < timeout or dt_monotonic < timeout)): - selector = self._selector.__class__.__name__ - if (selector.startswith(("Poll", "Epoll", "Iocp")) - or timeout > 1e-3 or dt > 1e-3): - unit, factor = "ms", 1e3 - else: - unit, factor = "us", 1e6 - print("asyncio: %s.select(%.4f %s) took %.3f %s" - " (monotonic=%.3f %s, clock res=%.3f %s)" - % (self._selector.__class__.__name__, - timeout * factor, unit, - dt * factor, unit, - dt_monotonic * factor, unit, - self._clock_resolution * factor, unit), - file=sys.__stderr__, flush=True) self._process_events(event_list) # Handle 'later' callbacks that are ready. 
-- cgit v0.12 From 9887fd7a7947bc3e53375a749171213d232b2e5c Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 11 Feb 2014 18:40:56 +0100 Subject: Issue #20495: Skip test_read_pty_output() of test_asyncio on FreeBSD older than FreeBSD 8 --- Lib/test/test_asyncio/test_events.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index 3f99da4..d5d667a 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -956,6 +956,8 @@ class EventLoopTestsMixin: # select, poll and kqueue don't support character devices (PTY) on Mac OS X # older than 10.6 (Snow Leopard) @support.requires_mac_ver(10, 6) + # Issue #20495: The test hangs on FreeBSD 7.2 but pass on FreeBSD 9 + @support.requires_freebsd_version(8) def test_read_pty_output(self): proto = None -- cgit v0.12 From b13177885f05f44f859e4bccfc0b551f1771a88b Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Wed, 12 Feb 2014 17:01:52 -0500 Subject: asyncio.events: Use __slots__ in Handle and TimerHandle --- Lib/asyncio/events.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Lib/asyncio/events.py b/Lib/asyncio/events.py index 4c0cbb0..dd9e3fb 100644 --- a/Lib/asyncio/events.py +++ b/Lib/asyncio/events.py @@ -19,6 +19,8 @@ from .log import logger class Handle: """Object returned by callback registration methods.""" + __slots__ = ['_callback', '_args', '_cancelled'] + def __init__(self, callback, args): assert not isinstance(callback, Handle), 'A Handle is not a callback' self._callback = callback @@ -46,6 +48,8 @@ class Handle: class TimerHandle(Handle): """Object returned by timed callback registration methods.""" + __slots__ = ['_when'] + def __init__(self, when, callback, args): assert when is not None super().__init__(callback, args) -- cgit v0.12 From 2303fecedcfbdec16999e054e401dfad3a13fc05 Mon Sep 17 00:00:00 2001 From: Guido van Rossum Date: Wed, 12 Feb 2014 17:58:19 -0800 Subject: asyncio: Change as_completed() to use a Queue, to avoid O(N**2) behavior. Fixes issue #20566. --- Lib/asyncio/tasks.py | 53 +++++++++++++++++++++++-------------- Lib/test/test_asyncio/test_tasks.py | 23 +++++++++++++++- 2 files changed, 55 insertions(+), 21 deletions(-) diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index 81a125f..b7ee758 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -463,7 +463,11 @@ def _wait(fs, timeout, return_when, loop): # This is *not* a @coroutine! It is just an iterator (yielding Futures). def as_completed(fs, *, loop=None, timeout=None): - """Return an iterator whose values, when waited for, are Futures. + """Return an iterator whose values are coroutines. + + When waiting for the yielded coroutines you'll get the results (or + exceptions!) of the original Futures (or coroutines), in the order + in which and as soon as they complete. This differs from PEP 3148; the proper way to use this is: @@ -471,8 +475,8 @@ def as_completed(fs, *, loop=None, timeout=None): result = yield from f # The 'yield from' may raise. # Use result. - Raises TimeoutError if the timeout occurs before all Futures are - done. + If a timeout is specified, the 'yield from' will raise + TimeoutError when the timeout occurs before all Futures are done. Note: The futures 'f' are not necessarily members of fs. 
""" @@ -481,27 +485,36 @@ def as_completed(fs, *, loop=None, timeout=None): loop = loop if loop is not None else events.get_event_loop() deadline = None if timeout is None else loop.time() + timeout todo = {async(f, loop=loop) for f in set(fs)} - completed = collections.deque() + from .queues import Queue # Import here to avoid circular import problem. + done = Queue(loop=loop) + timeout_handle = None + + def _on_timeout(): + for f in todo: + f.remove_done_callback(_on_completion) + done.put_nowait(None) # Queue a dummy value for _wait_for_one(). + todo.clear() # Can't do todo.remove(f) in the loop. + + def _on_completion(f): + if not todo: + return # _on_timeout() was here first. + todo.remove(f) + done.put_nowait(f) + if not todo and timeout_handle is not None: + timeout_handle.cancel() @coroutine def _wait_for_one(): - while not completed: - timeout = None - if deadline is not None: - timeout = deadline - loop.time() - if timeout < 0: - raise futures.TimeoutError() - done, pending = yield from _wait( - todo, timeout, FIRST_COMPLETED, loop) - # Multiple callers might be waiting for the same events - # and getting the same outcome. Dedupe by updating todo. - for f in done: - if f in todo: - todo.remove(f) - completed.append(f) - f = completed.popleft() - return f.result() # May raise. + f = yield from done.get() + if f is None: + # Dummy value from _on_timeout(). + raise futures.TimeoutError + return f.result() # May raise f.exception(). + for f in todo: + f.add_done_callback(_on_completion) + if todo and timeout is not None: + timeout_handle = loop.call_later(timeout, _on_timeout) for _ in range(len(todo)): yield _wait_for_one() diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 6847de0..024dd2e 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -779,7 +779,6 @@ class TaskTests(unittest.TestCase): yield 0 yield 0 yield 0.1 - yield 0.02 loop = test_utils.TestLoop(gen) self.addCleanup(loop.close) @@ -791,6 +790,8 @@ class TaskTests(unittest.TestCase): def foo(): values = [] for f in asyncio.as_completed([a, b], timeout=0.12, loop=loop): + if values: + loop.advance_time(0.02) try: v = yield from f values.append((1, v)) @@ -809,6 +810,26 @@ class TaskTests(unittest.TestCase): loop.advance_time(10) loop.run_until_complete(asyncio.wait([a, b], loop=loop)) + def test_as_completed_with_unused_timeout(self): + + def gen(): + yield + yield 0 + yield 0.01 + + loop = test_utils.TestLoop(gen) + self.addCleanup(loop.close) + + a = asyncio.sleep(0.01, 'a', loop=loop) + + @asyncio.coroutine + def foo(): + for f in asyncio.as_completed([a], timeout=1, loop=loop): + v = yield from f + self.assertEqual(v, 'a') + + res = loop.run_until_complete(asyncio.Task(foo(), loop=loop)) + def test_as_completed_reverse_wait(self): def gen(): -- cgit v0.12 From 1b0580b3203281e700ab3df10e6d117796f9d955 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 13 Feb 2014 09:24:37 +0100 Subject: ayncio, Tulip issue 129: BaseEventLoop.sock_connect() now raises an error if the address is not resolved (hostname instead of an IP address) for AF_INET and AF_INET6 address families. 
--- Doc/library/asyncio-eventloop.rst | 6 ++++++ Lib/asyncio/base_events.py | 25 +++++++++++++++++++++++++ Lib/asyncio/proactor_events.py | 9 ++++++++- Lib/asyncio/selector_events.py | 20 ++++++++------------ Lib/test/test_asyncio/test_events.py | 12 ++++++++++++ 5 files changed, 59 insertions(+), 13 deletions(-) diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index c9721b4..7760fcb 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -366,6 +366,12 @@ Low-level socket operations Connect to a remote socket at *address*. + The *address* must be already resolved to avoid the trap of hanging the + entire event loop when the address requires doing a DNS lookup. For + example, it must be an IP address, not an hostname, for + :py:data:`~socket.AF_INET` and :py:data:`~socket.AF_INET6` address families. + Use :meth:`getaddrinfo` to resolve the hostname asynchronously. + This method returns a :ref:`coroutine object `. .. seealso:: diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 7d12042..3bbf6b5 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -41,6 +41,31 @@ class _StopError(BaseException): """Raised to stop the event loop.""" +def _check_resolved_address(sock, address): + # Ensure that the address is already resolved to avoid the trap of hanging + # the entire event loop when the address requires doing a DNS lookup. + family = sock.family + if family not in (socket.AF_INET, socket.AF_INET6): + return + + host, port = address + type_mask = 0 + if hasattr(socket, 'SOCK_NONBLOCK'): + type_mask |= socket.SOCK_NONBLOCK + if hasattr(socket, 'SOCK_CLOEXEC'): + type_mask |= socket.SOCK_CLOEXEC + # Use getaddrinfo(AI_NUMERICHOST) to ensure that the address is + # already resolved. + try: + socket.getaddrinfo(host, port, + family=family, + type=(sock.type & ~type_mask), + proto=sock.proto, + flags=socket.AI_NUMERICHOST) + except socket.gaierror as err: + raise ValueError("address must be resolved (IP address), got %r: %s" + % (address, err)) + def _raise_stop_error(*args): raise _StopError diff --git a/Lib/asyncio/proactor_events.py b/Lib/asyncio/proactor_events.py index 74566b2..5de4d3d 100644 --- a/Lib/asyncio/proactor_events.py +++ b/Lib/asyncio/proactor_events.py @@ -404,7 +404,14 @@ class BaseProactorEventLoop(base_events.BaseEventLoop): return self._proactor.send(sock, data) def sock_connect(self, sock, address): - return self._proactor.connect(sock, address) + try: + base_events._check_resolved_address(sock, address) + except ValueError as err: + fut = futures.Future(loop=self) + fut.set_exception(err) + return fut + else: + return self._proactor.connect(sock, address) def sock_accept(self, sock): return self._proactor.accept(sock) diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py index 14231c5..10b0257 100644 --- a/Lib/asyncio/selector_events.py +++ b/Lib/asyncio/selector_events.py @@ -208,6 +208,8 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): return fut def _sock_recv(self, fut, registered, sock, n): + # _sock_recv() can add itself as an I/O callback if the operation can't + # be done immediatly. Don't use it directly, call sock_recv(). fd = sock.fileno() if registered: # Remove the callback early. It should be rare that the @@ -260,22 +262,16 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): def sock_connect(self, sock, address): """XXX""" - # That address better not require a lookup! 
We're not calling - # self.getaddrinfo() for you here. But verifying this is - # complicated; the socket module doesn't have a pattern for - # IPv6 addresses (there are too many forms, apparently). fut = futures.Future(loop=self) - self._sock_connect(fut, False, sock, address) + try: + base_events._check_resolved_address(sock, address) + except ValueError as err: + fut.set_exception(err) + else: + self._sock_connect(fut, False, sock, address) return fut def _sock_connect(self, fut, registered, sock, address): - # TODO: Use getaddrinfo() to look up the address, to avoid the - # trap of hanging the entire event loop when the address - # requires doing a DNS lookup. (OTOH, the caller should - # already have done this, so it would be nice if we could - # easily tell whether the address needs looking up or not. I - # know how to do this for IPv4, but IPv6 addresses have many - # syntaxes.) fd = sock.fileno() if registered: self.remove_writer(fd) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index d5d667a..4300ddd 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -1191,6 +1191,18 @@ class EventLoopTestsMixin: {'clock_resolution': self.loop._clock_resolution, 'selector': self.loop._selector.__class__.__name__}) + def test_sock_connect_address(self): + address = ('www.python.org', 80) + for family in (socket.AF_INET, socket.AF_INET6): + for sock_type in (socket.SOCK_STREAM, socket.SOCK_DGRAM): + sock = socket.socket(family, sock_type) + with sock: + connect = self.loop.sock_connect(sock, address) + with self.assertRaises(ValueError) as cm: + self.loop.run_until_complete(connect) + self.assertIn('address must be resolved', + str(cm.exception)) + class SubprocessTestsMixin: -- cgit v0.12 From 4c07377490c9001779ce737f4348a67bcd8dbf57 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 13 Feb 2014 10:46:05 +0100 Subject: Fix test_asyncio/test_events.py: skip IPv6 if IPv6 is disabled on the host --- Lib/test/test_asyncio/test_events.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index 4300ddd..3bb8dd8 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -1192,8 +1192,12 @@ class EventLoopTestsMixin: 'selector': self.loop._selector.__class__.__name__}) def test_sock_connect_address(self): + families = [socket.AF_INET] + if support.IPV6_ENABLED: + families.append(socket.AF_INET6) + address = ('www.python.org', 80) - for family in (socket.AF_INET, socket.AF_INET6): + for family in families: for sock_type in (socket.SOCK_STREAM, socket.SOCK_DGRAM): sock = socket.socket(family, sock_type) with sock: -- cgit v0.12 From 933538edde3bfe856f66d6c893ed4d3399769f94 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 13 Feb 2014 12:48:54 +0100 Subject: Issue #20526, #19466: Revert changes of issue #19466 which introduces a regression: don't clear anymore the state of Python threads early during the Python shutdown. 
--- Lib/test/test_threading.py | 49 ---------------------------------------------- Misc/NEWS | 4 ++++ Python/pythonrun.c | 20 +++++-------------- 3 files changed, 9 insertions(+), 64 deletions(-) diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py index ad363582..7170f60 100644 --- a/Lib/test/test_threading.py +++ b/Lib/test/test_threading.py @@ -616,51 +616,6 @@ class ThreadTests(BaseTestCase): t.join() self.assertRaises(ValueError, bs.release) - def test_locals_at_exit(self): - # Issue #19466: thread locals must not be deleted before destructors - # are called - rc, out, err = assert_python_ok("-c", """if 1: - import threading - - class Atexit: - def __del__(self): - print("thread_dict.atexit = %r" % thread_dict.atexit) - - thread_dict = threading.local() - thread_dict.atexit = "atexit" - - atexit = Atexit() - """) - self.assertEqual(out.rstrip(), b"thread_dict.atexit = 'atexit'") - - def test_warnings_at_exit(self): - # Issue #19466: try to call most destructors at Python shutdown before - # destroying Python thread states - filename = __file__ - rc, out, err = assert_python_ok("-Wd", "-c", """if 1: - import time - import threading - - def open_sleep(): - # a warning will be emitted when the open file will be - # destroyed (without being explicitly closed) while the daemon - # thread is destroyed - fileobj = open(%a, 'rb') - start_event.set() - time.sleep(60.0) - - start_event = threading.Event() - - thread = threading.Thread(target=open_sleep) - thread.daemon = True - thread.start() - - # wait until the thread started - start_event.wait() - """ % filename) - self.assertRegex(err.rstrip(), - b"^sys:1: ResourceWarning: unclosed file ") - @cpython_only def test_frame_tstate_tracing(self): # Issue #14432: Crash when a generator is created in a C thread that is @@ -786,10 +741,6 @@ class ThreadJoinOnShutdown(BaseTestCase): import sys import time import threading - import warnings - - # ignore "unclosed file ..." warnings - warnings.filterwarnings('ignore', '', ResourceWarning) thread_has_run = set() diff --git a/Misc/NEWS b/Misc/NEWS index 4c0d3a5..06acafa 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,10 @@ Release date: 2014-02-23 Core and Builtins ----------------- +- Issue #20526: Revert changes of issue #19466 which introduces a regression: + don't clear anymore the state of Python threads early during the Python + shutdown. + Library ------- diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 34a291f..e9947e9 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -582,13 +582,11 @@ Py_Finalize(void) _Py_Finalizing = tstate; initialized = 0; - /* Destroy the state of all threads except of the current thread: in - practice, only daemon threads should still be alive. Clear frames of - other threads to call objects destructor. Destructors will be called in - the current Python thread. Since _Py_Finalizing has been set, no other - Python threads can lock the GIL at this point (if they try, they will - exit immediately). */ - _PyThreadState_DeleteExcept(tstate); + /* Flush stdout+stderr */ + flush_std_files(); + + /* Disable signal handling */ + PyOS_FiniInterrupts(); /* Collect garbage. This may call finalizers; it's nice to call these * before all modules are destroyed. @@ -603,7 +601,6 @@ Py_Finalize(void) * XXX I haven't seen a real-life report of either of these. 
*/ PyGC_Collect(); - #ifdef COUNT_ALLOCS /* With COUNT_ALLOCS, it helps to run GC multiple times: each collection might release some types from the type @@ -611,13 +608,6 @@ Py_Finalize(void) while (PyGC_Collect() > 0) /* nothing */; #endif - - /* Flush stdout+stderr */ - flush_std_files(); - - /* Disable signal handling */ - PyOS_FiniInterrupts(); - /* Destroy all modules */ PyImport_Cleanup(); -- cgit v0.12 From e84fde981dd957f53ec81e2d6f14a7cf418a9925 Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Thu, 13 Feb 2014 19:22:14 -0500 Subject: set line and column numbers for keyword-only arg nodes (closes #20619) --- Lib/test/test_ast.py | 4 ++-- Misc/NEWS | 3 +++ Python/ast.c | 2 ++ 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index e69422a..f17c93f 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -37,7 +37,7 @@ exec_tests = [ # FunctionDef with kwargs "def f(**kwargs): pass", # FunctionDef with all kind of args - "def f(a, b=1, c=None, d=[], e={}, *args, **kwargs): pass", + "def f(a, b=1, c=None, d=[], e={}, *args, f=42, **kwargs): pass", # ClassDef "class C:pass", # ClassDef, new style class @@ -973,7 +973,7 @@ exec_results = [ ('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', (1, 6), 'a', None)], None, [], [], None, [('Num', (1, 8), 0)]), [('Pass', (1, 12))], [], None)]), ('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], ('arg', (1, 7), 'args', None), [], [], None, []), [('Pass', (1, 14))], [], None)]), ('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, [], [], ('arg', (1, 8), 'kwargs', None), []), [('Pass', (1, 17))], [], None)]), -('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', (1, 6), 'a', None), ('arg', (1, 9), 'b', None), ('arg', (1, 14), 'c', None), ('arg', (1, 22), 'd', None), ('arg', (1, 28), 'e', None)], ('arg', (1, 35), 'args', None), [], [], ('arg', (1, 43), 'kwargs', None), [('Num', (1, 11), 1), ('NameConstant', (1, 16), None), ('List', (1, 24), [], ('Load',)), ('Dict', (1, 30), [], [])]), [('Pass', (1, 52))], [], None)]), + ('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', (1, 6), 'a', None), ('arg', (1, 9), 'b', None), ('arg', (1, 14), 'c', None), ('arg', (1, 22), 'd', None), ('arg', (1, 28), 'e', None)], ('arg', (1, 35), 'args', None), [('arg', (1, 41), 'f', None)], [('Num', (1, 43), 42)], ('arg', (1, 49), 'kwargs', None), [('Num', (1, 11), 1), ('NameConstant', (1, 16), None), ('List', (1, 24), [], ('Load',)), ('Dict', (1, 30), [], [])]), [('Pass', (1, 58))], [], None)]), ('Module', [('ClassDef', (1, 0), 'C', [], [], None, None, [('Pass', (1, 8))], [])]), ('Module', [('ClassDef', (1, 0), 'C', [('Name', (1, 8), 'object', ('Load',))], [], None, None, [('Pass', (1, 17))], [])]), ('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, [], [], None, []), [('Return', (1, 8), ('Num', (1, 15), 1))], [], None)]), diff --git a/Misc/NEWS b/Misc/NEWS index 06acafa..edb4db4 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Release date: 2014-02-23 Core and Builtins ----------------- +- Issue #20619: Give the AST nodes of keyword-only arguments a column and line + number. + - Issue #20526: Revert changes of issue #19466 which introduces a regression: don't clear anymore the state of Python threads early during the Python shutdown. 
diff --git a/Python/ast.c b/Python/ast.c index 3bd24fd..5668755 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -1203,6 +1203,8 @@ handle_keywordonly_args(struct compiling *c, const node *n, int start, arg = arg(argname, annotation, c->c_arena); if (!arg) goto error; + arg->lineno = LINENO(ch); + arg->col_offset = ch->n_col_offset; asdl_seq_SET(kwonlyargs, j++, arg); i += 2; /* the name and the comma */ break; -- cgit v0.12 From c00fa6387d9a2ceb3b4c4fa94646279c8b70fb4f Mon Sep 17 00:00:00 2001 From: Nick Coghlan Date: Sat, 15 Feb 2014 09:14:54 +1000 Subject: Issue #19744: Handle missing SSL/TLS in ensurepip - now also allows POSIX installation with SSL/TLS missing - a goal for pip 1.6 is to allow local use without SSL/TLS --- Lib/ensurepip/__init__.py | 5 +++++ Lib/test/test_ensurepip.py | 8 ++++++++ Misc/NEWS | 4 ++++ 3 files changed, 17 insertions(+) diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index e8d6abe..3f4e449 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -144,6 +144,11 @@ def _uninstall_helper(*, verbosity=0): def _main(argv=None): + if ssl is None: + print("Ignoring ensurepip failure: {}".format(_MISSING_SSL_MESSAGE), + file=sys.stderr) + return + import argparse parser = argparse.ArgumentParser(prog="python -m ensurepip") parser.add_argument( diff --git a/Lib/test/test_ensurepip.py b/Lib/test/test_ensurepip.py index 6ddf8f6..8644a65 100644 --- a/Lib/test/test_ensurepip.py +++ b/Lib/test/test_ensurepip.py @@ -281,12 +281,20 @@ class TestMissingSSL(EnsurepipMixin, unittest.TestCase): self.run_pip.assert_not_called() self.assertIn("PIP_THIS_SHOULD_STAY", self.os_environ) + def test_main_exits_early_with_warning(self): + with test.support.captured_stderr() as stderr: + ensurepip_no_ssl._main(["--version"]) + warning = stderr.getvalue().strip() + self.assertTrue(warning.endswith("requires SSL/TLS"), warning) + self.run_pip.assert_not_called() + # Basic testing of the main functions and their argument parsing EXPECTED_VERSION_OUTPUT = "pip " + ensurepip._PIP_VERSION class TestBootstrappingMainFunction(EnsurepipMixin, unittest.TestCase): + @requires_usable_pip def test_bootstrap_version(self): with test.support.captured_stdout() as stdout: with self.assertRaises(SystemExit): diff --git a/Misc/NEWS b/Misc/NEWS index edb4db4..d2173c2 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -20,6 +20,10 @@ Core and Builtins Library ------- +- Issue #19744: the ensurepip installation step now just prints a warning to + stderr rather than failing outright if SSL/TLS is unavailable. This allows + local installation of POSIX builds without SSL/TLS support. + - Issue #20594: Avoid name clash with the libc function posix_close. 
-- cgit v0.12 From 2626fab4c7e1cc3daffe2071c8894c54dac8680a Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Sun, 16 Feb 2014 13:49:16 -0500 Subject: look up __getnewargs__ and __getnewargs_ex__ on the object type (#16251) --- Lib/test/test_descr.py | 14 ++++++++++++++ Misc/NEWS | 3 +++ Objects/typeobject.c | 18 ++++++------------ 3 files changed, 23 insertions(+), 12 deletions(-) diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py index 508e02b..2a9e329 100644 --- a/Lib/test/test_descr.py +++ b/Lib/test/test_descr.py @@ -4701,6 +4701,20 @@ class PicklingTests(unittest.TestCase): for proto in protocols: self._check_reduce(proto, obj, listitems=list(obj)) + def test_special_method_lookup(self): + protocols = range(pickle.HIGHEST_PROTOCOL + 1) + class Picky: + def __getstate__(self): + return {} + + def __getattr__(self, attr): + if attr in ("__getnewargs__", "__getnewargs_ex__"): + raise AssertionError(attr) + return None + for protocol in protocols: + state = {} if protocol >= 2 else None + self._check_reduce(protocol, Picky(), state=state) + def _assert_is_copy(self, obj, objcopy, msg=None): """Utility method to verify if two objects are copies of each others. """ diff --git a/Misc/NEWS b/Misc/NEWS index d2173c2..4a717fc 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Release date: 2014-02-23 Core and Builtins ----------------- +- Issue #20261: In pickle, lookup __getnewargs__ and __getnewargs_ex__ on the + type of the object. + - Issue #20619: Give the AST nodes of keyword-only arguments a column and line number. diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 449b6d6..f58960d 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -3719,7 +3719,7 @@ _PyObject_GetNewArguments(PyObject *obj, PyObject **args, PyObject **kwargs) /* We first attempt to fetch the arguments for __new__ by calling __getnewargs_ex__ on the object. */ - getnewargs_ex = _PyObject_GetAttrId(obj, &PyId___getnewargs_ex__); + getnewargs_ex = _PyObject_LookupSpecial(obj, &PyId___getnewargs_ex__); if (getnewargs_ex != NULL) { PyObject *newargs = PyObject_CallObject(getnewargs_ex, NULL); Py_DECREF(getnewargs_ex); @@ -3766,16 +3766,13 @@ _PyObject_GetNewArguments(PyObject *obj, PyObject **args, PyObject **kwargs) return -1; } return 0; - } else { - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) { - return -1; - } - PyErr_Clear(); + } else if (PyErr_Occurred()) { + return -1; } /* The object does not have __getnewargs_ex__ so we fallback on using __getnewargs__ instead. */ - getnewargs = _PyObject_GetAttrId(obj, &PyId___getnewargs__); + getnewargs = _PyObject_LookupSpecial(obj, &PyId___getnewargs__); if (getnewargs != NULL) { *args = PyObject_CallObject(getnewargs, NULL); Py_DECREF(getnewargs); @@ -3791,11 +3788,8 @@ _PyObject_GetNewArguments(PyObject *obj, PyObject **args, PyObject **kwargs) } *kwargs = NULL; return 0; - } else { - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) { - return -1; - } - PyErr_Clear(); + } else if (PyErr_Occurred()) { + return -1; } /* The object does not have __getnewargs_ex__ and __getnewargs__. 
This may -- cgit v0.12 From 34c15400092bba37ea36e155d151cfcbfa846db7 Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Sun, 16 Feb 2014 14:17:28 -0500 Subject: merge backout for #20621 --- Lib/test/test_zipimport.py | 104 +++-------------- Modules/zipimport.c | 284 ++++++++------------------------------------- 2 files changed, 67 insertions(+), 321 deletions(-) diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py index bf058bc..1e351c8 100644 --- a/Lib/test/test_zipimport.py +++ b/Lib/test/test_zipimport.py @@ -71,27 +71,6 @@ class ImportHooksBaseTestCase(unittest.TestCase): support.modules_cleanup(*self.modules_before) -def _write_zip_package(zipname, files, - data_to_prepend=b"", compression=ZIP_STORED): - z = ZipFile(zipname, "w") - try: - for name, (mtime, data) in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = compression - z.writestr(zinfo, data) - finally: - z.close() - - if data_to_prepend: - # Prepend data to the start of the zipfile - with open(zipname, "rb") as f: - zip_data = f.read() - - with open(zipname, "wb") as f: - f.write(data_to_prepend) - f.write(zip_data) - - class UncompressedZipImportTestCase(ImportHooksBaseTestCase): compression = ZIP_STORED @@ -104,9 +83,23 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase): ImportHooksBaseTestCase.setUp(self) def doTest(self, expected_ext, files, *modules, **kw): - _write_zip_package(TEMP_ZIP, files, data_to_prepend=kw.get("stuff"), - compression=self.compression) + z = ZipFile(TEMP_ZIP, "w") try: + for name, (mtime, data) in files.items(): + zinfo = ZipInfo(name, time.localtime(mtime)) + zinfo.compress_type = self.compression + z.writestr(zinfo, data) + z.close() + + stuff = kw.get("stuff", None) + if stuff is not None: + # Prepend 'stuff' to the start of the zipfile + with open(TEMP_ZIP, "rb") as f: + data = f.read() + with open(TEMP_ZIP, "wb") as f: + f.write(stuff) + f.write(data) + sys.path.insert(0, TEMP_ZIP) mod = __import__(".".join(modules), globals(), locals(), @@ -121,8 +114,7 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase): self.assertEqual(file, os.path.join(TEMP_ZIP, *modules) + expected_ext) finally: - while TEMP_ZIP in sys.path: - sys.path.remove(TEMP_ZIP) + z.close() os.remove(TEMP_ZIP) def testAFakeZlib(self): @@ -430,67 +422,10 @@ class CompressedZipImportTestCase(UncompressedZipImportTestCase): compression = ZIP_DEFLATED -class ZipFileModifiedAfterImportTestCase(ImportHooksBaseTestCase): - def setUp(self): - zipimport._zip_directory_cache.clear() - zipimport._zip_stat_cache.clear() - ImportHooksBaseTestCase.setUp(self) - - def tearDown(self): - ImportHooksBaseTestCase.tearDown(self) - if os.path.exists(TEMP_ZIP): - os.remove(TEMP_ZIP) - - def testZipFileChangesAfterFirstImport(self): - """Alter the zip file after caching its index and try an import.""" - packdir = TESTPACK + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir + TESTMOD + ".py": (NOW, "test_value = 38\n"), - "ziptest_a.py": (NOW, "test_value = 23\n"), - "ziptest_b.py": (NOW, "test_value = 42\n"), - "ziptest_c.py": (NOW, "test_value = 1337\n")} - zipfile_path = TEMP_ZIP - _write_zip_package(zipfile_path, files) - self.assertTrue(os.path.exists(zipfile_path)) - sys.path.insert(0, zipfile_path) - - # Import something out of the zipfile and confirm it is correct. - testmod = __import__(TESTPACK + "." 
+ TESTMOD, - globals(), locals(), ["__dummy__"]) - self.assertEqual(testmod.test_value, 38) - # Import something else out of the zipfile and confirm it is correct. - ziptest_b = __import__("ziptest_b", globals(), locals(), ["test_value"]) - self.assertEqual(ziptest_b.test_value, 42) - - # Truncate and fill the zip file with non-zip garbage. - with open(zipfile_path, "rb") as orig_zip_file: - orig_zip_file_contents = orig_zip_file.read() - with open(zipfile_path, "wb") as byebye_valid_zip_file: - byebye_valid_zip_file.write(b"Tear down this wall!\n"*1987) - # Now that the zipfile has been replaced, import something else from it - # which should fail as the file contents are now garbage. - with self.assertRaises(ImportError): - ziptest_a = __import__("ziptest_a", globals(), locals(), - ["test_value"]) - self.assertEqual(ziptest_a.test_value, 23) - - # Now lets make it a valid zipfile that has some garbage at the start. - # This alters all of the offsets within the file - with open(zipfile_path, "wb") as new_zip_file: - new_zip_file.write(b"X"*1991) # The year Python was created. - new_zip_file.write(orig_zip_file_contents) - - # Now that the zip file has been "restored" to a valid but different - # zipfile the zipimporter should *successfully* re-read the new zip - # file's end of file central index and be able to import from it again. - ziptest_c = __import__("ziptest_c", globals(), locals(), ["test_value"]) - self.assertEqual(ziptest_c.test_value, 1337) - - class BadFileZipImportTestCase(unittest.TestCase): def assertZipFailure(self, filename): - with self.assertRaises(zipimport.ZipImportError): - zipimport.zipimporter(filename) + self.assertRaises(zipimport.ZipImportError, + zipimport.zipimporter, filename) def testNoFile(self): self.assertZipFailure('AdfjdkFJKDFJjdklfjs') @@ -564,7 +499,6 @@ def test_main(): UncompressedZipImportTestCase, CompressedZipImportTestCase, BadFileZipImportTestCase, - ZipFileModifiedAfterImportTestCase, ) finally: support.unlink(TESTMOD) diff --git a/Modules/zipimport.c b/Modules/zipimport.c index 9f662f5..8fe9195 100644 --- a/Modules/zipimport.c +++ b/Modules/zipimport.c @@ -49,16 +49,10 @@ struct _zipimporter { static PyObject *ZipImportError; /* read_directory() cache */ static PyObject *zip_directory_cache = NULL; -static PyObject *zip_stat_cache = NULL; -/* posix.fstat or nt.fstat function. Used due to posixmodule.c's - * superior fstat implementation over libc's on Windows. 
*/ -static PyObject *fstat_function = NULL; /* posix.fstat() or nt.fstat() */ /* forward decls */ -static FILE *fopen_rb_and_stat(PyObject *path, PyObject **py_stat_p); -static FILE *safely_reopen_archive(ZipImporter *self); -static PyObject *read_directory(FILE *fp, PyObject *archive); -static PyObject *get_data(FILE *fp, PyObject *archive, PyObject *toc_entry); +static PyObject *read_directory(PyObject *archive); +static PyObject *get_data(PyObject *archive, PyObject *toc_entry); static PyObject *get_module_code(ZipImporter *self, PyObject *fullname, int *p_ispackage, PyObject **p_modpath); @@ -137,39 +131,11 @@ zipimporter_init(ZipImporter *self, PyObject *args, PyObject *kwds) files = PyDict_GetItem(zip_directory_cache, filename); if (files == NULL) { - PyObject *zip_stat = NULL; - FILE *fp = fopen_rb_and_stat(filename, &zip_stat); - if (fp == NULL) { - if (!PyErr_Occurred()) - PyErr_Format(ZipImportError, "can't open Zip file: %R", - filename); - - Py_XDECREF(zip_stat); + files = read_directory(filename); + if (files == NULL) goto error; - } - - if (Py_VerboseFlag) - PySys_FormatStderr("# zipimport: %U not cached, " - "reading TOC.\n", filename); - - files = read_directory(fp, filename); - fclose(fp); - if (files == NULL) { - Py_XDECREF(zip_stat); - goto error; - } - if (PyDict_SetItem(zip_directory_cache, filename, files) != 0) { - Py_DECREF(files); - Py_XDECREF(zip_stat); + if (PyDict_SetItem(zip_directory_cache, filename, files) != 0) goto error; - } - if (zip_stat && PyDict_SetItem(zip_stat_cache, filename, - zip_stat) != 0) { - Py_DECREF(files); - Py_DECREF(zip_stat); - goto error; - } - Py_XDECREF(zip_stat); } else Py_INCREF(files); @@ -594,8 +560,7 @@ zipimporter_get_data(PyObject *obj, PyObject *args) { ZipImporter *self = (ZipImporter *)obj; PyObject *path, *key; - FILE *fp; - PyObject *toc_entry, *data; + PyObject *toc_entry; Py_ssize_t path_start, path_len, len; if (!PyArg_ParseTuple(args, "U:zipimporter.get_data", &path)) @@ -623,23 +588,15 @@ zipimporter_get_data(PyObject *obj, PyObject *args) key = PyUnicode_Substring(path, path_start, path_len); if (key == NULL) goto error; - - fp = safely_reopen_archive(self); - if (fp == NULL) - goto error; - toc_entry = PyDict_GetItem(self->files, key); if (toc_entry == NULL) { PyErr_SetFromErrnoWithFilenameObject(PyExc_IOError, key); Py_DECREF(key); - fclose(fp); goto error; } Py_DECREF(key); Py_DECREF(path); - data = get_data(fp, self->archive, toc_entry); - fclose(fp); - return data; + return get_data(self->archive, toc_entry); error: Py_DECREF(path); return NULL; @@ -664,7 +621,6 @@ zipimporter_get_source(PyObject *obj, PyObject *args) PyObject *toc_entry; PyObject *fullname, *subname, *path, *fullpath; enum zi_module_info mi; - FILE *fp; if (!PyArg_ParseTuple(args, "U:zipimporter.get_source", &fullname)) return NULL; @@ -694,18 +650,11 @@ zipimporter_get_source(PyObject *obj, PyObject *args) if (fullpath == NULL) return NULL; - fp = safely_reopen_archive(self); - if (fp == NULL) { - Py_DECREF(fullpath); - return NULL; - } - toc_entry = PyDict_GetItem(self->files, fullpath); Py_DECREF(fullpath); if (toc_entry != NULL) { PyObject *res, *bytes; - bytes = get_data(fp, self->archive, toc_entry); - fclose(fp); + bytes = get_data(self->archive, toc_entry); if (bytes == NULL) return NULL; res = PyUnicode_FromStringAndSize(PyBytes_AS_STRING(bytes), @@ -713,10 +662,10 @@ zipimporter_get_source(PyObject *obj, PyObject *args) Py_DECREF(bytes); return res; } - fclose(fp); /* we have the module, but no source */ - Py_RETURN_NONE; + 
Py_INCREF(Py_None); + return Py_None; } PyDoc_STRVAR(doc_find_module, @@ -882,133 +831,10 @@ get_long(unsigned char *buf) { return x; } -/* Return 1 if objects a and b fail a Py_EQ test for an attr. */ -static int -compare_obj_attr_strings(PyObject *obj_a, PyObject *obj_b, char *attr_name) -{ - int problem = 0; - PyObject *attr_a = PyObject_GetAttrString(obj_a, attr_name); - PyObject *attr_b = PyObject_GetAttrString(obj_b, attr_name); - if (attr_a == NULL || attr_b == NULL) - problem = 1; - else - problem = (PyObject_RichCompareBool(attr_a, attr_b, Py_EQ) != 1); - Py_XDECREF(attr_a); - Py_XDECREF(attr_b); - return problem; -} - /* - * Returns an open FILE * on success. - * Returns NULL on error with the Python error context set. - */ -static FILE * -safely_reopen_archive(ZipImporter *self) -{ - FILE *fp; - PyObject *stat_now = NULL; - - fp = fopen_rb_and_stat(self->archive, &stat_now); - if (!fp) { - PyErr_Format(ZipImportError, - "zipimport: can not open file %U", self->archive); - Py_XDECREF(stat_now); - return NULL; - } + read_directory(archive) -> files dict (new reference) - if (stat_now != NULL) { - int problem = 0; - PyObject *files; - PyObject *prev_stat = PyDict_GetItem(zip_stat_cache, self->archive); - /* Test stat_now vs the old cached stat on some key attributes. */ - if (prev_stat != NULL) { - problem = compare_obj_attr_strings(prev_stat, stat_now, - "st_ino"); - problem |= compare_obj_attr_strings(prev_stat, stat_now, - "st_size"); - problem |= compare_obj_attr_strings(prev_stat, stat_now, - "st_mtime"); - } else { - if (Py_VerboseFlag) - PySys_FormatStderr("# zipimport: no stat data for %U!\n", - self->archive); - problem = 1; - } - - if (problem) { - if (Py_VerboseFlag) - PySys_FormatStderr("# zipimport: %U modified since last" - " import, rereading TOC.\n", self->archive); - files = read_directory(fp, self->archive); - if (files == NULL) { - Py_DECREF(stat_now); - fclose(fp); - return NULL; - } - if (PyDict_SetItem(zip_directory_cache, self->archive, - files) != 0) { - Py_DECREF(files); - Py_DECREF(stat_now); - fclose(fp); - return NULL; - } - if (stat_now && PyDict_SetItem(zip_stat_cache, self->archive, - stat_now) != 0) { - Py_DECREF(files); - Py_DECREF(stat_now); - fclose(fp); - return NULL; - } - Py_XDECREF(self->files); /* free the old value. */ - self->files = files; - } - Py_DECREF(stat_now); - } /* stat succeeded */ - - return fp; -} - -/* - fopen_rb_and_stat(path, &py_stat) -> FILE * - - Opens path in "rb" mode and populates the Python py_stat stat_result - with information about the opened file. *py_stat may not be changed - if there is no fstat_function or if fstat_function fails. - - Returns NULL and does nothing to *py_stat if the open failed. -*/ -static FILE * -fopen_rb_and_stat(PyObject *path, PyObject **py_stat_p) -{ - FILE *fp; - assert(py_stat_p != NULL); - assert(*py_stat_p == NULL); - - fp = _Py_fopen_obj(path, "rb"); - if (fp == NULL) { - if (!PyErr_Occurred()) - PyErr_Format(ZipImportError, - "zipimport: can not open file %U", path); - return NULL; - } - - if (fstat_function) { - PyObject *stat_result = PyObject_CallFunction(fstat_function, - "i", fileno(fp)); - if (stat_result == NULL) { - PyErr_Clear(); /* We can function without it. 
*/ - } else { - *py_stat_p = stat_result; - } - } - - return fp; -} - -/* - read_directory(fp, archive) -> files dict (new reference) - - Given an open Zip archive, build a dict, mapping file names + Given a path to a Zip archive, build a dict, mapping file names (local to the archive, using SEP as a separator) to toc entries. A toc_entry is a tuple: @@ -1028,9 +854,10 @@ fopen_rb_and_stat(PyObject *path, PyObject **py_stat_p) data_size and file_offset are 0. */ static PyObject * -read_directory(FILE *fp, PyObject *archive) +read_directory(PyObject *archive) { PyObject *files = NULL; + FILE *fp; unsigned short flags; short compress, time, date, name_size; long crc, data_size, file_size, header_size; @@ -1046,18 +873,27 @@ read_directory(FILE *fp, PyObject *archive) const char *charset; int bootstrap; - assert(fp != NULL); + fp = _Py_fopen_obj(archive, "rb"); + if (fp == NULL) { + if (!PyErr_Occurred()) + PyErr_Format(ZipImportError, "can't open Zip file: %R", archive); + return NULL; + } + if (fseek(fp, -22, SEEK_END) == -1) { + fclose(fp); PyErr_Format(ZipImportError, "can't read Zip file: %R", archive); return NULL; } header_position = ftell(fp); if (fread(endof_central_dir, 1, 22, fp) != 22) { + fclose(fp); PyErr_Format(ZipImportError, "can't read Zip file: %R", archive); return NULL; } if (get_long((unsigned char *)endof_central_dir) != 0x06054B50) { /* Bad: End of Central Dir signature */ + fclose(fp); PyErr_Format(ZipImportError, "not a Zip file: %R", archive); return NULL; } @@ -1164,16 +1000,19 @@ read_directory(FILE *fp, PyObject *archive) goto error; count++; } + fclose(fp); if (Py_VerboseFlag) PySys_FormatStderr("# zipimport: found %ld names in %R\n", count, archive); return files; file_error: + fclose(fp); Py_XDECREF(files); Py_XDECREF(nameobj); PyErr_Format(ZipImportError, "can't read Zip file: %R", archive); return NULL; error: + fclose(fp); Py_XDECREF(files); Py_XDECREF(nameobj); return NULL; @@ -1212,13 +1051,14 @@ get_decompress_func(void) return decompress; } -/* Given a FILE* to a Zip file and a toc_entry, return the (uncompressed) +/* Given a path to a Zip file and a toc_entry, return the (uncompressed) data as a new reference. 
*/ static PyObject * -get_data(FILE *fp, PyObject *archive, PyObject *toc_entry) +get_data(PyObject *archive, PyObject *toc_entry) { PyObject *raw_data, *data = NULL, *decompress; char *buf; + FILE *fp; int err; Py_ssize_t bytes_read = 0; long l; @@ -1232,8 +1072,17 @@ get_data(FILE *fp, PyObject *archive, PyObject *toc_entry) return NULL; } + fp = _Py_fopen_obj(archive, "rb"); + if (!fp) { + if (!PyErr_Occurred()) + PyErr_Format(PyExc_IOError, + "zipimport: can not open file %U", archive); + return NULL; + } + /* Check to make sure the local file header is correct */ if (fseek(fp, file_offset, 0) == -1) { + fclose(fp); PyErr_Format(ZipImportError, "can't read Zip file: %R", archive); return NULL; } @@ -1245,9 +1094,11 @@ get_data(FILE *fp, PyObject *archive, PyObject *toc_entry) PyErr_Format(ZipImportError, "bad local file header in %U", archive); + fclose(fp); return NULL; } if (fseek(fp, file_offset + 26, 0) == -1) { + fclose(fp); PyErr_Format(ZipImportError, "can't read Zip file: %R", archive); return NULL; } @@ -1255,6 +1106,7 @@ get_data(FILE *fp, PyObject *archive, PyObject *toc_entry) l = 30 + PyMarshal_ReadShortFromFile(fp) + PyMarshal_ReadShortFromFile(fp); /* local header size */ if (PyErr_Occurred()) { + fclose(fp); return NULL; } file_offset += l; /* Start of file data */ @@ -1265,6 +1117,7 @@ get_data(FILE *fp, PyObject *archive, PyObject *toc_entry) raw_data = PyBytes_FromStringAndSize((char *)NULL, bytes_size); if (raw_data == NULL) { + fclose(fp); return NULL; } buf = PyBytes_AsString(raw_data); @@ -1273,9 +1126,11 @@ get_data(FILE *fp, PyObject *archive, PyObject *toc_entry) if (err == 0) { bytes_read = fread(buf, 1, data_size, fp); } else { + fclose(fp); PyErr_Format(ZipImportError, "can't read Zip file: %R", archive); return NULL; } + fclose(fp); if (err || bytes_read != data_size) { PyErr_SetString(PyExc_IOError, "zipimport: can't read data"); @@ -1496,12 +1351,12 @@ get_mtime_of_source(ZipImporter *self, PyObject *path) /* Return the code object for the module named by 'fullname' from the Zip archive as a new reference. 
*/ static PyObject * -get_code_from_data(ZipImporter *self, FILE *fp, int ispackage, int isbytecode, +get_code_from_data(ZipImporter *self, int ispackage, int isbytecode, time_t mtime, PyObject *toc_entry) { PyObject *data, *modpath, *code; - data = get_data(fp, self->archive, toc_entry); + data = get_data(self->archive, toc_entry); if (data == NULL) return NULL; @@ -1523,7 +1378,6 @@ get_module_code(ZipImporter *self, PyObject *fullname, PyObject *code = NULL, *toc_entry, *subname; PyObject *path, *fullpath = NULL; struct st_zip_searchorder *zso; - FILE *fp; subname = get_subname(fullname); if (subname == NULL) @@ -1534,12 +1388,6 @@ get_module_code(ZipImporter *self, PyObject *fullname, if (path == NULL) return NULL; - fp = safely_reopen_archive(self); - if (fp == NULL) { - Py_DECREF(path); - return NULL; - } - for (zso = zip_searchorder; *zso->suffix; zso++) { code = NULL; @@ -1550,7 +1398,6 @@ get_module_code(ZipImporter *self, PyObject *fullname, if (Py_VerboseFlag > 1) PySys_FormatStderr("# trying %U%c%U\n", self->archive, (int)SEP, fullpath); - toc_entry = PyDict_GetItem(self->files, fullpath); if (toc_entry != NULL) { time_t mtime = 0; @@ -1566,7 +1413,7 @@ get_module_code(ZipImporter *self, PyObject *fullname, Py_CLEAR(fullpath); if (p_ispackage != NULL) *p_ispackage = ispackage; - code = get_code_from_data(self, fp, ispackage, + code = get_code_from_data(self, ispackage, isbytecode, mtime, toc_entry); if (code == Py_None) { @@ -1586,7 +1433,6 @@ get_module_code(ZipImporter *self, PyObject *fullname, } PyErr_Format(ZipImportError, "can't find module %R", fullname); exit: - fclose(fp); Py_DECREF(path); Py_XDECREF(fullpath); return code; @@ -1604,8 +1450,6 @@ This module exports three objects:\n\ subclass of ImportError, so it can be caught as ImportError, too.\n\ - _zip_directory_cache: a dict, mapping archive paths to zip directory\n\ info dicts, as used in zipimporter._files.\n\ -- _zip_stat_cache: a dict, mapping archive paths to stat_result\n\ - info for the .zip the last time anything was imported from it.\n\ \n\ It is usually not needed to use the zipimport module explicitly; it is\n\ used by the builtin import mechanism for sys.path items that are paths\n\ @@ -1665,7 +1509,6 @@ PyInit_zipimport(void) (PyObject *)&ZipImporter_Type) < 0) return NULL; - Py_XDECREF(zip_directory_cache); /* Avoid embedded interpreter leaks. */ zip_directory_cache = PyDict_New(); if (zip_directory_cache == NULL) return NULL; @@ -1673,36 +1516,5 @@ PyInit_zipimport(void) if (PyModule_AddObject(mod, "_zip_directory_cache", zip_directory_cache) < 0) return NULL; - - Py_XDECREF(zip_stat_cache); /* Avoid embedded interpreter leaks. */ - zip_stat_cache = PyDict_New(); - if (zip_stat_cache == NULL) - return NULL; - Py_INCREF(zip_stat_cache); - if (PyModule_AddObject(mod, "_zip_stat_cache", zip_stat_cache) < 0) - return NULL; - - { - /* We cannot import "os" here as that is a .py/.pyc file that could - * live within a zipped up standard library. Import the posix or nt - * builtin that provides the fstat() function we want instead. */ - PyObject *os_like_module; - Py_CLEAR(fstat_function); /* Avoid embedded interpreter leaks. */ - os_like_module = PyImport_ImportModule("posix"); - if (os_like_module == NULL) { - PyErr_Clear(); - os_like_module = PyImport_ImportModule("nt"); - } - if (os_like_module != NULL) { - fstat_function = PyObject_GetAttrString(os_like_module, "fstat"); - Py_DECREF(os_like_module); - } - if (fstat_function == NULL) { - PyErr_Clear(); /* non-fatal, we'll go on without it. 
*/ - if (Py_VerboseFlag) - PySys_WriteStderr("# zipimport unable to use os.fstat().\n"); - } - } - return mod; } -- cgit v0.12 From a91ff1423fbd57e7bd0853ac494d8cdea1fb5bb9 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Sun, 16 Feb 2014 23:53:38 +0100 Subject: Issue #20616: Add a format() method to tracemalloc.Traceback. --- Doc/library/tracemalloc.rst | 31 ++++++++++++++++++++++++------- Lib/test/test_tracemalloc.py | 20 ++++++++++++++++++++ Lib/tracemalloc.py | 13 +++++++++++++ Misc/NEWS | 2 ++ 4 files changed, 59 insertions(+), 7 deletions(-) diff --git a/Doc/library/tracemalloc.rst b/Doc/library/tracemalloc.rst index e49d4ca..b79c5f6 100644 --- a/Doc/library/tracemalloc.rst +++ b/Doc/library/tracemalloc.rst @@ -118,7 +118,6 @@ Get the traceback of a memory block Code to display the traceback of the biggest memory block:: - import linecache import tracemalloc # Store 25 frames @@ -132,12 +131,8 @@ Code to display the traceback of the biggest memory block:: # pick the biggest memory block stat = top_stats[0] print("%s memory blocks: %.1f KiB" % (stat.count, stat.size / 1024)) - for frame in stat.traceback: - print(' File "%s", line %s' % (frame.filename, frame.lineno)) - line = linecache.getline(frame.filename, frame.lineno) - line = line.strip() - if line: - print(' ' + line) + for line in stat.traceback.format(): + print(line) Example of output of the Python test suite (traceback limited to 25 frames):: @@ -602,4 +597,26 @@ Traceback The :attr:`Trace.traceback` attribute is an instance of :class:`Traceback` instance. + .. method:: format(limit=None) + + Format the traceback as a list of lines with newlines. Use the + :mod:`linecache` module to retrieve lines from the source code. If + *limit* is set, only format the *limit* most recent frames. + + Similar to the :func:`traceback.format_tb` function, except that + :meth:`format` does not include newlines. 
+ + Example:: + + print("Traceback (most recent call first):") + for line in traceback: + print(line) + + Output:: + + Traceback (most recent call first): + File "test.py", line 9 + obj = Object() + File "test.py", line 12 + tb = tracemalloc.get_object_traceback(f()) diff --git a/Lib/test/test_tracemalloc.py b/Lib/test/test_tracemalloc.py index 3d2333f..d1e5aef 100644 --- a/Lib/test/test_tracemalloc.py +++ b/Lib/test/test_tracemalloc.py @@ -510,6 +510,26 @@ class TestSnapshot(unittest.TestCase): self.assertEqual(traceback[:2], (traceback[0], traceback[1])) + def test_format_traceback(self): + snapshot, snapshot2 = create_snapshots() + def getline(filename, lineno): + return ' <%s, %s>' % (filename, lineno) + with unittest.mock.patch('tracemalloc.linecache.getline', + side_effect=getline): + tb = snapshot.traces[0].traceback + self.assertEqual(tb.format(), + [' File "a.py", line 2', + ' ', + ' File "b.py", line 4', + ' ']) + + self.assertEqual(tb.format(limit=1), + [' File "a.py", line 2', + ' ']) + + self.assertEqual(tb.format(limit=-1), + []) + class TestFilters(unittest.TestCase): maxDiff = 2048 diff --git a/Lib/tracemalloc.py b/Lib/tracemalloc.py index b075946..6f0a234 100644 --- a/Lib/tracemalloc.py +++ b/Lib/tracemalloc.py @@ -1,6 +1,7 @@ from collections import Sequence from functools import total_ordering import fnmatch +import linecache import os.path import pickle @@ -205,6 +206,18 @@ class Traceback(Sequence): def __repr__(self): return "" % (tuple(self),) + def format(self, limit=None): + lines = [] + if limit is not None and limit < 0: + return lines + for frame in self[:limit]: + lines.append(' File "%s", line %s' + % (frame.filename, frame.lineno)) + line = linecache.getline(frame.filename, frame.lineno).strip() + if line: + lines.append(' %s' % line) + return lines + def get_object_traceback(obj): """ diff --git a/Misc/NEWS b/Misc/NEWS index 4a717fc..48b2b89 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -23,6 +23,8 @@ Core and Builtins Library ------- +- Issue #20616: Add a format() method to tracemalloc.Traceback. + - Issue #19744: the ensurepip installation step now just prints a warning to stderr rather than failing outright if SSL/TLS is unavailable. This allows local installation of POSIX builds without SSL/TLS support. -- cgit v0.12 From 04e05da1b3ae3b3f1f7c2535860e156697c5b696 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Mon, 17 Feb 2014 10:54:30 +0100 Subject: Close #20652: asyncio doc: close the event loop in run_forever() example. Fix also typo. Patch written by Vajrasky Kok. 
--- Doc/library/asyncio-task.rst | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index 83d9742..e7ef172 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -229,7 +229,7 @@ Example combining a :class:`Future` and a :ref:`coroutine function @asyncio.coroutine def slow_operation(future): yield from asyncio.sleep(1) - future.set_result('Future in done!') + future.set_result('Future is done!') loop = asyncio.get_event_loop() future = asyncio.Future() @@ -261,7 +261,7 @@ flow:: @asyncio.coroutine def slow_operation(future): yield from asyncio.sleep(1) - future.set_result('Future in done!') + future.set_result('Future is done!') def got_result(future): print(future.result()) @@ -271,7 +271,10 @@ flow:: future = asyncio.Future() asyncio.Task(slow_operation(future)) future.add_done_callback(got_result) - loop.run_forever() + try: + loop.run_forever() + finally: + loop.close() In this example, the future is responsible to display the result and to stop the loop. -- cgit v0.12 From 79c3bcf2f7bdb88bbe1edbee313c4b6d31887ec2 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 18 Feb 2014 00:11:21 +0100 Subject: Issue #20655: Fix test_asyncio, run also subprocess tests. Patch written by Vajrasky Kok. --- Lib/test/test_asyncio/tests.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/Lib/test/test_asyncio/tests.txt b/Lib/test/test_asyncio/tests.txt index 30609cd..c69582b 100644 --- a/Lib/test/test_asyncio/tests.txt +++ b/Lib/test/test_asyncio/tests.txt @@ -6,6 +6,7 @@ test_asyncio.test_proactor_events test_asyncio.test_queues test_asyncio.test_selector_events test_asyncio.test_streams +test_asyncio.test_subprocess test_asyncio.test_tasks test_asyncio.test_transports test_asyncio.test_unix_events -- cgit v0.12 From 3bc3aaab1360f27a8208b98cc54a35a0a2542715 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 18 Feb 2014 01:30:03 +0100 Subject: Issue #20667: test_asyncio: Skip KqueueEventLoopTests.test_read_pty_output() on OpenBSD older than 5.5 --- Lib/test/support/__init__.py | 10 ++++++++++ Lib/test/test_asyncio/test_events.py | 3 +++ 2 files changed, 13 insertions(+) diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 9857f9d..05e58b2 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -78,6 +78,7 @@ __all__ = [ "create_empty_file", "can_symlink", "fs_is_case_insensitive", # unittest "is_resource_enabled", "requires", "requires_freebsd_version", + "requires_openbsd_version", "requires_linux_version", "requires_mac_ver", "check_syntax_error", "TransientResource", "time_out", "socket_peer_reset", "ioerror_peer_reset", "transient_internet", "BasicTestRunner", "run_unittest", "run_doctest", @@ -467,6 +468,15 @@ def requires_freebsd_version(*min_version): """ return _requires_unix_version('FreeBSD', min_version) +def requires_openbsd_version(*min_version): + """Decorator raising SkipTest if the OS is OpenBSD and the OpenBSD version + is less than `min_version`. + + For example, @requires_freebsd_version(5, 4) raises SkipTest if the FreeBSD + version is less than 5.4. + """ + return _requires_unix_version('OpenBSD', min_version) + def requires_linux_version(*min_version): """Decorator raising SkipTest if the OS is Linux and the Linux version is less than `min_version`. 
diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index 3bb8dd8..c2c0515 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -1622,6 +1622,9 @@ else: # kqueue doesn't support character devices (PTY) on Mac OS X older # than 10.9 (Maverick) @support.requires_mac_ver(10, 9) + # Issue #20667: KqueueEventLoopTests.test_read_pty_output() + # hangs on OpenBSD 5.4 + @support.requires_openbsd_version(5, 5) def test_read_pty_output(self): super().test_read_pty_output() -- cgit v0.12 From 9a49c648caf4e8c82b890cd106be0b18eef7fb23 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 18 Feb 2014 09:13:47 +0100 Subject: Issue #20667: KqueueEventLoopTests.test_read_pty_output() hangs also on OpenBSD 5.5. --- Lib/test/support/__init__.py | 10 ---------- Lib/test/test_asyncio/test_events.py | 5 +++-- 2 files changed, 3 insertions(+), 12 deletions(-) diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 05e58b2..9857f9d 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -78,7 +78,6 @@ __all__ = [ "create_empty_file", "can_symlink", "fs_is_case_insensitive", # unittest "is_resource_enabled", "requires", "requires_freebsd_version", - "requires_openbsd_version", "requires_linux_version", "requires_mac_ver", "check_syntax_error", "TransientResource", "time_out", "socket_peer_reset", "ioerror_peer_reset", "transient_internet", "BasicTestRunner", "run_unittest", "run_doctest", @@ -468,15 +467,6 @@ def requires_freebsd_version(*min_version): """ return _requires_unix_version('FreeBSD', min_version) -def requires_openbsd_version(*min_version): - """Decorator raising SkipTest if the OS is OpenBSD and the OpenBSD version - is less than `min_version`. - - For example, @requires_freebsd_version(5, 4) raises SkipTest if the FreeBSD - version is less than 5.4. - """ - return _requires_unix_version('OpenBSD', min_version) - def requires_linux_version(*min_version): """Decorator raising SkipTest if the OS is Linux and the Linux version is less than `min_version`. diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index c2c0515..8c32a6e 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -1623,8 +1623,9 @@ else: # than 10.9 (Maverick) @support.requires_mac_ver(10, 9) # Issue #20667: KqueueEventLoopTests.test_read_pty_output() - # hangs on OpenBSD 5.4 - @support.requires_openbsd_version(5, 5) + # hangs on OpenBSD 5.5 + @unittest.skipIf(sys.platform.startswith('openbsd'), + 'test hangs on OpenBSD') def test_read_pty_output(self): super().test_read_pty_output() -- cgit v0.12 From 86516d9225d82cc039b1f71a41bec610cf9ab5ce Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 18 Feb 2014 09:22:00 +0100 Subject: Close #20649: Fix typo in asyncio doc. Patch written by Brett Cannon. --- Doc/library/asyncio-dev.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/library/asyncio-dev.rst b/Doc/library/asyncio-dev.rst index b0d28b1..90bae84 100644 --- a/Doc/library/asyncio-dev.rst +++ b/Doc/library/asyncio-dev.rst @@ -13,7 +13,7 @@ Concurrency and multithreading ------------------------------ An event loop runs in a thread and executes all callbacks and tasks in the same -thread. While a task in running in the event loop, no other task is running in +thread. While a task is running in the event loop, no other task is running in the same thread. 
But when the task uses ``yield from``, the task is suspended and the event loop executes the next task. -- cgit v0.12 From fd9d374ae9e1f73e74d0e9a342dd03c7a024d570 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 18 Feb 2014 09:37:43 +0100 Subject: Issue #20493: Document that asyncio should not exceed one day --- Doc/library/asyncio-eventloop.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 7760fcb..b2c4802 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -157,6 +157,10 @@ Which clock is used depends on the (platform-specific) event loop implementation; ideally it is a monotonic clock. This will generally be a different clock than :func:`time.time`. +.. note:: + + Timeouts (relative *delay* or absolute *when*) should not exceed one day. + .. method:: BaseEventLoop.call_later(delay, callback, *args) -- cgit v0.12 From b057c52b0175a31ee8a786a780bd0eef785820ae Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Tue, 18 Feb 2014 12:15:06 -0500 Subject: asyncio: Add support for UNIX Domain Sockets. --- Lib/asyncio/base_events.py | 7 + Lib/asyncio/events.py | 26 ++ Lib/asyncio/streams.py | 39 ++- Lib/asyncio/test_utils.py | 153 ++++++++--- Lib/asyncio/unix_events.py | 75 +++++- Lib/test/test_asyncio/test_base_events.py | 2 +- Lib/test/test_asyncio/test_events.py | 349 +++++++++++++++++--------- Lib/test/test_asyncio/test_selector_events.py | 3 +- Lib/test/test_asyncio/test_streams.py | 195 +++++++++++--- Lib/test/test_asyncio/test_unix_events.py | 82 +++++- 10 files changed, 738 insertions(+), 193 deletions(-) diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 3bbf6b5..b74e936 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -407,6 +407,13 @@ class BaseEventLoop(events.AbstractEventLoop): sock.setblocking(False) + transport, protocol = yield from self._create_connection_transport( + sock, protocol_factory, ssl, server_hostname) + return transport, protocol + + @tasks.coroutine + def _create_connection_transport(self, sock, protocol_factory, ssl, + server_hostname): protocol = protocol_factory() waiter = futures.Future(loop=self) if ssl: diff --git a/Lib/asyncio/events.py b/Lib/asyncio/events.py index dd9e3fb..7841ad9 100644 --- a/Lib/asyncio/events.py +++ b/Lib/asyncio/events.py @@ -220,6 +220,32 @@ class AbstractEventLoop: """ raise NotImplementedError + def create_unix_connection(self, protocol_factory, path, *, + ssl=None, sock=None, + server_hostname=None): + raise NotImplementedError + + def create_unix_server(self, protocol_factory, path, *, + sock=None, backlog=100, ssl=None): + """A coroutine which creates a UNIX Domain Socket server. + + The return valud is a Server object, which can be used to stop + the service. + + path is a str, representing a file systsem path to bind the + server socket to. + + sock can optionally be specified in order to use a preexisting + socket object. + + backlog is the maximum number of queued connections passed to + listen() (defaults to 100). + + ssl can be set to an SSLContext to enable SSL over the + accepted connections. 
+ """ + raise NotImplementedError + def create_datagram_endpoint(self, protocol_factory, local_addr=None, remote_addr=None, *, family=0, proto=0, flags=0): diff --git a/Lib/asyncio/streams.py b/Lib/asyncio/streams.py index 8fc2147..698c5c6 100644 --- a/Lib/asyncio/streams.py +++ b/Lib/asyncio/streams.py @@ -1,9 +1,13 @@ """Stream-related things.""" __all__ = ['StreamReader', 'StreamWriter', 'StreamReaderProtocol', - 'open_connection', 'start_server', 'IncompleteReadError', + 'open_connection', 'start_server', + 'open_unix_connection', 'start_unix_server', + 'IncompleteReadError', ] +import socket + from . import events from . import futures from . import protocols @@ -93,6 +97,39 @@ def start_server(client_connected_cb, host=None, port=None, *, return (yield from loop.create_server(factory, host, port, **kwds)) +if hasattr(socket, 'AF_UNIX'): + # UNIX Domain Sockets are supported on this platform + + @tasks.coroutine + def open_unix_connection(path=None, *, + loop=None, limit=_DEFAULT_LIMIT, **kwds): + """Similar to `open_connection` but works with UNIX Domain Sockets.""" + if loop is None: + loop = events.get_event_loop() + reader = StreamReader(limit=limit, loop=loop) + protocol = StreamReaderProtocol(reader, loop=loop) + transport, _ = yield from loop.create_unix_connection( + lambda: protocol, path, **kwds) + writer = StreamWriter(transport, protocol, reader, loop) + return reader, writer + + + @tasks.coroutine + def start_unix_server(client_connected_cb, path=None, *, + loop=None, limit=_DEFAULT_LIMIT, **kwds): + """Similar to `start_server` but works with UNIX Domain Sockets.""" + if loop is None: + loop = events.get_event_loop() + + def factory(): + reader = StreamReader(limit=limit, loop=loop) + protocol = StreamReaderProtocol(reader, client_connected_cb, + loop=loop) + return protocol + + return (yield from loop.create_unix_server(factory, path, **kwds)) + + class FlowControlMixin(protocols.Protocol): """Reusable flow control logic for StreamWriter.drain(). diff --git a/Lib/asyncio/test_utils.py b/Lib/asyncio/test_utils.py index deab7c3..de2916b 100644 --- a/Lib/asyncio/test_utils.py +++ b/Lib/asyncio/test_utils.py @@ -4,12 +4,18 @@ import collections import contextlib import io import os +import socket +import socketserver import sys +import tempfile import threading import time import unittest import unittest.mock + +from http.server import HTTPServer from wsgiref.simple_server import make_server, WSGIRequestHandler, WSGIServer + try: import ssl except ImportError: # pragma: no cover @@ -70,42 +76,51 @@ def run_once(loop): loop.run_forever() -@contextlib.contextmanager -def run_test_server(*, host='127.0.0.1', port=0, use_ssl=False): +class SilentWSGIRequestHandler(WSGIRequestHandler): - class SilentWSGIRequestHandler(WSGIRequestHandler): - def get_stderr(self): - return io.StringIO() + def get_stderr(self): + return io.StringIO() - def log_message(self, format, *args): - pass + def log_message(self, format, *args): + pass - class SilentWSGIServer(WSGIServer): - def handle_error(self, request, client_address): + +class SilentWSGIServer(WSGIServer): + + def handle_error(self, request, client_address): + pass + + +class SSLWSGIServerMixin: + + def finish_request(self, request, client_address): + # The relative location of our test directory (which + # contains the ssl key and certificate files) differs + # between the stdlib and stand-alone asyncio. + # Prefer our own if we can find it. 
+ here = os.path.join(os.path.dirname(__file__), '..', 'tests') + if not os.path.isdir(here): + here = os.path.join(os.path.dirname(os.__file__), + 'test', 'test_asyncio') + keyfile = os.path.join(here, 'ssl_key.pem') + certfile = os.path.join(here, 'ssl_cert.pem') + ssock = ssl.wrap_socket(request, + keyfile=keyfile, + certfile=certfile, + server_side=True) + try: + self.RequestHandlerClass(ssock, client_address, self) + ssock.close() + except OSError: + # maybe socket has been closed by peer pass - class SSLWSGIServer(SilentWSGIServer): - def finish_request(self, request, client_address): - # The relative location of our test directory (which - # contains the ssl key and certificate files) differs - # between the stdlib and stand-alone asyncio. - # Prefer our own if we can find it. - here = os.path.join(os.path.dirname(__file__), '..', 'tests') - if not os.path.isdir(here): - here = os.path.join(os.path.dirname(os.__file__), - 'test', 'test_asyncio') - keyfile = os.path.join(here, 'ssl_key.pem') - certfile = os.path.join(here, 'ssl_cert.pem') - ssock = ssl.wrap_socket(request, - keyfile=keyfile, - certfile=certfile, - server_side=True) - try: - self.RequestHandlerClass(ssock, client_address, self) - ssock.close() - except OSError: - # maybe socket has been closed by peer - pass + +class SSLWSGIServer(SSLWSGIServerMixin, SilentWSGIServer): + pass + + +def _run_test_server(*, address, use_ssl=False, server_cls, server_ssl_cls): def app(environ, start_response): status = '200 OK' @@ -115,9 +130,9 @@ def run_test_server(*, host='127.0.0.1', port=0, use_ssl=False): # Run the test WSGI server in a separate thread in order not to # interfere with event handling in the main thread - server_class = SSLWSGIServer if use_ssl else SilentWSGIServer - httpd = make_server(host, port, app, - server_class, SilentWSGIRequestHandler) + server_class = server_ssl_cls if use_ssl else server_cls + httpd = server_class(address, SilentWSGIRequestHandler) + httpd.set_app(app) httpd.address = httpd.server_address server_thread = threading.Thread(target=httpd.serve_forever) server_thread.start() @@ -129,6 +144,75 @@ def run_test_server(*, host='127.0.0.1', port=0, use_ssl=False): server_thread.join() +if hasattr(socket, 'AF_UNIX'): + + class UnixHTTPServer(socketserver.UnixStreamServer, HTTPServer): + + def server_bind(self): + socketserver.UnixStreamServer.server_bind(self) + self.server_name = '127.0.0.1' + self.server_port = 80 + + + class UnixWSGIServer(UnixHTTPServer, WSGIServer): + + def server_bind(self): + UnixHTTPServer.server_bind(self) + self.setup_environ() + + def get_request(self): + request, client_addr = super().get_request() + # Code in the stdlib expects that get_request + # will return a socket and a tuple (host, port). 
+ # However, this isn't true for UNIX sockets, + # as the second return value will be a path; + # hence we return some fake data sufficient + # to get the tests going + return request, ('127.0.0.1', '') + + + class SilentUnixWSGIServer(UnixWSGIServer): + + def handle_error(self, request, client_address): + pass + + + class UnixSSLWSGIServer(SSLWSGIServerMixin, SilentUnixWSGIServer): + pass + + + def gen_unix_socket_path(): + with tempfile.NamedTemporaryFile() as file: + return file.name + + + @contextlib.contextmanager + def unix_socket_path(): + path = gen_unix_socket_path() + try: + yield path + finally: + try: + os.unlink(path) + except OSError: + pass + + + @contextlib.contextmanager + def run_test_unix_server(*, use_ssl=False): + with unix_socket_path() as path: + yield from _run_test_server(address=path, use_ssl=use_ssl, + server_cls=SilentUnixWSGIServer, + server_ssl_cls=UnixSSLWSGIServer) + + +@contextlib.contextmanager +def run_test_server(*, host='127.0.0.1', port=0, use_ssl=False): + yield from _run_test_server(address=(host, port), use_ssl=use_ssl, + server_cls=SilentWSGIServer, + server_ssl_cls=SSLWSGIServer) + + def make_test_protocol(base): dct = {} for name in dir(base): @@ -275,5 +359,6 @@ class TestLoop(base_events.BaseEventLoop): def _write_to_self(self): pass + def MockCallback(**kwargs): return unittest.mock.Mock(spec=['__call__'], **kwargs) diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index ea79d33..e0d7507 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -11,6 +11,7 @@ import sys import threading +from . import base_events from . import base_subprocess from . import constants from . import events @@ -31,9 +32,9 @@ if sys.platform == 'win32': # pragma: no cover class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): - """Unix event loop + """Unix event loop. - Adds signal handling to SelectorEventLoop + Adds signal handling and UNIX Domain Socket support to SelectorEventLoop. 
""" def __init__(self, selector=None): @@ -164,6 +165,76 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): def _child_watcher_callback(self, pid, returncode, transp): self.call_soon_threadsafe(transp._process_exited, returncode) + @tasks.coroutine + def create_unix_connection(self, protocol_factory, path, *, + ssl=None, sock=None, + server_hostname=None): + assert server_hostname is None or isinstance(server_hostname, str) + if ssl: + if server_hostname is None: + raise ValueError( + 'you have to pass server_hostname when using ssl') + else: + if server_hostname is not None: + raise ValueError('server_hostname is only meaningful with ssl') + + if path is not None: + if sock is not None: + raise ValueError( + 'path and sock can not be specified at the same time') + + try: + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0) + sock.setblocking(False) + yield from self.sock_connect(sock, path) + except OSError: + if sock is not None: + sock.close() + raise + + else: + if sock is None: + raise ValueError('no path and sock were specified') + sock.setblocking(False) + + transport, protocol = yield from self._create_connection_transport( + sock, protocol_factory, ssl, server_hostname) + return transport, protocol + + @tasks.coroutine + def create_unix_server(self, protocol_factory, path=None, *, + sock=None, backlog=100, ssl=None): + if isinstance(ssl, bool): + raise TypeError('ssl argument must be an SSLContext or None') + + if path is not None: + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + + try: + sock.bind(path) + except OSError as exc: + if exc.errno == errno.EADDRINUSE: + # Let's improve the error message by adding + # with what exact address it occurs. + msg = 'Address {!r} is already in use'.format(path) + raise OSError(errno.EADDRINUSE, msg) from None + else: + raise + else: + if sock is None: + raise ValueError( + 'path was not specified, and no sock specified') + + if sock.family != socket.AF_UNIX: + raise ValueError( + 'A UNIX Domain Socket was expected, got {!r}'.format(sock)) + + server = base_events.Server(self, [sock]) + sock.listen(backlog) + sock.setblocking(False) + self._start_serving(protocol_factory, sock, ssl, server) + return server + def _set_nonblocking(fd): flags = fcntl.fcntl(fd, fcntl.F_GETFL) diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py index 94e2d59..9fa9841 100644 --- a/Lib/test/test_asyncio/test_base_events.py +++ b/Lib/test/test_asyncio/test_base_events.py @@ -212,7 +212,7 @@ class BaseEventLoopTests(unittest.TestCase): idx = -1 data = [10.0, 10.0, 10.3, 13.0] - self.loop._scheduled = [asyncio.TimerHandle(11.0, lambda:True, ())] + self.loop._scheduled = [asyncio.TimerHandle(11.0, lambda: True, ())] self.loop._run_once() self.assertEqual(logging.DEBUG, m_logger.log.call_args[0][0]) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index 8c32a6e..c9d04c0 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -39,13 +39,14 @@ def data_file(filename): return fullname raise FileNotFoundError(filename) + ONLYCERT = data_file('ssl_cert.pem') ONLYKEY = data_file('ssl_key.pem') SIGNED_CERTFILE = data_file('keycert3.pem') SIGNING_CA = data_file('pycacert.pem') -class MyProto(asyncio.Protocol): +class MyBaseProto(asyncio.Protocol): done = None def __init__(self, loop=None): @@ -59,7 +60,6 @@ class MyProto(asyncio.Protocol): self.transport = transport assert self.state == 'INITIAL', self.state 
self.state = 'CONNECTED' - transport.write(b'GET / HTTP/1.0\r\nHost: example.com\r\n\r\n') def data_received(self, data): assert self.state == 'CONNECTED', self.state @@ -76,6 +76,12 @@ class MyProto(asyncio.Protocol): self.done.set_result(None) +class MyProto(MyBaseProto): + def connection_made(self, transport): + super().connection_made(transport) + transport.write(b'GET / HTTP/1.0\r\nHost: example.com\r\n\r\n') + + class MyDatagramProto(asyncio.DatagramProtocol): done = None @@ -357,22 +363,30 @@ class EventLoopTestsMixin: r.close() self.assertGreaterEqual(len(data), 200) + def _basetest_sock_client_ops(self, httpd, sock): + sock.setblocking(False) + self.loop.run_until_complete( + self.loop.sock_connect(sock, httpd.address)) + self.loop.run_until_complete( + self.loop.sock_sendall(sock, b'GET / HTTP/1.0\r\n\r\n')) + data = self.loop.run_until_complete( + self.loop.sock_recv(sock, 1024)) + # consume data + self.loop.run_until_complete( + self.loop.sock_recv(sock, 1024)) + sock.close() + self.assertTrue(data.startswith(b'HTTP/1.0 200 OK')) + def test_sock_client_ops(self): with test_utils.run_test_server() as httpd: sock = socket.socket() - sock.setblocking(False) - self.loop.run_until_complete( - self.loop.sock_connect(sock, httpd.address)) - self.loop.run_until_complete( - self.loop.sock_sendall(sock, b'GET / HTTP/1.0\r\n\r\n')) - data = self.loop.run_until_complete( - self.loop.sock_recv(sock, 1024)) - # consume data - self.loop.run_until_complete( - self.loop.sock_recv(sock, 1024)) - sock.close() + self._basetest_sock_client_ops(httpd, sock) - self.assertTrue(data.startswith(b'HTTP/1.0 200 OK')) + @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') + def test_unix_sock_client_ops(self): + with test_utils.run_test_unix_server() as httpd: + sock = socket.socket(socket.AF_UNIX) + self._basetest_sock_client_ops(httpd, sock) def test_sock_client_fail(self): # Make sure that we will get an unused port @@ -485,16 +499,26 @@ class EventLoopTestsMixin: self.loop.run_forever() self.assertEqual(caught, 1) + def _basetest_create_connection(self, connection_fut): + tr, pr = self.loop.run_until_complete(connection_fut) + self.assertIsInstance(tr, asyncio.Transport) + self.assertIsInstance(pr, asyncio.Protocol) + self.loop.run_until_complete(pr.done) + self.assertGreater(pr.nbytes, 0) + tr.close() + def test_create_connection(self): with test_utils.run_test_server() as httpd: - f = self.loop.create_connection( + conn_fut = self.loop.create_connection( lambda: MyProto(loop=self.loop), *httpd.address) - tr, pr = self.loop.run_until_complete(f) - self.assertIsInstance(tr, asyncio.Transport) - self.assertIsInstance(pr, asyncio.Protocol) - self.loop.run_until_complete(pr.done) - self.assertGreater(pr.nbytes, 0) - tr.close() + self._basetest_create_connection(conn_fut) + + @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') + def test_create_unix_connection(self): + with test_utils.run_test_unix_server() as httpd: + conn_fut = self.loop.create_unix_connection( + lambda: MyProto(loop=self.loop), httpd.address) + self._basetest_create_connection(conn_fut) def test_create_connection_sock(self): with test_utils.run_test_server() as httpd: @@ -524,20 +548,37 @@ class EventLoopTestsMixin: self.assertGreater(pr.nbytes, 0) tr.close() + def _basetest_create_ssl_connection(self, connection_fut): + tr, pr = self.loop.run_until_complete(connection_fut) + self.assertIsInstance(tr, asyncio.Transport) + self.assertIsInstance(pr, asyncio.Protocol) + self.assertTrue('ssl' in 
tr.__class__.__name__.lower()) + self.assertIsNotNone(tr.get_extra_info('sockname')) + self.loop.run_until_complete(pr.done) + self.assertGreater(pr.nbytes, 0) + tr.close() + @unittest.skipIf(ssl is None, 'No ssl module') def test_create_ssl_connection(self): with test_utils.run_test_server(use_ssl=True) as httpd: - f = self.loop.create_connection( - lambda: MyProto(loop=self.loop), *httpd.address, + conn_fut = self.loop.create_connection( + lambda: MyProto(loop=self.loop), + *httpd.address, ssl=test_utils.dummy_ssl_context()) - tr, pr = self.loop.run_until_complete(f) - self.assertIsInstance(tr, asyncio.Transport) - self.assertIsInstance(pr, asyncio.Protocol) - self.assertTrue('ssl' in tr.__class__.__name__.lower()) - self.assertIsNotNone(tr.get_extra_info('sockname')) - self.loop.run_until_complete(pr.done) - self.assertGreater(pr.nbytes, 0) - tr.close() + + self._basetest_create_ssl_connection(conn_fut) + + @unittest.skipIf(ssl is None, 'No ssl module') + @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') + def test_create_ssl_unix_connection(self): + with test_utils.run_test_unix_server(use_ssl=True) as httpd: + conn_fut = self.loop.create_unix_connection( + lambda: MyProto(loop=self.loop), + httpd.address, + ssl=test_utils.dummy_ssl_context(), + server_hostname='127.0.0.1') + + self._basetest_create_ssl_connection(conn_fut) def test_create_connection_local_addr(self): with test_utils.run_test_server() as httpd: @@ -561,14 +602,8 @@ class EventLoopTestsMixin: self.assertIn(str(httpd.address), cm.exception.strerror) def test_create_server(self): - proto = None - - def factory(): - nonlocal proto - proto = MyProto() - return proto - - f = self.loop.create_server(factory, '0.0.0.0', 0) + proto = MyProto() + f = self.loop.create_server(lambda: proto, '0.0.0.0', 0) server = self.loop.run_until_complete(f) self.assertEqual(len(server.sockets), 1) sock = server.sockets[0] @@ -605,38 +640,76 @@ class EventLoopTestsMixin: # close server server.close() - def _make_ssl_server(self, factory, certfile, keyfile=None): + def _make_unix_server(self, factory, **kwargs): + path = test_utils.gen_unix_socket_path() + self.addCleanup(lambda: os.path.exists(path) and os.unlink(path)) + + f = self.loop.create_unix_server(factory, path, **kwargs) + server = self.loop.run_until_complete(f) + + return server, path + + @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') + def test_create_unix_server(self): + proto = MyProto() + server, path = self._make_unix_server(lambda: proto) + self.assertEqual(len(server.sockets), 1) + + client = socket.socket(socket.AF_UNIX) + client.connect(path) + client.sendall(b'xxx') + test_utils.run_briefly(self.loop) + test_utils.run_until(self.loop, lambda: proto is not None, 10) + + self.assertIsInstance(proto, MyProto) + self.assertEqual('INITIAL', proto.state) + test_utils.run_briefly(self.loop) + self.assertEqual('CONNECTED', proto.state) + test_utils.run_until(self.loop, lambda: proto.nbytes > 0, + timeout=10) + self.assertEqual(3, proto.nbytes) + + # close connection + proto.transport.close() + test_utils.run_briefly(self.loop) # windows iocp + + self.assertEqual('CLOSED', proto.state) + + # the client socket must be closed after to avoid ECONNRESET upon + # recv()/send() on the serving socket + client.close() + + # close server + server.close() + + def _create_ssl_context(self, certfile, keyfile=None): sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext.options |= ssl.OP_NO_SSLv2 sslcontext.load_cert_chain(certfile, keyfile) + return 
sslcontext - f = self.loop.create_server( - factory, '127.0.0.1', 0, ssl=sslcontext) + def _make_ssl_server(self, factory, certfile, keyfile=None): + sslcontext = self._create_ssl_context(certfile, keyfile) + f = self.loop.create_server(factory, '127.0.0.1', 0, ssl=sslcontext) server = self.loop.run_until_complete(f) + sock = server.sockets[0] host, port = sock.getsockname() self.assertEqual(host, '127.0.0.1') return server, host, port + def _make_ssl_unix_server(self, factory, certfile, keyfile=None): + sslcontext = self._create_ssl_context(certfile, keyfile) + return self._make_unix_server(factory, ssl=sslcontext) + @unittest.skipIf(ssl is None, 'No ssl module') def test_create_server_ssl(self): - proto = None - - class ClientMyProto(MyProto): - def connection_made(self, transport): - self.transport = transport - assert self.state == 'INITIAL', self.state - self.state = 'CONNECTED' + proto = MyProto(loop=self.loop) + server, host, port = self._make_ssl_server( + lambda: proto, ONLYCERT, ONLYKEY) - def factory(): - nonlocal proto - proto = MyProto(loop=self.loop) - return proto - - server, host, port = self._make_ssl_server(factory, ONLYCERT, ONLYKEY) - - f_c = self.loop.create_connection(ClientMyProto, host, port, + f_c = self.loop.create_connection(MyBaseProto, host, port, ssl=test_utils.dummy_ssl_context()) client, pr = self.loop.run_until_complete(f_c) @@ -667,16 +740,45 @@ class EventLoopTestsMixin: server.close() @unittest.skipIf(ssl is None, 'No ssl module') - @unittest.skipUnless(HAS_SNI, 'No SNI support in ssl module') - def test_create_server_ssl_verify_failed(self): - proto = None + @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') + def test_create_unix_server_ssl(self): + proto = MyProto(loop=self.loop) + server, path = self._make_ssl_unix_server( + lambda: proto, ONLYCERT, ONLYKEY) - def factory(): - nonlocal proto - proto = MyProto(loop=self.loop) - return proto + f_c = self.loop.create_unix_connection( + MyBaseProto, path, ssl=test_utils.dummy_ssl_context(), + server_hostname='') + + client, pr = self.loop.run_until_complete(f_c) + + client.write(b'xxx') + test_utils.run_briefly(self.loop) + self.assertIsInstance(proto, MyProto) + test_utils.run_briefly(self.loop) + self.assertEqual('CONNECTED', proto.state) + test_utils.run_until(self.loop, lambda: proto.nbytes > 0, + timeout=10) + self.assertEqual(3, proto.nbytes) + + # close connection + proto.transport.close() + self.loop.run_until_complete(proto.done) + self.assertEqual('CLOSED', proto.state) + + # the client socket must be closed after to avoid ECONNRESET upon + # recv()/send() on the serving socket + client.close() - server, host, port = self._make_ssl_server(factory, SIGNED_CERTFILE) + # stop serving + server.close() + + @unittest.skipIf(ssl is None, 'No ssl module') + @unittest.skipUnless(HAS_SNI, 'No SNI support in ssl module') + def test_create_server_ssl_verify_failed(self): + proto = MyProto(loop=self.loop) + server, host, port = self._make_ssl_server( + lambda: proto, SIGNED_CERTFILE) sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext_client.options |= ssl.OP_NO_SSLv2 @@ -697,15 +799,36 @@ class EventLoopTestsMixin: @unittest.skipIf(ssl is None, 'No ssl module') @unittest.skipUnless(HAS_SNI, 'No SNI support in ssl module') - def test_create_server_ssl_match_failed(self): - proto = None + @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') + def test_create_unix_server_ssl_verify_failed(self): + proto = MyProto(loop=self.loop) + server, path = 
self._make_ssl_unix_server( + lambda: proto, SIGNED_CERTFILE) - def factory(): - nonlocal proto - proto = MyProto(loop=self.loop) - return proto + sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + sslcontext_client.options |= ssl.OP_NO_SSLv2 + sslcontext_client.verify_mode = ssl.CERT_REQUIRED + if hasattr(sslcontext_client, 'check_hostname'): + sslcontext_client.check_hostname = True - server, host, port = self._make_ssl_server(factory, SIGNED_CERTFILE) + # no CA loaded + f_c = self.loop.create_unix_connection(MyProto, path, + ssl=sslcontext_client, + server_hostname='invalid') + with self.assertRaisesRegex(ssl.SSLError, + 'certificate verify failed '): + self.loop.run_until_complete(f_c) + + # close connection + self.assertIsNone(proto.transport) + server.close() + + @unittest.skipIf(ssl is None, 'No ssl module') + @unittest.skipUnless(HAS_SNI, 'No SNI support in ssl module') + def test_create_server_ssl_match_failed(self): + proto = MyProto(loop=self.loop) + server, host, port = self._make_ssl_server( + lambda: proto, SIGNED_CERTFILE) sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext_client.options |= ssl.OP_NO_SSLv2 @@ -729,15 +852,36 @@ class EventLoopTestsMixin: @unittest.skipIf(ssl is None, 'No ssl module') @unittest.skipUnless(HAS_SNI, 'No SNI support in ssl module') - def test_create_server_ssl_verified(self): - proto = None + @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') + def test_create_unix_server_ssl_verified(self): + proto = MyProto(loop=self.loop) + server, path = self._make_ssl_unix_server( + lambda: proto, SIGNED_CERTFILE) - def factory(): - nonlocal proto - proto = MyProto(loop=self.loop) - return proto + sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + sslcontext_client.options |= ssl.OP_NO_SSLv2 + sslcontext_client.verify_mode = ssl.CERT_REQUIRED + sslcontext_client.load_verify_locations(cafile=SIGNING_CA) + if hasattr(sslcontext_client, 'check_hostname'): + sslcontext_client.check_hostname = True - server, host, port = self._make_ssl_server(factory, SIGNED_CERTFILE) + # Connection succeeds with correct CA and server hostname. 
+ f_c = self.loop.create_unix_connection(MyProto, path, + ssl=sslcontext_client, + server_hostname='localhost') + client, pr = self.loop.run_until_complete(f_c) + + # close connection + proto.transport.close() + client.close() + server.close() + + @unittest.skipIf(ssl is None, 'No ssl module') + @unittest.skipUnless(HAS_SNI, 'No SNI support in ssl module') + def test_create_server_ssl_verified(self): + proto = MyProto(loop=self.loop) + server, host, port = self._make_ssl_server( + lambda: proto, SIGNED_CERTFILE) sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext_client.options |= ssl.OP_NO_SSLv2 @@ -915,19 +1059,15 @@ class EventLoopTestsMixin: @unittest.skipUnless(sys.platform != 'win32', "Don't support pipes for Windows") def test_read_pipe(self): - proto = None - - def factory(): - nonlocal proto - proto = MyReadPipeProto(loop=self.loop) - return proto + proto = MyReadPipeProto(loop=self.loop) rpipe, wpipe = os.pipe() pipeobj = io.open(rpipe, 'rb', 1024) @asyncio.coroutine def connect(): - t, p = yield from self.loop.connect_read_pipe(factory, pipeobj) + t, p = yield from self.loop.connect_read_pipe( + lambda: proto, pipeobj) self.assertIs(p, proto) self.assertIs(t, proto.transport) self.assertEqual(['INITIAL', 'CONNECTED'], proto.state) @@ -959,19 +1099,14 @@ class EventLoopTestsMixin: # Issue #20495: The test hangs on FreeBSD 7.2 but pass on FreeBSD 9 @support.requires_freebsd_version(8) def test_read_pty_output(self): - proto = None - - def factory(): - nonlocal proto - proto = MyReadPipeProto(loop=self.loop) - return proto + proto = MyReadPipeProto(loop=self.loop) master, slave = os.openpty() master_read_obj = io.open(master, 'rb', 0) @asyncio.coroutine def connect(): - t, p = yield from self.loop.connect_read_pipe(factory, + t, p = yield from self.loop.connect_read_pipe(lambda: proto, master_read_obj) self.assertIs(p, proto) self.assertIs(t, proto.transport) @@ -999,21 +1134,17 @@ class EventLoopTestsMixin: @unittest.skipUnless(sys.platform != 'win32', "Don't support pipes for Windows") def test_write_pipe(self): - proto = None + proto = MyWritePipeProto(loop=self.loop) transport = None - def factory(): - nonlocal proto - proto = MyWritePipeProto(loop=self.loop) - return proto - rpipe, wpipe = os.pipe() pipeobj = io.open(wpipe, 'wb', 1024) @asyncio.coroutine def connect(): nonlocal transport - t, p = yield from self.loop.connect_write_pipe(factory, pipeobj) + t, p = yield from self.loop.connect_write_pipe( + lambda: proto, pipeobj) self.assertIs(p, proto) self.assertIs(t, proto.transport) self.assertEqual('CONNECTED', proto.state) @@ -1045,21 +1176,16 @@ class EventLoopTestsMixin: @unittest.skipUnless(sys.platform != 'win32', "Don't support pipes for Windows") def test_write_pipe_disconnect_on_close(self): - proto = None + proto = MyWritePipeProto(loop=self.loop) transport = None - def factory(): - nonlocal proto - proto = MyWritePipeProto(loop=self.loop) - return proto - rsock, wsock = test_utils.socketpair() pipeobj = io.open(wsock.detach(), 'wb', 1024) @asyncio.coroutine def connect(): nonlocal transport - t, p = yield from self.loop.connect_write_pipe(factory, + t, p = yield from self.loop.connect_write_pipe(lambda: proto, pipeobj) self.assertIs(p, proto) self.assertIs(t, proto.transport) @@ -1084,21 +1210,16 @@ class EventLoopTestsMixin: # older than 10.6 (Snow Leopard) @support.requires_mac_ver(10, 6) def test_write_pty(self): - proto = None + proto = MyWritePipeProto(loop=self.loop) transport = None - def factory(): - nonlocal proto - proto = 
MyWritePipeProto(loop=self.loop) - return proto - master, slave = os.openpty() slave_write_obj = io.open(slave, 'wb', 0) @asyncio.coroutine def connect(): nonlocal transport - t, p = yield from self.loop.connect_write_pipe(factory, + t, p = yield from self.loop.connect_write_pipe(lambda: proto, slave_write_obj) self.assertIs(p, proto) self.assertIs(t, proto.transport) diff --git a/Lib/test/test_asyncio/test_selector_events.py b/Lib/test/test_asyncio/test_selector_events.py index 855a895..7741e19 100644 --- a/Lib/test/test_asyncio/test_selector_events.py +++ b/Lib/test/test_asyncio/test_selector_events.py @@ -55,7 +55,8 @@ class BaseSelectorEventLoopTests(unittest.TestCase): self.loop.remove_reader = unittest.mock.Mock() self.loop.remove_writer = unittest.mock.Mock() waiter = asyncio.Future(loop=self.loop) - transport = self.loop._make_ssl_transport(m, asyncio.Protocol(), m, waiter) + transport = self.loop._make_ssl_transport( + m, asyncio.Protocol(), m, waiter) self.assertIsInstance(transport, _SelectorSslTransport) @unittest.mock.patch('asyncio.selector_events.ssl', None) diff --git a/Lib/test/test_asyncio/test_streams.py b/Lib/test/test_asyncio/test_streams.py index ee3fb45..31e26a6 100644 --- a/Lib/test/test_asyncio/test_streams.py +++ b/Lib/test/test_asyncio/test_streams.py @@ -1,6 +1,8 @@ """Tests for streams.py.""" +import functools import gc +import socket import unittest import unittest.mock try: @@ -32,48 +34,85 @@ class StreamReaderTests(unittest.TestCase): stream = asyncio.StreamReader() self.assertIs(stream._loop, m_events.get_event_loop.return_value) + def _basetest_open_connection(self, open_connection_fut): + reader, writer = self.loop.run_until_complete(open_connection_fut) + writer.write(b'GET / HTTP/1.0\r\n\r\n') + f = reader.readline() + data = self.loop.run_until_complete(f) + self.assertEqual(data, b'HTTP/1.0 200 OK\r\n') + f = reader.read() + data = self.loop.run_until_complete(f) + self.assertTrue(data.endswith(b'\r\n\r\nTest message')) + writer.close() + def test_open_connection(self): with test_utils.run_test_server() as httpd: - f = asyncio.open_connection(*httpd.address, loop=self.loop) - reader, writer = self.loop.run_until_complete(f) - writer.write(b'GET / HTTP/1.0\r\n\r\n') - f = reader.readline() - data = self.loop.run_until_complete(f) - self.assertEqual(data, b'HTTP/1.0 200 OK\r\n') - f = reader.read() - data = self.loop.run_until_complete(f) - self.assertTrue(data.endswith(b'\r\n\r\nTest message')) - - writer.close() + conn_fut = asyncio.open_connection(*httpd.address, + loop=self.loop) + self._basetest_open_connection(conn_fut) + + @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') + def test_open_unix_connection(self): + with test_utils.run_test_unix_server() as httpd: + conn_fut = asyncio.open_unix_connection(httpd.address, + loop=self.loop) + self._basetest_open_connection(conn_fut) + + def _basetest_open_connection_no_loop_ssl(self, open_connection_fut): + try: + reader, writer = self.loop.run_until_complete(open_connection_fut) + finally: + asyncio.set_event_loop(None) + writer.write(b'GET / HTTP/1.0\r\n\r\n') + f = reader.read() + data = self.loop.run_until_complete(f) + self.assertTrue(data.endswith(b'\r\n\r\nTest message')) + + writer.close() @unittest.skipIf(ssl is None, 'No ssl module') def test_open_connection_no_loop_ssl(self): with test_utils.run_test_server(use_ssl=True) as httpd: - try: - asyncio.set_event_loop(self.loop) - f = asyncio.open_connection(*httpd.address, - ssl=test_utils.dummy_ssl_context()) - reader, writer = 
self.loop.run_until_complete(f) - finally: - asyncio.set_event_loop(None) - writer.write(b'GET / HTTP/1.0\r\n\r\n') - f = reader.read() - data = self.loop.run_until_complete(f) - self.assertTrue(data.endswith(b'\r\n\r\nTest message')) + conn_fut = asyncio.open_connection( + *httpd.address, + ssl=test_utils.dummy_ssl_context(), + loop=self.loop) - writer.close() + self._basetest_open_connection_no_loop_ssl(conn_fut) + + @unittest.skipIf(ssl is None, 'No ssl module') + @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') + def test_open_unix_connection_no_loop_ssl(self): + with test_utils.run_test_unix_server(use_ssl=True) as httpd: + conn_fut = asyncio.open_unix_connection( + httpd.address, + ssl=test_utils.dummy_ssl_context(), + server_hostname='', + loop=self.loop) + + self._basetest_open_connection_no_loop_ssl(conn_fut) + + def _basetest_open_connection_error(self, open_connection_fut): + reader, writer = self.loop.run_until_complete(open_connection_fut) + writer._protocol.connection_lost(ZeroDivisionError()) + f = reader.read() + with self.assertRaises(ZeroDivisionError): + self.loop.run_until_complete(f) + writer.close() + test_utils.run_briefly(self.loop) def test_open_connection_error(self): with test_utils.run_test_server() as httpd: - f = asyncio.open_connection(*httpd.address, loop=self.loop) - reader, writer = self.loop.run_until_complete(f) - writer._protocol.connection_lost(ZeroDivisionError()) - f = reader.read() - with self.assertRaises(ZeroDivisionError): - self.loop.run_until_complete(f) + conn_fut = asyncio.open_connection(*httpd.address, + loop=self.loop) + self._basetest_open_connection_error(conn_fut) - writer.close() - test_utils.run_briefly(self.loop) + @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') + def test_open_unix_connection_error(self): + with test_utils.run_test_unix_server() as httpd: + conn_fut = asyncio.open_unix_connection(httpd.address, + loop=self.loop) + self._basetest_open_connection_error(conn_fut) def test_feed_empty_data(self): stream = asyncio.StreamReader(loop=self.loop) @@ -415,10 +454,13 @@ class StreamReaderTests(unittest.TestCase): client_writer.write(data) def start(self): + sock = socket.socket() + sock.bind(('127.0.0.1', 0)) self.server = self.loop.run_until_complete( asyncio.start_server(self.handle_client, - '127.0.0.1', 12345, + sock=sock, loop=self.loop)) + return sock.getsockname() def handle_client_callback(self, client_reader, client_writer): task = asyncio.Task(client_reader.readline(), loop=self.loop) @@ -429,10 +471,15 @@ class StreamReaderTests(unittest.TestCase): task.add_done_callback(done) def start_callback(self): + sock = socket.socket() + sock.bind(('127.0.0.1', 0)) + addr = sock.getsockname() + sock.close() self.server = self.loop.run_until_complete( asyncio.start_server(self.handle_client_callback, - '127.0.0.1', 12345, + host=addr[0], port=addr[1], loop=self.loop)) + return addr def stop(self): if self.server is not None: @@ -441,9 +488,9 @@ class StreamReaderTests(unittest.TestCase): self.server = None @asyncio.coroutine - def client(): + def client(addr): reader, writer = yield from asyncio.open_connection( - '127.0.0.1', 12345, loop=self.loop) + *addr, loop=self.loop) # send a line writer.write(b"hello world!\n") # read it back @@ -453,20 +500,90 @@ class StreamReaderTests(unittest.TestCase): # test the server variant with a coroutine as client handler server = MyServer(self.loop) - server.start() - msg = self.loop.run_until_complete(asyncio.Task(client(), + addr = server.start() + 
msg = self.loop.run_until_complete(asyncio.Task(client(addr), loop=self.loop)) server.stop() self.assertEqual(msg, b"hello world!\n") # test the server variant with a callback as client handler server = MyServer(self.loop) - server.start_callback() - msg = self.loop.run_until_complete(asyncio.Task(client(), + addr = server.start_callback() + msg = self.loop.run_until_complete(asyncio.Task(client(addr), loop=self.loop)) server.stop() self.assertEqual(msg, b"hello world!\n") + @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') + def test_start_unix_server(self): + + class MyServer: + + def __init__(self, loop, path): + self.server = None + self.loop = loop + self.path = path + + @asyncio.coroutine + def handle_client(self, client_reader, client_writer): + data = yield from client_reader.readline() + client_writer.write(data) + + def start(self): + self.server = self.loop.run_until_complete( + asyncio.start_unix_server(self.handle_client, + path=self.path, + loop=self.loop)) + + def handle_client_callback(self, client_reader, client_writer): + task = asyncio.Task(client_reader.readline(), loop=self.loop) + + def done(task): + client_writer.write(task.result()) + + task.add_done_callback(done) + + def start_callback(self): + self.server = self.loop.run_until_complete( + asyncio.start_unix_server(self.handle_client_callback, + path=self.path, + loop=self.loop)) + + def stop(self): + if self.server is not None: + self.server.close() + self.loop.run_until_complete(self.server.wait_closed()) + self.server = None + + @asyncio.coroutine + def client(path): + reader, writer = yield from asyncio.open_unix_connection( + path, loop=self.loop) + # send a line + writer.write(b"hello world!\n") + # read it back + msgback = yield from reader.readline() + writer.close() + return msgback + + # test the server variant with a coroutine as client handler + with test_utils.unix_socket_path() as path: + server = MyServer(self.loop, path) + server.start() + msg = self.loop.run_until_complete(asyncio.Task(client(path), + loop=self.loop)) + server.stop() + self.assertEqual(msg, b"hello world!\n") + + # test the server variant with a callback as client handler + with test_utils.unix_socket_path() as path: + server = MyServer(self.loop, path) + server.start_callback() + msg = self.loop.run_until_complete(asyncio.Task(client(path), + loop=self.loop)) + server.stop() + self.assertEqual(msg, b"hello world!\n") + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py index 9461ec8..2fa1db4 100644 --- a/Lib/test/test_asyncio/test_unix_events.py +++ b/Lib/test/test_asyncio/test_unix_events.py @@ -7,8 +7,10 @@ import io import os import pprint import signal +import socket import stat import sys +import tempfile import threading import unittest import unittest.mock @@ -24,7 +26,7 @@ from asyncio import unix_events @unittest.skipUnless(signal, 'Signals are not supported') -class SelectorEventLoopTests(unittest.TestCase): +class SelectorEventLoopSignalTests(unittest.TestCase): def setUp(self): self.loop = asyncio.SelectorEventLoop() @@ -200,6 +202,84 @@ class SelectorEventLoopTests(unittest.TestCase): m_signal.set_wakeup_fd.assert_called_once_with(-1) +@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), + 'UNIX Sockets are not supported') +class SelectorEventLoopUnixSocketTests(unittest.TestCase): + + def setUp(self): + self.loop = asyncio.SelectorEventLoop() + asyncio.set_event_loop(None) + + def tearDown(self): + 
self.loop.close() + + def test_create_unix_server_existing_path_sock(self): + with test_utils.unix_socket_path() as path: + sock = socket.socket(socket.AF_UNIX) + sock.bind(path) + + coro = self.loop.create_unix_server(lambda: None, path) + with self.assertRaisesRegexp(OSError, + 'Address.*is already in use'): + self.loop.run_until_complete(coro) + + def test_create_unix_server_existing_path_nonsock(self): + with tempfile.NamedTemporaryFile() as file: + coro = self.loop.create_unix_server(lambda: None, file.name) + with self.assertRaisesRegexp(OSError, + 'Address.*is already in use'): + self.loop.run_until_complete(coro) + + def test_create_unix_server_ssl_bool(self): + coro = self.loop.create_unix_server(lambda: None, path='spam', + ssl=True) + with self.assertRaisesRegex(TypeError, + 'ssl argument must be an SSLContext'): + self.loop.run_until_complete(coro) + + def test_create_unix_server_nopath_nosock(self): + coro = self.loop.create_unix_server(lambda: None, path=None) + with self.assertRaisesRegex(ValueError, + 'path was not specified, and no sock'): + self.loop.run_until_complete(coro) + + def test_create_unix_server_path_inetsock(self): + coro = self.loop.create_unix_server(lambda: None, path=None, + sock=socket.socket()) + with self.assertRaisesRegex(ValueError, + 'A UNIX Domain Socket was expected'): + self.loop.run_until_complete(coro) + + def test_create_unix_connection_path_sock(self): + coro = self.loop.create_unix_connection( + lambda: None, '/dev/null', sock=object()) + with self.assertRaisesRegex(ValueError, 'path and sock can not be'): + self.loop.run_until_complete(coro) + + def test_create_unix_connection_nopath_nosock(self): + coro = self.loop.create_unix_connection( + lambda: None, None) + with self.assertRaisesRegex(ValueError, + 'no path and sock were specified'): + self.loop.run_until_complete(coro) + + def test_create_unix_connection_nossl_serverhost(self): + coro = self.loop.create_unix_connection( + lambda: None, '/dev/null', server_hostname='spam') + with self.assertRaisesRegex(ValueError, + 'server_hostname is only meaningful'): + self.loop.run_until_complete(coro) + + def test_create_unix_connection_ssl_noserverhost(self): + coro = self.loop.create_unix_connection( + lambda: None, '/dev/null', ssl=True) + + with self.assertRaisesRegexp( + ValueError, 'you have to pass server_hostname when using ssl'): + + self.loop.run_until_complete(coro) + + class UnixReadPipeTransportTests(unittest.TestCase): def setUp(self): -- cgit v0.12 From 907f0172e558a4003eb287303be862092eca6dc0 Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Tue, 18 Feb 2014 12:21:57 -0500 Subject: Misc/NEWS: Add a news item for UNIX Sockets support in asyncio. Cleanup WS. --- Misc/NEWS | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Misc/NEWS b/Misc/NEWS index 48b2b89..aee36ac 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -23,6 +23,10 @@ Core and Builtins Library ------- +- Issue #20673: Implement support for UNIX Domain Sockets in asyncio. + New APIs: loop.create_unix_connection(), loop.create_unix_server(), + streams.open_unix_connection(), and streams.start_unix_server(). + - Issue #20616: Add a format() method to tracemalloc.Traceback. - Issue #19744: the ensurepip installation step now just prints a warning to -- cgit v0.12 From ee227ae7cf1f88e865c168a8f13743e92ade1938 Mon Sep 17 00:00:00 2001 From: Zachary Ware Date: Tue, 18 Feb 2014 11:35:15 -0600 Subject: Issue #20609: Merge with 3.3. 
--- Misc/NEWS | 6 ++++++ PCbuild/pythoncore.vcxproj | 48 ++++++++++++++++++++++++++++++++++++++-------- 2 files changed, 46 insertions(+), 8 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS index aee36ac..4170069 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -35,6 +35,12 @@ Library - Issue #20594: Avoid name clash with the libc function posix_close. +Build +----- + +- Issue #20609: Restored the ability to build 64-bit Windows binaries on + 32-bit Windows, which was broken by the change in issue #19788. + What's New in Python 3.4.0 release candidate 1? =============================================== diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 11b7054..2296f0b 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -182,7 +182,11 @@ 0x1e000000 - $(KillPythonExe) + $(KillPythonExe) +IF %ERRORLEVEL% NEQ 0 ( + echo kill_python: warning: could not kill running Pythons, exit code %ERRORLEVEL% + exit /b 0 +) Killing any running $(PythonExe) instances... @@ -213,7 +217,11 @@ 0x1e000000 - $(KillPythonExe) + $(KillPythonExe) +IF %ERRORLEVEL% NEQ 0 ( + echo kill_python: warning: could not kill running Pythons, exit code %ERRORLEVEL% + exit /b 0 +) Killing any running $(PythonExe) instances... @@ -244,7 +252,11 @@ 0x1e000000 - $(KillPythonExe) + $(KillPythonExe) +IF %ERRORLEVEL% NEQ 0 ( + echo kill_python: warning: could not kill running Pythons, exit code %ERRORLEVEL% + exit /b 0 +) Killing any running $(PythonExe) instances... @@ -278,7 +290,11 @@ 0x1e000000 - $(KillPythonExe) + $(KillPythonExe) +IF %ERRORLEVEL% NEQ 0 ( + echo kill_python: warning: could not kill running Pythons, exit code %ERRORLEVEL% + exit /b 0 +) Killing any running $(PythonExe) instances... @@ -307,7 +323,11 @@ 0x1e000000 - $(KillPythonExe) + $(KillPythonExe) +IF %ERRORLEVEL% NEQ 0 ( + echo kill_python: warning: could not kill running Pythons, exit code %ERRORLEVEL% + exit /b 0 +) Killing any running $(PythonExe) instances... @@ -339,7 +359,11 @@ MachineX64 - $(KillPythonExe) + $(KillPythonExe) +IF %ERRORLEVEL% NEQ 0 ( + echo kill_python: warning: could not kill running Pythons, exit code %ERRORLEVEL% + exit /b 0 +) Killing any running $(PythonExe) instances... @@ -368,7 +392,11 @@ 0x1e000000 - $(KillPythonExe) + $(KillPythonExe) +IF %ERRORLEVEL% NEQ 0 ( + echo kill_python: warning: could not kill running Pythons, exit code %ERRORLEVEL% + exit /b 0 +) Killing any running $(PythonExe) instances... @@ -400,7 +428,11 @@ MachineX64 - $(KillPythonExe) + $(KillPythonExe) +IF %ERRORLEVEL% NEQ 0 ( + echo kill_python: warning: could not kill running Pythons, exit code %ERRORLEVEL% + exit /b 0 +) Killing any running $(PythonExe) instances... -- cgit v0.12 From 026019f89bf6669593d458dfa3ad9fc7b8d78bc2 Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Tue, 18 Feb 2014 12:49:41 -0500 Subject: Mangle __parameters in __annotations__ dict properly. Issue #20625. --- Doc/whatsnew/3.4.rst | 4 ++++ Lib/test/test_grammar.py | 7 +++++++ Lib/test/test_inspect.py | 16 ++++++++++++++++ Misc/NEWS | 3 +++ Python/compile.c | 8 +++++++- 5 files changed, 37 insertions(+), 1 deletion(-) diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index faa35f0..1b71c57 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -1704,6 +1704,10 @@ Changes in the Python API informative :exc:`ValueError` rather than the previous more mysterious :exc:`AttributeError` (:issue:`9177`). +* Parameter names in ``__annotations__`` dict are now mangled properly, + similarly to ``__kwdefaults__``. 
(Contributed by Yury Selivanov in + :issue:`20625`). + Changes in the C API -------------------- diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py index 6b326bd..9da6338 100644 --- a/Lib/test/test_grammar.py +++ b/Lib/test/test_grammar.py @@ -314,6 +314,13 @@ class GrammarTests(unittest.TestCase): self.assertEqual(f.__annotations__, {'b': 1, 'c': 2, 'e': 3, 'g': 6, 'h': 7, 'j': 9, 'k': 11, 'return': 12}) + # Check for issue #20625 -- annotations mangling + class Spam: + def f(self, *, __kw:1): + pass + class Ham(Spam): pass + self.assertEquals(Spam.f.__annotations__, {'_Spam__kw': 1}) + self.assertEquals(Ham.f.__annotations__, {'_Spam__kw': 1}) # Check for SF Bug #1697248 - mixing decorators and a return annotation def null(x): return x @null diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py index 3f20419..a34a418 100644 --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -2390,6 +2390,22 @@ class TestSignatureObject(unittest.TestCase): self.assertEqual(sig.return_annotation, 42) self.assertEqual(sig, inspect.signature(test)) + def test_signature_on_mangled_parameters(self): + class Spam: + def foo(self, __p1:1=2, *, __p2:2=3): + pass + class Ham(Spam): + pass + + self.assertEqual(self.signature(Spam.foo), + ((('self', ..., ..., "positional_or_keyword"), + ('_Spam__p1', 2, 1, "positional_or_keyword"), + ('_Spam__p2', 3, 2, "keyword_only")), + ...)) + + self.assertEqual(self.signature(Spam.foo), + self.signature(Ham.foo)) + class TestParameterObject(unittest.TestCase): def test_signature_parameter_kinds(self): diff --git a/Misc/NEWS b/Misc/NEWS index 4170069..701c4e9 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Release date: 2014-02-23 Core and Builtins ----------------- +- Issue #20625: Parameter names in __annotations__ were not mangled properly. + Discovered by Jonas Wielicki, patch by Yury Selivanov. + - Issue #20261: In pickle, lookup __getnewargs__ and __getnewargs_ex__ on the type of the object. diff --git a/Python/compile.c b/Python/compile.c index a7ddc5a..57a2329 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -1533,8 +1533,14 @@ compiler_visit_argannotation(struct compiler *c, identifier id, { if (annotation) { VISIT(c, expr, annotation); - if (PyList_Append(names, id)) + PyObject *mangled = _Py_Mangle(c->u->u_private, id); + if (!mangled) return -1; + if (PyList_Append(names, mangled) < 0) { + Py_DECREF(mangled); + return -1; + } + Py_DECREF(mangled); } return 0; } -- cgit v0.12 From e3e786c96311e8926c58240cace075360033f9ca Mon Sep 17 00:00:00 2001 From: Guido van Rossum Date: Tue, 18 Feb 2014 10:24:30 -0800 Subject: asyncio: Make tests pass on Windows. --- Lib/asyncio/streams.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Lib/asyncio/streams.py b/Lib/asyncio/streams.py index 698c5c6..27d595f 100644 --- a/Lib/asyncio/streams.py +++ b/Lib/asyncio/streams.py @@ -2,12 +2,14 @@ __all__ = ['StreamReader', 'StreamWriter', 'StreamReaderProtocol', 'open_connection', 'start_server', - 'open_unix_connection', 'start_unix_server', 'IncompleteReadError', ] import socket +if hasattr(socket, 'AF_UNIX'): + __all__.extend(['open_unix_connection', 'start_unix_server']) + from . import events from . import futures from . 
import protocols -- cgit v0.12 From dc87052c0cf2afcdbd8ecbd60077a863b295b719 Mon Sep 17 00:00:00 2001 From: Ethan Furman Date: Tue, 18 Feb 2014 12:37:12 -0800 Subject: Close issue20653: allow Enum subclasses to override __reduce_ex__ --- Lib/enum.py | 26 +++++++++++----------- Lib/test/test_enum.py | 61 ++++++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 73 insertions(+), 14 deletions(-) diff --git a/Lib/enum.py b/Lib/enum.py index 794f68e..c9bd7c0 100644 --- a/Lib/enum.py +++ b/Lib/enum.py @@ -116,12 +116,14 @@ class EnumMeta(type): enum_class._value2member_map_ = {} # check for a supported pickle protocols, and if not present sabotage - # pickling, since it won't work anyway - if member_type is not object: - methods = ('__getnewargs_ex__', '__getnewargs__', - '__reduce_ex__', '__reduce__') - if not any(map(member_type.__dict__.get, methods)): - _make_class_unpicklable(enum_class) + # pickling, since it won't work anyway. + # if new class implements its own __reduce_ex__, do not sabotage + if classdict.get('__reduce_ex__') is None: + if member_type is not object: + methods = ('__getnewargs_ex__', '__getnewargs__', + '__reduce_ex__', '__reduce__') + if not any(map(member_type.__dict__.get, methods)): + _make_class_unpicklable(enum_class) # instantiate them, checking for duplicates as we go # we instantiate first instead of checking for duplicates first in case @@ -167,7 +169,7 @@ class EnumMeta(type): # double check that repr and friends are not the mixin's or various # things break (such as pickle) - for name in ('__repr__', '__str__', '__format__', '__getnewargs__', '__reduce_ex__'): + for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'): class_method = getattr(enum_class, name) obj_method = getattr(member_type, name, None) enum_method = getattr(first_enum, name, None) @@ -192,8 +194,9 @@ class EnumMeta(type): (i.e. Color = Enum('Color', names='red green blue')). When used for the functional API: `module`, if set, will be stored in - the new class' __module__ attribute; `type`, if set, will be mixed in - as the first base class. + the new class' __module__ attribute; `qualname`, if set, will be stored + in the new class' __qualname__ attribute; `type`, if set, will be mixed + in as the first base class. 
Note: if `module` is not set this routine will attempt to discover the calling module by walking the frame stack; if this is unsuccessful @@ -465,14 +468,11 @@ class Enum(metaclass=EnumMeta): val = self.value return cls.__format__(val, format_spec) - def __getnewargs__(self): - return (self._value_, ) - def __hash__(self): return hash(self._name_) def __reduce_ex__(self, proto): - return self.__class__, self.__getnewargs__() + return self.__class__, (self._value_, ) # DynamicClassAttribute is used to provide access to the `name` and # `value` properties of enum members while keeping some measure of diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py index 02009af..b8ef632 100644 --- a/Lib/test/test_enum.py +++ b/Lib/test/test_enum.py @@ -956,6 +956,7 @@ class TestEnum(unittest.TestCase): test_pickle_dump_load(self.assertEqual, NI5, 5) self.assertEqual(NEI.y.value, 2) test_pickle_dump_load(self.assertIs, NEI.y) + test_pickle_dump_load(self.assertIs, NEI) def test_subclasses_with_getnewargs_ex(self): class NamedInt(int): @@ -1012,6 +1013,7 @@ class TestEnum(unittest.TestCase): test_pickle_dump_load(self.assertEqual, NI5, 5, protocol=(4, 4)) self.assertEqual(NEI.y.value, 2) test_pickle_dump_load(self.assertIs, NEI.y, protocol=(4, 4)) + test_pickle_dump_load(self.assertIs, NEI) def test_subclasses_with_reduce(self): class NamedInt(int): @@ -1068,6 +1070,7 @@ class TestEnum(unittest.TestCase): test_pickle_dump_load(self.assertEqual, NI5, 5) self.assertEqual(NEI.y.value, 2) test_pickle_dump_load(self.assertIs, NEI.y) + test_pickle_dump_load(self.assertIs, NEI) def test_subclasses_with_reduce_ex(self): class NamedInt(int): @@ -1124,8 +1127,9 @@ class TestEnum(unittest.TestCase): test_pickle_dump_load(self.assertEqual, NI5, 5) self.assertEqual(NEI.y.value, 2) test_pickle_dump_load(self.assertIs, NEI.y) + test_pickle_dump_load(self.assertIs, NEI) - def test_subclasses_without_getnewargs(self): + def test_subclasses_without_direct_pickle_support(self): class NamedInt(int): __qualname__ = 'NamedInt' def __new__(cls, *args): @@ -1178,6 +1182,61 @@ class TestEnum(unittest.TestCase): test_pickle_exception(self.assertRaises, TypeError, NEI.x) test_pickle_exception(self.assertRaises, PicklingError, NEI) + def test_subclasses_without_direct_pickle_support_using_name(self): + class NamedInt(int): + __qualname__ = 'NamedInt' + def __new__(cls, *args): + _args = args + name, *args = args + if len(args) == 0: + raise TypeError("name and value must be specified") + self = int.__new__(cls, *args) + self._intname = name + self._args = _args + return self + @property + def __name__(self): + return self._intname + def __repr__(self): + # repr() is updated to include the name and type info + return "{}({!r}, {})".format(type(self).__name__, + self.__name__, + int.__repr__(self)) + def __str__(self): + # str() is unchanged, even if it relies on the repr() fallback + base = int + base_str = base.__str__ + if base_str.__objclass__ is object: + return base.__repr__(self) + return base_str(self) + # for simplicity, we only define one operator that + # propagates expressions + def __add__(self, other): + temp = int(self) + int( other) + if isinstance(self, NamedInt) and isinstance(other, NamedInt): + return NamedInt( + '({0} + {1})'.format(self.__name__, other.__name__), + temp ) + else: + return temp + + class NEI(NamedInt, Enum): + __qualname__ = 'NEI' + x = ('the-x', 1) + y = ('the-y', 2) + def __reduce_ex__(self, proto): + return getattr, (self.__class__, self._name_) + + self.assertIs(NEI.__new__, 
Enum.__new__) + self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)") + globals()['NamedInt'] = NamedInt + globals()['NEI'] = NEI + NI5 = NamedInt('test', 5) + self.assertEqual(NI5, 5) + self.assertEqual(NEI.y.value, 2) + test_pickle_dump_load(self.assertIs, NEI.y) + test_pickle_dump_load(self.assertIs, NEI) + def test_tuple_subclass(self): class SomeTuple(tuple, Enum): __qualname__ = 'SomeTuple' # needed for pickle protocol 4 -- cgit v0.12 From 6acc5e1330239cd721205b310dfddec1eb6425c1 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 18 Feb 2014 22:07:56 +0100 Subject: Issue #20625: Fix compilation issue --- Python/compile.c | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Python/compile.c b/Python/compile.c index 57a2329..9978eb3 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -1532,8 +1532,9 @@ compiler_visit_argannotation(struct compiler *c, identifier id, expr_ty annotation, PyObject *names) { if (annotation) { + PyObject *mangled; VISIT(c, expr, annotation); - PyObject *mangled = _Py_Mangle(c->u->u_private, id); + mangled = _Py_Mangle(c->u->u_private, id); if (!mangled) return -1; if (PyList_Append(names, mangled) < 0) { -- cgit v0.12 From 569efa2e4bf985f27a9f85393e29d3ad8ac73344 Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Tue, 18 Feb 2014 18:02:19 -0500 Subject: asyncio: New error handling API. Issue #20681. --- Lib/asyncio/base_events.py | 96 ++++++++++++- Lib/asyncio/events.py | 31 +++- Lib/asyncio/futures.py | 22 ++- Lib/asyncio/proactor_events.py | 33 ++++- Lib/asyncio/selector_events.py | 35 +++-- Lib/asyncio/test_utils.py | 18 ++- Lib/asyncio/unix_events.py | 26 +++- Lib/asyncio/windows_events.py | 8 +- Lib/test/test_asyncio/test_base_events.py | 194 ++++++++++++++++++++++++-- Lib/test/test_asyncio/test_events.py | 44 +++--- Lib/test/test_asyncio/test_futures.py | 12 +- Lib/test/test_asyncio/test_proactor_events.py | 8 +- Lib/test/test_asyncio/test_selector_events.py | 20 ++- Lib/test/test_asyncio/test_unix_events.py | 39 ++++-- Misc/NEWS | 4 + 15 files changed, 491 insertions(+), 99 deletions(-) diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index b74e936..cb2499d 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -122,6 +122,7 @@ class BaseEventLoop(events.AbstractEventLoop): self._internal_fds = 0 self._running = False self._clock_resolution = time.get_clock_info('monotonic').resolution + self._exception_handler = None def _make_socket_transport(self, sock, protocol, waiter=None, *, extra=None, server=None): @@ -254,7 +255,7 @@ class BaseEventLoop(events.AbstractEventLoop): """Like call_later(), but uses an absolute time.""" if tasks.iscoroutinefunction(callback): raise TypeError("coroutines cannot be used with call_at()") - timer = events.TimerHandle(when, callback, args) + timer = events.TimerHandle(when, callback, args, self) heapq.heappush(self._scheduled, timer) return timer @@ -270,7 +271,7 @@ class BaseEventLoop(events.AbstractEventLoop): """ if tasks.iscoroutinefunction(callback): raise TypeError("coroutines cannot be used with call_soon()") - handle = events.Handle(callback, args) + handle = events.Handle(callback, args, self) self._ready.append(handle) return handle @@ -625,6 +626,97 @@ class BaseEventLoop(events.AbstractEventLoop): protocol, popen_args, False, stdin, stdout, stderr, bufsize, **kwargs) return transport, protocol + def set_exception_handler(self, handler): + """Set handler as the new event loop exception handler. 
+ + If handler is None, the default exception handler will + be set. + + If handler is a callable object, it should have a + matching signature to '(loop, context)', where 'loop' + will be a reference to the active event loop, 'context' + will be a dict object (see `call_exception_handler()` + documentation for details about context). + """ + if handler is not None and not callable(handler): + raise TypeError('A callable object or None is expected, ' + 'got {!r}'.format(handler)) + self._exception_handler = handler + + def default_exception_handler(self, context): + """Default exception handler. + + This is called when an exception occurs and no exception + handler is set, and can be called by a custom exception + handler that wants to defer to the default behavior. + + context parameter has the same meaning as in + `call_exception_handler()`. + """ + message = context.get('message') + if not message: + message = 'Unhandled exception in event loop' + + exception = context.get('exception') + if exception is not None: + exc_info = (type(exception), exception, exception.__traceback__) + else: + exc_info = False + + log_lines = [message] + for key in sorted(context): + if key in {'message', 'exception'}: + continue + log_lines.append('{}: {!r}'.format(key, context[key])) + + logger.error('\n'.join(log_lines), exc_info=exc_info) + + def call_exception_handler(self, context): + """Call the current event loop exception handler. + + context is a dict object containing the following keys + (new keys maybe introduced later): + - 'message': Error message; + - 'exception' (optional): Exception object; + - 'future' (optional): Future instance; + - 'handle' (optional): Handle instance; + - 'protocol' (optional): Protocol instance; + - 'transport' (optional): Transport instance; + - 'socket' (optional): Socket instance. + + Note: this method should not be overloaded in subclassed + event loops. For any custom exception handling, use + `set_exception_handler()` method. + """ + if self._exception_handler is None: + try: + self.default_exception_handler(context) + except Exception: + # Second protection layer for unexpected errors + # in the default implementation, as well as for subclassed + # event loops with overloaded "default_exception_handler". + logger.error('Exception in default exception handler', + exc_info=True) + else: + try: + self._exception_handler(self, context) + except Exception as exc: + # Exception in the user set custom exception handler. + try: + # Let's try default handler. + self.default_exception_handler({ + 'message': 'Unhandled error in exception handler', + 'exception': exc, + 'context': context, + }) + except Exception: + # Guard 'default_exception_handler' in case it's + # overloaded. 
+ logger.error('Exception in default exception handler ' + 'while handling an unexpected error ' + 'in custom exception handler', + exc_info=True) + def _add_callback(self, handle): """Add a Handle to ready or scheduled.""" assert isinstance(handle, events.Handle), 'A Handle is required here' diff --git a/Lib/asyncio/events.py b/Lib/asyncio/events.py index 7841ad9..f61c5b7 100644 --- a/Lib/asyncio/events.py +++ b/Lib/asyncio/events.py @@ -19,10 +19,11 @@ from .log import logger class Handle: """Object returned by callback registration methods.""" - __slots__ = ['_callback', '_args', '_cancelled'] + __slots__ = ['_callback', '_args', '_cancelled', '_loop'] - def __init__(self, callback, args): + def __init__(self, callback, args, loop): assert not isinstance(callback, Handle), 'A Handle is not a callback' + self._loop = loop self._callback = callback self._args = args self._cancelled = False @@ -39,9 +40,14 @@ class Handle: def _run(self): try: self._callback(*self._args) - except Exception: - logger.exception('Exception in callback %s %r', - self._callback, self._args) + except Exception as exc: + msg = 'Exception in callback {}{!r}'.format(self._callback, + self._args) + self._loop.call_exception_handler({ + 'message': msg, + 'exception': exc, + 'handle': self, + }) self = None # Needed to break cycles when an exception occurs. @@ -50,9 +56,9 @@ class TimerHandle(Handle): __slots__ = ['_when'] - def __init__(self, when, callback, args): + def __init__(self, when, callback, args, loop): assert when is not None - super().__init__(callback, args) + super().__init__(callback, args, loop) self._when = when @@ -328,6 +334,17 @@ class AbstractEventLoop: def remove_signal_handler(self, sig): raise NotImplementedError + # Error handlers. + + def set_exception_handler(self, handler): + raise NotImplementedError + + def default_exception_handler(self, context): + raise NotImplementedError + + def call_exception_handler(self, context): + raise NotImplementedError + class AbstractEventLoopPolicy: """Abstract policy for accessing the event loop.""" diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py index d09f423..b9cd45c 100644 --- a/Lib/asyncio/futures.py +++ b/Lib/asyncio/futures.py @@ -83,9 +83,10 @@ class _TracebackLogger: in a discussion about closing files when they are collected. """ - __slots__ = ['exc', 'tb'] + __slots__ = ['exc', 'tb', 'loop'] - def __init__(self, exc): + def __init__(self, exc, loop): + self.loop = loop self.exc = exc self.tb = None @@ -102,8 +103,11 @@ class _TracebackLogger: def __del__(self): if self.tb: - logger.error('Future/Task exception was never retrieved:\n%s', - ''.join(self.tb)) + msg = 'Future/Task exception was never retrieved:\n{tb}' + context = { + 'message': msg.format(tb=''.join(self.tb)), + } + self.loop.call_exception_handler(context) class Future: @@ -173,8 +177,12 @@ class Future: # has consumed the exception return exc = self._exception - logger.error('Future/Task exception was never retrieved:', - exc_info=(exc.__class__, exc, exc.__traceback__)) + context = { + 'message': 'Future/Task exception was never retrieved', + 'exception': exc, + 'future': self, + } + self._loop.call_exception_handler(context) def cancel(self): """Cancel the future and schedule callbacks. 
@@ -309,7 +317,7 @@ class Future: if _PY34: self._log_traceback = True else: - self._tb_logger = _TracebackLogger(exception) + self._tb_logger = _TracebackLogger(exception, self._loop) # Arrange for the logger to be activated after all callbacks # have had a chance to call result() or exception(). self._loop.call_soon(self._tb_logger.activate) diff --git a/Lib/asyncio/proactor_events.py b/Lib/asyncio/proactor_events.py index 5de4d3d..b2ac632 100644 --- a/Lib/asyncio/proactor_events.py +++ b/Lib/asyncio/proactor_events.py @@ -56,7 +56,12 @@ class _ProactorBasePipeTransport(transports.BaseTransport): def _fatal_error(self, exc): if not isinstance(exc, (BrokenPipeError, ConnectionResetError)): - logger.exception('Fatal error for %s', self) + self._loop.call_exception_handler({ + 'message': 'Fatal transport error', + 'exception': exc, + 'transport': self, + 'protocol': self._protocol, + }) self._force_close(exc) def _force_close(self, exc): @@ -103,8 +108,13 @@ class _ProactorBasePipeTransport(transports.BaseTransport): self._protocol_paused = True try: self._protocol.pause_writing() - except Exception: - logger.exception('pause_writing() failed') + except Exception as exc: + self._loop.call_exception_handler({ + 'message': 'protocol.pause_writing() failed', + 'exception': exc, + 'transport': self, + 'protocol': self._protocol, + }) def _maybe_resume_protocol(self): if (self._protocol_paused and @@ -112,8 +122,13 @@ class _ProactorBasePipeTransport(transports.BaseTransport): self._protocol_paused = False try: self._protocol.resume_writing() - except Exception: - logger.exception('resume_writing() failed') + except Exception as exc: + self._loop.call_exception_handler({ + 'message': 'protocol.resume_writing() failed', + 'exception': exc, + 'transport': self, + 'protocol': self._protocol, + }) def set_write_buffer_limits(self, high=None, low=None): if high is None: @@ -465,9 +480,13 @@ class BaseProactorEventLoop(base_events.BaseEventLoop): conn, protocol, extra={'peername': addr}, server=server) f = self._proactor.accept(sock) - except OSError: + except OSError as exc: if sock.fileno() != -1: - logger.exception('Accept failed') + self.call_exception_handler({ + 'message': 'Accept failed', + 'exception': exc, + 'socket': sock, + }) sock.close() except futures.CancelledError: sock.close() diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py index 10b0257..fb86f82 100644 --- a/Lib/asyncio/selector_events.py +++ b/Lib/asyncio/selector_events.py @@ -112,7 +112,11 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): # Some platforms (e.g. Linux keep reporting the FD as # ready, so we remove the read handler temporarily. # We'll try again in a while. 
- logger.exception('Accept out of system resource (%s)', exc) + self.call_exception_handler({ + 'message': 'socket.accept() out of system resource', + 'exception': exc, + 'socket': sock, + }) self.remove_reader(sock.fileno()) self.call_later(constants.ACCEPT_RETRY_DELAY, self._start_serving, @@ -132,7 +136,7 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): def add_reader(self, fd, callback, *args): """Add a reader callback.""" - handle = events.Handle(callback, args) + handle = events.Handle(callback, args, self) try: key = self._selector.get_key(fd) except KeyError: @@ -167,7 +171,7 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): def add_writer(self, fd, callback, *args): """Add a writer callback..""" - handle = events.Handle(callback, args) + handle = events.Handle(callback, args, self) try: key = self._selector.get_key(fd) except KeyError: @@ -364,8 +368,13 @@ class _FlowControlMixin(transports.Transport): self._protocol_paused = True try: self._protocol.pause_writing() - except Exception: - logger.exception('pause_writing() failed') + except Exception as exc: + self._loop.call_exception_handler({ + 'message': 'protocol.pause_writing() failed', + 'exception': exc, + 'transport': self, + 'protocol': self._protocol, + }) def _maybe_resume_protocol(self): if (self._protocol_paused and @@ -373,8 +382,13 @@ class _FlowControlMixin(transports.Transport): self._protocol_paused = False try: self._protocol.resume_writing() - except Exception: - logger.exception('resume_writing() failed') + except Exception as exc: + self._loop.call_exception_handler({ + 'message': 'protocol.resume_writing() failed', + 'exception': exc, + 'transport': self, + 'protocol': self._protocol, + }) def set_write_buffer_limits(self, high=None, low=None): if high is None: @@ -435,7 +449,12 @@ class _SelectorTransport(_FlowControlMixin, transports.Transport): def _fatal_error(self, exc): # Should be called from exception handler only. if not isinstance(exc, (BrokenPipeError, ConnectionResetError)): - logger.exception('Fatal error for %s', self) + self._loop.call_exception_handler({ + 'message': 'Fatal transport error', + 'exception': exc, + 'transport': self, + 'protocol': self._protocol, + }) self._force_close(exc) def _force_close(self, exc): diff --git a/Lib/asyncio/test_utils.py b/Lib/asyncio/test_utils.py index de2916b..28e5243 100644 --- a/Lib/asyncio/test_utils.py +++ b/Lib/asyncio/test_utils.py @@ -4,6 +4,7 @@ import collections import contextlib import io import os +import re import socket import socketserver import sys @@ -301,7 +302,7 @@ class TestLoop(base_events.BaseEventLoop): raise AssertionError("Time generator is not finished") def add_reader(self, fd, callback, *args): - self.readers[fd] = events.Handle(callback, args) + self.readers[fd] = events.Handle(callback, args, self) def remove_reader(self, fd): self.remove_reader_count[fd] += 1 @@ -320,7 +321,7 @@ class TestLoop(base_events.BaseEventLoop): handle._args, args) def add_writer(self, fd, callback, *args): - self.writers[fd] = events.Handle(callback, args) + self.writers[fd] = events.Handle(callback, args, self) def remove_writer(self, fd): self.remove_writer_count[fd] += 1 @@ -362,3 +363,16 @@ class TestLoop(base_events.BaseEventLoop): def MockCallback(**kwargs): return unittest.mock.Mock(spec=['__call__'], **kwargs) + + +class MockPattern(str): + """A regex based str with a fuzzy __eq__. + + Use this helper with 'mock.assert_called_with', or anywhere + where a regexp comparison between strings is needed. 
+ + For instance: + mock_call.assert_called_with(MockPattern('spam.*ham')) + """ + def __eq__(self, other): + return bool(re.search(str(self), other, re.S)) diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index e0d7507..9a40c04 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -65,7 +65,7 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): except ValueError as exc: raise RuntimeError(str(exc)) - handle = events.Handle(callback, args) + handle = events.Handle(callback, args, self) self._signal_handlers[sig] = handle try: @@ -294,7 +294,12 @@ class _UnixReadPipeTransport(transports.ReadTransport): def _fatal_error(self, exc): # should be called by exception handler only if not (isinstance(exc, OSError) and exc.errno == errno.EIO): - logger.exception('Fatal error for %s', self) + self._loop.call_exception_handler({ + 'message': 'Fatal transport error', + 'exception': exc, + 'transport': self, + 'protocol': self._protocol, + }) self._close(exc) def _close(self, exc): @@ -441,7 +446,12 @@ class _UnixWritePipeTransport(selector_events._FlowControlMixin, def _fatal_error(self, exc): # should be called by exception handler only if not isinstance(exc, (BrokenPipeError, ConnectionResetError)): - logger.exception('Fatal error for %s', self) + self._loop.call_exception_handler({ + 'message': 'Fatal transport error', + 'exception': exc, + 'transport': self, + 'protocol': self._protocol, + }) self._close(exc) def _close(self, exc=None): @@ -582,8 +592,14 @@ class BaseChildWatcher(AbstractChildWatcher): def _sig_chld(self): try: self._do_waitpid_all() - except Exception: - logger.exception('Unknown exception in SIGCHLD handler') + except Exception as exc: + # self._loop should always be available here + # as '_sig_chld' is added as a signal handler + # in 'attach_loop' + self._loop.call_exception_handler({ + 'message': 'Unknown exception in SIGCHLD handler', + 'exception': exc, + }) def _compute_returncode(self, status): if os.WIFSIGNALED(status): diff --git a/Lib/asyncio/windows_events.py b/Lib/asyncio/windows_events.py index 0a2d981..c667a1c 100644 --- a/Lib/asyncio/windows_events.py +++ b/Lib/asyncio/windows_events.py @@ -156,9 +156,13 @@ class ProactorEventLoop(proactor_events.BaseProactorEventLoop): if pipe is None: return f = self._proactor.accept_pipe(pipe) - except OSError: + except OSError as exc: if pipe and pipe.fileno() != -1: - logger.exception('Pipe accept failed') + self.call_exception_handler({ + 'message': 'Pipe accept failed', + 'exception': exc, + 'pipe': pipe, + }) pipe.close() except futures.CancelledError: if pipe: diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py index 9fa9841..f664ccc 100644 --- a/Lib/test/test_asyncio/test_base_events.py +++ b/Lib/test/test_asyncio/test_base_events.py @@ -15,6 +15,10 @@ from asyncio import constants from asyncio import test_utils +MOCK_ANY = unittest.mock.ANY +PY34 = sys.version_info >= (3, 4) + + class BaseEventLoopTests(unittest.TestCase): def setUp(self): @@ -49,20 +53,21 @@ class BaseEventLoopTests(unittest.TestCase): self.assertRaises(NotImplementedError, next, iter(gen)) def test__add_callback_handle(self): - h = asyncio.Handle(lambda: False, ()) + h = asyncio.Handle(lambda: False, (), self.loop) self.loop._add_callback(h) self.assertFalse(self.loop._scheduled) self.assertIn(h, self.loop._ready) def test__add_callback_timer(self): - h = asyncio.TimerHandle(time.monotonic()+10, lambda: False, ()) + h = 
asyncio.TimerHandle(time.monotonic()+10, lambda: False, (), + self.loop) self.loop._add_callback(h) self.assertIn(h, self.loop._scheduled) def test__add_callback_cancelled_handle(self): - h = asyncio.Handle(lambda: False, ()) + h = asyncio.Handle(lambda: False, (), self.loop) h.cancel() self.loop._add_callback(h) @@ -137,15 +142,15 @@ class BaseEventLoopTests(unittest.TestCase): self.assertRaises( AssertionError, self.loop.run_in_executor, - None, asyncio.Handle(cb, ()), ('',)) + None, asyncio.Handle(cb, (), self.loop), ('',)) self.assertRaises( AssertionError, self.loop.run_in_executor, - None, asyncio.TimerHandle(10, cb, ())) + None, asyncio.TimerHandle(10, cb, (), self.loop)) def test_run_once_in_executor_cancelled(self): def cb(): pass - h = asyncio.Handle(cb, ()) + h = asyncio.Handle(cb, (), self.loop) h.cancel() f = self.loop.run_in_executor(None, h) @@ -156,7 +161,7 @@ class BaseEventLoopTests(unittest.TestCase): def test_run_once_in_executor_plain(self): def cb(): pass - h = asyncio.Handle(cb, ()) + h = asyncio.Handle(cb, (), self.loop) f = asyncio.Future(loop=self.loop) executor = unittest.mock.Mock() executor.submit.return_value = f @@ -175,8 +180,10 @@ class BaseEventLoopTests(unittest.TestCase): f.cancel() # Don't complain about abandoned Future. def test__run_once(self): - h1 = asyncio.TimerHandle(time.monotonic() + 5.0, lambda: True, ()) - h2 = asyncio.TimerHandle(time.monotonic() + 10.0, lambda: True, ()) + h1 = asyncio.TimerHandle(time.monotonic() + 5.0, lambda: True, (), + self.loop) + h2 = asyncio.TimerHandle(time.monotonic() + 10.0, lambda: True, (), + self.loop) h1.cancel() @@ -205,14 +212,15 @@ class BaseEventLoopTests(unittest.TestCase): m_time.monotonic = monotonic self.loop._scheduled.append( - asyncio.TimerHandle(11.0, lambda: True, ())) + asyncio.TimerHandle(11.0, lambda: True, (), self.loop)) self.loop._process_events = unittest.mock.Mock() self.loop._run_once() self.assertEqual(logging.INFO, m_logger.log.call_args[0][0]) idx = -1 data = [10.0, 10.0, 10.3, 13.0] - self.loop._scheduled = [asyncio.TimerHandle(11.0, lambda: True, ())] + self.loop._scheduled = [asyncio.TimerHandle(11.0, lambda: True, (), + self.loop)] self.loop._run_once() self.assertEqual(logging.DEBUG, m_logger.log.call_args[0][0]) @@ -225,7 +233,8 @@ class BaseEventLoopTests(unittest.TestCase): processed = True handle = loop.call_soon(lambda: True) - h = asyncio.TimerHandle(time.monotonic() - 1, cb, (self.loop,)) + h = asyncio.TimerHandle(time.monotonic() - 1, cb, (self.loop,), + self.loop) self.loop._process_events = unittest.mock.Mock() self.loop._scheduled.append(h) @@ -287,6 +296,163 @@ class BaseEventLoopTests(unittest.TestCase): self.loop.run_until_complete, self.loop.subprocess_shell, asyncio.SubprocessProtocol, 'exit 0', bufsize=4096) + def test_default_exc_handler_callback(self): + self.loop._process_events = unittest.mock.Mock() + + def zero_error(fut): + fut.set_result(True) + 1/0 + + # Test call_soon (events.Handle) + with unittest.mock.patch('asyncio.base_events.logger') as log: + fut = asyncio.Future(loop=self.loop) + self.loop.call_soon(zero_error, fut) + fut.add_done_callback(lambda fut: self.loop.stop()) + self.loop.run_forever() + log.error.assert_called_with( + test_utils.MockPattern('Exception in callback.*zero'), + exc_info=(ZeroDivisionError, MOCK_ANY, MOCK_ANY)) + + # Test call_later (events.TimerHandle) + with unittest.mock.patch('asyncio.base_events.logger') as log: + fut = asyncio.Future(loop=self.loop) + self.loop.call_later(0.01, zero_error, fut) + 
fut.add_done_callback(lambda fut: self.loop.stop()) + self.loop.run_forever() + log.error.assert_called_with( + test_utils.MockPattern('Exception in callback.*zero'), + exc_info=(ZeroDivisionError, MOCK_ANY, MOCK_ANY)) + + def test_default_exc_handler_coro(self): + self.loop._process_events = unittest.mock.Mock() + + @asyncio.coroutine + def zero_error_coro(): + yield from asyncio.sleep(0.01, loop=self.loop) + 1/0 + + # Test Future.__del__ + with unittest.mock.patch('asyncio.base_events.logger') as log: + fut = asyncio.async(zero_error_coro(), loop=self.loop) + fut.add_done_callback(lambda *args: self.loop.stop()) + self.loop.run_forever() + fut = None # Trigger Future.__del__ or futures._TracebackLogger + if PY34: + # Future.__del__ in Python 3.4 logs error with + # an actual exception context + log.error.assert_called_with( + test_utils.MockPattern('.*exception was never retrieved'), + exc_info=(ZeroDivisionError, MOCK_ANY, MOCK_ANY)) + else: + # futures._TracebackLogger logs only textual traceback + log.error.assert_called_with( + test_utils.MockPattern( + '.*exception was never retrieved.*ZeroDiv'), + exc_info=False) + + def test_set_exc_handler_invalid(self): + with self.assertRaisesRegex(TypeError, 'A callable object or None'): + self.loop.set_exception_handler('spam') + + def test_set_exc_handler_custom(self): + def zero_error(): + 1/0 + + def run_loop(): + self.loop.call_soon(zero_error) + self.loop._run_once() + + self.loop._process_events = unittest.mock.Mock() + + mock_handler = unittest.mock.Mock() + self.loop.set_exception_handler(mock_handler) + run_loop() + mock_handler.assert_called_with(self.loop, { + 'exception': MOCK_ANY, + 'message': test_utils.MockPattern( + 'Exception in callback.*zero_error'), + 'handle': MOCK_ANY, + }) + mock_handler.reset_mock() + + self.loop.set_exception_handler(None) + with unittest.mock.patch('asyncio.base_events.logger') as log: + run_loop() + log.error.assert_called_with( + test_utils.MockPattern( + 'Exception in callback.*zero'), + exc_info=(ZeroDivisionError, MOCK_ANY, MOCK_ANY)) + + assert not mock_handler.called + + def test_set_exc_handler_broken(self): + def run_loop(): + def zero_error(): + 1/0 + self.loop.call_soon(zero_error) + self.loop._run_once() + + def handler(loop, context): + raise AttributeError('spam') + + self.loop._process_events = unittest.mock.Mock() + + self.loop.set_exception_handler(handler) + + with unittest.mock.patch('asyncio.base_events.logger') as log: + run_loop() + log.error.assert_called_with( + test_utils.MockPattern( + 'Unhandled error in exception handler'), + exc_info=(AttributeError, MOCK_ANY, MOCK_ANY)) + + def test_default_exc_handler_broken(self): + _context = None + + class Loop(base_events.BaseEventLoop): + + _selector = unittest.mock.Mock() + _process_events = unittest.mock.Mock() + + def default_exception_handler(self, context): + nonlocal _context + _context = context + # Simulates custom buggy "default_exception_handler" + raise ValueError('spam') + + loop = Loop() + asyncio.set_event_loop(loop) + + def run_loop(): + def zero_error(): + 1/0 + loop.call_soon(zero_error) + loop._run_once() + + with unittest.mock.patch('asyncio.base_events.logger') as log: + run_loop() + log.error.assert_called_with( + 'Exception in default exception handler', + exc_info=True) + + def custom_handler(loop, context): + raise ValueError('ham') + + _context = None + loop.set_exception_handler(custom_handler) + with unittest.mock.patch('asyncio.base_events.logger') as log: + run_loop() + log.error.assert_called_with( + 
test_utils.MockPattern('Exception in default exception.*' + 'while handling.*in custom'), + exc_info=True) + + # Check that original context was passed to default + # exception handler. + self.assertIn('context', _context) + self.assertIs(type(_context['context']['exception']), + ZeroDivisionError) + class MyProto(asyncio.Protocol): done = None @@ -716,7 +882,7 @@ class BaseEventLoopWithSelectorTests(unittest.TestCase): self.loop._accept_connection(MyProto, sock) self.assertFalse(sock.close.called) - @unittest.mock.patch('asyncio.selector_events.logger') + @unittest.mock.patch('asyncio.base_events.logger') def test_accept_connection_exception(self, m_log): sock = unittest.mock.Mock() sock.fileno.return_value = 10 @@ -725,7 +891,7 @@ class BaseEventLoopWithSelectorTests(unittest.TestCase): self.loop.call_later = unittest.mock.Mock() self.loop._accept_connection(MyProto, sock) - self.assertTrue(m_log.exception.called) + self.assertTrue(m_log.error.called) self.assertFalse(sock.close.called) self.loop.remove_reader.assert_called_with(10) self.loop.call_later.assert_called_with(constants.ACCEPT_RETRY_DELAY, diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index c9d04c0..a0a4d02 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -1788,7 +1788,7 @@ class HandleTests(unittest.TestCase): return args args = () - h = asyncio.Handle(callback, args) + h = asyncio.Handle(callback, args, unittest.mock.Mock()) self.assertIs(h._callback, callback) self.assertIs(h._args, args) self.assertFalse(h._cancelled) @@ -1808,28 +1808,37 @@ class HandleTests(unittest.TestCase): '.callback')) self.assertTrue(r.endswith('())'), r) - def test_handle(self): + def test_handle_from_handle(self): def callback(*args): return args - h1 = asyncio.Handle(callback, ()) + m_loop = object() + h1 = asyncio.Handle(callback, (), loop=m_loop) self.assertRaises( - AssertionError, asyncio.Handle, h1, ()) + AssertionError, asyncio.Handle, h1, (), m_loop) - @unittest.mock.patch('asyncio.events.logger') - def test_callback_with_exception(self, log): + def test_callback_with_exception(self): def callback(): raise ValueError() - h = asyncio.Handle(callback, ()) + m_loop = unittest.mock.Mock() + m_loop.call_exception_handler = unittest.mock.Mock() + + h = asyncio.Handle(callback, (), m_loop) h._run() - self.assertTrue(log.exception.called) + + m_loop.call_exception_handler.assert_called_with({ + 'message': test_utils.MockPattern('Exception in callback.*'), + 'exception': unittest.mock.ANY, + 'handle': h + }) class TimerTests(unittest.TestCase): def test_hash(self): when = time.monotonic() - h = asyncio.TimerHandle(when, lambda: False, ()) + h = asyncio.TimerHandle(when, lambda: False, (), + unittest.mock.Mock()) self.assertEqual(hash(h), hash(when)) def test_timer(self): @@ -1838,7 +1847,7 @@ class TimerTests(unittest.TestCase): args = () when = time.monotonic() - h = asyncio.TimerHandle(when, callback, args) + h = asyncio.TimerHandle(when, callback, args, unittest.mock.Mock()) self.assertIs(h._callback, callback) self.assertIs(h._args, args) self.assertFalse(h._cancelled) @@ -1853,16 +1862,19 @@ class TimerTests(unittest.TestCase): self.assertTrue(r.endswith('())'), r) self.assertRaises(AssertionError, - asyncio.TimerHandle, None, callback, args) + asyncio.TimerHandle, None, callback, args, + unittest.mock.Mock()) def test_timer_comparison(self): + loop = unittest.mock.Mock() + def callback(*args): return args when = time.monotonic() - h1 = 
asyncio.TimerHandle(when, callback, ()) - h2 = asyncio.TimerHandle(when, callback, ()) + h1 = asyncio.TimerHandle(when, callback, (), loop) + h2 = asyncio.TimerHandle(when, callback, (), loop) # TODO: Use assertLess etc. self.assertFalse(h1 < h2) self.assertFalse(h2 < h1) @@ -1878,8 +1890,8 @@ class TimerTests(unittest.TestCase): h2.cancel() self.assertFalse(h1 == h2) - h1 = asyncio.TimerHandle(when, callback, ()) - h2 = asyncio.TimerHandle(when + 10.0, callback, ()) + h1 = asyncio.TimerHandle(when, callback, (), loop) + h2 = asyncio.TimerHandle(when + 10.0, callback, (), loop) self.assertTrue(h1 < h2) self.assertFalse(h2 < h1) self.assertTrue(h1 <= h2) @@ -1891,7 +1903,7 @@ class TimerTests(unittest.TestCase): self.assertFalse(h1 == h2) self.assertTrue(h1 != h2) - h3 = asyncio.Handle(callback, ()) + h3 = asyncio.Handle(callback, (), loop) self.assertIs(NotImplemented, h1.__eq__(h3)) self.assertIs(NotImplemented, h1.__ne__(h3)) diff --git a/Lib/test/test_asyncio/test_futures.py b/Lib/test/test_asyncio/test_futures.py index 8a6976b..2e4dbd4 100644 --- a/Lib/test/test_asyncio/test_futures.py +++ b/Lib/test/test_asyncio/test_futures.py @@ -174,20 +174,20 @@ class FutureTests(unittest.TestCase): self.assertRaises(AssertionError, test) fut.cancel() - @unittest.mock.patch('asyncio.futures.logger') + @unittest.mock.patch('asyncio.base_events.logger') def test_tb_logger_abandoned(self, m_log): fut = asyncio.Future(loop=self.loop) del fut self.assertFalse(m_log.error.called) - @unittest.mock.patch('asyncio.futures.logger') + @unittest.mock.patch('asyncio.base_events.logger') def test_tb_logger_result_unretrieved(self, m_log): fut = asyncio.Future(loop=self.loop) fut.set_result(42) del fut self.assertFalse(m_log.error.called) - @unittest.mock.patch('asyncio.futures.logger') + @unittest.mock.patch('asyncio.base_events.logger') def test_tb_logger_result_retrieved(self, m_log): fut = asyncio.Future(loop=self.loop) fut.set_result(42) @@ -195,7 +195,7 @@ class FutureTests(unittest.TestCase): del fut self.assertFalse(m_log.error.called) - @unittest.mock.patch('asyncio.futures.logger') + @unittest.mock.patch('asyncio.base_events.logger') def test_tb_logger_exception_unretrieved(self, m_log): fut = asyncio.Future(loop=self.loop) fut.set_exception(RuntimeError('boom')) @@ -203,7 +203,7 @@ class FutureTests(unittest.TestCase): test_utils.run_briefly(self.loop) self.assertTrue(m_log.error.called) - @unittest.mock.patch('asyncio.futures.logger') + @unittest.mock.patch('asyncio.base_events.logger') def test_tb_logger_exception_retrieved(self, m_log): fut = asyncio.Future(loop=self.loop) fut.set_exception(RuntimeError('boom')) @@ -211,7 +211,7 @@ class FutureTests(unittest.TestCase): del fut self.assertFalse(m_log.error.called) - @unittest.mock.patch('asyncio.futures.logger') + @unittest.mock.patch('asyncio.base_events.logger') def test_tb_logger_exception_result_retrieved(self, m_log): fut = asyncio.Future(loop=self.loop) fut.set_exception(RuntimeError('boom')) diff --git a/Lib/test/test_asyncio/test_proactor_events.py b/Lib/test/test_asyncio/test_proactor_events.py index 6bea1a3..816c973 100644 --- a/Lib/test/test_asyncio/test_proactor_events.py +++ b/Lib/test/test_asyncio/test_proactor_events.py @@ -207,13 +207,13 @@ class ProactorSocketTransportTests(unittest.TestCase): test_utils.run_briefly(self.loop) self.assertFalse(self.protocol.connection_lost.called) - @unittest.mock.patch('asyncio.proactor_events.logger') + @unittest.mock.patch('asyncio.base_events.logger') def test_fatal_error(self, m_logging): tr = 
_ProactorSocketTransport(self.loop, self.sock, self.protocol) tr._force_close = unittest.mock.Mock() tr._fatal_error(None) self.assertTrue(tr._force_close.called) - self.assertTrue(m_logging.exception.called) + self.assertTrue(m_logging.error.called) def test_force_close(self): tr = _ProactorSocketTransport(self.loop, self.sock, self.protocol) @@ -432,7 +432,7 @@ class BaseProactorEventLoopTests(unittest.TestCase): def test_process_events(self): self.loop._process_events([]) - @unittest.mock.patch('asyncio.proactor_events.logger') + @unittest.mock.patch('asyncio.base_events.logger') def test_create_server(self, m_log): pf = unittest.mock.Mock() call_soon = self.loop.call_soon = unittest.mock.Mock() @@ -458,7 +458,7 @@ class BaseProactorEventLoopTests(unittest.TestCase): fut.result.side_effect = OSError() loop(fut) self.assertTrue(self.sock.close.called) - self.assertTrue(m_log.exception.called) + self.assertTrue(m_log.error.called) def test_create_server_cancel(self): pf = unittest.mock.Mock() diff --git a/Lib/test/test_asyncio/test_selector_events.py b/Lib/test/test_asyncio/test_selector_events.py index 7741e19..04b0578 100644 --- a/Lib/test/test_asyncio/test_selector_events.py +++ b/Lib/test/test_asyncio/test_selector_events.py @@ -23,6 +23,9 @@ from asyncio.selector_events import _SelectorSocketTransport from asyncio.selector_events import _SelectorDatagramTransport +MOCK_ANY = unittest.mock.ANY + + class TestBaseSelectorEventLoop(BaseSelectorEventLoop): def _make_self_pipe(self): @@ -643,14 +646,18 @@ class SelectorTransportTests(unittest.TestCase): self.assertFalse(self.loop.readers) self.assertEqual(1, self.loop.remove_reader_count[7]) - @unittest.mock.patch('asyncio.log.logger.exception') + @unittest.mock.patch('asyncio.log.logger.error') def test_fatal_error(self, m_exc): exc = OSError() tr = _SelectorTransport(self.loop, self.sock, self.protocol, None) tr._force_close = unittest.mock.Mock() tr._fatal_error(exc) - m_exc.assert_called_with('Fatal error for %s', tr) + m_exc.assert_called_with( + test_utils.MockPattern( + 'Fatal transport error\nprotocol:.*\ntransport:.*'), + exc_info=(OSError, MOCK_ANY, MOCK_ANY)) + tr._force_close.assert_called_with(exc) def test_connection_lost(self): @@ -996,7 +1003,7 @@ class SelectorSocketTransportTests(unittest.TestCase): transport._write_ready() transport._fatal_error.assert_called_with(err) - @unittest.mock.patch('asyncio.selector_events.logger') + @unittest.mock.patch('asyncio.base_events.logger') def test_write_ready_exception_and_close(self, m_log): self.sock.send.side_effect = OSError() remove_writer = self.loop.remove_writer = unittest.mock.Mock() @@ -1651,14 +1658,17 @@ class SelectorDatagramTransportTests(unittest.TestCase): self.assertFalse(transport._fatal_error.called) self.assertTrue(self.protocol.error_received.called) - @unittest.mock.patch('asyncio.log.logger.exception') + @unittest.mock.patch('asyncio.base_events.logger.error') def test_fatal_error_connected(self, m_exc): transport = _SelectorDatagramTransport( self.loop, self.sock, self.protocol, ('0.0.0.0', 1)) err = ConnectionRefusedError() transport._fatal_error(err) self.assertFalse(self.protocol.error_received.called) - m_exc.assert_called_with('Fatal error for %s', transport) + m_exc.assert_called_with( + test_utils.MockPattern( + 'Fatal transport error\nprotocol:.*\ntransport:.*'), + exc_info=(ConnectionRefusedError, MOCK_ANY, MOCK_ANY)) if __name__ == '__main__': diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py index 
2fa1db4..e933079 100644 --- a/Lib/test/test_asyncio/test_unix_events.py +++ b/Lib/test/test_asyncio/test_unix_events.py @@ -25,6 +25,9 @@ from asyncio import test_utils from asyncio import unix_events +MOCK_ANY = unittest.mock.ANY + + @unittest.skipUnless(signal, 'Signals are not supported') class SelectorEventLoopSignalTests(unittest.TestCase): @@ -45,7 +48,8 @@ class SelectorEventLoopSignalTests(unittest.TestCase): self.loop._handle_signal(signal.NSIG + 1, ()) def test_handle_signal_cancelled_handler(self): - h = asyncio.Handle(unittest.mock.Mock(), ()) + h = asyncio.Handle(unittest.mock.Mock(), (), + loop=unittest.mock.Mock()) h.cancel() self.loop._signal_handlers[signal.NSIG + 1] = h self.loop.remove_signal_handler = unittest.mock.Mock() @@ -91,7 +95,7 @@ class SelectorEventLoopSignalTests(unittest.TestCase): signal.SIGINT, lambda: True) @unittest.mock.patch('asyncio.unix_events.signal') - @unittest.mock.patch('asyncio.unix_events.logger') + @unittest.mock.patch('asyncio.base_events.logger') def test_add_signal_handler_install_error2(self, m_logging, m_signal): m_signal.NSIG = signal.NSIG @@ -108,7 +112,7 @@ class SelectorEventLoopSignalTests(unittest.TestCase): self.assertEqual(1, m_signal.set_wakeup_fd.call_count) @unittest.mock.patch('asyncio.unix_events.signal') - @unittest.mock.patch('asyncio.unix_events.logger') + @unittest.mock.patch('asyncio.base_events.logger') def test_add_signal_handler_install_error3(self, m_logging, m_signal): class Err(OSError): errno = errno.EINVAL @@ -153,7 +157,7 @@ class SelectorEventLoopSignalTests(unittest.TestCase): m_signal.signal.call_args[0]) @unittest.mock.patch('asyncio.unix_events.signal') - @unittest.mock.patch('asyncio.unix_events.logger') + @unittest.mock.patch('asyncio.base_events.logger') def test_remove_signal_handler_cleanup_error(self, m_logging, m_signal): m_signal.NSIG = signal.NSIG self.loop.add_signal_handler(signal.SIGHUP, lambda: True) @@ -347,7 +351,7 @@ class UnixReadPipeTransportTests(unittest.TestCase): test_utils.run_briefly(self.loop) self.assertFalse(self.protocol.data_received.called) - @unittest.mock.patch('asyncio.log.logger.exception') + @unittest.mock.patch('asyncio.log.logger.error') @unittest.mock.patch('os.read') def test__read_ready_error(self, m_read, m_logexc): tr = unix_events._UnixReadPipeTransport( @@ -359,7 +363,10 @@ class UnixReadPipeTransportTests(unittest.TestCase): m_read.assert_called_with(5, tr.max_size) tr._close.assert_called_with(err) - m_logexc.assert_called_with('Fatal error for %s', tr) + m_logexc.assert_called_with( + test_utils.MockPattern( + 'Fatal transport error\nprotocol:.*\ntransport:.*'), + exc_info=(OSError, MOCK_ANY, MOCK_ANY)) @unittest.mock.patch('os.read') def test_pause_reading(self, m_read): @@ -423,7 +430,7 @@ class UnixReadPipeTransportTests(unittest.TestCase): self.assertEqual(2, sys.getrefcount(self.protocol), pprint.pformat(gc.get_referrers(self.protocol))) self.assertIsNone(tr._loop) - self.assertEqual(2, sys.getrefcount(self.loop), + self.assertEqual(4, sys.getrefcount(self.loop), pprint.pformat(gc.get_referrers(self.loop))) def test__call_connection_lost_with_err(self): @@ -436,10 +443,11 @@ class UnixReadPipeTransportTests(unittest.TestCase): self.pipe.close.assert_called_with() self.assertIsNone(tr._protocol) + self.assertEqual(2, sys.getrefcount(self.protocol), pprint.pformat(gc.get_referrers(self.protocol))) self.assertIsNone(tr._loop) - self.assertEqual(2, sys.getrefcount(self.loop), + self.assertEqual(4, sys.getrefcount(self.loop), 
pprint.pformat(gc.get_referrers(self.loop))) @@ -635,7 +643,7 @@ class UnixWritePipeTransportTests(unittest.TestCase): self.loop.assert_writer(5, tr._write_ready) self.assertEqual([b'data'], tr._buffer) - @unittest.mock.patch('asyncio.log.logger.exception') + @unittest.mock.patch('asyncio.log.logger.error') @unittest.mock.patch('os.write') def test__write_ready_err(self, m_write, m_logexc): tr = unix_events._UnixWritePipeTransport( @@ -650,7 +658,10 @@ class UnixWritePipeTransportTests(unittest.TestCase): self.assertFalse(self.loop.readers) self.assertEqual([], tr._buffer) self.assertTrue(tr._closing) - m_logexc.assert_called_with('Fatal error for %s', tr) + m_logexc.assert_called_with( + test_utils.MockPattern( + 'Fatal transport error\nprotocol:.*\ntransport:.*'), + exc_info=(OSError, MOCK_ANY, MOCK_ANY)) self.assertEqual(1, tr._conn_lost) test_utils.run_briefly(self.loop) self.protocol.connection_lost.assert_called_with(err) @@ -702,7 +713,7 @@ class UnixWritePipeTransportTests(unittest.TestCase): self.assertEqual(2, sys.getrefcount(self.protocol), pprint.pformat(gc.get_referrers(self.protocol))) self.assertIsNone(tr._loop) - self.assertEqual(2, sys.getrefcount(self.loop), + self.assertEqual(4, sys.getrefcount(self.loop), pprint.pformat(gc.get_referrers(self.loop))) def test__call_connection_lost_with_err(self): @@ -718,7 +729,7 @@ class UnixWritePipeTransportTests(unittest.TestCase): self.assertEqual(2, sys.getrefcount(self.protocol), pprint.pformat(gc.get_referrers(self.protocol))) self.assertIsNone(tr._loop) - self.assertEqual(2, sys.getrefcount(self.loop), + self.assertEqual(4, sys.getrefcount(self.loop), pprint.pformat(gc.get_referrers(self.loop))) def test_close(self): @@ -1285,10 +1296,10 @@ class ChildWatcherTestsMixin: m.waitpid.side_effect = ValueError with unittest.mock.patch.object(log.logger, - "exception") as m_exception: + 'error') as m_error: self.assertEqual(self.watcher._sig_chld(), None) - self.assertTrue(m_exception.called) + self.assertTrue(m_error.called) @waitpid_mocks def test_sigchld_child_reaped_elsewhere(self, m): diff --git a/Misc/NEWS b/Misc/NEWS index 701c4e9..b128462 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -26,6 +26,10 @@ Core and Builtins Library ------- +- Issue #20681: Add new error handling API in asyncio. New APIs: + loop.set_exception_handler(), loop.default_exception_handler(), and + loop.call_exception_handler(). + - Issue #20673: Implement support for UNIX Domain Sockets in asyncio. New APIs: loop.create_unix_connection(), loop.create_unix_server(), streams.open_unix_connection(), and streams.start_unix_server(). -- cgit v0.12 From 3cb9914488d2d5c95a1b688a68302f5fa2e914ad Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Tue, 18 Feb 2014 18:41:13 -0500 Subject: asyncio.transports: Make _ProactorBasePipeTransport use _FlowControlMixin --- Lib/asyncio/proactor_events.py | 55 ++----------------------------- Lib/asyncio/selector_events.py | 73 ++---------------------------------------- Lib/asyncio/transports.py | 70 ++++++++++++++++++++++++++++++++++++++++ Lib/asyncio/unix_events.py | 2 +- 4 files changed, 75 insertions(+), 125 deletions(-) diff --git a/Lib/asyncio/proactor_events.py b/Lib/asyncio/proactor_events.py index b2ac632..d72f927 100644 --- a/Lib/asyncio/proactor_events.py +++ b/Lib/asyncio/proactor_events.py @@ -15,7 +15,8 @@ from . 
import transports from .log import logger -class _ProactorBasePipeTransport(transports.BaseTransport): +class _ProactorBasePipeTransport(transports._FlowControlMixin, + transports.BaseTransport): """Base class for pipe and socket transports.""" def __init__(self, loop, sock, protocol, waiter=None, @@ -33,8 +34,6 @@ class _ProactorBasePipeTransport(transports.BaseTransport): self._conn_lost = 0 self._closing = False # Set when close() called. self._eof_written = False - self._protocol_paused = False - self.set_write_buffer_limits() if self._server is not None: self._server.attach(self) self._loop.call_soon(self._protocol.connection_made, self) @@ -94,56 +93,6 @@ class _ProactorBasePipeTransport(transports.BaseTransport): server.detach(self) self._server = None - # XXX The next four methods are nearly identical to corresponding - # ones in _SelectorTransport. Maybe refactor buffer management to - # share the implementations? (Also these are really only needed - # by _ProactorWritePipeTransport but since _buffer is defined on - # the base class I am putting it here for now.) - - def _maybe_pause_protocol(self): - size = self.get_write_buffer_size() - if size <= self._high_water: - return - if not self._protocol_paused: - self._protocol_paused = True - try: - self._protocol.pause_writing() - except Exception as exc: - self._loop.call_exception_handler({ - 'message': 'protocol.pause_writing() failed', - 'exception': exc, - 'transport': self, - 'protocol': self._protocol, - }) - - def _maybe_resume_protocol(self): - if (self._protocol_paused and - self.get_write_buffer_size() <= self._low_water): - self._protocol_paused = False - try: - self._protocol.resume_writing() - except Exception as exc: - self._loop.call_exception_handler({ - 'message': 'protocol.resume_writing() failed', - 'exception': exc, - 'transport': self, - 'protocol': self._protocol, - }) - - def set_write_buffer_limits(self, high=None, low=None): - if high is None: - if low is None: - high = 64*1024 - else: - high = 4*low - if low is None: - low = high // 4 - if not high >= low >= 0: - raise ValueError('high (%r) must be >= low (%r) must be >= 0' % - (high, low)) - self._high_water = high - self._low_water = low - def get_write_buffer_size(self): size = self._pending_write if self._buffer is not None: diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py index fb86f82..869d66e 100644 --- a/Lib/asyncio/selector_events.py +++ b/Lib/asyncio/selector_events.py @@ -338,77 +338,8 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): sock.close() -class _FlowControlMixin(transports.Transport): - """All the logic for (write) flow control in a mix-in base class. - - The subclass must implement get_write_buffer_size(). It must call - _maybe_pause_protocol() whenever the write buffer size increases, - and _maybe_resume_protocol() whenever it decreases. It may also - override set_write_buffer_limits() (e.g. to specify different - defaults). - - The subclass constructor must call super().__init__(extra). This - will call set_write_buffer_limits(). - - The user may call set_write_buffer_limits() and - get_write_buffer_size(), and their protocol's pause_writing() and - resume_writing() may be called. 
- """ - - def __init__(self, extra=None): - super().__init__(extra) - self._protocol_paused = False - self.set_write_buffer_limits() - - def _maybe_pause_protocol(self): - size = self.get_write_buffer_size() - if size <= self._high_water: - return - if not self._protocol_paused: - self._protocol_paused = True - try: - self._protocol.pause_writing() - except Exception as exc: - self._loop.call_exception_handler({ - 'message': 'protocol.pause_writing() failed', - 'exception': exc, - 'transport': self, - 'protocol': self._protocol, - }) - - def _maybe_resume_protocol(self): - if (self._protocol_paused and - self.get_write_buffer_size() <= self._low_water): - self._protocol_paused = False - try: - self._protocol.resume_writing() - except Exception as exc: - self._loop.call_exception_handler({ - 'message': 'protocol.resume_writing() failed', - 'exception': exc, - 'transport': self, - 'protocol': self._protocol, - }) - - def set_write_buffer_limits(self, high=None, low=None): - if high is None: - if low is None: - high = 64*1024 - else: - high = 4*low - if low is None: - low = high // 4 - if not high >= low >= 0: - raise ValueError('high (%r) must be >= low (%r) must be >= 0' % - (high, low)) - self._high_water = high - self._low_water = low - - def get_write_buffer_size(self): - raise NotImplementedError - - -class _SelectorTransport(_FlowControlMixin, transports.Transport): +class _SelectorTransport(transports._FlowControlMixin, + transports.Transport): max_size = 256 * 1024 # Buffer size passed to recv(). diff --git a/Lib/asyncio/transports.py b/Lib/asyncio/transports.py index 67ae7fd..5b975aa 100644 --- a/Lib/asyncio/transports.py +++ b/Lib/asyncio/transports.py @@ -219,3 +219,73 @@ class SubprocessTransport(BaseTransport): http://docs.python.org/3/library/subprocess#subprocess.Popen.kill """ raise NotImplementedError + + +class _FlowControlMixin(Transport): + """All the logic for (write) flow control in a mix-in base class. + + The subclass must implement get_write_buffer_size(). It must call + _maybe_pause_protocol() whenever the write buffer size increases, + and _maybe_resume_protocol() whenever it decreases. It may also + override set_write_buffer_limits() (e.g. to specify different + defaults). + + The subclass constructor must call super().__init__(extra). This + will call set_write_buffer_limits(). + + The user may call set_write_buffer_limits() and + get_write_buffer_size(), and their protocol's pause_writing() and + resume_writing() may be called. 
+ """ + + def __init__(self, extra=None): + super().__init__(extra) + self._protocol_paused = False + self.set_write_buffer_limits() + + def _maybe_pause_protocol(self): + size = self.get_write_buffer_size() + if size <= self._high_water: + return + if not self._protocol_paused: + self._protocol_paused = True + try: + self._protocol.pause_writing() + except Exception as exc: + self._loop.call_exception_handler({ + 'message': 'protocol.pause_writing() failed', + 'exception': exc, + 'transport': self, + 'protocol': self._protocol, + }) + + def _maybe_resume_protocol(self): + if (self._protocol_paused and + self.get_write_buffer_size() <= self._low_water): + self._protocol_paused = False + try: + self._protocol.resume_writing() + except Exception as exc: + self._loop.call_exception_handler({ + 'message': 'protocol.resume_writing() failed', + 'exception': exc, + 'transport': self, + 'protocol': self._protocol, + }) + + def set_write_buffer_limits(self, high=None, low=None): + if high is None: + if low is None: + high = 64*1024 + else: + high = 4*low + if low is None: + low = high // 4 + if not high >= low >= 0: + raise ValueError('high (%r) must be >= low (%r) must be >= 0' % + (high, low)) + self._high_water = high + self._low_water = low + + def get_write_buffer_size(self): + raise NotImplementedError diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index 9a40c04..748452c 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -317,7 +317,7 @@ class _UnixReadPipeTransport(transports.ReadTransport): self._loop = None -class _UnixWritePipeTransport(selector_events._FlowControlMixin, +class _UnixWritePipeTransport(transports._FlowControlMixin, transports.WriteTransport): def __init__(self, loop, pipe, protocol, waiter=None, extra=None): -- cgit v0.12 From 0ee29c2c7fbc7e1b10e153b0cb59d2270347d4f3 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 19 Feb 2014 01:40:41 +0100 Subject: asyncio, Tulip issue 139: Improve error messages on "fatal errors" Mention if the error was caused by a read or a write, and be more specific on the object (ex: "pipe transport" instead of "transport"). 
--- Lib/asyncio/proactor_events.py | 10 ++++---- Lib/asyncio/selector_events.py | 22 ++++++++-------- Lib/asyncio/unix_events.py | 14 +++++------ Lib/test/test_asyncio/test_proactor_events.py | 12 ++++++--- Lib/test/test_asyncio/test_selector_events.py | 36 +++++++++++++++++++-------- Lib/test/test_asyncio/test_unix_events.py | 8 +++--- 6 files changed, 64 insertions(+), 38 deletions(-) diff --git a/Lib/asyncio/proactor_events.py b/Lib/asyncio/proactor_events.py index d72f927..f45cd9c 100644 --- a/Lib/asyncio/proactor_events.py +++ b/Lib/asyncio/proactor_events.py @@ -53,10 +53,10 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin, if self._read_fut is not None: self._read_fut.cancel() - def _fatal_error(self, exc): + def _fatal_error(self, exc, message='Fatal error on pipe transport'): if not isinstance(exc, (BrokenPipeError, ConnectionResetError)): self._loop.call_exception_handler({ - 'message': 'Fatal transport error', + 'message': message, 'exception': exc, 'transport': self, 'protocol': self._protocol, @@ -151,11 +151,11 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport, self._read_fut = self._loop._proactor.recv(self._sock, 4096) except ConnectionAbortedError as exc: if not self._closing: - self._fatal_error(exc) + self._fatal_error(exc, 'Fatal read error on pipe transport') except ConnectionResetError as exc: self._force_close(exc) except OSError as exc: - self._fatal_error(exc) + self._fatal_error(exc, 'Fatal read error on pipe transport') except futures.CancelledError: if not self._closing: raise @@ -246,7 +246,7 @@ class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, except ConnectionResetError as exc: self._force_close(exc) except OSError as exc: - self._fatal_error(exc) + self._fatal_error(exc, 'Fatal write error on pipe transport') def can_write_eof(self): return True diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py index 869d66e..c142356 100644 --- a/Lib/asyncio/selector_events.py +++ b/Lib/asyncio/selector_events.py @@ -377,11 +377,11 @@ class _SelectorTransport(transports._FlowControlMixin, self._conn_lost += 1 self._loop.call_soon(self._call_connection_lost, None) - def _fatal_error(self, exc): + def _fatal_error(self, exc, message='Fatal error on transport'): # Should be called from exception handler only. 
if not isinstance(exc, (BrokenPipeError, ConnectionResetError)): self._loop.call_exception_handler({ - 'message': 'Fatal transport error', + 'message': message, 'exception': exc, 'transport': self, 'protocol': self._protocol, @@ -452,7 +452,7 @@ class _SelectorSocketTransport(_SelectorTransport): except (BlockingIOError, InterruptedError): pass except Exception as exc: - self._fatal_error(exc) + self._fatal_error(exc, 'Fatal read error on socket transport') else: if data: self._protocol.data_received(data) @@ -488,7 +488,7 @@ class _SelectorSocketTransport(_SelectorTransport): except (BlockingIOError, InterruptedError): pass except Exception as exc: - self._fatal_error(exc) + self._fatal_error(exc, 'Fatal write error on socket transport') return else: data = data[n:] @@ -511,7 +511,7 @@ class _SelectorSocketTransport(_SelectorTransport): except Exception as exc: self._loop.remove_writer(self._sock_fd) self._buffer.clear() - self._fatal_error(exc) + self._fatal_error(exc, 'Fatal write error on socket transport') else: if n: del self._buffer[:n] @@ -678,7 +678,7 @@ class _SelectorSslTransport(_SelectorTransport): self._loop.remove_reader(self._sock_fd) self._loop.add_writer(self._sock_fd, self._write_ready) except Exception as exc: - self._fatal_error(exc) + self._fatal_error(exc, 'Fatal read error on SSL transport') else: if data: self._protocol.data_received(data) @@ -712,7 +712,7 @@ class _SelectorSslTransport(_SelectorTransport): except Exception as exc: self._loop.remove_writer(self._sock_fd) self._buffer.clear() - self._fatal_error(exc) + self._fatal_error(exc, 'Fatal write error on SSL transport') return if n: @@ -770,7 +770,7 @@ class _SelectorDatagramTransport(_SelectorTransport): except OSError as exc: self._protocol.error_received(exc) except Exception as exc: - self._fatal_error(exc) + self._fatal_error(exc, 'Fatal read error on datagram transport') else: self._protocol.datagram_received(data, addr) @@ -805,7 +805,8 @@ class _SelectorDatagramTransport(_SelectorTransport): self._protocol.error_received(exc) return except Exception as exc: - self._fatal_error(exc) + self._fatal_error(exc, + 'Fatal write error on datagram transport') return # Ensure that what we buffer is immutable. @@ -827,7 +828,8 @@ class _SelectorDatagramTransport(_SelectorTransport): self._protocol.error_received(exc) return except Exception as exc: - self._fatal_error(exc) + self._fatal_error(exc, + 'Fatal write error on datagram transport') return self._maybe_resume_protocol() # May append to buffer. 
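Condensed from the hunks above, the shape of the reporting path the selector transports now share; _ExampleTransport is a hypothetical stand-in, and only the call into loop.call_exception_handler() followed by _force_close() mirrors the real code:

    class _ExampleTransport:
        def __init__(self, loop, protocol):
            self._loop = loop
            self._protocol = protocol

        def _fatal_error(self, exc, message='Fatal error on transport'):
            # Routine peer-disconnect errors are not reported.
            if not isinstance(exc, (BrokenPipeError, ConnectionResetError)):
                self._loop.call_exception_handler({
                    'message': message,  # e.g. 'Fatal read error on socket transport'
                    'exception': exc,
                    'transport': self,
                    'protocol': self._protocol,
                })
            self._force_close(exc)

        def _force_close(self, exc):
            # Hypothetical placeholder: the real transports unregister the
            # file descriptor and schedule protocol.connection_lost(exc).
            pass
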
diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index 748452c..3a2fd18 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -271,7 +271,7 @@ class _UnixReadPipeTransport(transports.ReadTransport): except (BlockingIOError, InterruptedError): pass except OSError as exc: - self._fatal_error(exc) + self._fatal_error(exc, 'Fatal read error on pipe transport') else: if data: self._protocol.data_received(data) @@ -291,11 +291,11 @@ class _UnixReadPipeTransport(transports.ReadTransport): if not self._closing: self._close(None) - def _fatal_error(self, exc): + def _fatal_error(self, exc, message='Fatal error on pipe transport'): # should be called by exception handler only if not (isinstance(exc, OSError) and exc.errno == errno.EIO): self._loop.call_exception_handler({ - 'message': 'Fatal transport error', + 'message': message, 'exception': exc, 'transport': self, 'protocol': self._protocol, @@ -381,7 +381,7 @@ class _UnixWritePipeTransport(transports._FlowControlMixin, n = 0 except Exception as exc: self._conn_lost += 1 - self._fatal_error(exc) + self._fatal_error(exc, 'Fatal write error on pipe transport') return if n == len(data): return @@ -406,7 +406,7 @@ class _UnixWritePipeTransport(transports._FlowControlMixin, # Remove writer here, _fatal_error() doesn't it # because _buffer is empty. self._loop.remove_writer(self._fileno) - self._fatal_error(exc) + self._fatal_error(exc, 'Fatal write error on pipe transport') else: if n == len(data): self._loop.remove_writer(self._fileno) @@ -443,11 +443,11 @@ class _UnixWritePipeTransport(transports._FlowControlMixin, def abort(self): self._close(None) - def _fatal_error(self, exc): + def _fatal_error(self, exc, message='Fatal error on pipe transport'): # should be called by exception handler only if not isinstance(exc, (BrokenPipeError, ConnectionResetError)): self._loop.call_exception_handler({ - 'message': 'Fatal transport error', + 'message': message, 'exception': exc, 'transport': self, 'protocol': self._protocol, diff --git a/Lib/test/test_asyncio/test_proactor_events.py b/Lib/test/test_asyncio/test_proactor_events.py index 816c973..0892069 100644 --- a/Lib/test/test_asyncio/test_proactor_events.py +++ b/Lib/test/test_asyncio/test_proactor_events.py @@ -69,7 +69,9 @@ class ProactorSocketTransportTests(unittest.TestCase): tr = _ProactorSocketTransport(self.loop, self.sock, self.protocol) tr._fatal_error = unittest.mock.Mock() tr._loop_reading() - tr._fatal_error.assert_called_with(err) + tr._fatal_error.assert_called_with( + err, + 'Fatal read error on pipe transport') def test_loop_reading_aborted_closing(self): self.loop._proactor.recv.side_effect = ConnectionAbortedError() @@ -105,7 +107,9 @@ class ProactorSocketTransportTests(unittest.TestCase): tr = _ProactorSocketTransport(self.loop, self.sock, self.protocol) tr._fatal_error = unittest.mock.Mock() tr._loop_reading() - tr._fatal_error.assert_called_with(err) + tr._fatal_error.assert_called_with( + err, + 'Fatal read error on pipe transport') def test_write(self): tr = _ProactorSocketTransport(self.loop, self.sock, self.protocol) @@ -142,7 +146,9 @@ class ProactorSocketTransportTests(unittest.TestCase): tr._fatal_error = unittest.mock.Mock() tr._buffer = [b'da', b'ta'] tr._loop_writing() - tr._fatal_error.assert_called_with(err) + tr._fatal_error.assert_called_with( + err, + 'Fatal write error on pipe transport') tr._conn_lost = 1 tr.write(b'data') diff --git a/Lib/test/test_asyncio/test_selector_events.py 
b/Lib/test/test_asyncio/test_selector_events.py index 04b0578..247df9e 100644 --- a/Lib/test/test_asyncio/test_selector_events.py +++ b/Lib/test/test_asyncio/test_selector_events.py @@ -655,7 +655,7 @@ class SelectorTransportTests(unittest.TestCase): m_exc.assert_called_with( test_utils.MockPattern( - 'Fatal transport error\nprotocol:.*\ntransport:.*'), + 'Fatal error on transport\nprotocol:.*\ntransport:.*'), exc_info=(OSError, MOCK_ANY, MOCK_ANY)) tr._force_close.assert_called_with(exc) @@ -785,7 +785,9 @@ class SelectorSocketTransportTests(unittest.TestCase): transport._fatal_error = unittest.mock.Mock() transport._read_ready() - transport._fatal_error.assert_called_with(err) + transport._fatal_error.assert_called_with( + err, + 'Fatal read error on socket transport') def test_write(self): data = b'data' @@ -898,7 +900,9 @@ class SelectorSocketTransportTests(unittest.TestCase): self.loop, self.sock, self.protocol) transport._fatal_error = unittest.mock.Mock() transport.write(data) - transport._fatal_error.assert_called_with(err) + transport._fatal_error.assert_called_with( + err, + 'Fatal write error on socket transport') transport._conn_lost = 1 self.sock.reset_mock() @@ -1001,7 +1005,9 @@ class SelectorSocketTransportTests(unittest.TestCase): transport._fatal_error = unittest.mock.Mock() transport._buffer.extend(b'data') transport._write_ready() - transport._fatal_error.assert_called_with(err) + transport._fatal_error.assert_called_with( + err, + 'Fatal write error on socket transport') @unittest.mock.patch('asyncio.base_events.logger') def test_write_ready_exception_and_close(self, m_log): @@ -1237,7 +1243,9 @@ class SelectorSslTransportTests(unittest.TestCase): transport = self._make_one() transport._fatal_error = unittest.mock.Mock() transport._read_ready() - transport._fatal_error.assert_called_with(err) + transport._fatal_error.assert_called_with( + err, + 'Fatal read error on SSL transport') def test_write_ready_send(self): self.sslsock.send.return_value = 4 @@ -1319,7 +1327,9 @@ class SelectorSslTransportTests(unittest.TestCase): transport._buffer = list_to_buffer([b'data']) transport._fatal_error = unittest.mock.Mock() transport._write_ready() - transport._fatal_error.assert_called_with(err) + transport._fatal_error.assert_called_with( + err, + 'Fatal write error on SSL transport') self.assertEqual(list_to_buffer(), transport._buffer) def test_write_ready_read_wants_write(self): @@ -1407,7 +1417,9 @@ class SelectorDatagramTransportTests(unittest.TestCase): transport._fatal_error = unittest.mock.Mock() transport._read_ready() - transport._fatal_error.assert_called_with(err) + transport._fatal_error.assert_called_with( + err, + 'Fatal read error on datagram transport') def test_read_ready_oserr(self): transport = _SelectorDatagramTransport( @@ -1517,7 +1529,9 @@ class SelectorDatagramTransportTests(unittest.TestCase): transport.sendto(data, ()) self.assertTrue(transport._fatal_error.called) - transport._fatal_error.assert_called_with(err) + transport._fatal_error.assert_called_with( + err, + 'Fatal write error on datagram transport') transport._conn_lost = 1 transport._address = ('123',) @@ -1633,7 +1647,9 @@ class SelectorDatagramTransportTests(unittest.TestCase): transport._buffer.append((b'data', ())) transport._sendto_ready() - transport._fatal_error.assert_called_with(err) + transport._fatal_error.assert_called_with( + err, + 'Fatal write error on datagram transport') def test_sendto_ready_error_received(self): self.sock.sendto.side_effect = ConnectionRefusedError @@ 
-1667,7 +1683,7 @@ class SelectorDatagramTransportTests(unittest.TestCase): self.assertFalse(self.protocol.error_received.called) m_exc.assert_called_with( test_utils.MockPattern( - 'Fatal transport error\nprotocol:.*\ntransport:.*'), + 'Fatal error on transport\nprotocol:.*\ntransport:.*'), exc_info=(ConnectionRefusedError, MOCK_ANY, MOCK_ANY)) diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py index e933079..9866e33 100644 --- a/Lib/test/test_asyncio/test_unix_events.py +++ b/Lib/test/test_asyncio/test_unix_events.py @@ -365,7 +365,7 @@ class UnixReadPipeTransportTests(unittest.TestCase): tr._close.assert_called_with(err) m_logexc.assert_called_with( test_utils.MockPattern( - 'Fatal transport error\nprotocol:.*\ntransport:.*'), + 'Fatal read error on pipe transport\nprotocol:.*\ntransport:.*'), exc_info=(OSError, MOCK_ANY, MOCK_ANY)) @unittest.mock.patch('os.read') @@ -558,7 +558,9 @@ class UnixWritePipeTransportTests(unittest.TestCase): m_write.assert_called_with(5, b'data') self.assertFalse(self.loop.writers) self.assertEqual([], tr._buffer) - tr._fatal_error.assert_called_with(err) + tr._fatal_error.assert_called_with( + err, + 'Fatal write error on pipe transport') self.assertEqual(1, tr._conn_lost) tr.write(b'data') @@ -660,7 +662,7 @@ class UnixWritePipeTransportTests(unittest.TestCase): self.assertTrue(tr._closing) m_logexc.assert_called_with( test_utils.MockPattern( - 'Fatal transport error\nprotocol:.*\ntransport:.*'), + 'Fatal write error on pipe transport\nprotocol:.*\ntransport:.*'), exc_info=(OSError, MOCK_ANY, MOCK_ANY)) self.assertEqual(1, tr._conn_lost) test_utils.run_briefly(self.loop) -- cgit v0.12 From 79a295261a89f67b9d700732e6e7436bfe63e832 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 19 Feb 2014 01:45:59 +0100 Subject: asyncio, Tulip issue 143: UNIX domain methods, fix ResourceWarning and DeprecationWarning warnings. create_unix_server() closes the socket on any error, not only on OSError. --- Lib/asyncio/unix_events.py | 8 ++++---- Lib/test/test_asyncio/test_unix_events.py | 28 +++++++++++++++------------- 2 files changed, 19 insertions(+), 17 deletions(-) diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index 3a2fd18..faf4c60 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -183,13 +183,12 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): raise ValueError( 'path and sock can not be specified at the same time') + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0) try: - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0) sock.setblocking(False) yield from self.sock_connect(sock, path) - except OSError: - if sock is not None: - sock.close() + except: + sock.close() raise else: @@ -213,6 +212,7 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): try: sock.bind(path) except OSError as exc: + sock.close() if exc.errno == errno.EADDRINUSE: # Let's improve the error message by adding # with what exact address it occurs. 
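A small usage sketch (UNIX only) of the two methods this commit hardens; EchoProtocol and the socket path are illustrative, while the method signatures follow the documentation added later in this series:

    import asyncio

    class EchoProtocol(asyncio.Protocol):
        def connection_made(self, transport):
            self.transport = transport

        def data_received(self, data):
            self.transport.write(data)

    loop = asyncio.get_event_loop()

    # Listening side. If the path is already bound, the coroutine raises
    # OSError ('Address ... is already in use'); after this change the
    # freshly created socket is closed on failure instead of leaking and
    # triggering a ResourceWarning.
    server = loop.run_until_complete(
        loop.create_unix_server(EchoProtocol, '/tmp/echo.sock'))

    # Connecting side: returns a (transport, protocol) pair.
    transport, protocol = loop.run_until_complete(
        loop.create_unix_connection(asyncio.Protocol, '/tmp/echo.sock'))
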
diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py index 9866e33..7b5196c 100644 --- a/Lib/test/test_asyncio/test_unix_events.py +++ b/Lib/test/test_asyncio/test_unix_events.py @@ -221,17 +221,17 @@ class SelectorEventLoopUnixSocketTests(unittest.TestCase): with test_utils.unix_socket_path() as path: sock = socket.socket(socket.AF_UNIX) sock.bind(path) - - coro = self.loop.create_unix_server(lambda: None, path) - with self.assertRaisesRegexp(OSError, - 'Address.*is already in use'): - self.loop.run_until_complete(coro) + with sock: + coro = self.loop.create_unix_server(lambda: None, path) + with self.assertRaisesRegex(OSError, + 'Address.*is already in use'): + self.loop.run_until_complete(coro) def test_create_unix_server_existing_path_nonsock(self): with tempfile.NamedTemporaryFile() as file: coro = self.loop.create_unix_server(lambda: None, file.name) - with self.assertRaisesRegexp(OSError, - 'Address.*is already in use'): + with self.assertRaisesRegex(OSError, + 'Address.*is already in use'): self.loop.run_until_complete(coro) def test_create_unix_server_ssl_bool(self): @@ -248,11 +248,13 @@ class SelectorEventLoopUnixSocketTests(unittest.TestCase): self.loop.run_until_complete(coro) def test_create_unix_server_path_inetsock(self): - coro = self.loop.create_unix_server(lambda: None, path=None, - sock=socket.socket()) - with self.assertRaisesRegex(ValueError, - 'A UNIX Domain Socket was expected'): - self.loop.run_until_complete(coro) + sock = socket.socket() + with sock: + coro = self.loop.create_unix_server(lambda: None, path=None, + sock=sock) + with self.assertRaisesRegex(ValueError, + 'A UNIX Domain Socket was expected'): + self.loop.run_until_complete(coro) def test_create_unix_connection_path_sock(self): coro = self.loop.create_unix_connection( @@ -278,7 +280,7 @@ class SelectorEventLoopUnixSocketTests(unittest.TestCase): coro = self.loop.create_unix_connection( lambda: None, '/dev/null', ssl=True) - with self.assertRaisesRegexp( + with self.assertRaisesRegex( ValueError, 'you have to pass server_hostname when using ssl'): self.loop.run_until_complete(coro) -- cgit v0.12 From 74d519fcc685f903a9127987ae53ff75eaf17d73 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 19 Feb 2014 02:21:08 +0100 Subject: Issue #20682: test_asyncio, _basetest_create_connection() checks also the sockname, as _basetest_create_ssl_connection(). --- Lib/test/test_asyncio/test_events.py | 1 + 1 file changed, 1 insertion(+) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index a0a4d02..15cc520 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -503,6 +503,7 @@ class EventLoopTestsMixin: tr, pr = self.loop.run_until_complete(connection_fut) self.assertIsInstance(tr, asyncio.Transport) self.assertIsInstance(pr, asyncio.Protocol) + self.assertIsNotNone(tr.get_extra_info('sockname')) self.loop.run_until_complete(pr.done) self.assertGreater(pr.nbytes, 0) tr.close() -- cgit v0.12 From dec1a45fd1b37d4688fa5846a0d32a2393808fb1 Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Tue, 18 Feb 2014 22:27:48 -0500 Subject: asyncio: Fix spelling and typos. Thanks to Vajrasky Kok for discovering some of them. 
--- Lib/asyncio/events.py | 6 +++--- Lib/asyncio/protocols.py | 2 +- Lib/asyncio/selector_events.py | 2 +- Lib/asyncio/tasks.py | 2 +- Lib/asyncio/test_utils.py | 4 ++-- Lib/asyncio/unix_events.py | 4 ++-- Lib/asyncio/windows_events.py | 2 +- Lib/selectors.py | 2 +- Lib/test/test_asyncio/test_base_events.py | 2 +- Lib/test/test_asyncio/test_events.py | 1 - Lib/test/test_asyncio/test_futures.py | 2 +- Lib/test/test_asyncio/test_streams.py | 2 +- Lib/test/test_asyncio/test_unix_events.py | 6 +++--- 13 files changed, 18 insertions(+), 19 deletions(-) diff --git a/Lib/asyncio/events.py b/Lib/asyncio/events.py index f61c5b7..1030c04 100644 --- a/Lib/asyncio/events.py +++ b/Lib/asyncio/events.py @@ -235,7 +235,7 @@ class AbstractEventLoop: sock=None, backlog=100, ssl=None): """A coroutine which creates a UNIX Domain Socket server. - The return valud is a Server object, which can be used to stop + The return value is a Server object, which can be used to stop the service. path is a str, representing a file systsem path to bind the @@ -260,7 +260,7 @@ class AbstractEventLoop: # Pipes and subprocesses. def connect_read_pipe(self, protocol_factory, pipe): - """Register read pipe in eventloop. + """Register read pipe in event loop. protocol_factory should instantiate object with Protocol interface. pipe is file-like object already switched to nonblocking. @@ -273,7 +273,7 @@ class AbstractEventLoop: raise NotImplementedError def connect_write_pipe(self, protocol_factory, pipe): - """Register write pipe in eventloop. + """Register write pipe in event loop. protocol_factory should instantiate object with BaseProtocol interface. Pipe is file-like object already switched to nonblocking. diff --git a/Lib/asyncio/protocols.py b/Lib/asyncio/protocols.py index 3c4f3f4..52fc25c 100644 --- a/Lib/asyncio/protocols.py +++ b/Lib/asyncio/protocols.py @@ -114,7 +114,7 @@ class SubprocessProtocol(BaseProtocol): def pipe_data_received(self, fd, data): """Called when the subprocess writes data into stdout/stderr pipe. - fd is int file dascriptor. + fd is int file descriptor. data is bytes object. """ diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py index c142356..aa42745 100644 --- a/Lib/asyncio/selector_events.py +++ b/Lib/asyncio/selector_events.py @@ -213,7 +213,7 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): def _sock_recv(self, fut, registered, sock, n): # _sock_recv() can add itself as an I/O callback if the operation can't - # be done immediatly. Don't use it directly, call sock_recv(). + # be done immediately. Don't use it directly, call sock_recv(). fd = sock.fileno() if registered: # Remove the callback early. It should be rare that the diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index b7ee758..a3e7cdf 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -181,7 +181,7 @@ class Task(futures.Future): The frames are always ordered from oldest to newest. - The optional limit gives the maximum nummber of frames to + The optional limit gives the maximum number of frames to return; by default all available frames are returned. Its meaning differs depending on whether a stack or a traceback is returned: the newest frames of a stack are returned, but the diff --git a/Lib/asyncio/test_utils.py b/Lib/asyncio/test_utils.py index 28e5243..2a8a241 100644 --- a/Lib/asyncio/test_utils.py +++ b/Lib/asyncio/test_utils.py @@ -259,7 +259,7 @@ class TestLoop(base_events.BaseEventLoop): when = yield ... ... 
= yield time_advance - Value retuned by yield is absolute time of next scheduled handler. + Value returned by yield is absolute time of next scheduled handler. Value passed to yield is time advance to move loop's time forward. """ @@ -369,7 +369,7 @@ class MockPattern(str): """A regex based str with a fuzzy __eq__. Use this helper with 'mock.assert_called_with', or anywhere - where a regexp comparison between strings is needed. + where a regex comparison between strings is needed. For instance: mock_call.assert_called_with(MockPattern('spam.*ham')) diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index faf4c60..ce45e5f 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -1,4 +1,4 @@ -"""Selector eventloop for Unix with signal handling.""" +"""Selector event loop for Unix with signal handling.""" import errno import fcntl @@ -244,7 +244,7 @@ def _set_nonblocking(fd): class _UnixReadPipeTransport(transports.ReadTransport): - max_size = 256 * 1024 # max bytes we read in one eventloop iteration + max_size = 256 * 1024 # max bytes we read in one event loop iteration def __init__(self, loop, pipe, protocol, waiter=None, extra=None): super().__init__(extra) diff --git a/Lib/asyncio/windows_events.py b/Lib/asyncio/windows_events.py index c667a1c..e6be9d1 100644 --- a/Lib/asyncio/windows_events.py +++ b/Lib/asyncio/windows_events.py @@ -1,4 +1,4 @@ -"""Selector and proactor eventloops for Windows.""" +"""Selector and proactor event loops for Windows.""" import _winapi import errno diff --git a/Lib/selectors.py b/Lib/selectors.py index bb2a45a..a5465e2 100644 --- a/Lib/selectors.py +++ b/Lib/selectors.py @@ -80,7 +80,7 @@ class BaseSelector(metaclass=ABCMeta): A selector can use various implementations (select(), poll(), epoll()...) depending on the platform. The default `Selector` class uses the most - performant implementation on the current platform. + efficient implementation on the current platform. 
""" @abstractmethod diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py index f664ccc..2eee3be 100644 --- a/Lib/test/test_asyncio/test_base_events.py +++ b/Lib/test/test_asyncio/test_base_events.py @@ -277,7 +277,7 @@ class BaseEventLoopTests(unittest.TestCase): asyncio.SubprocessProtocol, *args, bufsize=4096) def test_subprocess_shell_invalid_args(self): - # exepected a string, not an int or a list + # expected a string, not an int or a list self.assertRaises(TypeError, self.loop.run_until_complete, self.loop.subprocess_shell, asyncio.SubprocessProtocol, 123) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index 15cc520..a0a4d02 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -503,7 +503,6 @@ class EventLoopTestsMixin: tr, pr = self.loop.run_until_complete(connection_fut) self.assertIsInstance(tr, asyncio.Transport) self.assertIsInstance(pr, asyncio.Protocol) - self.assertIsNotNone(tr.get_extra_info('sockname')) self.loop.run_until_complete(pr.done) self.assertGreater(pr.nbytes, 0) tr.close() diff --git a/Lib/test/test_asyncio/test_futures.py b/Lib/test/test_asyncio/test_futures.py index 2e4dbd4..f2b81dd 100644 --- a/Lib/test/test_asyncio/test_futures.py +++ b/Lib/test/test_asyncio/test_futures.py @@ -38,7 +38,7 @@ class FutureTests(unittest.TestCase): asyncio.set_event_loop(None) def test_constructor_positional(self): - # Make sure Future does't accept a positional argument + # Make sure Future doesn't accept a positional argument self.assertRaises(TypeError, asyncio.Future, 42) def test_cancel(self): diff --git a/Lib/test/test_asyncio/test_streams.py b/Lib/test/test_asyncio/test_streams.py index 31e26a6..ca792f2 100644 --- a/Lib/test/test_asyncio/test_streams.py +++ b/Lib/test/test_asyncio/test_streams.py @@ -239,7 +239,7 @@ class StreamReaderTests(unittest.TestCase): # No b'\n' at the end. The 'limit' is set to 3. So before # waiting for the new data in buffer, 'readline' will consume # the entire buffer, and since the length of the consumed data - # is more than 3, it will raise a ValudError. The buffer is + # is more than 3, it will raise a ValueError. The buffer is # expected to be empty now. self.assertEqual(b'', stream._buffer) diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py index 7b5196c..c0f205e 100644 --- a/Lib/test/test_asyncio/test_unix_events.py +++ b/Lib/test/test_asyncio/test_unix_events.py @@ -965,7 +965,7 @@ class ChildWatcherTestsMixin: self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) - # childen are running + # children are running self.watcher._sig_chld() self.assertFalse(callback1.called) @@ -1069,7 +1069,7 @@ class ChildWatcherTestsMixin: self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) - # childen are running + # children are running self.watcher._sig_chld() self.assertFalse(callback1.called) @@ -1425,7 +1425,7 @@ class ChildWatcherTestsMixin: self.add_zombie(61, 11) self.add_zombie(62, -5) - # SIGCHLD was not catched + # SIGCHLD was not caught self.assertFalse(callback1.called) self.assertFalse(callback2.called) self.assertFalse(callback3.called) -- cgit v0.12 From 57797521bdc5d64eb2440a5818a20c2dbc6358cb Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Tue, 18 Feb 2014 22:56:15 -0500 Subject: asyncio: pep8-ify the code. 
--- Lib/asyncio/base_events.py | 11 ++++++----- Lib/asyncio/subprocess.py | 5 +++-- Lib/test/test_asyncio/test_subprocess.py | 12 ++++++++++-- Lib/test/test_asyncio/test_tasks.py | 4 +++- Lib/test/test_asyncio/test_unix_events.py | 6 ++++-- 5 files changed, 26 insertions(+), 12 deletions(-) diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index cb2499d..b94ba07 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -605,10 +605,10 @@ class BaseEventLoop(events.AbstractEventLoop): return transport, protocol @tasks.coroutine - def subprocess_exec(self, protocol_factory, program, *args, stdin=subprocess.PIPE, - stdout=subprocess.PIPE, stderr=subprocess.PIPE, - universal_newlines=False, shell=False, bufsize=0, - **kwargs): + def subprocess_exec(self, protocol_factory, program, *args, + stdin=subprocess.PIPE, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, universal_newlines=False, + shell=False, bufsize=0, **kwargs): if universal_newlines: raise ValueError("universal_newlines must be False") if shell: @@ -623,7 +623,8 @@ class BaseEventLoop(events.AbstractEventLoop): % type(arg).__name__) protocol = protocol_factory() transport = yield from self._make_subprocess_transport( - protocol, popen_args, False, stdin, stdout, stderr, bufsize, **kwargs) + protocol, popen_args, False, stdin, stdout, stderr, + bufsize, **kwargs) return transport, protocol def set_exception_handler(self, handler): diff --git a/Lib/asyncio/subprocess.py b/Lib/asyncio/subprocess.py index 8d1a407..c3b0175 100644 --- a/Lib/asyncio/subprocess.py +++ b/Lib/asyncio/subprocess.py @@ -180,8 +180,9 @@ def create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None, return Process(transport, protocol, loop) @tasks.coroutine -def create_subprocess_exec(program, *args, stdin=None, stdout=None, stderr=None, - loop=None, limit=streams._DEFAULT_LIMIT, **kwds): +def create_subprocess_exec(program, *args, stdin=None, stdout=None, + stderr=None, loop=None, + limit=streams._DEFAULT_LIMIT, **kwds): if loop is None: loop = events.get_event_loop() protocol_factory = lambda: SubprocessStreamProtocol(limit=limit, diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py index 1b2f05b..14fd17e 100644 --- a/Lib/test/test_asyncio/test_subprocess.py +++ b/Lib/test/test_asyncio/test_subprocess.py @@ -21,6 +21,7 @@ PROGRAM_CAT = [ 'sys.stdout.buffer.write(data)'))] class SubprocessMixin: + def test_stdin_stdout(self): args = PROGRAM_CAT @@ -132,6 +133,7 @@ class SubprocessMixin: if sys.platform != 'win32': # Unix class SubprocessWatcherMixin(SubprocessMixin): + Watcher = None def setUp(self): @@ -151,14 +153,20 @@ if sys.platform != 'win32': self.loop.close() policy.set_event_loop(None) - class SubprocessSafeWatcherTests(SubprocessWatcherMixin, unittest.TestCase): + class SubprocessSafeWatcherTests(SubprocessWatcherMixin, + unittest.TestCase): + Watcher = unix_events.SafeChildWatcher - class SubprocessFastWatcherTests(SubprocessWatcherMixin, unittest.TestCase): + class SubprocessFastWatcherTests(SubprocessWatcherMixin, + unittest.TestCase): + Watcher = unix_events.FastChildWatcher + else: # Windows class SubprocessProactorTests(SubprocessMixin, unittest.TestCase): + def setUp(self): policy = asyncio.get_event_loop_policy() self.loop = asyncio.ProactorEventLoop() diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 024dd2e..f27b952 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ 
-876,6 +876,7 @@ class TaskTests(unittest.TestCase): self.assertEqual(set(f.result() for f in done), {'a', 'b'}) def test_as_completed_duplicate_coroutines(self): + @asyncio.coroutine def coro(s): return s @@ -884,7 +885,8 @@ class TaskTests(unittest.TestCase): def runner(): result = [] c = coro('ham') - for f in asyncio.as_completed([c, c, coro('spam')], loop=self.loop): + for f in asyncio.as_completed([c, c, coro('spam')], + loop=self.loop): result.append((yield from f)) return result diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py index c0f205e..9e489c2 100644 --- a/Lib/test/test_asyncio/test_unix_events.py +++ b/Lib/test/test_asyncio/test_unix_events.py @@ -367,7 +367,8 @@ class UnixReadPipeTransportTests(unittest.TestCase): tr._close.assert_called_with(err) m_logexc.assert_called_with( test_utils.MockPattern( - 'Fatal read error on pipe transport\nprotocol:.*\ntransport:.*'), + 'Fatal read error on pipe transport' + '\nprotocol:.*\ntransport:.*'), exc_info=(OSError, MOCK_ANY, MOCK_ANY)) @unittest.mock.patch('os.read') @@ -664,7 +665,8 @@ class UnixWritePipeTransportTests(unittest.TestCase): self.assertTrue(tr._closing) m_logexc.assert_called_with( test_utils.MockPattern( - 'Fatal write error on pipe transport\nprotocol:.*\ntransport:.*'), + 'Fatal write error on pipe transport' + '\nprotocol:.*\ntransport:.*'), exc_info=(OSError, MOCK_ANY, MOCK_ANY)) self.assertEqual(1, tr._conn_lost) test_utils.run_briefly(self.loop) -- cgit v0.12 From a6919aa4ed4e2cce92fd426d796593d534844ed4 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 19 Feb 2014 13:32:34 +0100 Subject: asyncio: document new create_unix_connection() and create_unix_server() methods of BaseEventLoop --- Doc/library/asyncio-eventloop.rst | 43 ++++++++++++++++++++++++++++++++++----- 1 file changed, 38 insertions(+), 5 deletions(-) diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index b2c4802..f056cac 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -201,8 +201,10 @@ Creating connections .. method:: BaseEventLoop.create_connection(protocol_factory, host=None, port=None, \*, ssl=None, family=0, proto=0, flags=0, sock=None, local_addr=None, server_hostname=None) Create a streaming transport connection to a given Internet *host* and - *port*. *protocol_factory* must be a callable returning a - :ref:`protocol ` instance. + *port*: socket family :py:data:`~socket.AF_INET` or + :py:data:`~socket.AF_INET6` depending on *host* (or *family* if specified), + socket type :py:data:`~socket.SOCK_STREAM`. *protocol_factory* must be a + callable returning a :ref:`protocol ` instance. This method returns a :ref:`coroutine object ` which will try to establish the connection in the background. When successful, the @@ -265,6 +267,35 @@ Creating connections (:class:`StreamReader`, :class:`StreamWriter`) instead of a protocol. +.. method:: BaseEventLoop.create_datagram_endpoint(protocol_factory, local_addr=None, remote_addr=None, \*, family=0, proto=0, flags=0) + + Create datagram connection: socket family :py:data:`~socket.AF_INET` or + :py:data:`~socket.AF_INET6` depending on *host* (or *family* if specified), + socket type :py:data:`~socket.SOCK_DGRAM`. + + This method returns a :ref:`coroutine object ` which will try to + establish the connection in the background. When successful, the + coroutine returns a ``(transport, protocol)`` pair. + + See the :meth:`BaseEventLoop.create_connection` method for parameters. 
+ + +.. method:: BaseEventLoop.create_unix_connection(protocol_factory, path, \*, ssl=None, sock=None, server_hostname=None) + + Create UNIX connection: socket family :py:data:`~socket.AF_UNIX`, socket + type :py:data:`~socket.SOCK_STREAM`. The :py:data:`~socket.AF_UNIX` socket + family is used to communicate between processes on the same machine + efficiently. + + This method returns a :ref:`coroutine object ` which will try to + establish the connection in the background. When successful, the + coroutine returns a ``(transport, protocol)`` pair. + + See the :meth:`BaseEventLoop.create_connection` method for parameters. + + Availability: UNIX. + + Creating listening connections ------------------------------ @@ -307,11 +338,13 @@ Creating listening connections The function :func:`start_server` creates a (:class:`StreamReader`, :class:`StreamWriter`) pair and calls back a function with this pair. -.. method:: BaseEventLoop.create_datagram_endpoint(protocol_factory, local_addr=None, remote_addr=None, \*, family=0, proto=0, flags=0) - Create datagram connection. +.. method:: BaseEventLoop.create_unix_server(protocol_factory, path=None, \*, sock=None, backlog=100, ssl=None) - This method returns a :ref:`coroutine object `. + Similar to :meth:`BaseEventLoop.create_server`, but specific to the + socket family :py:data:`~socket.AF_UNIX`. + + Availability: UNIX. -- cgit v0.12 From 15899209770970c0320b47d1ae5749b998152c9f Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Wed, 19 Feb 2014 11:10:52 -0500 Subject: asyncio: WriteTransport.set_write_buffer_size to call _maybe_pause_protocol --- Lib/asyncio/transports.py | 8 ++++++-- Lib/test/test_asyncio/test_transports.py | 23 +++++++++++++++++++++++ 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/Lib/asyncio/transports.py b/Lib/asyncio/transports.py index 5b975aa..5f674f9 100644 --- a/Lib/asyncio/transports.py +++ b/Lib/asyncio/transports.py @@ -241,7 +241,7 @@ class _FlowControlMixin(Transport): def __init__(self, extra=None): super().__init__(extra) self._protocol_paused = False - self.set_write_buffer_limits() + self._set_write_buffer_limits() def _maybe_pause_protocol(self): size = self.get_write_buffer_size() @@ -273,7 +273,7 @@ class _FlowControlMixin(Transport): 'protocol': self._protocol, }) - def set_write_buffer_limits(self, high=None, low=None): + def _set_write_buffer_limits(self, high=None, low=None): if high is None: if low is None: high = 64*1024 @@ -287,5 +287,9 @@ class _FlowControlMixin(Transport): self._high_water = high self._low_water = low + def set_write_buffer_limits(self, high=None, low=None): + self._set_write_buffer_limits(high=high, low=low) + self._maybe_pause_protocol() + def get_write_buffer_size(self): raise NotImplementedError diff --git a/Lib/test/test_asyncio/test_transports.py b/Lib/test/test_asyncio/test_transports.py index d16db80..4c64526 100644 --- a/Lib/test/test_asyncio/test_transports.py +++ b/Lib/test/test_asyncio/test_transports.py @@ -4,6 +4,7 @@ import unittest import unittest.mock import asyncio +from asyncio import transports class TransportTests(unittest.TestCase): @@ -60,6 +61,28 @@ class TransportTests(unittest.TestCase): self.assertRaises(NotImplementedError, transport.terminate) self.assertRaises(NotImplementedError, transport.kill) + def test_flowcontrol_mixin_set_write_limits(self): + + class MyTransport(transports._FlowControlMixin, + transports.Transport): + + def get_write_buffer_size(self): + return 512 + + transport = MyTransport() + transport._protocol = 
unittest.mock.Mock() + + self.assertFalse(transport._protocol_paused) + + with self.assertRaisesRegex(ValueError, 'high.*must be >= low'): + transport.set_write_buffer_limits(high=0, low=1) + + transport.set_write_buffer_limits(high=1024, low=128) + self.assertFalse(transport._protocol_paused) + + transport.set_write_buffer_limits(high=256, low=128) + self.assertTrue(transport._protocol_paused) + if __name__ == '__main__': unittest.main() -- cgit v0.12 From df90ae69fee6f979e3aa92dd62304c3f8adf193f Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 19 Feb 2014 18:10:32 +0100 Subject: Close #20682: Fix UNIX sockets tests of test_asyncio on Mac OS X Tiger On Mac OS X Tiger (and older), getsockname() returns a zero-length address for UNIX socket, and so 'sockname' extra info is None. --- Lib/test/test_asyncio/test_events.py | 32 +++++++++++++++++++++++++++----- 1 file changed, 27 insertions(+), 5 deletions(-) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index a0a4d02..36bb93a 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -4,6 +4,7 @@ import functools import gc import io import os +import platform import signal import socket try: @@ -40,6 +41,15 @@ def data_file(filename): raise FileNotFoundError(filename) +def osx_tiger(): + """Return True if the platform is Mac OS 10.4 or older.""" + if sys.platform != 'darwin': + return False + version = platform.mac_ver()[0] + version = tuple(map(int, version.split('.'))) + return version < (10, 5) + + ONLYCERT = data_file('ssl_cert.pem') ONLYKEY = data_file('ssl_key.pem') SIGNED_CERTFILE = data_file('keycert3.pem') @@ -499,10 +509,12 @@ class EventLoopTestsMixin: self.loop.run_forever() self.assertEqual(caught, 1) - def _basetest_create_connection(self, connection_fut): + def _basetest_create_connection(self, connection_fut, check_sockname): tr, pr = self.loop.run_until_complete(connection_fut) self.assertIsInstance(tr, asyncio.Transport) self.assertIsInstance(pr, asyncio.Protocol) + if check_sockname: + self.assertIsNotNone(tr.get_extra_info('sockname')) self.loop.run_until_complete(pr.done) self.assertGreater(pr.nbytes, 0) tr.close() @@ -515,10 +527,14 @@ class EventLoopTestsMixin: @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') def test_create_unix_connection(self): + # Issue #20682: On Mac OS X Tiger, getsockname() returns a + # zero-length address for UNIX socket. 
+ check_sockname = not osx_tiger() + with test_utils.run_test_unix_server() as httpd: conn_fut = self.loop.create_unix_connection( lambda: MyProto(loop=self.loop), httpd.address) - self._basetest_create_connection(conn_fut) + self._basetest_create_connection(conn_fut, check_sockname) def test_create_connection_sock(self): with test_utils.run_test_server() as httpd: @@ -548,12 +564,14 @@ class EventLoopTestsMixin: self.assertGreater(pr.nbytes, 0) tr.close() - def _basetest_create_ssl_connection(self, connection_fut): + def _basetest_create_ssl_connection(self, connection_fut, + check_sockname=True): tr, pr = self.loop.run_until_complete(connection_fut) self.assertIsInstance(tr, asyncio.Transport) self.assertIsInstance(pr, asyncio.Protocol) self.assertTrue('ssl' in tr.__class__.__name__.lower()) - self.assertIsNotNone(tr.get_extra_info('sockname')) + if check_sockname: + self.assertIsNotNone(tr.get_extra_info('sockname')) self.loop.run_until_complete(pr.done) self.assertGreater(pr.nbytes, 0) tr.close() @@ -571,6 +589,10 @@ class EventLoopTestsMixin: @unittest.skipIf(ssl is None, 'No ssl module') @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') def test_create_ssl_unix_connection(self): + # Issue #20682: On Mac OS X Tiger, getsockname() returns a + # zero-length address for UNIX socket. + check_sockname = not osx_tiger() + with test_utils.run_test_unix_server(use_ssl=True) as httpd: conn_fut = self.loop.create_unix_connection( lambda: MyProto(loop=self.loop), @@ -578,7 +600,7 @@ class EventLoopTestsMixin: ssl=test_utils.dummy_ssl_context(), server_hostname='127.0.0.1') - self._basetest_create_ssl_connection(conn_fut) + self._basetest_create_ssl_connection(conn_fut, check_sockname) def test_create_connection_local_addr(self): with test_utils.run_test_server() as httpd: -- cgit v0.12 From eaeb623ae3f1f7ac516017851224c018cf0310f6 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 19 Feb 2014 18:32:03 +0100 Subject: Issue #20682: Oops, fix test_create_connection() of test_asyncio (fix my previous commit) --- Lib/test/test_asyncio/test_events.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index 36bb93a..3720cc7 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -509,7 +509,7 @@ class EventLoopTestsMixin: self.loop.run_forever() self.assertEqual(caught, 1) - def _basetest_create_connection(self, connection_fut, check_sockname): + def _basetest_create_connection(self, connection_fut, check_sockname=True): tr, pr = self.loop.run_until_complete(connection_fut) self.assertIsInstance(tr, asyncio.Transport) self.assertIsInstance(pr, asyncio.Protocol) -- cgit v0.12 From ff385b89f40cfdfb6ceab41acfa89fa8594318f6 Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Wed, 19 Feb 2014 16:27:23 -0500 Subject: inspect: Fix getfullargspec() to not to follow __wrapped__ chains Initial patch by Nick Coghlan. --- Lib/inspect.py | 111 +++++++++++++++++++++++++++-------------------- Lib/test/test_inspect.py | 40 +++++++++++++++++ Misc/NEWS | 4 ++ 3 files changed, 109 insertions(+), 46 deletions(-) diff --git a/Lib/inspect.py b/Lib/inspect.py index 017a7e8..8b7840a 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -949,9 +949,9 @@ def getfullargspec(func): The first four items in the tuple correspond to getargspec(). 
""" - builtin_method_param = None - - if ismethod(func): + try: + # Re: `skip_bound_arg=False` + # # There is a notable difference in behaviour between getfullargspec # and Signature: the former always returns 'self' parameter for bound # methods, whereas the Signature always shows the actual calling @@ -960,20 +960,15 @@ def getfullargspec(func): # To simulate this behaviour, we "unbind" bound methods, to trick # inspect.signature to always return their first parameter ("self", # usually) - func = func.__func__ - elif isbuiltin(func): - # We have a builtin function or method. For that, we check the - # special '__text_signature__' attribute, provided by the - # Argument Clinic. If it's a method, we'll need to make sure - # that its first parameter (usually "self") is always returned - # (see the previous comment). - text_signature = getattr(func, '__text_signature__', None) - if text_signature and text_signature.startswith('($'): - builtin_method_param = _signature_get_bound_param(text_signature) + # Re: `follow_wrapper_chains=False` + # + # getfullargspec() historically ignored __wrapped__ attributes, + # so we ensure that remains the case in 3.3+ - try: - sig = signature(func) + sig = _signature_internal(func, + follow_wrapper_chains=False, + skip_bound_arg=False) except Exception as ex: # Most of the times 'signature' will raise ValueError. # But, it can also raise AttributeError, and, maybe something @@ -1023,13 +1018,6 @@ def getfullargspec(func): # compatibility with 'func.__defaults__' defaults = None - if builtin_method_param and (not args or args[0] != builtin_method_param): - # `func` is a method, and we always need to return its - # first parameter -- usually "self" (to be backwards - # compatible with the previous implementation of - # getfullargspec) - args.insert(0, builtin_method_param) - return FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwdefaults, annotations) @@ -1719,7 +1707,7 @@ def _signature_strip_non_python_syntax(signature): return clean_signature, self_parameter, last_positional_only -def _signature_fromstr(cls, obj, s): +def _signature_fromstr(cls, obj, s, skip_bound_arg=True): # Internal helper to parse content of '__text_signature__' # and return a Signature based on it Parameter = cls._parameter_cls @@ -1840,7 +1828,7 @@ def _signature_fromstr(cls, obj, s): if self_parameter is not None: assert parameters - if getattr(obj, '__self__', None): + if getattr(obj, '__self__', None) and skip_bound_arg: # strip off self, it's already been bound parameters.pop(0) else: @@ -1851,8 +1839,21 @@ def _signature_fromstr(cls, obj, s): return cls(parameters, return_annotation=cls.empty) -def signature(obj): - '''Get a signature object for the passed callable.''' +def _signature_from_builtin(cls, func, skip_bound_arg=True): + # Internal helper function to get signature for + # builtin callables + if not _signature_is_builtin(func): + raise TypeError("{!r} is not a Python builtin " + "function".format(func)) + + s = getattr(func, "__text_signature__", None) + if not s: + raise ValueError("no signature found for builtin {!r}".format(func)) + + return _signature_fromstr(cls, func, s, skip_bound_arg) + + +def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True): if not callable(obj): raise TypeError('{!r} is not a callable object'.format(obj)) @@ -1860,11 +1861,17 @@ def signature(obj): if isinstance(obj, types.MethodType): # In this case we skip the first parameter of the underlying # function (usually `self` or `cls`). 
- sig = signature(obj.__func__) - return _signature_bound_method(sig) + sig = _signature_internal(obj.__func__, + follow_wrapper_chains, + skip_bound_arg) + if skip_bound_arg: + return _signature_bound_method(sig) + else: + return sig # Was this function wrapped by a decorator? - obj = unwrap(obj, stop=(lambda f: hasattr(f, "__signature__"))) + if follow_wrapper_chains: + obj = unwrap(obj, stop=(lambda f: hasattr(f, "__signature__"))) try: sig = obj.__signature__ @@ -1887,7 +1894,9 @@ def signature(obj): # (usually `self`, or `cls`) will not be passed # automatically (as for boundmethods) - wrapped_sig = signature(partialmethod.func) + wrapped_sig = _signature_internal(partialmethod.func, + follow_wrapper_chains, + skip_bound_arg) sig = _signature_get_partial(wrapped_sig, partialmethod, (None,)) first_wrapped_param = tuple(wrapped_sig.parameters.values())[0] @@ -1896,7 +1905,8 @@ def signature(obj): return sig.replace(parameters=new_params) if _signature_is_builtin(obj): - return Signature.from_builtin(obj) + return _signature_from_builtin(Signature, obj, + skip_bound_arg=skip_bound_arg) if isfunction(obj) or _signature_is_functionlike(obj): # If it's a pure Python function, or an object that is duck type @@ -1904,7 +1914,9 @@ def signature(obj): return Signature.from_function(obj) if isinstance(obj, functools.partial): - wrapped_sig = signature(obj.func) + wrapped_sig = _signature_internal(obj.func, + follow_wrapper_chains, + skip_bound_arg) return _signature_get_partial(wrapped_sig, obj) sig = None @@ -1915,17 +1927,23 @@ def signature(obj): # in its metaclass call = _signature_get_user_defined_method(type(obj), '__call__') if call is not None: - sig = signature(call) + sig = _signature_internal(call, + follow_wrapper_chains, + skip_bound_arg) else: # Now we check if the 'obj' class has a '__new__' method new = _signature_get_user_defined_method(obj, '__new__') if new is not None: - sig = signature(new) + sig = _signature_internal(new, + follow_wrapper_chains, + skip_bound_arg) else: # Finally, we should have at least __init__ implemented init = _signature_get_user_defined_method(obj, '__init__') if init is not None: - sig = signature(init) + sig = _signature_internal(init, + follow_wrapper_chains, + skip_bound_arg) if sig is None: # At this point we know, that `obj` is a class, with no user- @@ -1967,7 +1985,9 @@ def signature(obj): call = _signature_get_user_defined_method(type(obj), '__call__') if call is not None: try: - sig = signature(call) + sig = _signature_internal(call, + follow_wrapper_chains, + skip_bound_arg) except ValueError as ex: msg = 'no signature found for {!r}'.format(obj) raise ValueError(msg) from ex @@ -1975,7 +1995,10 @@ def signature(obj): if sig is not None: # For classes and objects we skip the first parameter of their # __call__, __new__, or __init__ methods - return _signature_bound_method(sig) + if skip_bound_arg: + return _signature_bound_method(sig) + else: + return sig if isinstance(obj, types.BuiltinFunctionType): # Raise a nicer error message for builtins @@ -1984,6 +2007,10 @@ def signature(obj): raise ValueError('callable {!r} is not supported by signature'.format(obj)) +def signature(obj): + '''Get a signature object for the passed callable.''' + return _signature_internal(obj) + class _void: '''A private marker - used in Parameter & Signature''' @@ -2417,15 +2444,7 @@ class Signature: @classmethod def from_builtin(cls, func): - if not _signature_is_builtin(func): - raise TypeError("{!r} is not a Python builtin " - "function".format(func)) - - s = 
getattr(func, "__text_signature__", None) - if not s: - raise ValueError("no signature found for builtin {!r}".format(func)) - - return _signature_fromstr(cls, func, s) + return _signature_from_builtin(cls, func) @property def parameters(self): diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py index a34a418..711d2a3 100644 --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -577,6 +577,46 @@ class TestClassesAndFunctions(unittest.TestCase): kwonlyargs_e=['arg'], formatted='(*, arg)') + def test_argspec_api_ignores_wrapped(self): + # Issue 20684: low level introspection API must ignore __wrapped__ + @functools.wraps(mod.spam) + def ham(x, y): + pass + # Basic check + self.assertArgSpecEquals(ham, ['x', 'y'], formatted='(x, y)') + self.assertFullArgSpecEquals(ham, ['x', 'y'], formatted='(x, y)') + self.assertFullArgSpecEquals(functools.partial(ham), + ['x', 'y'], formatted='(x, y)') + # Other variants + def check_method(f): + self.assertArgSpecEquals(f, ['self', 'x', 'y'], + formatted='(self, x, y)') + class C: + @functools.wraps(mod.spam) + def ham(self, x, y): + pass + pham = functools.partialmethod(ham) + @functools.wraps(mod.spam) + def __call__(self, x, y): + pass + check_method(C()) + check_method(C.ham) + check_method(C().ham) + check_method(C.pham) + check_method(C().pham) + + class C_new: + @functools.wraps(mod.spam) + def __new__(self, x, y): + pass + check_method(C_new) + + class C_init: + @functools.wraps(mod.spam) + def __init__(self, x, y): + pass + check_method(C_init) + def test_getfullargspec_signature_attr(self): def test(): pass diff --git a/Misc/NEWS b/Misc/NEWS index b128462..d138547 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -26,6 +26,10 @@ Core and Builtins Library ------- +- Issue #20684: Fix inspect.getfullargspec() to not to follow __wrapped__ + chains. Make its behaviour consistent with bound methods first argument. + Patch by Nick Coghlan and Yury Selivanov. + - Issue #20681: Add new error handling API in asyncio. New APIs: loop.set_exception_handler(), loop.default_exception_handler(), and loop.call_exception_handler(). -- cgit v0.12 From 0f3e6bca1b7478027843fe6181f6b12f4c1514ed Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 19 Feb 2014 23:15:02 +0100 Subject: asyncio, Tulip issue #136: Add get/set_debug() methods to BaseEventLoopTests. Add also a PYTHONASYNCIODEBUG environment variable to debug coroutines since Python startup, to be able to debug coroutines defined directly in the asyncio module. --- Doc/library/asyncio-dev.rst | 10 ++++++---- Doc/library/asyncio-eventloop.rst | 16 ++++++++++++++++ Doc/using/cmdline.rst | 8 ++++++++ Lib/asyncio/base_events.py | 7 +++++++ Lib/asyncio/events.py | 8 ++++++++ Lib/asyncio/tasks.py | 5 ++++- Lib/test/test_asyncio/test_base_events.py | 6 ++++++ Lib/test/test_asyncio/test_tasks.py | 28 ++++++++++++++++++++++++++++ 8 files changed, 83 insertions(+), 5 deletions(-) diff --git a/Doc/library/asyncio-dev.rst b/Doc/library/asyncio-dev.rst index 90bae84..c5f0d1a 100644 --- a/Doc/library/asyncio-dev.rst +++ b/Doc/library/asyncio-dev.rst @@ -1,5 +1,7 @@ .. currentmodule:: asyncio +.. _asyncio-dev: + Develop with asyncio ==================== @@ -81,10 +83,10 @@ Detect coroutine objects never scheduled When a coroutine function is called but not passed to :func:`async` or to the :class:`Task` constructor, it is not scheduled and it is probably a bug. -To detect such bug, set :data:`asyncio.tasks._DEBUG` to ``True``. 
When the -coroutine object is destroyed by the garbage collector, a log will be emitted -with the traceback where the coroutine function was called. See the -:ref:`asyncio logger `. +To detect such bug, set the environment variable :envvar:`PYTHONASYNCIODEBUG` +to ``1``. When the coroutine object is destroyed by the garbage collector, a +log will be emitted with the traceback where the coroutine function was called. +See the :ref:`asyncio logger `. The debug flag changes the behaviour of the :func:`coroutine` decorator. The debug flag value is only used when then coroutine function is defined, not when diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index f056cac..04b182b 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -553,6 +553,22 @@ pool of processes). By default, an event loop uses a thread pool executor Set the default executor used by :meth:`run_in_executor`. +Debug mode +---------- + +.. method:: BaseEventLoop.get_debug() + + Get the debug mode (:class:`bool`) of the event loop. + +.. method:: BaseEventLoop.set_debug(enabled: bool) + + Set the debug mode of the event loop. + +.. seealso:: + + The :ref:`Develop with asyncio ` section. + + Server ------ diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index 4807a36..0c3c203 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -614,6 +614,14 @@ conflict. .. versionadded:: 3.4 +.. envvar:: PYTHONASYNCIODEBUG + + If this environment variable is set to a non-empty string, enable the debug + mode of the :mod:`asyncio` module. + + .. versionadded:: 3.4 + + Debug-mode variables ~~~~~~~~~~~~~~~~~~~~ diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index b94ba07..69caa4d 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -123,6 +123,7 @@ class BaseEventLoop(events.AbstractEventLoop): self._running = False self._clock_resolution = time.get_clock_info('monotonic').resolution self._exception_handler = None + self._debug = False def _make_socket_transport(self, sock, protocol, waiter=None, *, extra=None, server=None): @@ -795,3 +796,9 @@ class BaseEventLoop(events.AbstractEventLoop): if not handle._cancelled: handle._run() handle = None # Needed to break cycles when an exception occurs. + + def get_debug(self): + return self._debug + + def set_debug(self, enabled): + self._debug = enabled diff --git a/Lib/asyncio/events.py b/Lib/asyncio/events.py index 1030c04..5362f05 100644 --- a/Lib/asyncio/events.py +++ b/Lib/asyncio/events.py @@ -345,6 +345,14 @@ class AbstractEventLoop: def call_exception_handler(self, context): raise NotImplementedError + # Debug flag management. + + def get_debug(self): + raise NotImplementedError + + def set_debug(self, enabled): + raise NotImplementedError + class AbstractEventLoopPolicy: """Abstract policy for accessing the event loop.""" diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index a3e7cdf..cf7b540 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -12,6 +12,8 @@ import concurrent.futures import functools import inspect import linecache +import os +import sys import traceback import weakref @@ -28,7 +30,8 @@ from .log import logger # before you define your coroutines. A downside of using this feature # is that tracebacks show entries for the CoroWrapper.__next__ method # when _DEBUG is true. 
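A quick sketch of the two debug knobs this commit adds (not from the patch itself, assuming Python 3.4 asyncio). The per-loop flag can be toggled at runtime; coroutine-level debugging reads PYTHONASYNCIODEBUG once when asyncio is imported, so the variable has to be set before ``import asyncio``, e.g. ``PYTHONASYNCIODEBUG=1 python script.py``:

    import asyncio

    loop = asyncio.new_event_loop()
    print(loop.get_debug())    # False unless enabled explicitly
    loop.set_debug(True)
    assert loop.get_debug()
    loop.close()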
-_DEBUG = False +_DEBUG = (not sys.flags.ignore_environment + and bool(os.environ.get('PYTHONASYNCIODEBUG'))) class CoroWrapper: diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py index 2eee3be..784a39f 100644 --- a/Lib/test/test_asyncio/test_base_events.py +++ b/Lib/test/test_asyncio/test_base_events.py @@ -197,6 +197,12 @@ class BaseEventLoopTests(unittest.TestCase): self.assertEqual([h2], self.loop._scheduled) self.assertTrue(self.loop._process_events.called) + def test_set_debug(self): + self.loop.set_debug(True) + self.assertTrue(self.loop.get_debug()) + self.loop.set_debug(False) + self.assertFalse(self.loop.get_debug()) + @unittest.mock.patch('asyncio.base_events.time') @unittest.mock.patch('asyncio.base_events.logger') def test__run_once_logging(self, m_logger, m_time): diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index f27b952..6d03dc7 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -1,7 +1,9 @@ """Tests for tasks.py.""" import gc +import os.path import unittest +from test.script_helper import assert_python_ok import asyncio from asyncio import test_utils @@ -1461,6 +1463,32 @@ class GatherTestsBase: cb.assert_called_once_with(fut) self.assertEqual(fut.result(), [3, 1, exc, exc2]) + def test_env_var_debug(self): + path = os.path.dirname(asyncio.__file__) + path = os.path.normpath(os.path.join(path, '..')) + code = '\n'.join(( + 'import sys', + 'sys.path.insert(0, %r)' % path, + 'import asyncio.tasks', + 'print(asyncio.tasks._DEBUG)')) + + # Test with -E to not fail if the unit test was run with + # PYTHONASYNCIODEBUG set to a non-empty string + sts, stdout, stderr = assert_python_ok('-E', '-c', code) + self.assertEqual(stdout.rstrip(), b'False') + + sts, stdout, stderr = assert_python_ok('-c', code, + PYTHONASYNCIODEBUG='') + self.assertEqual(stdout.rstrip(), b'False') + + sts, stdout, stderr = assert_python_ok('-c', code, + PYTHONASYNCIODEBUG='1') + self.assertEqual(stdout.rstrip(), b'True') + + sts, stdout, stderr = assert_python_ok('-E', '-c', code, + PYTHONASYNCIODEBUG='1') + self.assertEqual(stdout.rstrip(), b'False') + class FutureGatherTests(GatherTestsBase, unittest.TestCase): -- cgit v0.12 From a50fd87237da1fe14f6f35cc0d0e492442924035 Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Wed, 19 Feb 2014 18:05:36 -0500 Subject: update magic number for #20625 --- Lib/importlib/_bootstrap.py | 3 +- Python/importlib.h | 320 ++++++++++++++++++++++---------------------- 2 files changed, 162 insertions(+), 161 deletions(-) diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py index 8a0cc34..4864024 100644 --- a/Lib/importlib/_bootstrap.py +++ b/Lib/importlib/_bootstrap.py @@ -418,12 +418,13 @@ def _call_with_frames_removed(f, *args, **kwds): # Python 3.4a1 3280 (remove implicit class argument) # Python 3.4a4 3290 (changes to __qualname__ computation) # Python 3.4a4 3300 (more changes to __qualname__ computation) +# Python 3.4rc2 3310 (alter __qualname__ computation) # # MAGIC must change whenever the bytecode emitted by the compiler may no # longer be understood by older implementations of the eval loop (usually # due to the addition of new opcodes). 
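The large Python/importlib.h hunk that follows is just the frozen copy of this module regenerated after the change. A quick check (editor's illustration, not from the patch) shows which bytes actually move:

    >>> (3300).to_bytes(2, 'little') + b'\r\n'   # old magic: bytes 228, 12, 13, 10
    b'\xe4\x0c\r\n'
    >>> (3310).to_bytes(2, 'little') + b'\r\n'   # new magic: bytes 238, 12, 13, 10
    b'\xee\x0c\r\n'

The remaining numeric churn in the dump, such as ``180,1,0,0`` becoming ``181,1,0,0``, appears to be line-number fields in the frozen code objects shifting by one because of the comment line added above.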
-MAGIC_NUMBER = (3300).to_bytes(2, 'little') + b'\r\n' +MAGIC_NUMBER = (3310).to_bytes(2, 'little') + b'\r\n' _RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c _PYCACHE = '__pycache__' diff --git a/Python/importlib.h b/Python/importlib.h index d84d9fb..f55c450 100644 --- a/Python/importlib.h +++ b/Python/importlib.h @@ -702,7 +702,7 @@ const unsigned char _Py_M__importlib[] = { 5,0,0,0,218,25,95,99,97,108,108,95,119,105,116,104, 95,102,114,97,109,101,115,95,114,101,109,111,118,101,100,57, 1,0,0,115,2,0,0,0,0,8,114,114,0,0,0,105, - 228,12,0,0,233,2,0,0,0,114,13,0,0,0,115,2, + 238,12,0,0,233,2,0,0,0,114,13,0,0,0,115,2, 0,0,0,13,10,90,11,95,95,112,121,99,97,99,104,101, 95,95,122,3,46,112,121,122,4,46,112,121,99,122,4,46, 112,121,111,78,99,2,0,0,0,0,0,0,0,11,0,0, @@ -771,7 +771,7 @@ const unsigned char _Py_M__importlib[] = { 3,116,97,103,218,8,102,105,108,101,110,97,109,101,114,4, 0,0,0,114,4,0,0,0,114,5,0,0,0,218,17,99, 97,99,104,101,95,102,114,111,109,95,115,111,117,114,99,101, - 180,1,0,0,115,22,0,0,0,0,13,31,1,6,1,9, + 181,1,0,0,115,22,0,0,0,0,13,31,1,6,1,9, 2,6,1,18,1,24,1,12,1,12,1,15,1,31,1,114, 132,0,0,0,99,1,0,0,0,0,0,0,0,5,0,0, 0,5,0,0,0,67,0,0,0,115,193,0,0,0,116,0, @@ -828,7 +828,7 @@ const unsigned char _Py_M__importlib[] = { 95,102,105,108,101,110,97,109,101,90,7,112,121,99,97,99, 104,101,114,129,0,0,0,114,4,0,0,0,114,4,0,0, 0,114,5,0,0,0,218,17,115,111,117,114,99,101,95,102, - 114,111,109,95,99,97,99,104,101,207,1,0,0,115,24,0, + 114,111,109,95,99,97,99,104,101,208,1,0,0,115,24,0, 0,0,0,9,18,1,15,1,18,1,18,1,12,1,3,1, 24,1,21,1,3,1,21,1,19,1,114,135,0,0,0,99, 1,0,0,0,0,0,0,0,5,0,0,0,13,0,0,0, @@ -865,7 +865,7 @@ const unsigned char _Py_M__importlib[] = { 105,111,110,218,11,115,111,117,114,99,101,95,112,97,116,104, 114,4,0,0,0,114,4,0,0,0,114,5,0,0,0,218, 15,95,103,101,116,95,115,111,117,114,99,101,102,105,108,101, - 230,1,0,0,115,20,0,0,0,0,7,18,1,4,1,24, + 231,1,0,0,115,20,0,0,0,0,7,18,1,4,1,24, 1,35,1,4,1,3,1,16,1,19,1,21,1,114,142,0, 0,0,99,1,0,0,0,0,0,0,0,2,0,0,0,11, 0,0,0,67,0,0,0,115,60,0,0,0,121,19,0,116, @@ -880,7 +880,7 @@ const unsigned char _Py_M__importlib[] = { 114,41,0,0,0,114,40,0,0,0,41,2,114,35,0,0, 0,114,42,0,0,0,114,4,0,0,0,114,4,0,0,0, 114,5,0,0,0,218,10,95,99,97,108,99,95,109,111,100, - 101,249,1,0,0,115,12,0,0,0,0,2,3,1,19,1, + 101,250,1,0,0,115,12,0,0,0,0,2,3,1,19,1, 13,1,11,3,10,1,114,144,0,0,0,218,9,118,101,114, 98,111,115,105,116,121,114,29,0,0,0,99,1,0,0,0, 1,0,0,0,3,0,0,0,4,0,0,0,71,0,0,0, @@ -901,7 +901,7 @@ const unsigned char _Py_M__importlib[] = { 6,115,116,100,101,114,114,41,3,218,7,109,101,115,115,97, 103,101,114,145,0,0,0,114,80,0,0,0,114,4,0,0, 0,114,4,0,0,0,114,5,0,0,0,218,16,95,118,101, - 114,98,111,115,101,95,109,101,115,115,97,103,101,5,2,0, + 114,98,111,115,101,95,109,101,115,115,97,103,101,6,2,0, 0,115,8,0,0,0,0,2,18,1,15,1,13,1,114,152, 0,0,0,99,1,0,0,0,0,0,0,0,2,0,0,0, 4,0,0,0,3,0,0,0,115,38,0,0,0,100,1,0, @@ -937,14 +937,14 @@ const unsigned char _Py_M__importlib[] = { 0,0,114,80,0,0,0,114,108,0,0,0,41,1,218,6, 109,101,116,104,111,100,114,4,0,0,0,114,5,0,0,0, 218,19,95,99,104,101,99,107,95,110,97,109,101,95,119,114, - 97,112,112,101,114,21,2,0,0,115,10,0,0,0,0,1, + 97,112,112,101,114,22,2,0,0,115,10,0,0,0,0,1, 12,1,12,1,15,1,25,1,122,40,95,99,104,101,99,107, 95,110,97,109,101,46,60,108,111,99,97,108,115,62,46,95, 99,104,101,99,107,95,110,97,109,101,95,119,114,97,112,112, 101,114,41,1,114,65,0,0,0,41,2,114,154,0,0,0, 114,155,0,0,0,114,4,0,0,0,41,1,114,154,0,0, 0,114,5,0,0,0,218,11,95,99,104,101,99,107,95,110, - 
97,109,101,13,2,0,0,115,6,0,0,0,0,8,21,6, + 97,109,101,14,2,0,0,115,6,0,0,0,0,8,21,6, 13,1,114,156,0,0,0,99,1,0,0,0,0,0,0,0, 2,0,0,0,3,0,0,0,3,0,0,0,115,35,0,0, 0,135,0,0,102,1,0,100,1,0,100,2,0,134,0,0, @@ -966,7 +966,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,218,8,102,117,108,108,110,97,109,101,41,1,218,3, 102,120,110,114,4,0,0,0,114,5,0,0,0,218,25,95, 114,101,113,117,105,114,101,115,95,98,117,105,108,116,105,110, - 95,119,114,97,112,112,101,114,33,2,0,0,115,8,0,0, + 95,119,114,97,112,112,101,114,34,2,0,0,115,8,0,0, 0,0,1,15,1,18,1,12,1,122,52,95,114,101,113,117, 105,114,101,115,95,98,117,105,108,116,105,110,46,60,108,111, 99,97,108,115,62,46,95,114,101,113,117,105,114,101,115,95, @@ -974,7 +974,7 @@ const unsigned char _Py_M__importlib[] = { 1,114,65,0,0,0,41,2,114,159,0,0,0,114,160,0, 0,0,114,4,0,0,0,41,1,114,159,0,0,0,114,5, 0,0,0,218,17,95,114,101,113,117,105,114,101,115,95,98, - 117,105,108,116,105,110,31,2,0,0,115,6,0,0,0,0, + 117,105,108,116,105,110,32,2,0,0,115,6,0,0,0,0, 2,18,5,13,1,114,161,0,0,0,99,1,0,0,0,0, 0,0,0,2,0,0,0,3,0,0,0,3,0,0,0,115, 35,0,0,0,135,0,0,102,1,0,100,1,0,100,2,0, @@ -995,7 +995,7 @@ const unsigned char _Py_M__importlib[] = { 2,114,71,0,0,0,114,158,0,0,0,41,1,114,159,0, 0,0,114,4,0,0,0,114,5,0,0,0,218,24,95,114, 101,113,117,105,114,101,115,95,102,114,111,122,101,110,95,119, - 114,97,112,112,101,114,44,2,0,0,115,8,0,0,0,0, + 114,97,112,112,101,114,45,2,0,0,115,8,0,0,0,0, 1,15,1,18,1,12,1,122,50,95,114,101,113,117,105,114, 101,115,95,102,114,111,122,101,110,46,60,108,111,99,97,108, 115,62,46,95,114,101,113,117,105,114,101,115,95,102,114,111, @@ -1003,7 +1003,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,41,2,114,159,0,0,0,114,163,0,0,0,114,4, 0,0,0,41,1,114,159,0,0,0,114,5,0,0,0,218, 16,95,114,101,113,117,105,114,101,115,95,102,114,111,122,101, - 110,42,2,0,0,115,6,0,0,0,0,2,18,5,13,1, + 110,43,2,0,0,115,6,0,0,0,0,2,18,5,13,1, 114,164,0,0,0,99,2,0,0,0,0,0,0,0,5,0, 0,0,5,0,0,0,67,0,0,0,115,87,0,0,0,124, 0,0,106,0,0,124,1,0,131,1,0,92,2,0,125,2, @@ -1032,7 +1032,7 @@ const unsigned char _Py_M__importlib[] = { 108,111,97,100,101,114,218,8,112,111,114,116,105,111,110,115, 218,3,109,115,103,114,4,0,0,0,114,4,0,0,0,114, 5,0,0,0,218,17,95,102,105,110,100,95,109,111,100,117, - 108,101,95,115,104,105,109,53,2,0,0,115,10,0,0,0, + 108,101,95,115,104,105,109,54,2,0,0,115,10,0,0,0, 0,10,21,1,24,1,6,1,32,1,114,172,0,0,0,99, 2,0,0,0,0,0,0,0,5,0,0,0,3,0,0,0, 67,0,0,0,115,93,0,0,0,116,0,0,124,1,0,124, @@ -1057,7 +1057,7 @@ const unsigned char _Py_M__importlib[] = { 4,115,112,101,99,218,7,109,101,116,104,111,100,115,218,6, 109,111,100,117,108,101,114,4,0,0,0,114,4,0,0,0, 114,5,0,0,0,218,17,95,108,111,97,100,95,109,111,100, - 117,108,101,95,115,104,105,109,70,2,0,0,115,14,0,0, + 117,108,101,95,115,104,105,109,71,2,0,0,115,14,0,0, 0,0,6,15,1,12,1,15,1,13,1,13,1,11,2,114, 180,0,0,0,99,4,0,0,0,0,0,0,0,11,0,0, 0,19,0,0,0,67,0,0,0,115,243,1,0,0,105,0, @@ -1142,7 +1142,7 @@ const unsigned char _Py_M__importlib[] = { 101,95,109,116,105,109,101,218,11,115,111,117,114,99,101,95, 115,105,122,101,114,4,0,0,0,114,4,0,0,0,114,5, 0,0,0,218,25,95,118,97,108,105,100,97,116,101,95,98, - 121,116,101,99,111,100,101,95,104,101,97,100,101,114,86,2, + 121,116,101,99,111,100,101,95,104,101,97,100,101,114,87,2, 0,0,115,76,0,0,0,0,11,6,1,12,1,13,3,6, 1,12,1,13,1,16,1,16,1,16,1,12,1,18,1,10, 1,18,1,18,1,15,1,10,1,15,1,18,1,15,1,10, @@ -1173,7 +1173,7 @@ const unsigned char _Py_M__importlib[] = { 41,5,114,53,0,0,0,114,67,0,0,0,114,140,0,0, 0,114,141,0,0,0,218,4,99,111,100,101,114,4,0,0, 
0,114,4,0,0,0,114,5,0,0,0,218,17,95,99,111, - 109,112,105,108,101,95,98,121,116,101,99,111,100,101,141,2, + 109,112,105,108,101,95,98,121,116,101,99,111,100,101,142,2, 0,0,115,16,0,0,0,0,2,15,1,15,1,13,1,12, 1,19,1,4,2,18,1,114,195,0,0,0,114,84,0,0, 0,99,3,0,0,0,0,0,0,0,4,0,0,0,3,0, @@ -1193,7 +1193,7 @@ const unsigned char _Py_M__importlib[] = { 5,100,117,109,112,115,41,4,114,194,0,0,0,114,183,0, 0,0,114,189,0,0,0,114,53,0,0,0,114,4,0,0, 0,114,4,0,0,0,114,5,0,0,0,218,17,95,99,111, - 100,101,95,116,111,95,98,121,116,101,99,111,100,101,153,2, + 100,101,95,116,111,95,98,121,116,101,99,111,100,101,154,2, 0,0,115,10,0,0,0,0,3,12,1,19,1,19,1,22, 1,114,198,0,0,0,99,1,0,0,0,0,0,0,0,5, 0,0,0,4,0,0,0,67,0,0,0,115,89,0,0,0, @@ -1222,7 +1222,7 @@ const unsigned char _Py_M__importlib[] = { 101,218,8,101,110,99,111,100,105,110,103,90,15,110,101,119, 108,105,110,101,95,100,101,99,111,100,101,114,114,4,0,0, 0,114,4,0,0,0,114,5,0,0,0,218,13,100,101,99, - 111,100,101,95,115,111,117,114,99,101,163,2,0,0,115,10, + 111,100,101,95,115,111,117,114,99,101,164,2,0,0,115,10, 0,0,0,0,5,12,1,18,1,15,1,18,1,114,203,0, 0,0,99,1,0,0,0,0,0,0,0,5,0,0,0,35, 0,0,0,67,0,0,0,115,15,1,0,0,116,0,0,124, @@ -1256,7 +1256,7 @@ const unsigned char _Py_M__importlib[] = { 95,95,114,47,0,0,0,41,5,114,179,0,0,0,114,169, 0,0,0,114,177,0,0,0,114,67,0,0,0,114,131,0, 0,0,114,4,0,0,0,114,4,0,0,0,114,5,0,0, - 0,218,12,95,109,111,100,117,108,101,95,114,101,112,114,177, + 0,218,12,95,109,111,100,117,108,101,95,114,101,112,114,178, 2,0,0,115,46,0,0,0,0,2,18,1,15,4,3,1, 17,1,13,1,8,1,3,1,13,1,13,1,5,2,12,1, 16,4,3,1,13,1,13,1,11,1,3,1,13,1,13,1, @@ -1273,7 +1273,7 @@ const unsigned char _Py_M__importlib[] = { 83,41,1,78,41,3,218,7,95,109,111,100,117,108,101,114, 208,0,0,0,218,5,95,115,112,101,99,41,2,114,71,0, 0,0,114,179,0,0,0,114,4,0,0,0,114,4,0,0, - 0,114,5,0,0,0,114,72,0,0,0,215,2,0,0,115, + 0,114,5,0,0,0,114,72,0,0,0,216,2,0,0,115, 4,0,0,0,0,1,9,1,122,26,95,105,110,115,116,97, 108,108,101,100,95,115,97,102,101,108,121,46,95,95,105,110, 105,116,95,95,99,1,0,0,0,0,0,0,0,1,0,0, @@ -1284,7 +1284,7 @@ const unsigned char _Py_M__importlib[] = { 13,95,105,110,105,116,105,97,108,105,122,105,110,103,114,213, 0,0,0,114,7,0,0,0,114,73,0,0,0,114,67,0, 0,0,41,1,114,71,0,0,0,114,4,0,0,0,114,4, - 0,0,0,114,5,0,0,0,114,75,0,0,0,219,2,0, + 0,0,0,114,5,0,0,0,114,75,0,0,0,220,2,0, 0,115,4,0,0,0,0,4,12,1,122,27,95,105,110,115, 116,97,108,108,101,100,95,115,97,102,101,108,121,46,95,95, 101,110,116,101,114,95,95,99,1,0,0,0,0,0,0,0, @@ -1302,7 +1302,7 @@ const unsigned char _Py_M__importlib[] = { 124,1,0,100,0,0,107,9,0,86,1,113,3,0,100,0, 0,83,41,1,78,114,4,0,0,0,41,2,114,22,0,0, 0,114,76,0,0,0,114,4,0,0,0,114,4,0,0,0, - 114,5,0,0,0,114,77,0,0,0,229,2,0,0,115,2, + 114,5,0,0,0,114,77,0,0,0,230,2,0,0,115,2, 0,0,0,6,0,122,45,95,105,110,115,116,97,108,108,101, 100,95,115,97,102,101,108,121,46,95,95,101,120,105,116,95, 95,46,60,108,111,99,97,108,115,62,46,60,103,101,110,101, @@ -1312,14 +1312,14 @@ const unsigned char _Py_M__importlib[] = { 67,0,0,0,114,79,0,0,0,114,152,0,0,0,114,169, 0,0,0,114,215,0,0,0,41,3,114,71,0,0,0,114, 80,0,0,0,114,177,0,0,0,114,4,0,0,0,114,4, - 0,0,0,114,5,0,0,0,114,81,0,0,0,226,2,0, + 0,0,0,114,5,0,0,0,114,81,0,0,0,227,2,0, 0,115,18,0,0,0,0,1,3,1,9,1,25,1,3,1, 17,1,13,1,8,2,26,2,122,26,95,105,110,115,116,97, 108,108,101,100,95,115,97,102,101,108,121,46,95,95,101,120, 105,116,95,95,78,41,6,114,57,0,0,0,114,56,0,0, 0,114,58,0,0,0,114,72,0,0,0,114,75,0,0,0, 114,81,0,0,0,114,4,0,0,0,114,4,0,0,0,114, - 4,0,0,0,114,5,0,0,0,114,212,0,0,0,213,2, + 
4,0,0,0,114,5,0,0,0,114,212,0,0,0,214,2, 0,0,115,6,0,0,0,12,2,12,4,12,7,114,212,0, 0,0,99,0,0,0,0,0,0,0,0,0,0,0,0,8, 0,0,0,64,0,0,0,115,172,0,0,0,101,0,0,90, @@ -1444,7 +1444,7 @@ const unsigned char _Py_M__importlib[] = { 97,99,104,101,100,41,6,114,71,0,0,0,114,67,0,0, 0,114,169,0,0,0,114,217,0,0,0,114,218,0,0,0, 114,219,0,0,0,114,4,0,0,0,114,4,0,0,0,114, - 5,0,0,0,114,72,0,0,0,21,3,0,0,115,14,0, + 5,0,0,0,114,72,0,0,0,22,3,0,0,115,14,0, 0,0,0,2,9,1,9,1,9,1,9,1,21,3,9,1, 122,19,77,111,100,117,108,101,83,112,101,99,46,95,95,105, 110,105,116,95,95,99,1,0,0,0,0,0,0,0,2,0, @@ -1469,7 +1469,7 @@ const unsigned char _Py_M__importlib[] = { 95,95,99,108,97,115,115,95,95,114,57,0,0,0,114,26, 0,0,0,41,2,114,71,0,0,0,114,80,0,0,0,114, 4,0,0,0,114,4,0,0,0,114,5,0,0,0,114,101, - 0,0,0,33,3,0,0,115,16,0,0,0,0,1,15,1, + 0,0,0,34,3,0,0,115,16,0,0,0,0,1,15,1, 21,1,15,1,28,1,15,1,6,1,22,1,122,19,77,111, 100,117,108,101,83,112,101,99,46,95,95,114,101,112,114,95, 95,99,2,0,0,0,0,0,0,0,3,0,0,0,13,0, @@ -1488,7 +1488,7 @@ const unsigned char _Py_M__importlib[] = { 104,97,115,95,108,111,99,97,116,105,111,110,114,209,0,0, 0,41,3,114,71,0,0,0,218,5,111,116,104,101,114,218, 4,115,109,115,108,114,4,0,0,0,114,4,0,0,0,114, - 5,0,0,0,218,6,95,95,101,113,95,95,43,3,0,0, + 5,0,0,0,218,6,95,95,101,113,95,95,44,3,0,0, 115,20,0,0,0,0,1,9,1,3,1,18,1,18,1,18, 1,15,1,18,1,20,1,13,1,122,17,77,111,100,117,108, 101,83,112,101,99,46,95,95,101,113,95,95,99,1,0,0, @@ -1509,7 +1509,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,0,114,124,0,0,0,218,17,66,89,84,69,67,79, 68,69,95,83,85,70,70,73,88,69,83,41,2,114,71,0, 0,0,114,131,0,0,0,114,4,0,0,0,114,4,0,0, - 0,114,5,0,0,0,114,225,0,0,0,55,3,0,0,115, + 0,114,5,0,0,0,114,225,0,0,0,56,3,0,0,115, 22,0,0,0,0,2,15,1,24,1,9,1,21,1,3,1, 19,1,13,1,8,1,21,1,18,1,122,17,77,111,100,117, 108,101,83,112,101,99,46,99,97,99,104,101,100,99,2,0, @@ -1517,7 +1517,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,115,13,0,0,0,124,1,0,124,0,0,95,0,0, 100,0,0,83,41,1,78,41,1,114,222,0,0,0,41,2, 114,71,0,0,0,114,225,0,0,0,114,4,0,0,0,114, - 4,0,0,0,114,5,0,0,0,114,225,0,0,0,69,3, + 4,0,0,0,114,5,0,0,0,114,225,0,0,0,70,3, 0,0,115,2,0,0,0,0,2,99,1,0,0,0,0,0, 0,0,1,0,0,0,2,0,0,0,67,0,0,0,115,46, 0,0,0,124,0,0,106,0,0,100,1,0,107,8,0,114, @@ -1528,14 +1528,14 @@ const unsigned char _Py_M__importlib[] = { 114,101,110,116,46,78,114,116,0,0,0,114,84,0,0,0, 41,3,114,220,0,0,0,114,67,0,0,0,114,32,0,0, 0,41,1,114,71,0,0,0,114,4,0,0,0,114,4,0, - 0,0,114,5,0,0,0,218,6,112,97,114,101,110,116,73, + 0,0,114,5,0,0,0,218,6,112,97,114,101,110,116,74, 3,0,0,115,6,0,0,0,0,3,15,1,20,2,122,17, 77,111,100,117,108,101,83,112,101,99,46,112,97,114,101,110, 116,99,1,0,0,0,0,0,0,0,1,0,0,0,1,0, 0,0,67,0,0,0,115,7,0,0,0,124,0,0,106,0, 0,83,41,1,78,41,1,114,221,0,0,0,41,1,114,71, 0,0,0,114,4,0,0,0,114,4,0,0,0,114,5,0, - 0,0,114,226,0,0,0,81,3,0,0,115,2,0,0,0, + 0,0,114,226,0,0,0,82,3,0,0,115,2,0,0,0, 0,2,122,23,77,111,100,117,108,101,83,112,101,99,46,104, 97,115,95,108,111,99,97,116,105,111,110,99,2,0,0,0, 0,0,0,0,2,0,0,0,2,0,0,0,67,0,0,0, @@ -1543,14 +1543,14 @@ const unsigned char _Py_M__importlib[] = { 0,95,1,0,100,0,0,83,41,1,78,41,2,218,4,98, 111,111,108,114,221,0,0,0,41,2,114,71,0,0,0,218, 5,118,97,108,117,101,114,4,0,0,0,114,4,0,0,0, - 114,5,0,0,0,114,226,0,0,0,85,3,0,0,115,2, + 114,5,0,0,0,114,226,0,0,0,86,3,0,0,115,2, 0,0,0,0,2,41,12,114,57,0,0,0,114,56,0,0, 0,114,58,0,0,0,114,59,0,0,0,114,72,0,0,0, 114,101,0,0,0,114,229,0,0,0,218,8,112,114,111,112, 101,114,116,121,114,225,0,0,0,218,6,115,101,116,116,101, 
114,114,233,0,0,0,114,226,0,0,0,114,4,0,0,0, 114,4,0,0,0,114,4,0,0,0,114,5,0,0,0,114, - 216,0,0,0,240,2,0,0,115,20,0,0,0,12,35,6, + 216,0,0,0,241,2,0,0,115,20,0,0,0,12,35,6, 2,15,1,15,11,12,10,12,12,18,14,21,4,18,8,18, 4,114,216,0,0,0,114,217,0,0,0,114,219,0,0,0, 99,2,0,0,0,2,0,0,0,5,0,0,0,15,0,0, @@ -1578,7 +1578,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,114,216,0,0,0,41,5,114,67,0,0,0,114,169, 0,0,0,114,217,0,0,0,114,219,0,0,0,90,6,115, 101,97,114,99,104,114,4,0,0,0,114,4,0,0,0,114, - 5,0,0,0,114,173,0,0,0,90,3,0,0,115,28,0, + 5,0,0,0,114,173,0,0,0,91,3,0,0,115,28,0, 0,0,0,2,15,1,12,1,16,1,18,1,15,1,7,2, 12,1,15,1,3,1,19,1,13,1,14,3,9,2,114,173, 0,0,0,114,169,0,0,0,114,220,0,0,0,99,2,0, @@ -1640,7 +1640,7 @@ const unsigned char _Py_M__importlib[] = { 218,12,108,111,97,100,101,114,95,99,108,97,115,115,114,127, 0,0,0,114,219,0,0,0,90,7,100,105,114,110,97,109, 101,114,4,0,0,0,114,4,0,0,0,114,5,0,0,0, - 114,239,0,0,0,115,3,0,0,115,60,0,0,0,0,12, + 114,239,0,0,0,116,3,0,0,115,60,0,0,0,0,12, 12,4,6,1,15,2,3,1,19,1,13,1,11,8,21,1, 9,3,12,1,22,1,21,1,15,1,9,1,8,2,7,3, 12,2,15,1,3,1,19,1,13,1,5,2,6,1,18,2, @@ -1680,7 +1680,7 @@ const unsigned char _Py_M__importlib[] = { 217,0,0,0,114,177,0,0,0,114,67,0,0,0,114,242, 0,0,0,114,225,0,0,0,114,220,0,0,0,114,4,0, 0,0,114,4,0,0,0,114,5,0,0,0,218,17,95,115, - 112,101,99,95,102,114,111,109,95,109,111,100,117,108,101,179, + 112,101,99,95,102,114,111,109,95,109,111,100,117,108,101,180, 3,0,0,115,72,0,0,0,0,2,3,1,13,1,13,1, 5,2,12,1,4,2,9,1,12,1,3,1,13,1,13,2, 8,1,3,1,13,1,13,1,11,1,12,1,12,1,3,1, @@ -1707,7 +1707,7 @@ const unsigned char _Py_M__importlib[] = { 124,0,0,95,0,0,100,0,0,83,41,1,78,41,1,114, 177,0,0,0,41,2,114,71,0,0,0,114,177,0,0,0, 114,4,0,0,0,114,4,0,0,0,114,5,0,0,0,114, - 72,0,0,0,231,3,0,0,115,2,0,0,0,0,1,122, + 72,0,0,0,232,3,0,0,115,2,0,0,0,0,1,122, 21,95,83,112,101,99,77,101,116,104,111,100,115,46,95,95, 105,110,105,116,95,95,99,1,0,0,0,0,0,0,0,3, 0,0,0,3,0,0,0,67,0,0,0,115,158,0,0,0, @@ -1733,7 +1733,7 @@ const unsigned char _Py_M__importlib[] = { 0,114,169,0,0,0,114,47,0,0,0,114,226,0,0,0, 41,3,114,71,0,0,0,114,177,0,0,0,114,67,0,0, 0,114,4,0,0,0,114,4,0,0,0,114,5,0,0,0, - 114,205,0,0,0,234,3,0,0,115,18,0,0,0,0,3, + 114,205,0,0,0,235,3,0,0,115,18,0,0,0,0,3, 9,1,30,1,15,1,15,1,13,2,22,2,9,1,19,2, 122,24,95,83,112,101,99,77,101,116,104,111,100,115,46,109, 111,100,117,108,101,95,114,101,112,114,218,9,95,111,118,101, @@ -1824,7 +1824,7 @@ const unsigned char _Py_M__importlib[] = { 71,0,0,0,114,179,0,0,0,114,248,0,0,0,114,249, 0,0,0,114,177,0,0,0,114,169,0,0,0,114,4,0, 0,0,114,4,0,0,0,114,5,0,0,0,218,17,105,110, - 105,116,95,109,111,100,117,108,101,95,97,116,116,114,115,250, + 105,116,95,109,111,100,117,108,101,95,97,116,116,114,115,251, 3,0,0,115,88,0,0,0,0,17,9,6,12,1,24,1, 3,1,16,1,13,1,8,3,30,1,9,1,12,2,15,1, 15,1,18,1,3,1,13,1,13,1,8,3,30,1,3,1, @@ -1856,7 +1856,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,114,68,0,0,0,114,67,0,0,0,114,254,0,0, 0,41,3,114,71,0,0,0,114,177,0,0,0,114,179,0, 0,0,114,4,0,0,0,114,4,0,0,0,114,5,0,0, - 0,218,6,99,114,101,97,116,101,74,4,0,0,115,16,0, + 0,218,6,99,114,101,97,116,101,75,4,0,0,115,16,0, 0,0,0,7,9,2,18,3,21,2,6,1,12,4,18,1, 13,1,122,19,95,83,112,101,99,77,101,116,104,111,100,115, 46,99,114,101,97,116,101,99,2,0,0,0,0,0,0,0, @@ -1878,7 +1878,7 @@ const unsigned char _Py_M__importlib[] = { 0,114,169,0,0,0,218,11,101,120,101,99,95,109,111,100, 117,108,101,41,2,114,71,0,0,0,114,179,0,0,0,114, 4,0,0,0,114,4,0,0,0,114,5,0,0,0,218,5, - 95,101,120,101,99,97,4,0,0,115,2,0,0,0,0,7, + 
95,101,120,101,99,98,4,0,0,115,2,0,0,0,0,7, 122,18,95,83,112,101,99,77,101,116,104,111,100,115,46,95, 101,120,101,99,99,2,0,0,0,0,0,0,0,4,0,0, 0,11,0,0,0,67,0,0,0,115,17,1,0,0,124,0, @@ -1915,7 +1915,7 @@ const unsigned char _Py_M__importlib[] = { 111,100,117,108,101,114,2,1,0,0,41,4,114,71,0,0, 0,114,179,0,0,0,114,67,0,0,0,114,171,0,0,0, 114,4,0,0,0,114,4,0,0,0,114,5,0,0,0,114, - 175,0,0,0,107,4,0,0,115,32,0,0,0,0,2,12, + 175,0,0,0,108,4,0,0,115,32,0,0,0,0,2,12, 1,10,1,13,1,24,1,15,1,21,1,18,1,18,1,27, 2,19,1,4,1,19,1,21,4,22,2,19,1,122,17,95, 83,112,101,99,77,101,116,104,111,100,115,46,101,120,101,99, @@ -1948,7 +1948,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,114,177,0,0,0,114,179,0,0,0,114,4,0,0, 0,114,4,0,0,0,114,5,0,0,0,218,25,95,108,111, 97,100,95,98,97,99,107,119,97,114,100,95,99,111,109,112, - 97,116,105,98,108,101,131,4,0,0,115,42,0,0,0,0, + 97,116,105,98,108,101,132,4,0,0,115,42,0,0,0,0, 4,9,1,19,2,16,1,24,1,3,1,16,1,13,1,8, 1,24,1,3,4,12,1,15,1,32,1,13,1,8,1,24, 1,3,1,13,1,13,1,8,1,122,38,95,83,112,101,99, @@ -1975,7 +1975,7 @@ const unsigned char _Py_M__importlib[] = { 7,0,0,0,114,73,0,0,0,41,2,114,71,0,0,0, 114,179,0,0,0,114,4,0,0,0,114,4,0,0,0,114, 5,0,0,0,218,14,95,108,111,97,100,95,117,110,108,111, - 99,107,101,100,161,4,0,0,115,20,0,0,0,0,2,18, + 99,107,101,100,162,4,0,0,115,20,0,0,0,0,2,18, 2,21,1,13,2,12,1,13,1,18,1,18,1,30,3,19, 5,122,27,95,83,112,101,99,77,101,116,104,111,100,115,46, 95,108,111,97,100,95,117,110,108,111,99,107,101,100,99,1, @@ -2000,7 +2000,7 @@ const unsigned char _Py_M__importlib[] = { 106,0,0,0,114,3,1,0,0,114,103,0,0,0,114,177, 0,0,0,114,67,0,0,0,114,6,1,0,0,41,1,114, 71,0,0,0,114,4,0,0,0,114,4,0,0,0,114,5, - 0,0,0,114,176,0,0,0,184,4,0,0,115,6,0,0, + 0,0,0,114,176,0,0,0,185,4,0,0,115,6,0,0, 0,0,9,10,1,19,1,122,17,95,83,112,101,99,77,101, 116,104,111,100,115,46,108,111,97,100,78,41,13,114,57,0, 0,0,114,56,0,0,0,114,58,0,0,0,114,59,0,0, @@ -2008,7 +2008,7 @@ const unsigned char _Py_M__importlib[] = { 114,0,1,0,0,114,2,1,0,0,114,175,0,0,0,114, 5,1,0,0,114,6,1,0,0,114,176,0,0,0,114,4, 0,0,0,114,4,0,0,0,114,4,0,0,0,114,5,0, - 0,0,114,174,0,0,0,224,3,0,0,115,20,0,0,0, + 0,0,114,174,0,0,0,225,3,0,0,115,20,0,0,0, 12,3,6,4,12,3,12,16,24,80,12,23,12,10,12,24, 12,30,12,23,114,174,0,0,0,99,0,0,0,0,0,0, 0,0,0,0,0,0,5,0,0,0,64,0,0,0,115,181, @@ -2047,7 +2047,7 @@ const unsigned char _Py_M__importlib[] = { 60,109,111,100,117,108,101,32,123,33,114,125,32,40,98,117, 105,108,116,45,105,110,41,62,41,2,114,47,0,0,0,114, 57,0,0,0,41,1,114,179,0,0,0,114,4,0,0,0, - 114,4,0,0,0,114,5,0,0,0,114,205,0,0,0,209, + 114,4,0,0,0,114,5,0,0,0,114,205,0,0,0,210, 4,0,0,115,2,0,0,0,0,7,122,27,66,117,105,108, 116,105,110,73,109,112,111,114,116,101,114,46,109,111,100,117, 108,101,95,114,101,112,114,78,99,4,0,0,0,0,0,0, @@ -2061,7 +2061,7 @@ const unsigned char _Py_M__importlib[] = { 110,114,173,0,0,0,41,4,218,3,99,108,115,114,158,0, 0,0,114,35,0,0,0,218,6,116,97,114,103,101,116,114, 4,0,0,0,114,4,0,0,0,114,5,0,0,0,218,9, - 102,105,110,100,95,115,112,101,99,218,4,0,0,115,10,0, + 102,105,110,100,95,115,112,101,99,219,4,0,0,115,10,0, 0,0,0,2,12,1,4,1,15,1,19,2,122,25,66,117, 105,108,116,105,110,73,109,112,111,114,116,101,114,46,102,105, 110,100,95,115,112,101,99,99,3,0,0,0,0,0,0,0, @@ -2083,7 +2083,7 @@ const unsigned char _Py_M__importlib[] = { 114,10,1,0,0,114,169,0,0,0,41,4,114,8,1,0, 0,114,158,0,0,0,114,35,0,0,0,114,177,0,0,0, 114,4,0,0,0,114,4,0,0,0,114,5,0,0,0,218, - 11,102,105,110,100,95,109,111,100,117,108,101,227,4,0,0, + 
11,102,105,110,100,95,109,111,100,117,108,101,228,4,0,0, 115,4,0,0,0,0,9,18,1,122,27,66,117,105,108,116, 105,110,73,109,112,111,114,116,101,114,46,102,105,110,100,95, 109,111,100,117,108,101,99,2,0,0,0,0,0,0,0,3, @@ -2098,7 +2098,7 @@ const unsigned char _Py_M__importlib[] = { 110,105,116,95,98,117,105,108,116,105,110,114,204,0,0,0, 114,250,0,0,0,41,3,114,8,1,0,0,114,158,0,0, 0,114,179,0,0,0,114,4,0,0,0,114,4,0,0,0, - 114,5,0,0,0,114,4,1,0,0,239,4,0,0,115,10, + 114,5,0,0,0,114,4,1,0,0,240,4,0,0,115,10, 0,0,0,0,6,13,1,24,1,9,1,9,1,122,27,66, 117,105,108,116,105,110,73,109,112,111,114,116,101,114,46,108, 111,97,100,95,109,111,100,117,108,101,99,2,0,0,0,0, @@ -2110,7 +2110,7 @@ const unsigned char _Py_M__importlib[] = { 101,99,116,115,46,78,114,4,0,0,0,41,2,114,8,1, 0,0,114,158,0,0,0,114,4,0,0,0,114,4,0,0, 0,114,5,0,0,0,218,8,103,101,116,95,99,111,100,101, - 251,4,0,0,115,2,0,0,0,0,4,122,24,66,117,105, + 252,4,0,0,115,2,0,0,0,0,4,122,24,66,117,105, 108,116,105,110,73,109,112,111,114,116,101,114,46,103,101,116, 95,99,111,100,101,99,2,0,0,0,0,0,0,0,2,0, 0,0,1,0,0,0,67,0,0,0,115,4,0,0,0,100, @@ -2120,7 +2120,7 @@ const unsigned char _Py_M__importlib[] = { 118,101,32,115,111,117,114,99,101,32,99,111,100,101,46,78, 114,4,0,0,0,41,2,114,8,1,0,0,114,158,0,0, 0,114,4,0,0,0,114,4,0,0,0,114,5,0,0,0, - 218,10,103,101,116,95,115,111,117,114,99,101,1,5,0,0, + 218,10,103,101,116,95,115,111,117,114,99,101,2,5,0,0, 115,2,0,0,0,0,4,122,26,66,117,105,108,116,105,110, 73,109,112,111,114,116,101,114,46,103,101,116,95,115,111,117, 114,99,101,99,2,0,0,0,0,0,0,0,2,0,0,0, @@ -2130,7 +2130,7 @@ const unsigned char _Py_M__importlib[] = { 100,117,108,101,115,32,97,114,101,32,110,101,118,101,114,32, 112,97,99,107,97,103,101,115,46,70,114,4,0,0,0,41, 2,114,8,1,0,0,114,158,0,0,0,114,4,0,0,0, - 114,4,0,0,0,114,5,0,0,0,114,219,0,0,0,7, + 114,4,0,0,0,114,5,0,0,0,114,219,0,0,0,8, 5,0,0,115,2,0,0,0,0,4,122,26,66,117,105,108, 116,105,110,73,109,112,111,114,116,101,114,46,105,115,95,112, 97,99,107,97,103,101,41,14,114,57,0,0,0,114,56,0, @@ -2140,7 +2140,7 @@ const unsigned char _Py_M__importlib[] = { 0,114,11,1,0,0,114,161,0,0,0,114,4,1,0,0, 114,12,1,0,0,114,13,1,0,0,114,219,0,0,0,114, 4,0,0,0,114,4,0,0,0,114,4,0,0,0,114,5, - 0,0,0,114,7,1,0,0,200,4,0,0,115,28,0,0, + 0,0,0,114,7,1,0,0,201,4,0,0,115,28,0,0, 0,12,7,6,2,18,9,3,1,21,8,3,1,18,11,3, 1,21,11,3,1,21,5,3,1,21,5,3,1,114,7,1, 0,0,99,0,0,0,0,0,0,0,0,0,0,0,0,5, @@ -2180,7 +2180,7 @@ const unsigned char _Py_M__importlib[] = { 122,22,60,109,111,100,117,108,101,32,123,33,114,125,32,40, 102,114,111,122,101,110,41,62,41,2,114,47,0,0,0,114, 57,0,0,0,41,1,218,1,109,114,4,0,0,0,114,4, - 0,0,0,114,5,0,0,0,114,205,0,0,0,23,5,0, + 0,0,0,114,5,0,0,0,114,205,0,0,0,24,5,0, 0,115,2,0,0,0,0,7,122,26,70,114,111,122,101,110, 73,109,112,111,114,116,101,114,46,109,111,100,117,108,101,95, 114,101,112,114,78,99,4,0,0,0,0,0,0,0,4,0, @@ -2192,7 +2192,7 @@ const unsigned char _Py_M__importlib[] = { 114,162,0,0,0,114,173,0,0,0,41,4,114,8,1,0, 0,114,158,0,0,0,114,35,0,0,0,114,9,1,0,0, 114,4,0,0,0,114,4,0,0,0,114,5,0,0,0,114, - 10,1,0,0,32,5,0,0,115,6,0,0,0,0,2,15, + 10,1,0,0,33,5,0,0,115,6,0,0,0,0,2,15, 1,19,2,122,24,70,114,111,122,101,110,73,109,112,111,114, 116,101,114,46,102,105,110,100,95,115,112,101,99,99,3,0, 0,0,0,0,0,0,3,0,0,0,2,0,0,0,67,0, @@ -2207,7 +2207,7 @@ const unsigned char _Py_M__importlib[] = { 41,2,114,106,0,0,0,114,162,0,0,0,41,3,114,8, 1,0,0,114,158,0,0,0,114,35,0,0,0,114,4,0, 0,0,114,4,0,0,0,114,5,0,0,0,114,11,1,0, - 0,39,5,0,0,115,2,0,0,0,0,7,122,26,70,114, + 
0,40,5,0,0,115,2,0,0,0,0,7,122,26,70,114, 111,122,101,110,73,109,112,111,114,116,101,114,46,102,105,110, 100,95,109,111,100,117,108,101,99,1,0,0,0,0,0,0, 0,3,0,0,0,4,0,0,0,67,0,0,0,115,95,0, @@ -2225,7 +2225,7 @@ const unsigned char _Py_M__importlib[] = { 95,102,114,111,122,101,110,95,111,98,106,101,99,116,114,175, 0,0,0,114,63,0,0,0,41,3,114,179,0,0,0,114, 67,0,0,0,114,194,0,0,0,114,4,0,0,0,114,4, - 0,0,0,114,5,0,0,0,114,1,1,0,0,48,5,0, + 0,0,0,114,5,0,0,0,114,1,1,0,0,49,5,0, 0,115,12,0,0,0,0,2,12,1,15,1,18,1,12,1, 18,1,122,26,70,114,111,122,101,110,73,109,112,111,114,116, 101,114,46,101,120,101,99,95,109,111,100,117,108,101,99,2, @@ -2239,7 +2239,7 @@ const unsigned char _Py_M__importlib[] = { 117,108,101,40,41,32,105,110,115,116,101,97,100,46,10,10, 32,32,32,32,32,32,32,32,41,1,114,180,0,0,0,41, 2,114,8,1,0,0,114,158,0,0,0,114,4,0,0,0, - 114,4,0,0,0,114,5,0,0,0,114,4,1,0,0,57, + 114,4,0,0,0,114,5,0,0,0,114,4,1,0,0,58, 5,0,0,115,2,0,0,0,0,7,122,26,70,114,111,122, 101,110,73,109,112,111,114,116,101,114,46,108,111,97,100,95, 109,111,100,117,108,101,99,2,0,0,0,0,0,0,0,2, @@ -2250,7 +2250,7 @@ const unsigned char _Py_M__importlib[] = { 102,114,111,122,101,110,32,109,111,100,117,108,101,46,41,2, 114,106,0,0,0,114,18,1,0,0,41,2,114,8,1,0, 0,114,158,0,0,0,114,4,0,0,0,114,4,0,0,0, - 114,5,0,0,0,114,12,1,0,0,66,5,0,0,115,2, + 114,5,0,0,0,114,12,1,0,0,67,5,0,0,115,2, 0,0,0,0,4,122,23,70,114,111,122,101,110,73,109,112, 111,114,116,101,114,46,103,101,116,95,99,111,100,101,99,2, 0,0,0,0,0,0,0,2,0,0,0,1,0,0,0,67, @@ -2260,7 +2260,7 @@ const unsigned char _Py_M__importlib[] = { 32,110,111,116,32,104,97,118,101,32,115,111,117,114,99,101, 32,99,111,100,101,46,78,114,4,0,0,0,41,2,114,8, 1,0,0,114,158,0,0,0,114,4,0,0,0,114,4,0, - 0,0,114,5,0,0,0,114,13,1,0,0,72,5,0,0, + 0,0,114,5,0,0,0,114,13,1,0,0,73,5,0,0, 115,2,0,0,0,0,4,122,25,70,114,111,122,101,110,73, 109,112,111,114,116,101,114,46,103,101,116,95,115,111,117,114, 99,101,99,2,0,0,0,0,0,0,0,2,0,0,0,2, @@ -2272,7 +2272,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,90,17,105,115,95,102,114,111,122,101,110,95,112,97, 99,107,97,103,101,41,2,114,8,1,0,0,114,158,0,0, 0,114,4,0,0,0,114,4,0,0,0,114,5,0,0,0, - 114,219,0,0,0,78,5,0,0,115,2,0,0,0,0,4, + 114,219,0,0,0,79,5,0,0,115,2,0,0,0,0,4, 122,25,70,114,111,122,101,110,73,109,112,111,114,116,101,114, 46,105,115,95,112,97,99,107,97,103,101,41,15,114,57,0, 0,0,114,56,0,0,0,114,58,0,0,0,114,59,0,0, @@ -2281,7 +2281,7 @@ const unsigned char _Py_M__importlib[] = { 4,1,0,0,114,164,0,0,0,114,12,1,0,0,114,13, 1,0,0,114,219,0,0,0,114,4,0,0,0,114,4,0, 0,0,114,4,0,0,0,114,5,0,0,0,114,16,1,0, - 0,14,5,0,0,115,28,0,0,0,12,7,6,2,18,9, + 0,15,5,0,0,115,28,0,0,0,12,7,6,2,18,9, 3,1,21,6,3,1,18,8,18,9,18,9,3,1,21,5, 3,1,21,5,3,1,114,16,1,0,0,99,0,0,0,0, 0,0,0,0,0,0,0,0,5,0,0,0,64,0,0,0, @@ -2319,7 +2319,7 @@ const unsigned char _Py_M__importlib[] = { 95,77,65,67,72,73,78,69,41,2,114,8,1,0,0,218, 3,107,101,121,114,4,0,0,0,114,4,0,0,0,114,5, 0,0,0,218,14,95,111,112,101,110,95,114,101,103,105,115, - 116,114,121,97,5,0,0,115,8,0,0,0,0,2,3,1, + 116,114,121,98,5,0,0,115,8,0,0,0,0,2,3,1, 23,1,13,1,122,36,87,105,110,100,111,119,115,82,101,103, 105,115,116,114,121,70,105,110,100,101,114,46,95,111,112,101, 110,95,114,101,103,105,115,116,114,121,99,2,0,0,0,0, @@ -2346,7 +2346,7 @@ const unsigned char _Py_M__importlib[] = { 4,104,107,101,121,218,8,102,105,108,101,112,97,116,104,114, 4,0,0,0,114,4,0,0,0,114,5,0,0,0,218,16, 95,115,101,97,114,99,104,95,114,101,103,105,115,116,114,121, - 104,5,0,0,115,22,0,0,0,0,2,9,1,12,2,9, + 
105,5,0,0,115,22,0,0,0,0,2,9,1,12,2,9, 1,15,1,22,1,3,1,18,1,28,1,13,1,9,1,122, 38,87,105,110,100,111,119,115,82,101,103,105,115,116,114,121, 70,105,110,100,101,114,46,95,115,101,97,114,99,104,95,114, @@ -2368,7 +2368,7 @@ const unsigned char _Py_M__importlib[] = { 0,114,158,0,0,0,114,35,0,0,0,114,9,1,0,0, 114,27,1,0,0,114,169,0,0,0,114,127,0,0,0,114, 177,0,0,0,114,4,0,0,0,114,4,0,0,0,114,5, - 0,0,0,114,10,1,0,0,119,5,0,0,115,24,0,0, + 0,0,0,114,10,1,0,0,120,5,0,0,115,24,0,0, 0,0,2,15,1,12,1,4,1,3,1,14,1,13,1,9, 1,22,1,21,1,21,1,9,1,122,31,87,105,110,100,111, 119,115,82,101,103,105,115,116,114,121,70,105,110,100,101,114, @@ -2387,7 +2387,7 @@ const unsigned char _Py_M__importlib[] = { 32,78,41,2,114,10,1,0,0,114,169,0,0,0,41,4, 114,8,1,0,0,114,158,0,0,0,114,35,0,0,0,114, 177,0,0,0,114,4,0,0,0,114,4,0,0,0,114,5, - 0,0,0,114,11,1,0,0,134,5,0,0,115,8,0,0, + 0,0,0,114,11,1,0,0,135,5,0,0,115,8,0,0, 0,0,7,18,1,12,1,7,2,122,33,87,105,110,100,111, 119,115,82,101,103,105,115,116,114,121,70,105,110,100,101,114, 46,102,105,110,100,95,109,111,100,117,108,101,41,12,114,57, @@ -2396,7 +2396,7 @@ const unsigned char _Py_M__importlib[] = { 0,114,15,1,0,0,114,22,1,0,0,114,28,1,0,0, 114,10,1,0,0,114,11,1,0,0,114,4,0,0,0,114, 4,0,0,0,114,4,0,0,0,114,5,0,0,0,114,19, - 1,0,0,85,5,0,0,115,20,0,0,0,12,2,6,3, + 1,0,0,86,5,0,0,115,20,0,0,0,12,2,6,3, 6,3,6,2,6,2,18,7,18,15,3,1,21,14,3,1, 114,19,1,0,0,99,0,0,0,0,0,0,0,0,0,0, 0,0,2,0,0,0,64,0,0,0,115,52,0,0,0,101, @@ -2432,7 +2432,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,114,158,0,0,0,114,131,0,0,0,90,13,102,105, 108,101,110,97,109,101,95,98,97,115,101,90,9,116,97,105, 108,95,110,97,109,101,114,4,0,0,0,114,4,0,0,0, - 114,5,0,0,0,114,219,0,0,0,153,5,0,0,115,8, + 114,5,0,0,0,114,219,0,0,0,154,5,0,0,115,8, 0,0,0,0,3,25,1,22,1,19,1,122,24,95,76,111, 97,100,101,114,66,97,115,105,99,115,46,105,115,95,112,97, 99,107,97,103,101,99,2,0,0,0,0,0,0,0,3,0, @@ -2451,14 +2451,14 @@ const unsigned char _Py_M__importlib[] = { 0,0,0,114,114,0,0,0,114,175,0,0,0,114,63,0, 0,0,41,3,114,71,0,0,0,114,179,0,0,0,114,194, 0,0,0,114,4,0,0,0,114,4,0,0,0,114,5,0, - 0,0,114,1,1,0,0,161,5,0,0,115,10,0,0,0, + 0,0,114,1,1,0,0,162,5,0,0,115,10,0,0,0, 0,2,18,1,12,1,3,1,24,1,122,25,95,76,111,97, 100,101,114,66,97,115,105,99,115,46,101,120,101,99,95,109, 111,100,117,108,101,78,41,8,114,57,0,0,0,114,56,0, 0,0,114,58,0,0,0,114,59,0,0,0,114,219,0,0, 0,114,1,1,0,0,114,180,0,0,0,114,4,1,0,0, 114,4,0,0,0,114,4,0,0,0,114,4,0,0,0,114, - 5,0,0,0,114,29,1,0,0,148,5,0,0,115,8,0, + 5,0,0,0,114,29,1,0,0,149,5,0,0,115,8,0, 0,0,12,3,6,2,12,8,12,8,114,29,1,0,0,99, 0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0, 64,0,0,0,115,106,0,0,0,101,0,0,90,1,0,100, @@ -2486,7 +2486,7 @@ const unsigned char _Py_M__importlib[] = { 218,7,73,79,69,114,114,111,114,41,2,114,71,0,0,0, 114,35,0,0,0,114,4,0,0,0,114,4,0,0,0,114, 5,0,0,0,218,10,112,97,116,104,95,109,116,105,109,101, - 174,5,0,0,115,2,0,0,0,0,6,122,23,83,111,117, + 175,5,0,0,115,2,0,0,0,0,6,122,23,83,111,117, 114,99,101,76,111,97,100,101,114,46,112,97,116,104,95,109, 116,105,109,101,99,2,0,0,0,0,0,0,0,2,0,0, 0,3,0,0,0,67,0,0,0,115,20,0,0,0,105,1, @@ -2521,7 +2521,7 @@ const unsigned char _Py_M__importlib[] = { 32,32,32,114,183,0,0,0,41,1,114,32,1,0,0,41, 2,114,71,0,0,0,114,35,0,0,0,114,4,0,0,0, 114,4,0,0,0,114,5,0,0,0,218,10,112,97,116,104, - 95,115,116,97,116,115,182,5,0,0,115,2,0,0,0,0, + 95,115,116,97,116,115,183,5,0,0,115,2,0,0,0,0, 11,122,23,83,111,117,114,99,101,76,111,97,100,101,114,46, 112,97,116,104,95,115,116,97,116,115,99,4,0,0,0,0, 0,0,0,4,0,0,0,3,0,0,0,67,0,0,0,115, 
@@ -2545,7 +2545,7 @@ const unsigned char _Py_M__importlib[] = { 141,0,0,0,90,10,99,97,99,104,101,95,112,97,116,104, 114,53,0,0,0,114,4,0,0,0,114,4,0,0,0,114, 5,0,0,0,218,15,95,99,97,99,104,101,95,98,121,116, - 101,99,111,100,101,195,5,0,0,115,2,0,0,0,0,8, + 101,99,111,100,101,196,5,0,0,115,2,0,0,0,0,8, 122,28,83,111,117,114,99,101,76,111,97,100,101,114,46,95, 99,97,99,104,101,95,98,121,116,101,99,111,100,101,99,3, 0,0,0,0,0,0,0,3,0,0,0,1,0,0,0,67, @@ -2562,7 +2562,7 @@ const unsigned char _Py_M__importlib[] = { 32,32,32,32,32,32,78,114,4,0,0,0,41,3,114,71, 0,0,0,114,35,0,0,0,114,53,0,0,0,114,4,0, 0,0,114,4,0,0,0,114,5,0,0,0,114,34,1,0, - 0,205,5,0,0,115,0,0,0,0,122,21,83,111,117,114, + 0,206,5,0,0,115,0,0,0,0,122,21,83,111,117,114, 99,101,76,111,97,100,101,114,46,115,101,116,95,100,97,116, 97,99,2,0,0,0,0,0,0,0,5,0,0,0,16,0, 0,0,67,0,0,0,115,105,0,0,0,124,0,0,106,0, @@ -2583,7 +2583,7 @@ const unsigned char _Py_M__importlib[] = { 0,114,153,0,0,0,114,203,0,0,0,41,5,114,71,0, 0,0,114,158,0,0,0,114,35,0,0,0,114,201,0,0, 0,218,3,101,120,99,114,4,0,0,0,114,4,0,0,0, - 114,5,0,0,0,114,13,1,0,0,212,5,0,0,115,14, + 114,5,0,0,0,114,13,1,0,0,213,5,0,0,115,14, 0,0,0,0,2,15,1,3,1,19,1,18,1,9,1,31, 1,122,23,83,111,117,114,99,101,76,111,97,100,101,114,46, 103,101,116,95,115,111,117,114,99,101,218,9,95,111,112,116, @@ -2605,7 +2605,7 @@ const unsigned char _Py_M__importlib[] = { 108,101,41,4,114,71,0,0,0,114,53,0,0,0,114,35, 0,0,0,114,38,1,0,0,114,4,0,0,0,114,4,0, 0,0,114,5,0,0,0,218,14,115,111,117,114,99,101,95, - 116,111,95,99,111,100,101,222,5,0,0,115,4,0,0,0, + 116,111,95,99,111,100,101,223,5,0,0,115,4,0,0,0, 0,5,18,1,122,27,83,111,117,114,99,101,76,111,97,100, 101,114,46,115,111,117,114,99,101,95,116,111,95,99,111,100, 101,99,2,0,0,0,0,0,0,0,10,0,0,0,45,0, @@ -2666,7 +2666,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,218,10,98,121,116,101,115,95,100,97,116,97,114,201, 0,0,0,90,11,99,111,100,101,95,111,98,106,101,99,116, 114,4,0,0,0,114,4,0,0,0,114,5,0,0,0,114, - 12,1,0,0,230,5,0,0,115,78,0,0,0,0,7,15, + 12,1,0,0,231,5,0,0,115,78,0,0,0,0,7,15, 1,6,1,3,1,16,1,13,1,11,2,3,1,19,1,13, 1,5,2,16,1,3,1,19,1,13,1,5,2,3,1,9, 1,12,1,13,1,19,1,5,2,9,1,7,1,15,1,6, @@ -2678,7 +2678,7 @@ const unsigned char _Py_M__importlib[] = { 1,0,0,114,35,1,0,0,114,34,1,0,0,114,13,1, 0,0,114,41,1,0,0,114,12,1,0,0,114,4,0,0, 0,114,4,0,0,0,114,4,0,0,0,114,5,0,0,0, - 114,30,1,0,0,172,5,0,0,115,14,0,0,0,12,2, + 114,30,1,0,0,173,5,0,0,115,14,0,0,0,12,2, 12,8,12,13,12,10,12,7,12,10,18,8,114,30,1,0, 0,99,0,0,0,0,0,0,0,0,0,0,0,0,4,0, 0,0,0,0,0,0,115,112,0,0,0,101,0,0,90,1, @@ -2707,7 +2707,7 @@ const unsigned char _Py_M__importlib[] = { 46,78,41,2,114,67,0,0,0,114,35,0,0,0,41,3, 114,71,0,0,0,114,158,0,0,0,114,35,0,0,0,114, 4,0,0,0,114,4,0,0,0,114,5,0,0,0,114,72, - 0,0,0,31,6,0,0,115,4,0,0,0,0,3,9,1, + 0,0,0,32,6,0,0,115,4,0,0,0,0,3,9,1, 122,19,70,105,108,101,76,111,97,100,101,114,46,95,95,105, 110,105,116,95,95,99,2,0,0,0,0,0,0,0,2,0, 0,0,3,0,0,0,67,0,0,0,115,34,0,0,0,124, @@ -2716,7 +2716,7 @@ const unsigned char _Py_M__importlib[] = { 83,41,1,78,41,2,114,224,0,0,0,114,63,0,0,0, 41,2,114,71,0,0,0,114,227,0,0,0,114,4,0,0, 0,114,4,0,0,0,114,5,0,0,0,114,229,0,0,0, - 37,6,0,0,115,4,0,0,0,0,1,18,1,122,17,70, + 38,6,0,0,115,4,0,0,0,0,1,18,1,122,17,70, 105,108,101,76,111,97,100,101,114,46,95,95,101,113,95,95, 99,1,0,0,0,0,0,0,0,1,0,0,0,3,0,0, 0,67,0,0,0,115,26,0,0,0,116,0,0,124,0,0, @@ -2724,7 +2724,7 @@ const unsigned char _Py_M__importlib[] = { 1,0,65,83,41,1,78,41,3,218,4,104,97,115,104,114, 
67,0,0,0,114,35,0,0,0,41,1,114,71,0,0,0, 114,4,0,0,0,114,4,0,0,0,114,5,0,0,0,218, - 8,95,95,104,97,115,104,95,95,41,6,0,0,115,2,0, + 8,95,95,104,97,115,104,95,95,42,6,0,0,115,2,0, 0,0,0,1,122,19,70,105,108,101,76,111,97,100,101,114, 46,95,95,104,97,115,104,95,95,99,2,0,0,0,0,0, 0,0,2,0,0,0,3,0,0,0,3,0,0,0,115,22, @@ -2739,7 +2739,7 @@ const unsigned char _Py_M__importlib[] = { 32,41,3,218,5,115,117,112,101,114,114,45,1,0,0,114, 4,1,0,0,41,2,114,71,0,0,0,114,158,0,0,0, 41,1,114,224,0,0,0,114,4,0,0,0,114,5,0,0, - 0,114,4,1,0,0,44,6,0,0,115,2,0,0,0,0, + 0,114,4,1,0,0,45,6,0,0,115,2,0,0,0,0, 10,122,22,70,105,108,101,76,111,97,100,101,114,46,108,111, 97,100,95,109,111,100,117,108,101,99,2,0,0,0,0,0, 0,0,2,0,0,0,1,0,0,0,67,0,0,0,115,7, @@ -2749,7 +2749,7 @@ const unsigned char _Py_M__importlib[] = { 32,97,115,32,102,111,117,110,100,32,98,121,32,116,104,101, 32,102,105,110,100,101,114,46,41,1,114,35,0,0,0,41, 2,114,71,0,0,0,114,158,0,0,0,114,4,0,0,0, - 114,4,0,0,0,114,5,0,0,0,114,238,0,0,0,56, + 114,4,0,0,0,114,5,0,0,0,114,238,0,0,0,57, 6,0,0,115,2,0,0,0,0,3,122,23,70,105,108,101, 76,111,97,100,101,114,46,103,101,116,95,102,105,108,101,110, 97,109,101,99,2,0,0,0,0,0,0,0,3,0,0,0, @@ -2762,7 +2762,7 @@ const unsigned char _Py_M__importlib[] = { 46,218,1,114,78,41,3,114,49,0,0,0,114,50,0,0, 0,90,4,114,101,97,100,41,3,114,71,0,0,0,114,35, 0,0,0,114,54,0,0,0,114,4,0,0,0,114,4,0, - 0,0,114,5,0,0,0,114,36,1,0,0,61,6,0,0, + 0,0,114,5,0,0,0,114,36,1,0,0,62,6,0,0, 115,4,0,0,0,0,2,21,1,122,19,70,105,108,101,76, 111,97,100,101,114,46,103,101,116,95,100,97,116,97,41,11, 114,57,0,0,0,114,56,0,0,0,114,58,0,0,0,114, @@ -2770,7 +2770,7 @@ const unsigned char _Py_M__importlib[] = { 1,0,0,114,156,0,0,0,114,4,1,0,0,114,238,0, 0,0,114,36,1,0,0,114,4,0,0,0,114,4,0,0, 0,41,1,114,224,0,0,0,114,5,0,0,0,114,45,1, - 0,0,26,6,0,0,115,14,0,0,0,12,3,6,2,12, + 0,0,27,6,0,0,115,14,0,0,0,12,3,6,2,12, 6,12,4,12,3,24,12,18,5,114,45,1,0,0,99,0, 0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,64, 0,0,0,115,64,0,0,0,101,0,0,90,1,0,100,0, @@ -2793,7 +2793,7 @@ const unsigned char _Py_M__importlib[] = { 218,8,115,116,95,109,116,105,109,101,90,7,115,116,95,115, 105,122,101,41,3,114,71,0,0,0,114,35,0,0,0,114, 43,1,0,0,114,4,0,0,0,114,4,0,0,0,114,5, - 0,0,0,114,33,1,0,0,71,6,0,0,115,4,0,0, + 0,0,0,114,33,1,0,0,72,6,0,0,115,4,0,0, 0,0,2,12,1,122,27,83,111,117,114,99,101,70,105,108, 101,76,111,97,100,101,114,46,112,97,116,104,95,115,116,97, 116,115,99,4,0,0,0,0,0,0,0,5,0,0,0,5, @@ -2804,7 +2804,7 @@ const unsigned char _Py_M__importlib[] = { 34,1,0,0,41,5,114,71,0,0,0,114,141,0,0,0, 114,140,0,0,0,114,53,0,0,0,114,42,0,0,0,114, 4,0,0,0,114,4,0,0,0,114,5,0,0,0,114,35, - 1,0,0,76,6,0,0,115,4,0,0,0,0,2,12,1, + 1,0,0,77,6,0,0,115,4,0,0,0,0,2,12,1, 122,32,83,111,117,114,99,101,70,105,108,101,76,111,97,100, 101,114,46,95,99,97,99,104,101,95,98,121,116,101,99,111, 100,101,114,52,1,0,0,105,182,1,0,0,99,3,0,0, @@ -2842,7 +2842,7 @@ const unsigned char _Py_M__importlib[] = { 1,0,0,114,233,0,0,0,114,131,0,0,0,114,27,0, 0,0,114,23,0,0,0,114,37,1,0,0,114,4,0,0, 0,114,4,0,0,0,114,5,0,0,0,114,34,1,0,0, - 81,6,0,0,115,38,0,0,0,0,2,18,1,6,2,22, + 82,6,0,0,115,38,0,0,0,0,2,18,1,6,2,22, 1,18,1,17,2,19,1,15,1,3,1,17,1,13,2,7, 1,18,3,16,1,27,1,3,1,16,1,17,1,18,2,122, 25,83,111,117,114,99,101,70,105,108,101,76,111,97,100,101, @@ -2850,7 +2850,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,114,56,0,0,0,114,58,0,0,0,114,59,0,0, 0,114,33,1,0,0,114,35,1,0,0,114,34,1,0,0, 114,4,0,0,0,114,4,0,0,0,114,4,0,0,0,114, - 5,0,0,0,114,50,1,0,0,67,6,0,0,115,8,0, + 
5,0,0,0,114,50,1,0,0,68,6,0,0,115,8,0, 0,0,12,2,6,2,12,5,12,5,114,50,1,0,0,99, 0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0, 64,0,0,0,115,46,0,0,0,101,0,0,90,1,0,100, @@ -2872,7 +2872,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,114,190,0,0,0,114,195,0,0,0,41,5,114,71, 0,0,0,114,158,0,0,0,114,35,0,0,0,114,53,0, 0,0,114,44,1,0,0,114,4,0,0,0,114,4,0,0, - 0,114,5,0,0,0,114,12,1,0,0,114,6,0,0,115, + 0,114,5,0,0,0,114,12,1,0,0,115,6,0,0,115, 8,0,0,0,0,1,15,1,15,1,24,1,122,29,83,111, 117,114,99,101,108,101,115,115,70,105,108,101,76,111,97,100, 101,114,46,103,101,116,95,99,111,100,101,99,2,0,0,0, @@ -2882,14 +2882,14 @@ const unsigned char _Py_M__importlib[] = { 101,32,105,115,32,110,111,32,115,111,117,114,99,101,32,99, 111,100,101,46,78,114,4,0,0,0,41,2,114,71,0,0, 0,114,158,0,0,0,114,4,0,0,0,114,4,0,0,0, - 114,5,0,0,0,114,13,1,0,0,120,6,0,0,115,2, + 114,5,0,0,0,114,13,1,0,0,121,6,0,0,115,2, 0,0,0,0,2,122,31,83,111,117,114,99,101,108,101,115, 115,70,105,108,101,76,111,97,100,101,114,46,103,101,116,95, 115,111,117,114,99,101,78,41,6,114,57,0,0,0,114,56, 0,0,0,114,58,0,0,0,114,59,0,0,0,114,12,1, 0,0,114,13,1,0,0,114,4,0,0,0,114,4,0,0, 0,114,4,0,0,0,114,5,0,0,0,114,54,1,0,0, - 110,6,0,0,115,6,0,0,0,12,2,6,2,12,6,114, + 111,6,0,0,115,6,0,0,0,12,2,6,2,12,6,114, 54,1,0,0,99,0,0,0,0,0,0,0,0,0,0,0, 0,3,0,0,0,64,0,0,0,115,130,0,0,0,101,0, 0,90,1,0,100,0,0,90,2,0,100,1,0,90,3,0, @@ -2913,7 +2913,7 @@ const unsigned char _Py_M__importlib[] = { 1,0,100,0,0,83,41,1,78,41,2,114,67,0,0,0, 114,35,0,0,0,41,3,114,71,0,0,0,114,67,0,0, 0,114,35,0,0,0,114,4,0,0,0,114,4,0,0,0, - 114,5,0,0,0,114,72,0,0,0,137,6,0,0,115,4, + 114,5,0,0,0,114,72,0,0,0,138,6,0,0,115,4, 0,0,0,0,1,9,1,122,28,69,120,116,101,110,115,105, 111,110,70,105,108,101,76,111,97,100,101,114,46,95,95,105, 110,105,116,95,95,99,2,0,0,0,0,0,0,0,2,0, @@ -2923,7 +2923,7 @@ const unsigned char _Py_M__importlib[] = { 83,41,1,78,41,2,114,224,0,0,0,114,63,0,0,0, 41,2,114,71,0,0,0,114,227,0,0,0,114,4,0,0, 0,114,4,0,0,0,114,5,0,0,0,114,229,0,0,0, - 141,6,0,0,115,4,0,0,0,0,1,18,1,122,26,69, + 142,6,0,0,115,4,0,0,0,0,1,18,1,122,26,69, 120,116,101,110,115,105,111,110,70,105,108,101,76,111,97,100, 101,114,46,95,95,101,113,95,95,99,1,0,0,0,0,0, 0,0,1,0,0,0,3,0,0,0,67,0,0,0,115,26, @@ -2931,7 +2931,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,124,0,0,106,2,0,131,1,0,65,83,41,1,78, 41,3,114,46,1,0,0,114,67,0,0,0,114,35,0,0, 0,41,1,114,71,0,0,0,114,4,0,0,0,114,4,0, - 0,0,114,5,0,0,0,114,47,1,0,0,145,6,0,0, + 0,0,114,5,0,0,0,114,47,1,0,0,146,6,0,0, 115,2,0,0,0,0,1,122,28,69,120,116,101,110,115,105, 111,110,70,105,108,101,76,111,97,100,101,114,46,95,95,104, 97,115,104,95,95,99,2,0,0,0,0,0,0,0,4,0, @@ -2960,7 +2960,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,114,32,0,0,0,41,4,114,71,0,0,0,114,158, 0,0,0,114,179,0,0,0,114,219,0,0,0,114,4,0, 0,0,114,4,0,0,0,114,5,0,0,0,114,4,1,0, - 0,148,6,0,0,115,24,0,0,0,0,5,13,1,9,1, + 0,149,6,0,0,115,24,0,0,0,0,5,13,1,9,1, 21,1,16,1,15,1,22,1,28,1,9,1,12,1,6,1, 28,1,122,31,69,120,116,101,110,115,105,111,110,70,105,108, 101,76,111,97,100,101,114,46,108,111,97,100,95,109,111,100, @@ -2979,7 +2979,7 @@ const unsigned char _Py_M__importlib[] = { 41,2,114,72,0,0,0,78,114,4,0,0,0,41,2,114, 22,0,0,0,218,6,115,117,102,102,105,120,41,1,218,9, 102,105,108,101,95,110,97,109,101,114,4,0,0,0,114,5, - 0,0,0,114,77,0,0,0,169,6,0,0,115,2,0,0, + 0,0,0,114,77,0,0,0,170,6,0,0,115,2,0,0, 0,6,1,122,49,69,120,116,101,110,115,105,111,110,70,105, 108,101,76,111,97,100,101,114,46,105,115,95,112,97,99,107, 
97,103,101,46,60,108,111,99,97,108,115,62,46,60,103,101, @@ -2987,7 +2987,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,114,78,0,0,0,218,18,69,88,84,69,78,83,73, 79,78,95,83,85,70,70,73,88,69,83,41,2,114,71,0, 0,0,114,158,0,0,0,114,4,0,0,0,41,1,114,57, - 1,0,0,114,5,0,0,0,114,219,0,0,0,166,6,0, + 1,0,0,114,5,0,0,0,114,219,0,0,0,167,6,0, 0,115,6,0,0,0,0,2,19,1,18,1,122,30,69,120, 116,101,110,115,105,111,110,70,105,108,101,76,111,97,100,101, 114,46,105,115,95,112,97,99,107,97,103,101,99,2,0,0, @@ -2999,7 +2999,7 @@ const unsigned char _Py_M__importlib[] = { 32,99,111,100,101,32,111,98,106,101,99,116,46,78,114,4, 0,0,0,41,2,114,71,0,0,0,114,158,0,0,0,114, 4,0,0,0,114,4,0,0,0,114,5,0,0,0,114,12, - 1,0,0,172,6,0,0,115,2,0,0,0,0,2,122,28, + 1,0,0,173,6,0,0,115,2,0,0,0,0,2,122,28, 69,120,116,101,110,115,105,111,110,70,105,108,101,76,111,97, 100,101,114,46,103,101,116,95,99,111,100,101,99,2,0,0, 0,0,0,0,0,2,0,0,0,1,0,0,0,67,0,0, @@ -3009,7 +3009,7 @@ const unsigned char _Py_M__importlib[] = { 97,118,101,32,110,111,32,115,111,117,114,99,101,32,99,111, 100,101,46,78,114,4,0,0,0,41,2,114,71,0,0,0, 114,158,0,0,0,114,4,0,0,0,114,4,0,0,0,114, - 5,0,0,0,114,13,1,0,0,176,6,0,0,115,2,0, + 5,0,0,0,114,13,1,0,0,177,6,0,0,115,2,0, 0,0,0,2,122,30,69,120,116,101,110,115,105,111,110,70, 105,108,101,76,111,97,100,101,114,46,103,101,116,95,115,111, 117,114,99,101,99,2,0,0,0,0,0,0,0,2,0,0, @@ -3020,7 +3020,7 @@ const unsigned char _Py_M__importlib[] = { 111,117,110,100,32,98,121,32,116,104,101,32,102,105,110,100, 101,114,46,41,1,114,35,0,0,0,41,2,114,71,0,0, 0,114,158,0,0,0,114,4,0,0,0,114,4,0,0,0, - 114,5,0,0,0,114,238,0,0,0,180,6,0,0,115,2, + 114,5,0,0,0,114,238,0,0,0,181,6,0,0,115,2, 0,0,0,0,3,122,32,69,120,116,101,110,115,105,111,110, 70,105,108,101,76,111,97,100,101,114,46,103,101,116,95,102, 105,108,101,110,97,109,101,78,41,13,114,57,0,0,0,114, @@ -3029,7 +3029,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,114,4,1,0,0,114,219,0,0,0,114,12,1,0, 0,114,13,1,0,0,114,238,0,0,0,114,4,0,0,0, 114,4,0,0,0,114,4,0,0,0,114,5,0,0,0,114, - 55,1,0,0,129,6,0,0,115,18,0,0,0,12,6,6, + 55,1,0,0,130,6,0,0,115,18,0,0,0,12,6,6, 2,12,4,12,4,12,3,18,18,12,6,12,4,12,4,114, 55,1,0,0,99,0,0,0,0,0,0,0,0,0,0,0, 0,2,0,0,0,64,0,0,0,115,130,0,0,0,101,0, @@ -3073,7 +3073,7 @@ const unsigned char _Py_M__importlib[] = { 101,114,41,4,114,71,0,0,0,114,67,0,0,0,114,35, 0,0,0,218,11,112,97,116,104,95,102,105,110,100,101,114, 114,4,0,0,0,114,4,0,0,0,114,5,0,0,0,114, - 72,0,0,0,193,6,0,0,115,8,0,0,0,0,1,9, + 72,0,0,0,194,6,0,0,115,8,0,0,0,0,1,9, 1,9,1,21,1,122,23,95,78,97,109,101,115,112,97,99, 101,80,97,116,104,46,95,95,105,110,105,116,95,95,99,1, 0,0,0,0,0,0,0,4,0,0,0,3,0,0,0,67, @@ -3091,7 +3091,7 @@ const unsigned char _Py_M__importlib[] = { 0,41,4,114,71,0,0,0,114,233,0,0,0,218,3,100, 111,116,114,94,0,0,0,114,4,0,0,0,114,4,0,0, 0,114,5,0,0,0,218,23,95,102,105,110,100,95,112,97, - 114,101,110,116,95,112,97,116,104,95,110,97,109,101,115,199, + 114,101,110,116,95,112,97,116,104,95,110,97,109,101,115,200, 6,0,0,115,8,0,0,0,0,2,27,1,12,2,4,3, 122,38,95,78,97,109,101,115,112,97,99,101,80,97,116,104, 46,95,102,105,110,100,95,112,97,114,101,110,116,95,112,97, @@ -3105,7 +3105,7 @@ const unsigned char _Py_M__importlib[] = { 95,109,111,100,117,108,101,95,110,97,109,101,90,14,112,97, 116,104,95,97,116,116,114,95,110,97,109,101,114,4,0,0, 0,114,4,0,0,0,114,5,0,0,0,114,60,1,0,0, - 209,6,0,0,115,4,0,0,0,0,1,18,1,122,31,95, + 210,6,0,0,115,4,0,0,0,0,1,18,1,122,31,95, 78,97,109,101,115,112,97,99,101,80,97,116,104,46,95,103, 
101,116,95,112,97,114,101,110,116,95,112,97,116,104,99,1, 0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,67, @@ -3123,7 +3123,7 @@ const unsigned char _Py_M__importlib[] = { 253,0,0,0,41,3,114,71,0,0,0,90,11,112,97,114, 101,110,116,95,112,97,116,104,114,177,0,0,0,114,4,0, 0,0,114,4,0,0,0,114,5,0,0,0,218,12,95,114, - 101,99,97,108,99,117,108,97,116,101,213,6,0,0,115,16, + 101,99,97,108,99,117,108,97,116,101,214,6,0,0,115,16, 0,0,0,0,2,18,1,15,1,21,3,27,1,9,1,18, 1,12,1,122,27,95,78,97,109,101,115,112,97,99,101,80, 97,116,104,46,95,114,101,99,97,108,99,117,108,97,116,101, @@ -3132,7 +3132,7 @@ const unsigned char _Py_M__importlib[] = { 106,1,0,131,0,0,131,1,0,83,41,1,78,41,2,218, 4,105,116,101,114,114,66,1,0,0,41,1,114,71,0,0, 0,114,4,0,0,0,114,4,0,0,0,114,5,0,0,0, - 218,8,95,95,105,116,101,114,95,95,226,6,0,0,115,2, + 218,8,95,95,105,116,101,114,95,95,227,6,0,0,115,2, 0,0,0,0,1,122,23,95,78,97,109,101,115,112,97,99, 101,80,97,116,104,46,95,95,105,116,101,114,95,95,99,1, 0,0,0,0,0,0,0,1,0,0,0,2,0,0,0,67, @@ -3140,7 +3140,7 @@ const unsigned char _Py_M__importlib[] = { 0,131,0,0,131,1,0,83,41,1,78,41,2,114,31,0, 0,0,114,66,1,0,0,41,1,114,71,0,0,0,114,4, 0,0,0,114,4,0,0,0,114,5,0,0,0,218,7,95, - 95,108,101,110,95,95,229,6,0,0,115,2,0,0,0,0, + 95,108,101,110,95,95,230,6,0,0,115,2,0,0,0,0, 1,122,22,95,78,97,109,101,115,112,97,99,101,80,97,116, 104,46,95,95,108,101,110,95,95,99,1,0,0,0,0,0, 0,0,1,0,0,0,2,0,0,0,67,0,0,0,115,16, @@ -3149,7 +3149,7 @@ const unsigned char _Py_M__importlib[] = { 99,101,80,97,116,104,40,123,33,114,125,41,41,2,114,47, 0,0,0,114,253,0,0,0,41,1,114,71,0,0,0,114, 4,0,0,0,114,4,0,0,0,114,5,0,0,0,114,101, - 0,0,0,232,6,0,0,115,2,0,0,0,0,1,122,23, + 0,0,0,233,6,0,0,115,2,0,0,0,0,1,122,23, 95,78,97,109,101,115,112,97,99,101,80,97,116,104,46,95, 95,114,101,112,114,95,95,99,2,0,0,0,0,0,0,0, 2,0,0,0,2,0,0,0,67,0,0,0,115,16,0,0, @@ -3157,7 +3157,7 @@ const unsigned char _Py_M__importlib[] = { 83,41,1,78,41,1,114,66,1,0,0,41,2,114,71,0, 0,0,218,4,105,116,101,109,114,4,0,0,0,114,4,0, 0,0,114,5,0,0,0,218,12,95,95,99,111,110,116,97, - 105,110,115,95,95,235,6,0,0,115,2,0,0,0,0,1, + 105,110,115,95,95,236,6,0,0,115,2,0,0,0,0,1, 122,27,95,78,97,109,101,115,112,97,99,101,80,97,116,104, 46,95,95,99,111,110,116,97,105,110,115,95,95,99,2,0, 0,0,0,0,0,0,2,0,0,0,2,0,0,0,67,0, @@ -3165,7 +3165,7 @@ const unsigned char _Py_M__importlib[] = { 124,1,0,131,1,0,1,100,0,0,83,41,1,78,41,2, 114,253,0,0,0,114,223,0,0,0,41,2,114,71,0,0, 0,114,70,1,0,0,114,4,0,0,0,114,4,0,0,0, - 114,5,0,0,0,114,223,0,0,0,238,6,0,0,115,2, + 114,5,0,0,0,114,223,0,0,0,239,6,0,0,115,2, 0,0,0,0,1,122,21,95,78,97,109,101,115,112,97,99, 101,80,97,116,104,46,97,112,112,101,110,100,78,41,13,114, 57,0,0,0,114,56,0,0,0,114,58,0,0,0,114,59, @@ -3173,7 +3173,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,114,66,1,0,0,114,68,1,0,0,114,69,1,0, 0,114,101,0,0,0,114,71,1,0,0,114,223,0,0,0, 114,4,0,0,0,114,4,0,0,0,114,4,0,0,0,114, - 5,0,0,0,114,59,1,0,0,186,6,0,0,115,20,0, + 5,0,0,0,114,59,1,0,0,187,6,0,0,115,20,0, 0,0,12,5,6,2,12,6,12,10,12,4,12,13,12,3, 12,3,12,3,12,3,114,59,1,0,0,99,0,0,0,0, 0,0,0,0,0,0,0,0,3,0,0,0,64,0,0,0, @@ -3191,7 +3191,7 @@ const unsigned char _Py_M__importlib[] = { 1,0,0,114,253,0,0,0,41,4,114,71,0,0,0,114, 67,0,0,0,114,35,0,0,0,114,63,1,0,0,114,4, 0,0,0,114,4,0,0,0,114,5,0,0,0,114,72,0, - 0,0,244,6,0,0,115,2,0,0,0,0,1,122,25,95, + 0,0,245,6,0,0,115,2,0,0,0,0,1,122,25,95, 78,97,109,101,115,112,97,99,101,76,111,97,100,101,114,46, 95,95,105,110,105,116,95,95,99,2,0,0,0,0,0,0, 0,2,0,0,0,2,0,0,0,67,0,0,0,115,16,0, @@ -3208,21 
+3208,21 @@ const unsigned char _Py_M__importlib[] = { 99,101,41,62,41,2,114,47,0,0,0,114,57,0,0,0, 41,2,114,8,1,0,0,114,179,0,0,0,114,4,0,0, 0,114,4,0,0,0,114,5,0,0,0,114,205,0,0,0, - 247,6,0,0,115,2,0,0,0,0,7,122,28,95,78,97, + 248,6,0,0,115,2,0,0,0,0,7,122,28,95,78,97, 109,101,115,112,97,99,101,76,111,97,100,101,114,46,109,111, 100,117,108,101,95,114,101,112,114,99,2,0,0,0,0,0, 0,0,2,0,0,0,1,0,0,0,67,0,0,0,115,4, 0,0,0,100,1,0,83,41,2,78,84,114,4,0,0,0, 41,2,114,71,0,0,0,114,158,0,0,0,114,4,0,0, 0,114,4,0,0,0,114,5,0,0,0,114,219,0,0,0, - 0,7,0,0,115,2,0,0,0,0,1,122,27,95,78,97, + 1,7,0,0,115,2,0,0,0,0,1,122,27,95,78,97, 109,101,115,112,97,99,101,76,111,97,100,101,114,46,105,115, 95,112,97,99,107,97,103,101,99,2,0,0,0,0,0,0, 0,2,0,0,0,1,0,0,0,67,0,0,0,115,4,0, 0,0,100,1,0,83,41,2,78,114,30,0,0,0,114,4, 0,0,0,41,2,114,71,0,0,0,114,158,0,0,0,114, 4,0,0,0,114,4,0,0,0,114,5,0,0,0,114,13, - 1,0,0,3,7,0,0,115,2,0,0,0,0,1,122,27, + 1,0,0,4,7,0,0,115,2,0,0,0,0,1,122,27, 95,78,97,109,101,115,112,97,99,101,76,111,97,100,101,114, 46,103,101,116,95,115,111,117,114,99,101,99,2,0,0,0, 0,0,0,0,2,0,0,0,6,0,0,0,67,0,0,0, @@ -3231,14 +3231,14 @@ const unsigned char _Py_M__importlib[] = { 0,0,0,122,8,60,115,116,114,105,110,103,62,114,175,0, 0,0,114,39,1,0,0,84,41,1,114,40,1,0,0,41, 2,114,71,0,0,0,114,158,0,0,0,114,4,0,0,0, - 114,4,0,0,0,114,5,0,0,0,114,12,1,0,0,6, + 114,4,0,0,0,114,5,0,0,0,114,12,1,0,0,7, 7,0,0,115,2,0,0,0,0,1,122,25,95,78,97,109, 101,115,112,97,99,101,76,111,97,100,101,114,46,103,101,116, 95,99,111,100,101,99,2,0,0,0,0,0,0,0,2,0, 0,0,1,0,0,0,67,0,0,0,115,4,0,0,0,100, 0,0,83,41,1,78,114,4,0,0,0,41,2,114,71,0, 0,0,114,179,0,0,0,114,4,0,0,0,114,4,0,0, - 0,114,5,0,0,0,114,1,1,0,0,9,7,0,0,115, + 0,114,5,0,0,0,114,1,1,0,0,10,7,0,0,115, 2,0,0,0,0,1,122,28,95,78,97,109,101,115,112,97, 99,101,76,111,97,100,101,114,46,101,120,101,99,95,109,111, 100,117,108,101,99,2,0,0,0,0,0,0,0,2,0,0, @@ -3256,7 +3256,7 @@ const unsigned char _Py_M__importlib[] = { 112,97,116,104,32,123,33,114,125,41,3,114,152,0,0,0, 114,253,0,0,0,114,180,0,0,0,41,2,114,71,0,0, 0,114,158,0,0,0,114,4,0,0,0,114,4,0,0,0, - 114,5,0,0,0,114,4,1,0,0,12,7,0,0,115,4, + 114,5,0,0,0,114,4,1,0,0,13,7,0,0,115,4, 0,0,0,0,7,16,1,122,28,95,78,97,109,101,115,112, 97,99,101,76,111,97,100,101,114,46,108,111,97,100,95,109, 111,100,117,108,101,78,41,11,114,57,0,0,0,114,56,0, @@ -3264,7 +3264,7 @@ const unsigned char _Py_M__importlib[] = { 0,114,205,0,0,0,114,219,0,0,0,114,13,1,0,0, 114,12,1,0,0,114,1,1,0,0,114,4,1,0,0,114, 4,0,0,0,114,4,0,0,0,114,4,0,0,0,114,5, - 0,0,0,114,251,0,0,0,243,6,0,0,115,14,0,0, + 0,0,0,114,251,0,0,0,244,6,0,0,115,14,0,0, 0,12,1,12,3,18,9,12,3,12,3,12,3,12,3,114, 251,0,0,0,99,0,0,0,0,0,0,0,0,0,0,0, 0,5,0,0,0,64,0,0,0,115,160,0,0,0,101,0, @@ -3302,7 +3302,7 @@ const unsigned char _Py_M__importlib[] = { 101,218,6,118,97,108,117,101,115,114,60,0,0,0,114,73, 1,0,0,41,2,114,8,1,0,0,218,6,102,105,110,100, 101,114,114,4,0,0,0,114,4,0,0,0,114,5,0,0, - 0,114,73,1,0,0,29,7,0,0,115,6,0,0,0,0, + 0,114,73,1,0,0,30,7,0,0,115,6,0,0,0,0, 4,22,1,15,1,122,28,80,97,116,104,70,105,110,100,101, 114,46,105,110,118,97,108,105,100,97,116,101,95,99,97,99, 104,101,115,99,2,0,0,0,0,0,0,0,3,0,0,0, @@ -3326,7 +3326,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,114,167,0,0,0,114,168,0,0,0,114,153,0,0, 0,41,3,114,8,1,0,0,114,35,0,0,0,90,4,104, 111,111,107,114,4,0,0,0,114,4,0,0,0,114,5,0, - 0,0,218,11,95,112,97,116,104,95,104,111,111,107,115,37, + 0,0,218,11,95,112,97,116,104,95,104,111,111,107,115,38, 7,0,0,115,16,0,0,0,0,7,9,1,19,1,16,1, 
3,1,14,1,13,1,12,2,122,22,80,97,116,104,70,105, 110,100,101,114,46,95,112,97,116,104,95,104,111,111,107,115, @@ -3356,7 +3356,7 @@ const unsigned char _Py_M__importlib[] = { 0,114,78,1,0,0,41,3,114,8,1,0,0,114,35,0, 0,0,114,76,1,0,0,114,4,0,0,0,114,4,0,0, 0,114,5,0,0,0,218,20,95,112,97,116,104,95,105,109, - 112,111,114,116,101,114,95,99,97,99,104,101,54,7,0,0, + 112,111,114,116,101,114,95,99,97,99,104,101,55,7,0,0, 115,16,0,0,0,0,8,12,1,15,1,3,1,17,1,13, 1,15,1,18,1,122,31,80,97,116,104,70,105,110,100,101, 114,46,95,112,97,116,104,95,105,109,112,111,114,116,101,114, @@ -3375,7 +3375,7 @@ const unsigned char _Py_M__importlib[] = { 114,158,0,0,0,114,76,1,0,0,114,169,0,0,0,114, 170,0,0,0,114,177,0,0,0,114,4,0,0,0,114,4, 0,0,0,114,5,0,0,0,218,16,95,108,101,103,97,99, - 121,95,103,101,116,95,115,112,101,99,71,7,0,0,115,18, + 121,95,103,101,116,95,115,112,101,99,72,7,0,0,115,18, 0,0,0,0,4,15,1,24,2,15,1,6,1,12,1,13, 1,15,1,9,1,122,27,80,97,116,104,70,105,110,100,101, 114,46,95,108,101,103,97,99,121,95,103,101,116,95,115,112, @@ -3411,7 +3411,7 @@ const unsigned char _Py_M__importlib[] = { 97,109,101,115,112,97,99,101,95,112,97,116,104,90,5,101, 110,116,114,121,114,76,1,0,0,114,177,0,0,0,114,170, 0,0,0,114,4,0,0,0,114,4,0,0,0,114,5,0, - 0,0,218,9,95,103,101,116,95,115,112,101,99,86,7,0, + 0,0,218,9,95,103,101,116,95,115,112,101,99,87,7,0, 0,115,40,0,0,0,0,5,6,1,13,1,21,1,6,1, 15,1,12,1,15,1,21,2,18,1,12,1,6,1,15,1, 4,1,9,1,12,1,15,5,20,2,15,1,9,1,122,20, @@ -3439,7 +3439,7 @@ const unsigned char _Py_M__importlib[] = { 59,1,0,0,41,6,114,8,1,0,0,114,158,0,0,0, 114,35,0,0,0,114,9,1,0,0,114,177,0,0,0,114, 83,1,0,0,114,4,0,0,0,114,4,0,0,0,114,5, - 0,0,0,114,10,1,0,0,118,7,0,0,115,26,0,0, + 0,0,0,114,10,1,0,0,119,7,0,0,115,26,0,0, 0,0,4,12,1,12,1,21,1,12,1,4,1,15,1,9, 1,6,3,9,1,24,1,4,2,7,2,122,20,80,97,116, 104,70,105,110,100,101,114,46,102,105,110,100,95,115,112,101, @@ -3461,7 +3461,7 @@ const unsigned char _Py_M__importlib[] = { 32,32,78,41,2,114,10,1,0,0,114,169,0,0,0,41, 4,114,8,1,0,0,114,158,0,0,0,114,35,0,0,0, 114,177,0,0,0,114,4,0,0,0,114,4,0,0,0,114, - 5,0,0,0,114,11,1,0,0,140,7,0,0,115,8,0, + 5,0,0,0,114,11,1,0,0,141,7,0,0,115,8,0, 0,0,0,8,18,1,12,1,4,1,122,22,80,97,116,104, 70,105,110,100,101,114,46,102,105,110,100,95,109,111,100,117, 108,101,41,12,114,57,0,0,0,114,56,0,0,0,114,58, @@ -3469,7 +3469,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,114,78,1,0,0,114,79,1,0,0,114,80,1,0, 0,114,84,1,0,0,114,10,1,0,0,114,11,1,0,0, 114,4,0,0,0,114,4,0,0,0,114,4,0,0,0,114, - 5,0,0,0,114,72,1,0,0,25,7,0,0,115,22,0, + 5,0,0,0,114,72,1,0,0,26,7,0,0,115,22,0, 0,0,12,2,6,2,18,8,18,17,18,17,18,15,3,1, 18,31,3,1,21,21,3,1,114,72,1,0,0,99,0,0, 0,0,0,0,0,0,0,0,0,0,3,0,0,0,64,0, @@ -3518,7 +3518,7 @@ const unsigned char _Py_M__importlib[] = { 136,0,0,102,2,0,86,1,113,3,0,100,0,0,83,41, 1,78,114,4,0,0,0,41,2,114,22,0,0,0,114,56, 1,0,0,41,1,114,169,0,0,0,114,4,0,0,0,114, - 5,0,0,0,114,77,0,0,0,169,7,0,0,115,2,0, + 5,0,0,0,114,77,0,0,0,170,7,0,0,115,2,0, 0,0,6,0,122,38,70,105,108,101,70,105,110,100,101,114, 46,95,95,105,110,105,116,95,95,46,60,108,111,99,97,108, 115,62,46,60,103,101,110,101,120,112,114,62,114,116,0,0, @@ -3531,7 +3531,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,218,14,108,111,97,100,101,114,95,100,101,116,97,105, 108,115,90,7,108,111,97,100,101,114,115,114,127,0,0,0, 114,4,0,0,0,41,1,114,169,0,0,0,114,5,0,0, - 0,114,72,0,0,0,163,7,0,0,115,16,0,0,0,0, + 0,114,72,0,0,0,164,7,0,0,115,16,0,0,0,0, 4,6,1,19,1,36,1,9,2,15,1,9,1,12,1,122, 
19,70,105,108,101,70,105,110,100,101,114,46,95,95,105,110, 105,116,95,95,99,1,0,0,0,0,0,0,0,1,0,0, @@ -3541,7 +3541,7 @@ const unsigned char _Py_M__importlib[] = { 114,101,99,116,111,114,121,32,109,116,105,109,101,46,114,29, 0,0,0,78,114,138,0,0,0,41,1,114,87,1,0,0, 41,1,114,71,0,0,0,114,4,0,0,0,114,4,0,0, - 0,114,5,0,0,0,114,73,1,0,0,177,7,0,0,115, + 0,114,5,0,0,0,114,73,1,0,0,178,7,0,0,115, 2,0,0,0,0,2,122,28,70,105,108,101,70,105,110,100, 101,114,46,105,110,118,97,108,105,100,97,116,101,95,99,97, 99,104,101,115,99,2,0,0,0,0,0,0,0,3,0,0, @@ -3565,7 +3565,7 @@ const unsigned char _Py_M__importlib[] = { 32,32,78,41,3,114,10,1,0,0,114,169,0,0,0,114, 220,0,0,0,41,3,114,71,0,0,0,114,158,0,0,0, 114,177,0,0,0,114,4,0,0,0,114,4,0,0,0,114, - 5,0,0,0,114,165,0,0,0,183,7,0,0,115,8,0, + 5,0,0,0,114,165,0,0,0,184,7,0,0,115,8,0, 0,0,0,7,15,1,12,1,10,1,122,22,70,105,108,101, 70,105,110,100,101,114,46,102,105,110,100,95,108,111,97,100, 101,114,99,6,0,0,0,0,0,0,0,7,0,0,0,7, @@ -3577,7 +3577,7 @@ const unsigned char _Py_M__importlib[] = { 243,0,0,0,114,158,0,0,0,114,35,0,0,0,114,228, 0,0,0,114,9,1,0,0,114,169,0,0,0,114,4,0, 0,0,114,4,0,0,0,114,5,0,0,0,114,84,1,0, - 0,195,7,0,0,115,6,0,0,0,0,1,15,1,18,1, + 0,196,7,0,0,115,6,0,0,0,0,1,15,1,18,1, 122,20,70,105,108,101,70,105,110,100,101,114,46,95,103,101, 116,95,115,112,101,99,78,99,3,0,0,0,0,0,0,0, 14,0,0,0,15,0,0,0,67,0,0,0,115,240,1,0, @@ -3640,7 +3640,7 @@ const unsigned char _Py_M__importlib[] = { 104,114,56,1,0,0,114,243,0,0,0,90,13,105,110,105, 116,95,102,105,108,101,110,97,109,101,90,9,102,117,108,108, 95,112,97,116,104,114,177,0,0,0,114,4,0,0,0,114, - 4,0,0,0,114,5,0,0,0,114,10,1,0,0,200,7, + 4,0,0,0,114,5,0,0,0,114,10,1,0,0,201,7, 0,0,115,68,0,0,0,0,3,6,1,19,1,3,1,34, 1,13,1,11,1,15,1,10,1,12,2,9,1,9,1,15, 2,9,1,6,2,12,1,18,1,22,1,10,1,15,1,12, @@ -3677,7 +3677,7 @@ const unsigned char _Py_M__importlib[] = { 0,146,2,0,113,6,0,83,114,4,0,0,0,41,1,114, 139,0,0,0,41,2,114,22,0,0,0,90,2,102,110,114, 4,0,0,0,114,4,0,0,0,114,5,0,0,0,250,9, - 60,115,101,116,99,111,109,112,62,18,8,0,0,115,2,0, + 60,115,101,116,99,111,109,112,62,19,8,0,0,115,2,0, 0,0,9,0,122,41,70,105,108,101,70,105,110,100,101,114, 46,95,102,105,108,108,95,99,97,99,104,101,46,60,108,111, 99,97,108,115,62,46,60,115,101,116,99,111,109,112,62,78, @@ -3695,7 +3695,7 @@ const unsigned char _Py_M__importlib[] = { 110,116,101,110,116,115,114,70,1,0,0,114,67,0,0,0, 114,64,1,0,0,114,56,1,0,0,90,8,110,101,119,95, 110,97,109,101,114,4,0,0,0,114,4,0,0,0,114,5, - 0,0,0,114,92,1,0,0,245,7,0,0,115,34,0,0, + 0,0,0,114,92,1,0,0,246,7,0,0,115,34,0,0, 0,0,2,9,1,3,1,31,1,22,3,11,3,18,1,18, 7,9,1,13,1,24,1,6,1,27,2,6,1,17,1,9, 1,18,1,122,22,70,105,108,101,70,105,110,100,101,114,46, @@ -3734,7 +3734,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,0,41,1,114,35,0,0,0,41,2,114,8,1,0, 0,114,91,1,0,0,114,4,0,0,0,114,5,0,0,0, 218,24,112,97,116,104,95,104,111,111,107,95,102,111,114,95, - 70,105,108,101,70,105,110,100,101,114,30,8,0,0,115,6, + 70,105,108,101,70,105,110,100,101,114,31,8,0,0,115,6, 0,0,0,0,2,12,1,21,1,122,54,70,105,108,101,70, 105,110,100,101,114,46,112,97,116,104,95,104,111,111,107,46, 60,108,111,99,97,108,115,62,46,112,97,116,104,95,104,111, @@ -3742,7 +3742,7 @@ const unsigned char _Py_M__importlib[] = { 114,114,4,0,0,0,41,3,114,8,1,0,0,114,91,1, 0,0,114,98,1,0,0,114,4,0,0,0,41,2,114,8, 1,0,0,114,91,1,0,0,114,5,0,0,0,218,9,112, - 97,116,104,95,104,111,111,107,20,8,0,0,115,4,0,0, + 97,116,104,95,104,111,111,107,21,8,0,0,115,4,0,0, 0,0,10,21,6,122,20,70,105,108,101,70,105,110,100,101, 
114,46,112,97,116,104,95,104,111,111,107,99,1,0,0,0, 0,0,0,0,1,0,0,0,2,0,0,0,67,0,0,0, @@ -3751,7 +3751,7 @@ const unsigned char _Py_M__importlib[] = { 110,100,101,114,40,123,33,114,125,41,41,2,114,47,0,0, 0,114,35,0,0,0,41,1,114,71,0,0,0,114,4,0, 0,0,114,4,0,0,0,114,5,0,0,0,114,101,0,0, - 0,38,8,0,0,115,2,0,0,0,0,1,122,19,70,105, + 0,39,8,0,0,115,2,0,0,0,0,1,122,19,70,105, 108,101,70,105,110,100,101,114,46,95,95,114,101,112,114,95, 95,41,15,114,57,0,0,0,114,56,0,0,0,114,58,0, 0,0,114,59,0,0,0,114,72,0,0,0,114,73,1,0, @@ -3759,7 +3759,7 @@ const unsigned char _Py_M__importlib[] = { 114,84,1,0,0,114,10,1,0,0,114,92,1,0,0,114, 15,1,0,0,114,99,1,0,0,114,101,0,0,0,114,4, 0,0,0,114,4,0,0,0,114,4,0,0,0,114,5,0, - 0,0,114,85,1,0,0,154,7,0,0,115,20,0,0,0, + 0,0,114,85,1,0,0,155,7,0,0,115,20,0,0,0, 12,7,6,2,12,14,12,4,6,2,12,12,12,5,15,45, 12,31,18,18,114,85,1,0,0,99,0,0,0,0,0,0, 0,0,0,0,0,0,2,0,0,0,64,0,0,0,115,46, @@ -3776,7 +3776,7 @@ const unsigned char _Py_M__importlib[] = { 32,116,104,101,32,105,109,112,111,114,116,32,108,111,99,107, 46,78,41,2,114,106,0,0,0,114,3,1,0,0,41,1, 114,71,0,0,0,114,4,0,0,0,114,4,0,0,0,114, - 5,0,0,0,114,75,0,0,0,48,8,0,0,115,2,0, + 5,0,0,0,114,75,0,0,0,49,8,0,0,115,2,0, 0,0,0,2,122,28,95,73,109,112,111,114,116,76,111,99, 107,67,111,110,116,101,120,116,46,95,95,101,110,116,101,114, 95,95,99,4,0,0,0,0,0,0,0,4,0,0,0,1, @@ -3790,13 +3790,13 @@ const unsigned char _Py_M__importlib[] = { 101,120,99,95,116,121,112,101,90,9,101,120,99,95,118,97, 108,117,101,90,13,101,120,99,95,116,114,97,99,101,98,97, 99,107,114,4,0,0,0,114,4,0,0,0,114,5,0,0, - 0,114,81,0,0,0,52,8,0,0,115,2,0,0,0,0, + 0,114,81,0,0,0,53,8,0,0,115,2,0,0,0,0, 2,122,27,95,73,109,112,111,114,116,76,111,99,107,67,111, 110,116,101,120,116,46,95,95,101,120,105,116,95,95,78,41, 6,114,57,0,0,0,114,56,0,0,0,114,58,0,0,0, 114,59,0,0,0,114,75,0,0,0,114,81,0,0,0,114, 4,0,0,0,114,4,0,0,0,114,4,0,0,0,114,5, - 0,0,0,114,100,1,0,0,44,8,0,0,115,6,0,0, + 0,0,0,114,100,1,0,0,45,8,0,0,115,6,0,0, 0,12,2,6,2,12,4,114,100,1,0,0,99,3,0,0, 0,0,0,0,0,5,0,0,0,4,0,0,0,67,0,0, 0,115,91,0,0,0,124,1,0,106,0,0,100,1,0,124, @@ -3818,7 +3818,7 @@ const unsigned char _Py_M__importlib[] = { 112,97,99,107,97,103,101,218,5,108,101,118,101,108,90,4, 98,105,116,115,90,4,98,97,115,101,114,4,0,0,0,114, 4,0,0,0,114,5,0,0,0,218,13,95,114,101,115,111, - 108,118,101,95,110,97,109,101,57,8,0,0,115,10,0,0, + 108,118,101,95,110,97,109,101,58,8,0,0,115,10,0,0, 0,0,2,22,1,18,1,15,1,10,1,114,103,1,0,0, 99,3,0,0,0,0,0,0,0,4,0,0,0,3,0,0, 0,67,0,0,0,115,47,0,0,0,124,0,0,106,0,0, @@ -3829,7 +3829,7 @@ const unsigned char _Py_M__importlib[] = { 67,0,0,0,114,35,0,0,0,114,169,0,0,0,114,4, 0,0,0,114,4,0,0,0,114,5,0,0,0,218,17,95, 102,105,110,100,95,115,112,101,99,95,108,101,103,97,99,121, - 66,8,0,0,115,8,0,0,0,0,3,18,1,12,1,4, + 67,8,0,0,115,8,0,0,0,0,3,18,1,12,1,4, 1,114,104,1,0,0,99,3,0,0,0,0,0,0,0,9, 0,0,0,27,0,0,0,67,0,0,0,115,34,1,0,0, 116,0,0,106,1,0,115,28,0,116,2,0,106,3,0,100, @@ -3862,7 +3862,7 @@ const unsigned char _Py_M__importlib[] = { 100,114,76,1,0,0,114,10,1,0,0,114,177,0,0,0, 114,179,0,0,0,114,208,0,0,0,114,4,0,0,0,114, 4,0,0,0,114,5,0,0,0,218,10,95,102,105,110,100, - 95,115,112,101,99,75,8,0,0,115,48,0,0,0,0,2, + 95,115,112,101,99,76,8,0,0,115,48,0,0,0,0,2, 9,1,19,4,15,1,16,1,10,1,3,1,13,1,13,1, 18,1,12,1,11,2,24,1,12,2,22,1,13,1,3,1, 13,1,13,4,9,2,12,1,4,2,7,2,11,2,114,106, @@ -3899,7 +3899,7 @@ const unsigned char _Py_M__importlib[] = { 121,115,116,101,109,69,114,114,111,114,41,4,114,67,0,0, 0,114,101,1,0,0,114,102,1,0,0,114,171,0,0,0, 
114,4,0,0,0,114,4,0,0,0,114,5,0,0,0,218, - 13,95,115,97,110,105,116,121,95,99,104,101,99,107,115,8, + 13,95,115,97,110,105,116,121,95,99,104,101,99,107,116,8, 0,0,115,24,0,0,0,0,2,15,1,30,1,12,1,15, 1,6,1,15,1,15,1,15,1,6,2,27,1,19,1,114, 109,1,0,0,122,16,78,111,32,109,111,100,117,108,101,32, @@ -3938,7 +3938,7 @@ const unsigned char _Py_M__importlib[] = { 114,177,0,0,0,114,179,0,0,0,114,4,0,0,0,114, 4,0,0,0,114,5,0,0,0,218,23,95,102,105,110,100, 95,97,110,100,95,108,111,97,100,95,117,110,108,111,99,107, - 101,100,135,8,0,0,115,42,0,0,0,0,1,6,1,19, + 101,100,136,8,0,0,115,42,0,0,0,0,1,6,1,19, 1,6,1,15,1,16,2,15,1,11,1,13,1,3,1,13, 1,13,1,22,1,26,1,15,1,12,1,30,2,18,1,6, 2,13,1,32,1,114,112,1,0,0,99,2,0,0,0,0, @@ -3952,7 +3952,7 @@ const unsigned char _Py_M__importlib[] = { 107,46,78,41,2,114,103,0,0,0,114,112,1,0,0,41, 2,114,67,0,0,0,114,111,1,0,0,114,4,0,0,0, 114,4,0,0,0,114,5,0,0,0,218,14,95,102,105,110, - 100,95,97,110,100,95,108,111,97,100,162,8,0,0,115,4, + 100,95,97,110,100,95,108,111,97,100,163,8,0,0,115,4, 0,0,0,0,2,13,1,114,113,1,0,0,99,3,0,0, 0,0,0,0,0,5,0,0,0,4,0,0,0,67,0,0, 0,115,172,0,0,0,116,0,0,124,0,0,124,1,0,124, @@ -3996,7 +3996,7 @@ const unsigned char _Py_M__importlib[] = { 0,114,112,0,0,0,41,5,114,67,0,0,0,114,101,1, 0,0,114,102,1,0,0,114,179,0,0,0,114,151,0,0, 0,114,4,0,0,0,114,4,0,0,0,114,5,0,0,0, - 114,114,1,0,0,168,8,0,0,115,26,0,0,0,0,9, + 114,114,1,0,0,169,8,0,0,115,26,0,0,0,0,9, 16,1,12,1,21,1,10,1,15,1,13,1,13,1,12,1, 10,2,15,1,21,1,10,1,114,114,1,0,0,99,3,0, 0,0,0,0,0,0,6,0,0,0,17,0,0,0,67,0, @@ -4043,7 +4043,7 @@ const unsigned char _Py_M__importlib[] = { 90,9,102,114,111,109,95,110,97,109,101,114,37,1,0,0, 114,4,0,0,0,114,4,0,0,0,114,5,0,0,0,218, 16,95,104,97,110,100,108,101,95,102,114,111,109,108,105,115, - 116,192,8,0,0,115,34,0,0,0,0,10,15,1,12,1, + 116,193,8,0,0,115,34,0,0,0,0,10,15,1,12,1, 12,1,13,1,15,1,22,1,13,1,15,1,21,1,3,1, 17,1,18,4,21,1,15,1,9,1,32,1,114,120,1,0, 0,99,1,0,0,0,0,0,0,0,2,0,0,0,2,0, @@ -4068,7 +4068,7 @@ const unsigned char _Py_M__importlib[] = { 114,93,0,0,0,114,32,0,0,0,41,2,218,7,103,108, 111,98,97,108,115,114,101,1,0,0,114,4,0,0,0,114, 4,0,0,0,114,5,0,0,0,218,17,95,99,97,108,99, - 95,95,95,112,97,99,107,97,103,101,95,95,224,8,0,0, + 95,95,95,112,97,99,107,97,103,101,95,95,225,8,0,0, 115,12,0,0,0,0,7,15,1,12,1,10,1,12,1,25, 1,114,122,1,0,0,99,0,0,0,0,0,0,0,0,3, 0,0,0,3,0,0,0,67,0,0,0,115,55,0,0,0, @@ -4088,7 +4088,7 @@ const unsigned char _Py_M__importlib[] = { 90,10,101,120,116,101,110,115,105,111,110,115,90,6,115,111, 117,114,99,101,90,8,98,121,116,101,99,111,100,101,114,4, 0,0,0,114,4,0,0,0,114,5,0,0,0,114,240,0, - 0,0,239,8,0,0,115,8,0,0,0,0,5,18,1,12, + 0,0,240,8,0,0,115,8,0,0,0,0,5,18,1,12, 1,12,1,114,240,0,0,0,99,5,0,0,0,0,0,0, 0,9,0,0,0,5,0,0,0,67,0,0,0,115,227,0, 0,0,124,4,0,100,1,0,107,2,0,114,27,0,116,0, @@ -4143,7 +4143,7 @@ const unsigned char _Py_M__importlib[] = { 114,102,1,0,0,114,179,0,0,0,90,8,103,108,111,98, 97,108,115,95,114,101,1,0,0,90,7,99,117,116,95,111, 102,102,114,4,0,0,0,114,4,0,0,0,114,5,0,0, - 0,218,10,95,95,105,109,112,111,114,116,95,95,250,8,0, + 0,218,10,95,95,105,109,112,111,114,116,95,95,251,8,0, 0,115,26,0,0,0,0,11,12,1,15,2,24,1,12,1, 18,1,6,3,12,1,23,1,6,1,4,4,35,3,40,2, 114,125,1,0,0,99,1,0,0,0,0,0,0,0,3,0, @@ -4159,7 +4159,7 @@ const unsigned char _Py_M__importlib[] = { 0,0,0,114,177,0,0,0,114,178,0,0,0,114,4,0, 0,0,114,4,0,0,0,114,5,0,0,0,218,18,95,98, 117,105,108,116,105,110,95,102,114,111,109,95,110,97,109,101, - 29,9,0,0,115,10,0,0,0,0,1,15,1,12,1,19, + 
30,9,0,0,115,10,0,0,0,0,1,15,1,12,1,19, 1,12,1,114,126,1,0,0,99,2,0,0,0,0,0,0, 0,19,0,0,0,12,0,0,0,67,0,0,0,115,232,2, 0,0,124,1,0,97,0,0,124,0,0,97,1,0,116,1, @@ -4233,7 +4233,7 @@ const unsigned char _Py_M__importlib[] = { 0,107,2,0,86,1,113,3,0,100,1,0,83,41,2,114, 29,0,0,0,78,41,1,114,31,0,0,0,41,2,114,22, 0,0,0,114,130,0,0,0,114,4,0,0,0,114,4,0, - 0,0,114,5,0,0,0,114,77,0,0,0,81,9,0,0, + 0,0,114,5,0,0,0,114,77,0,0,0,82,9,0,0, 115,2,0,0,0,6,0,122,25,95,115,101,116,117,112,46, 60,108,111,99,97,108,115,62,46,60,103,101,110,101,120,112, 114,62,114,84,0,0,0,122,30,105,109,112,111,114,116,108, @@ -4269,7 +4269,7 @@ const unsigned char _Py_M__importlib[] = { 101,97,107,114,101,102,95,109,111,100,117,108,101,90,13,119, 105,110,114,101,103,95,109,111,100,117,108,101,114,4,0,0, 0,114,4,0,0,0,114,5,0,0,0,218,6,95,115,101, - 116,117,112,37,9,0,0,115,108,0,0,0,0,9,6,1, + 116,117,112,38,9,0,0,115,108,0,0,0,0,9,6,1, 6,2,12,1,9,2,6,3,12,1,28,1,15,1,15,1, 9,1,15,1,9,2,3,1,15,1,12,1,20,3,13,1, 13,1,15,1,15,2,13,1,20,3,33,1,19,2,31,1, @@ -4299,7 +4299,7 @@ const unsigned char _Py_M__importlib[] = { 134,1,0,0,90,17,115,117,112,112,111,114,116,101,100,95, 108,111,97,100,101,114,115,114,4,0,0,0,114,4,0,0, 0,114,5,0,0,0,218,8,95,105,110,115,116,97,108,108, - 124,9,0,0,115,16,0,0,0,0,2,13,1,9,1,28, + 125,9,0,0,115,16,0,0,0,0,2,13,1,9,1,28, 1,16,1,16,1,15,1,19,1,114,136,1,0,0,41,3, 122,3,119,105,110,114,1,0,0,0,114,2,0,0,0,41, 91,114,59,0,0,0,114,10,0,0,0,114,11,0,0,0, @@ -4337,7 +4337,7 @@ const unsigned char _Py_M__importlib[] = { 115,168,0,0,0,6,17,6,3,12,12,12,5,12,5,12, 6,12,12,12,10,12,9,12,5,12,7,15,22,12,8,12, 4,15,4,19,20,6,2,6,3,22,4,19,68,19,21,19, - 19,12,19,12,20,12,113,22,1,18,2,6,2,9,2,9, + 19,12,19,12,20,12,114,22,1,18,2,6,2,9,2,9, 1,9,2,15,27,12,23,12,19,12,12,18,8,12,18,12, 11,12,11,12,17,12,16,21,55,21,12,18,10,12,14,12, 36,19,27,19,106,24,22,9,3,12,1,15,63,18,45,19, -- cgit v0.12 From 1415e25e055232e655e1643967fef1f1eee423f5 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 20 Feb 2014 01:44:10 +0100 Subject: asyncio doc: remove reference to _DEBUG (now replaced with PYTHONASYNCIODEBUG env var), document the default debug mode --- Doc/library/asyncio-dev.rst | 1 - Doc/library/asyncio-eventloop.rst | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/Doc/library/asyncio-dev.rst b/Doc/library/asyncio-dev.rst index c5f0d1a..9d6f054 100644 --- a/Doc/library/asyncio-dev.rst +++ b/Doc/library/asyncio-dev.rst @@ -98,7 +98,6 @@ imported before the flag value can be changed. Example with the bug:: import asyncio - asyncio.tasks._DEBUG = True @asyncio.coroutine def test(): diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 04b182b..92c4978 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -558,7 +558,7 @@ Debug mode .. method:: BaseEventLoop.get_debug() - Get the debug mode (:class:`bool`) of the event loop. + Get the debug mode (:class:`bool`) of the event loop, ``False`` by default. .. 
method:: BaseEventLoop.set_debug(enabled: bool) -- cgit v0.12 From 43ee1c1325c05821aa19f9c5813f8941bc34d7b7 Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Wed, 19 Feb 2014 20:58:44 -0500 Subject: asyncio.docs: Document Error Handling API and asyncio.Handle --- Doc/library/asyncio-eventloop.rst | 69 +++++++++++++++++++++++++++++++++++++-- 1 file changed, 67 insertions(+), 2 deletions(-) diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 92c4978..88fe35e 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -142,6 +142,8 @@ Calls Any positional arguments after the callback will be passed to the callback when it is called. + An instance of :class:`asyncio.Handle` is returned. + .. method:: BaseEventLoop.call_soon_threadsafe(callback, \*args) Like :meth:`call_soon`, but thread safe. @@ -167,8 +169,7 @@ a different clock than :func:`time.time`. Arrange for the *callback* to be called after the given *delay* seconds (either an int or float). - A "handle" is returned: an opaque object with a :meth:`cancel` method - that can be used to cancel the call. + An instance of :class:`asyncio.Handle` is returned. *callback* will be called exactly once per call to :meth:`call_later`. If two callbacks are scheduled for exactly the same time, it is @@ -553,6 +554,56 @@ pool of processes). By default, an event loop uses a thread pool executor Set the default executor used by :meth:`run_in_executor`. +Error Handling API +------------------ + +Allows to customize how exceptions are handled in the event loop. + +.. method:: BaseEventLoop.set_exception_handler(handler) + + Set *handler* as the new event loop exception handler. + + If *handler* is ``None``, the default exception handler will + be set. + + If *handler* is a callable object, it should have a + matching signature to ``(loop, context)``, where ``loop`` + will be a reference to the active event loop, ``context`` + will be a ``dict`` object (see :meth:`call_exception_handler` + documentation for details about context). + +.. method:: BaseEventLoop.default_exception_handler(context) + + Default exception handler. + + This is called when an exception occurs and no exception + handler is set, and can be called by a custom exception + handler that wants to defer to the default behavior. + + *context* parameter has the same meaning as in + :meth:`call_exception_handler`. + +.. method:: BaseEventLoop.call_exception_handler(context) + + Call the current event loop exception handler. + + *context* is a ``dict`` object containing the following keys + (new keys may be introduced later): + + * 'message': Error message; + * 'exception' (optional): Exception object; + * 'future' (optional): :class:`asyncio.Future` instance; + * 'handle' (optional): :class:`asyncio.Handle` instance; + * 'protocol' (optional): :ref:`Protocol ` instance; + * 'transport' (optional): :ref:`Transport ` instance; + * 'socket' (optional): :class:`socket.socket` instance. + + .. note:: + + Note: this method should not be overloaded in subclassed + event loops. For any custom exception handling, use + :meth:`set_exception_handler()` method. + Debug mode ---------- @@ -585,6 +636,20 @@ Server Coroutine to wait until service is closed. +Handle +------ + +.. class:: Handle + + A callback wrapper object returned by :func:`BaseEventLoop.call_soon`, + :func:`BaseEventLoop.call_soon_threadsafe`, :func:`BaseEventLoop.call_later`, + and :func:`BaseEventLoop.call_at`. + + .. method:: cancel() + + Cancel the call. + + .. 
_asyncio-hello-world-callback: Example: Hello World (callback) -- cgit v0.12 From a96ed63d36ec7950d9f4135770f910a341f530df Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Wed, 19 Feb 2014 23:06:41 -0500 Subject: merge 3.3 (#20695) --- Lib/test/test_urllibnet.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Lib/test/test_urllibnet.py b/Lib/test/test_urllibnet.py index 6347c54..4da89fc 100644 --- a/Lib/test/test_urllibnet.py +++ b/Lib/test/test_urllibnet.py @@ -103,7 +103,7 @@ class urlopenNetworkTests(unittest.TestCase): # Make sure fd returned by fileno is valid. with self.urlopen("http://www.python.org/", timeout=None) as open_url: fd = open_url.fileno() - with os.fdopen(fd, encoding='utf-8') as f: + with os.fdopen(fd, 'rb') as f: self.assertTrue(f.read(), "reading from file created using fd " "returned by fileno failed") @@ -150,7 +150,7 @@ class urlretrieveNetworkTests(unittest.TestCase): with self.urlretrieve("http://www.python.org/") as (file_location, info): self.assertTrue(os.path.exists(file_location), "file location returned by" " urlretrieve is not a valid path") - with open(file_location, encoding='utf-8') as f: + with open(file_location, 'rb') as f: self.assertTrue(f.read(), "reading from the file location returned" " by urlretrieve failed") @@ -160,7 +160,7 @@ class urlretrieveNetworkTests(unittest.TestCase): support.TESTFN) as (file_location, info): self.assertEqual(file_location, support.TESTFN) self.assertTrue(os.path.exists(file_location)) - with open(file_location, encoding='utf-8') as f: + with open(file_location, 'rb') as f: self.assertTrue(f.read(), "reading from temporary file failed") def test_header(self): @@ -169,7 +169,7 @@ class urlretrieveNetworkTests(unittest.TestCase): self.assertIsInstance(info, email.message.Message, "info is not an instance of email.message.Message") - logo = "http://www.python.org/community/logos/python-logo-master-v3-TM.png" + logo = "http://www.python.org/static/community_logos/python-logo-master-v3-TM.png" def test_data_header(self): with self.urlretrieve(self.logo) as (file_location, fileheaders): -- cgit v0.12 From 8d0230b0f2e9b7f8b6dcb56556d15c9590c6c519 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 20 Feb 2014 10:12:59 +0100 Subject: asyncio.subprocess: Fix a race condition in communicate() Use self._loop instead of self._transport._loop, because transport._loop is set to None at process exit. --- Lib/asyncio/subprocess.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Lib/asyncio/subprocess.py b/Lib/asyncio/subprocess.py index c3b0175..414e023 100644 --- a/Lib/asyncio/subprocess.py +++ b/Lib/asyncio/subprocess.py @@ -146,7 +146,6 @@ class Process: @tasks.coroutine def communicate(self, input=None): - loop = self._transport._loop if input: stdin = self._feed_stdin(input) else: @@ -160,7 +159,7 @@ class Process: else: stderr = self._noop() stdin, stdout, stderr = yield from tasks.gather(stdin, stdout, stderr, - loop=loop) + loop=self._loop) yield from self.wait() return (stdout, stderr) -- cgit v0.12 From 6f24d83ac68fdea3a28cee64d631d7701cf102ae Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 20 Feb 2014 10:33:01 +0100 Subject: asyncio: Fix _ProactorWritePipeTransport._pipe_closed() The "exc" variable was not defined, pass a BrokenPipeError exception instead. 
--- Lib/asyncio/proactor_events.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Lib/asyncio/proactor_events.py b/Lib/asyncio/proactor_events.py index f45cd9c..d99e8ce 100644 --- a/Lib/asyncio/proactor_events.py +++ b/Lib/asyncio/proactor_events.py @@ -275,7 +275,7 @@ class _ProactorWritePipeTransport(_ProactorBaseWritePipeTransport): assert fut is self._read_fut, (fut, self._read_fut) self._read_fut = None if self._write_fut is not None: - self._force_close(exc) + self._force_close(BrokenPipeError()) else: self.close() -- cgit v0.12 From b4c9388947b2c93e3ba7d85e828d8667551a71f6 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 20 Feb 2014 10:37:27 +0100 Subject: asyncio: remove unused imports and unused variables noticed by pyflakes --- Lib/asyncio/events.py | 3 --- Lib/asyncio/futures.py | 1 - Lib/asyncio/tasks.py | 2 -- Lib/asyncio/test_utils.py | 2 +- Lib/asyncio/unix_events.py | 1 - Lib/asyncio/windows_events.py | 1 - Lib/selectors.py | 3 +-- 7 files changed, 2 insertions(+), 11 deletions(-) diff --git a/Lib/asyncio/events.py b/Lib/asyncio/events.py index 5362f05..57af68a 100644 --- a/Lib/asyncio/events.py +++ b/Lib/asyncio/events.py @@ -9,12 +9,9 @@ __all__ = ['AbstractEventLoopPolicy', ] import subprocess -import sys import threading import socket -from .log import logger - class Handle: """Object returned by callback registration methods.""" diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py index b9cd45c..91ea170 100644 --- a/Lib/asyncio/futures.py +++ b/Lib/asyncio/futures.py @@ -11,7 +11,6 @@ import sys import traceback from . import events -from .log import logger # States for Future. _PENDING = 'PENDING' diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index cf7b540..19fa654 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -7,7 +7,6 @@ __all__ = ['coroutine', 'Task', 'gather', 'shield', ] -import collections import concurrent.futures import functools import inspect @@ -486,7 +485,6 @@ def as_completed(fs, *, loop=None, timeout=None): if isinstance(fs, futures.Future) or iscoroutine(fs): raise TypeError("expect a list of futures, not %s" % type(fs).__name__) loop = loop if loop is not None else events.get_event_loop() - deadline = None if timeout is None else loop.time() + timeout todo = {async(f, loop=loop) for f in set(fs)} from .queues import Queue # Import here to avoid circular import problem. done = Queue(loop=loop) diff --git a/Lib/asyncio/test_utils.py b/Lib/asyncio/test_utils.py index 2a8a241..dd87789 100644 --- a/Lib/asyncio/test_utils.py +++ b/Lib/asyncio/test_utils.py @@ -15,7 +15,7 @@ import unittest import unittest.mock from http.server import HTTPServer -from wsgiref.simple_server import make_server, WSGIRequestHandler, WSGIServer +from wsgiref.simple_server import WSGIRequestHandler, WSGIServer try: import ssl diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index ce45e5f..2125548 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -15,7 +15,6 @@ from . import base_events from . import base_subprocess from . import constants from . import events -from . import protocols from . import selector_events from . import tasks from . import transports diff --git a/Lib/asyncio/windows_events.py b/Lib/asyncio/windows_events.py index e6be9d1..60fb589 100644 --- a/Lib/asyncio/windows_events.py +++ b/Lib/asyncio/windows_events.py @@ -5,7 +5,6 @@ import errno import math import socket import struct -import subprocess import weakref from . 
import events diff --git a/Lib/selectors.py b/Lib/selectors.py index a5465e2..9be9225 100644 --- a/Lib/selectors.py +++ b/Lib/selectors.py @@ -5,9 +5,8 @@ This module allows high-level and efficient I/O multiplexing, built upon the """ -from abc import ABCMeta, abstractmethod, abstractproperty +from abc import ABCMeta, abstractmethod from collections import namedtuple, Mapping -import functools import math import select import sys -- cgit v0.12 From d1a727a9e5440cdca283719db8fd71c8cb908b35 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 20 Feb 2014 16:43:09 +0100 Subject: asyncio: Fix _check_resolved_address() for IPv6 address --- Lib/asyncio/base_events.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 69caa4d..1615ecb 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -45,10 +45,13 @@ def _check_resolved_address(sock, address): # Ensure that the address is already resolved to avoid the trap of hanging # the entire event loop when the address requires doing a DNS lookup. family = sock.family - if family not in (socket.AF_INET, socket.AF_INET6): + if family == socket.AF_INET: + host, port = address + elif family == socket.AF_INET6: + host, port, flow_info, scope_id = address + else: return - host, port = address type_mask = 0 if hasattr(socket, 'SOCK_NONBLOCK'): type_mask |= socket.SOCK_NONBLOCK -- cgit v0.12 From 6dcfec985915036e7bda23ca5d227802645c83dd Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 20 Feb 2014 17:01:11 +0100 Subject: asyncio: ops, and now fix also the unit test for IPv6 address: test_sock_connect_address() --- Lib/test/test_asyncio/test_events.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index 3720cc7..f8499dc 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -1335,12 +1335,11 @@ class EventLoopTestsMixin: 'selector': self.loop._selector.__class__.__name__}) def test_sock_connect_address(self): - families = [socket.AF_INET] + families = [(socket.AF_INET, ('www.python.org', 80))] if support.IPV6_ENABLED: - families.append(socket.AF_INET6) + families.append((socket.AF_INET6, ('www.python.org', 80, 0, 0))) - address = ('www.python.org', 80) - for family in families: + for family, address in families: for sock_type in (socket.SOCK_STREAM, socket.SOCK_DGRAM): sock = socket.socket(family, sock_type) with sock: -- cgit v0.12 From 389b036b8f20b8c61c68f10722726aec69e61c3c Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Thu, 20 Feb 2014 13:59:14 -0500 Subject: Misc/NEWS: Add some missing news items re asyncio. --- Misc/NEWS | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/Misc/NEWS b/Misc/NEWS index d138547..43bdbd4 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -26,6 +26,25 @@ Core and Builtins Library ------- +- Issue #20566: Change asyncio.as_completed() to use a Queue, to + avoid O(N**2) behavior. + +- Issue #20704: Implement new debug API in asyncio. Add new methods + BaseEventLoop.set_debug() and BaseEventLoop.get_debug(). + Add support for setting 'asyncio.tasks._DEBUG' variable with + 'PYTHONASYNCIODEBUG' environment variable. 
+ +- asyncio: Refactoring and fixes: BaseEventLoop.sock_connect() raises an + error if the address is not resolved; use __slots__ in Handle and + TimerHandle; as_completed() and wait() raise TypeError if the passed + list of Futures is a single Future; call_soon() and other 'call_*()' + functions raise TypeError if the passed callback is a coroutine + function; _ProactorBasePipeTransport uses _FlowControlMixin; + WriteTransport.set_write_buffer_size() calls _maybe_pause_protocol() + to consider pausing receiving if the watermark limits have changed; + fix _check_resolved_address() for IPv6 address; and other minor + improvements, along with multiple documentation updates. + - Issue #20684: Fix inspect.getfullargspec() to not to follow __wrapped__ chains. Make its behaviour consistent with bound methods first argument. Patch by Nick Coghlan and Yury Selivanov. -- cgit v0.12 From d3f8e308286731eda3cb488a008de094f980353d Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Thu, 20 Feb 2014 14:10:02 -0500 Subject: asyncio.docs: Improve documentation of Streams. Issue #20696. --- Doc/library/asyncio-stream.rst | 72 +++++++++++++++++++++++++----------------- 1 file changed, 43 insertions(+), 29 deletions(-) diff --git a/Doc/library/asyncio-stream.rst b/Doc/library/asyncio-stream.rst index f557df3..76b6643 100644 --- a/Doc/library/asyncio-stream.rst +++ b/Doc/library/asyncio-stream.rst @@ -34,7 +34,7 @@ Stream functions .. function:: start_server(client_connected_cb, host=None, port=None, \*, loop=None, limit=None, **kwds) - Start a socket server, call back for each client connected. + Start a socket server, with a callback for each client connected. The first parameter, *client_connected_cb*, takes two parameters: *client_reader*, *client_writer*. *client_reader* is a @@ -58,6 +58,29 @@ Stream functions This function returns a :ref:`coroutine object `. +.. function:: open_unix_connection(path=None, \*, loop=None, limit=None, **kwds) + + A wrapper for :meth:`~BaseEventLoop.create_unix_connection()` returning + a (reader, writer) pair. + + See :func:`open_connection` for information about return value and other + details. + + This function returns a :ref:`coroutine object `. + + Availability: UNIX. + +.. function:: start_unix_server(client_connected_cb, path=None, \*, loop=None, limit=None, **kwds) + + Start a UNIX Domain Socket server, with a callback for each client connected. + + See :func:`start_server` for information about return value and other + details. + + This function returns a :ref:`coroutine object `. + + Availability: UNIX. + StreamReader ============ @@ -70,11 +93,12 @@ StreamReader .. method:: feed_eof() - XXX + Acknowledge the EOF. .. method:: feed_data(data) - XXX + Feed *data* bytes in the internal buffer. Any operations waiting + for the data will be resumed. .. method:: set_exception(exc) @@ -86,13 +110,23 @@ StreamReader .. method:: read(n=-1) - XXX + Read up to *n* bytes. If *n* is not provided, or set to ``-1``, + read until EOF and return all read bytes. + + If the EOF was received and the internal buffer is empty, + return an empty ``bytes`` object. This method returns a :ref:`coroutine object `. .. method:: readline() - XXX + Read one line, where "line" is a sequence of bytes ending with ``\n``. + + If EOF is received, and ``\n`` was not found, the method will + return the partial read bytes. + + If the EOF was received and the internal buffer is empty, + return an empty ``bytes`` object. This method returns a :ref:`coroutine object `. 
@@ -105,6 +139,10 @@ StreamReader This method returns a :ref:`coroutine object `. + .. method:: at_eof() + + Return ``True`` if the buffer is empty and :meth:`feed_eof` was called. + StreamWriter ============ @@ -186,30 +224,6 @@ StreamReaderProtocol potential uses, and to prevent the user of the :class:`StreamReader` to accidentally call inappropriate methods of the protocol.) - .. method:: connection_made(transport) - - XXX - - .. method:: connection_lost(exc) - - XXX - - .. method:: data_received(data) - - XXX - - .. method:: eof_received() - - XXX - - .. method:: pause_writing() - - XXX - - .. method:: resume_writing() - - XXX - IncompleteReadError =================== -- cgit v0.12 From 934c88588541141144ce4d013a4ac769c80d62ce Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 20 Feb 2014 21:59:38 +0100 Subject: asyncio: _check_resolved_address() must also accept IPv6 without flow_info and scope_id: (host, port). --- Lib/asyncio/base_events.py | 2 +- Lib/test/test_asyncio/test_events.py | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 1615ecb..80df927 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -48,7 +48,7 @@ def _check_resolved_address(sock, address): if family == socket.AF_INET: host, port = address elif family == socket.AF_INET6: - host, port, flow_info, scope_id = address + host, port = address[:2] else: return diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index f8499dc..d00af23 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -1335,11 +1335,14 @@ class EventLoopTestsMixin: 'selector': self.loop._selector.__class__.__name__}) def test_sock_connect_address(self): - families = [(socket.AF_INET, ('www.python.org', 80))] + addresses = [(socket.AF_INET, ('www.python.org', 80))] if support.IPV6_ENABLED: - families.append((socket.AF_INET6, ('www.python.org', 80, 0, 0))) + addresses.extend(( + (socket.AF_INET6, ('www.python.org', 80)), + (socket.AF_INET6, ('www.python.org', 80, 0, 0)), + )) - for family, address in families: + for family, address in addresses: for sock_type in (socket.SOCK_STREAM, socket.SOCK_DGRAM): sock = socket.socket(family, sock_type) with sock: -- cgit v0.12 From 37f15bcfed4133f5ed79abf7e01eb052e2a60592 Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Thu, 20 Feb 2014 16:20:44 -0500 Subject: asyncio.docs: Improve wordings; add a note to the Coroutines section. Issue #20706 --- Doc/library/asyncio-eventloop.rst | 45 ++++++++++++++++++++------------------- Doc/library/asyncio-stream.rst | 14 ++++++------ Doc/library/asyncio-sync.rst | 14 ++++++------ Doc/library/asyncio-task.rst | 14 ++++++++++-- 4 files changed, 49 insertions(+), 38 deletions(-) diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 88fe35e..2e48d30 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -102,7 +102,8 @@ Run an event loop Run until the :class:`Future` is done. - If the argument is a coroutine, it is wrapped in a :class:`Task`. + If the argument is a :ref:`coroutine `, it is wrapped + in a :class:`Task`. Return the Future's result, or raise its exception. @@ -207,7 +208,7 @@ Creating connections socket type :py:data:`~socket.SOCK_STREAM`. *protocol_factory* must be a callable returning a :ref:`protocol ` instance. 
- This method returns a :ref:`coroutine object ` which will try to + This method is a :ref:`coroutine ` which will try to establish the connection in the background. When successful, the coroutine returns a ``(transport, protocol)`` pair. @@ -274,7 +275,7 @@ Creating connections :py:data:`~socket.AF_INET6` depending on *host* (or *family* if specified), socket type :py:data:`~socket.SOCK_DGRAM`. - This method returns a :ref:`coroutine object ` which will try to + This method is a :ref:`coroutine ` which will try to establish the connection in the background. When successful, the coroutine returns a ``(transport, protocol)`` pair. @@ -288,7 +289,7 @@ Creating connections family is used to communicate between processes on the same machine efficiently. - This method returns a :ref:`coroutine object ` which will try to + This method is a :ref:`coroutine ` which will try to establish the connection in the background. When successful, the coroutine returns a ``(transport, protocol)`` pair. @@ -302,8 +303,8 @@ Creating listening connections .. method:: BaseEventLoop.create_server(protocol_factory, host=None, port=None, \*, family=socket.AF_UNSPEC, flags=socket.AI_PASSIVE, sock=None, backlog=100, ssl=None, reuse_address=None) - A :ref:`coroutine function ` which creates a TCP server bound to host and - port. + A :ref:`coroutine ` method which creates a TCP server bound to + host and port. The return value is a :class:`AbstractServer` object which can be used to stop the service. @@ -332,8 +333,6 @@ Creating listening connections expire. If not specified will automatically be set to True on UNIX. - This method returns a :ref:`coroutine object `. - .. seealso:: The function :func:`start_server` creates a (:class:`StreamReader`, @@ -380,7 +379,7 @@ Low-level socket operations representing the data received. The maximum amount of data to be received at once is specified by *nbytes*. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. .. seealso:: @@ -392,9 +391,9 @@ Low-level socket operations This method continues to send data from *data* until either all data has been sent or an error occurs. ``None`` is returned on success. On error, an exception is raised, and there is no way to determine how much data, if - any, was successfully sent. + any, was successfully processed by the receiving end of the connection. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. .. seealso:: @@ -410,7 +409,7 @@ Low-level socket operations :py:data:`~socket.AF_INET` and :py:data:`~socket.AF_INET6` address families. Use :meth:`getaddrinfo` to resolve the hostname asynchronously. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. .. seealso:: @@ -427,7 +426,7 @@ Low-level socket operations and *address* is the address bound to the socket on the other end of the connection. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. .. seealso:: @@ -440,13 +439,13 @@ Resolve host name .. method:: BaseEventLoop.getaddrinfo(host, port, \*, family=0, type=0, proto=0, flags=0) - Similar to the :meth:`socket.getaddrinfo` function, but return a - :ref:`coroutine object `. + This method is a :ref:`coroutine `, similar to + :meth:`socket.getaddrinfo` function but non-blocking. .. method:: BaseEventLoop.getnameinfo(sockaddr, flags=0) - Similar to the :meth:`socket.getnameinfo` function, but return a - :ref:`coroutine object `. 
+ This method is a :ref:`coroutine `, similar to + :meth:`socket.getnameinfo` function but non-blocking. Running subprocesses @@ -472,7 +471,7 @@ Run subprocesses asynchronously using the :mod:`subprocess` module. XXX - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. See the constructor of the :class:`subprocess.Popen` class for parameters. @@ -480,7 +479,7 @@ Run subprocesses asynchronously using the :mod:`subprocess` module. XXX - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. See the constructor of the :class:`subprocess.Popen` class for parameters. @@ -493,7 +492,7 @@ Run subprocesses asynchronously using the :mod:`subprocess` module. Return pair (transport, protocol), where transport support :class:`ReadTransport` interface. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. .. method:: BaseEventLoop.connect_write_pipe(protocol_factory, pipe) @@ -504,7 +503,7 @@ Run subprocesses asynchronously using the :mod:`subprocess` module. Return pair (transport, protocol), where transport support :class:`WriteTransport` interface. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. .. seealso:: @@ -549,6 +548,8 @@ pool of processes). By default, an event loop uses a thread pool executor *executor* is a :class:`~concurrent.futures.Executor` instance, the default executor is used if *executor* is ``None``. + This method is a :ref:`coroutine `. + .. method:: BaseEventLoop.set_default_executor(executor) Set the default executor used by :meth:`run_in_executor`. @@ -633,7 +634,7 @@ Server .. method:: wait_closed() - Coroutine to wait until service is closed. + A :ref:`coroutine ` to wait until service is closed. Handle diff --git a/Doc/library/asyncio-stream.rst b/Doc/library/asyncio-stream.rst index 76b6643..4543af4 100644 --- a/Doc/library/asyncio-stream.rst +++ b/Doc/library/asyncio-stream.rst @@ -30,7 +30,7 @@ Stream functions :class:`StreamReaderProtocol` classes, just copy the code -- there's really nothing special here except some convenience.) - This function returns a :ref:`coroutine object `. + This function is a :ref:`coroutine `. .. function:: start_server(client_connected_cb, host=None, port=None, \*, loop=None, limit=None, **kwds) @@ -56,7 +56,7 @@ Stream functions The return value is the same as :meth:`~BaseEventLoop.create_server()`, i.e. a :class:`AbstractServer` object which can be used to stop the service. - This function returns a :ref:`coroutine object `. + This function is a :ref:`coroutine `. .. function:: open_unix_connection(path=None, \*, loop=None, limit=None, **kwds) @@ -66,7 +66,7 @@ Stream functions See :func:`open_connection` for information about return value and other details. - This function returns a :ref:`coroutine object `. + This function is a :ref:`coroutine `. Availability: UNIX. @@ -77,7 +77,7 @@ Stream functions See :func:`start_server` for information about return value and other details. - This function returns a :ref:`coroutine object `. + This function is a :ref:`coroutine `. Availability: UNIX. @@ -116,7 +116,7 @@ StreamReader If the EOF was received and the internal buffer is empty, return an empty ``bytes`` object. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. .. method:: readline() @@ -128,7 +128,7 @@ StreamReader If the EOF was received and the internal buffer is empty, return an empty ``bytes`` object. - This method returns a :ref:`coroutine object `. 
+ This method is a :ref:`coroutine `. .. method:: readexactly(n) @@ -137,7 +137,7 @@ StreamReader :attr:`IncompleteReadError.partial` attribute of the exception contains the partial read bytes. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. .. method:: at_eof() diff --git a/Doc/library/asyncio-sync.rst b/Doc/library/asyncio-sync.rst index 19e49ff..de38131 100644 --- a/Doc/library/asyncio-sync.rst +++ b/Doc/library/asyncio-sync.rst @@ -124,7 +124,7 @@ Event Otherwise, block until another coroutine calls :meth:`set` to set the flag to true, then return ``True``. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. Condition @@ -175,7 +175,7 @@ Condition condition variable in another coroutine. Once awakened, it re-acquires the lock and returns ``True``. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. .. method:: wait_for(predicate) @@ -184,7 +184,7 @@ Condition The predicate should be a callable which result will be interpreted as a boolean value. The final predicate value is the return value. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. Semaphores @@ -217,7 +217,7 @@ Semaphore until some other coroutine has called :meth:`release` to make it larger than ``0``, and then return ``True``. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. .. method:: locked() @@ -279,7 +279,7 @@ Queue If you yield from :meth:`get()`, wait until a item is available. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. .. method:: get_nowait() @@ -295,7 +295,7 @@ Queue If you yield from ``put()``, wait until a free slot is available before adding item. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. .. method:: put_nowait(item) @@ -350,7 +350,7 @@ JoinableQueue it is complete. When the count of unfinished tasks drops to zero, :meth:`join` unblocks. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. .. method:: task_done() diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index e7ef172..4e5526e 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -64,6 +64,14 @@ Coroutines (and tasks) can only run when the event loop is running. message is logged. See :ref:`Detect coroutines never scheduled `. +.. note:: + + In this documentation, some methods are documented as coroutines, + even if they are plain Python functions returning a :class:`Future`. + This is intentional to have a freedom of tweaking the implementation + of these functions in the future. If such a function is needed to be + used in a callback-style code, wrap its result with :func:`async`. + .. _asyncio-hello-world-coroutine: @@ -440,7 +448,7 @@ Task functions .. function:: sleep(delay, result=None, \*, loop=None) - Create a :ref:`coroutine object ` that completes after a given + Create a :ref:`coroutine ` that completes after a given time (in seconds). If *result* is provided, it is produced to the caller when the coroutine completes. @@ -505,7 +513,7 @@ Task functions | | futures finish or are cancelled. | +-----------------------------+----------------------------------------+ - This function returns a :ref:`coroutine object `. + This function is a :ref:`coroutine `. Usage:: @@ -529,6 +537,8 @@ Task functions cancels the task and raises :exc:`TimeoutError`. 
To avoid the task cancellation, wrap it in :func:`shield`. + This function is a :ref:`coroutine `. + Usage:: result = yield from asyncio.wait_for(fut, 60.0) -- cgit v0.12 From 0c9beb64deb1ca61de4ee3625b27fad6ebbfe29f Mon Sep 17 00:00:00 2001 From: Zachary Ware Date: Thu, 20 Feb 2014 15:39:29 -0600 Subject: Issue #20221: Removed conflicting (or circular) hypot definition when compiled with VS 2010 or above. Initial patch by Tabrez Mohammed. --- Misc/NEWS | 3 +++ PC/pyconfig.h | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/Misc/NEWS b/Misc/NEWS index 43bdbd4..40c2760 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -68,6 +68,9 @@ Library Build ----- +- Issue #20221: Removed conflicting (or circular) hypot definition when + compiled with VS 2010 or above. Initial patch by Tabrez Mohammed. + - Issue #20609: Restored the ability to build 64-bit Windows binaries on 32-bit Windows, which was broken by the change in issue #19788. diff --git a/PC/pyconfig.h b/PC/pyconfig.h index 299527d..ccf75f3 100644 --- a/PC/pyconfig.h +++ b/PC/pyconfig.h @@ -207,7 +207,11 @@ typedef int pid_t; #define Py_IS_INFINITY(X) (!_finite(X) && !_isnan(X)) #define Py_IS_FINITE(X) _finite(X) #define copysign _copysign + +/* VS 2010 and above already defines hypot as _hypot */ +#if _MSC_VER < 1600 #define hypot _hypot +#endif /* Side by Side assemblies supported in VS 2005 and VS 2008 but not 2010*/ #if _MSC_VER >= 1400 && _MSC_VER < 1600 -- cgit v0.12 From 53281b16267f67be3606fe97164a8d23bfb2b99f Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Thu, 20 Feb 2014 20:10:28 -0500 Subject: asyncio.docs: Document subprocess_exec and subprocess_shell. Issue #20694. --- Doc/library/asyncio-eventloop.rst | 54 +++++++++++++++++++++++++++++++++++---- 1 file changed, 49 insertions(+), 5 deletions(-) diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 2e48d30..9d54964 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -467,17 +467,61 @@ Run subprocesses asynchronously using the :mod:`subprocess` module. :class:`SelectSelector` or :class:`PollSelector` to handle character devices on Mac OS X 10.6 (Snow Leopard) and later. -.. method:: BaseEventLoop.subprocess_exec(protocol_factory, \*args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=False, shell=False, bufsize=0, \*\*kwargs) - - XXX +.. method:: BaseEventLoop.subprocess_exec(protocol_factory, \*args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs) + + Create a subprocess from one or more string arguments, where the first string + specifies the program to execute, and the remaining strings specify the + program's arguments. (Thus, together the string arguments form the + ``sys.argv`` value of the program, assuming it is a Python script.) This is + similar to the standard library :class:`subprocess.Popen` class called with + shell=False and the list of strings passed as the first argument; + however, where :class:`~subprocess.Popen` takes a single argument which is + list of strings, :func:`subprocess_exec` takes multiple string arguments. + + Other parameters: + + * *stdin*: Either a file-like object representing the pipe to be connected + to the subprocess's standard input stream using + :meth:`~BaseEventLoop.connect_write_pipe`, or the constant + :const:`subprocess.PIPE` (the default). By default a new pipe will be + created and connected. 
+ + * *stdout*: Either a file-like object representing the pipe to be connected + to the subprocess's standard output stream using + :meth:`~BaseEventLoop.connect_write_pipe`, or the constant + :const:`subprocess.PIPE` (the default). By default a new pipe will be + created and connected. + + * *stderr*: Either a file-like object representing the pipe to be connected + to the subprocess's standard error stream using + :meth:`~BaseEventLoop.connect_read_pipe`, or one of the constants + :const:`subprocess.PIPE` (the default) or :const:`subprocess.STDOUT`. + By default a new pipe will be created and connected. When + :const:`subprocess.STDOUT` is specified, the subprocess's standard error + stream will be connected to the same pipe as the standard output stream. + + * All other keyword arguments are passed to :class:`subprocess.Popen` + without interpretation, except for *bufsize*, *universal_newlines* and + *shell*, which should not be specified at all. + + Returns a pair of ``(transport, protocol)``, where *transport* is an + instance of :class:`BaseSubprocessTransport`. This method is a :ref:`coroutine `. See the constructor of the :class:`subprocess.Popen` class for parameters. -.. method:: BaseEventLoop.subprocess_shell(protocol_factory, cmd, \*, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=False, shell=True, bufsize=0, \*\*kwargs) +.. method:: BaseEventLoop.subprocess_shell(protocol_factory, cmd, \*, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs) + + Create a subprocess from *cmd*, which is a string using the platform's + "shell" syntax. This is similar to the standard library + :class:`subprocess.Popen` class called with ``shell=True``. + + See :meth:`~BaseEventLoop.subprocess_exec` for more details about + the remaining arguments. - XXX + Returns a pair of ``(transport, protocol)``, where *transport* is an + instance of :class:`BaseSubprocessTransport`. This method is a :ref:`coroutine `. 
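The subprocess_shell() description above corresponds roughly to the
following usage sketch (illustrative only, not part of the patch;
'echo hello' is a placeholder command and CollectorProtocol is a
made-up name):

    import asyncio

    class CollectorProtocol(asyncio.SubprocessProtocol):
        """Collect the child's stdout and report when it exits."""

        def __init__(self, exit_future):
            self.exit_future = exit_future
            self.output = bytearray()

        def pipe_data_received(self, fd, data):
            if fd == 1:                 # fd 1 is the stdout pipe
                self.output.extend(data)

        def process_exited(self):
            self.exit_future.set_result(True)

    @asyncio.coroutine
    def run_shell(loop, cmd):
        exit_future = asyncio.Future(loop=loop)
        # subprocess_shell() is a coroutine returning (transport, protocol).
        transport, protocol = yield from loop.subprocess_shell(
            lambda: CollectorProtocol(exit_future), cmd)
        yield from exit_future
        transport.close()
        return bytes(protocol.output)

    loop = asyncio.get_event_loop()
    print(loop.run_until_complete(run_shell(loop, 'echo hello')))
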
-- cgit v0.12 From b0b75a192215e87214f9f2c77d6839d2f83a5ea8 Mon Sep 17 00:00:00 2001 From: Donald Stufft Date: Thu, 20 Feb 2014 20:53:27 -0500 Subject: Upgrade pip from 1.5.2 to 1.5.3 --- Lib/ensurepip/__init__.py | 2 +- Lib/ensurepip/_bundled/pip-1.5.2-py2.py3-none-any.whl | Bin 1167543 -> 0 bytes Lib/ensurepip/_bundled/pip-1.5.3-py2.py3-none-any.whl | Bin 0 -> 1169264 bytes 3 files changed, 1 insertion(+), 1 deletion(-) delete mode 100644 Lib/ensurepip/_bundled/pip-1.5.2-py2.py3-none-any.whl create mode 100644 Lib/ensurepip/_bundled/pip-1.5.3-py2.py3-none-any.whl diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 3f4e449..d495b50 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -10,7 +10,7 @@ __all__ = ["version", "bootstrap"] _SETUPTOOLS_VERSION = "2.1" -_PIP_VERSION = "1.5.2" +_PIP_VERSION = "1.5.3" # pip currently requires ssl support, so we try to provide a nicer # error message when that is missing (http://bugs.python.org/issue19744) diff --git a/Lib/ensurepip/_bundled/pip-1.5.2-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/pip-1.5.2-py2.py3-none-any.whl deleted file mode 100644 index 9133eed..0000000 Binary files a/Lib/ensurepip/_bundled/pip-1.5.2-py2.py3-none-any.whl and /dev/null differ diff --git a/Lib/ensurepip/_bundled/pip-1.5.3-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/pip-1.5.3-py2.py3-none-any.whl new file mode 100644 index 0000000..31cee0a Binary files /dev/null and b/Lib/ensurepip/_bundled/pip-1.5.3-py2.py3-none-any.whl differ -- cgit v0.12 From d224b6a796a8934b85b9a959c7a9e4b31d2bb428 Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Fri, 21 Feb 2014 01:32:42 -0500 Subject: inspect: Fix getfullargspec to support builtin module-level functions. Issue #20711 --- Lib/inspect.py | 11 +++++++++-- Lib/test/test_inspect.py | 7 +++++++ 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/Lib/inspect.py b/Lib/inspect.py index 8b7840a..b85fbcc 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -1827,9 +1827,16 @@ def _signature_fromstr(cls, obj, s, skip_bound_arg=True): p(f.args.kwarg, empty) if self_parameter is not None: + # Possibly strip the bound argument: + # - We *always* strip first bound argument if + # it is a module. + # - We don't strip first bound argument if + # skip_bound_arg is False. assert parameters - if getattr(obj, '__self__', None) and skip_bound_arg: - # strip off self, it's already been bound + _self = getattr(obj, '__self__', None) + self_isbound = _self is not None + self_ismodule = ismodule(_self) + if self_isbound and (self_ismodule or skip_bound_arg): parameters.pop(0) else: # for builtins, self parameter is always positional-only! 
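What the change enables, mirroring the test added in the next file (a
sketch; the commented output is what that test asserts on a standard
CPython 3.4 build with docstrings enabled):

    import inspect
    import os

    # os.stat is a builtin whose __self__ is its module, not an instance;
    # with the fix its first parameter is no longer stripped as "bound".
    spec = inspect.getfullargspec(os.stat)
    print(spec.args)        # ['path']
    print(spec.kwonlyargs)  # ['dir_fd', 'follow_symlinks']
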
diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py index 711d2a3..95c1b32 100644 --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -643,6 +643,13 @@ class TestClassesAndFunctions(unittest.TestCase): self.assertFullArgSpecEquals(_pickle.Pickler(io.BytesIO()).dump, args_e=['self', 'obj'], formatted='(self, obj)') + self.assertFullArgSpecEquals( + os.stat, + args_e=['path'], + kwonlyargs_e=['dir_fd', 'follow_symlinks'], + kwonlydefaults_e={'dir_fd': None, 'follow_symlinks': True}, + formatted='(path, *, dir_fd=None, follow_symlinks=True)') + @cpython_only @unittest.skipIf(MISSING_C_DOCSTRINGS, "Signature information for builtins requires docstrings") -- cgit v0.12 From 13da6a1a9ce69ab8c08c60c2532bd2d42ec32a12 Mon Sep 17 00:00:00 2001 From: Larry Hastings Date: Thu, 20 Feb 2014 23:34:46 -0800 Subject: Issue #20710: The pydoc summary line no longer displays the "self" parameter for bound methods. Previous to this change, it displayed "self" for methods implemented in Python but not methods implemented in C; it is now both internally consistent and consistent with inspect.Signature. --- Lib/pydoc.py | 21 ++++++++++++++++----- Lib/test/test_pydoc.py | 40 +++++++++++++++++++++++++++++++++++----- Misc/NEWS | 3 +++ 3 files changed, 54 insertions(+), 10 deletions(-) diff --git a/Lib/pydoc.py b/Lib/pydoc.py index cf164cc..c4838d7 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -137,6 +137,19 @@ def _is_some_method(obj): inspect.isbuiltin(obj) or inspect.ismethoddescriptor(obj)) +def _is_bound_method(fn): + """ + Returns True if fn is a bound method, regardless of whether + fn was implemented in Python or in C. + """ + if inspect.ismethod(fn): + return True + if inspect.isbuiltin(fn): + self = getattr(fn, '__self__', None) + return not (inspect.ismodule(self) or (self is None)) + return False + + def allmethods(cl): methods = {} for key, value in inspect.getmembers(cl, _is_some_method): @@ -898,7 +911,7 @@ class HTMLDoc(Doc): anchor = (cl and cl.__name__ or '') + '-' + name note = '' skipdocs = 0 - if inspect.ismethod(object): + if _is_bound_method(object): imclass = object.__self__.__class__ if cl: if imclass is not cl: @@ -909,7 +922,6 @@ class HTMLDoc(Doc): object.__self__.__class__, mod) else: note = ' unbound %s method' % self.classlink(imclass,mod) - object = object.__func__ if name == realname: title = '%s' % (anchor, realname) @@ -924,7 +936,7 @@ class HTMLDoc(Doc): title = '%s = %s' % ( anchor, name, reallink) argspec = None - if inspect.isfunction(object) or inspect.isbuiltin(object): + if inspect.isroutine(object): try: signature = inspect.signature(object) except (ValueError, TypeError): @@ -1301,7 +1313,7 @@ location listed above. name = name or realname note = '' skipdocs = 0 - if inspect.ismethod(object): + if _is_bound_method(object): imclass = object.__self__.__class__ if cl: if imclass is not cl: @@ -1312,7 +1324,6 @@ location listed above. 
object.__self__.__class__, mod) else: note = ' unbound %s method' % classname(imclass,mod) - object = object.__func__ if name == realname: title = self.bold(realname) diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py index 105116a..f6aae12 100644 --- a/Lib/test/test_pydoc.py +++ b/Lib/test/test_pydoc.py @@ -6,6 +6,7 @@ import difflib import inspect import pydoc import keyword +import _pickle import pkgutil import re import string @@ -689,12 +690,41 @@ class TestDescriptions(unittest.TestCase): self.assertIsNone(pydoc.locate(name)) self.assertRaises(ImportError, pydoc.render_doc, name) + @staticmethod + def _get_summary_line(o): + text = pydoc.plain(pydoc.render_doc(o)) + lines = text.split('\n') + assert len(lines) >= 2 + return lines[2] + + # these should include "self" + def test_unbound_python_method(self): + self.assertEqual(self._get_summary_line(textwrap.TextWrapper.wrap), + "wrap(self, text)") + + @requires_docstrings + def test_unbound_builtin_method(self): + self.assertEqual(self._get_summary_line(_pickle.Pickler.dump), + "dump(self, obj, /)") + + # these no longer include "self" + def test_bound_python_method(self): + t = textwrap.TextWrapper() + self.assertEqual(self._get_summary_line(t.wrap), + "wrap(text) method of textwrap.TextWrapper instance") + + @requires_docstrings + def test_bound_builtin_method(self): + s = StringIO() + p = _pickle.Pickler(s) + self.assertEqual(self._get_summary_line(p.dump), + "dump(obj, /) method of _pickle.Pickler instance") + + # this should *never* include self! @requires_docstrings - def test_builtin_signatures(self): - # test producing signatures from builtins - stat_sig = pydoc.render_doc(os.stat) - self.assertEqual(pydoc.plain(stat_sig).splitlines()[2], - 'stat(path, *, dir_fd=None, follow_symlinks=True)') + def test_module_level_callable(self): + self.assertEqual(self._get_summary_line(os.stat), + "stat(path, *, dir_fd=None, follow_symlinks=True)") @unittest.skipUnless(threading, 'Threading required for this test.') diff --git a/Misc/NEWS b/Misc/NEWS index 40c2760..e4f7e9a 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -26,6 +26,9 @@ Core and Builtins Library ------- +- Issue #20710: The pydoc summary line no longer displays the "self" parameter + for bound methods. + - Issue #20566: Change asyncio.as_completed() to use a Queue, to avoid O(N**2) behavior. 
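A sketch of the behaviour exercised by the new tests above (illustrative
only; the summary lines shown in the comments are the ones the tests
assert):

    import pydoc
    import textwrap

    def summary(obj):
        # The third line of the plain-text rendering is the signature line.
        return pydoc.plain(pydoc.render_doc(obj)).split('\n')[2]

    # Unbound method: "self" is still displayed.
    print(summary(textwrap.TextWrapper.wrap))
    # -> wrap(self, text)

    # Bound method: "self" is no longer displayed.
    print(summary(textwrap.TextWrapper().wrap))
    # -> wrap(text) method of textwrap.TextWrapper instance
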
-- cgit v0.12 From e2df3ea682e4b51ed6357ed9e105b2618616e839 Mon Sep 17 00:00:00 2001 From: Donald Stufft Date: Fri, 21 Feb 2014 07:42:39 -0500 Subject: Upgrade pip from 1.5.3 to 1.5.4 --- Lib/ensurepip/__init__.py | 2 +- Lib/ensurepip/_bundled/pip-1.5.3-py2.py3-none-any.whl | Bin 1169264 -> 0 bytes Lib/ensurepip/_bundled/pip-1.5.4-py2.py3-none-any.whl | Bin 0 -> 1169272 bytes 3 files changed, 1 insertion(+), 1 deletion(-) delete mode 100644 Lib/ensurepip/_bundled/pip-1.5.3-py2.py3-none-any.whl create mode 100644 Lib/ensurepip/_bundled/pip-1.5.4-py2.py3-none-any.whl diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index d495b50..f584bdc 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -10,7 +10,7 @@ __all__ = ["version", "bootstrap"] _SETUPTOOLS_VERSION = "2.1" -_PIP_VERSION = "1.5.3" +_PIP_VERSION = "1.5.4" # pip currently requires ssl support, so we try to provide a nicer # error message when that is missing (http://bugs.python.org/issue19744) diff --git a/Lib/ensurepip/_bundled/pip-1.5.3-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/pip-1.5.3-py2.py3-none-any.whl deleted file mode 100644 index 31cee0a..0000000 Binary files a/Lib/ensurepip/_bundled/pip-1.5.3-py2.py3-none-any.whl and /dev/null differ diff --git a/Lib/ensurepip/_bundled/pip-1.5.4-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/pip-1.5.4-py2.py3-none-any.whl new file mode 100644 index 0000000..e07d476 Binary files /dev/null and b/Lib/ensurepip/_bundled/pip-1.5.4-py2.py3-none-any.whl differ -- cgit v0.12 From 8dfb4576a9a74a35f71a2f68a7236f2f0b1a7e63 Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Fri, 21 Feb 2014 18:30:53 -0500 Subject: inspect.signature: Check for function-like objects before builtins. Issue #17159 --- Lib/inspect.py | 8 ++++---- Lib/test/test_inspect.py | 16 ++++++++++++++++ 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/Lib/inspect.py b/Lib/inspect.py index b85fbcc..c7a2cf8 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -1911,15 +1911,15 @@ def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True): return sig.replace(parameters=new_params) - if _signature_is_builtin(obj): - return _signature_from_builtin(Signature, obj, - skip_bound_arg=skip_bound_arg) - if isfunction(obj) or _signature_is_functionlike(obj): # If it's a pure Python function, or an object that is duck type # of a Python function (Cython functions, for instance), then: return Signature.from_function(obj) + if _signature_is_builtin(obj): + return _signature_from_builtin(Signature, obj, + skip_bound_arg=skip_bound_arg) + if isinstance(obj, functools.partial): wrapped_sig = _signature_internal(obj.func, follow_wrapper_chains, diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py index 95c1b32..0dc7451 100644 --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -14,6 +14,7 @@ import sys import types import unicodedata import unittest +import unittest.mock try: from concurrent.futures import ThreadPoolExecutor @@ -1836,6 +1837,21 @@ class TestSignatureObject(unittest.TestCase): ('kwargs', ..., ..., "var_keyword")), ...)) + # Test with cython-like builtins: + _orig_isdesc = inspect.ismethoddescriptor + def _isdesc(obj): + if hasattr(obj, '_builtinmock'): + return True + return _orig_isdesc(obj) + + with unittest.mock.patch('inspect.ismethoddescriptor', _isdesc): + builtin_func = funclike(func) + # Make sure that our mock setup is working + self.assertFalse(inspect.ismethoddescriptor(builtin_func)) + builtin_func._builtinmock = True + 
self.assertTrue(inspect.ismethoddescriptor(builtin_func)) + self.assertEqual(inspect.signature(builtin_func), sig_func) + def test_signature_functionlike_class(self): # We only want to duck type function-like objects, # not classes. -- cgit v0.12 From b79b785a921959245ebc96ae5da216da593959a5 Mon Sep 17 00:00:00 2001 From: R David Murray Date: Sat, 22 Feb 2014 14:28:46 -0500 Subject: whatsnew: importlib deprecations. This addresses issue #20199, if I got it right. The deprecation and replacement lists are based on the importlib documentation. --- Doc/whatsnew/3.4.rst | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index 1b71c57..c8c47e0 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -1504,10 +1504,30 @@ Deprecated Python Modules, Functions and Methods :meth:`difflib.SequenceMatcher.isbpopular` were removed: use ``x in sm.bjunk`` and ``x in sm.bpopular``, where *sm* is a :class:`~difflib.SequenceMatcher` object. -* :func:`importlib.util.module_for_loader` is pending deprecation. Using - :func:`importlib.util.module_to_load` and - :meth:`importlib.abc.Loader.init_module_attrs` allows subclasses of a loader - to more easily customize module loading. +* As mentioned in :ref:`whatsnew-pep-451`, a number of :mod:`importilb` + methods and functions are deprecated: :meth:`importlib.find_loader` is + replaced by :func:`importlib.util.find_spec`; + :meth:`importlib.machinery.PathFinder.find_module` is replaced by + :meth:`importlib.machinery.PathFinder.find_spec`; + :meth:`importlib.abc.MetaPathFinder.find_module` is replaced by + :meth:`importlib.abc.MetaPathFinder.find_spec`; + :meth:`importlib.abc.PathEntryFinder.find_loader` and + :meth:`~importlib.abc.PathEntryFinder.find_module` are replaced by + :meth:`importlib.abc.PathEntryFinder.find_spec`; all of the ``xxxLoader`` ABC + ``load_module`` methods (:meth:`importlib.abc.Loader.load_module`, + :meth:`importlib.abc.InspectLoader.load_module`, + :meth:`importlib.abc.FileLoader.load_module`, + :meth:`importlib.abc.SourceLoader.load_module`) should no longer be + implemented, instead loaders should implement an + ``exec_module`` method + (:meth:`importlib.abc.Loader.exec_module`, + :meth:`importlib.abc.InspectLoader.exec_module` + :meth:`importlib.abc.SourceLoader.exec_module`) and let the import system + take care of the rest; and + :meth:`importlib.abc.Loader.module_repr`, + :meth:`importlib.util.module_for_loader`, :meth:`importlib.util.set_loader`, + and :meth:`importlib.util.set_package` are no longer needed because their + functions are now handled automatically by the import system. * The :mod:`imp` module is pending deprecation. To keep compatibility with Python 2/3 code bases, the module's removal is currently not scheduled. -- cgit v0.12 From 9d4c5f46404938659c7386b13f16e72ccca01344 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Sat, 22 Feb 2014 13:57:08 -0700 Subject: Issue #20484: Disable the 2 remaining "modules" tests in test_pydoc. I'll look into re-enabling them in issue #20128. 
--- Lib/test/test_pydoc.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py index f6aae12..81b58a2 100644 --- a/Lib/test/test_pydoc.py +++ b/Lib/test/test_pydoc.py @@ -608,6 +608,7 @@ class PydocImportTest(PydocBaseTest): self.assertEqual(out.getvalue(), '') self.assertEqual(err.getvalue(), '') + @unittest.skip('causes undesireable side-effects (#20128)') def test_modules(self): # See Helper.listmodules(). num_header_lines = 2 @@ -623,6 +624,7 @@ class PydocImportTest(PydocBaseTest): self.assertGreaterEqual(num_lines, expected) + @unittest.skip('causes undesireable side-effects (#20128)') def test_modules_search(self): # See Helper.listmodules(). expected = 'pydoc - ' @@ -635,7 +637,7 @@ class PydocImportTest(PydocBaseTest): self.assertIn(expected, result) - @unittest.skip('some buildbots are not cooperating (#20123)') + @unittest.skip('some buildbots are not cooperating (#20128)') def test_modules_search_builtin(self): expected = 'gc - ' -- cgit v0.12 From 7850541a7207c82aa0d98345f49f851bf24c0255 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20v=2E=20L=C3=B6wis?= Date: Sat, 22 Feb 2014 23:44:20 +0100 Subject: Issue #20641: Run custom actions with the NoImpersonate flag to support UAC. --- Misc/NEWS | 3 +++ Tools/msi/msi.py | 20 +++++++++----------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS index e4f7e9a..142a82a 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -71,6 +71,9 @@ Library Build ----- +- Issue #20641: Run MSI custom actions (pip installation, pyc compilation) + with the NoImpersonate flag, to support elevated execution (UAC). + - Issue #20221: Removed conflicting (or circular) hypot definition when compiled with VS 2010 or above. Initial patch by Tabrez Mohammed. 
diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py index e816c8a..8409b81 100644 --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -435,12 +435,13 @@ def add_ui(db): ("SetLauncherDirToWindows", 307, "LAUNCHERDIR", "[WindowsFolder]"), # msidbCustomActionTypeExe + msidbCustomActionTypeSourceFile # See "Custom Action Type 18" - ("CompilePyc", 18, "python.exe", compileargs), - ("CompilePyo", 18, "python.exe", "-O "+compileargs), - ("CompileGrammar", 18, "python.exe", lib2to3args), # msidbCustomActionTypeInScript (1024); run during actual installation - ("UpdatePip", 18+1024, "python.exe", updatepipargs), - ("RemovePip", 18, "python.exe", removepipargs), + # msidbCustomActionTypeNoImpersonate (2048); run action in system account, not user account + ("CompilePyc", 18+1024+2048, "python.exe", compileargs), + ("CompilePyo", 18+1024+2048, "python.exe", "-O "+compileargs), + ("CompileGrammar", 18+1024+2048, "python.exe", lib2to3args), + ("UpdatePip", 18+1024+2048, "python.exe", updatepipargs), + ("RemovePip", 18+1024+2048, "python.exe", removepipargs), ]) # UI Sequences, see "InstallUISequence Table", "Using a Sequence Table" @@ -483,17 +484,14 @@ def add_ui(db): # remove pip when state changes to INSTALLSTATE_ABSENT # run before RemoveFiles ("RemovePip", "&pip_feature=2", 3499), - ("CompilePyc", "COMPILEALL", 6800), - ("CompilePyo", "COMPILEALL", 6801), - ("CompileGrammar", "COMPILEALL", 6802), + ("CompilePyc", "COMPILEALL", 4002), + ("CompilePyo", "COMPILEALL", 4003), + ("CompileGrammar", "COMPILEALL", 4004), ]) add_data(db, "AdminExecuteSequence", [("InitialTargetDir", 'TARGETDIR=""', 750), ("SetDLLDirToTarget", 'DLLDIR=""', 751), ("SetLauncherDirToTarget", 'LAUNCHERDIR=""', 752), - ("CompilePyc", "COMPILEALL", 6800), - ("CompilePyo", "COMPILEALL", 6801), - ("CompileGrammar", "COMPILEALL", 6802), ]) ##################################################################### -- cgit v0.12 From 209c043c10f6c35d51714c6dfa4307e5e68f141d Mon Sep 17 00:00:00 2001 From: Larry Hastings Date: Sun, 23 Feb 2014 02:16:46 -0600 Subject: Regenerated pydoc topics for Python 3.4.0rc2. --- Lib/pydoc_data/topics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index 905c7c8..6e9a714 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Mon Feb 10 04:20:03 2014 +# Autogenerated by Sphinx on Sun Feb 23 02:12:38 2014 topics = {'assert': '\nThe "assert" statement\n**********************\n\nAssert statements are a convenient way to insert debugging assertions\ninto a program:\n\n assert_stmt ::= "assert" expression ["," expression]\n\nThe simple form, "assert expression", is equivalent to\n\n if __debug__:\n if not expression: raise AssertionError\n\nThe extended form, "assert expression1, expression2", is equivalent to\n\n if __debug__:\n if not expression1: raise AssertionError(expression2)\n\nThese equivalences assume that "__debug__" and "AssertionError" refer\nto the built-in variables with those names. In the current\nimplementation, the built-in variable "__debug__" is "True" under\nnormal circumstances, "False" when optimization is requested (command\nline option -O). The current code generator emits no code for an\nassert statement when optimization is requested at compile time. 
Note\nthat it is unnecessary to include the source code for the expression\nthat failed in the error message; it will be displayed as part of the\nstack trace.\n\nAssignments to "__debug__" are illegal. The value for the built-in\nvariable is determined when the interpreter starts.\n', 'assignment': '\nAssignment statements\n*********************\n\nAssignment statements are used to (re)bind names to values and to\nmodify attributes or items of mutable objects:\n\n assignment_stmt ::= (target_list "=")+ (expression_list | yield_expression)\n target_list ::= target ("," target)* [","]\n target ::= identifier\n | "(" target_list ")"\n | "[" target_list "]"\n | attributeref\n | subscription\n | slicing\n | "*" target\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn assignment statement evaluates the expression list (remember that\nthis can be a single expression or a comma-separated list, the latter\nyielding a tuple) and assigns the single resulting object to each of\nthe target lists, from left to right.\n\nAssignment is defined recursively depending on the form of the target\n(list). When a target is part of a mutable object (an attribute\nreference, subscription or slicing), the mutable object must\nultimately perform the assignment and decide about its validity, and\nmay raise an exception if the assignment is unacceptable. The rules\nobserved by various types and the exceptions raised are given with the\ndefinition of the object types (see section *The standard type\nhierarchy*).\n\nAssignment of an object to a target list, optionally enclosed in\nparentheses or square brackets, is recursively defined as follows.\n\n* If the target list is a single target: The object is assigned to\n that target.\n\n* If the target list is a comma-separated list of targets: The\n object must be an iterable with the same number of items as there\n are targets in the target list, and the items are assigned, from\n left to right, to the corresponding targets.\n\n * If the target list contains one target prefixed with an\n asterisk, called a "starred" target: The object must be a sequence\n with at least as many items as there are targets in the target\n list, minus one. The first items of the sequence are assigned,\n from left to right, to the targets before the starred target. The\n final items of the sequence are assigned to the targets after the\n starred target. A list of the remaining items in the sequence is\n then assigned to the starred target (the list can be empty).\n\n * Else: The object must be a sequence with the same number of\n items as there are targets in the target list, and the items are\n assigned, from left to right, to the corresponding targets.\n\nAssignment of an object to a single target is recursively defined as\nfollows.\n\n* If the target is an identifier (name):\n\n * If the name does not occur in a "global" or "nonlocal" statement\n in the current code block: the name is bound to the object in the\n current local namespace.\n\n * Otherwise: the name is bound to the object in the global\n namespace or the outer namespace determined by "nonlocal",\n respectively.\n\n The name is rebound if it was already bound. 
This may cause the\n reference count for the object previously bound to the name to reach\n zero, causing the object to be deallocated and its destructor (if it\n has one) to be called.\n\n* If the target is a target list enclosed in parentheses or in\n square brackets: The object must be an iterable with the same number\n of items as there are targets in the target list, and its items are\n assigned, from left to right, to the corresponding targets.\n\n* If the target is an attribute reference: The primary expression in\n the reference is evaluated. It should yield an object with\n assignable attributes; if this is not the case, "TypeError" is\n raised. That object is then asked to assign the assigned object to\n the given attribute; if it cannot perform the assignment, it raises\n an exception (usually but not necessarily "AttributeError").\n\n Note: If the object is a class instance and the attribute reference\n occurs on both sides of the assignment operator, the RHS expression,\n "a.x" can access either an instance attribute or (if no instance\n attribute exists) a class attribute. The LHS target "a.x" is always\n set as an instance attribute, creating it if necessary. Thus, the\n two occurrences of "a.x" do not necessarily refer to the same\n attribute: if the RHS expression refers to a class attribute, the\n LHS creates a new instance attribute as the target of the\n assignment:\n\n class Cls:\n x = 3 # class variable\n inst = Cls()\n inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x as 3\n\n This description does not necessarily apply to descriptor\n attributes, such as properties created with "property()".\n\n* If the target is a subscription: The primary expression in the\n reference is evaluated. It should yield either a mutable sequence\n object (such as a list) or a mapping object (such as a dictionary).\n Next, the subscript expression is evaluated.\n\n If the primary is a mutable sequence object (such as a list), the\n subscript must yield an integer. If it is negative, the sequence\'s\n length is added to it. The resulting value must be a nonnegative\n integer less than the sequence\'s length, and the sequence is asked\n to assign the assigned object to its item with that index. If the\n index is out of range, "IndexError" is raised (assignment to a\n subscripted sequence cannot add new items to a list).\n\n If the primary is a mapping object (such as a dictionary), the\n subscript must have a type compatible with the mapping\'s key type,\n and the mapping is then asked to create a key/datum pair which maps\n the subscript to the assigned object. This can either replace an\n existing key/value pair with the same key value, or insert a new\n key/value pair (if no key with the same value existed).\n\n For user-defined objects, the "__setitem__()" method is called with\n appropriate arguments.\n\n* If the target is a slicing: The primary expression in the\n reference is evaluated. It should yield a mutable sequence object\n (such as a list). The assigned object should be a sequence object\n of the same type. Next, the lower and upper bound expressions are\n evaluated, insofar they are present; defaults are zero and the\n sequence\'s length. The bounds should evaluate to integers. If\n either bound is negative, the sequence\'s length is added to it. The\n resulting bounds are clipped to lie between zero and the sequence\'s\n length, inclusive. Finally, the sequence object is asked to replace\n the slice with the items of the assigned sequence. 
The length of\n the slice may be different from the length of the assigned sequence,\n thus changing the length of the target sequence, if the object\n allows it.\n\n**CPython implementation detail:** In the current implementation, the\nsyntax for targets is taken to be the same as for expressions, and\ninvalid syntax is rejected during the code generation phase, causing\nless detailed error messages.\n\nWARNING: Although the definition of assignment implies that overlaps\nbetween the left-hand side and the right-hand side are \'safe\' (for\nexample "a, b = b, a" swaps two variables), overlaps *within* the\ncollection of assigned-to variables are not safe! For instance, the\nfollowing program prints "[0, 2]":\n\n x = [0, 1]\n i = 0\n i, x[i] = 1, 2\n print(x)\n\nSee also: **PEP 3132** - Extended Iterable Unpacking\n\n The specification for the "*target" feature.\n\n\nAugmented assignment statements\n===============================\n\nAugmented assignment is the combination, in a single statement, of a\nbinary operation and an assignment statement:\n\n augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression)\n augtarget ::= identifier | attributeref | subscription | slicing\n augop ::= "+=" | "-=" | "*=" | "/=" | "//=" | "%=" | "**="\n | ">>=" | "<<=" | "&=" | "^=" | "|="\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn augmented assignment evaluates the target (which, unlike normal\nassignment statements, cannot be an unpacking) and the expression\nlist, performs the binary operation specific to the type of assignment\non the two operands, and assigns the result to the original target.\nThe target is only evaluated once.\n\nAn augmented assignment expression like "x += 1" can be rewritten as\n"x = x + 1" to achieve a similar, but not exactly equal effect. In the\naugmented version, "x" is only evaluated once. Also, when possible,\nthe actual operation is performed *in-place*, meaning that rather than\ncreating a new object and assigning that to the target, the old object\nis modified instead.\n\nWith the exception of assigning to tuples and multiple targets in a\nsingle statement, the assignment done by augmented assignment\nstatements is handled the same way as normal assignments. Similarly,\nwith the exception of the possible *in-place* behavior, the binary\noperation performed by augmented assignment is the same as the normal\nbinary operations.\n\nFor targets which are attribute references, the same *caveat about\nclass and instance attributes* applies as for regular assignments.\n', 'atom-identifiers': '\nIdentifiers (Names)\n*******************\n\nAn identifier occurring as an atom is a name. See section\n*Identifiers and keywords* for lexical definition and section *Naming\nand binding* for documentation of naming and binding.\n\nWhen the name is bound to an object, evaluation of the atom yields\nthat object. When a name is not bound, an attempt to evaluate it\nraises a "NameError" exception.\n\n**Private name mangling:** When an identifier that textually occurs in\na class definition begins with two or more underscore characters and\ndoes not end in two or more underscores, it is considered a *private\nname* of that class. Private names are transformed to a longer form\nbefore code is generated for them. The transformation inserts the\nclass name, with leading underscores removed and a single underscore\ninserted, in front of the name. 
For example, the identifier "__spam"\noccurring in a class named "Ham" will be transformed to "_Ham__spam".\nThis transformation is independent of the syntactical context in which\nthe identifier is used. If the transformed name is extremely long\n(longer than 255 characters), implementation defined truncation may\nhappen. If the class name consists only of underscores, no\ntransformation is done.\n', -- cgit v0.12 From f34177a6f9fd7d8afa38f20dd18721a34f808f96 Mon Sep 17 00:00:00 2001 From: Larry Hastings Date: Sun, 23 Feb 2014 02:18:24 -0600 Subject: Version bump for Python 3.4.0rc2. --- Include/patchlevel.h | 4 ++-- Lib/distutils/__init__.py | 2 +- Lib/idlelib/idlever.py | 2 +- Misc/RPM/python-3.4.spec | 2 +- README | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Include/patchlevel.h b/Include/patchlevel.h index 4e4265c..b200276 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -20,10 +20,10 @@ #define PY_MINOR_VERSION 4 #define PY_MICRO_VERSION 0 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_GAMMA -#define PY_RELEASE_SERIAL 1 +#define PY_RELEASE_SERIAL 2 /* Version as a string */ -#define PY_VERSION "3.4.0rc1+" +#define PY_VERSION "3.4.0rc2" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py index 4c716e2..c03185b 100644 --- a/Lib/distutils/__init__.py +++ b/Lib/distutils/__init__.py @@ -13,5 +13,5 @@ used from a setup script as # Updated automatically by the Python release process. # #--start constants-- -__version__ = "3.4.0rc1" +__version__ = "3.4.0rc2" #--end constants-- diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py index 8646baa..38dbbcb 100644 --- a/Lib/idlelib/idlever.py +++ b/Lib/idlelib/idlever.py @@ -1 +1 @@ -IDLE_VERSION = "3.4.0rc1" +IDLE_VERSION = "3.4.0rc2" diff --git a/Misc/RPM/python-3.4.spec b/Misc/RPM/python-3.4.spec index a911a7e..4c31a23 100644 --- a/Misc/RPM/python-3.4.spec +++ b/Misc/RPM/python-3.4.spec @@ -39,7 +39,7 @@ %define name python #--start constants-- -%define version 3.4.0rc1 +%define version 3.4.0rc2 %define libvers 3.4 #--end constants-- %define release 1pydotorg diff --git a/README b/README index 790b947..d35ff87 100644 --- a/README +++ b/README @@ -1,4 +1,4 @@ -This is Python version 3.4.0 release candidate 1 +This is Python version 3.4.0 release candidate 2 ================================================ Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, -- cgit v0.12 From 761412246751bbf32bfa6315a9888d6cbf322ee5 Mon Sep 17 00:00:00 2001 From: Larry Hastings Date: Sun, 23 Feb 2014 02:18:41 -0600 Subject: Added tag v3.4.0rc2 for changeset a300712ed38c --- .hgtags | 1 + 1 file changed, 1 insertion(+) diff --git a/.hgtags b/.hgtags index 2d5c42b..adeecd1 100644 --- a/.hgtags +++ b/.hgtags @@ -128,3 +128,4 @@ e245b0d7209bb6d0e19316e1e2af1aa9c2139104 v3.4.0a4 ba32913eb13ec545a46dd0ce18035b6c416f0d78 v3.4.0b2 a97ce3ecc96af79bd2e1ac66ce48d9138e0ca749 v3.4.0b3 5e088cea8660677969113741c1313d570d977e02 v3.4.0rc1 +a300712ed38c9a242b736c44e806caea25a6dc05 v3.4.0rc2 -- cgit v0.12 From 013bb91aa304062bb65fe8951e2d263f2065ee56 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Mon, 10 Feb 2014 18:21:34 +0200 Subject: Issue #19255: The builtins module is restored to initial value before cleaning other modules. The sys and builtins modules are cleaned last. 
--- Include/moduleobject.h | 1 + Include/pystate.h | 2 +- Lib/test/test_builtin.py | 29 ++++++++++++++++ Misc/NEWS | 3 ++ Objects/moduleobject.c | 13 +++++--- Python/import.c | 87 +++++++++++++++++++++++++++++------------------- Python/pystate.c | 2 ++ 7 files changed, 97 insertions(+), 40 deletions(-) diff --git a/Include/moduleobject.h b/Include/moduleobject.h index 8013dd9..f119364 100644 --- a/Include/moduleobject.h +++ b/Include/moduleobject.h @@ -25,6 +25,7 @@ PyAPI_FUNC(const char *) PyModule_GetFilename(PyObject *); PyAPI_FUNC(PyObject *) PyModule_GetFilenameObject(PyObject *); #ifndef Py_LIMITED_API PyAPI_FUNC(void) _PyModule_Clear(PyObject *); +PyAPI_FUNC(void) _PyModule_ClearDict(PyObject *); #endif PyAPI_FUNC(struct PyModuleDef*) PyModule_GetDef(PyObject*); PyAPI_FUNC(void*) PyModule_GetState(PyObject*); diff --git a/Include/pystate.h b/Include/pystate.h index 1f3465f..4992c22 100644 --- a/Include/pystate.h +++ b/Include/pystate.h @@ -33,7 +33,6 @@ typedef struct _is { int codecs_initialized; int fscodec_initialized; - #ifdef HAVE_DLOPEN int dlopenflags; #endif @@ -41,6 +40,7 @@ typedef struct _is { int tscdump; #endif + PyObject *builtins_copy; } PyInterpreterState; #endif diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index 6843066..c078b44 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -16,6 +16,7 @@ import unittest import warnings from operator import neg from test.support import TESTFN, unlink, run_unittest, check_warnings +from test.script_helper import assert_python_ok try: import pty, signal except ImportError: @@ -1592,6 +1593,34 @@ class TestSorted(unittest.TestCase): data = 'The quick Brown fox Jumped over The lazy Dog'.split() self.assertRaises(TypeError, sorted, data, None, lambda x,y: 0) + +class ShutdownTest(unittest.TestCase): + + def test_cleanup(self): + # Issue #19255: builtins are still available at shutdown + code = """if 1: + import builtins + import sys + + class C: + def __del__(self): + print("before") + # Check that builtins still exist + len(()) + print("after") + + c = C() + # Make this module survive until builtins and sys are cleaned + builtins.here = sys.modules[__name__] + sys.here = sys.modules[__name__] + # Create a reference loop so that this module needs to go + # through a GC phase. + here = sys.modules[__name__] + """ + rc, out, err = assert_python_ok("-c", code) + self.assertEqual(["before", "after"], out.decode().splitlines()) + + def load_tests(loader, tests, pattern): from doctest import DocTestSuite tests.addTest(DocTestSuite(builtins)) diff --git a/Misc/NEWS b/Misc/NEWS index 142a82a..f642353 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -89,6 +89,9 @@ Release date: 2014-02-10 Core and Builtins ----------------- +- Issue #19255: The builtins module is restored to initial value before + cleaning other modules. The sys and builtins modules are cleaned last. + - Issue #20437: Fixed 22 potential bugs when deleting objects references. 
- Issue #20500: Displaying an exception at interpreter shutdown no longer diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c index d59475e..6821710 100644 --- a/Objects/moduleobject.c +++ b/Objects/moduleobject.c @@ -299,6 +299,14 @@ PyModule_GetState(PyObject* m) void _PyModule_Clear(PyObject *m) { + PyObject *d = ((PyModuleObject *)m)->md_dict; + if (d != NULL) + _PyModule_ClearDict(d); +} + +void +_PyModule_ClearDict(PyObject *d) +{ /* To make the execution order of destructors for global objects a bit more predictable, we first zap all objects whose name starts with a single underscore, before we clear @@ -308,11 +316,6 @@ _PyModule_Clear(PyObject *m) Py_ssize_t pos; PyObject *key, *value; - PyObject *d; - - d = ((PyModuleObject *)m)->md_dict; - if (d == NULL) - return; /* First, clear only names starting with a single underscore */ pos = 0; diff --git a/Python/import.c b/Python/import.c index d0115e4..207dbce 100644 --- a/Python/import.c +++ b/Python/import.c @@ -49,9 +49,13 @@ class fs_unicode_converter(CConverter): void _PyImport_Init(void) { + PyInterpreterState *interp = PyThreadState_Get()->interp; initstr = PyUnicode_InternFromString("__init__"); if (initstr == NULL) Py_FatalError("Can't initialize import variables"); + interp->builtins_copy = PyDict_Copy(interp->builtins); + if (interp->builtins_copy == NULL) + Py_FatalError("Can't backup builtins dict"); } void @@ -397,8 +401,10 @@ PyImport_Cleanup(void) PyObject *key, *value, *dict; PyInterpreterState *interp = PyThreadState_GET()->interp; PyObject *modules = interp->modules; - PyObject *builtins = interp->builtins; + PyObject *builtins_mod = NULL; + PyObject *sys_mod = NULL; PyObject *weaklist = NULL; + char **p; if (modules == NULL) return; /* Already done */ @@ -411,31 +417,22 @@ PyImport_Cleanup(void) /* XXX Perhaps these precautions are obsolete. Who knows? */ - value = PyDict_GetItemString(modules, "builtins"); - if (value != NULL && PyModule_Check(value)) { - dict = PyModule_GetDict(value); + if (Py_VerboseFlag) + PySys_WriteStderr("# clear builtins._\n"); + PyDict_SetItemString(interp->builtins, "_", Py_None); + + for (p = sys_deletes; *p != NULL; p++) { if (Py_VerboseFlag) - PySys_WriteStderr("# clear builtins._\n"); - PyDict_SetItemString(dict, "_", Py_None); - } - value = PyDict_GetItemString(modules, "sys"); - if (value != NULL && PyModule_Check(value)) { - char **p; - PyObject *v; - dict = PyModule_GetDict(value); - for (p = sys_deletes; *p != NULL; p++) { - if (Py_VerboseFlag) - PySys_WriteStderr("# clear sys.%s\n", *p); - PyDict_SetItemString(dict, *p, Py_None); - } - for (p = sys_files; *p != NULL; p+=2) { - if (Py_VerboseFlag) - PySys_WriteStderr("# restore sys.%s\n", *p); - v = PyDict_GetItemString(dict, *(p+1)); - if (v == NULL) - v = Py_None; - PyDict_SetItemString(dict, *p, v); - } + PySys_WriteStderr("# clear sys.%s\n", *p); + PyDict_SetItemString(interp->sysdict, *p, Py_None); + } + for (p = sys_files; *p != NULL; p+=2) { + if (Py_VerboseFlag) + PySys_WriteStderr("# restore sys.%s\n", *p); + value = PyDict_GetItemString(interp->sysdict, *(p+1)); + if (value == NULL) + value = Py_None; + PyDict_SetItemString(interp->sysdict, *p, value); } /* We prepare a list which will receive (name, weakref) tuples of @@ -473,11 +470,15 @@ PyImport_Cleanup(void) /* Clear the modules dict. */ PyDict_Clear(modules); - /* Replace the interpreter's reference to builtins with an empty dict - (module globals still have a reference to the original builtins). 
*/ - builtins = interp->builtins; - interp->builtins = PyDict_New(); - Py_DECREF(builtins); + /* Restore the original builtins dict, to ensure that any + user data gets cleared. */ + dict = PyDict_Copy(interp->builtins); + if (dict == NULL) + PyErr_Clear(); + PyDict_Clear(interp->builtins); + if (PyDict_Update(interp->builtins, interp->builtins_copy)) + PyErr_Clear(); + Py_XDECREF(dict); /* Clear module dict copies stored in the interpreter state */ _PyState_ClearModules(); /* Collect references */ @@ -488,7 +489,15 @@ PyImport_Cleanup(void) /* Now, if there are any modules left alive, clear their globals to minimize potential leaks. All C extension modules actually end - up here, since they are kept alive in the interpreter state. */ + up here, since they are kept alive in the interpreter state. + + The special treatment of "builtins" here is because even + when it's not referenced as a module, its dictionary is + referenced by almost every module's __builtins__. Since + deleting a module clears its dictionary (even if there are + references left to it), we need to delete the "builtins" + module last. Likewise, we don't delete sys until the very + end because it is implicitly referenced (e.g. by print). */ if (weaklist != NULL) { Py_ssize_t i, n; n = PyList_GET_SIZE(weaklist); @@ -498,17 +507,27 @@ PyImport_Cleanup(void) PyObject *mod = PyWeakref_GET_OBJECT(PyTuple_GET_ITEM(tup, 1)); if (mod == Py_None) continue; - Py_INCREF(mod); assert(PyModule_Check(mod)); + dict = PyModule_GetDict(mod); + if (dict == interp->builtins || dict == interp->sysdict) + continue; + Py_INCREF(mod); if (Py_VerboseFlag && PyUnicode_Check(name)) - PySys_FormatStderr("# cleanup[3] wiping %U\n", - name, mod); + PySys_FormatStderr("# cleanup[3] wiping %U\n", name); _PyModule_Clear(mod); Py_DECREF(mod); } Py_DECREF(weaklist); } + /* Next, delete sys and builtins (in that order) */ + if (Py_VerboseFlag) + PySys_FormatStderr("# cleanup[3] wiping sys\n"); + _PyModule_ClearDict(interp->sysdict); + if (Py_VerboseFlag) + PySys_FormatStderr("# cleanup[3] wiping builtins\n"); + _PyModule_ClearDict(interp->builtins); + /* Clear and delete the modules directory. Actual modules will still be there only if imported during the execution of some destructor. */ diff --git a/Python/pystate.c b/Python/pystate.c index 19fceb7..2ac2fd5 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -72,6 +72,7 @@ PyInterpreterState_New(void) interp->modules_by_index = NULL; interp->sysdict = NULL; interp->builtins = NULL; + interp->builtins_copy = NULL; interp->tstate_head = NULL; interp->codec_search_path = NULL; interp->codec_search_cache = NULL; @@ -115,6 +116,7 @@ PyInterpreterState_Clear(PyInterpreterState *interp) Py_CLEAR(interp->modules_by_index); Py_CLEAR(interp->sysdict); Py_CLEAR(interp->builtins); + Py_CLEAR(interp->builtins_copy); Py_CLEAR(interp->importlib); } -- cgit v0.12 From e0a976c09d379485380728be54d96b3213a10fa9 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Mon, 10 Feb 2014 19:09:19 +0200 Subject: Temporary silence test broken by issue19255. Remove unused variables. 
--- Lib/test/test_io.py | 3 ++- Python/import.c | 2 -- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index 23edee6..4182da3 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -2691,7 +2691,8 @@ class CTextIOWrapperTest(TextIOWrapperTest): class PyTextIOWrapperTest(TextIOWrapperTest): io = pyio - shutdown_error = "LookupError: unknown encoding: ascii" + #shutdown_error = "LookupError: unknown encoding: ascii" + shutdown_error = "TypeError: 'NoneType' object is not iterable" class IncrementalNewlineDecoderTest(unittest.TestCase): diff --git a/Python/import.c b/Python/import.c index 207dbce..7972f86 100644 --- a/Python/import.c +++ b/Python/import.c @@ -401,8 +401,6 @@ PyImport_Cleanup(void) PyObject *key, *value, *dict; PyInterpreterState *interp = PyThreadState_GET()->interp; PyObject *modules = interp->modules; - PyObject *builtins_mod = NULL; - PyObject *sys_mod = NULL; PyObject *weaklist = NULL; char **p; -- cgit v0.12 From 3b4ce690fdd7b3fba5a93b98f95129661338db4d Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Wed, 12 Feb 2014 09:55:45 +0200 Subject: Issue #19255: Clear error after failed PyDict_SetItem() on shutdown. This silences a Coverity complain. --- Objects/moduleobject.c | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c index 6821710..f509932 100644 --- a/Objects/moduleobject.c +++ b/Objects/moduleobject.c @@ -330,7 +330,8 @@ _PyModule_ClearDict(PyObject *d) else PyErr_Clear(); } - PyDict_SetItem(d, key, Py_None); + if (PyDict_SetItem(d, key, Py_None) != 0) + PyErr_Clear(); } } } @@ -349,7 +350,8 @@ _PyModule_ClearDict(PyObject *d) else PyErr_Clear(); } - PyDict_SetItem(d, key, Py_None); + if (PyDict_SetItem(d, key, Py_None) != 0) + PyErr_Clear(); } } } -- cgit v0.12 From f86b43382311370d099b1ba9995093a2e0252605 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Wed, 12 Feb 2014 12:40:22 +0200 Subject: Try to fix test_cleanup (issue #20599). --- Lib/test/test_builtin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index c078b44..8a307b9 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -1604,10 +1604,10 @@ class ShutdownTest(unittest.TestCase): class C: def __del__(self): - print("before") + print("before", flush=True) # Check that builtins still exist len(()) - print("after") + print("after", flush=True) c = C() # Make this module survive until builtins and sys are cleaned -- cgit v0.12 From 8ebe5324a072b871646ce67bb44d675acdff05fb Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 12 Feb 2014 18:27:55 +0100 Subject: Issue #20599: Force ASCII encoding for stdout in test_cleanup() of test_builtin On Windows, the codec of sys.stdout is implemented in Python. At exit, the codec may be unloaded before the destructor tries to write something to sys.stdout. 
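A short sketch (assuming CPython and the standard subprocess module; it mirrors what assert_python_ok does in the test patched below) of how PYTHONIOENCODING pins the child interpreter's stdout to a codec implemented in C, which is why that codec stays usable even late in interpreter shutdown:

    import os
    import subprocess
    import sys

    env = dict(os.environ, PYTHONIOENCODING="ascii")
    out = subprocess.check_output(
        [sys.executable, "-c", "import sys; print(sys.stdout.encoding)"],
        env=env)
    print(out.decode().strip())   # expected: ascii (implemented in C, not Python)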
--- Lib/test/test_builtin.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index 8a307b9..5c14de3 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -1604,10 +1604,10 @@ class ShutdownTest(unittest.TestCase): class C: def __del__(self): - print("before", flush=True) + print("before") # Check that builtins still exist len(()) - print("after", flush=True) + print("after") c = C() # Make this module survive until builtins and sys are cleaned @@ -1617,7 +1617,15 @@ class ShutdownTest(unittest.TestCase): # through a GC phase. here = sys.modules[__name__] """ - rc, out, err = assert_python_ok("-c", code) + # Issue #20599: Force ASCII encoding to get a codec implemented in C, + # otherwise the codec may be unloaded before C.__del__() is called, and + # so print("before") fails because the codec cannot be used to encode + # "before" to sys.stdout.encoding. For example, on Windows, + # sys.stdout.encoding is the OEM code page and these code pages are + # implemented in Python + rc, out, err = assert_python_ok("-c", code, + PYTHONIOENCODING="ascii", + __cleanenv=True) self.assertEqual(["before", "after"], out.decode().splitlines()) -- cgit v0.12 From d44cebb0f50b7c541691b9516c5c1717d2a510b1 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 13 Feb 2014 10:54:32 +0100 Subject: Issue #20599: Don't clear environment in test_cleanup() of test_builtin --- Lib/test/test_builtin.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index 5c14de3..b561a6f 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -1624,8 +1624,7 @@ class ShutdownTest(unittest.TestCase): # sys.stdout.encoding is the OEM code page and these code pages are # implemented in Python rc, out, err = assert_python_ok("-c", code, - PYTHONIOENCODING="ascii", - __cleanenv=True) + PYTHONIOENCODING="ascii") self.assertEqual(["before", "after"], out.decode().splitlines()) -- cgit v0.12 From 26dd0ff075bcfcdc6eebe584dce55dfd2f1b1537 Mon Sep 17 00:00:00 2001 From: Brett Cannon Date: Wed, 26 Feb 2014 18:26:49 -0500 Subject: Issue #20763: Fix importlib.machinery.PathFinder to support PathEntryFinder instances which only define find_module(). Reported by Yukihiro Nakadaira. --- Lib/importlib/_bootstrap.py | 2 +- Lib/test/test_importlib/import_/test_path.py | 24 ++++++++++++++++++++++++ Python/importlib.h | 2 +- 3 files changed, 26 insertions(+), 2 deletions(-) diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py index 4864024..beaa9b3 100644 --- a/Lib/importlib/_bootstrap.py +++ b/Lib/importlib/_bootstrap.py @@ -1869,7 +1869,7 @@ class PathFinder: loader, portions = finder.find_loader(fullname) else: loader = finder.find_module(fullname) - portions = None + portions = [] if loader is not None: return spec_from_loader(fullname, loader) spec = ModuleSpec(fullname, None) diff --git a/Lib/test/test_importlib/import_/test_path.py b/Lib/test/test_importlib/import_/test_path.py index 713e754..1274f8c 100644 --- a/Lib/test/test_importlib/import_/test_path.py +++ b/Lib/test/test_importlib/import_/test_path.py @@ -116,5 +116,29 @@ Frozen_FinderTests, Source_FinderTests = util.test_both( FinderTests, importlib=importlib, machinery=machinery) +class PathEntryFinderTests: + + def test_finder_with_failing_find_module(self): + # PathEntryFinder with find_module() defined should work. + # Issue #20763. 
+ class Finder: + path_location = 'test_finder_with_find_module' + def __init__(self, path): + if path != self.path_location: + raise ImportError + + @staticmethod + def find_module(fullname): + return None + + + with util.import_state(path=[Finder.path_location]+sys.path[:], + path_hooks=[Finder]): + self.machinery.PathFinder.find_spec('importlib') + +Frozen_PEFTests, Source_PEFTests = util.test_both( + PathEntryFinderTests, machinery=machinery) + + if __name__ == '__main__': unittest.main() diff --git a/Python/importlib.h b/Python/importlib.h index f55c450..2644b1d 100644 --- a/Python/importlib.h +++ b/Python/importlib.h @@ -3365,7 +3365,7 @@ const unsigned char _Py_M__importlib[] = { 116,0,0,124,2,0,100,1,0,131,2,0,114,39,0,124, 2,0,106,1,0,124,1,0,131,1,0,92,2,0,125,3, 0,125,4,0,110,21,0,124,2,0,106,2,0,124,1,0, - 131,1,0,125,3,0,100,0,0,125,4,0,124,3,0,100, + 131,1,0,125,3,0,103,0,0,125,4,0,124,3,0,100, 0,0,107,9,0,114,85,0,116,3,0,124,1,0,124,3, 0,131,2,0,83,116,4,0,124,1,0,100,0,0,131,2, 0,125,5,0,124,4,0,124,5,0,95,5,0,124,5,0, -- cgit v0.12 From a46cf12e9956dbb0fa84cc1d7e8b81d4d2f7c55a Mon Sep 17 00:00:00 2001 From: Nick Coghlan Date: Fri, 28 Feb 2014 23:35:05 +1000 Subject: Close #20757: return success for skipped pip uninstall The 3.4rc2 Windows uninstaller would fail if pip had been updated to a version that didn't match the version installed by ensurepip. This skip is no longer treated as an error, so an updated pip ends up being handled like any other pip installed package and is left alone by the CPython uninstaller. --- Lib/ensurepip/__init__.py | 5 +++-- Lib/test/test_ensurepip.py | 6 ++++-- Misc/NEWS | 14 ++++++++++++++ 3 files changed, 21 insertions(+), 4 deletions(-) diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index f584bdc..7cf6a4b 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -128,9 +128,10 @@ def _uninstall_helper(*, verbosity=0): # If the pip version doesn't match the bundled one, leave it alone if pip.__version__ != _PIP_VERSION: - msg = ("ensurepip will only uninstall a matching pip " + msg = ("ensurepip will only uninstall a matching version " "({!r} installed, {!r} bundled)") - raise RuntimeError(msg.format(pip.__version__, _PIP_VERSION)) + print(msg.format(pip.__version__, _PIP_VERSION), file=sys.stderr) + return _require_ssl_for_pip() _disable_pip_configuration_settings() diff --git a/Lib/test/test_ensurepip.py b/Lib/test/test_ensurepip.py index 8644a65..70b4153 100644 --- a/Lib/test/test_ensurepip.py +++ b/Lib/test/test_ensurepip.py @@ -196,10 +196,12 @@ class TestUninstall(EnsurepipMixin, unittest.TestCase): ensurepip._uninstall_helper() self.run_pip.assert_not_called() - def test_uninstall_fails_with_wrong_version(self): + def test_uninstall_skipped_with_warning_for_wrong_version(self): with fake_pip("not a valid version"): - with self.assertRaises(RuntimeError): + with test.support.captured_stderr() as stderr: ensurepip._uninstall_helper() + warning = stderr.getvalue().strip() + self.assertIn("only uninstall a matching version", warning) self.run_pip.assert_not_called() diff --git a/Misc/NEWS b/Misc/NEWS index f642353..554de26 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -2,6 +2,20 @@ Python News +++++++++++ +What's New in Python 3.4.0 release candidate 3? 
+=============================================== + +Release date: 2014-03-09 + +Build +----- + +- Issue #20757: The ensurepip helper for the Windows uninstaller now skips + uninstalling pip (rather than failing) if the user has updated pip to a + different version from the one bundled with ensurepip. + + + What's New in Python 3.4.0 release candidate 2? =============================================== -- cgit v0.12 From 07d843a101726b01a2dbdb7183ebc8fbc8a5f6e4 Mon Sep 17 00:00:00 2001 From: Nick Coghlan Date: Fri, 28 Feb 2014 23:37:35 +1000 Subject: Close #20568: install unversioned pip command on Windows --- Misc/NEWS | 3 +++ Tools/msi/msi.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/Misc/NEWS b/Misc/NEWS index 554de26..343a0b7 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Release date: 2014-03-09 Build ----- +- Issue #20568: The Windows installer now installs the unversioned ``pip`` + command in addition to the versioned ``pip3`` and ``pip3.4`` commands. + - Issue #20757: The ensurepip helper for the Windows uninstaller now skips uninstalling pip (rather than failing) if the user has updated pip to a different version from the one bundled with ensurepip. diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py index 8409b81..038050b 100644 --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -420,7 +420,7 @@ def add_ui(db): compileargs = r'-Wi "[TARGETDIR]Lib\compileall.py" -f -x "bad_coding|badsyntax|site-packages|py2_|lib2to3\\tests|venv\\scripts" "[TARGETDIR]Lib"' lib2to3args = r'-c "import lib2to3.pygram, lib2to3.patcomp;lib2to3.patcomp.PatternCompiler()"' - updatepipargs = r'-m ensurepip -U' + updatepipargs = r'-m ensurepip -U --default-install' removepipargs = r'-m ensurepip._uninstall' # See "CustomAction Table" add_data(db, "CustomAction", [ -- cgit v0.12 From c1195c7ae7d24f7a965f984bb064b4a9634bc163 Mon Sep 17 00:00:00 2001 From: Ned Deily Date: Sat, 1 Mar 2014 14:00:46 -0800 Subject: Issue #20465: Update OS X installer build to use SQLite 3.8.0.1. --- Mac/BuildScript/build-installer.py | 6 +++--- Misc/NEWS | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index 0aafcd0..7c1d90f 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -283,9 +283,9 @@ def library_recipes(): ), ), dict( - name="SQLite 3.8.3", - url="http://www.sqlite.org/2014/sqlite-autoconf-3080300.tar.gz", - checksum='11572878dc0ac74ae370367a464ab5cf', + name="SQLite 3.8.3.1", + url="http://www.sqlite.org/2014/sqlite-autoconf-3080301.tar.gz", + checksum='509ff98d8dc9729b618b7e96612079c6', extra_cflags=('-Os ' '-DSQLITE_ENABLE_FTS4 ' '-DSQLITE_ENABLE_FTS3_PARENTHESIS ' diff --git a/Misc/NEWS b/Misc/NEWS index 343a0b7..e8bc1a6 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -17,6 +17,7 @@ Build uninstalling pip (rather than failing) if the user has updated pip to a different version from the one bundled with ensurepip. +- Issue #20465: Update OS X installer build to use SQLite 3.8.0.1. What's New in Python 3.4.0 release candidate 2? 
-- cgit v0.12 From 90c5c8ad5c78865e0177ab40fd83fa5345b19a79 Mon Sep 17 00:00:00 2001 From: Ned Deily Date: Sat, 1 Mar 2014 14:04:48 -0800 Subject: Issue #20465: fix NEWS typo (it's 3.8.3.1) --- Misc/NEWS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Misc/NEWS b/Misc/NEWS index e8bc1a6..a3ca8db 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -17,7 +17,7 @@ Build uninstalling pip (rather than failing) if the user has updated pip to a different version from the one bundled with ensurepip. -- Issue #20465: Update OS X installer build to use SQLite 3.8.0.1. +- Issue #20465: Update OS X installer build to use SQLite 3.8.3.1. What's New in Python 3.4.0 release candidate 2? -- cgit v0.12 From d930d85f120d67f7bab3f69f881bb1c65f462ea7 Mon Sep 17 00:00:00 2001 From: Yury Selivanov Date: Sun, 2 Mar 2014 12:25:27 -0500 Subject: Issue #20786: Fix signatures for dict.__delitem__ and property.__delete__ --- Lib/test/test_inspect.py | 5 +++++ Misc/NEWS | 7 ++++++- Objects/typeobject.c | 4 ++-- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py index 0dc7451..5c6ae39 100644 --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -1764,6 +1764,11 @@ class TestSignatureObject(unittest.TestCase): __call__ = type test_callable(ThisWorksNow()) + # Regression test for issue #20786 + test_unbound_method(dict.__delitem__) + test_unbound_method(property.__delete__) + + @cpython_only @unittest.skipIf(MISSING_C_DOCSTRINGS, "Signature information for builtins requires docstrings") diff --git a/Misc/NEWS b/Misc/NEWS index a3ca8db..172bb73 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -4,9 +4,14 @@ Python News What's New in Python 3.4.0 release candidate 3? =============================================== - Release date: 2014-03-09 +Core and Builtins +----------------- + +- Issue #20786: Fix signatures for dict.__delitem__ and + property.__delete__ builtins. + Build ----- diff --git a/Objects/typeobject.c b/Objects/typeobject.c index f58960d..49385e2 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -6189,7 +6189,7 @@ static slotdef slotdefs[] = { "__set__($self, instance, value, /)\n--\n\nSet an attribute of instance to value."), TPSLOT("__delete__", tp_descr_set, slot_tp_descr_set, wrap_descr_delete, - "__delete__(instance, /)\n--\n\nDelete an attribute of instance."), + "__delete__($self, instance, /)\n--\n\nDelete an attribute of instance."), FLSLOT("__init__", tp_init, slot_tp_init, (wrapperfunc)wrap_init, "__init__($self, /, *args, **kwargs)\n--\n\n" "Initialize self. See help(type(self)) for accurate signature.", @@ -6286,7 +6286,7 @@ static slotdef slotdefs[] = { "__setitem__($self, key, value, /)\n--\n\nSet self[key] to value."), MPSLOT("__delitem__", mp_ass_subscript, slot_mp_ass_subscript, wrap_delitem, - "__delitem__(key)\n--\n\nDelete self[key]."), + "__delitem__($self, key, /)\n--\n\nDelete self[key]."), SQSLOT("__len__", sq_length, slot_sq_length, wrap_lenfunc, "__len__($self, /)\n--\n\nReturn len(self)."), -- cgit v0.12 From 75512c4556da5a8215a99c57e9a056f191ad9c99 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20v=2E=20L=C3=B6wis?= Date: Sun, 2 Mar 2014 19:15:47 +0100 Subject: Issue #20568: Fix typo in pip option. 
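For the dict.__delitem__ and property.__delete__ signature fix above (issue #20786), a quick way to see the effect is the sketch below; it assumes a CPython build with docstrings enabled, since inspect derives these signatures from the docstring text:

    import inspect

    # Both slot wrappers now report proper self/positional-only signatures.
    print(inspect.signature(dict.__delitem__))      # (self, key, /)
    print(inspect.signature(property.__delete__))   # (self, instance, /)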
--- Tools/msi/msi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py index 038050b..f2b7148 100644 --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -420,7 +420,7 @@ def add_ui(db): compileargs = r'-Wi "[TARGETDIR]Lib\compileall.py" -f -x "bad_coding|badsyntax|site-packages|py2_|lib2to3\\tests|venv\\scripts" "[TARGETDIR]Lib"' lib2to3args = r'-c "import lib2to3.pygram, lib2to3.patcomp;lib2to3.patcomp.PatternCompiler()"' - updatepipargs = r'-m ensurepip -U --default-install' + updatepipargs = r'-m ensurepip -U --default-pip' removepipargs = r'-m ensurepip._uninstall' # See "CustomAction Table" add_data(db, "CustomAction", [ -- cgit v0.12 From 582538bde2c212ddc40a58764a1bd4eeb54255ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20v=2E=20L=C3=B6wis?= Date: Sun, 2 Mar 2014 19:29:19 +0100 Subject: Issue #20748: Uninstalling pip does not leave behind the pyc of the uninstaller anymore. --- Misc/NEWS | 3 +++ Tools/msi/msi.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/Misc/NEWS b/Misc/NEWS index 172bb73..054341d 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -15,6 +15,9 @@ Core and Builtins Build ----- +- Issue #20748: Uninstalling pip does not leave behind the pyc of + the uninstaller anymore. + - Issue #20568: The Windows installer now installs the unversioned ``pip`` command in addition to the versioned ``pip3`` and ``pip3.4`` commands. diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py index f2b7148..2ba72e4 100644 --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -421,7 +421,7 @@ def add_ui(db): compileargs = r'-Wi "[TARGETDIR]Lib\compileall.py" -f -x "bad_coding|badsyntax|site-packages|py2_|lib2to3\\tests|venv\\scripts" "[TARGETDIR]Lib"' lib2to3args = r'-c "import lib2to3.pygram, lib2to3.patcomp;lib2to3.patcomp.PatternCompiler()"' updatepipargs = r'-m ensurepip -U --default-pip' - removepipargs = r'-m ensurepip._uninstall' + removepipargs = r'-B -m ensurepip._uninstall' # See "CustomAction Table" add_data(db, "CustomAction", [ # msidbCustomActionTypeFirstSequence + msidbCustomActionTypeTextData + msidbCustomActionTypeProperty -- cgit v0.12 From a663069b5cf9d53777454e31404de51c7651a010 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20v=2E=20L=C3=B6wis?= Date: Sun, 2 Mar 2014 19:42:50 +0100 Subject: Issue #20465: Update Windows installer to SQLite 3.8.3.1. --- Misc/NEWS | 3 ++- PC/VS9.0/pyproject.vsprops | 2 +- PCbuild/pyproject.props | 2 +- PCbuild/readme.txt | 2 +- Tools/buildbot/external-common.bat | 6 +++--- 5 files changed, 8 insertions(+), 7 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS index 054341d..b9c713c 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -25,7 +25,8 @@ Build uninstalling pip (rather than failing) if the user has updated pip to a different version from the one bundled with ensurepip. -- Issue #20465: Update OS X installer build to use SQLite 3.8.3.1. +- Issue #20465: Update OS X and Windows installer builds to use + SQLite 3.8.3.1. What's New in Python 3.4.0 release candidate 2? diff --git a/PC/VS9.0/pyproject.vsprops b/PC/VS9.0/pyproject.vsprops index ed5464d..88af242 100644 --- a/PC/VS9.0/pyproject.vsprops +++ b/PC/VS9.0/pyproject.vsprops @@ -50,7 +50,7 @@ /> $(OutDir)python$(PyDebugExt).exe $(OutDir)kill_python$(PyDebugExt).exe ..\.. 
- $(externalsDir)\sqlite-3.8.1 + $(externalsDir)\sqlite-3.8.3.1 $(externalsDir)\bzip2-1.0.6 $(externalsDir)\xz-5.0.5 $(externalsDir)\openssl-1.0.1e diff --git a/PCbuild/readme.txt b/PCbuild/readme.txt index d5c69a7..619f6a8 100644 --- a/PCbuild/readme.txt +++ b/PCbuild/readme.txt @@ -206,7 +206,7 @@ _ssl The ssl sub-project does not have the ability to clean the OpenSSL build; if you need to rebuild, you'll have to clean it by hand. _sqlite3 - Wraps SQLite 3.8.1, which is itself built by sqlite3.vcxproj + Wraps SQLite 3.8.3.1, which is itself built by sqlite3.vcxproj Homepage: http://www.sqlite.org/ _tkinter diff --git a/Tools/buildbot/external-common.bat b/Tools/buildbot/external-common.bat index 244abd2..c63e889 100644 --- a/Tools/buildbot/external-common.bat +++ b/Tools/buildbot/external-common.bat @@ -37,9 +37,9 @@ if not exist tcl-8.6.1.0 ( if not exist tk-8.6.1.0 svn export http://svn.python.org/projects/external/tk-8.6.1.0 @rem sqlite3 -if not exist sqlite-3.8.1 ( - rd /s/q sqlite-source-3.7.12 - svn export http://svn.python.org/projects/external/sqlite-3.8.1 +if not exist sqlite-3.8.3.1 ( + rd /s/q sqlite-source-3.8.1 + svn export http://svn.python.org/projects/external/sqlite-3.8.3.1 ) @rem lzma -- cgit v0.12 From 0c3ea0942d75cfd2b484a3ae5d0a41f9fb1c8984 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20v=2E=20L=C3=B6wis?= Date: Sun, 2 Mar 2014 20:29:18 +0100 Subject: Issue #14512: Launch pydoc -b instead of pydocgui.pyw on Windows. --- Misc/NEWS | 2 ++ Tools/msi/msi.py | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS index b9c713c..8b68f00 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -15,6 +15,8 @@ Core and Builtins Build ----- +- Issue #14512: Launch pydoc -b instead of pydocgui.pyw on Windows. + - Issue #20748: Uninstalling pip does not leave behind the pyc of the uninstaller anymore. 
diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py index 2ba72e4..e399dde 100644 --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -1324,8 +1324,6 @@ def add_registry(db): tcltkshortcuts = [ ("IDLE", "MenuDir", "IDLE|IDLE (Python GUI)", "pythonw.exe", tcltk.id, r'"[TARGETDIR]Lib\idlelib\idle.pyw"', None, None, "python_icon.exe", 0, None, "TARGETDIR"), - ("PyDoc", "MenuDir", "MODDOCS|Module Docs", "pythonw.exe", - tcltk.id, r'"[TARGETDIR]Tools\scripts\pydocgui.pyw"', None, None, "python_icon.exe", 0, None, "TARGETDIR"), ] add_data(db, "Shortcut", tcltkshortcuts + @@ -1340,6 +1338,8 @@ def add_registry(db): ("Manual", "MenuDir", "MANUAL|Python Manuals", "REGISTRY.doc", "[#%s]" % docfile, None, None, None, None, None, None, None), + ("PyDoc", "MenuDir", "MODDOCS|Module Docs", "python.exe", + default_feature.id, r'-m pydoc -b', None, None, "python_icon.exe", 0, None, "TARGETDIR"), ("Uninstall", "MenuDir", "UNINST|Uninstall Python", "REGISTRY", SystemFolderName+"msiexec", "/x%s" % product_code, None, None, None, None, None, None), -- cgit v0.12 From adb2e2ab647eae7dd8077d617055872d52dfc6ad Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Mon, 3 Mar 2014 11:57:57 +0100 Subject: Close #20814: doc: Fix "Pretty top" example of tracemalloc --- Doc/library/tracemalloc.rst | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/Doc/library/tracemalloc.rst b/Doc/library/tracemalloc.rst index b79c5f6..c12ef87 100644 --- a/Doc/library/tracemalloc.rst +++ b/Doc/library/tracemalloc.rst @@ -200,8 +200,7 @@ ignoring ```` and ```` files:: # replace "/path/to/module/file.py" with "module/file.py" filename = os.sep.join(frame.filename.split(os.sep)[-2:]) print("#%s: %s:%s: %.1f KiB" - % (index, filename, frame.lineno, - stat.size / 1024)) + % (index, filename, frame.lineno, stat.size / 1024)) other = top_stats[limit:] if other: @@ -215,7 +214,7 @@ ignoring ```` and ```` files:: # ... run your application ... snapshot = tracemalloc.take_snapshot() - display_top(snapshot, 10) + display_top(snapshot) Example of output of the Python test suite:: -- cgit v0.12 From 2da950460d130420d6b8eed8c5a7798558a30f5f Mon Sep 17 00:00:00 2001 From: Ethan Furman Date: Mon, 3 Mar 2014 12:42:52 -0800 Subject: Close issue20653: improve functional API docs; minor code changes --- Doc/library/enum.rst | 34 ++++++++++++++++++++++++++++++++++ Lib/enum.py | 39 +++++++++++++++++++++++++++------------ 2 files changed, 61 insertions(+), 12 deletions(-) diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst index bce4966..e9dbff2 100644 --- a/Doc/library/enum.rst +++ b/Doc/library/enum.rst @@ -374,6 +374,9 @@ from that module. With pickle protocol version 4 it is possible to easily pickle enums nested in other classes. +It is possible to modify how Enum members are pickled/unpickled by defining +:meth:`__reduce_ex__` in the enumeration class. + Functional API -------------- @@ -420,6 +423,12 @@ The solution is to specify the module name explicitly as follows:: >>> Animals = Enum('Animals', 'ant bee cat dog', module=__name__) +.. warning:: + + If :param module: is not supplied, and Enum cannot determine what it is, + the new Enum members will not be unpicklable; to keep errors closer to + the source, pickling will be disabled. + The new pickle protocol 4 also, in some circumstances, relies on :attr:`__qualname__` being set to the location where pickle will be able to find the class. 
For example, if the class was made available in class @@ -427,6 +436,31 @@ SomeData in the global scope:: >>> Animals = Enum('Animals', 'ant bee cat dog', qualname='SomeData.Animals') +The complete signature is:: + + Enum(value='NewEnumName', names=<...>, *, module='...', qualname='...', type=) + +:param value: What the new Enum class will record as its name. + +:param names: The Enum members. This can be a whitespace or comma seperated +string:: + + 'red green blue', 'red,green,blue', 'red, green, blue' + +(values will start at 1), or an iterator of name, value pairs:: + + [('cyan', 4), ('magenta', 5), ('yellow', 6)] + +or a mapping:: + + {'chartruese': 7, 'sea_green': 11, 'rosemary': 42} + +:param module: name of module where new Enum class can be found. + +:param qualname: where in module new Enum class can be found. + +:param type: type to mix in to new Enum class. + Derived Enumerations -------------------- diff --git a/Lib/enum.py b/Lib/enum.py index c9bd7c0..844a956 100644 --- a/Lib/enum.py +++ b/Lib/enum.py @@ -115,14 +115,21 @@ class EnumMeta(type): # Reverse value->name map for hashable values. enum_class._value2member_map_ = {} - # check for a supported pickle protocols, and if not present sabotage - # pickling, since it won't work anyway. - # if new class implements its own __reduce_ex__, do not sabotage - if classdict.get('__reduce_ex__') is None: + # If a custom type is mixed into the Enum, and it does not know how + # to pickle itself, pickle.dumps will succeed but pickle.loads will + # fail. Rather than have the error show up later and possibly far + # from the source, sabotage the pickle protocol for this class so + # that pickle.dumps also fails. + # + # However, if the new class implements its own __reduce_ex__, do not + # sabotage -- it's on them to make sure it works correctly. We use + # __reduce_ex__ instead of any of the others as it is preferred by + # pickle over __reduce__, and it handles all pickle protocols. + if '__reduce_ex__' not in classdict: if member_type is not object: methods = ('__getnewargs_ex__', '__getnewargs__', '__reduce_ex__', '__reduce__') - if not any(map(member_type.__dict__.get, methods)): + if not any(m in member_type.__dict__ for m in methods): _make_class_unpicklable(enum_class) # instantiate them, checking for duplicates as we go @@ -193,14 +200,22 @@ class EnumMeta(type): to an enumeration member (i.e. Color(3)) and for the functional API (i.e. Color = Enum('Color', names='red green blue')). - When used for the functional API: `module`, if set, will be stored in - the new class' __module__ attribute; `qualname`, if set, will be stored - in the new class' __qualname__ attribute; `type`, if set, will be mixed - in as the first base class. + When used for the functional API: - Note: if `module` is not set this routine will attempt to discover the - calling module by walking the frame stack; if this is unsuccessful - the resulting class will not be pickleable. + `value` will be the name of the new class. + + `names` should be either a string of white-space/comma delimited names + (values will start at 1), or an iterator/mapping of name, value pairs. + + `module` should be set to the module this class is being created in; + if it is not set, an attempt to find that module will be made, but if + it fails the class will not be picklable. + + `qualname` should be set to the actual location this class can be found + at in its module; by default it is set to the global scope. If this is + not correct, unpickling will fail in some circumstances. 
+ + `type`, if set, will be mixed in as the first base class. """ if names is None: # simple value lookup -- cgit v0.12 From 01cc2d5fb8354f9bf8fe523ee1c26f82d871a175 Mon Sep 17 00:00:00 2001 From: Ethan Furman Date: Mon, 3 Mar 2014 15:02:04 -0800 Subject: Issue20653: fix ReST for Enum --- Doc/library/enum.rst | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst index e9dbff2..628f91c 100644 --- a/Doc/library/enum.rst +++ b/Doc/library/enum.rst @@ -425,7 +425,7 @@ The solution is to specify the module name explicitly as follows:: .. warning:: - If :param module: is not supplied, and Enum cannot determine what it is, + If ``module`` is not supplied, and Enum cannot determine what it is, the new Enum members will not be unpicklable; to keep errors closer to the source, pickling will be disabled. @@ -440,26 +440,26 @@ The complete signature is:: Enum(value='NewEnumName', names=<...>, *, module='...', qualname='...', type=) -:param value: What the new Enum class will record as its name. +:value: What the new Enum class will record as its name. -:param names: The Enum members. This can be a whitespace or comma seperated -string:: +:names: The Enum members. This can be a whitespace or comma seperated string + (values will start at 1):: - 'red green blue', 'red,green,blue', 'red, green, blue' + 'red green blue' | 'red,green,blue' | 'red, green, blue' -(values will start at 1), or an iterator of name, value pairs:: + or an iterator of (name, value) pairs:: [('cyan', 4), ('magenta', 5), ('yellow', 6)] -or a mapping:: + or a mapping:: {'chartruese': 7, 'sea_green': 11, 'rosemary': 42} -:param module: name of module where new Enum class can be found. +:module: name of module where new Enum class can be found. -:param qualname: where in module new Enum class can be found. +:qualname: where in module new Enum class can be found. -:param type: type to mix in to new Enum class. +:type: type to mix in to new Enum class. Derived Enumerations -- cgit v0.12 From 62b4b9eecbcdd98d318eec522c8c48a3843be855 Mon Sep 17 00:00:00 2001 From: Nick Coghlan Date: Tue, 4 Mar 2014 20:39:42 +1000 Subject: Close #20839: pkgutil.find_loader now uses importlib.util.find_spec --- Doc/library/pkgutil.rst | 17 +++++++++-------- Lib/pkgutil.py | 17 +++++------------ Lib/test/test_pkgutil.py | 19 +++++++++++++++++++ Misc/NEWS | 9 ++++++++- 4 files changed, 41 insertions(+), 21 deletions(-) diff --git a/Doc/library/pkgutil.rst b/Doc/library/pkgutil.rst index 22d44eb..10b7848 100644 --- a/Doc/library/pkgutil.rst +++ b/Doc/library/pkgutil.rst @@ -74,15 +74,17 @@ support. Retrieve a :pep:`302` module loader for the given *fullname*. - This is a convenience wrapper around :func:`importlib.find_loader` that - sets the *path* argument correctly when searching for submodules, and - also ensures parent packages (if any) are imported before searching for - submodules. + This is a backwards compatibility wrapper around + :func:`importlib.util.find_spec` that converts most failures to + :exc:`ImportError` and only returns the loader rather than the full + :class:`ModuleSpec`. .. versionchanged:: 3.3 Updated to be based directly on :mod:`importlib` rather than relying on the package internal PEP 302 import emulation. + .. versionchanged:: 3.4 + Updated to be based on :pep:`451` .. function:: get_importer(path_item) @@ -109,14 +111,13 @@ support. not already imported, its containing package (if any) is imported, in order to establish the package ``__path__``. 
- This function uses :func:`iter_importers`, and is thus subject to the same - limitations regarding platform-specific special import locations such as the - Windows registry. - .. versionchanged:: 3.3 Updated to be based directly on :mod:`importlib` rather than relying on the package internal PEP 302 import emulation. + .. versionchanged:: 3.4 + Updated to be based on :pep:`451` + .. function:: iter_importers(fullname='') diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py index 326657a..58cccdc 100644 --- a/Lib/pkgutil.py +++ b/Lib/pkgutil.py @@ -470,29 +470,22 @@ def get_loader(module_or_name): def find_loader(fullname): """Find a PEP 302 "loader" object for fullname - This is s convenience wrapper around :func:`importlib.find_loader` that - sets the *path* argument correctly when searching for submodules, and - also ensures parent packages (if any) are imported before searching for - submodules. + This is a backwards compatibility wrapper around + importlib.util.find_spec that converts most failures to ImportError + and only returns the loader rather than the full spec """ if fullname.startswith('.'): msg = "Relative module name {!r} not supported".format(fullname) raise ImportError(msg) - path = None - pkg_name = fullname.rpartition(".")[0] - if pkg_name: - pkg = importlib.import_module(pkg_name) - path = getattr(pkg, "__path__", None) - if path is None: - return None try: - return importlib.find_loader(fullname, path) + spec = importlib.util.find_spec(fullname) except (ImportError, AttributeError, TypeError, ValueError) as ex: # This hack fixes an impedance mismatch between pkgutil and # importlib, where the latter raises other errors for cases where # pkgutil previously raised ImportError msg = "Error while finding loader for {!r} ({}: {})" raise ImportError(msg.format(fullname, type(ex), ex)) from ex + return spec.loader def extend_path(path, name): diff --git a/Lib/test/test_pkgutil.py b/Lib/test/test_pkgutil.py index 7a1cd41..aafdf3c 100644 --- a/Lib/test/test_pkgutil.py +++ b/Lib/test/test_pkgutil.py @@ -334,6 +334,25 @@ class ImportlibMigrationTests(unittest.TestCase): self.assertIsNotNone(pkgutil.get_loader("test.support")) self.assertEqual(len(w.warnings), 0) + def test_get_loader_handles_missing_loader_attribute(self): + global __loader__ + this_loader = __loader__ + del __loader__ + try: + with check_warnings() as w: + self.assertIsNotNone(pkgutil.get_loader(__name__)) + self.assertEqual(len(w.warnings), 0) + finally: + __loader__ = this_loader + + + def test_find_loader_avoids_emulation(self): + with check_warnings() as w: + self.assertIsNotNone(pkgutil.find_loader("sys")) + self.assertIsNotNone(pkgutil.find_loader("os")) + self.assertIsNotNone(pkgutil.find_loader("test.support")) + self.assertEqual(len(w.warnings), 0) + def test_get_importer_avoids_emulation(self): # We use an illegal path so *none* of the path hooks should fire with check_warnings() as w: diff --git a/Misc/NEWS b/Misc/NEWS index 8b68f00..75d2909 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -12,6 +12,13 @@ Core and Builtins - Issue #20786: Fix signatures for dict.__delitem__ and property.__delete__ builtins. +Library +------- + +- Issue #20839: Don't trigger a DeprecationWarning in the still supported + pkgutil.get_loader() API when __loader__ isn't set on a module (nor + when pkgutil.find_loader() is called directly). + Build ----- @@ -27,7 +34,7 @@ Build uninstalling pip (rather than failing) if the user has updated pip to a different version from the one bundled with ensurepip. 
-- Issue #20465: Update OS X and Windows installer builds to use +- Issue #20465: Update OS X and Windows installer builds to use SQLite 3.8.3.1. -- cgit v0.12 From a3188ef476bdacce44efb98c43e4d30bdbb3b20f Mon Sep 17 00:00:00 2001 From: Larry Hastings Date: Sun, 9 Mar 2014 04:12:12 -0700 Subject: Update pydoc topics and suspicious filters. --- Doc/tools/sphinxext/susp-ignored.csv | 1 - Lib/pydoc_data/topics.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/Doc/tools/sphinxext/susp-ignored.csv b/Doc/tools/sphinxext/susp-ignored.csv index f9d6bbf..1769023 100644 --- a/Doc/tools/sphinxext/susp-ignored.csv +++ b/Doc/tools/sphinxext/susp-ignored.csv @@ -282,4 +282,3 @@ whatsnew/changelog,,:PythonCmd,"With Tk < 8.5 _tkinter.c:PythonCmd() raised Unic whatsnew/changelog,,::,": Fix FTP tests for IPv6, bind to ""::1"" instead of ""localhost""." whatsnew/changelog,,::,": Use ""127.0.0.1"" or ""::1"" instead of ""localhost"" as much as" whatsnew/changelog,,:password,user:password -whatsnew/changelog,,:gz,w:gz diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index 6e9a714..ec2a713 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Sun Feb 23 02:12:38 2014 +# Autogenerated by Sphinx on Sun Mar 9 03:59:56 2014 topics = {'assert': '\nThe "assert" statement\n**********************\n\nAssert statements are a convenient way to insert debugging assertions\ninto a program:\n\n assert_stmt ::= "assert" expression ["," expression]\n\nThe simple form, "assert expression", is equivalent to\n\n if __debug__:\n if not expression: raise AssertionError\n\nThe extended form, "assert expression1, expression2", is equivalent to\n\n if __debug__:\n if not expression1: raise AssertionError(expression2)\n\nThese equivalences assume that "__debug__" and "AssertionError" refer\nto the built-in variables with those names. In the current\nimplementation, the built-in variable "__debug__" is "True" under\nnormal circumstances, "False" when optimization is requested (command\nline option -O). The current code generator emits no code for an\nassert statement when optimization is requested at compile time. Note\nthat it is unnecessary to include the source code for the expression\nthat failed in the error message; it will be displayed as part of the\nstack trace.\n\nAssignments to "__debug__" are illegal. The value for the built-in\nvariable is determined when the interpreter starts.\n', 'assignment': '\nAssignment statements\n*********************\n\nAssignment statements are used to (re)bind names to values and to\nmodify attributes or items of mutable objects:\n\n assignment_stmt ::= (target_list "=")+ (expression_list | yield_expression)\n target_list ::= target ("," target)* [","]\n target ::= identifier\n | "(" target_list ")"\n | "[" target_list "]"\n | attributeref\n | subscription\n | slicing\n | "*" target\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn assignment statement evaluates the expression list (remember that\nthis can be a single expression or a comma-separated list, the latter\nyielding a tuple) and assigns the single resulting object to each of\nthe target lists, from left to right.\n\nAssignment is defined recursively depending on the form of the target\n(list). 
When a target is part of a mutable object (an attribute\nreference, subscription or slicing), the mutable object must\nultimately perform the assignment and decide about its validity, and\nmay raise an exception if the assignment is unacceptable. The rules\nobserved by various types and the exceptions raised are given with the\ndefinition of the object types (see section *The standard type\nhierarchy*).\n\nAssignment of an object to a target list, optionally enclosed in\nparentheses or square brackets, is recursively defined as follows.\n\n* If the target list is a single target: The object is assigned to\n that target.\n\n* If the target list is a comma-separated list of targets: The\n object must be an iterable with the same number of items as there\n are targets in the target list, and the items are assigned, from\n left to right, to the corresponding targets.\n\n * If the target list contains one target prefixed with an\n asterisk, called a "starred" target: The object must be a sequence\n with at least as many items as there are targets in the target\n list, minus one. The first items of the sequence are assigned,\n from left to right, to the targets before the starred target. The\n final items of the sequence are assigned to the targets after the\n starred target. A list of the remaining items in the sequence is\n then assigned to the starred target (the list can be empty).\n\n * Else: The object must be a sequence with the same number of\n items as there are targets in the target list, and the items are\n assigned, from left to right, to the corresponding targets.\n\nAssignment of an object to a single target is recursively defined as\nfollows.\n\n* If the target is an identifier (name):\n\n * If the name does not occur in a "global" or "nonlocal" statement\n in the current code block: the name is bound to the object in the\n current local namespace.\n\n * Otherwise: the name is bound to the object in the global\n namespace or the outer namespace determined by "nonlocal",\n respectively.\n\n The name is rebound if it was already bound. This may cause the\n reference count for the object previously bound to the name to reach\n zero, causing the object to be deallocated and its destructor (if it\n has one) to be called.\n\n* If the target is a target list enclosed in parentheses or in\n square brackets: The object must be an iterable with the same number\n of items as there are targets in the target list, and its items are\n assigned, from left to right, to the corresponding targets.\n\n* If the target is an attribute reference: The primary expression in\n the reference is evaluated. It should yield an object with\n assignable attributes; if this is not the case, "TypeError" is\n raised. That object is then asked to assign the assigned object to\n the given attribute; if it cannot perform the assignment, it raises\n an exception (usually but not necessarily "AttributeError").\n\n Note: If the object is a class instance and the attribute reference\n occurs on both sides of the assignment operator, the RHS expression,\n "a.x" can access either an instance attribute or (if no instance\n attribute exists) a class attribute. The LHS target "a.x" is always\n set as an instance attribute, creating it if necessary. 
Thus, the\n two occurrences of "a.x" do not necessarily refer to the same\n attribute: if the RHS expression refers to a class attribute, the\n LHS creates a new instance attribute as the target of the\n assignment:\n\n class Cls:\n x = 3 # class variable\n inst = Cls()\n inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x as 3\n\n This description does not necessarily apply to descriptor\n attributes, such as properties created with "property()".\n\n* If the target is a subscription: The primary expression in the\n reference is evaluated. It should yield either a mutable sequence\n object (such as a list) or a mapping object (such as a dictionary).\n Next, the subscript expression is evaluated.\n\n If the primary is a mutable sequence object (such as a list), the\n subscript must yield an integer. If it is negative, the sequence\'s\n length is added to it. The resulting value must be a nonnegative\n integer less than the sequence\'s length, and the sequence is asked\n to assign the assigned object to its item with that index. If the\n index is out of range, "IndexError" is raised (assignment to a\n subscripted sequence cannot add new items to a list).\n\n If the primary is a mapping object (such as a dictionary), the\n subscript must have a type compatible with the mapping\'s key type,\n and the mapping is then asked to create a key/datum pair which maps\n the subscript to the assigned object. This can either replace an\n existing key/value pair with the same key value, or insert a new\n key/value pair (if no key with the same value existed).\n\n For user-defined objects, the "__setitem__()" method is called with\n appropriate arguments.\n\n* If the target is a slicing: The primary expression in the\n reference is evaluated. It should yield a mutable sequence object\n (such as a list). The assigned object should be a sequence object\n of the same type. Next, the lower and upper bound expressions are\n evaluated, insofar they are present; defaults are zero and the\n sequence\'s length. The bounds should evaluate to integers. If\n either bound is negative, the sequence\'s length is added to it. The\n resulting bounds are clipped to lie between zero and the sequence\'s\n length, inclusive. Finally, the sequence object is asked to replace\n the slice with the items of the assigned sequence. The length of\n the slice may be different from the length of the assigned sequence,\n thus changing the length of the target sequence, if the object\n allows it.\n\n**CPython implementation detail:** In the current implementation, the\nsyntax for targets is taken to be the same as for expressions, and\ninvalid syntax is rejected during the code generation phase, causing\nless detailed error messages.\n\nWARNING: Although the definition of assignment implies that overlaps\nbetween the left-hand side and the right-hand side are \'safe\' (for\nexample "a, b = b, a" swaps two variables), overlaps *within* the\ncollection of assigned-to variables are not safe! 
For instance, the\nfollowing program prints "[0, 2]":\n\n x = [0, 1]\n i = 0\n i, x[i] = 1, 2\n print(x)\n\nSee also: **PEP 3132** - Extended Iterable Unpacking\n\n The specification for the "*target" feature.\n\n\nAugmented assignment statements\n===============================\n\nAugmented assignment is the combination, in a single statement, of a\nbinary operation and an assignment statement:\n\n augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression)\n augtarget ::= identifier | attributeref | subscription | slicing\n augop ::= "+=" | "-=" | "*=" | "/=" | "//=" | "%=" | "**="\n | ">>=" | "<<=" | "&=" | "^=" | "|="\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn augmented assignment evaluates the target (which, unlike normal\nassignment statements, cannot be an unpacking) and the expression\nlist, performs the binary operation specific to the type of assignment\non the two operands, and assigns the result to the original target.\nThe target is only evaluated once.\n\nAn augmented assignment expression like "x += 1" can be rewritten as\n"x = x + 1" to achieve a similar, but not exactly equal effect. In the\naugmented version, "x" is only evaluated once. Also, when possible,\nthe actual operation is performed *in-place*, meaning that rather than\ncreating a new object and assigning that to the target, the old object\nis modified instead.\n\nWith the exception of assigning to tuples and multiple targets in a\nsingle statement, the assignment done by augmented assignment\nstatements is handled the same way as normal assignments. Similarly,\nwith the exception of the possible *in-place* behavior, the binary\noperation performed by augmented assignment is the same as the normal\nbinary operations.\n\nFor targets which are attribute references, the same *caveat about\nclass and instance attributes* applies as for regular assignments.\n', 'atom-identifiers': '\nIdentifiers (Names)\n*******************\n\nAn identifier occurring as an atom is a name. See section\n*Identifiers and keywords* for lexical definition and section *Naming\nand binding* for documentation of naming and binding.\n\nWhen the name is bound to an object, evaluation of the atom yields\nthat object. When a name is not bound, an attempt to evaluate it\nraises a "NameError" exception.\n\n**Private name mangling:** When an identifier that textually occurs in\na class definition begins with two or more underscore characters and\ndoes not end in two or more underscores, it is considered a *private\nname* of that class. Private names are transformed to a longer form\nbefore code is generated for them. The transformation inserts the\nclass name, with leading underscores removed and a single underscore\ninserted, in front of the name. For example, the identifier "__spam"\noccurring in a class named "Ham" will be transformed to "_Ham__spam".\nThis transformation is independent of the syntactical context in which\nthe identifier is used. If the transformed name is extremely long\n(longer than 255 characters), implementation defined truncation may\nhappen. If the class name consists only of underscores, no\ntransformation is done.\n', -- cgit v0.12 From d5c59763adc85b1ed931465bb29fc451f19516b7 Mon Sep 17 00:00:00 2001 From: Larry Hastings Date: Sun, 9 Mar 2014 04:13:05 -0700 Subject: Version bump for 3.4.0rc3. 
--- Include/patchlevel.h | 4 ++-- Lib/distutils/__init__.py | 2 +- Lib/idlelib/idlever.py | 2 +- Misc/RPM/python-3.4.spec | 2 +- README | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Include/patchlevel.h b/Include/patchlevel.h index b200276..2a6e8a8 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -20,10 +20,10 @@ #define PY_MINOR_VERSION 4 #define PY_MICRO_VERSION 0 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_GAMMA -#define PY_RELEASE_SERIAL 2 +#define PY_RELEASE_SERIAL 3 /* Version as a string */ -#define PY_VERSION "3.4.0rc2" +#define PY_VERSION "3.4.0rc3" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py index c03185b..954b50d 100644 --- a/Lib/distutils/__init__.py +++ b/Lib/distutils/__init__.py @@ -13,5 +13,5 @@ used from a setup script as # Updated automatically by the Python release process. # #--start constants-- -__version__ = "3.4.0rc2" +__version__ = "3.4.0rc3" #--end constants-- diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py index 38dbbcb..d2df47d 100644 --- a/Lib/idlelib/idlever.py +++ b/Lib/idlelib/idlever.py @@ -1 +1 @@ -IDLE_VERSION = "3.4.0rc2" +IDLE_VERSION = "3.4.0rc3" diff --git a/Misc/RPM/python-3.4.spec b/Misc/RPM/python-3.4.spec index 4c31a23..e6987bc 100644 --- a/Misc/RPM/python-3.4.spec +++ b/Misc/RPM/python-3.4.spec @@ -39,7 +39,7 @@ %define name python #--start constants-- -%define version 3.4.0rc2 +%define version 3.4.0rc3 %define libvers 3.4 #--end constants-- %define release 1pydotorg diff --git a/README b/README index d35ff87..c55b1bc 100644 --- a/README +++ b/README @@ -1,4 +1,4 @@ -This is Python version 3.4.0 release candidate 2 +This is Python version 3.4.0 release candidate 3 ================================================ Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, -- cgit v0.12 From 4fe3bd410412f6cf5584f8fadef89151c83347bf Mon Sep 17 00:00:00 2001 From: Larry Hastings Date: Sun, 9 Mar 2014 04:13:25 -0700 Subject: Added tag v3.4.0rc3 for changeset 8a81cdab3e9d --- .hgtags | 1 + 1 file changed, 1 insertion(+) diff --git a/.hgtags b/.hgtags index adeecd1..70bc4f9 100644 --- a/.hgtags +++ b/.hgtags @@ -129,3 +129,4 @@ ba32913eb13ec545a46dd0ce18035b6c416f0d78 v3.4.0b2 a97ce3ecc96af79bd2e1ac66ce48d9138e0ca749 v3.4.0b3 5e088cea8660677969113741c1313d570d977e02 v3.4.0rc1 a300712ed38c9a242b736c44e806caea25a6dc05 v3.4.0rc2 +8a81cdab3e9d521daaef989fade94b16455fc3b8 v3.4.0rc3 -- cgit v0.12 From 6b5284fd4e8b76b223520a155a6fefda92359f9c Mon Sep 17 00:00:00 2001 From: Larry Hastings Date: Sat, 15 Mar 2014 20:57:42 -0700 Subject: Mark branch as being after Python 3.4.0rc3. --- Include/patchlevel.h | 2 +- Misc/NEWS | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/Include/patchlevel.h b/Include/patchlevel.h index 2a6e8a8..52b9674 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -23,7 +23,7 @@ #define PY_RELEASE_SERIAL 3 /* Version as a string */ -#define PY_VERSION "3.4.0rc3" +#define PY_VERSION "3.4.0rc3+" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/Misc/NEWS b/Misc/NEWS index 75d2909..3c6540a 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -2,8 +2,21 @@ Python News +++++++++++ +What's New in Python 3.4.0? +=========================== + +Release date: 2014-03-16 + +Core and Builtins +----------------- + +Library +------- + + What's New in Python 3.4.0 release candidate 3? 
=============================================== + Release date: 2014-03-09 Core and Builtins -- cgit v0.12 From c4c7b1ccb6231bcd53c5a122d7a3ccf2fbbaf194 Mon Sep 17 00:00:00 2001 From: R David Murray Date: Fri, 7 Mar 2014 21:00:34 -0500 Subject: whatsnew: cp273 codec (#10907797) Also updated the docs and added the aliases mentioned by the references. --- Doc/library/codecs.rst | 4 ++++ Doc/whatsnew/3.4.rst | 3 +++ Lib/encodings/aliases.py | 5 +++++ 3 files changed, 12 insertions(+) diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst index 843de27..3729dac 100644 --- a/Doc/library/codecs.rst +++ b/Doc/library/codecs.rst @@ -971,6 +971,10 @@ particular, the following variants typically exist: +-----------------+--------------------------------+--------------------------------+ | cp037 | IBM037, IBM039 | English | +-----------------+--------------------------------+--------------------------------+ +| cp273 | 273, IBM273, csIBM273 | German | +| | | | +| | | .. versionadded:: 3.4 | ++-----------------+--------------------------------+--------------------------------+ | cp424 | EBCDIC-CP-HE, IBM424 | Hebrew | +-----------------+--------------------------------+--------------------------------+ | cp437 | 437, IBM437 | English | diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index c8c47e0..1bda6fa 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -364,6 +364,9 @@ Some smaller changes made to the core Python language are: Contributed by Victor Stinner, Kang-Hao (Kenny) Lu and Serhiy Storchaka in :issue:`12892`. +* New EBCDIC :ref:`codec ` ``cp273``. (Contributed by + Michael Bierenfeld and Andrew Kuchling in :issue:`1097797`.) + New Modules diff --git a/Lib/encodings/aliases.py b/Lib/encodings/aliases.py index 5461aa0..4cbaade 100644 --- a/Lib/encodings/aliases.py +++ b/Lib/encodings/aliases.py @@ -109,6 +109,11 @@ aliases = { '1258' : 'cp1258', 'windows_1258' : 'cp1258', + # cp273 codec + '273' : 'cp273', + 'ibm273' : 'cp273', + 'csibm273' : 'cp273', + # cp424 codec '424' : 'cp424', 'csibm424' : 'cp424', -- cgit v0.12 From 10b93cc29cc98409ac45a94a44331b0bf6bfd666 Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Wed, 12 Mar 2014 18:10:57 -0500 Subject: merge 3.3 (#20896) --- Lib/test/test_ssl.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py index 0dc04c0..1a2a9f0 100644 --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -1351,12 +1351,15 @@ class NetworkedTests(unittest.TestCase): def test_get_server_certificate(self): def _test_get_server_certificate(host, port, cert=None): with support.transient_internet(host): - pem = ssl.get_server_certificate((host, port)) + pem = ssl.get_server_certificate((host, port), + ssl.PROTOCOL_SSLv23) if not pem: self.fail("No server certificate on %s:%s!" % (host, port)) try: - pem = ssl.get_server_certificate((host, port), ca_certs=CERTFILE) + pem = ssl.get_server_certificate((host, port), + ssl.PROTOCOL_SSLv23, + ca_certs=CERTFILE) except ssl.SSLError as x: #should fail if support.verbose: @@ -1364,7 +1367,9 @@ class NetworkedTests(unittest.TestCase): else: self.fail("Got server certificate %s for %s:%s!" % (pem, host, port)) - pem = ssl.get_server_certificate((host, port), ca_certs=cert) + pem = ssl.get_server_certificate((host, port), + ssl.PROTOCOL_SSLv23, + ca_certs=cert) if not pem: self.fail("No server certificate on %s:%s!" 
% (host, port)) if support.verbose: -- cgit v0.12 From 21317b654ed2e6ba131be02f08a99751a454bb99 Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Wed, 12 Mar 2014 21:42:04 -0500 Subject: merge 3.3 (#19060) --- Doc/library/subprocess.rst | 2 +- Lib/subprocess.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index 95a85ce..d2893b6 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -9,7 +9,7 @@ The :mod:`subprocess` module allows you to spawn new processes, connect to their input/output/error pipes, and obtain their return codes. This module intends to -replace several other, older modules and functions, such as:: +replace several older modules and functions:: os.system os.spawn* diff --git a/Lib/subprocess.py b/Lib/subprocess.py index e79e5fd..26d8de5 100644 --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -11,7 +11,7 @@ r"""subprocess - Subprocesses with accessible I/O streams This module allows you to spawn processes, connect to their input/output/error pipes, and obtain their return codes. This module -intends to replace several other, older modules and functions, like: +intends to replace several older modules and functions: os.system os.spawn* -- cgit v0.12 From 6dfc632f1f8407079d728bc71c10704d078662e2 Mon Sep 17 00:00:00 2001 From: R David Murray Date: Thu, 13 Mar 2014 21:34:54 -0400 Subject: whatsnew: difflib.isbjunk &c were removed, not deprecated. Also move NEWS item to correct position (it was in 3.3). --- Doc/whatsnew/3.4.rst | 10 ++++++---- Misc/NEWS | 5 +++-- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index 1bda6fa..939d491 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -1503,10 +1503,6 @@ using ``-Wd``). Deprecated Python Modules, Functions and Methods ------------------------------------------------ -* :meth:`difflib.SequenceMatcher.isbjunk` and - :meth:`difflib.SequenceMatcher.isbpopular` were removed: use ``x in sm.bjunk`` and - ``x in sm.bpopular``, where *sm* is a :class:`~difflib.SequenceMatcher` object. - * As mentioned in :ref:`whatsnew-pep-451`, a number of :mod:`importilb` methods and functions are deprecated: :meth:`importlib.find_loader` is replaced by :func:`importlib.util.find_spec`; @@ -1612,6 +1608,12 @@ removed: * :class:`inspect.Signature`: positional-only parameters are now required to have a valid name. +* :meth:`difflib.SequenceMatcher.isbjunk` and + :meth:`difflib.SequenceMatcher.isbpopular` were deprecated in 3.2, and have + now been removed: use ``x in sm.bjunk`` and + ``x in sm.bpopular``, where *sm* is a :class:`~difflib.SequenceMatcher` object + (:issue:`13248`). + Code Cleanups ------------- diff --git a/Misc/NEWS b/Misc/NEWS index 3c6540a..39b1197 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -2987,6 +2987,9 @@ Library - Issue #17467: add readline and readlines support to mock_open in unittest.mock. +- Issue #13248: removed deprecated and undocumented difflib.isbjunk, + isbpopular. + - Issue #17192: Update the ctypes module's libffi to v3.0.13. This specifically addresses a stack misalignment issue on x86 and issues on some more recent platforms. @@ -4412,8 +4415,6 @@ Core and Builtins Library ------- -- Issue #13248: removed deprecated and undocumented difflib.isbjunk, isbpopular. - - Issue #13370: Ensure that ctypes works on Mac OS X when Python is compiled using the clang compiler. 
-- cgit v0.12 From b6b6a6d587d267cbad490232d08faebd30fdb7e2 Mon Sep 17 00:00:00 2001 From: Ned Deily Date: Sat, 15 Mar 2014 13:19:20 -0700 Subject: Issue #20939: merge from 3.3 --- Lib/test/test_urllibnet.py | 2 +- Misc/NEWS | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/Lib/test/test_urllibnet.py b/Lib/test/test_urllibnet.py index 4da89fc..fba6dbb 100644 --- a/Lib/test/test_urllibnet.py +++ b/Lib/test/test_urllibnet.py @@ -80,7 +80,7 @@ class urlopenNetworkTests(unittest.TestCase): def test_geturl(self): # Make sure same URL as opened is returned by geturl. - URL = "http://www.python.org/" + URL = "https://www.python.org/" with self.urlopen(URL) as open_url: gotten_url = open_url.geturl() self.assertEqual(gotten_url, URL) diff --git a/Misc/NEWS b/Misc/NEWS index 39b1197..7fd12a6 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -13,6 +13,9 @@ Core and Builtins Library ------- +- Issue #20939: Fix test_geturl failure in test_urllibnet due to + new redirect of http://www.python.org/ to https://www.python.org. + What's New in Python 3.4.0 release candidate 3? =============================================== -- cgit v0.12 From 3732ed24145c1ac77e99bcf85bccda3af095e696 Mon Sep 17 00:00:00 2001 From: Larry Hastings Date: Sat, 15 Mar 2014 21:13:56 -0700 Subject: Merge in all documentation changes since branching 3.4.0rc1. --- Doc/c-api/arg.rst | 1 + Doc/c-api/exceptions.rst | 8 +- Doc/c-api/init.rst | 1 + Doc/c-api/module.rst | 2 +- Doc/c-api/object.rst | 6 +- Doc/c-api/tuple.rst | 8 + Doc/c-api/typeobj.rst | 5 +- Doc/conf.py | 6 +- Doc/contents.rst | 12 +- Doc/distributing/index.rst | 143 ++++ Doc/distutils/apiref.rst | 26 +- Doc/distutils/configfile.rst | 2 + Doc/distutils/index.rst | 6 +- Doc/distutils/setupscript.rst | 9 +- Doc/extending/index.rst | 42 +- Doc/faq/general.rst | 3 +- Doc/faq/library.rst | 1 + Doc/faq/programming.rst | 1 + Doc/glossary.rst | 8 + Doc/howto/pyporting.rst | 33 +- Doc/includes/email-alternative-new-api.py | 2 +- Doc/install/index.rst | 9 +- Doc/installing/index.rst | 210 ++++++ Doc/library/aifc.rst | 4 +- Doc/library/asyncio-eventloop.rst | 6 +- Doc/library/asyncio-protocol.rst | 15 +- Doc/library/asyncio-subprocess.rst | 8 +- Doc/library/asyncio-sync.rst | 29 +- Doc/library/audioop.rst | 8 +- Doc/library/base64.rst | 32 +- Doc/library/csv.rst | 67 +- Doc/library/curses.rst | 2 +- Doc/library/distutils.rst | 19 +- Doc/library/email-examples.rst | 6 +- Doc/library/email.contentmanager.rst | 19 +- Doc/library/email.message.rst | 4 +- Doc/library/email.policy.rst | 2 +- Doc/library/ensurepip.rst | 2 +- Doc/library/enum.rst | 2 +- Doc/library/exceptions.rst | 1 - Doc/library/fractions.rst | 8 +- Doc/library/functions.rst | 35 +- Doc/library/getopt.rst | 1 + Doc/library/hashlib.rst | 12 +- Doc/library/hmac.rst | 5 +- Doc/library/http.server.rst | 14 +- Doc/library/importlib.rst | 2 +- Doc/library/inspect.rst | 5 + Doc/library/io.rst | 1 + Doc/library/ipaddress.rst | 20 +- Doc/library/logging.config.rst | 8 +- Doc/library/math.rst | 6 +- Doc/library/multiprocessing.rst | 37 +- Doc/library/operator.rst | 4 +- Doc/library/os.path.rst | 18 +- Doc/library/os.rst | 12 +- Doc/library/pathlib.rst | 30 + Doc/library/pkgutil.rst | 2 + Doc/library/plistlib.rst | 13 +- Doc/library/pydoc.rst | 5 + Doc/library/site.rst | 10 +- Doc/library/smtplib.rst | 3 + Doc/library/socket.rst | 13 +- Doc/library/ssl.rst | 5 +- Doc/library/stringprep.rst | 1 - Doc/library/subprocess.rst | 23 +- Doc/library/sunau.rst | 4 +- Doc/library/sys.rst | 13 +- Doc/library/test.rst | 1 + 
Doc/library/textwrap.rst | 29 +- Doc/library/tracemalloc.rst | 39 +- Doc/library/types.rst | 21 + Doc/library/undoc.rst | 2 +- Doc/library/unittest.mock-examples.rst | 2 +- Doc/library/unittest.mock.rst | 4 +- Doc/library/unittest.rst | 31 +- Doc/library/urllib.error.rst | 2 +- Doc/library/urllib.request.rst | 17 +- Doc/library/venv.rst | 2 + Doc/library/wave.rst | 35 +- Doc/library/xml.etree.elementtree.rst | 3 +- Doc/library/xml.rst | 73 +- Doc/library/zipfile.rst | 26 +- Doc/reference/datamodel.rst | 18 +- Doc/tools/sphinxext/indexcontent.html | 8 +- Doc/tools/sphinxext/susp-ignored.csv | 1 + Doc/tutorial/introduction.rst | 8 +- Doc/tutorial/whatnow.rst | 4 +- Doc/using/unix.rst | 2 + Doc/using/venv-create.inc | 10 +- Doc/using/windows.rst | 4 + Doc/whatsnew/3.4.rst | 1166 +++++++++++++++++++++++------ Misc/NEWS | 8 +- 93 files changed, 2029 insertions(+), 567 deletions(-) create mode 100644 Doc/distributing/index.rst create mode 100644 Doc/installing/index.rst diff --git a/Doc/c-api/arg.rst b/Doc/c-api/arg.rst index bf13eed..2f02241 100644 --- a/Doc/c-api/arg.rst +++ b/Doc/c-api/arg.rst @@ -45,6 +45,7 @@ in any early abort case). Unless otherwise stated, buffers are not NUL-terminated. .. note:: + For all ``#`` variants of formats (``s#``, ``y#``, etc.), the type of the length argument (int or :c:type:`Py_ssize_t`) is controlled by defining the macro :c:macro:`PY_SSIZE_T_CLEAN` before including diff --git a/Doc/c-api/exceptions.rst b/Doc/c-api/exceptions.rst index 989166f..7067448 100644 --- a/Doc/c-api/exceptions.rst +++ b/Doc/c-api/exceptions.rst @@ -525,11 +525,11 @@ Exception Objects reference, as accessible from Python through :attr:`__cause__`. -.. c:function:: void PyException_SetCause(PyObject *ex, PyObject *ctx) +.. c:function:: void PyException_SetCause(PyObject *ex, PyObject *cause) - Set the cause associated with the exception to *ctx*. Use *NULL* to clear - it. There is no type check to make sure that *ctx* is either an exception - instance or :const:`None`. This steals a reference to *ctx*. + Set the cause associated with the exception to *cause*. Use *NULL* to clear + it. There is no type check to make sure that *cause* is either an exception + instance or :const:`None`. This steals a reference to *cause*. :attr:`__suppress_context__` is implicitly set to ``True`` by this function. diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index 6439d7f..0587e15 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -582,6 +582,7 @@ code, or when embedding the Python interpreter: .. index:: module: _thread .. note:: + When only the main thread exists, no GIL operations are needed. This is a common situation (most Python programs do not use threads), and the lock operations slow the interpreter down a bit. Therefore, the lock is not diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst index 26c4384..985a347 100644 --- a/Doc/c-api/module.rst +++ b/Doc/c-api/module.rst @@ -120,7 +120,7 @@ There are only a few functions special to module objects. Return a pointer to the :c:type:`PyModuleDef` struct from which the module was created, or *NULL* if the module wasn't created with - :c:func:`PyModule_Create`.i + :c:func:`PyModule_Create`. .. c:function:: PyObject* PyState_FindModule(PyModuleDef *def) diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index ad84301..509ca3f 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -357,9 +357,9 @@ attribute is considered sufficient for this determination. .. 
c:function:: Py_ssize_t PyObject_LengthHint(PyObject *o, Py_ssize_t default) - Return an estimated length for the object *o*. First trying to return its - actual length, then an estimate using ``__length_hint__``, and finally - returning the default value. On error ``-1`` is returned. This is the + Return an estimated length for the object *o*. First try to return its + actual length, then an estimate using :meth:`~object.__length_hint__`, and + finally return the default value. On error return ``-1``. This is the equivalent to the Python expression ``operator.length_hint(o, default)``. .. versionadded:: 3.4 diff --git a/Doc/c-api/tuple.rst b/Doc/c-api/tuple.rst index 184affb..3922d50 100644 --- a/Doc/c-api/tuple.rst +++ b/Doc/c-api/tuple.rst @@ -129,6 +129,14 @@ type. Initializes a struct sequence type *type* from *desc* in place. +.. c:function:: int PyStructSequence_InitType2(PyTypeObject *type, PyStructSequence_Desc *desc) + + The same as ``PyStructSequence_InitType``, but returns ``0`` on success and ``-1`` on + failure. + + .. versionadded:: 3.4 + + .. c:type:: PyStructSequence_Desc Contains the meta information of a struct sequence type to create. diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index 48b13e5..efd8574 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -205,9 +205,8 @@ type objects) *must* have the :attr:`ob_size` field. bit currently defined is :const:`Py_PRINT_RAW`. When the :const:`Py_PRINT_RAW` flag bit is set, the instance should be printed the same way as :c:member:`~PyTypeObject.tp_str` would format it; when the :const:`Py_PRINT_RAW` flag bit is clear, the instance - should be printed the same was as :c:member:`~PyTypeObject.tp_repr` would format it. It should - return ``-1`` and set an exception condition when an error occurred during the - comparison. + should be printed the same way as :c:member:`~PyTypeObject.tp_repr` would format it. It should + return ``-1`` and set an exception condition when an error occurs. It is possible that the :c:member:`~PyTypeObject.tp_print` field will be deprecated. In any case, it is recommended not to define :c:member:`~PyTypeObject.tp_print`, but instead to rely on diff --git a/Doc/conf.py b/Doc/conf.py index 5b63cad..c0ed8c6 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -61,6 +61,8 @@ add_module_names = True # By default, highlight as Python 3. highlight_language = 'python3' +needs_sphinx = '1.1' + # Options for HTML output # ----------------------- @@ -118,11 +120,11 @@ _stdauthor = r'Guido van Rossum\\Fred L. Drake, Jr., editor' latex_documents = [ ('c-api/index', 'c-api.tex', 'The Python/C API', _stdauthor, 'manual'), - ('distutils/index', 'distutils.tex', + ('distributing/index', 'distributing.tex', 'Distributing Python Modules', _stdauthor, 'manual'), ('extending/index', 'extending.tex', 'Extending and Embedding Python', _stdauthor, 'manual'), - ('install/index', 'install.tex', + ('installing/index', 'installing.tex', 'Installing Python Modules', _stdauthor, 'manual'), ('library/index', 'library.tex', 'The Python Library Reference', _stdauthor, 'manual'), diff --git a/Doc/contents.rst b/Doc/contents.rst index c0c6af3..29b07db 100644 --- a/Doc/contents.rst +++ b/Doc/contents.rst @@ -11,8 +11,8 @@ library/index.rst extending/index.rst c-api/index.rst - distutils/index.rst - install/index.rst + distributing/index.rst + installing/index.rst howto/index.rst faq/index.rst glossary.rst @@ -21,3 +21,11 @@ bugs.rst copyright.rst license.rst + +.. include legacy packaging docs in build + +.. 
toctree:: + :hidden: + + distutils/index.rst + install/index.rst diff --git a/Doc/distributing/index.rst b/Doc/distributing/index.rst new file mode 100644 index 0000000..d671041 --- /dev/null +++ b/Doc/distributing/index.rst @@ -0,0 +1,143 @@ +.. _distributing-index: + +############################### + Distributing Python Modules +############################### + +:Email: distutils-sig@python.org + + +As a popular open source development project, Python has an active +supporting community of contributors and users that also make their software +available for other Python developers to use under open source license terms. + +This allows Python users to share and collaborate effectively, benefiting +from the solutions others have already created to common (and sometimes +even rare!) problems, as well as potentially contributing their own +solutions to the common pool. + +This guide covers the distribution part of the process. For a guide to +installing other Python projects, refer to the +:ref:`installation guide `. + +.. note:: + + For corporate and other institutional users, be aware that many + organisations have their own policies around using and contributing to + open source software. Please take such policies into account when making + use of the distribution and installation tools provided with Python. + + +Key terms +========= + +* the `Python Package Index `__ is a public + repository of open source licensed packages made available for use by + other Python users +* the `Python Packaging Authority + `__ are the group of + developers and documentation authors responsible for the maintenance and + evolution of the standard packaging tools and the associated metadata and + file format standards. They maintain a variety of tools, documentation + and issue trackers on both `GitHub `__ and + `BitBucket `__. +* ``distutils`` is the original build and distribution system first added to + the Python standard library in 1998. While direct use of ``distutils`` is + being phased out, it still laid the foundation for the current packaging + and distribution infrastructure, and it not only remains part of the + standard library, but its name lives on in other ways (such as the name + of the mailing list used to coordinate Python packaging standards + development). + + +Open source licensing and collaboration +======================================= + +In most parts of the world, software is automatically covered by copyright. +This means that other developers require explicit permission to copy, use, +modify and redistribute the software. + +Open source licensing is a way of explicitly granting such permission in a +relatively consistent way, allowing developers to share and collaborate +efficiently by making common solutions to various problems freely available. +This leaves many developers free to spend more time focusing on the problems +that are relatively unique to their specific situation. + +The distribution tools provided with Python are designed to make it +reasonably straightforward for developers to make their own contributions +back to that common pool of software if they choose to do so. + +The same distribution tools can also be used to distribute software within +an organisation, regardless of whether that software is published as open +source software or not. 
+ + +Installing the tools +==================== + +The standard library does not include build tools that support modern +Python packaging standards, as the core development team has found that it +is important to have standard tools that work consistently, even on older +versions of Python. + +The currently recommended build and distribution tools can be installed +using ``pip``:: + + pip install setuptools wheel twine + + +Reading the guide +================= + +The Python Packaging User Guide covers the various key steps and elements +involved in creating a project + +* `Project structure`_ +* `Building and packaging the project`_ +* `Uploading the project to the Python Package Index`_ + +.. _Project structure: \ + http://packaging.python.org/en/latest/tutorial.html#creating-your-own-project +.. _Building and packaging the project: \ + http://packaging.python.org/en/latest/tutorial.html#building-packaging-your-project +.. _Uploading the project to the Python Package Index: \ + http://packaging.python.org/en/latest/tutorial.html#uploading-your-project-to-pypi + + +How do I...? +============ + +These are quick answers or links for some common tasks. + +... choose a name for my project? +--------------------------------- + +This isn't an easy topic, but here are a few tips: + +* check the Python Package Index to see if the name is already in use +* check popular hosting sites like GitHub, BitBucket, etc to see if there + is already a project with that name +* check what comes up in a web search for the name you're considering +* avoid particularly common words, especially ones with multiple meanings, + as they can make it difficult for users to find your software when + searching for it + + +... create and distribute binary extensions? +-------------------------------------------- + +This is actually quite a complex topic, with a variety of alternatives +available depending on exactly what you're aiming to achieve. See the +Python Packaging User Guide for more information and recommendations. + +.. seealso:: + + `Python Packaging User Guide: Binary Extensions + `__ + +.. other topics: + + Once the Development & Deployment part of PPUG is fleshed out, some of + those sections should be linked from new questions here (most notably, + we should have a question about avoiding depending on PyPI that links to + http://packaging.python.org/en/latest/deployment.html#pypi-mirrors-and-caches) diff --git a/Doc/distutils/apiref.rst b/Doc/distutils/apiref.rst index 54f0a4e..36a911e 100644 --- a/Doc/distutils/apiref.rst +++ b/Doc/distutils/apiref.rst @@ -853,17 +853,6 @@ Windows. It also contains the Mingw32CCompiler class which handles the mingw32 port of GCC (same as cygwin in no-cygwin mode). -:mod:`distutils.emxccompiler` --- OS/2 EMX Compiler -=================================================== - -.. module:: distutils.emxccompiler - :synopsis: OS/2 EMX Compiler support - - -This module provides the EMXCCompiler class, a subclass of -:class:`UnixCCompiler` that handles the EMX port of the GNU C compiler to OS/2. - - :mod:`distutils.archive_util` --- Archiving utilities ====================================================== @@ -1171,15 +1160,6 @@ other utility module. underscore. No { } or ( ) style quoting is available. -.. function:: grok_environment_error(exc[, prefix='error: ']) - - Generate a useful error message from an :exc:`OSError` exception object. 
- Handles Python 1.5.1 and later styles, and does what it can to deal with - exception objects that don't have a filename (which happens when the error - is due to a two-file operation, such as :func:`~os.rename` or :func:`~os.link`). - Returns the error message as a string prefixed with *prefix*. - - .. function:: split_quoted(s) Split a string up according to Unix shell-like rules for quotes and backslashes. @@ -1943,8 +1923,12 @@ Subclasses of :class:`Command` must define the following methods. .. module:: distutils.command.clean :synopsis: Clean a package build area +This command removes the temporary files created by :command:`build` +and its subcommands, like intermediary compiled object files. With +the ``--all`` option, the complete build directory will be removed. -.. % todo +Extension modules built :ref:`in place ` +will not be cleaned, as they are not in the build directory. :mod:`distutils.command.config` --- Perform package configuration diff --git a/Doc/distutils/configfile.rst b/Doc/distutils/configfile.rst index 890047c..ac79671 100644 --- a/Doc/distutils/configfile.rst +++ b/Doc/distutils/configfile.rst @@ -69,6 +69,8 @@ universal :option:`--help` option, e.g. :: Note that an option spelled :option:`--foo-bar` on the command-line is spelled :option:`foo_bar` in configuration files. +.. _distutils-build-ext-inplace: + For example, say you want your extensions to be built "in-place"---that is, you have an extension :mod:`pkg.ext`, and you want the compiled extension file (:file:`ext.so` on Unix, say) to be put in the same source directory as your diff --git a/Doc/distutils/index.rst b/Doc/distutils/index.rst index 1a6f04c..90d1c1a 100644 --- a/Doc/distutils/index.rst +++ b/Doc/distutils/index.rst @@ -1,8 +1,8 @@ .. _distutils-index: -############################### - Distributing Python Modules -############################### +############################################## + Distributing Python Modules (Legacy version) +############################################## :Authors: Greg Ward, Anthony Baxter :Email: distutils-sig@python.org diff --git a/Doc/distutils/setupscript.rst b/Doc/distutils/setupscript.rst index ee96302..fbe4290 100644 --- a/Doc/distutils/setupscript.rst +++ b/Doc/distutils/setupscript.rst @@ -685,6 +685,8 @@ include the following code fragment in your :file:`setup.py` before the DistributionMetadata.download_url = None +.. _debug-setup-script: + Debugging the setup script ========================== @@ -700,7 +702,8 @@ installation is broken because they don't read all the way down to the bottom and see that it's a permission problem. On the other hand, this doesn't help the developer to find the cause of the -failure. For this purpose, the DISTUTILS_DEBUG environment variable can be set +failure. For this purpose, the :envvar:`DISTUTILS_DEBUG` environment variable can be set to anything except an empty string, and distutils will now print detailed -information what it is doing, and prints the full traceback in case an exception -occurs. +information about what it is doing, dump the full traceback when an exception +occurs, and print the whole command line when an external program (like a C +compiler) fails. diff --git a/Doc/extending/index.rst b/Doc/extending/index.rst index 44a7f92..dd43926 100644 --- a/Doc/extending/index.rst +++ b/Doc/extending/index.rst @@ -21,14 +21,31 @@ Python) that give the language its wide application range. For a detailed description of the whole Python/C API, see the separate :ref:`c-api-index`. -.. 
note:: - This guide only covers the basic tools for creating extensions provided - as part of this version of CPython. Third party tools may offer simpler - alternatives. Refer to the `binary extensions section - `__ - in the Python Packaging User Guide for more information. +Recommended third party tools +============================= +This guide only covers the basic tools for creating extensions provided +as part of this version of CPython. Third party tools like Cython, +``cffi``, SWIG and Numba offer both simpler and more sophisticated +approaches to creating C and C++ extensions for Python. + +.. seealso:: + + `Python Packaging User Guide: Binary Extensions `_ + The Python Packaging User Guide not only covers several available + tools that simplify the creation of binary extensions, but also + discusses the various reasons why creating an extension module may be + desirable in the first place. + + +Creating extensions without third party tools +============================================= + +This section of the guide covers creating C and C++ extensions without +assistance from third party tools. It is intended primarily for creators +of those tools, rather than being a recommended way to create your own +C extensions. .. toctree:: :maxdepth: 2 @@ -38,4 +55,17 @@ For a detailed description of the whole Python/C API, see the separate newtypes.rst building.rst windows.rst + +Embedding the CPython runtime in a larger application +===================================================== + +Sometimes, rather than creating an extension that runs inside the Python +interpreter as the main application, it is desirable to instead embed +the CPython runtime inside a larger application. This section covers +some of the details involved in doing that successfully. + +.. toctree:: + :maxdepth: 2 + :numbered: + embedding.rst diff --git a/Doc/faq/general.rst b/Doc/faq/general.rst index da2c933..6f4733f 100644 --- a/Doc/faq/general.rst +++ b/Doc/faq/general.rst @@ -471,7 +471,8 @@ that is written in Python using Tkinter. PythonWin is a Windows-specific IDE. Emacs users will be happy to know that there is a very good Python mode for Emacs. All of these programming environments provide syntax highlighting, auto-indenting, and access to the interactive interpreter while coding. Consult -http://www.python.org/editors/ for a full list of Python editing environments. +`the Python wiki `_ for a full list +of Python editing environments. If you want to discuss Python's use in education, you may be interested in joining `the edu-sig mailing list diff --git a/Doc/faq/library.rst b/Doc/faq/library.rst index 34e2fdf..5f4ff17 100644 --- a/Doc/faq/library.rst +++ b/Doc/faq/library.rst @@ -513,6 +513,7 @@ For data that is more regular (e.g. a homogeneous list of ints or floats), you can also use the :mod:`array` module. .. note:: + To read and write binary data, it is mandatory to open the file in binary mode (here, passing ``"rb"`` to :func:`open`). If you use ``"r"`` instead (the default), the file will be open in text mode diff --git a/Doc/faq/programming.rst b/Doc/faq/programming.rst index 79c7289..d514a80 100644 --- a/Doc/faq/programming.rst +++ b/Doc/faq/programming.rst @@ -1103,6 +1103,7 @@ Use a list comprehension:: result = [obj.method() for obj in mylist] +.. _faq-augmented-assignment-tuple-error: Why does a_tuple[i] += ['item'] raise an exception when the addition works? 
--------------------------------------------------------------------------- diff --git a/Doc/glossary.rst b/Doc/glossary.rst index 2f658ac..f71a1f7 100644 --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -783,6 +783,14 @@ Glossary mapping rather than a sequence because the lookups use arbitrary :term:`immutable` keys rather than integers. + The :class:`collections.abc.Sequence` abstract base class + defines a much richer interface that goes beyond just + :meth:`__getitem__` and :meth:`__len__`, adding :meth:`count`, + :meth:`index`, :meth:`__contains__`, and + :meth:`__reversed__`. Types that implement this expanded + interface can be registered explicitly using + :func:`~abc.register`. + single dispatch A form of :term:`generic function` dispatch where the implementation is chosen based on the type of a single argument. diff --git a/Doc/howto/pyporting.rst b/Doc/howto/pyporting.rst index d6ad401..9d7e859 100644 --- a/Doc/howto/pyporting.rst +++ b/Doc/howto/pyporting.rst @@ -26,6 +26,32 @@ Porting Python 2 Code to Python 3 For help with porting, you can email the python-porting_ mailing list with questions. +The Short Version +================= + +* Decide what's the oldest version of Python 2 you want to support (if at all) +* Make sure you have a thorough test suite and use continuous integration + testing to make sure you stay compatible with the versions of Python you care + about +* If you have dependencies, check their Python 3 status using caniusepython3 + (`command-line tool `__, + `web app `__) + +With that done, your options are: + +* If you are dropping Python 2 support, use 2to3_ to port to Python 3 +* If you are keeping Python 2 support, then start writing Python 2/3-compatible + code starting **TODAY** + + + If you have dependencies that have not been ported, reach out to them to port + their project while working to make your code compatible with Python 3 so + you're ready when your dependencies are all ported + + If all your dependencies have been ported (or you have none), go ahead and + port to Python 3 + +* If you are creating a new project that wants to have 2/3 compatibility, + code in Python 3 and then backport to Python 2 + Before You Begin ================ @@ -85,7 +111,7 @@ between Python 2 and 3 easier. Projects to Consider -------------------- -The lowest level library for suppoting Python 2 & 3 simultaneously is six_. +The lowest level library for supporting Python 2 & 3 simultaneously is six_. Reading through its documentation will give you an idea of where exactly the Python language changed between versions 2 & 3 and thus what you will want the library to help you continue to support. @@ -548,7 +574,10 @@ Backporting Python 3 code to Python 2 If you have Python 3 code and have little interest in supporting Python 2 you can use 3to2_ to translate from Python 3 code to Python 2 code. This is only -recommended if you don't plan to heavily support Python 2 users. +recommended if you don't plan to heavily support Python 2 users. Otherwise +write your code for Python 3 and then backport as far back as you want. This +is typically easier than going from Python 2 to 3 as you will have worked out +any difficulties with e.g. bytes/strings, etc. Other Resources diff --git a/Doc/includes/email-alternative-new-api.py b/Doc/includes/email-alternative-new-api.py index eeabf34..c1255a6 100644 --- a/Doc/includes/email-alternative-new-api.py +++ b/Doc/includes/email-alternative-new-api.py @@ -19,7 +19,7 @@ Cela ressemble à un excellent recipie[1] déjeuner. 
[1] http://www.yummly.com/recipe/Roasted-Asparagus-Epicurious-203718 ---Éric +--Pepé """) # Add the html version. This converts the message into a multipart/alternative diff --git a/Doc/install/index.rst b/Doc/install/index.rst index 779e923..d0fea3d 100644 --- a/Doc/install/index.rst +++ b/Doc/install/index.rst @@ -2,9 +2,9 @@ .. _install-index: -***************************** - Installing Python Modules -***************************** +******************************************** + Installing Python Modules (Legacy version) +******************************************** :Author: Greg Ward @@ -58,7 +58,8 @@ new goodies to their toolbox. You don't need to know Python to read this document; there will be some brief forays into using Python's interactive mode to explore your installation, but that's it. If you're looking for information on how to distribute your own Python modules so that others may use them, see -the :ref:`distutils-index` manual. +the :ref:`distutils-index` manual. :ref:`debug-setup-script` may also be of +interest. .. _inst-trivial-install: diff --git a/Doc/installing/index.rst b/Doc/installing/index.rst new file mode 100644 index 0000000..1284613 --- /dev/null +++ b/Doc/installing/index.rst @@ -0,0 +1,210 @@ +.. highlightlang:: none + +.. _installing-index: + +***************************** + Installing Python Modules +***************************** + +:Email: distutils-sig@python.org + +As a popular open source development project, Python has an active +supporting community of contributors and users that also make their software +available for other Python developers to use under open source license terms. + +This allows Python users to share and collaborate effectively, benefiting +from the solutions others have already created to common (and sometimes +even rare!) problems, as well as potentially contributing their own +solutions to the common pool. + +This guide covers the installation part of the process. For a guide to +creating and sharing your own Python projects, refer to the +:ref:`distribution guide `. + +.. note:: + + For corporate and other institutional users, be aware that many + organisations have their own policies around using and contributing to + open source software. Please take such policies into account when making + use of the distribution and installation tools provided with Python. + + +Key terms +========= + +* ``pip`` is the preferred installer program. Starting with Python 3.4, it + is included by default with the Python binary installers. +* a virtual environment is a semi-isolated Python environment that allows + packages to be installed for use by a particular application, rather than + being installed system wide +* ``pyvenv`` is the standard tool for creating virtual environments, and has + been part of Python since Python 3.3. Starting with Python 3.4, it + defaults to installing ``pip`` into all created virtual environments +* the `Python Package Index `__ is a public + repository of open source licensed packages made available for use by + other Python users +* the `Python Packaging Authority + `__ are the group of + developers and documentation authors responsible for the maintenance and + evolution of the standard packaging tools and the associated metadata and + file format standards. They maintain a variety of tools, documentation + and issue trackers on both `GitHub `__ and + `BitBucket `__. +* ``distutils`` is the original build and distribution system first added to + the Python standard library in 1998. 
While direct use of ``distutils`` is + being phased out, it still laid the foundation for the current packaging + and distribution infrastructure, and it not only remains part of the + standard library, but its name lives on in other ways (such as the name + of the mailing list used to coordinate Python packaging standards + development). + + +Basic usage +=========== + +The standard packaging tools are all designed to be used from the command +line. For Windows users, the examples below assume that the option to +adjust the system PATH environment variable was selected when installing +Python. For Linux users, the command to install into the system version of +Python 3 is likely to be ``pip3`` rather than ``pip``. + +The following command will install the latest version of a module and its +dependencies from the Python Package Index:: + + pip install SomePackage + +It's also possible to specify an exact or minimum version directly on the +command line:: + + pip install SomePackage==1.0.4 # specific version + pip install 'SomePackage>=1.0.4' # minimum version + +Normally, if a suitable module is already installed, attempting to install +it again will have no effect. Upgrading existing modules must be requested +explicitly:: + + pip install --upgrade SomePackage + +More information and resources regarding ``pip`` and its capabilities can be +found in the `Python Packaging User Guide `__. + +``pyvenv`` has its own documentation at :ref:`scripts-pyvenv`. Installing +into an active virtual environment uses the commands shown above. + +.. seealso:: + + `Python Packaging User Guide: Installing Python packages + `__ + + +How do I ...? +============= + +These are quick answers or links for some common tasks. + +... install ``pip`` in versions of Python prior to Python 3.4? +-------------------------------------------------------------- + +Python only started bundling ``pip`` with Python 3.4. For earlier versions, +``pip`` needs to be "bootstrapped" as described in the Python Packaging +User Guide. + +.. seealso:: + + `Python Packaging User Guide: Installing the Tools + `__ + + +.. installing-per-user-installation: + +... install packages just for the current user? +----------------------------------------------- + +Passing the ``--user`` option to ``pip install`` will install a package +just for the current user, rather than for all users of the system. + + +... install scientific Python packages? +--------------------------------------- + +A number of scientific Python packages have complex binary dependencies, and +aren't currently easy to install using ``pip`` directly. At this point in +time, it will often be easier for users to install these packages by +`other means +`__ +rather than attempting to install them with ``pip``. + +.. seealso:: + + `Python Packaging User Guide: Installing Scientific Packages + `__ + + +... work with multiple versions of Python installed in parallel? 
+---------------------------------------------------------------- + +On Linux, Mac OS X and other POSIX systems, use the versioned Python commands +in combination with the ``-m`` switch to run the appropriate copy of +``pip``:: + + python2 -m pip install SomePackage # default Python 2 + python2.7 -m pip install SomePackage # specifically Python 2.7 + python3 -m pip install SomePackage # default Python 3 + python3.4 -m pip install SomePackage # specifically Python 3.4 + +(appropriately versioned ``pip`` commands may also be available) + +On Windows, use the ``py`` Python launcher in combination with the ``-m`` +switch:: + + py -2 -m pip install SomePackage # default Python 2 + py -2.7 -m pip install SomePackage # specifically Python 2.7 + py -3 -m pip install SomePackage # default Python 3 + py -3.4 -m pip install SomePackage # specifically Python 3.4 + +.. other questions: + + Once the Development & Deployment part of PPUG is fleshed out, some of + those sections should be linked from new questions here (most notably, + we should have a question about avoiding depending on PyPI that links to + http://packaging.python.org/en/latest/deployment.html#pypi-mirrors-and-caches) + + +Common installation issues +========================== + +Installing into the system Python on Linux +------------------------------------------ + +On Linux systems, a Python installation will typically be included as part +of the distribution. Installing into this Python installation requires +root access to the system, and may interfere with the operation of the +system package manager and other components of the system if a component +is unexpectedly upgraded using ``pip``. + +On such systems, it is often better to use a virtual environment or a +per-user installation when installing packages with ``pip``. + + +Installing binary extensions +---------------------------- + +Python has typically relied heavily on source based distribution, with end +users being expected to compile extension modules from source as part of +the installation process. + +With the introduction of support for the binary ``wheel`` format, and the +ability to publish wheels for at least Windows and Mac OS X through the +Python Package Index, this problem is expected to diminish over time, +as users are more regularly able to install pre-built extensions rather +than needing to build them themselves. + +Some of the solutions for installing `scientific software +`__ +that is not yet available as pre-built ``wheel`` files may also help with +obtaining other binary extensions without needing to build them locally. + +.. seealso:: + + `Python Packaging User Guide: Binary Extensions + `__ diff --git a/Doc/library/aifc.rst b/Doc/library/aifc.rst index caecfbc..6fbcf28 100644 --- a/Doc/library/aifc.rst +++ b/Doc/library/aifc.rst @@ -226,7 +226,7 @@ number of frames must be filled in. file parameters have been set. .. versionchanged:: 3.4 - Any :term:`bytes-like object`\ s are now accepted. + Any :term:`bytes-like object` is now accepted. .. method:: aifc.writeframesraw(data) @@ -235,7 +235,7 @@ number of frames must be filled in. updated. .. versionchanged:: 3.4 - Any :term:`bytes-like object`\ s are now accepted. + Any :term:`bytes-like object` is now accepted. .. 
method:: aifc.close() diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 9d54964..df84169 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -488,7 +488,7 @@ Run subprocesses asynchronously using the :mod:`subprocess` module. * *stdout*: Either a file-like object representing the pipe to be connected to the subprocess's standard output stream using - :meth:`~BaseEventLoop.connect_write_pipe`, or the constant + :meth:`~BaseEventLoop.connect_read_pipe`, or the constant :const:`subprocess.PIPE` (the default). By default a new pipe will be created and connected. @@ -589,8 +589,8 @@ pool of processes). By default, an event loop uses a thread pool executor Arrange for a callback to be called in the specified executor. - *executor* is a :class:`~concurrent.futures.Executor` instance, - the default executor is used if *executor* is ``None``. + The *executor* argument should be an :class:`~concurrent.futures.Executor` + instance. The default executor is used if *executor* is ``None``. This method is a :ref:`coroutine `. diff --git a/Doc/library/asyncio-protocol.rst b/Doc/library/asyncio-protocol.rst index dabe7d8..80c974a 100644 --- a/Doc/library/asyncio-protocol.rst +++ b/Doc/library/asyncio-protocol.rst @@ -376,8 +376,8 @@ The following callbacks are called on :class:`DatagramProtocol` instances. Flow control callbacks ---------------------- -These callbacks may be called on :class:`Protocol` and -:class:`SubprocessProtocol` instances: +These callbacks may be called on :class:`Protocol`, +:class:`DatagramProtocol` and :class:`SubprocessProtocol` instances: .. method:: BaseProtocol.pause_writing() @@ -402,6 +402,15 @@ buffer size reaches the low-water mark. are important to ensure that things go as expected when either mark is zero. +.. note:: + On BSD systems (OS X, FreeBSD, etc.) flow control is not supported + for :class:`DatagramProtocol`, because send failures caused by + writing too many packets cannot be detected easily. The socket + always appears 'ready' and excess packets are dropped; an + :class:`OSError` with errno set to :const:`errno.ENOBUFS` may or + may not be raised; if it is raised, it will be reported to + :meth:`DatagramProtocol.error_received` but otherwise ignored. + Coroutines and protocols ------------------------ @@ -488,6 +497,6 @@ TCP echo server example, send back received data and close the connection:: :meth:`Transport.close` can be called immediately after :meth:`WriteTransport.write` even if data are not sent yet on the socket: both methods are asynchronous. ``yield from`` is not needed because these transport -methods don't return coroutines. +methods are not coroutines. diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst index 5e6e657..861bcc0 100644 --- a/Doc/library/asyncio-subprocess.rst +++ b/Doc/library/asyncio-subprocess.rst @@ -8,16 +8,16 @@ Create a subprocess .. function:: create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds) - Run the shell command *cmd* given as a string. Return a :class:`Process` + Run the shell command *cmd* given as a string. Return a :class:`~asyncio.subprocess.Process` instance. - This function returns a :ref:`coroutine object `. + This function is a :ref:`coroutine `. .. function:: create_subprocess_exec(\*args, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds) - Create a subprocess. Return a :class:`Process` instance. + Create a subprocess. 
Return a :class:`~asyncio.subprocess.Process` instance. - This function returns a :ref:`coroutine object `. + This function is a :ref:`coroutine `. Use the :meth:`BaseEventLoop.connect_read_pipe` and :meth:`BaseEventLoop.connect_write_pipe` methods to connect pipes. diff --git a/Doc/library/asyncio-sync.rst b/Doc/library/asyncio-sync.rst index de38131..a299f09 100644 --- a/Doc/library/asyncio-sync.rst +++ b/Doc/library/asyncio-sync.rst @@ -64,7 +64,7 @@ Lock .. method:: locked() - Return ``True`` if lock is acquired. + Return ``True`` if the lock is acquired. .. method:: acquire() @@ -73,7 +73,7 @@ Lock This method blocks until the lock is unlocked, then sets it to locked and returns ``True``. - This method returns a :ref:`coroutine object `. + This method is a :ref:`coroutine `. .. method:: release() @@ -141,6 +141,15 @@ Condition A new :class:`Lock` object is created and used as the underlying lock. + .. method:: acquire() + + Acquire the underlying lock. + + This method blocks until the lock is unlocked, then sets it to locked and + returns ``True``. + + This method is a :ref:`coroutine `. + .. method:: notify(n=1) By default, wake up one coroutine waiting on this condition, if any. @@ -156,6 +165,10 @@ Condition call until it can reacquire the lock. Since :meth:`notify` does not release the lock, its caller should. + .. method:: locked() + + Return ``True`` if the underlying lock is acquired. + .. method:: notify_all() Wake up all threads waiting on this condition. This method acts like @@ -163,6 +176,18 @@ Condition calling thread has not acquired the lock when this method is called, a :exc:`RuntimeError` is raised. + .. method:: release() + + Release the underlying lock. + + When the lock is locked, reset it to unlocked, and return. If any other + coroutines are blocked waiting for the lock to become unlocked, allow + exactly one of them to proceed. + + When invoked on an unlocked lock, a :exc:`RuntimeError` is raised. + + There is no return value. + .. method:: wait() Wait until notified. diff --git a/Doc/library/audioop.rst b/Doc/library/audioop.rst index fbb7fc6..ce127aa 100644 --- a/Doc/library/audioop.rst +++ b/Doc/library/audioop.rst @@ -12,10 +12,8 @@ integers, unless specified otherwise. .. versionchanged:: 3.4 Support for 24-bit samples was added. - -.. versionchanged:: 3.4 - Any :term:`bytes-like object`\ s are now accepted by all functions in this - module. Strings no more supported. + All functions now accept any :term:`bytes-like object`. + String input now results in an immediate error. .. index:: single: Intel/DVI ADPCM @@ -82,7 +80,7 @@ The module defines the following variables and functions: "Byteswap" all samples in a fragment and returns the modified fragment. Converts big-endian samples to little-endian and vice versa. - .. versionadded: 3.4 + .. versionadded:: 3.4 .. function:: cross(fragment, width) diff --git a/Doc/library/base64.rst b/Doc/library/base64.rst index 3d1f087..02b4d7b 100644 --- a/Doc/library/base64.rst +++ b/Doc/library/base64.rst @@ -1,27 +1,33 @@ -:mod:`base64` --- RFC 3548: Base16, Base32, Base64 Data Encodings -================================================================= +:mod:`base64` --- Base16, Base32, Base64, Base85 Data Encodings +=============================================================== .. module:: base64 - :synopsis: RFC 3548: Base16, Base32, Base64 Data Encodings + :synopsis: RFC 3548: Base16, Base32, Base64 Data Encodings; + Base85 and Ascii85 .. 
index:: pair: base64; encoding single: MIME; base64 encoding -This module provides data encoding and decoding as specified in :rfc:`3548`. -This standard defines the Base16, Base32, and Base64 algorithms for encoding -and decoding arbitrary binary strings into ASCII-only byte strings that can be +This module provides functions for encoding binary data to printable +ASCII characters and decoding such encodings back to binary data. +It provides encoding and decoding functions for the encodings specified in +in :rfc:`3548`, which defines the Base16, Base32, and Base64 algorithms, +and for the de-facto standard Ascii85 and Base85 encodings. + +The :rfc:`3548` encodings are suitable for encoding binary data so that it can safely sent by email, used as parts of URLs, or included as part of an HTTP POST request. The encoding algorithm is not the same as the :program:`uuencode` program. -There are two interfaces provided by this module. The modern interface -supports encoding and decoding ASCII byte string objects using all three -alphabets. Additionally, the decoding functions of the modern interface also -accept Unicode strings containing only ASCII characters. The legacy interface -provides for encoding and decoding to and from file-like objects as well as -byte strings, but only using the Base64 standard alphabet. +There are two :rfc:`3548` interfaces provided by this module. The modern +interface supports encoding and decoding ASCII byte string objects using all +three :rfc:`3548` defined alphabets (normal, URL-safe, and filesystem-safe). +Additionally, the decoding functions of the modern interface also accept +Unicode strings containing only ASCII characters. The legacy interface provides +for encoding and decoding to and from file-like objects as well as byte +strings, but only using the Base64 standard alphabet. .. versionchanged:: 3.3 ASCII-only Unicode strings are now accepted by the decoding functions of @@ -29,7 +35,7 @@ byte strings, but only using the Base64 standard alphabet. .. versionchanged:: 3.4 Any :term:`bytes-like object`\ s are now accepted by all - encoding and decoding functions in this module. + encoding and decoding functions in this module. Ascii85/Base85 support added. The modern interface provides: diff --git a/Doc/library/csv.rst b/Doc/library/csv.rst index a20c4be..ccc9dc6 100644 --- a/Doc/library/csv.rst +++ b/Doc/library/csv.rst @@ -142,36 +142,43 @@ The :mod:`csv` module defines the following functions: The :mod:`csv` module defines the following classes: -.. class:: DictReader(csvfile, fieldnames=None, restkey=None, restval=None, dialect='excel', *args, **kwds) - - Create an object which operates like a regular reader but maps the information - read into a dict whose keys are given by the optional *fieldnames* parameter. - If the *fieldnames* parameter is omitted, the values in the first row of the - *csvfile* will be used as the fieldnames. If the row read has more fields - than the fieldnames sequence, the remaining data is added as a sequence - keyed by the value of *restkey*. If the row read has fewer fields than the - fieldnames sequence, the remaining keys take the value of the optional - *restval* parameter. Any other optional or keyword arguments are passed to - the underlying :class:`reader` instance. - - -.. class:: DictWriter(csvfile, fieldnames, restval='', extrasaction='raise', dialect='excel', *args, **kwds) - - Create an object which operates like a regular writer but maps dictionaries onto - output rows. 
The *fieldnames* parameter identifies the order in which values in - the dictionary passed to the :meth:`writerow` method are written to the - *csvfile*. The optional *restval* parameter specifies the value to be written - if the dictionary is missing a key in *fieldnames*. If the dictionary passed to - the :meth:`writerow` method contains a key not found in *fieldnames*, the - optional *extrasaction* parameter indicates what action to take. If it is set - to ``'raise'`` a :exc:`ValueError` is raised. If it is set to ``'ignore'``, - extra values in the dictionary are ignored. Any other optional or keyword - arguments are passed to the underlying :class:`writer` instance. - - Note that unlike the :class:`DictReader` class, the *fieldnames* parameter of - the :class:`DictWriter` is not optional. Since Python's :class:`dict` objects - are not ordered, there is not enough information available to deduce the order - in which the row should be written to the *csvfile*. +.. class:: DictReader(csvfile, fieldnames=None, restkey=None, restval=None, \ + dialect='excel', *args, **kwds) + + Create an object which operates like a regular reader but maps the + information read into a dict whose keys are given by the optional + *fieldnames* parameter. The *fieldnames* parameter is a :mod:`sequence + ` whose elements are associated with the fields of the + input data in order. These elements become the keys of the resulting + dictionary. If the *fieldnames* parameter is omitted, the values in the + first row of the *csvfile* will be used as the fieldnames. If the row read + has more fields than the fieldnames sequence, the remaining data is added as + a sequence keyed by the value of *restkey*. If the row read has fewer + fields than the fieldnames sequence, the remaining keys take the value of + the optional *restval* parameter. Any other optional or keyword arguments + are passed to the underlying :class:`reader` instance. + + +.. class:: DictWriter(csvfile, fieldnames, restval='', extrasaction='raise', \ + dialect='excel', *args, **kwds) + + Create an object which operates like a regular writer but maps dictionaries + onto output rows. The *fieldnames* parameter is a :mod:`sequence + ` of keys that identify the order in which values in the + dictionary passed to the :meth:`writerow` method are written to the + *csvfile*. The optional *restval* parameter specifies the value to be + written if the dictionary is missing a key in *fieldnames*. If the + dictionary passed to the :meth:`writerow` method contains a key not found in + *fieldnames*, the optional *extrasaction* parameter indicates what action to + take. If it is set to ``'raise'`` a :exc:`ValueError` is raised. If it is + set to ``'ignore'``, extra values in the dictionary are ignored. Any other + optional or keyword arguments are passed to the underlying :class:`writer` + instance. + + Note that unlike the :class:`DictReader` class, the *fieldnames* parameter + of the :class:`DictWriter` is not optional. Since Python's :class:`dict` + objects are not ordered, there is not enough information available to deduce + the order in which the row should be written to the *csvfile*. .. class:: Dialect diff --git a/Doc/library/curses.rst b/Doc/library/curses.rst index 314636e..f3e60b4 100644 --- a/Doc/library/curses.rst +++ b/Doc/library/curses.rst @@ -12,7 +12,7 @@ The :mod:`curses` module provides an interface to the curses library, the de-facto standard for portable advanced terminal handling. 
While curses is most widely used in the Unix environment, versions are available -for DOS, OS/2, and possibly other systems as well. This extension module is +for Windows, DOS, and possibly other systems as well. This extension module is designed to match the API of ncurses, an open-source curses library hosted on Linux and the BSD variants of Unix. diff --git a/Doc/library/distutils.rst b/Doc/library/distutils.rst index 6666a9b..8e3732b 100644 --- a/Doc/library/distutils.rst +++ b/Doc/library/distutils.rst @@ -12,14 +12,15 @@ additional modules into a Python installation. The new modules may be either 100%-pure Python, or may be extension modules written in C, or may be collections of Python packages which include modules coded in both Python and C. +Most Python users will *not* want to use this module directly, but instead +use the cross-version tools maintained by the Python Packaging Authority. +Refer to the `Python Packaging User Guide `_ +for more information. -User documentation and API reference are provided in another document: +For the benefits of packaging tool authors and users seeking a deeper +understanding of the details of the current packaging and distribution +system, the legacy :mod:`distutils` based user documentation and API +reference remain available: -.. seealso:: - - :ref:`distutils-index` - The manual for developers and packagers of Python modules. This describes - how to prepare :mod:`distutils`\ -based packages so that they may be - easily installed into an existing Python installation. It also contains - instructions for end-users wanting to install a distutils-based package, - :ref:`install-index`. +* :ref:`install-index` +* :ref:`distutils-index` diff --git a/Doc/library/email-examples.rst b/Doc/library/email-examples.rst index 294e131..cbbcb78 100644 --- a/Doc/library/email-examples.rst +++ b/Doc/library/email-examples.rst @@ -40,8 +40,10 @@ text version: [2]_ .. literalinclude:: ../includes/email-alternative.py -Examples using the Provision API -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. _email-contentmanager-api-examples: + +Examples using the Provisional API +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Here is a reworking of the last example using the provisional API. To make things a bit more interesting, we include a related image in the html part, and we save a copy of what we are going to send to disk, as well as sending it. diff --git a/Doc/library/email.contentmanager.rst b/Doc/library/email.contentmanager.rst index 5162da1..8f0bfdb 100644 --- a/Doc/library/email.contentmanager.rst +++ b/Doc/library/email.contentmanager.rst @@ -54,6 +54,7 @@ this module. documented in this module because of the provisional nature of the code, the implementation lives in the :mod:`email.message` module. +.. currentmodule:: email.message .. class:: EmailMessage(policy=default) @@ -235,6 +236,16 @@ this module. all other headers intact and in their original order. +.. class:: MIMEPart(policy=default) + + This class represents a subpart of a MIME message. It is identical to + :class:`EmailMessage`, except that no :mailheader:`MIME-Version` headers are + added when :meth:`~EmailMessage.set_content` is called, since sub-parts do + not need their own :mailheader:`MIME-Version` headers. + + +.. currentmodule:: email.contentmanager + .. class:: ContentManager() Base class for content managers. Provides the standard registry mechanisms @@ -305,14 +316,6 @@ this module. values of *typekey*, see :meth:`set_content`. -.. 
class:: MIMEPart(policy=default) - - This class represents a subpart of a MIME message. It is identical to - :class:`EmailMessage`, except that no :mailheader:`MIME-Version` headers are - added when :meth:`~EmailMessage.set_content` is called, since sub-parts do - not need their own :mailheader:`MIME-Version` headers. - - Content Manager Instances ~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/Doc/library/email.message.rst b/Doc/library/email.message.rst index 84a5f51..58f708c 100644 --- a/Doc/library/email.message.rst +++ b/Doc/library/email.message.rst @@ -466,7 +466,7 @@ Here are the methods of the :class:`Message` class: to ``False``. - .. method:: set_param(param, value, header='Content-Type', requote=True, + .. method:: set_param(param, value, header='Content-Type', requote=True, \ charset=None, language='', replace=False) Set a parameter in the :mailheader:`Content-Type` header. If the @@ -488,7 +488,7 @@ Here are the methods of the :class:`Message` class: end of the list of headers. If *replace* is ``True``, the header will be updated in place. - .. versionchanged: 3.4 ``replace`` keyword was added. + .. versionchanged:: 3.4 ``replace`` keyword was added. .. method:: del_param(param, header='content-type', requote=True) diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst index c2f9e6a..97358f0 100644 --- a/Doc/library/email.policy.rst +++ b/Doc/library/email.policy.rst @@ -419,7 +419,7 @@ added matters. To illustrate:: additional arguments. By default ``content_manager`` is set to :data:`~email.contentmanager.raw_data_manager`. - .. versionadded 3.4 + .. versionadded:: 3.4 The class provides the following concrete implementations of the abstract diff --git a/Doc/library/ensurepip.rst b/Doc/library/ensurepip.rst index 0d82135..8012f51 100644 --- a/Doc/library/ensurepip.rst +++ b/Doc/library/ensurepip.rst @@ -28,7 +28,7 @@ when creating a virtual environment) or after explicitly uninstalling .. seealso:: - :ref:`install-index` + :ref:`installing-index` The end user guide for installing Python packages :pep:`453`: Explicit bootstrapping of pip in Python installations diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst index 628f91c..fc7267b 100644 --- a/Doc/library/enum.rst +++ b/Doc/library/enum.rst @@ -139,7 +139,7 @@ If you want to access enum members by *name*, use item access:: >>> Color['green'] -If have an enum member and need its :attr:`name` or :attr:`value`:: +If you have an enum member and need its :attr:`name` or :attr:`value`:: >>> member = Color.red >>> member.name diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index c2017cc..0aac1bf 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -265,7 +265,6 @@ The following exceptions are the exceptions that are usually raised. :exc:`mmap.error` have been merged into :exc:`OSError`. .. versionchanged:: 3.4 - The :attr:`filename` attribute is now the original file name passed to the function, instead of the name encoded to or decoded from the filesystem encoding. Also, the :attr:`filename2` attribute was added. diff --git a/Doc/library/fractions.rst b/Doc/library/fractions.rst index fba199b..c2c7401 100644 --- a/Doc/library/fractions.rst +++ b/Doc/library/fractions.rst @@ -99,7 +99,9 @@ another rational number, or from a string. value of *flt*, which must be a :class:`float`. Beware that ``Fraction.from_float(0.3)`` is not the same value as ``Fraction(3, 10)`` - .. note:: From Python 3.2 onwards, you can also construct a + .. 
note:: + + From Python 3.2 onwards, you can also construct a :class:`Fraction` instance directly from a :class:`float`. @@ -108,7 +110,9 @@ another rational number, or from a string. This class method constructs a :class:`Fraction` representing the exact value of *dec*, which must be a :class:`decimal.Decimal` instance. - .. note:: From Python 3.2 onwards, you can also construct a + .. note:: + + From Python 3.2 onwards, you can also construct a :class:`Fraction` instance directly from a :class:`decimal.Decimal` instance. diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index 4371969..2b37069 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -540,12 +540,13 @@ are always available. They are listed here in alphabetical order. A call to ``format(value, format_spec)`` is translated to ``type(value).__format__(format_spec)`` which bypasses the instance dictionary when searching for the value's :meth:`__format__` method. A - :exc:`TypeError` exception is raised if the method is not found or if either - the *format_spec* or the return value are not strings. + :exc:`TypeError` exception is raised if the method search reaches + :mod:`object` and the *format_spec* is non-empty, or if either the + *format_spec* or the return value are not strings. - .. versionadded:: 3.4 + .. versionchanged:: 3.4 ``object().__format__(format_spec)`` raises :exc:`TypeError` - if *format_spec* is not empty string. + if *format_spec* is not an empty string. .. _func-frozenset: @@ -609,12 +610,26 @@ are always available. They are listed here in alphabetical order. This function is added to the built-in namespace by the :mod:`site` module. + .. versionchanged:: 3.4 + Changes to :mod:`pydoc` and :mod:`inspect` mean that the reported + signatures for callables are now more comprehensive and consistent. + .. function:: hex(x) - Convert an integer number to a hexadecimal string. The result is a valid Python - expression. If *x* is not a Python :class:`int` object, it has to define an - :meth:`__index__` method that returns an integer. + Convert an integer number to a lowercase hexadecimal string + prefixed with "0x", for example: + + >>> hex(255) + '0xff' + >>> hex(-42) + '-0x2a' + + If x is not a Python :class:`int` object, it has to define an __index__() + method that returns an integer. + + See also :func:`int` for converting a hexadecimal string to an + integer using a base of 16. .. note:: @@ -780,6 +795,9 @@ are always available. They are listed here in alphabetical order. such as ``sorted(iterable, key=keyfunc, reverse=True)[0]`` and ``heapq.nlargest(1, iterable, key=keyfunc)``. + .. versionadded:: 3.4 + The *default* keyword-only argument. + .. _func-memoryview: .. function:: memoryview(obj) @@ -811,6 +829,9 @@ are always available. They are listed here in alphabetical order. such as ``sorted(iterable, key=keyfunc)[0]`` and ``heapq.nsmallest(1, iterable, key=keyfunc)``. + .. versionadded:: 3.4 + The *default* keyword-only argument. + .. function:: next(iterator[, default]) Retrieve the next item from the *iterator* by calling its diff --git a/Doc/library/getopt.rst b/Doc/library/getopt.rst index b6ab3df..f9a1e53 100644 --- a/Doc/library/getopt.rst +++ b/Doc/library/getopt.rst @@ -10,6 +10,7 @@ -------------- .. note:: + The :mod:`getopt` module is a parser for command line options whose API is designed to be familiar to users of the C :c:func:`getopt` function. 
Users who are unfamiliar with the C :c:func:`getopt` function or who would like to write diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst index 8f4cd70..d27902e 100644 --- a/Doc/library/hashlib.rst +++ b/Doc/library/hashlib.rst @@ -101,18 +101,18 @@ Hashlib provides the following constant attributes: .. data:: algorithms_guaranteed - Contains the names of the hash algorithms guaranteed to be supported + A set containing the names of the hash algorithms guaranteed to be supported by this module on all platforms. .. versionadded:: 3.2 .. data:: algorithms_available - Contains the names of the hash algorithms that are available - in the running Python interpreter. These names will be recognized - when passed to :func:`new`. :attr:`algorithms_guaranteed` - will always be a subset. Duplicate algorithms with different - name formats may appear in this set (thanks to OpenSSL). + A set containing the names of the hash algorithms that are available in the + running Python interpreter. These names will be recognized when passed to + :func:`new`. :attr:`algorithms_guaranteed` will always be a subset. The + same algorithm may appear multiple times in this set under different names + (thanks to OpenSSL). .. versionadded:: 3.2 diff --git a/Doc/library/hmac.rst b/Doc/library/hmac.rst index 2e9b0b2..4858235 100644 --- a/Doc/library/hmac.rst +++ b/Doc/library/hmac.rst @@ -23,9 +23,8 @@ This module implements the HMAC algorithm as described by :rfc:`2104`. defaults to the :data:`hashlib.md5` constructor. .. versionchanged:: 3.4 - Parameter *key* can be a bytes or bytearray object. Parameter *msg* can - be of any type supported by :mod:`hashlib`. - + Parameter *key* can be a bytes or bytearray object. + Parameter *msg* can be of any type supported by :mod:`hashlib`. Paramter *digestmod* can be the name of a hash algorithm. .. deprecated:: 3.4 diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst index 0f7c8b3..dd19fb4 100644 --- a/Doc/library/http.server.rst +++ b/Doc/library/http.server.rst @@ -351,7 +351,7 @@ of which this module provides three different variants: The :class:`SimpleHTTPRequestHandler` class can be used in the following manner in order to create a very basic webserver serving files relative to -the current directory. :: +the current directory:: import http.server import socketserver @@ -365,15 +365,17 @@ the current directory. :: print("serving at port", PORT) httpd.serve_forever() +.. _http-server-cli: + :mod:`http.server` can also be invoked directly using the :option:`-m` switch of the interpreter with a ``port number`` argument. Similar to -the previous example, this serves files relative to the current directory. :: +the previous example, this serves files relative to the current directory:: python -m http.server 8000 -By default, server binds itself to all interfaces. To restrict it to bind to a -particular interface only, ``--bind ADDRESS`` argument can be used. For e.g, to -restrict the server to bind only to localhost. :: +By default, server binds itself to all interfaces. The option ``-b/--bind`` +specifies a specific address to which it should bind. For example, the +following command causes the server to bind to localhost only:: python -m http.server 8000 --bind 127.0.0.1 @@ -422,7 +424,7 @@ restrict the server to bind only to localhost. :: reasons. Problems with the CGI script will be translated to error 403. 
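
The :class:`CGIHTTPRequestHandler` described above can also be used programmatically rather than through the ``--cgi`` switch shown below. A minimal, illustrative sketch (not taken from the patch itself), assuming the CGI scripts live in a ``cgi-bin`` subdirectory of the served directory::

    import http.server
    import socketserver

    PORT = 8000
    # CGIHTTPRequestHandler executes scripts found under ./cgi-bin and ./htbin
    Handler = http.server.CGIHTTPRequestHandler

    httpd = socketserver.TCPServer(("", PORT), Handler)
    print("serving CGI scripts at port", PORT)
    httpd.serve_forever()
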
:class:`CGIHTTPRequestHandler` can be enabled in the command line by passing -the ``--cgi`` option.:: +the ``--cgi`` option:: python -m http.server --cgi 8000 diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst index a81ccfd..3b55b06 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -134,7 +134,7 @@ Functions When :func:`reload` is executed: - * Python modules' code is recompiled and the module-level code re-executed, + * Python module's code is recompiled and the module-level code re-executed, defining a new set of objects which are bound to names in the module's dictionary by reusing the :term:`loader` which originally loaded the module. The ``init`` function of extension modules is not called a second diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst index ccb2bd7..0c08712 100644 --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -729,6 +729,11 @@ Classes and functions Consider using the new :ref:`Signature Object ` interface, which provides a better way of introspecting functions. + .. versionchanged:: 3.4 + This function is now based on :func:`signature`, but still ignores + ``__wrapped__`` attributes and includes the already bound first + parameter in the signature output for bound methods. + .. function:: getargvalues(frame) diff --git a/Doc/library/io.rst b/Doc/library/io.rst index c80198b..79f65e0 100644 --- a/Doc/library/io.rst +++ b/Doc/library/io.rst @@ -686,6 +686,7 @@ than raw I/O does. :exc:`UnsupportedOperation`. .. warning:: + :class:`BufferedRWPair` does not attempt to synchronize accesses to its underlying raw streams. You should not pass it the same object as reader and writer; use :class:`BufferedRandom` instead. diff --git a/Doc/library/ipaddress.rst b/Doc/library/ipaddress.rst index 9ccf262..9625e71 100644 --- a/Doc/library/ipaddress.rst +++ b/Doc/library/ipaddress.rst @@ -154,20 +154,20 @@ write code that handles both IP versions correctly. .. attribute:: is_private ``True`` if the address is allocated for private networks. See - iana-ipv4-special-registry (for IPv4) or iana-ipv6-special-registry + iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ (for IPv6). .. attribute:: is_global ``True`` if the address is allocated for public networks. See - iana-ipv4-special-registry (for IPv4) or iana-ipv6-special-registry + iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ (for IPv6). - .. versionadded:: 3.4 + .. versionadded:: 3.4 .. attribute:: is_unspecified - ``True`` if the address is unspecified. See :RFC:`5375` (for IPv4) + ``True`` if the address is unspecified. See :RFC:`5735` (for IPv4) or :RFC:`2373` (for IPv6). .. attribute:: is_reserved @@ -184,6 +184,9 @@ write code that handles both IP versions correctly. ``True`` if the address is reserved for link-local usage. See :RFC:`3927`. +.. _iana-ipv4-special-registry: http://www.iana.org/assignments/iana-ipv4-special-registry/iana-ipv4-special-registry.xhtml +.. _iana-ipv6-special-registry: http://www.iana.org/assignments/iana-ipv6-special-registry/iana-ipv6-special-registry.xhtml + .. class:: IPv6Address(address) @@ -218,18 +221,23 @@ write code that handles both IP versions correctly. The long form of the address representation, with all leading zeroes and groups consisting entirely of zeroes included. + + For the following attributes, see the corresponding documention of the + :class:`IPv4Address` class: + .. attribute:: packed .. attribute:: version .. attribute:: max_prefixlen .. attribute:: is_multicast .. 
attribute:: is_private + .. attribute:: is_global .. attribute:: is_unspecified .. attribute:: is_reserved .. attribute:: is_loopback .. attribute:: is_link_local - Refer to the corresponding attribute documentation in - :class:`IPv4Address` + .. versionadded:: 3.4 + is_global .. attribute:: is_site_local diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst index 5fb1b7a..171b0a3 100644 --- a/Doc/library/logging.config.rst +++ b/Doc/library/logging.config.rst @@ -148,7 +148,9 @@ in :mod:`logging` itself) and defining handlers which are declared either in send it to the socket as a string of bytes preceded by a four-byte length string packed in binary using ``struct.pack('>L', n)``. - .. note:: Because portions of the configuration are passed through + .. note:: + + Because portions of the configuration are passed through :func:`eval`, use of this function may open its users to a security risk. While the function only binds to a socket on ``localhost``, and so does not accept connections from remote machines, there are scenarios where @@ -752,7 +754,9 @@ The ``class`` entry is optional. It indicates the name of the formatter's class :class:`~logging.Formatter` can present exception tracebacks in an expanded or condensed format. -.. note:: Due to the use of :func:`eval` as described above, there are +.. note:: + + Due to the use of :func:`eval` as described above, there are potential security risks which result from using the :func:`listen` to send and receive configurations via sockets. The risks are limited to where multiple users with no mutual trust run code on the same machine; see the diff --git a/Doc/library/math.rst b/Doc/library/math.rst index 7c3ab59..3c41672 100644 --- a/Doc/library/math.rst +++ b/Doc/library/math.rst @@ -36,9 +36,9 @@ Number-theoretic and representation functions .. function:: copysign(x, y) - Return *x* with the sign of *y*. On a platform that supports - signed zeros, ``copysign(1.0, -0.0)`` returns *-1.0*. - + Return a float with the magnitude (absolute value) of *x* but the sign of + *y*. On platforms that support signed zeros, ``copysign(1.0, -0.0)`` + returns *-1.0*. .. function:: fabs(x) diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst index a8872e7..b303e16 100644 --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -136,9 +136,11 @@ to start a process. These *start methods* are Available on Unix platforms which support passing file descriptors over Unix pipes. -Before Python 3.4 *fork* was the only option available on Unix. Also, -prior to Python 3.4, child processes would inherit all the parents -inheritable handles on Windows. +.. versionchanged:: 3.4 + *spawn* added on all unix platforms, and *forkserver* added for + some unix platforms. + Child processes no longer inherit all of the parents inheritable + handles on Windows. On Unix using the *spawn* or *forkserver* start methods will also start a *semaphore tracker* process which tracks the unlinked named @@ -1474,7 +1476,7 @@ their parent process exits. The manager classes are defined in the *exposed* is used to specify a sequence of method names which proxies for this typeid should be allowed to access using - :meth:`BaseProxy._callMethod`. (If *exposed* is ``None`` then + :meth:`BaseProxy._callmethod`. (If *exposed* is ``None`` then :attr:`proxytype._exposed_` is used instead if it exists.) In the case where no exposed list is specified, all "public methods" of the shared object will be accessible. 
(Here a "public method" means any attribute @@ -1853,25 +1855,30 @@ with the :class:`Pool` class. callbacks and has a parallel map implementation. *processes* is the number of worker processes to use. If *processes* is - ``None`` then the number returned by :func:`os.cpu_count` is used. If - *initializer* is not ``None`` then each worker process will call + ``None`` then the number returned by :func:`os.cpu_count` is used. + + If *initializer* is not ``None`` then each worker process will call ``initializer(*initargs)`` when it starts. + *maxtasksperchild* is the number of tasks a worker process can complete + before it will exit and be replaced with a fresh worker process, to enable + unused resources to be freed. The default *maxtasksperchild* is None, which + means worker processes will live as long as the pool. + + *context* can be used to specify the context used for starting + the worker processes. Usually a pool is created using the + function :func:`multiprocessing.Pool` or the :meth:`Pool` method + of a context object. In both cases *context* is set + appropriately. + Note that the methods of the pool object should only be called by the process which created the pool. .. versionadded:: 3.2 - *maxtasksperchild* is the number of tasks a worker process can complete - before it will exit and be replaced with a fresh worker process, to enable - unused resources to be freed. The default *maxtasksperchild* is None, which - means worker processes will live as long as the pool. + *maxtasksperchild* .. versionadded:: 3.4 - *context* can be used to specify the context used for starting - the worker processes. Usually a pool is created using the - function :func:`multiprocessing.Pool` or the :meth:`Pool` method - of a context object. In both cases *context* is set - appropriately. + *context* .. note:: diff --git a/Doc/library/operator.rst b/Doc/library/operator.rst index fa3bcdd..3bcbaa4 100644 --- a/Doc/library/operator.rst +++ b/Doc/library/operator.rst @@ -240,9 +240,9 @@ their character equivalents. .. function:: length_hint(obj, default=0) - Return an estimated length for the object *o*. First trying to return its + Return an estimated length for the object *o*. First try to return its actual length, then an estimate using :meth:`object.__length_hint__`, and - finally returning the default value. + finally return the default value. .. versionadded:: 3.4 diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst index 269856c..3771ab0 100644 --- a/Doc/library/os.path.rst +++ b/Doc/library/os.path.rst @@ -193,11 +193,17 @@ the :mod:`glob` module.) .. function:: ismount(path) - Return ``True`` if pathname *path* is a :dfn:`mount point`: a point in a file - system where a different file system has been mounted. The function checks - whether *path*'s parent, :file:`path/..`, is on a different device than *path*, - or whether :file:`path/..` and *path* point to the same i-node on the same - device --- this should detect mount points for all Unix and POSIX variants. + Return ``True`` if pathname *path* is a :dfn:`mount point`: a point in a + file system where a different file system has been mounted. On POSIX, the + function checks whether *path*'s parent, :file:`path/..`, is on a different + device than *path*, or whether :file:`path/..` and *path* point to the same + i-node on the same device --- this should detect mount points for all Unix + and POSIX variants. 
On Windows, a drive letter root and a share UNC are + always mount points, and for any other path ``GetVolumePathName`` is called + to see if it is different from the input path. + + .. versionadded:: 3.4 + Support for detecting non-root mount points on Windows. .. function:: join(path1[, path2[, ...]]) @@ -251,7 +257,7 @@ the :mod:`glob` module.) .. function:: samefile(path1, path2) Return ``True`` if both pathname arguments refer to the same file or directory. - On Unix, this is determined by the device number and i-node number and raises an + This is determined by the device number and i-node number and raises an exception if a :func:`os.stat` call on either pathname fails. Availability: Unix, Windows. diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 1fc8de9..f50e5d9 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -260,7 +260,9 @@ process and user. Availability: Unix. - .. note:: On Mac OS X, :func:`getgroups` behavior differs somewhat from + .. note:: + + On Mac OS X, :func:`getgroups` behavior differs somewhat from other Unix platforms. If the Python interpreter was built with a deployment target of :const:`10.5` or earlier, :func:`getgroups` returns the list of effective group ids associated with the current user process; @@ -940,8 +942,9 @@ or `the MSDN `_ on Window the C library. .. versionchanged:: 3.4 - Add :data:`O_TMPFILE` constant. It's only available on Linux Kernel 3.11 - or newer. + Add :data:`O_PATH` on systems that support it. + Add :data:`O_TMPFILE`, only available on Linux Kernel 3.11 + or newer. .. function:: openpty() @@ -2653,7 +2656,7 @@ written in Python, such as a mail server's external command delivery program. Fork a child process. Return ``0`` in the child and the child's process id in the parent. If an error occurs :exc:`OSError` is raised. - Note that some platforms including FreeBSD <= 6.3, Cygwin and OS/2 EMX have + Note that some platforms including FreeBSD <= 6.3 and Cygwin have known issues when using fork() from a thread. .. warning:: @@ -2899,7 +2902,6 @@ written in Python, such as a mail server's external command delivery program. :manpage:`times(2)` or the corresponding Windows Platform API documentation. On Windows, only :attr:`user` and :attr:`system` are known; the other attributes are zero. - On OS/2, only :attr:`elapsed` is known; the other attributes are zero. Availability: Unix, Windows. diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index 3aa9d4b..ec1dc4f 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -522,6 +522,36 @@ Pure paths provide the following methods and properties: ValueError: '/etc/passwd' does not start with '/usr' +.. method:: PurePath.with_name(name) + + Return a new path with the :attr:`name` changed. If the original path + doesn't have a name, ValueError is raised:: + + >>> p = PureWindowsPath('c:/Downloads/pathlib.tar.gz') + >>> p.with_name('setup.py') + PureWindowsPath('c:/Downloads/setup.py') + >>> p = PureWindowsPath('c:/') + >>> p.with_name('setup.py') + Traceback (most recent call last): + File "", line 1, in + File "/home/antoine/cpython/default/Lib/pathlib.py", line 751, in with_name + raise ValueError("%r has an empty name" % (self,)) + ValueError: PureWindowsPath('c:/') has an empty name + + +.. method:: PurePath.with_suffix(suffix) + + Return a new path with the :attr:`suffix` changed. 
If the original path + doesn't have a suffix, the new *suffix* is appended instead:: + + >>> p = PureWindowsPath('c:/Downloads/pathlib.tar.gz') + >>> p.with_suffix('.bz2') + PureWindowsPath('c:/Downloads/pathlib.tar.bz2') + >>> p = PureWindowsPath('README') + >>> p.with_suffix('.txt') + PureWindowsPath('README.txt') + + .. _concrete-paths: diff --git a/Doc/library/pkgutil.rst b/Doc/library/pkgutil.rst index 10b7848..13ea7b9 100644 --- a/Doc/library/pkgutil.rst +++ b/Doc/library/pkgutil.rst @@ -147,6 +147,7 @@ support. *prefix* is a string to output on the front of every module name on output. .. note:: + Only works for a :term:`finder` which defines an ``iter_modules()`` method. This interface is non-standard, so the module also provides implementations for :class:`importlib.machinery.FileFinder` and @@ -185,6 +186,7 @@ support. walk_packages(ctypes.__path__, ctypes.__name__ + '.') .. note:: + Only works for a :term:`finder` which defines an ``iter_modules()`` method. This interface is non-standard, so the module also provides implementations for :class:`importlib.machinery.FileFinder` and diff --git a/Doc/library/plistlib.rst b/Doc/library/plistlib.rst index 7301d49..6a2d6b4 100644 --- a/Doc/library/plistlib.rst +++ b/Doc/library/plistlib.rst @@ -32,6 +32,9 @@ Values can be strings, integers, floats, booleans, tuples, lists, dictionaries (but only with string keys), :class:`Data`, :class:`bytes`, :class:`bytesarray` or :class:`datetime.datetime` objects. +.. versionchanged:: 3.4 + New API, old API deprecated. Support for binary format plists added. + .. seealso:: `PList manual page `_ @@ -107,7 +110,7 @@ This module defines the following functions: An :exc:`OverflowError` will be raised for integer values that cannot be represented in (binary) plist files. - .. versionadded: 3.4 + .. versionadded:: 3.4 .. function:: dumps(value, \*, fmt=FMT_XML, sort_keys=True, skipkeys=False) @@ -116,7 +119,7 @@ This module defines the following functions: the documentation for :func:`dump` for an explanation of the keyword arguments of this function. - .. versionadded: 3.4 + .. versionadded:: 3.4 The following functions are deprecated: @@ -135,7 +138,7 @@ The following functions are deprecated: to ``__getitem_``. This means that you can use attribute access to access items of these dictionaries. - .. deprecated: 3.4 Use :func:`load` instead. + .. deprecated:: 3.4 Use :func:`load` instead. .. function:: writePlist(rootObject, pathOrFile) @@ -143,7 +146,7 @@ The following functions are deprecated: Write *rootObject* to an XML plist file. *pathOrFile* may be either a file name or a (writable and binary) file object - .. deprecated: 3.4 Use :func:`dump` instead. + .. deprecated:: 3.4 Use :func:`dump` instead. .. function:: readPlistFromBytes(data) @@ -194,7 +197,7 @@ The following classes are available: .. deprecated:: 3.4 Use a :class:`bytes` object instead -The following constants are avaiable: +The following constants are available: .. data:: FMT_XML diff --git a/Doc/library/pydoc.rst b/Doc/library/pydoc.rst index e100865..3f520e8 100644 --- a/Doc/library/pydoc.rst +++ b/Doc/library/pydoc.rst @@ -84,3 +84,8 @@ Reference Manual pages. .. versionchanged:: 3.2 Added the ``-b`` option, deprecated the ``-g`` option. + +.. versionchanged:: 3.4 + :mod:`pydoc` now uses :func:`inspect.signature` rather than + :func:`inspect.getfullargspec` to extract signature information from + callables. 
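
As a brief sketch of the :func:`inspect.signature` machinery that :mod:`pydoc` now relies on (the ``greet`` function here is only an illustrative stand-in, not something from the patch)::

    import inspect

    def greet(name, *, greeting="Hello"):
        return "{}, {}!".format(greeting, name)

    sig = inspect.signature(greet)
    print(sig)                                  # (name, *, greeting='Hello')
    print(list(sig.parameters))                 # ['name', 'greeting']
    print(sig.parameters['greeting'].default)   # 'Hello'
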
diff --git a/Doc/library/site.rst b/Doc/library/site.rst index d93e938..2fdf303 100644 --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -123,9 +123,13 @@ On systems that support :mod:`readline`, this module will also import and configure the :mod:`rlcompleter` module, if Python is started in :ref:`interactive mode ` and without the :option:`-S` option. The default behavior is enable tab-completion and to use -:file:`~/.python_history` as the history save file. To disable it, override -the :data:`sys.__interactivehook__` attribute in your :mod:`sitecustomize` -or :mod:`usercustomize` module or your :envvar:`PYTHONSTARTUP` file. +:file:`~/.python_history` as the history save file. To disable it, delete (or +override) the :data:`sys.__interactivehook__` attribute in your +:mod:`sitecustomize` or :mod:`usercustomize` module or your +:envvar:`PYTHONSTARTUP` file. + +.. versionchanged:: 3.4 + Activation of rlcompleter and history was made automatic. Module contents diff --git a/Doc/library/smtplib.rst b/Doc/library/smtplib.rst index eba8ae9..ec8dc9d 100644 --- a/Doc/library/smtplib.rst +++ b/Doc/library/smtplib.rst @@ -117,6 +117,9 @@ A nice selection of exceptions is defined as well: Subclass of :exc:`OSError` that is the base exception class for all the other exceptions provided by this module. + .. versionchanged:: 3.4 + SMTPException became subclass of :exc:`OSError` + .. exception:: SMTPServerDisconnected diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index c2e9f00..54c6bad 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -6,8 +6,7 @@ This module provides access to the BSD *socket* interface. It is available on -all modern Unix systems, Windows, MacOS, OS/2, and probably additional -platforms. +all modern Unix systems, Windows, MacOS, and probably additional platforms. .. note:: @@ -468,7 +467,7 @@ The :mod:`socket` module also offers various network-related services: (10, 1, 6, '', ('2001:888:2000:d::a2', 80, 0, 0))] .. versionchanged:: 3.2 - parameters can now be passed as single keyword arguments. + parameters can now be passed using keyword arguments. .. function:: getfqdn([name]) @@ -634,6 +633,9 @@ The :mod:`socket` module also offers various network-related services: Availability: Unix (maybe not all platforms), Windows. + .. versionchanged:: 3.4 + Windows support added + .. function:: inet_ntop(address_family, packed_ip) @@ -650,6 +652,9 @@ The :mod:`socket` module also offers various network-related services: Availability: Unix (maybe not all platforms), Windows. + .. versionchanged:: 3.4 + Windows support added + .. XXX: Are sendmsg(), recvmsg() and CMSG_*() available on any @@ -794,6 +799,7 @@ to sockets. :keyword:`with` statement around them. .. note:: + :meth:`close()` releases the resource associated with a connection but does not necessarily close the connection immediately. If you want to close the connection in a timely fashion, call :meth:`shutdown()` @@ -1512,4 +1518,3 @@ the :data:`SO_REUSEADDR` flag tells the kernel to reuse a local socket in details of socket semantics. For Unix, refer to the manual pages; for Windows, see the WinSock (or Winsock 2) specification. For IPv6-ready APIs, readers may want to refer to :rfc:`3493` titled Basic Socket Interface Extensions for IPv6. - diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst index 76bb432..0ed5fb2 100644 --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -818,6 +818,7 @@ SSL sockets also have the following additional methods and attributes: 'version': 3} .. 
note:: + To validate a certificate for a particular service, you can use the :func:`match_hostname` function. @@ -841,10 +842,8 @@ SSL sockets also have the following additional methods and attributes: .. versionchanged:: 3.4 :exc:`ValueError` is raised when the handshake isn't done. - - .. versionchanged:: 3.4 The returned dictionary includes additional X509v3 extension items - such as ``crlDistributionPoints``, ``caIssuers`` and ``OCSP`` URIs. + such as ``crlDistributionPoints``, ``caIssuers`` and ``OCSP`` URIs. .. method:: SSLSocket.cipher() diff --git a/Doc/library/stringprep.rst b/Doc/library/stringprep.rst index 47144a6..fc890cb 100644 --- a/Doc/library/stringprep.rst +++ b/Doc/library/stringprep.rst @@ -3,7 +3,6 @@ .. module:: stringprep :synopsis: String preparation, as per RFC 3453 - :deprecated: .. moduleauthor:: Martin v. Löwis .. sectionauthor:: Martin v. Löwis diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index d2893b6..cdcbe82 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -629,6 +629,12 @@ Instances of the :class:`Popen` class have the following methods: :exc:`TimeoutExpired` exception. It is safe to catch this exception and retry the wait. + .. note:: + + The function is implemented using a busy loop (non-blocking call and + short sleeps). Use the :mod:`asyncio` module for an asynchronous wait: + see :class:`asyncio.create_subprocess_exec`. + .. warning:: This will deadlock when using ``stdout=PIPE`` and/or @@ -639,6 +645,11 @@ Instances of the :class:`Popen` class have the following methods: .. versionchanged:: 3.3 *timeout* was added. + .. deprecated:: 3.4 + + Do not use the undocumented *endtime* parameter. It is was + unintentionally exposed in 3.3 but was intended to be private + for internal use. Use *timeout* instead. .. method:: Popen.communicate(input=None, timeout=None) @@ -1080,8 +1091,10 @@ handling consistency are valid for these functions. >>> subprocess.getstatusoutput('/bin/junk') (256, 'sh: /bin/junk: not found') - .. versionchanged:: 3.3 - Availability: Unix & Windows + Availability: Unix & Windows + + .. versionchanged:: 3.3.4 + Windows support added .. function:: getoutput(cmd) @@ -1094,8 +1107,10 @@ handling consistency are valid for these functions. >>> subprocess.getoutput('ls /bin/ls') '/bin/ls' - .. versionchanged:: 3.3 - Availability: Unix & Windows + Availability: Unix & Windows + + .. versionchanged:: 3.3.4 + Windows support added Notes diff --git a/Doc/library/sunau.rst b/Doc/library/sunau.rst index 15c06b5..a94ae08 100644 --- a/Doc/library/sunau.rst +++ b/Doc/library/sunau.rst @@ -251,7 +251,7 @@ AU_write objects, as returned by :func:`.open` above, have the following methods Write audio frames, without correcting *nframes*. .. versionchanged:: 3.4 - Any :term:`bytes-like object`\ s are now accepted. + Any :term:`bytes-like object` is now accepted. .. method:: AU_write.writeframes(data) @@ -259,7 +259,7 @@ AU_write objects, as returned by :func:`.open` above, have the following methods Write audio frames and make sure *nframes* is correct. .. versionchanged:: 3.4 - Any :term:`bytes-like object`\ s are now accepted. + Any :term:`bytes-like object` is now accepted. .. method:: AU_write.close() diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index 5ff7178..e3cc866 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -227,7 +227,9 @@ always available. installed in :file:`{exec_prefix}/lib/python{X.Y}/lib-dynload`, where *X.Y* is the version number of Python, for example ``3.2``. 
- .. note:: If a :ref:`virtual environment ` is in effect, this + .. note:: + + If a :ref:`virtual environment ` is in effect, this value will be changed in ``site.py`` to point to the virtual environment. The value for the Python installation will still be available, via :data:`base_exec_prefix`. @@ -692,10 +694,11 @@ always available. .. data:: __interactivehook__ - When present, this function is automatically called (with no arguments) - when the interpreter is launched in :ref:`interactive mode `. - This is done after the :envvar:`PYTHONSTARTUP` file is read, so that you - can set this hook there. + When this attribute exists, its value is automatically called (with no + arguments) when the interpreter is launched in :ref:`interactive mode + `. This is done after the :envvar:`PYTHONSTARTUP` file is + read, so that you can set this hook there. The :mod:`site` module + :ref:`sets this `. .. versionadded:: 3.4 diff --git a/Doc/library/test.rst b/Doc/library/test.rst index 2c51549..83026d8 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -199,6 +199,7 @@ The :mod:`test.support` module provides support for Python's regression test suite. .. note:: + :mod:`test.support` is not a public module. It is documented here to help Python developers write tests. The API of this module is subject to change without backwards compatibility concerns between releases. diff --git a/Doc/library/textwrap.rst b/Doc/library/textwrap.rst index 1ba42a3..edf1fd6 100644 --- a/Doc/library/textwrap.rst +++ b/Doc/library/textwrap.rst @@ -40,13 +40,14 @@ functions should be good enough; otherwise, you should use an instance of :func:`wrap`. -.. function:: shorten(text, width=70, *, placeholder=" [...]") +.. function:: shorten(text, width, **kwargs) - Collapse and truncate the given text to fit in the given width. + Collapse and truncate the given *text* to fit in the given *width*. - The text first has its whitespace collapsed. If it then fits in - the *width*, it is returned unchanged. Otherwise, as many words - as possible are joined and then the *placeholder* is appended:: + First the whitespace in *text* is collapsed (all whitespace is replaced by + single spaces). If the result fits in the *width*, it is returned. + Otherwise, enough words are dropped from the end so that the remaining words + plus the :attr:`placeholder` fit within :attr:`width`:: >>> textwrap.shorten("Hello world!", width=12) 'Hello world!' @@ -55,6 +56,12 @@ functions should be good enough; otherwise, you should use an instance of >>> textwrap.shorten("Hello world", width=10, placeholder="...") 'Hello...' + Optional keyword arguments correspond to the instance attributes of + :class:`TextWrapper`, documented below. Note that the whitespace is + collapsed before the text is passed to the :class:`TextWrapper` :meth:`fill` + function, so changing the value of :attr:`.tabsize`, :attr:`.expand_tabs`, + :attr:`.drop_whitespace`, and :attr:`.replace_whitespace` will have no effect. + .. versionadded:: 3.4 @@ -110,8 +117,8 @@ functions should be good enough; otherwise, you should use an instance of :func:`wrap`, :func:`fill` and :func:`shorten` work by creating a :class:`TextWrapper` instance and calling a single method on it. That instance is not reused, so for applications that process many text -strings, it may be more efficient to create your own -:class:`TextWrapper` object. +strings using :func:`wrap` and/or :func:`fill`, it may be more efficient to +create your own :class:`TextWrapper` object. 
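
A minimal sketch of the reuse pattern recommended above, combined with the new 3.4 truncation attributes; the sample strings are arbitrary::

    import textwrap

    # one TextWrapper instance reused across many strings
    wrapper = textwrap.TextWrapper(width=30, max_lines=2, placeholder=' [...]')
    for text in ("The quick brown fox jumps over the lazy dog.",
                 "Sphinx of black quartz, judge my vow."):
        print(wrapper.fill(text))
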
Text is preferably wrapped on whitespaces and right after the hyphens in hyphenated words; only then will long words be broken if necessary, unless @@ -252,16 +259,16 @@ hyphenated words; only then will long words be broken if necessary, unless .. attribute:: max_lines - (default: ``None``) If not ``None``, then the text be will truncated to - *max_lines* lines. + (default: ``None``) If not ``None``, then the output will contain at most + *max_lines* lines, with *placeholder* appearing at the end of the output. .. versionadded:: 3.4 .. attribute:: placeholder - (default: ``' [...]'``) String that will be appended to the last line of - text if it will be truncated. + (default: ``' [...]'``) String that will appear at the end of the output + text if it has been truncated. .. versionadded:: 3.4 diff --git a/Doc/library/tracemalloc.rst b/Doc/library/tracemalloc.rst index c12ef87..3405518 100644 --- a/Doc/library/tracemalloc.rst +++ b/Doc/library/tracemalloc.rst @@ -184,6 +184,7 @@ Pretty top Code to display the 10 lines allocating the most memory with a pretty output, ignoring ```` and ```` files:: + import linecache import os import tracemalloc @@ -201,6 +202,9 @@ ignoring ```` and ```` files:: filename = os.sep.join(frame.filename.split(os.sep)[-2:]) print("#%s: %s:%s: %.1f KiB" % (index, filename, frame.lineno, stat.size / 1024)) + line = linecache.getline(frame.filename, frame.lineno).strip() + if line: + print(' %s' % line) other = top_stats[limit:] if other: @@ -218,19 +222,28 @@ ignoring ```` and ```` files:: Example of output of the Python test suite:: - 2013-11-08 14:16:58.149320: Top 10 lines - #1: collections/__init__.py:368: 291.9 KiB - #2: Lib/doctest.py:1291: 200.2 KiB - #3: unittest/case.py:571: 160.3 KiB - #4: Lib/abc.py:133: 99.8 KiB - #5: urllib/parse.py:476: 71.8 KiB - #6: :5: 62.7 KiB - #7: Lib/base64.py:140: 59.8 KiB - #8: Lib/_weakrefset.py:37: 51.8 KiB - #9: collections/__init__.py:362: 50.6 KiB - #10: test/test_site.py:56: 48.0 KiB - 7496 other: 4161.9 KiB - Total allocated size: 5258.8 KiB + Top 10 lines + #1: Lib/base64.py:414: 419.8 KiB + _b85chars2 = [(a + b) for a in _b85chars for b in _b85chars] + #2: Lib/base64.py:306: 419.8 KiB + _a85chars2 = [(a + b) for a in _a85chars for b in _a85chars] + #3: collections/__init__.py:368: 293.6 KiB + exec(class_definition, namespace) + #4: Lib/abc.py:133: 115.2 KiB + cls = super().__new__(mcls, name, bases, namespace) + #5: unittest/case.py:574: 103.1 KiB + testMethod() + #6: Lib/linecache.py:127: 95.4 KiB + lines = fp.readlines() + #7: urllib/parse.py:476: 71.8 KiB + for a in _hexdig for b in _hexdig} + #8: :5: 62.0 KiB + #9: Lib/_weakrefset.py:37: 60.0 KiB + self.data = set() + #10: Lib/base64.py:142: 59.8 KiB + _b32tab2 = [a + b for a in _b32tab for b in _b32tab] + 6220 other: 3602.8 KiB + Total allocated size: 5303.1 KiB See :meth:`Snapshot.statistics` for more options. diff --git a/Doc/library/types.rst b/Doc/library/types.rst index c4f57e4..abdb939 100644 --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -15,6 +15,9 @@ It also defines names for some object types that are used by the standard Python interpreter, but not exposed as builtins like :class:`int` or :class:`str` are. +Finally, it provides some additional type-related utility classes and functions +that are not fundamental enough to be builtins. + Dynamic Type Creation --------------------- @@ -220,6 +223,9 @@ Standard names are defined for the following types: Return a new view of the underlying mapping's values. 
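
A short sketch of the read-only mapping views described above (the dictionary is just an example)::

    import types

    data = {'one': 1}
    proxy = types.MappingProxyType(data)

    print(proxy['one'])            # 1
    data['two'] = 2                # changes to the underlying mapping show through
    print(sorted(proxy.values()))  # [1, 2]
    # proxy['three'] = 3           # would raise TypeError: the proxy is read-only
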
+Additional Utility Classes and Functions +---------------------------------------- + .. class:: SimpleNamespace A simple :class:`object` subclass that provides attribute access to its @@ -246,3 +252,18 @@ Standard names are defined for the following types: instead. .. versionadded:: 3.3 + + +.. function:: DynamicClassAttribute(fget=None, fset=None, fdel=None, doc=None) + + Route attribute access on a class to __getattr__. + + This is a descriptor, used to define attributes that act differently when + accessed through an instance and through a class. Instance access remains + normal, but access to an attribute through a class will be routed to the + class's __getattr__ method; this is done by raising AttributeError. + + This allows one to have properties active on an instance, and have virtual + attributes on the class with the same name (see Enum for an example). + + .. versionadded:: 3.4 diff --git a/Doc/library/undoc.rst b/Doc/library/undoc.rst index 80386d2..20830e2 100644 --- a/Doc/library/undoc.rst +++ b/Doc/library/undoc.rst @@ -20,7 +20,7 @@ These modules are used to implement the :mod:`os.path` module, and are not documented beyond this mention. There's little need to document these. :mod:`ntpath` - --- Implementation of :mod:`os.path` on Win32, Win64, WinCE, and OS/2 platforms. + --- Implementation of :mod:`os.path` on Win32, Win64, and WinCE platforms. :mod:`posixpath` --- Implementation of :mod:`os.path` on POSIX. diff --git a/Doc/library/unittest.mock-examples.rst b/Doc/library/unittest.mock-examples.rst index 98bfcc5..c5fa365 100644 --- a/Doc/library/unittest.mock-examples.rst +++ b/Doc/library/unittest.mock-examples.rst @@ -426,7 +426,7 @@ mock using the "as" form of the with statement: As an alternative `patch`, `patch.object` and `patch.dict` can be used as class decorators. When used in this way it is the same as applying the -decorator indvidually to every method whose name starts with "test". +decorator individually to every method whose name starts with "test". .. _further-examples: diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst index b5cf7b5..cb72a68 100644 --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -958,7 +958,7 @@ method: .. [#] The only exceptions are magic methods and attributes (those that have leading and trailing double underscores). Mock doesn't create these but - instead of raises an ``AttributeError``. This is because the interpreter + instead raises an ``AttributeError``. This is because the interpreter will often implicitly request these methods, and gets *very* confused to get a new Mock object when it expects a magic method. If you need magic method support see :ref:`magic methods `. @@ -1509,7 +1509,7 @@ Patching Descriptors and Proxy Objects Both patch_ and patch.object_ correctly patch and restore descriptors: class methods, static methods and properties. You should patch these on the *class* rather than an instance. They also work with *some* objects -that proxy attribute access, like the `django setttings object +that proxy attribute access, like the `django settings object `_. diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst index 6ec3609..690c98b 100644 --- a/Doc/library/unittest.rst +++ b/Doc/library/unittest.rst @@ -239,9 +239,10 @@ Test Discovery Unittest supports simple test discovery. 
In order to be compatible with test discovery, all of the test files must be :ref:`modules ` or -:ref:`packages ` importable from the top-level directory of -the project (this means that their filenames must be valid -:ref:`identifiers `). +:ref:`packages ` (including :term:`namespace packages +`) importable from the top-level directory of +the project (this means that their filenames must be valid :ref:`identifiers +`). Test discovery is implemented in :meth:`TestLoader.discover`, but can also be used from the command line. The basic command-line usage is:: @@ -306,6 +307,9 @@ as the start directory. Test modules and packages can customize test loading and discovery by through the `load_tests protocol`_. +.. versionchanged:: 3.4 + Test discovery supports :term:`namespace packages `. + .. _organizing-tests: @@ -1620,11 +1624,11 @@ Loading and running tests .. method:: discover(start_dir, pattern='test*.py', top_level_dir=None) - Find and return all test modules from the specified start directory, - recursing into subdirectories to find them. Only test files that match - *pattern* will be loaded. (Using shell style pattern matching.) Only - module names that are importable (i.e. are valid Python identifiers) will - be loaded. + Find all the test modules by recursing into subdirectories from the + specified start directory, and return a TestSuite object containing them. + Only test files that match *pattern* will be loaded. (Using shell style + pattern matching.) Only module names that are importable (i.e. are valid + Python identifiers) will be loaded. All test modules must be importable from the top level of the project. If the start directory is not the top level directory then the top level @@ -1654,12 +1658,11 @@ Loading and running tests .. versionchanged:: 3.4 Modules that raise :exc:`SkipTest` on import are recorded as skips, - not errors. - - .. versionchanged:: 3.4 - Paths are sorted before being imported to ensure execution order for a - given test suite is the same even if the underlying file system's ordering - is not dependent on file name like in ext3/4. + not errors. + Discovery works for :term:`namespace packages `. + Paths are sorted before being imported so that execution order is + the same even if the underlying file system's ordering is not + dependent on file name. The following attributes of a :class:`TestLoader` can be configured either by diff --git a/Doc/library/urllib.error.rst b/Doc/library/urllib.error.rst index 9fb58f5..f7f0c97 100644 --- a/Doc/library/urllib.error.rst +++ b/Doc/library/urllib.error.rst @@ -48,7 +48,7 @@ The following exceptions are raised by :mod:`urllib.error` as appropriate: .. attribute:: headers - The HTTP response headers for the HTTP request that cause the + The HTTP response headers for the HTTP request that caused the :exc:`HTTPError`. .. versionadded:: 3.4 diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst index 20449d9..6ac9081 100644 --- a/Doc/library/urllib.request.rst +++ b/Doc/library/urllib.request.rst @@ -218,7 +218,7 @@ The following classes are provided: fetching of the image, this should be true. *method* should be a string that indicates the HTTP request method that - will be used (e.g. ``'HEAD'``). Its value is stored in the + will be used (e.g. ``'HEAD'``). If provided, its value is stored in the :attr:`~Request.method` attribute and is used by :meth:`get_method()`. Subclasses may indicate a default method by setting the :attr:`~Request.method` attribute in the class itself. 
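
A small sketch of the two ways of selecting the HTTP request method that the text above describes (the URL is only a placeholder)::

    from urllib.request import Request

    # pass the method to the constructor ...
    req = Request('http://www.example.com/', method='HEAD')
    print(req.get_method())        # 'HEAD'

    # ... or, from 3.4 on, set a class-level default in a subclass
    class HeadRequest(Request):
        method = 'HEAD'

    print(HeadRequest('http://www.example.com/').get_method())   # 'HEAD'
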
@@ -440,13 +440,20 @@ request. .. attribute:: Request.method - The HTTP request method to use. This value is used by - :meth:`~Request.get_method` to override the computed HTTP request - method that would otherwise be returned. This attribute is initialized with - the value of the *method* argument passed to the constructor. + The HTTP request method to use. By default its value is :const:`None`, + which means that :meth:`~Request.get_method` will do its normal computation + of the method to be used. Its value can be set (thus overriding the default + computation in :meth:`~Request.get_method`) either by providing a default + value by setting it at the class level in a :class:`Request` subclass, or by + passing a value in to the :class:`Request` constructor via the *method* + argument. .. versionadded:: 3.3 + .. versionchanged:: 3.4 + A default value can now be set in subclasses; previously it could only + be set via the constructor argument. + .. method:: Request.get_method() diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst index d29a957..e93f48e 100644 --- a/Doc/library/venv.rst +++ b/Doc/library/venv.rst @@ -76,6 +76,8 @@ Creating virtual environments without there needing to be any reference to its venv in ``PATH``. +.. _venv-api: + API --- diff --git a/Doc/library/wave.rst b/Doc/library/wave.rst index c32e1fc..ab64978 100644 --- a/Doc/library/wave.rst +++ b/Doc/library/wave.rst @@ -150,14 +150,30 @@ them, and is otherwise implementation dependent. Wave_write Objects ------------------ +For seekable output streams, the ``wave`` header will automatically be updated +to reflect the number of frames actually written. For unseekable streams, the +*nframes* value must be accurate when the first frame data is written. An +accurate *nframes* value can be achieved either by calling +:meth:`~Wave_write.setnframes` or :meth:`~Wave_write.setparams` with the number +of frames that will be written before :meth:`~Wave_write.close` is called and +then using :meth:`~Wave_write.writeframesraw` to write the frame data, or by +calling :meth:`~Wave_write.writeframes` with all of the frame data to be +written. In the latter case :meth:`~Wave_write.writeframes` will calculate +the number of frames in the data and set *nframes* accordingly before writing +the frame data. + Wave_write objects, as returned by :func:`.open`, have the following methods: +.. versionchanged:: 3.4 + Added support for unseekable files. + .. method:: Wave_write.close() Make sure *nframes* is correct, and close the file if it was opened by - :mod:`wave`. This method is called upon object collection. Can raise an - exception if *nframes* is not correct and a file is not seekable. + :mod:`wave`. This method is called upon object collection. It will raise + an exception if the output stream is not seekable and *nframes* does not + match the number of frames actually written. .. method:: Wave_write.setnchannels(n) @@ -181,8 +197,9 @@ Wave_write objects, as returned by :func:`.open`, have the following methods: .. method:: Wave_write.setnframes(n) - Set the number of frames to *n*. This will be changed later if more frames are - written. + Set the number of frames to *n*. This will be changed later if the number + of frames actually written is different (this update attempt will + raise an error if the output stream is not seekable). .. method:: Wave_write.setcomptype(type, name) @@ -209,16 +226,18 @@ Wave_write objects, as returned by :func:`.open`, have the following methods: Write audio frames, without correcting *nframes*. .. 
versionchanged:: 3.4 - Any :term:`bytes-like object`\ s are now accepted. + Any :term:`bytes-like object` is now accepted. .. method:: Wave_write.writeframes(data) - Write audio frames and make sure *nframes* is correct. Can raise an - exception if a file is not seekable. + Write audio frames and make sure *nframes* is correct. It will raise an + error if the output stream is not seekable and the total number of frames + that have been written after *data* has been written does not match the + previously set value for *nframes*. .. versionchanged:: 3.4 - Any :term:`bytes-like object`\ s are now accepted. + Any :term:`bytes-like object` is now accepted. Note that it is invalid to set any parameters after calling :meth:`writeframes` diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst index dfa2b71..c6dbce0 100644 --- a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -952,7 +952,8 @@ XMLParser Objects specified in the XML file. .. deprecated:: 3.4 - The *html* argument. + The *html* argument. The remaining arguments should be passed via + keywword to prepare for the removal of the *html* argument. .. method:: close() diff --git a/Doc/library/xml.rst b/Doc/library/xml.rst index f793bae..0188219 100644 --- a/Doc/library/xml.rst +++ b/Doc/library/xml.rst @@ -14,9 +14,9 @@ Python's interfaces for processing XML are grouped in the ``xml`` package. .. warning:: The XML modules are not secure against erroneous or maliciously - constructed data. If you need to parse untrusted or unauthenticated data see - :ref:`xml-vulnerabilities`. - + constructed data. If you need to parse untrusted or + unauthenticated data see the :ref:`xml-vulnerabilities` and + :ref:`defused-packages` sections. It is important to note that modules in the :mod:`xml` package require that there be at least one SAX-compliant XML parser available. The Expat parser is @@ -46,16 +46,15 @@ The XML handling submodules are: .. _xml-vulnerabilities: XML vulnerabilities -=================== +------------------- The XML processing modules are not secure against maliciously constructed data. -An attacker can abuse vulnerabilities for e.g. denial of service attacks, to -access local files, to generate network connections to other machines, or -to or circumvent firewalls. The attacks on XML abuse unfamiliar features -like inline `DTD`_ (document type definition) with entities. +An attacker can abuse XML features to carry out denial of service attacks, +access local files, generate network connections to other machines, or +circumvent firewalls. -The following table gives an overview of the known attacks and if the various -modules are vulnerable to them. +The following table gives an overview of the known attacks and whether +the various modules are vulnerable to them. ========================= ======== ========= ========= ======== ========= kind sax etree minidom pulldom xmlrpc @@ -68,7 +67,7 @@ decompression bomb No No No No **Yes** ========================= ======== ========= ========= ======== ========= 1. :mod:`xml.etree.ElementTree` doesn't expand external entities and raises a - ParserError when an entity occurs. + :exc:`ParserError` when an entity occurs. 2. :mod:`xml.dom.minidom` doesn't expand external entities and simply returns the unexpanded entity verbatim. 3. :mod:`xmlrpclib` doesn't expand external entities and omits them. 
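
As a concrete illustration of footnote 1 above, :mod:`xml.etree.ElementTree` refuses to expand an entity it does not know about and raises a parse error instead (the XML string is a contrived example)::

    import xml.etree.ElementTree as ET

    try:
        ET.fromstring('<doc>&unknown;</doc>')
    except ET.ParseError as exc:
        print('rejected:', exc)    # undefined entity
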
@@ -77,23 +76,21 @@ decompression bomb No No No No **Yes** billion laughs / exponential entity expansion The `Billion Laughs`_ attack -- also known as exponential entity expansion -- uses multiple levels of nested entities. Each entity refers to another entity - several times, the final entity definition contains a small string. Eventually - the small string is expanded to several gigabytes. The exponential expansion - consumes lots of CPU time, too. + several times, and the final entity definition contains a small string. + The exponential expansion results in several gigabytes of text and + consumes lots of memory and CPU time. quadratic blowup entity expansion A quadratic blowup attack is similar to a `Billion Laughs`_ attack; it abuses entity expansion, too. Instead of nested entities it repeats one large entity with a couple of thousand chars over and over again. The attack isn't as - efficient as the exponential case but it avoids triggering countermeasures of - parsers against heavily nested entities. + efficient as the exponential case but it avoids triggering parser countermeasures + that forbid deeply-nested entities. external entity expansion Entity declarations can contain more than just text for replacement. They can - also point to external resources by public identifiers or system identifiers. - System identifiers are standard URIs or can refer to local files. The XML - parser retrieves the resource with e.g. HTTP or FTP requests and embeds the - content into the XML document. + also point to external resources or local files. The XML + parser accesses the resource and embeds the content into the XML document. DTD retrieval Some XML libraries like Python's :mod:`xml.dom.pulldom` retrieve document type @@ -101,31 +98,32 @@ DTD retrieval implications as the external entity expansion issue. decompression bomb - The issue of decompression bombs (aka `ZIP bomb`_) apply to all XML libraries - that can parse compressed XML stream like gzipped HTTP streams or LZMA-ed + Decompression bombs (aka `ZIP bomb`_) apply to all XML libraries + that can parse compressed XML streams such as gzipped HTTP streams or + LZMA-compressed files. For an attacker it can reduce the amount of transmitted data by three magnitudes or more. -The documentation of `defusedxml`_ on PyPI has further information about +The documentation for `defusedxml`_ on PyPI has further information about all known attack vectors with examples and references. -defused packages ----------------- +.. _defused-packages: -`defusedxml`_ is a pure Python package with modified subclasses of all stdlib -XML parsers that prevent any potentially malicious operation. The courses of -action are recommended for any server code that parses untrusted XML data. The -package also ships with example exploits and an extended documentation on more -XML exploits like xpath injection. +The :mod:`defusedxml` and :mod:`defusedexpat` Packages +------------------------------------------------------ -`defusedexpat`_ provides a modified libexpat and patched replacment -:mod:`pyexpat` extension module with countermeasures against entity expansion -DoS attacks. Defusedexpat still allows a sane and configurable amount of entity -expansions. The modifications will be merged into future releases of Python. +`defusedxml`_ is a pure Python package with modified subclasses of all stdlib +XML parsers that prevent any potentially malicious operation. Use of this +package is recommended for any server code that parses untrusted XML data. 
The +package also ships with example exploits and extended documentation on more +XML exploits such as XPath injection. -The workarounds and modifications are not included in patch releases as they -break backward compatibility. After all inline DTD and entity expansion are -well-definied XML features. +`defusedexpat`_ provides a modified libexpat and a patched +:mod:`pyexpat` module that have countermeasures against entity expansion +DoS attacks. The :mod:`defusedexpat` module still allows a sane and configurable amount of entity +expansions. The modifications may be included in some future release of Python, +but will not be included in any bugfix releases of +Python because they break backward compatibility. .. _defusedxml: https://pypi.python.org/pypi/defusedxml/ @@ -133,4 +131,3 @@ well-definied XML features. .. _Billion Laughs: http://en.wikipedia.org/wiki/Billion_laughs .. _ZIP bomb: http://en.wikipedia.org/wiki/Zip_bomb .. _DTD: http://en.wikipedia.org/wiki/Document_Type_Definition - diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst index 969a536..1d23a7c 100644 --- a/Doc/library/zipfile.rst +++ b/Doc/library/zipfile.rst @@ -401,18 +401,32 @@ The :class:`PyZipFile` constructor takes the same parameters as the ``2``, only files with that optimization level (see :func:`compile`) are added to the archive, compiling if necessary. - If the pathname is a file, the filename must end with :file:`.py`, and + If *pathname* is a file, the filename must end with :file:`.py`, and just the (corresponding :file:`\*.py[co]`) file is added at the top level - (no path information). If the pathname is a file that does not end with + (no path information). If *pathname* is a file that does not end with :file:`.py`, a :exc:`RuntimeError` will be raised. If it is a directory, and the directory is not a package directory, then all the files :file:`\*.py[co]` are added at the top level. If the directory is a package directory, then all :file:`\*.py[co]` are added under the package name as a file path, and if any subdirectories are package directories, - all of these are added recursively. *basename* is intended for internal - use only. When *filterfunc(pathname)* is given, it will be called for every - invocation. When it returns a false value, that path and its subpaths will - be ignored. + all of these are added recursively. + + *basename* is intended for internal use only. + + *filterfunc*, if given, must be a function taking a single string + argument. It will be passed each path (including each individual full + file path) before it is added to the archive. If *filterfunc* returns a + false value, the path will not be added, and if it is a directory its + contents will be ignored. For example, if our test files are all either + in ``test`` directories or start with the string ``test_``, we can use a + *filterfunc* to exclude them:: + + >>> zf = PyZipFile('myprog.zip') + >>> def notests(s): + ... fn = os.path.basename(s) + ... return (not (fn == 'test' or fn.startswith('test_'))) + >>> zf.writepy('myprog', filterfunc=notests) + The :meth:`writepy` method makes archives with file names like this:: diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index 4f19b37..8204dc3 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -1226,6 +1226,10 @@ Basic customization The return value must be a string object. + .. versionchanged:: 3.4 + The __format__ method of ``object`` itself raises a :exc:`TypeError` + if passed any non-empty string. + .. 
_richcmpfuncs: .. method:: object.__lt__(self, other) @@ -1643,6 +1647,8 @@ of these candidate metaclasses. If none of the candidate metaclasses meets that criterion, then the class definition will fail with ``TypeError``. +.. _prepare: + Preparing the class namespace ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -2044,11 +2050,13 @@ left undefined. ``&=``, ``^=``, ``|=``). These methods should attempt to do the operation in-place (modifying *self*) and return the result (which could be, but does not have to be, *self*). If a specific method is not defined, the augmented - assignment falls back to the normal methods. For instance, to execute the - statement ``x += y``, where *x* is an instance of a class that has an - :meth:`__iadd__` method, ``x.__iadd__(y)`` is called. If *x* is an instance - of a class that does not define a :meth:`__iadd__` method, ``x.__add__(y)`` - and ``y.__radd__(x)`` are considered, as with the evaluation of ``x + y``. + assignment falls back to the normal methods. For instance, if *x* is an + instance of a class with an :meth:`__iadd__` method, ``x += y`` is equivalent + to ``x = x.__iadd__(y)`` . Otherwise, ``x.__add__(y)`` and ``y.__radd__(x)`` + are considered, as with the evaluation of ``x + y``. In certain situations, + augmented assignment can result in unexpected errors (see + :ref:`faq-augmented-assignment-tuple-error`), but this behavior is in + fact part of the data model. .. method:: object.__neg__(self) diff --git a/Doc/tools/sphinxext/indexcontent.html b/Doc/tools/sphinxext/indexcontent.html index 7f85470..969099a 100644 --- a/Doc/tools/sphinxext/indexcontent.html +++ b/Doc/tools/sphinxext/indexcontent.html @@ -16,14 +16,14 @@ + + - - diff --git a/Doc/tools/sphinxext/susp-ignored.csv b/Doc/tools/sphinxext/susp-ignored.csv index 1769023..f9d6bbf 100644 --- a/Doc/tools/sphinxext/susp-ignored.csv +++ b/Doc/tools/sphinxext/susp-ignored.csv @@ -282,3 +282,4 @@ whatsnew/changelog,,:PythonCmd,"With Tk < 8.5 _tkinter.c:PythonCmd() raised Unic whatsnew/changelog,,::,": Fix FTP tests for IPv6, bind to ""::1"" instead of ""localhost""." whatsnew/changelog,,::,": Use ""127.0.0.1"" or ""::1"" instead of ""localhost"" as much as" whatsnew/changelog,,:password,user:password +whatsnew/changelog,,:gz,w:gz diff --git a/Doc/tutorial/introduction.rst b/Doc/tutorial/introduction.rst index 1225e20..9efd1ac 100644 --- a/Doc/tutorial/introduction.rst +++ b/Doc/tutorial/introduction.rst @@ -371,9 +371,9 @@ values. The most versatile is the *list*, which can be written as a list of comma-separated values (items) between square brackets. Lists might contain items of different types, but usually the items all have the same type. :: - >>> squares = [1, 2, 4, 9, 16, 25] + >>> squares = [1, 4, 9, 16, 25] >>> squares - [1, 2, 4, 9, 16, 25] + [1, 4, 9, 16, 25] Like strings (and all other built-in :term:`sequence` type), lists can be indexed and sliced:: @@ -389,12 +389,12 @@ All slice operations return a new list containing the requested elements. This means that the following slice returns a new (shallow) copy of the list:: >>> squares[:] - [1, 2, 4, 9, 16, 25] + [1, 4, 9, 16, 25] Lists also supports operations like concatenation:: >>> squares + [36, 49, 64, 81, 100] - [1, 2, 4, 9, 16, 25, 36, 49, 64, 81, 100] + [1, 4, 9, 16, 25, 36, 49, 64, 81, 100] Unlike strings, which are :term:`immutable`, lists are a :term:`mutable` type, i.e. 
it is possible to change their content:: diff --git a/Doc/tutorial/whatnow.rst b/Doc/tutorial/whatnow.rst index 7fcbdc3f..979f587 100644 --- a/Doc/tutorial/whatnow.rst +++ b/Doc/tutorial/whatnow.rst @@ -21,8 +21,8 @@ the set are: and many other tasks. Skimming through the Library Reference will give you an idea of what's available. -* :ref:`install-index` explains how to install external modules written by other - Python users. +* :ref:`installing-index` explains how to install additional modules written + by other Python users. * :ref:`reference-index`: A detailed explanation of Python's syntax and semantics. It's heavy reading, but is useful as a complete guide to the diff --git a/Doc/using/unix.rst b/Doc/using/unix.rst index 40635c6..3c7fc41 100644 --- a/Doc/using/unix.rst +++ b/Doc/using/unix.rst @@ -60,6 +60,8 @@ To install the newest Python versions on OpenSolaris, install `blastwave prompt. +.. _building-python-on-unix: + Building Python =============== diff --git a/Doc/using/venv-create.inc b/Doc/using/venv-create.inc index 997e24b..52cdda0 100644 --- a/Doc/using/venv-create.inc +++ b/Doc/using/venv-create.inc @@ -11,6 +11,11 @@ containing a copy of the ``python`` binary (or binaries, in the case of Windows). It also creates an (initially empty) ``lib/pythonX.Y/site-packages`` subdirectory (on Windows, this is ``Lib\site-packages``). +.. seealso:: + + `Python Packaging User Guide: Creating and using virtual environments + `__ + .. highlight:: none On Windows, you may have to invoke the ``pyvenv`` script as follows, if you @@ -38,6 +43,8 @@ The command, if run with ``-h``, will show the available options:: virtual environment. --symlinks Try to use symlinks rather than copies, when symlinks are not the default for the platform. + --copies Try to use copies rather than symlinks, even when + symlinks are the default for the platform. --clear Delete the environment directory if it already exists. If not specified and the directory exists, an error is raised. @@ -47,7 +54,8 @@ The command, if run with ``-h``, will show the available options:: environment (pip is bootstrapped by default) .. versionchanged:: 3.4 - Installs pip by default, added the ``--without-pip`` option + Installs pip by default, added the ``--without-pip`` and ``--copies`` + options If the target directory already exists an error will be raised, unless the ``--clear`` or ``--upgrade`` option was provided. diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index b1e7898..42a92af 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -11,6 +11,10 @@ This document aims to give an overview of Windows-specific behaviour you should know about when using Python on Microsoft Windows. +.. XXX (ncoghlan) + + This looks rather stale to me... + Installing Python ================= diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index 939d491..9cf51bd 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -2,8 +2,7 @@ What's New In Python 3.4 **************************** -.. :Author: Someone - (uncomment if there is a principal author) +:Author: R. David Murray (Editor) .. Rules for maintenance: @@ -68,11 +67,6 @@ This article explains the new features in Python 3.4, compared to 3.3. For full details, see the `changelog `_. -.. note:: Prerelease users should be aware that this document is currently in - draft form. While it should be close to complete for the Python 3.4 - release candidates, adjustments and additions to the document may be made - up until the final release. - .. 
seealso:: @@ -90,17 +84,19 @@ New syntax features: * No new syntax features were added in Python 3.4. -New expected features for Python implementations: +Other new features: -* :ref:`pip should always be "available" ` (:pep:`453`). -* :ref:`Make newly created file descriptors non-inheritable ` +* :ref:`pip should always be available ` (:pep:`453`). +* :ref:`Newly created file descriptors are non-inheritable ` (:pep:`446`). -* command line option for :ref:`isolated mode `, +* command line option for :ref:`isolated mode ` (:issue:`16499`). * :ref:`improvements in the handling of codecs ` that are not text encodings (multiple issues). * :ref:`A ModuleSpec Type ` for the Import System (:pep:`451`). (Affects importer authors.) +* The :mod:`marshal` format has been made :ref:`more compact and efficient + ` (:issue:`16475`). New library modules: @@ -120,33 +116,57 @@ New library modules: * :mod:`tracemalloc`: :ref:`Trace Python memory allocations ` (:pep:`454`). -Significantly Improved Library Modules: +Significantly improved library modules: * :ref:`Single-dispatch generic functions ` in :mod:`functools` (:pep:`443`). * New :mod:`pickle` :ref:`protocol 4 ` (:pep:`3154`). -* :ref:`TLSv1.1 and TLSv1.2 support ` for :mod:`ssl` - (:issue:`16692`). * :mod:`multiprocessing` now has :ref:`an option to avoid using os.fork on Unix ` (:issue:`8713`). * :mod:`email` has a new submodule, :mod:`~email.contentmanager`, and a new :mod:`~email.message.Message` subclass (:class:`~email.contentmanager.EmailMessage`) that :ref:`simplify MIME handling ` (:issue:`18891`). +* The :mod:`inspect` and :mod:`pydoc` modules are now capable of + correct introspection of a much wider variety of callable objects, + which improves the output of the Python :func:`help` system. * The :mod:`ipaddress` module API has been declared stable +Security improvements: + +* :ref:`Secure and interchangeable hash algorithm ` + (:pep:`456`). +* :ref:`Make newly created file descriptors non-inheritable ` + (:pep:`446`) to avoid leaking file descriptors to child processes. +* New command line option for :ref:`isolated mode `, + (:issue:`16499`). +* :mod:`multiprocessing` now has :ref:`an option to avoid using os.fork + on Unix `. *spawn* and *forkserver* are + more secure because they avoid sharing data with child processes. +* :mod:`multiprocessing` child processes on Windows no longer inherit + all of the parent's inheritable handles, only the necessary ones. +* A new :func:`hashlib.pbkdf2_hmac` function provides + the `PKCS#5 password-based key derivation function 2 + `_. +* :ref:`TLSv1.1 and TLSv1.2 support ` for :mod:`ssl`. +* :ref:`Retrieving certificates from the Windows system cert store support + ` for :mod:`ssl`. +* :ref:`Server-side SNI (Server Name Indication) support + ` for :mod:`ssl`. +* The :class:`ssl.SSLContext` class has a :ref:`lot of improvements + `. +* All modules in the standard library that support SSL now support server + certificate verification, including hostname matching + (:func:`ssl.match_hostname`) and CRLs (Certificate Revocation lists, see + :func:`ssl.SSLContext.load_verify_locations`). CPython implementation improvements: * :ref:`Safe object finalization ` (:pep:`442`). -* Leveraging :pep:`442`, :ref:`module globals are no longer set to None - during finalization `, in most cases (:issue:`18214`). +* Leveraging :pep:`442`, in most cases :ref:`module globals are no longer set + to None during finalization ` (:issue:`18214`). * :ref:`Configurable memory allocators ` (:pep:`445`). 
-* :ref:`Secure and interchangeable hash algorithm ` - (:pep:`456`). * :ref:`Argument Clinic ` (:pep:`436`). -* The :mod:`marshal` format has been made :ref:`more compact and efficient - ` (:issue:`16475`). Please read on for a comprehensive list of user-facing changes, including many other smaller improvements, CPython optimizations, deprecations, and potential @@ -154,38 +174,54 @@ porting issues. -New Expected Features for Python Implementations -================================================ +New Features +============ .. _whatsnew-pep-453: PEP 453: Explicit Bootstrapping of PIP in Python Installations -------------------------------------------------------------- +Bootstrapping pip By Default +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + The new :mod:`ensurepip` module (defined in :pep:`453`) provides a standard cross-platform mechanism to bootstrap the pip installer into Python -installations and virtual environments. - -By default, the scripts ``pipX`` and ``pipX.Y`` will be installed (where -X.Y stands for the version of the Python installation), along with the -``pip`` Python package and its dependencies. - -The :mod:`venv` module and the :command:`pyvenv` utility make use of this -module to make ``pip`` readily available in virtual environments. When -using the command line interface, ``pip`` is installed by default, while -for the module API installation of ``pip`` must be requested explicitly. - -For CPython source builds on POSIX systems, the ``make install`` and -``make altinstall`` commands bootstrap ``pip`` by default. This behaviour -can be controlled through configure options, and overridden through -Makefile options. - -On Windows and Mac OS X, the CPython installers now offer the option to -install ``pip`` along with CPython itself. +installations and virtual environments. The version of ``pip`` included +with Python 3.4.0 is ``pip`` 1.5.4, and future 3.4.x maintenance releases +will update the bundled version to the latest version of ``pip`` that is +available at the time of creating the release candidate. + +By default, the commands ``pipX`` and ``pipX.Y`` will be installed on all +platforms (where X.Y stands for the version of the Python installation), +along with the ``pip`` Python package and its dependencies. On Windows and +in virtual environments on all platforms, the unversioned ``pip`` command +will also be installed. On other platforms, the system wide unversioned +``pip`` command typically refers to the separately installed Python 2 +version. + +The :ref:`pyvenv ` command line utility and the :mod:`venv` +module make use of the :mod:`ensurepip` module to make ``pip`` readily +available in virtual environments. When using the command line utility, +``pip`` is installed by default, while when using the :mod:`venv` module +:ref:`venv-api` installation of ``pip`` must be requested explicitly. + +For CPython :ref:`source builds on POSIX systems `, +the ``make install`` and ``make altinstall`` commands bootstrap ``pip`` by +default. This behaviour can be controlled through configure options, and +overridden through Makefile options. + +On Windows and Mac OS X, the CPython installers now default to installing +``pip`` along with CPython itself (users may opt out of installing it +during the installation process). Window users will need to opt in to the +automatic ``PATH`` modifications to have ``pip`` available from the command +line by default, otherwise it can still be accessed through the Python +launcher for Windows as ``py -m pip``. 
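The new module can also be driven programmatically. A rough sketch (not taken from the PEP) of a deployment script that falls back to the bundled copy when ``pip`` is missing; running ``python -m ensurepip`` from a shell has the same effect::

    import importlib.util
    import ensurepip

    print("bundled pip:", ensurepip.version())     # version of the copy shipped with CPython

    if importlib.util.find_spec("pip") is None:    # pip is not importable yet
        # Install the bundled pip (and setuptools) into the current environment.
        ensurepip.bootstrap()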
As `discussed in the PEP`__, platform packagers may choose not to install -``pip`` by default, as long as the command ``pip``, when invoked, provides -clear and simple directions on how to install ``pip`` on the platform. +these commands by default, as long as, when invoked, they provide clear and +simple directions on how to install them on that platform (usually using +the system package manager). __ http://www.python.org/dev/peps/pep-0453/#recommendations-for-downstream-distributors @@ -193,12 +229,29 @@ __ http://www.python.org/dev/peps/pep-0453/#recommendations-for-downstream-distr To avoid conflicts between parallel Python 2 and Python 3 installations, only the versioned ``pip3`` and ``pip3.4`` commands are bootstrapped by - default when ``ensurepip`` is invoked directly (including by the CPython - installers). ``pyvenv`` ensures that the unqualified ``pip`` command is - made available in virtual environments, and ``pip`` can always be + default when ``ensurepip`` is invoked directly - the ``--default-pip`` + option is needed to also request the unversioned ``pip`` command. + ``pyvenv`` and the Windows installer ensure that the unqualified ``pip`` + command is made available in those environments, and ``pip`` can always be invoked via the ``-m`` switch rather than directly to avoid ambiguity on systems with multiple Python installations. + +Documentation Changes +~~~~~~~~~~~~~~~~~~~~~ + +As part of this change, the :ref:`installing-index` and +:ref:`distributing-index` sections of the documentation have been +completely redesigned as short getting started and FAQ documents. Most +packaging documentation has now been moved out to the Python Packaging +Authority maintained `Python Packaging User Guide +`__ and the documentation of the individual +projects. + +However, as this migration is currently still incomplete, the legacy +versions of those guides remaining available as :ref:`install-index` +and :ref:`distutils-index`. + .. seealso:: :pep:`453` -- Explicit bootstrapping of pip in Python installations @@ -208,11 +261,17 @@ __ http://www.python.org/dev/peps/pep-0453/#recommendations-for-downstream-distr .. _whatsnew-pep-446: -PEP 446: Make Newly Created File Descriptors Non-Inheritable ------------------------------------------------------------- +PEP 446: Newly Created File Descriptors Are Non-Inheritable +----------------------------------------------------------- :pep:`446` makes newly created file descriptors :ref:`non-inheritable -`. New functions and methods: +`. In general, this is the behavior an application will +want: when launching a new process, having currently open files also +open in the new process can lead to all sorts of hard to find bugs, +and potentially to security issues. + +However, there are occasions when inheritance is desired. To support +these cases, the following new functions and methods are available: * :func:`os.get_inheritable`, :func:`os.set_inheritable` * :func:`os.get_handle_inheritable`, :func:`os.set_handle_inheritable` @@ -243,10 +302,10 @@ module (and have been covered by the regression test suite) since Python 2.4, but were previously only discoverable through runtime introspection. Unlike the convenience methods on :class:`str`, :class:`bytes` and -:class:`bytearray`, these convenience functions support arbitrary codecs -in both Python 2 and Python 3, rather than being limited to Unicode text -encodings (in Python 3) or ``basestring`` <-> ``basestring`` conversions -(in Python 2). 
+:class:`bytearray`, the :mod:`codecs` convenience functions support arbitrary +codecs in both Python 2 and Python 3, rather than being limited to Unicode text +encodings (in Python 3) or ``basestring`` <-> ``basestring`` conversions (in +Python 2). In Python 3.4, the interpreter is able to identify the known non-text encodings provided in the standard library and direct users towards these @@ -269,7 +328,7 @@ general purpose convenience functions when appropriate:: In a related change, whenever it is feasible without breaking backwards compatibility, exceptions raised during encoding and decoding operations -will be wrapped in a chained exception of the same type that mentions the +are wrapped in a chained exception of the same type that mentions the name of the codec responsible for producing the error:: >>> import codecs @@ -315,7 +374,7 @@ as:: The binary and text transforms provided in the standard library are detailed in :ref:`binary-transforms` and :ref:`text-transforms`. -(Contributed by Nick Coghlan in :issue:`7475`, , :issue:`17827`, +(Contributed by Nick Coghlan in :issue:`7475`, :issue:`17827`, :issue:`17828` and :issue:`19619`) @@ -336,7 +395,9 @@ The public-facing changes from the PEP are entirely backward-compatible. Furthermore, they should be transparent to everyone but importer authors. Key finder and loader methods have been deprecated, but they will continue working. New importers should use the new methods described in the PEP. Existing -importers should be updated to implement the new methods. +importers should be updated to implement the new methods. See the +:ref:`deprecated-3.4` section for a list of methods that should be replaced and +their replacements. Other Language Changes @@ -346,9 +407,10 @@ Some smaller changes made to the core Python language are: * Unicode database updated to UCD version 6.3. -* :func:`min` and :func:`max` now accept a *default* argument that can be used - to specify the value they return if the iterable they are evaluating has no - elements. Contributed by Julian Berman in :issue:`18111`. +* :func:`min` and :func:`max` now accept a *default* keyword-only argument that + can be used to specify the value they return if the iterable they are + evaluating has no elements. (Contributed by Julian Berman in + :issue:`18111`.) * Module objects are now :mod:`weakref`'able. @@ -357,16 +419,42 @@ Some smaller changes made to the core Python language are: ``__main__.__file__`` when a script has been executed directly using a relative path (Contributed by Brett Cannon in :issue:`18416`). -* Now all the UTF-\* codecs (except UTF-7) reject surrogates during both +* All the UTF-\* codecs (except UTF-7) now reject surrogates during both encoding and decoding unless the ``surrogatepass`` error handler is used, - with the exception of the UTF-16 decoder that accepts valid surrogate pairs, - and the UTF-16 encoder that produces them while encoding non-BMP characters. + with the exception of the UTF-16 decoder (which accepts valid surrogate pairs) + and the UTF-16 encoder (which produces them while encoding non-BMP characters). Contributed by Victor Stinner, Kang-Hao (Kenny) Lu and Serhiy Storchaka in :issue:`12892`. -* New EBCDIC :ref:`codec ` ``cp273``. (Contributed by - Michael Bierenfeld and Andrew Kuchling in :issue:`1097797`.) +* New German EBCDIC :ref:`codec ` ``cp273``. (Contributed + by Michael Bierenfeld and Andrew Kuchling in :issue:`1097797`.) + +* New Ukrainian :ref:`codec ` ``cp1125``. (Contributed by + Serhiy Storchaka in :issue:`19668`.) 
+ +* :class:`bytes`.join() and :class:`bytearray`.join() now accept arbitrary + buffer objects as arguments. (Contributed by Antoine Pitrou in + :issue:`15958`.) +* The :class:`int` constructor now accepts any object that has an ``__index__`` + method for its *base* argument. (Contributed by Mark Dickinson in + :issue:`16772`.) + +* Frame objects now have a :func:`~frame.clear` method that clears all + references to local variables from the frame. (Contributed by Antoine Pitrou + in :issue:`17934`.) + +* :class:`memoryview` is now registered as a :class:`Sequence `, + and supports the :func:`reversed` builtin. (Contributed by Nick Coghlan + and Claudiu Popa in :issue:`18690` and :issue:`19078`.) + +* Signatures reported by :func:`help` have been modified and improved in + several cases as a result of the introduction of Argument Clinic and other + changes to the :mod:`inspect` and :mod:`pydoc` modules. + +* :meth:`~object.__length_hint__` is now part of the formal language + specification (see :pep:`424`). (Contributed by Armin Ronacher in + :issue:`16148`.) New Modules @@ -405,9 +493,11 @@ environment was declined. :mod:`ensurepip` includes a bundled copy of ``pip``, up-to-date as of the first release candidate of the release of CPython with which it ships (this applies to both maintenance releases and feature releases). ``ensurepip`` does not -access the internet. (If the installation has Internet access, it is of course -possible to upgrade ``pip`` to a release more recent than the bundled ``pip`` -by using the bundled ``pip`` command itself once it is installed.) +access the internet. If the installation has Internet access, after +``ensurepip`` is run the bundled ``pip`` can be used to upgrade ``pip`` to a +more recent release than the bundled one. (Note that such an upgraded version +of ``pip`` is considered to be a separately installed package and will not be +removed if Python is uninstalled.) The module is named *ensure*\ pip because if called when ``pip`` is already installed, it does nothing. It also has an ``--upgrade`` option that will @@ -486,7 +576,7 @@ tracemalloc The new :mod:`tracemalloc` module (defined in :pep:`454`) is a debug tool to trace memory blocks allocated by Python. It provides the following information: -* Traceback where an object was allocated +* Trace where an object was allocated * Statistics on allocated memory blocks per filename and per line number: total size, number and average size of allocated memory blocks * Compute the differences between two snapshots to detect memory leaks @@ -518,7 +608,7 @@ Using ``ABC`` as a base class has essentially the same effect as specifying aifc ---- -The :meth:`~aifc.getparams` method now returns a namedtuple rather than a +The :meth:`~aifc.aifc.getparams` method now returns a namedtuple rather than a plain tuple. (Contributed by Claudiu Popa in :issue:`17818`.) :func:`aifc.open` now supports the context manager protocol: when used in a @@ -526,6 +616,10 @@ plain tuple. (Contributed by Claudiu Popa in :issue:`17818`.) object will be called automatically at the end of the block. (Contributed by Serhiy Storchacha in :issue:`16486`.) +The :meth:`~aifc.aifc.writeframesraw` and :meth:`~aifc.aifc.writeframes` +methods now accept any :term:`bytes-like object`. (Contributed by Serhiy +Storchaka in :issue:`8311`.) + argparse -------- @@ -538,10 +632,16 @@ by Lucas Maystre in :issue:`11175`.) audioop ------- -Added support for 24-bit samples (:issue:`12866`). +:mod:`audioop` now supports 24-bit samples. 
(Contributed by Serhiy Storchaka +in :issue:`12866`.) -Added the :func:`~audioop.byteswap` function to convert big-endian samples -to little-endian and vice versa (:issue:`19641`). +New :func:`~audioop.byteswap` function converts big-endian samples to +little-endian and vice versa (Contributed by Serhiy Storchaka in +:issue:`19641`). + +All :mod:`audioop` functions now accept any :term:`bytes-like object`. Strings +are not accepted: they didn't work before, now they raise an error right away. +(Contributed by Serhiy Storchaka in :issue:`16685`.) base64 @@ -549,7 +649,25 @@ base64 The encoding and decoding functions in :mod:`base64` now accept any :term:`bytes-like object` in cases where it previously required a -:class:`bytes` or :class:`bytearray` instance (:issue:`17839`). +:class:`bytes` or :class:`bytearray` instance. (Contributed by Nick Coghlan in +:issue:`17839`.) + +New functions :func:`~base64.a85encode`, :func:`~base64.a85decode`, +:func:`~base64.b85encode`, and :func:`~base64.b85decode` provide the ability to +encode and decode binary data from and to ``Ascii85`` and the git/mercurial +``Base85`` formats, respectively. The ``a85`` functions have options that can +be used to make them compatible with the variants of the ``Ascii85`` encoding, +including the Adobe variant. (Contributed by Martin Morrison, the Mercurial +project, Serhiy Storchaka, and Antoine Pitrou in :issue:`17618`.) + + +collections +----------- + +The :meth:`.ChainMap.new_child` method now accepts an *m* argument specifying +the child map to add to the chain. This allows an existing mapping and/or a +custom mapping type to be used for the child. (Contributed by Vinay Sajip in +:issue:`16613`.) colorsys @@ -570,17 +688,30 @@ statement. (Contributed by Raymond Hettinger in :issue:`15806` and Zero Piraeus in :issue:`19266`) The new :func:`contextlib.redirect_stdout` context manager makes it easier -for utility scripts to handle inflexible APIs that don't provide any -options to retrieve their output as a string or direct it to somewhere -other than :data:`sys.stdout`. In conjunction with :class:`io.StringIO`, -this context manager is also useful for checking expected output from -command line utilities. (Contribute by Raymond Hettinger in :issue:`15805`) +for utility scripts to handle inflexible APIs that write their output to +:data:`sys.stdout` and don't provide any options to redirect it. Using the +context manager, the :data:`sys.stdout` output can be redirected to any +other stream or, in conjunction with :class:`io.StringIO`, to a string. +The latter can be especially useful, for example, to capture output +from a function that was written to implement a command line interface. +It is recommended only for utility scripts because it affects the +global state of :data:`sys.stdout`. (Contributed by Raymond Hettinger +in :issue:`15805`) The :mod:`contextlib` documentation has also been updated to include a :ref:`discussion ` of the differences between single use, reusable and reentrant context managers. +dbm +--- + +:func:`dbm.open` objects now support the context management protocol. When +used in a :keyword:`with` statement, the ``close`` method of the database +object will be called automatically at the end of the block. (Contributed by +Claudiu Popa and Nick Coghlan in :issue:`19282`.) + + dis --- @@ -639,6 +770,10 @@ to ``distb(tb)``). 
(Contributed by Nick Coghlan, Ryan Kelly and Thomas Kluyver in :issue:`11816` and Claudiu Popa in :issue:`17916`) +New function :func:`~dis.stack_effect` computes the effect on the Python stack +of a given opcode and argument, information that is not otherwise available. +(Contributed by Larry Hastings in :issue:`19722`.) + doctest ------- @@ -653,6 +788,9 @@ new options, ``-o`` and ``-f``. ``-o`` allows :ref:`doctest options shorthand for ``-o FAIL_FAST`` (to parallel the similar option supported by the :mod:`unittest` CLI). (Contributed by R. David Murray in :issue:`11390`.) +:mod:`doctest` will now find doctests in extension module ``__doc__`` strings. +(Contributed by Zachary Ware in :issue:`3158`.) + email ----- @@ -661,7 +799,8 @@ email override the default policy of the message when generating a string representation of it. This means that ``as_string`` can now be used in more circumstances, instead of having to create and use a :mod:`~email.generator` in -order to pass formatting parameters to its ``flatten`` method. +order to pass formatting parameters to its ``flatten`` method. (Contributed by +R. David Murray in :issue:`18600`.) New method :meth:`~email.message.Message.as_bytes` added to produce a bytes representation of the message in a fashion similar to how ``as_string`` @@ -669,26 +808,30 @@ produces a string representation. It does not accept the *maxheaderlen* argument, but does accept the *unixfrom* and *policy* arguments. The :class:`~email.message.Message` :meth:`~email.message.Message.__bytes__` method calls it, meaning that ``bytes(mymsg)`` will now produce the intuitive -result: a bytes object containing the fully formatted message. +result: a bytes object containing the fully formatted message. (Contributed +by R. David Murray in :issue:`18600`.) -(Contributed by R. David Murray in :issue:`18600`.) +The :meth:`.Message.set_param` message now accepts a *replace* keyword argument. +When specified, the associated header will be updated without changing +its location in the list of headers. For backward compatibility, the default +is ``False``. (Contributed by R. David Murray in :issue:`18891`.) -.. _whatsnew_email_contentmanager: - -A pair of new subclasses of :class:`~email.message.Message` have been added, -along with a new sub-module, :mod:`~email.contentmanager`. All documentation -is currently in the new module, which is being added as part of the new -:term:`provisional ` email API. These classes provide a -number of new methods that make extracting content from and inserting content -into email messages much easier. See the :mod:`~email.contentmanager` -documentation for details. -These API additions complete the bulk of the work that was planned as part of -the email6 project. The currently provisional API is scheduled to become final -in Python 3.5 (possibly with a few minor additions in the area of error -handling). +.. _whatsnew_email_contentmanager: -(Contributed by R. David Murray in :issue:`18891`.) +A pair of new subclasses of :class:`~email.message.Message` have been added +(:class:`.EmailMessage` and :class:`.MIMEPart`), along with a new sub-module, +:mod:`~email.contentmanager` and a new :mod:`~email.policy` attribute +:attr:`~email.policy.EmailPolicy.content_manager`. All documentation is +currently in the new module, which is being added as part of email's new +:term:`provisional API`. These classes provide a number of new methods that +make extracting content from and inserting content into email messages much +easier. 
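A short sketch of the provisional API in action, assuming the :class:`~email.message.EmailMessage` class and its content-manager based ``set_content()`` and ``get_content()`` methods behave as documented::

    from email.message import EmailMessage

    msg = EmailMessage()                    # uses the new policy-based API
    msg['From'] = 'ann@example.com'
    msg['To'] = 'bob@example.com'
    msg['Subject'] = 'Lunch?'
    msg.set_content('Pizza at noon?')       # the content manager picks text/plain

    print(msg.get_content_type())           # text/plain
    print(msg.get_content().strip())        # Pizza at noon?
    raw = bytes(msg)                        # fully formatted message, via __bytes__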
For details, see the :mod:`~email.contentmanager` documentation and +the :ref:`email-contentmanager-api-examples`. These API additions complete the +bulk of the work that was planned as part of the email6 project. The currently +provisional API is scheduled to become final in Python 3.5 (possibly with a few +minor additions in the area of error handling). (Contributed by R. David +Murray in :issue:`18891`.) filecmp @@ -701,6 +844,11 @@ for example, if the file might have been changed and re-checked in less time than the resolution of a particular filesystem's file modification time field. (Contributed by Mark Levitt in :issue:`18149`.) +New module attribute :data:`~filecmp.DEFAULT_IGNORES` provides the list of +directories that are used as the default value for the *ignore* parameter of +the :func:`~filecmp.dircmp` function. (Contributed by Eli Bendersky in +:issue:`15442`.) + functools --------- @@ -726,6 +874,10 @@ multiple implementations of an operation that allows it to work with :pep:`443` -- Single-dispatch generic functions PEP written and implemented by Łukasz Langa. +:func:`~functools.total_ordering` now supports a return value of +:const:`NotImplemented` from the underlying comparison function. (Contributed +by Katie Miller in :issue:`10042`.) + A pure-python version of the :func:`~functools.partial` function is now in the stdlib; in CPython it is overridden by the C accelerated version, but it is available for other implementations to use. (Contributed by Brian Thorne in @@ -740,11 +892,28 @@ dictionaries containing the collections statistics since interpreter startup. (Contributed by Antoine Pitrou in :issue:`16351`.) +glob +---- + +A new function :func:`~glob.escape` provides a way to escape special characters +in a filename so that they do not become part of the globbing expansion but are +instead matched literally. (Contributed by Serhiy Storchaka in :issue:`8402`.) + + hashlib ------- -New :func:`hashlib.pbkdf2_hmac` function. -(Contributed by Christian Heimes in :issue:`18582`) +A new :func:`hashlib.pbkdf2_hmac` function provides +the `PKCS#5 password-based key derivation function 2 +`_. (Contributed by Christian +Heimes in :issue:`18582`) + +The :attr:`~hashlib.hash.name` attribute of :mod:`hashlib` hash objects is now +a formally supported interface. It has always existed in CPython's +:mod:`hashlib` (although it did not return lower case names for all supported +hashes), but it was not a public interface and so some other Python +implementations have not previously supported it. (Contributed by Jason R. +Coombs in :issue:`18532`.) hmac @@ -756,20 +925,31 @@ argument to the :func:`~hmac.new` function, and the *msg* parameter to both the accepts any type supported by the :mod:`hashlib` module. (Contributed by Jonas Borgström in :issue:`18240`.) +The *digestmod* argument to the :func:`hmac.new` function may now be any hash +digest name recognized by :mod:`hashlib`. In addition, the current behavior in +which the value of *digestmod* defaults to ``MD5`` is deprecated: in a +future version of Python there will be no default value. (Contributed by +Christian Heimes in :issue:`17276`.) + +With the addition of :attr:`~hmac.HMAC.block_size` and :attr:`~hmac.HMAC.name` +attributes (and the formal documentation of the :attr:`~hmac.HMAC.digest_size` +attribute), the :mod:`hmac` module now conforms fully to the :pep:`247` API. +(Contributed by Christian Heimes in :issue:`18775`.) 
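A brief sketch of :func:`hashlib.pbkdf2_hmac` and of the new string *digestmod* handling described above; the salt handling and iteration count are illustrative only, not a recommendation::

    import binascii
    import hashlib
    import hmac
    import os

    password = b'correct horse battery staple'
    salt = os.urandom(16)                              # random per-password salt
    key = hashlib.pbkdf2_hmac('sha256', password, salt, 100000)
    print(binascii.hexlify(key))                       # derived key; store salt and iterations alongside it

    # digestmod may now be any digest name known to hashlib.
    tag = hmac.new(key, b'message', digestmod='sha256')
    print(tag.name, tag.block_size, tag.hexdigest())   # the newly documented attributes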
+ html ---- -Added a new :func:`html.unescape` function that converts HTML5 character -references to the corresponding Unicode characters. -(Contributed by Ezio Melotti in :issue:`2927`) +New function :func:`~html.unescape` function converts HTML5 character references to +the corresponding Unicode characters. (Contributed by Ezio Melotti in +:issue:`2927`) -Added a new *convert_charrefs* keyword argument to -:class:`~html.parser.HTMLParser` that, when ``True``, automatically converts -all character references. For backward-compatibility, its value defaults -to ``False``, but it will change to ``True`` in future versions, so you -are invited to set it explicitly and update your code to use this new feature. -(Contributed by Ezio Melotti in :issue:`13633`) +:class:`~html.parser.HTMLParser` accepts a new keyword argument +*convert_charrefs* that, when ``True``, automatically converts all character +references. For backward-compatibility, its value defaults to ``False``, but +it will change to ``True`` in a future version of Python, so you are invited to +set it explicitly and update your code to use this new feature. (Contributed +by Ezio Melotti in :issue:`13633`) The *strict* argument of :class:`~html.parser.HTMLParser` is now deprecated. (Contributed by Ezio Melotti in :issue:`15114`) @@ -785,6 +965,10 @@ This extended error description will be formatted using the :attr:`~http.server.HTTP.error_message_format` attribute and sent as the body of the error response. (Contributed by Karl Cow in :issue:`12921`.) +The :mod:`http.server` :ref:`command line interface ` now has +a ``-b/--bind`` option that causes the server to listen on a specific address. +(Contributed by Malte Swart in :issue:`17764`.) + importlib --------- @@ -823,11 +1007,16 @@ by Brett Cannon in :issue:`18058`.) that decodes source from bytes using universal newline processing. This is useful for implementing :meth:`.InspectLoader.get_source` methods. +:class:`importlib.machinery.ExtensionFileLoader` now has a +:meth:`~importlib.machinery.ExtensionFileLoader.get_filename` method. This was +inadvertently omitted in the original implementation. (Contributed by Eric +Snow in :issue:`19152`.) + inspect ------- -The inspect module now offers a basic :ref:`command line interface +The :mod:`inspect` module now offers a basic :ref:`command line interface ` to quickly display source code and other information for modules, classes and functions. (Contributed by Claudiu Popa and Nick Coghlan in :issue:`18626`) @@ -845,10 +1034,14 @@ metaclasses (Contributed by Ethan Furman in :issue:`18929` and :func:`~inspect.getfullargspec` and :func:`~inspect.getargspec` now use the :func:`~inspect.signature` API. This allows them to -support much broader range of functions, including some builtins and -callables that follow ``__signature__`` protocol. It is still -recommended to update your code to use :func:`~inspect.signature` -directly. (Contributed by Yury Selivanov in :issue:`17481`) +support a much broader range of callables, including those with +``__signature__`` attributes, those with metadata provided by argument +clinic, :func:`functools.partial` objects and more. Note that, unlike +:func:`~inspect.signature`, these functions still ignore ``__wrapped__`` +attributes, and report the already bound first argument for bound methods, +so it is still necessary to update your code to use +:func:`~inspect.signature` directly if those features are desired. 
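A small illustration, using invented function and argument names, of both APIs describing a :func:`functools.partial` object::

    import inspect
    from functools import partial

    def scale(vector, factor, *, inplace=False):
        ...

    bound = partial(scale, factor=2.0)
    print(inspect.signature(bound))                   # factor is reported with its bound value
    print(inspect.getfullargspec(scale).kwonlyargs)   # ['inplace']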
+(Contributed by Yury Selivanov in :issue:`17481`) :func:`~inspect.signature` now supports duck types of CPython functions, which adds support for functions compiled with Cython. (Contributed @@ -864,6 +1057,10 @@ has been removed: :mod:`ipaddress` is now considered a stable API, covered by the normal standard library requirements to maintain backwards compatibility. +A new :attr:`~ipaddress.IPv4Address.is_global` property is ``True`` if +an address is globally routeable. (Contributed by Peter Moody in +:issue:`17400`.) + logging ------- @@ -872,6 +1069,24 @@ The :class:`~logging.handlers.TimedRotatingFileHandler` has a new *atTime* parameter that can be used to specify the time of day when rollover should happen. (Contributed by Ronald Oussoren in :issue:`9556`.) +:class:`~logging.handlers.SocketHandler` and +:class:`~logging.handlers.DatagramHandler` now support Unix domain sockets (by +setting *port* to ``None``). (Contributed by Vinay Sajip in commit +ce46195b56a9.) + +:func:`~logging.config.fileConfig` now accepts a +:class:`configparser.RawConfigParser` subclass instance for the *fname* +parameter. This facilitates using a configuration file when logging +configuration is just a part of the overall application configuration, or where +the application modifies the configuration before passing it to +:func:`~logging.config.fileConfig`. (Contributed by Vinay Sajip in +:issue:`16110`.) + +Logging configuration data received from a socket via the +:func:`logging.config.listen` function can now be validated before being +processed by supplying a verification function as the argument to the new +*verify* keyword argument. (Contributed by Vinay Sajip in :issue:`15452`.) + .. _whatsnew-marshal-3: @@ -884,26 +1099,15 @@ interned strings and preserving the interning on deserialization, and extends this "one copy" ability to any object type (including handling recursive references). This reduces both the size of ``.pyc`` files and the amount of memory a module occupies in memory when it is loaded from a ``.pyc`` (or -``.pyo``) file. (Contributed by Kristján Valur Jónsson in :issue:`16475`.) +``.pyo``) file. (Contributed by Kristján Valur Jónsson in :issue:`16475`, +with additional speedups by Antoine Pitrou in :issue:`19219`.) mmap ---- -mmap objects can now be weakref'ed. -(Contributed by Valerie Lambert in :issue:`4885`.) - - -mock ----- - -:mod:`~unittest.mock` objects now inspect their specification signatures when -matching calls, which means an argument can now be matched by either position -or name, instead of only by position. (Contributed by Antoine Pitrou in -:issue:`17015`.) - -:func:`~mock.mock_open` objects now have ``readline`` and ``readlines`` -methods. (Contributed by Toshio Kuratomi in :issue:`17467`.) +mmap objects can now be :mod:`weakref`\ ed. (Contributed by Valerie Lambert in +:issue:`4885`.) multiprocessing @@ -911,20 +1115,34 @@ multiprocessing .. _whatsnew-multiprocessing-no-fork: -On Unix, two new :ref:`start methods ` -(``spawn`` and ``forkserver``) have been added for starting processes using +On Unix two new :ref:`start methods `, +(``spawn`` and ``forkserver``, have been added for starting processes using :mod:`multiprocessing`. These make the mixing of processes with threads more robust, and the ``spawn`` method matches the semantics that multiprocessing has -always used on Windows. (Contributed by Richard Oudkerk in :issue:`8713`). 
- -Also, except when using the old *fork* start method, child processes -will no longer inherit unneeded handles/file descriptors from their parents -(part of :issue:`8713`). +always used on Windows. New function +:func:`~multiprocessing.get_all_start_methods` reports all start methods +available on the platform, :func:`~multiprocessing.get_start_method` reports +the current start method, and :func:`~multiprocessing.set_start_method` sets +the start method. (Contributed by Richard Oudkerk in :issue:`8713`). + +:mod:`multiprocessing` also now has the concept of a ``context``, which +determines how child processes are created. New function +:func:`~multiprocessing.get_context` returns a context that uses a specified +start method. It has the same API as the :mod:`multiprocessing` module itself, +so you can use it to create :class:`~multiprocessing.pool.Pool`\ s and other +objects that will operate within that context. This allows a framework and an +application or different parts of the same application to use multiprocessing +without interfering with each other. (Contributed by Richard Oudkerk in +:issue:`18999`.) + +Except when using the old *fork* start method, child processes no longer +inherit unneeded handles/file descriptors from their parents (part of +:issue:`8713`). :mod:`multiprocessing` now relies on :mod:`runpy` (which implements the ``-m`` switch) to initialise ``__main__`` appropriately in child processes when using the ``spawn`` or ``forkserver`` start methods. This resolves some -edge cases where combining multiprocessing, the ``-m`` command line switch +edge cases where combining multiprocessing, the ``-m`` command line switch, and explicit relative imports could cause obscure failures in child processes. (Contributed by Nick Coghlan in :issue:`19946`) @@ -932,6 +1150,11 @@ processes. (Contributed by Nick Coghlan in :issue:`19946`) operator -------- +New function :func:`~operator.length_hint` provides an implementation of the +specification for how the :meth:`~object.__length_hint__` special method should +be used, as part of the :pep:`424` formal specification of this language +feature. (Contributed by Armin Ronacher in :issue:`16148`.) + There is now a pure-python version of the :mod:`operator` module available for reference and for use by alternate implementations of Python. (Contributed by Zachary Ware in :issue:`16694`.) @@ -940,32 +1163,48 @@ Zachary Ware in :issue:`16694`.) os -- -New functions to get and set the :ref:`inheritable flag ` of a file -descriptors or a Windows handle: +There are new functions to get and set the :ref:`inheritable flag +` of a file descriptor (:func:`os.get_inheritable`, +:func:`os.set_inheritable`) or a Windows handle +(:func:`os.get_handle_inheritable`, :func:`os.set_handle_inheritable`). -* :func:`os.get_inheritable`, :func:`os.set_inheritable` -* :func:`os.get_handle_inheritable`, :func:`os.set_handle_inheritable` +New function :func:`~os.cpu_count` reports the number of CPUs available on the +platform on which Python is running (or ``None`` if the count can't be +determined). The :func:`multiprocessing.cpu_count` function is now implemented +in terms of this function). (Contributed by Trent Nelson, Yogesh Chaudhari, +Victor Stinner, and Charles-François Natali in :issue:`17914`.) + +:func:`os.path.samestat` is now available on the Windows platform (and the +:func:`os.path.samefile` implementation is now shared between Unix and +Windows). (Contributed by Brian Curtin in :issue:`11939`.) 
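A short sketch tying the new start-method API to the new :func:`os.cpu_count` function; the ``__main__`` guard matters because the *spawn* method re-imports the main module in each worker::

    import multiprocessing as mp
    import os

    def square(x):
        return x * x

    if __name__ == '__main__':
        print(os.cpu_count())               # may be None if the count cannot be determined
        print(mp.get_all_start_methods())   # e.g. ['fork', 'spawn', 'forkserver'] on Linux
        ctx = mp.get_context('spawn')       # local choice; set_start_method() changes the global default
        with ctx.Pool(2) as pool:
            print(pool.map(square, range(10)))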
-The :mod:`os` module now provides a :func:`~os.cpu_count` function, analogous to -the :func:`multiprocessing.cpu_count` function (which is now implemented in -terms of the new :mod:`os` function). (Contributed by Trent Nelson, Yogesh -Chaudhari, Victor Stinner, and Charles-François Natali in :issue:`17914`.) +:func:`os.path.ismount` now recognizes volumes mounted below a drive +root on Windows. (Contributed by Tim Golden in :issue:`9035`.) + +:func:`os.open` supports two new flags on platforms that provide them, +:data:`~os.O_PATH` (un-opened file descriptor), and :data:`~os.O_TMPFILE` +(unnamed temporary file; as of 3.4.0 release available only on Linux systems +with a kernel version of 3.11 or newer that have uapi headers). (Contributed +by Christian Heimes in :issue:`18673` and Benjamin Peterson, respectively.) pdb --- -The ``print`` command has been removed from :mod:`pdb`, restoring access to the -``print`` function. - -Rationale: Python2's ``pdb`` did not have a ``print`` command; instead, -entering ``print`` executed the ``print`` statement. In Python3 ``print`` was -mistakenly made an alias for the pdb :pdbcmd:`p` command. ``p``, however, -prints the ``repr`` of its argument, not the ``str`` like the Python2 ``print`` -command did. Worse, the Python3 ``pdb print`` command shadowed the Python3 -``print`` function, making it inaccessible at the ``pdb`` prompt. +:mod:`pdb` has been enhanced to handle generators, :keyword:`yield`, and +``yield from`` in a more useful fashion. This is especially helpful when +debugging :mod:`asyncio` based programs. (Contributed by Andrew Svetlov and +Xavier de Gaye in :issue:`16596`.) -(Contributed by Connor Osborn in :issue:`18764`.) +The ``print`` command has been removed from :mod:`pdb`, restoring access to the +Python :func:`print` function from the pdb command line. Python2's ``pdb`` did +not have a ``print`` command; instead, entering ``print`` executed the +``print`` statement. In Python3 ``print`` was mistakenly made an alias for the +pdb :pdbcmd:`p` command. ``p``, however, prints the ``repr`` of its argument, +not the ``str`` like the Python2 ``print`` command did. Worse, the Python3 +``pdb print`` command shadowed the Python3 ``print`` function, making it +inaccessible at the ``pdb`` prompt. (Contributed by Connor Osborn in +:issue:`18764`.) .. _whatsnew-protocol-4: @@ -976,7 +1215,7 @@ pickle :mod:`pickle` now supports (but does not use by default) a new pickle protocol, protocol 4. This new protocol addresses a number of issues that were present in previous protocols, such as the serialization of nested classes, very large -strings and containers, or classes whose :meth:`__new__` method takes +strings and containers, and classes whose :meth:`__new__` method takes keyword-only arguments. It also provides some efficiency improvements. .. seealso:: @@ -988,9 +1227,13 @@ keyword-only arguments. It also provides some efficiency improvements. plistlib -------- -:mod:`plistlib` now supports binary plist files, and offers the common -``load``/``loads``/``dump``/``dumps`` API pattern for serialization formats -(Contributed by Ronald Oussoren and others in :issue:`14455`). +:mod:`plistlib` now has an API that is similar to the standard pattern for +stdlib serialization protocols, with new :func:`~plistlib.load`, +:func:`~plistlib.dump`, :func:`~plistlib.loads`, and :func:`~plistlib.dumps` +functions. (The older API is now deprecated.) 
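A minimal round trip with the new functions (the dictionary contents here are arbitrary)::

    import plistlib

    prefs = {'name': 'example', 'resizable': True, 'windows': [1, 2, 3]}
    data = plistlib.dumps(prefs)             # XML plist by default
    print(plistlib.loads(data) == prefs)     # True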
In addition to the already +supported XML plist format (:data:`~plistlib.FMT_XML`), it also now supports +the binary plist format (:data:`~plistlib.FMT_BINARY`). (Contributed by Ronald +Oussoren and others in :issue:`14455`). poplib @@ -1006,11 +1249,15 @@ Catucci in :issue:`4473`.) pprint ------ -The :mod:`pprint` module now supports *compact* mode for formatting long -sequences (:issue:`19132`). +The :mod:`pprint` module's :class:`~pprint.PrettyPrinter` class and its +:func:`~pprint.pformat`, and :func:`~pprint.pprint` functions have a new +option, *compact*, that controls how the output is formatted. Currently +setting *compact* to ``True`` means that sequences will be printed with as many +sequence elements as will fit within *width* on each (indented) line. +(Contributed by Serhiy Storchaka in :issue:`19132`.) Long strings are now wrapped using Python's normal line continuation -syntax (Contributed by Antoine Pitrou in :issue:`17150`.) +syntax. (Contributed by Antoine Pitrou in :issue:`17150`). pty @@ -1023,30 +1270,61 @@ the child process, instead of ``None``. (Contributed by Gregory P. Smith.) pydoc ----- -While significant changes have not been made to :mod:`pydoc` directly, +The :mod:`pydoc` module is now based directly on the :func:`inspect.signature` +introspection API, allowing it to provide signature information for a wider +variety of callable objects. This change also means that ``__wrapped__`` +attributes are now taken into account when displaying help information +(Contributed by Larry Hastings in :issue:`19674`) + +The :mod:`pydoc` module no longer displays the ``self`` parameter for +already bound methods. Instead, it aims to always display the exact current +signature of the supplied callable (Contributed by Larry Hastings in +:issue:`20710`) + +In addition to the changes that have been made to :mod:`pydoc` directly, its handling of custom ``__dir__`` methods and various descriptor -behaviours has been improved substantially by the underlying changes in +behaviours has also been improved substantially by the underlying changes in the :mod:`inspect` module. +As the :func:`help` builtin is based on :mod:`pydoc`, the above changes also +affect the behaviour of :func:`help`. + re -- -Added :func:`re.fullmatch` function and :meth:`regex.fullmatch` method, -which anchor the pattern at both ends of the string to match. -(Contributed by Matthew Barnett in :issue:`16203`.) +New :func:`~re.fullmatch` function and :meth:`.regex.fullmatch` method anchor +the pattern at both ends of the string to match. This provides a way to be +explicit about the goal of the match, which avoids a class of subtle bugs where +``$`` characters get lost during code changes or the addition of alternatives +to an existing regular expression. (Contributed by Matthew Barnett in +:issue:`16203`.) The repr of :ref:`regex objects ` now includes the pattern and the flags; the repr of :ref:`match objects ` now -includes the start, end, and the part of the string that matched. -(Contributed by Serhiy Storchaka in :issue:`13592` and :issue:`17087`.) +includes the start, end, and the part of the string that matched. (Contributed +by Hugo Lopes Tavares and Serhiy Storchaka in :issue:`13592` and +:issue:`17087`.) resource -------- -New :func:`resource.prlimit` function and Linux specific constants. -(Contributed by Christian Heimes in :issue:`16595` and :issue:`19324`.) 
+New :func:`~resource.prlimit` function, available on Linux platforms with a +kernel version of 2.6.36 or later and glibc of 2.13 or later, provides the +ability to query or set the resource limits for processes other than the one +making the call. (Contributed by Christian Heimes in :issue:`16595`.) + +On Linux kernel version 2.6.36 or later, there are there are also some new +Linux specific constants: :attr:`~resource.RLIMIT_MSGQUEUE`, +:attr:`~resource.RLIMIT_NICE`, :attr:`~resource.RLIMIT_RTPRIO`, +:attr:`~resource.RLIMIT_RTTIME`, and :attr:`~resource.RLIMIT_SIGPENDING`. +(Contributed by Christian Heimes in :issue:`19324`.) + +On FreeBSD version 9 and later, there some new FreeBSD specific constants: +:attr:`~resource.RLIMIT_SBSIZE`, :attr:`~resource.RLIMIT_SWAP`, and +:attr:`~resource.RLIMIT_NPTS`. (Contributed by Claudiu Popa in +:issue:`19343`.) select @@ -1057,6 +1335,11 @@ When used in a :keyword:`with` statement, the :meth:`~select.epoll.close` method will be called automatically at the end of the block. (Contributed by Serhiy Storchaka in :issue:`16488`.) +:class:`~select.devpoll` objects now have :meth:`~select.devpoll.fileno` and +:meth:`~select.devpoll.close` methods, as well as a new attribute +:attr:`~select.devpoll.closed`. (Contributed by Victor Stinner in +:issue:`18794`.) + shelve ------ @@ -1066,11 +1349,21 @@ and will be automatically closed at the end of the :keyword:`with` block. (Contributed by Filip Gruszczyński in :issue:`13896`.) +shutil +------ + +:func:`~shutil.copyfile` now raises a specific :exc:`~shutil.Error` subclass, +:exc:`~shutil.SameFileError`, when the source and destination are the same +file, which allows an application to take appropriate action on this specific +error. (Contributed by Atsuo Ishimoto and Hynek Schlawack in +:issue:`1492704`.) + + smtpd ----- The :class:`~smtpd.SMTPServer` and :class:`~smtpd.SMTPChannel` classes now -accept a *map* keyword argument, which if specified is passed in to +accept a *map* keyword argument which, if specified, is passed in to :class:`asynchat.async_chat` as its *map* argument. This allows an application to avoid affecting the global socket map. (Contributed by Vinay Sajip in :issue:`11959`.) @@ -1082,7 +1375,7 @@ smtplib :exc:`~smtplib.SMTPException` is now a subclass of :exc:`OSError`, which allows both socket level errors and SMTP protocol level errors to be caught in one try/except statement by code that only cares whether or not an error occurred. -(:issue:`2118`). +(Contributed by Ned Jackson Lovely in :issue:`2118`). socket @@ -1101,14 +1394,17 @@ during debugging, instead of integer "magic numbers". The :data:`~socket.AF_LINK` constant is now available on BSD and OSX. +:func:`~socket.inet_pton` and :func:`~socket.inet_ntop` are now supported +on Windows. (Contributed by Atsuo Ishimoto in :issue:`7171`.) + sqlite3 ------- -A new boolean parameter, *uri*, to the :func:`~sqlite3.connect` function can -be used to indicate that the *database* parameter is a ``uri`` (see -the `SQLite URI documentation `_). -(Contributed by poq in :issue:`13773`.) +A new boolean parameter to the :func:`~sqlite3.connect` function, *uri*, can be +used to indicate that the *database* parameter is a ``uri`` (see the `SQLite +URI documentation `_). (Contributed by poq in +:issue:`13773`.) ssl @@ -1121,6 +1417,25 @@ TLSv1.2 support) have been added; support for these protocols is only available Python is linked with OpenSSL 1.0.1 or later. (Contributed by Michele Orrù and Antoine Pitrou in :issue:`16692`) +.. 
_whatsnew34-sslcontext: + +New function :func:`~ssl.create_default_context` provides a standard way to +obtain an :class:`~ssl.SSLContext` whose settings are intended to be a +reasonable balance between compatibility and security. These settings are +more stringent than the defaults provided by the :class:`~ssl.SSLContext` +constructor, and may be adjusted in the future, without prior deprecation, if +best-practice security requirements change. The new recommended best +practice for using stdlib libraries that support SSL is to use +:func:`~ssl.create_default_context` to obtain an :class:`~ssl.SSLContext` +object, modify it if needed, and then pass it as the *context* argument +of the appropriate stdlib API. (Contributed by Christian Heimes +in :issue:`19689`.) + +:class:`~ssl.SSLContext` method :meth:`~ssl.SSLContext.load_verify_locations` +accepts a new optional argument *cadata*, which can be used to provide PEM or +DER encoded certificates directly via strings or bytes, respectively. +(Contributed by Christian Heimes in :issue:`18138`.) + New function :func:`~ssl.get_default_verify_paths` returns a named tuple of the paths and environment variables that the :meth:`~ssl.SSLContext.set_default_verify_paths` method uses to set @@ -1133,36 +1448,62 @@ in :issue:`18143`.) ``X.509`` certs, ``X.509 CA`` certs, and certificate revocation lists (``crl``\ s), as well as a :meth:`~ssl.SSLContext.get_ca_certs` method that returns a list of the loaded ``CA`` certificates. (Contributed by Christian Heimes in -and :issue:`18147`.) +:issue:`18147`.) + +If OpenSSL 0.9.8 or later is available, :class:`~ssl.SSLContext` has an new +attribute :attr:`~ssl.SSLContext.verify_flags` that can be used to control the +certificate verification process by setting it to some combination of the new +constants :data:`~ssl.VERIFY_DEFAULT`, :data:`~ssl.VERIFY_CRL_CHECK_LEAF`, +:data:`~ssl.VERIFY_CRL_CHECK_CHAIN`, or :data:`~ssl.VERIFY_X509_STRICT`. +OpenSSL does not do any CRL verification by default. (Contributed by +Christien Heimes in :issue:`8813`.) + +New :class:`~ssl.SSLContext` method :meth:`~ssl.SSLContext.load_default_certs` +loads a set of dfault "certificate authority" (CA) certificates from default +locations, which vary according to the platform. It can be used to load both +TLS web server authentication certificates +(``purpose=``:data:`~ssl.Purpose.SERVER_AUTH`) for a client to use to verify a +server, and certificates for a server to use in verifying client certificates +(``purpose=``:data:`~ssl.Purpose.CLIENT_AUTH`). (Contributed by Christian +Heimes in :issue:`19292`.) + +.. _whatsnew34-win-cert-store: Two new windows-only functions, :func:`~ssl.enum_certificates` and :func:`~ssl.enum_crls` provide the ability to retrieve certificates, certificate information, and CRLs from the Windows cert store. (Contributed by Christian Heimes in :issue:`17134`.) -Support for server-side SNI using the new +.. _whatsnew34-sni: + +Support for server-side SNI (Server Name Indication) using the new :meth:`ssl.SSLContext.set_servername_callback` method. (Contributed by Daniel Black in :issue:`8109`.) +The dictionary returned by :meth:`.SSLSocket.getpeercert` contains additional +``X509v3`` extension items: ``crlDistributionPoints``, ``calIssuers``, and +``OCSP`` URIs. (Contributed by Christian Heimes in :issue:`18379`.) + stat ---- The :mod:`stat` module is now backed by a C implementation in :mod:`_stat`. A C implementation is required as most of the values aren't standardized and -platform-dependent. 
(Contributed by Christian Heimes in :issue:`11016`.) +are platform-dependent. (Contributed by Christian Heimes in :issue:`11016`.) -The module supports new file types: door, event port and whiteout. +The module supports new :mod:`~stat.ST_MODE` flags, :mod:`~stat.S_IFDOOR`, +:attr:`~stat.S_IFPORT`, and :attr:`~stat.S_IFWHT`. (Contributed by +Christian Hiemes in :issue:`11016`.) struct ------ -:mod:`struct` now supports the streamed unpacking of a buffer containing -repeated instances of a given format of data. Both a module level -:mod:`~struct.iter_unpack` function and a :meth:`struct.Struct.iter_unpack` -method on compiled formats have been added. (Contributed by Antoine Pitrou in -:issue:`17804`.) +New function :mod:`~struct.iter_unpack` and a new +:meth:`struct.Struct.iter_unpack` method on compiled formats provide streamed +unpacking of a buffer containing repeated instances of a given format of data. +(Contributed by Antoine Pitrou in :issue:`17804`.) subprocess @@ -1172,6 +1513,10 @@ subprocess be used to provide the contents of ``stdin`` for the command that is run. (Contributed by Zack Weinberg in :issue:`16624`.) +:func:`~subprocess.getstatus` and :func:`~subprocess.getstatusoutput` now +work on Windows. This change was actually inadvertently made in 3.3.4. +(Contributed by Tim Golden in :issue:`10197`.) + sunau ----- @@ -1179,26 +1524,93 @@ sunau The :meth:`~sunau.getparams` method now returns a namedtuple rather than a plain tuple. (Contributed by Claudiu Popa in :issue:`18901`.) -:meth:`sunau.open` now supports the context manager protocol (:issue:`18878`). +:meth:`sunau.open` now supports the context manager protocol: when used in a +:keyword:`with` block, the ``close`` method of the returned object will be +called automatically at the end of the block. (Contributed by Serhiy Storchaka +in :issue:`18878`.) + +:meth:`.AU_write.setsampwidth` now supports 24 bit samples, thus adding +support for writing 24 sample using the module. (Contributed by +Serhiy Storchaka in :issue:`19261`.) + +The :meth:`~sunau.AU_write.writeframesraw` and +:meth:`~sunau.AU_write.writeframes` methods now accept any :term:`bytes-like +object`. (Contributed by Serhiy Storchaka in :issue:`8311`.) sys --- New function :func:`sys.getallocatedblocks` returns the current number of -blocks allocated by the interpreter (in CPython with the default +blocks allocated by the interpreter. (In CPython with the default ``--with-pymalloc`` setting, this is allocations made through the -:c:func:`PyObject_Malloc` API). This can be useful for tracking memory leaks, +:c:func:`PyObject_Malloc` API.) This can be useful for tracking memory leaks, especially if automated via a test suite. (Contributed by Antoine Pitrou in :issue:`13390`.) +When the Python interpreter starts in :ref:`interactive mode +`, it checks for an :data:`~sys.__interactivehook__` attribute +on the :mod:`sys` module. If the attribute exists, its value is called with no +arguments just before interactive mode is started. The check is made after the +:envvar:`PYTHONSTARTUP` file is read, so it can be set there. The :mod:`site` +module :ref:`sets it ` to a function that enables tab +completion and history saving (in :file:`~/.python-history`) if the platform +supports :mod:`readline`. If you do not want this (new) behavior, you can +override it in :envvar:`PYTHONSTARTUP`, :mod:`sitecustomize`, or +:mod:`usercustomize` by deleting this attribute from :mod:`sys` (or setting it +to some other callable). 
(Contributed by Éric Araujo and Antoine Pitrou in +:issue:`5845`.) + + +tarfile +------- + +The :mod:`tarfile` module now supports a simple :ref:`tarfile-commandline` when +called as a script directly or via ``-m``. This can be used to create and +extract tarfile archives. (Contributed by Berker Peksag in :issue:`13477`.) + + +textwrap +-------- + +The :class:`~textwrap.TextWrapper` class has two new attributes/constructor +arguments: :attr:`~textwrap.TextWrapper.max_lines`, which limits the number of +lines in the output, and :attr:`~textwrap.TextWrapper.placeholder`, which is a +string that will appear at the end of the output if it has been truncated +because of *max_lines*. Building on these capabilities, a new convenience +function :func:`~textwrap.shorten` collapses all of the whitespace in the input +to single spaces and produces a single line of a given *width* that ends with +the *placeholder* (by default, ``[...]``). (Contributed by Antoine Pitrou and +Serhiy Storchaka in :issue:`18585` and :issue:`18725`.) + + +threading +--------- + +The :class:`~threading.Thread` object representing the main thread can be +obtained from the new :func:`~threading.main_thread` function. In normal +conditions this will be the thread from which the Python interpreter was +started. (Contributed by Andrew Svetlov in :issue:`18882`.) + traceback --------- A new :func:`traceback.clear_frames` function takes a traceback object and clears the local variables in all of the frames it references, -reducing the amount of memory consumed (:issue:`1565525`). +reducing the amount of memory consumed. (Contributed by Andrew Kuchling in +:issue:`1565525`). + + +types +----- + +A new :func:`~types.DynamicClassAttribute` descriptor provides a way to define +an attribute that acts normally when looked up through an instance object, but +which is routed to the *class* ``__getattr__`` when looked up through the +class. This allows one to have properties active on a class, and have virtual +attributes on the class with the same name (see :mod:`Enum` for an example). +(Contributed by Ethan Furman in :issue:`19030`.) urllib @@ -1208,6 +1620,29 @@ urllib :class:`~urllib.request.DataHandler` class. (Contributed by Mathias Panzenböck in :issue:`16423`.) +The http method that will be used by a :class:`~urllib.request.Request` class +can now be specified by setting a :class:`~urllib.request.Request.method` +class attribute on the subclass. (Contributed by Jason R Coombs in +:issue:`18978`.) + +:class:`~urllib.request.Request` objects are now reusable: if the +:attr:`~urllib.request.Request.full_url` or :attr:`~urllib.request.Request.data` +attributes are modified, all relevant internal properties are updated. This +means, for example, that it is now possible to use the same +:class:`~urllib.request.Request` object in more than one +:meth:`.OpenerDirector.open` call with different *data* arguments, or to +modify a :class:`~urllib.request.Request`\ 's ``url`` rather than recomputing it +from scratch. There is also a new +:meth:`~urllib.request.Request.remove_header` method that can be used to remove +headers from a :class:`~urllib.request.Request`. (Contributed by Alexey +Kachayev in :issue:`16464`, Daniel Wozniak in :issue:`17485`, and Damien Brecht +and Senthil Kumaran in :issue:`17272`.) + +:class:`~urllib.error.HTTPError` objects now have a +:attr:`~urllib.error.HTTPError.headers` attribute that provides access to the +HTTP response headers associated with the error. (Contributed by +Berker Peksag in :issue:`15701`.) 
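+
+As a short, hypothetical sketch (the URL and payload below are placeholders,
+not part of the original text), the reusable request objects and the new
+``headers`` attribute on :class:`~urllib.error.HTTPError` can be combined
+like this::
+
+    import urllib.error
+    import urllib.request
+
+    req = urllib.request.Request('http://www.example.com/resource')
+    try:
+        with urllib.request.urlopen(req) as resp:
+            body = resp.read()
+    except urllib.error.HTTPError as err:
+        # New in 3.4: the response headers that accompanied the error.
+        print(err.code, err.headers.get('Content-Type'))
+
+    # Request objects are now reusable: assigning to req.data updates the
+    # relevant internal state, so the same object can be opened again.
+    req.data = b'spam=1'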
+ unittest -------- @@ -1223,7 +1658,7 @@ which will run even if one or more of them fail. For example:: class NumbersTest(unittest.TestCase): def test_even(self): for i in range(6): - with self.subTest(i=1): + with self.subTest(i=i): self.assertEqual(i % 2, 0) will result in six subtests, each identified in the unittest verbose output @@ -1243,6 +1678,34 @@ error. (Contributed by Zach Ware in :issue:`16935`.) consistent test ordering. (Contributed by Martin Melin and Jeff Ramnani in :issue:`16709`.) +:class:`~unittest.TestSuite` now drops references to tests as soon as the test +has been run, if the test is successful. On Python interpreters that do +garbage collection, this allows the tests to be garbage collected if nothing +else is holding a reference to the test. It is possible to override this +behavior by creating a :class:`~unittest.TestSuite` subclass that defines a +custom ``_removeTestAtIndex`` method. (Contributed by Tom Wardill, Matt +McClure, and Andrew Svetlov in :issue:`11798`.) + +A new test assertion context-manager, :meth:`~unittest.TestCase.assertLogs`, +will ensure that a given block of code emits a log message using the +:mod:`logging` module. By default the message can come from any logger and +have a priority of ``INFO`` or higher, but both the logger name and an +alternative minimum logging level may be specified. The object returned by the +context manager can be queried for the :class:`~logging.LogRecord`\ s and/or +formatted messages that were logged. (Contributed by Antoine Pitrou in +:issue:`18937`.) + +Test discovery now works with namespace packages (Contributed by Claudiu Popa +in :issue:`17457`.) + +:mod:`unittest.mock` objects now inspect their specification signatures when +matching calls, which means an argument can now be matched by either position +or name, instead of only by position. (Contributed by Antoine Pitrou in +:issue:`17015`.) + +:func:`~mock.mock_open` objects now have ``readline`` and ``readlines`` +methods. (Contributed by Toshio Kuratomi in :issue:`17467`.) + venv ---- @@ -1250,6 +1713,12 @@ venv :mod:`venv` now includes activation scripts for the ``csh`` and ``fish`` shells (Contributed by Andrew Svetlov in :issue:`15417`.) +:class:`~venv.EnvBuilder` and the :func:`~venv.create` convenience function +take a new keyword argument *with_pip*, which defaults to ``False``, that +controls whether or not :class:`~venv.EnvBuilder` ensures that ``pip`` is +installed in the virtual environment. (Contributed by Nick Coghlan in +:issue:`19552` as part of the :pep:`453` implementation.) + wave ---- @@ -1260,6 +1729,14 @@ plain tuple. (Contributed by Claudiu Popa in :issue:`17487`.) :meth:`wave.open` now supports the context manager protocol. (Contributed by Claudiu Popa in :issue:`17616`.) +:mod:`wave` can now :ref:`write output to unseekable files +`. (Contributed by David Jones, Guilherme Polo, and Serhiy +Storchaka in :issue:`5202`.) + +The :meth:`~wave.Wave_write.writeframesraw` and +:meth:`~wave.Wave_write.writeframes` methods now accept any :term:`bytes-like +object`. (Contributed by Serhiy Storchaka in :issue:`8311`.) + weakref ------- @@ -1295,13 +1772,19 @@ abbreviated (````) or expanded (````) form. (Contributed by Ariel Poliak and Serhiy Storchaka in :issue:`14377`.) -zipfile.PyZipfile ------------------ +zipfile +------- -Add a filter function to ignore some packages (tests for instance), -:meth:`~zipfile.PyZipFile.writepy`. 
+The :meth:`~zipfile.PyZipFile.writepy` method of the +:class:`~zipfile.PyZipFile` class has a new *filterfunc* option that can be +used to control which directories and files are added to the archive. For +example, this could be used to exclude test files from the archive. (Contributed by Christian Tismer in :issue:`19274`.) +The *allowZip64* parameter to :class:`~zipfile.ZipFile` and +:class:`~zipfile.PyZipfile` is now ``True`` by default. (Contributed by +William Mallard in :issue:`17201`.) + CPython Implementation Changes @@ -1375,7 +1858,12 @@ accurate signatures for builtins and standard library extension modules implemented in C. Some standard library extension modules have been converted to use Argument -Clinic in Python 3.4, and :mod:`inspect` has been updated accordingly. +Clinic in Python 3.4, and :mod:`pydoc` and :mod:`inspect` have been updated +accordingly. + +It is expected that signature metadata for programmatic introspection will +be added to additional callables implemented in C as part of Python 3.4 +maintenance releases. .. note:: The Argument Clinic PEP is not fully up to date with the state of the @@ -1405,15 +1893,53 @@ Other Build and C API Changes marked as accepting ``const char *`` rather than ``char *`` (Contributed by Serhiy Storchaka in :issue:`1772673`). -* New shell version of ``python-config``; can be used even when a python +* A new shell version of ``python-config`` can be used even when a python interpreter is not available (for example, in cross compilation scenarios). +* :c:func:`PyUnicode_FromFormat` now supports width and precision + specifications for ``%s``, ``%A``, ``%U``, ``%V``, ``%S``, and ``%R``. + (Contributed by Ysj Ray and Victor Stinner in :issue:`7330`.) +* New function :c:func:`PyStructSequence_InitType2` supplements the + existing :c:func:`PyStructSequence_InitType` function. The difference + is that it returns ``0`` on success and ``-1`` on failure. -Other Improvements -================== +* The CPython source can now be compiled using the address sanity checking + features of recent versions of GCC and clang: the false alarms in the small + object allocator have been silenced. (Contributed by Dhiru Kholia in + :issue:`18596`.) -* Tab-completion is now enabled by default in the interactive interpreter. +* The Windows build now uses `Address Space Layout Randomization + `_ and `Data Execution Prevention + `_. (Contributed by + Christian Heimes in :issue:`16632`.) + +* New function :c:func:`PyObject_LengthHint` is the C API equivalent + of :func:`operator.length_hint`. (Contributed by Armin Ronacher in + :issue:`16148`.) + + +.. _other-improvements-3.4: + +Other Improvements +------------------ + +.. _whatsnew-isolated-mode: + +* The :ref:`python ` command has a new :ref:`option + `, ``-I``, which causes it to run in "isolated mode", + which means that :data:`sys.path` contains neither the script's directory nor + the user's ``site-packages`` directory, and all :envvar:`PYTHON*` environment + variables are ignored (it implies both ``-s`` and ``-E``). Other + restrictions may also be applied in the future, with the goal being to + isolate the execution of a script from the user's environment. This is + appropriate, for example, when Python is used to run a system script. On + most POSIX systems it can and should be used in the ``#!`` line of system + scripts. (Contributed by Christian Heimes in :issue:`16499`.) + +* Tab-completion is now enabled by default in the interactive interpreter + on systems that support :mod:`readline`. 
History is also enabled by default, + and is written to (and read from) the file :file:`~/.python-history`. (Contributed by Antoine Pitrou and Éric Araujo in :issue:`5845`.) * Invoking the Python interpreter with ``--version`` now outputs the version to @@ -1443,10 +1969,40 @@ Other Improvements values for its constants from the C header files, instead of having the values hard-coded in the python module as was previously the case. +* Loading multiple python modules from a single OS module (``.so``, ``.dll``) + now works correctly (previously it silently returned the first python + module in the file). (Contributed by Václav Šmilauer in :issue:`16421`.) + +* A new opcode, :opcode:`LOAD_CLASSDEREF`, has been added to fix a bug in the + loading of free variables in class bodies that could be triggered by certain + uses of :ref:`__prepare__ `. (Contributed by Benjamin Peterson in + :issue:`17853`.) + +* A number of MemoryError-related crashes were identified and fixed by Victor + Stinner using his :pep:`445`-based ``pyfailmalloc`` tool (:issue:`18408`, + :issue:`18520`). + +* The :ref:`pyvenv ` command now accepts a ``--copies`` option + to use copies rather than symlinks even on systems where symlinks are the + default. (Contributed by Vinay Sajip in :issue:`18807`.) + +* The :ref:`pyvenv ` command also accepts a ``--without-pip`` + option to suppress the otherwise-automatic bootstrapping of pip into + the virtual environment. (Contributed by Nick Coghlan in :issue:`19552` + as part of the :pep:`453` implementation.) + +* The encoding name is now optional in the value set for the + :envvar:`PYTHONIOENCODING` environment variable. This makes it possible to + set just the error handler, without changing the default encoding. + (Contributed by Serhiy Storchaka in :issue:`18818`.) + +* The :mod:`bz2`, :mod:`lzma`, and :mod:`gzip` module ``open`` functions now + support ``x`` (exclusive creation) mode. (Contributed by Tim Heaney and + Vajrasky Kok in :issue:`19201`, :issue:`19222`, and :issue:`19223`.) Significant Optimizations -========================= +------------------------- * The UTF-32 decoder is now 3x to 4x faster. (Contributed by Serhiy Storchaka in :issue:`14625`.) @@ -1489,6 +2045,16 @@ Significant Optimizations * :func:`html.escape` is now 10x faster. (Contributed by Matt Bryant in :issue:`18020`.) +* On Windows, the native ``VirtualAlloc`` is now used instead of the CRT + ``malloc`` in ``obmalloc``. Artificial benchmarks show about a 3% memory + savings. + +* :func:`os.urandom` now uses a lazily-opened persistent file descriptor + so as to avoid using many file descriptors when run in parallel from + multiple threads. (Contributed by Antoine Pitrou in :issue:`18756`.) + + +.. _deprecated-3.4: Deprecated ========== @@ -1500,8 +2066,8 @@ when the interpreter is run with deprecation warnings enabled (for example, by using ``-Wd``). -Deprecated Python Modules, Functions and Methods ------------------------------------------------- +Deprecations in the Python API +------------------------------ * As mentioned in :ref:`whatsnew-pep-451`, a number of :mod:`importilb` methods and functions are deprecated: :meth:`importlib.find_loader` is @@ -1534,18 +2100,39 @@ Deprecated Python Modules, Functions and Methods * The :mod:`formatter` module is pending deprecation and is slated for removal in Python 3.6. -* MD5 as default digestmod for :mod:`hmac` is deprecated. Python 3.6 will - require an explicit digest name or constructor as *digestmod* argument. 
+* ``MD5`` as the default *digestmod* for the :func:`hmac.new` function is + deprecated. Python 3.6 will require an explicit digest name or constructor as + *digestmod* argument. * The internal ``Netrc`` class in the :mod:`ftplib` module has been documented as deprecated in its docstring for quite some time. It now emits a :exc:`DeprecationWarning` and will be removed completely in Python 3.5. +* The undocumented *endtime* argument to :meth:`subprocess.Popen.wait` should + not have been exposed and is hopefully not in use; it is deprecated and + will mostly likely be removed in Python 3.5. + +* The *strict* argument of :class:`~html.parser.HTMLParser` is deprecated. + +* The :mod:`plistlib` :func:`~plistlib.readPlist`, + :func:`~plistlib.writePlist`, :func:`~plistlib.readPlistFromBytes`, and + :func:`~plistlib.writePlistToBytes` functions are deprecated in favor of the + corresponding new functions :func:`~plistlib.load`, :func:`~plistlib.dump`, + :func:`~plistlib.loads`, and :func:`~plistlib.dumps`. :func:`~plistlib.Data` + is deprecated in favor of just using the :class:`bytes` constructor. + +* The :mod:`sysconfig` key ``SO`` is deprecated, it has been replaced by + ``EXT_SUFFIX``. -Deprecated Functions and Types in the C API -------------------------------------------- +* The ``U`` mode accepted by various ``open`` functions is deprecated. + In Python3 it does not do anything useful, and should be replaced by + appropriate uses of :class:`io.TextIOWrapper` (if needed) and its *newline* + argument. -XXX: None so far +* The *parser* argument of :func:`xml.etree.ElementTree.iterparse` has + been deprecated, as has the *html* argument of + :func:`~xml.etree.ElementTree.XMLParser`. To prepare for the removal of the + latter, all arguments to ``XMLParser`` should be passed by keyword. Deprecated Features @@ -1571,6 +2158,7 @@ and build tools: * OS/2 (:issue:`16135`). * Windows 2000 (changeset e52df05b496a). +* Windows systems where ``COMSPEC`` points to ``command.com`` (:issue:`14470`). * VMS (:issue:`16136`). @@ -1588,7 +2176,8 @@ removed: ``SHLIB_SUFFIX`` and ``EXT_SUFFIX`` macros) (:issue:`16754`). * The ``PyThreadState.tick_counter`` field has been removed; its value has - been meaningless since Python 3.2, when the "new GIL" was introduced. + been meaningless since Python 3.2, when the "new GIL" was introduced + (:issue:`19199`). * ``PyLoader`` and ``PyPycLoader`` have been removed from :mod:`importlib`. (Contributed by Taras Lyapun in :issue:`15641`.) @@ -1608,6 +2197,15 @@ removed: * :class:`inspect.Signature`: positional-only parameters are now required to have a valid name. +* :meth:`object.__format__` no longer accepts non-empty format strings, it now + raises a :exc:`TypeError` instead. Using a non-empty string has been + deprecated since Python 3.2. This change has been made to prevent a + situation where previously working (but incorrect) code would start failing + if an object gained a __format__ method, which means that your code may now + raise a :exc:`TypeError` if you are using an ``'s'`` format code with objects + that do not have a __format__ method that handles it. See :issue:`7994` for + background. + * :meth:`difflib.SequenceMatcher.isbjunk` and :meth:`difflib.SequenceMatcher.isbpopular` were deprecated in 3.2, and have now been removed: use ``x in sm.bjunk`` and @@ -1626,6 +2224,9 @@ Code Cleanups ``_mac_ver_gstalt``, and ``_bcd2str``, which would only have ever been called on badly broken OSX systems (see :issue:`18393`). 
+* The hardcoded copies of certain :mod:`stat` constants that were included in + the :mod:`tarfile` module namespace have been removed. + Porting to Python 3.4 @@ -1634,6 +2235,27 @@ Porting to Python 3.4 This section lists previously described changes and other bugfixes that may require changes to your code. + +Changes in 'python' Command Behavior +------------------------------------ + +* In a posix shell, setting the :envvar:`PATH` environment variable to + an empty value is equivalent to not setting it at all. However, setting + :envvar:`PYTHONPATH` to an empty value was *not* equivalent to not setting it + at all: setting :envvar:`PYTHONPATH` to an empty value was equivalent to + setting it to ``.``, which leads to confusion when reasoning by analogy to + how :envvar:`PATH` works. The behavior now conforms to the posix convention + for :envvar:`PATH`. + +* The [X refs, Y blocks] output of a debug (``--with-pydebug``) build of the + CPython interpreter is now off by default. It can be re-enabled using the + ``-X showrefcount`` option. (Contributed by Ezio Melotti in :issue:`17323`.) + +* The python command and most stdlib scripts (as well as :mod:`argparse`) now + output ``--version`` information to ``stdout`` instead of ``stderr`` (for + issue list see :ref:`other-improvements-3.4` above). + + Changes in the Python API ------------------------- @@ -1646,7 +2268,7 @@ Changes in the Python API * The module type now initializes the :attr:`__package__` and :attr:`__loader__` attributes to ``None`` by default. To determine if these attributes were set in a backwards-compatible fashion, use e.g. - ``getattr(module, '__loader__', None) is not None``. + ``getattr(module, '__loader__', None) is not None``. (:issue:`17115`.) * :meth:`importlib.util.module_for_loader` now sets ``__loader__`` and ``__package__`` unconditionally to properly support reloading. If this is not @@ -1655,10 +2277,23 @@ Changes in the Python API * Import now resets relevant attributes (e.g. ``__name__``, ``__loader__``, ``__package__``, ``__file__``, ``__cached__``) unconditionally when reloading. + Note that this restores a pre-3.3 behavior in that it means a module is + re-found when re-loaded (:issue:`19413`). * Frozen packages no longer set ``__path__`` to a list containing the package - name but an empty list instead. Determing if a module is a package should be - done using ``hasattr(module, '__path__')``. + name, they now set it to an empty list. The previous behavior could cause + the import system to do the wrong thing on submodule imports if there was + also a directory with the same name as the frozen package. The correct way + to determine if a module is a package or not is to use``hasattr(module, + '__path__')`` (:issue:`18065`). + +* Frozen modules no longer define a ``__file__`` attribute. It's semantically + incorrect for frozen modules to set the attribute as they are not loaded from + any explicit location. If you must know that a module comes from frozen code + then you can see if the module's ``__spec__.location`` is set to ``'frozen'``, + check if the loader is a subclass of + :class:`importlib.machinery.FrozenImporter`, + or if Python 2 compatibility is necessary you can use :func:`imp.is_frozen`. * :func:`py_compile.compile` now raises :exc:`FileExistsError` if the file path it would write to is a symlink or a non-regular file. 
This is to act as a @@ -1684,14 +2319,24 @@ Changes in the Python API :func:`inspect.unwrap` to access the first function in the chain that has no ``__wrapped__`` attribute. +* :func:`inspect.getfullargspec` has been reimplemented on top of + :func:`inspect.signature` and hence handles a much wider variety of callable + objects than it did in the past. It is expected that additional builtin and + extension module callables will gain signature metadata over the course of + the Python 3.4 series. Code that assumes that + :func:`inspect.getfullargspec` will fail on non-Python callables may need + to be adjusted accordingly. + * :class:`importlib.machinery.PathFinder` now passes on the current working directory to objects in :data:`sys.path_hooks` for the empty string. This results in :data:`sys.path_importer_cache` never containing ``''``, thus iterating through :data:`sys.path_importer_cache` based on :data:`sys.path` will not find all keys. A module's ``__file__`` when imported in the current working directory will also now have an absolute path, including when using - ``-m`` with the interpreter (this does not influence when the path to a file - is specified on the command-line). + ``-m`` with the interpreter (except for ``__main__.__file__`` when a script + has been executed directly using a relative path) (Contributed by Brett + Cannon in :issue:`18416`). is specified on the command-line) + (:issue:`18416`). * The removal of the *strict* argument to :class:`~http.client.HTTPConnection` and :class:`~http.client.HTTPSConnection` changes the meaning of the @@ -1729,10 +2374,106 @@ Changes in the Python API informative :exc:`ValueError` rather than the previous more mysterious :exc:`AttributeError` (:issue:`9177`). -* Parameter names in ``__annotations__`` dict are now mangled properly, +* :meth:`slice.indices` no longer produces an :exc:`OverflowError` for huge + values. As a consequence of this fix, :meth:`slice.indices` now raises a + :exc:`ValueError` if given a negative length; previously it returned nonsense + values (:issue:`14794`). + +* The :class:`complex` constructor, unlike the :mod:`cmath` functions, was + incorrectly accepting :class:`float` values if an object's ``__complex__`` + special method returned one. This now raises a :exc:`TypeError`. + (:issue:`16290`.) + +* The :class:`int` constructor in 3.2 and 3.3 erroneously accepts :class:`float` + values for the *base* parameter. It is unlikely anyone was doing this, but + if so, it will now raise a :exc:`TypeError` (:issue:`16772`). + +* Defaults for keyword-only arguments are now evaluated *after* defaults for + regular keyword arguments, instead of before. Hopefully no one wrote any + code that depends on the previous buggy behavior (:issue:`16967`). + +* Stale thread states are now cleared after :func:`~os.fork`. This may cause + some system resources to be released that previously were incorrectly kept + perpetually alive (for example, database connections kept in thread-local + storage). (:issue:`17094`.) + +* Parameter names in ``__annotations__`` dicts are now mangled properly, similarly to ``__kwdefaults__``. (Contributed by Yury Selivanov in :issue:`20625`). +* :attr:`hashlib.hash.name` now always returns the identifier in lower case. + Previously some builtin hashes had uppercase names, but now that it is a + formal public interface the naming has been made consistent (:issue:`18532`). 
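+
+  A minimal sketch of the now-consistent naming (not taken from the original
+  text; the digest used here is an arbitrary choice)::
+
+      import hashlib
+
+      h = hashlib.new('sha256')
+      assert h.name == 'sha256'  # the name is always reported in lower case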
+ +* Because :mod:`unittest.TestSuite` now drops references to tests after they + are run, test harnesses that re-use a :class:`~unittest.TestSuite` to re-run + a set of tests may fail. Test suites should not be re-used in this fashion + since it means state is retained between test runs, breaking the test + isolation that :mod:`unittest` is designed to provide. However, if the lack + of isolation is considered acceptable, the old behavior can be restored by + creating a :mod:`~unittest.TestSuite` subclass that defines a + ``_removeTestAtIndex`` method that does nothing (see + :meth:`.TestSuite.__iter__`) (:issue:`11798`). + +* :mod:`unittest` now uses :mod:`argparse` for command line parsing. There are + certain invalid command forms that used to work that are no longer allowed; + in theory this should not cause backward compatibility issues since the + disallowed command forms didn't make any sense and are unlikely to be in use. + +* The :func:`re.split`, :func:`re.findall`, and :func:`re.sub` functions, and + the :meth:`~re.match.group` and :meth:`~re.match.groups` methods of + ``match`` objects now always return a *bytes* object when the string + to be matched is a :term:`bytes-like object`. Previously the return type + matched the input type, so if your code was depending on the return value + being, say, a ``bytearray``, you will need to change your code. + +* :mod:`audioop` functions now raise an error immediately if passed string + input, instead of failing randomly later on (:issue:`16685`). + +* The new *convert_charrefs* argument to :class:`~html.parser.HTMLParser` + currently defaults to ``False`` for backward compatibility, but will + eventually be changed to default to ``True``. It is recommended that you add + this keyword, with the appropriate value, to any + :class:`~html.parser.HTMLParser` calls in your code (:issue:`13633`). + +* Since the *digestmod* argument to the :func:`hmac.new` function will in the + future have no default, all calls to :func:`hmac.new` should be changed to + explicitly specify a *digestmod* (:issue:`17276`). + +* Calling :func:`sysconfig.get_config_var` with the ``SO`` key, or looking + ``SO`` up in the results of a call to :func:`sysconfig.get_config_vars` + is deprecated. This key should be replaced by ``EXT_SUFFIX`` or + ``SHLIB_SUFFIX``, depending on the context (:issue:`19555`). + +* Any calls to ``open`` functions that specify ``U`` should be modified. + ``U`` is ineffective in Python3 and will eventually raise an error if used. + Depending on the function, the equivalent of its old Python2 behavior can be + achieved using either a *newline* argument, or if necessary by wrapping the + stream in :mod:`~io.TextIOWrapper` to use its *newline* argument + (:issue:`15204`). + +* If you use :ref:`pyvenv ` in a script and desire that pip + *not* be installed, you must add ``--without-pip`` to your command + invocation. + +* The default behavior of :func:`json.dump` and :func:`json.dumps` when + an indent is specified has changed: it no longer produces trailing + spaces after the item separating commas at the ends of lines. This + will matter only if you have tests that are doing white-space-sensitive + comparisons of such output (:issue:`16333`). + +* :mod:`doctest` now looks for doctests in extension module ``__doc__`` + strings, so if your doctest test discovery includes extension modules that + have things that look like doctests in them you may see test failures you've + never seen before when running your tests (:issue:`3158`). 
+ +* The :mod:`collections.abc` module has been slightly refactored as + part of the Python startup improvements. As a consequence of this, it is no + longer the case that importing :mod:`collections` automatically imports + :mod:`collections.abc`. If your program depended on the (undocumented) + implicit import, you will need to add an explicit ``import collections.abc`` + (:issue:`20784`). + Changes in the C API -------------------- @@ -1756,7 +2497,8 @@ Changes in the C API * The result of the :c:data:`PyOS_ReadlineFunctionPointer` callback must now be a string allocated by :c:func:`PyMem_RawMalloc` or :c:func:`PyMem_RawRealloc`, or *NULL* if an error occurred, instead of a - string allocated by :c:func:`PyMem_Malloc` or :c:func:`PyMem_Realloc`. + string allocated by :c:func:`PyMem_Malloc` or :c:func:`PyMem_Realloc` + (:issue:`16742`) * :c:func:`PyThread_set_key_value` now always set the value. In Python 3.3, the function did nothing if the key already exists (if the current diff --git a/Misc/NEWS b/Misc/NEWS index 7fd12a6..f3685ca 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -7,15 +7,17 @@ What's New in Python 3.4.0? Release date: 2014-03-16 -Core and Builtins ------------------ - Library ------- - Issue #20939: Fix test_geturl failure in test_urllibnet due to new redirect of http://www.python.org/ to https://www.python.org. +Documentation +------------- + +- Merge in all documentation changes since branching 3.4.0rc1. + What's New in Python 3.4.0 release candidate 3? =============================================== -- cgit v0.12 From 0548f5c5141b385594b3bad02fa62e8dfe17d51e Mon Sep 17 00:00:00 2001 From: Larry Hastings Date: Sat, 15 Mar 2014 22:29:19 -0700 Subject: Regenerate pydoc_topics, fix markup errors, in preparation for 3.4.0 final. --- Doc/tools/sphinxext/susp-ignored.csv | 1 - Doc/whatsnew/3.4.rst | 2 +- Lib/pydoc_data/topics.py | 8 ++++---- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/Doc/tools/sphinxext/susp-ignored.csv b/Doc/tools/sphinxext/susp-ignored.csv index f9d6bbf..1769023 100644 --- a/Doc/tools/sphinxext/susp-ignored.csv +++ b/Doc/tools/sphinxext/susp-ignored.csv @@ -282,4 +282,3 @@ whatsnew/changelog,,:PythonCmd,"With Tk < 8.5 _tkinter.c:PythonCmd() raised Unic whatsnew/changelog,,::,": Fix FTP tests for IPv6, bind to ""::1"" instead of ""localhost""." whatsnew/changelog,,::,": Use ""127.0.0.1"" or ""::1"" instead of ""localhost"" as much as" whatsnew/changelog,,:password,user:password -whatsnew/changelog,,:gz,w:gz diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index 9cf51bd..a4f6cb3 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -2284,7 +2284,7 @@ Changes in the Python API name, they now set it to an empty list. The previous behavior could cause the import system to do the wrong thing on submodule imports if there was also a directory with the same name as the frozen package. The correct way - to determine if a module is a package or not is to use``hasattr(module, + to determine if a module is a package or not is to use ``hasattr(module, '__path__')`` (:issue:`18065`). * Frozen modules no longer define a ``__file__`` attribute. 
It's semantically diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index ec2a713..16c911e 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Sun Mar 9 03:59:56 2014 +# Autogenerated by Sphinx on Sat Mar 15 22:21:45 2014 topics = {'assert': '\nThe "assert" statement\n**********************\n\nAssert statements are a convenient way to insert debugging assertions\ninto a program:\n\n assert_stmt ::= "assert" expression ["," expression]\n\nThe simple form, "assert expression", is equivalent to\n\n if __debug__:\n if not expression: raise AssertionError\n\nThe extended form, "assert expression1, expression2", is equivalent to\n\n if __debug__:\n if not expression1: raise AssertionError(expression2)\n\nThese equivalences assume that "__debug__" and "AssertionError" refer\nto the built-in variables with those names. In the current\nimplementation, the built-in variable "__debug__" is "True" under\nnormal circumstances, "False" when optimization is requested (command\nline option -O). The current code generator emits no code for an\nassert statement when optimization is requested at compile time. Note\nthat it is unnecessary to include the source code for the expression\nthat failed in the error message; it will be displayed as part of the\nstack trace.\n\nAssignments to "__debug__" are illegal. The value for the built-in\nvariable is determined when the interpreter starts.\n', 'assignment': '\nAssignment statements\n*********************\n\nAssignment statements are used to (re)bind names to values and to\nmodify attributes or items of mutable objects:\n\n assignment_stmt ::= (target_list "=")+ (expression_list | yield_expression)\n target_list ::= target ("," target)* [","]\n target ::= identifier\n | "(" target_list ")"\n | "[" target_list "]"\n | attributeref\n | subscription\n | slicing\n | "*" target\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn assignment statement evaluates the expression list (remember that\nthis can be a single expression or a comma-separated list, the latter\nyielding a tuple) and assigns the single resulting object to each of\nthe target lists, from left to right.\n\nAssignment is defined recursively depending on the form of the target\n(list). When a target is part of a mutable object (an attribute\nreference, subscription or slicing), the mutable object must\nultimately perform the assignment and decide about its validity, and\nmay raise an exception if the assignment is unacceptable. The rules\nobserved by various types and the exceptions raised are given with the\ndefinition of the object types (see section *The standard type\nhierarchy*).\n\nAssignment of an object to a target list, optionally enclosed in\nparentheses or square brackets, is recursively defined as follows.\n\n* If the target list is a single target: The object is assigned to\n that target.\n\n* If the target list is a comma-separated list of targets: The\n object must be an iterable with the same number of items as there\n are targets in the target list, and the items are assigned, from\n left to right, to the corresponding targets.\n\n * If the target list contains one target prefixed with an\n asterisk, called a "starred" target: The object must be a sequence\n with at least as many items as there are targets in the target\n list, minus one. The first items of the sequence are assigned,\n from left to right, to the targets before the starred target. 
The\n final items of the sequence are assigned to the targets after the\n starred target. A list of the remaining items in the sequence is\n then assigned to the starred target (the list can be empty).\n\n * Else: The object must be a sequence with the same number of\n items as there are targets in the target list, and the items are\n assigned, from left to right, to the corresponding targets.\n\nAssignment of an object to a single target is recursively defined as\nfollows.\n\n* If the target is an identifier (name):\n\n * If the name does not occur in a "global" or "nonlocal" statement\n in the current code block: the name is bound to the object in the\n current local namespace.\n\n * Otherwise: the name is bound to the object in the global\n namespace or the outer namespace determined by "nonlocal",\n respectively.\n\n The name is rebound if it was already bound. This may cause the\n reference count for the object previously bound to the name to reach\n zero, causing the object to be deallocated and its destructor (if it\n has one) to be called.\n\n* If the target is a target list enclosed in parentheses or in\n square brackets: The object must be an iterable with the same number\n of items as there are targets in the target list, and its items are\n assigned, from left to right, to the corresponding targets.\n\n* If the target is an attribute reference: The primary expression in\n the reference is evaluated. It should yield an object with\n assignable attributes; if this is not the case, "TypeError" is\n raised. That object is then asked to assign the assigned object to\n the given attribute; if it cannot perform the assignment, it raises\n an exception (usually but not necessarily "AttributeError").\n\n Note: If the object is a class instance and the attribute reference\n occurs on both sides of the assignment operator, the RHS expression,\n "a.x" can access either an instance attribute or (if no instance\n attribute exists) a class attribute. The LHS target "a.x" is always\n set as an instance attribute, creating it if necessary. Thus, the\n two occurrences of "a.x" do not necessarily refer to the same\n attribute: if the RHS expression refers to a class attribute, the\n LHS creates a new instance attribute as the target of the\n assignment:\n\n class Cls:\n x = 3 # class variable\n inst = Cls()\n inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x as 3\n\n This description does not necessarily apply to descriptor\n attributes, such as properties created with "property()".\n\n* If the target is a subscription: The primary expression in the\n reference is evaluated. It should yield either a mutable sequence\n object (such as a list) or a mapping object (such as a dictionary).\n Next, the subscript expression is evaluated.\n\n If the primary is a mutable sequence object (such as a list), the\n subscript must yield an integer. If it is negative, the sequence\'s\n length is added to it. The resulting value must be a nonnegative\n integer less than the sequence\'s length, and the sequence is asked\n to assign the assigned object to its item with that index. If the\n index is out of range, "IndexError" is raised (assignment to a\n subscripted sequence cannot add new items to a list).\n\n If the primary is a mapping object (such as a dictionary), the\n subscript must have a type compatible with the mapping\'s key type,\n and the mapping is then asked to create a key/datum pair which maps\n the subscript to the assigned object. 
This can either replace an\n existing key/value pair with the same key value, or insert a new\n key/value pair (if no key with the same value existed).\n\n For user-defined objects, the "__setitem__()" method is called with\n appropriate arguments.\n\n* If the target is a slicing: The primary expression in the\n reference is evaluated. It should yield a mutable sequence object\n (such as a list). The assigned object should be a sequence object\n of the same type. Next, the lower and upper bound expressions are\n evaluated, insofar they are present; defaults are zero and the\n sequence\'s length. The bounds should evaluate to integers. If\n either bound is negative, the sequence\'s length is added to it. The\n resulting bounds are clipped to lie between zero and the sequence\'s\n length, inclusive. Finally, the sequence object is asked to replace\n the slice with the items of the assigned sequence. The length of\n the slice may be different from the length of the assigned sequence,\n thus changing the length of the target sequence, if the object\n allows it.\n\n**CPython implementation detail:** In the current implementation, the\nsyntax for targets is taken to be the same as for expressions, and\ninvalid syntax is rejected during the code generation phase, causing\nless detailed error messages.\n\nWARNING: Although the definition of assignment implies that overlaps\nbetween the left-hand side and the right-hand side are \'safe\' (for\nexample "a, b = b, a" swaps two variables), overlaps *within* the\ncollection of assigned-to variables are not safe! For instance, the\nfollowing program prints "[0, 2]":\n\n x = [0, 1]\n i = 0\n i, x[i] = 1, 2\n print(x)\n\nSee also: **PEP 3132** - Extended Iterable Unpacking\n\n The specification for the "*target" feature.\n\n\nAugmented assignment statements\n===============================\n\nAugmented assignment is the combination, in a single statement, of a\nbinary operation and an assignment statement:\n\n augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression)\n augtarget ::= identifier | attributeref | subscription | slicing\n augop ::= "+=" | "-=" | "*=" | "/=" | "//=" | "%=" | "**="\n | ">>=" | "<<=" | "&=" | "^=" | "|="\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn augmented assignment evaluates the target (which, unlike normal\nassignment statements, cannot be an unpacking) and the expression\nlist, performs the binary operation specific to the type of assignment\non the two operands, and assigns the result to the original target.\nThe target is only evaluated once.\n\nAn augmented assignment expression like "x += 1" can be rewritten as\n"x = x + 1" to achieve a similar, but not exactly equal effect. In the\naugmented version, "x" is only evaluated once. Also, when possible,\nthe actual operation is performed *in-place*, meaning that rather than\ncreating a new object and assigning that to the target, the old object\nis modified instead.\n\nWith the exception of assigning to tuples and multiple targets in a\nsingle statement, the assignment done by augmented assignment\nstatements is handled the same way as normal assignments. 
Similarly,\nwith the exception of the possible *in-place* behavior, the binary\noperation performed by augmented assignment is the same as the normal\nbinary operations.\n\nFor targets which are attribute references, the same *caveat about\nclass and instance attributes* applies as for regular assignments.\n', 'atom-identifiers': '\nIdentifiers (Names)\n*******************\n\nAn identifier occurring as an atom is a name. See section\n*Identifiers and keywords* for lexical definition and section *Naming\nand binding* for documentation of naming and binding.\n\nWhen the name is bound to an object, evaluation of the atom yields\nthat object. When a name is not bound, an attempt to evaluate it\nraises a "NameError" exception.\n\n**Private name mangling:** When an identifier that textually occurs in\na class definition begins with two or more underscore characters and\ndoes not end in two or more underscores, it is considered a *private\nname* of that class. Private names are transformed to a longer form\nbefore code is generated for them. The transformation inserts the\nclass name, with leading underscores removed and a single underscore\ninserted, in front of the name. For example, the identifier "__spam"\noccurring in a class named "Ham" will be transformed to "_Ham__spam".\nThis transformation is independent of the syntactical context in which\nthe identifier is used. If the transformed name is extremely long\n(longer than 255 characters), implementation defined truncation may\nhappen. If the class name consists only of underscores, no\ntransformation is done.\n', @@ -23,7 +23,7 @@ topics = {'assert': '\nThe "assert" statement\n**********************\n\nAssert 'context-managers': '\nWith Statement Context Managers\n*******************************\n\nA *context manager* is an object that defines the runtime context to\nbe established when executing a "with" statement. The context manager\nhandles the entry into, and the exit from, the desired runtime context\nfor the execution of the block of code. Context managers are normally\ninvoked using the "with" statement (described in section *The with\nstatement*), but can also be used by directly invoking their methods.\n\nTypical uses of context managers include saving and restoring various\nkinds of global state, locking and unlocking resources, closing opened\nfiles, etc.\n\nFor more information on context managers, see *Context Manager Types*.\n\nobject.__enter__(self)\n\n Enter the runtime context related to this object. The "with"\n statement will bind this method\'s return value to the target(s)\n specified in the "as" clause of the statement, if any.\n\nobject.__exit__(self, exc_type, exc_value, traceback)\n\n Exit the runtime context related to this object. The parameters\n describe the exception that caused the context to be exited. If the\n context was exited without an exception, all three arguments will\n be "None".\n\n If an exception is supplied, and the method wishes to suppress the\n exception (i.e., prevent it from being propagated), it should\n return a true value. 
Otherwise, the exception will be processed\n normally upon exit from this method.\n\n Note that "__exit__()" methods should not reraise the passed-in\n exception; this is the caller\'s responsibility.\n\nSee also: **PEP 0343** - The "with" statement\n\n The specification, background, and examples for the Python "with"\n statement.\n', 'continue': '\nThe "continue" statement\n************************\n\n continue_stmt ::= "continue"\n\n"continue" may only occur syntactically nested in a "for" or "while"\nloop, but not nested in a function or class definition or "finally"\nclause within that loop. It continues with the next cycle of the\nnearest enclosing loop.\n\nWhen "continue" passes control out of a "try" statement with a\n"finally" clause, that "finally" clause is executed before really\nstarting the next loop cycle.\n', 'conversions': '\nArithmetic conversions\n**********************\n\nWhen a description of an arithmetic operator below uses the phrase\n"the numeric arguments are converted to a common type," this means\nthat the operator implementation for built-in types works that way:\n\n* If either argument is a complex number, the other is converted to\n complex;\n\n* otherwise, if either argument is a floating point number, the\n other is converted to floating point;\n\n* otherwise, both must be integers and no conversion is necessary.\n\nSome additional rules apply for certain operators (e.g., a string left\nargument to the \'%\' operator). Extensions must define their own\nconversion behavior.\n', - 'customization': '\nBasic customization\n*******************\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. "__new__()" is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). The return\n value of "__new__()" should be the new object instance (usually an\n instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s "__new__()" method using\n "super(currentclass, cls).__new__(cls[, ...])" with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If "__new__()" returns an instance of *cls*, then the new\n instance\'s "__init__()" method will be invoked like\n "__init__(self[, ...])", where *self* is the new instance and the\n remaining arguments are the same as were passed to "__new__()".\n\n If "__new__()" does not return an instance of *cls*, then the new\n instance\'s "__init__()" method will not be invoked.\n\n "__new__()" is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. If a base class has an\n "__init__()" method, the derived class\'s "__init__()" method, if\n any, must explicitly call it to ensure proper initialization of the\n base class part of the instance; for example:\n "BaseClass.__init__(self, [args...])". As a special constraint on\n constructors, no value may be returned; doing so will cause a\n "TypeError" to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. 
This is also\n called a destructor. If a base class has a "__del__()" method, the\n derived class\'s "__del__()" method, if any, must explicitly call it\n to ensure proper deletion of the base class part of the instance.\n Note that it is possible (though not recommended!) for the\n "__del__()" method to postpone destruction of the instance by\n creating a new reference to it. It may then be called at a later\n time when this new reference is deleted. It is not guaranteed that\n "__del__()" methods are called for objects that still exist when\n the interpreter exits.\n\n Note: "del x" doesn\'t directly call "x.__del__()" --- the former\n decrements the reference count for "x" by one, and the latter is\n only called when "x"\'s reference count reaches zero. Some common\n situations that may prevent the reference count of an object from\n going to zero include: circular references between objects (e.g.,\n a doubly-linked list or a tree data structure with parent and\n child pointers); a reference to the object on the stack frame of\n a function that caught an exception (the traceback stored in\n "sys.exc_info()[2]" keeps the stack frame alive); or a reference\n to the object on the stack frame that raised an unhandled\n exception in interactive mode (the traceback stored in\n "sys.last_traceback" keeps the stack frame alive). The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing "None" in\n "sys.last_traceback". Circular references which are garbage are\n detected and cleaned up when the cyclic garbage collector is\n enabled (it\'s on by default). Refer to the documentation for the\n "gc" module for more information about this topic.\n\n Warning: Due to the precarious circumstances under which\n "__del__()" methods are invoked, exceptions that occur during\n their execution are ignored, and a warning is printed to\n "sys.stderr" instead. Also, when "__del__()" is invoked in\n response to a module being deleted (e.g., when execution of the\n program is done), other globals referenced by the "__del__()"\n method may already have been deleted or in the process of being\n torn down (e.g. the import machinery shutting down). For this\n reason, "__del__()" methods should do the absolute minimum needed\n to maintain external invariants. Starting with version 1.5,\n Python guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the "__del__()" method is called.\n\nobject.__repr__(self)\n\n Called by the "repr()" built-in function to compute the "official"\n string representation of an object. If at all possible, this\n should look like a valid Python expression that could be used to\n recreate an object with the same value (given an appropriate\n environment). If this is not possible, a string of the form\n "<...some useful description...>" should be returned. The return\n value must be a string object. 
If a class defines "__repr__()" but\n not "__str__()", then "__repr__()" is also used when an "informal"\n string representation of instances of that class is required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by "str(object)" and the built-in functions "format()" and\n "print()" to compute the "informal" or nicely printable string\n representation of an object. The return value must be a *string*\n object.\n\n This method differs from "object.__repr__()" in that there is no\n expectation that "__str__()" return a valid Python expression: a\n more convenient or concise representation can be used.\n\n The default implementation defined by the built-in type "object"\n calls "object.__repr__()".\n\nobject.__bytes__(self)\n\n Called by "bytes()" to compute a byte-string representation of an\n object. This should return a "bytes" object.\n\nobject.__format__(self, format_spec)\n\n Called by the "format()" built-in function (and by extension, the\n "str.format()" method of class "str") to produce a "formatted"\n string representation of an object. The "format_spec" argument is a\n string that contains a description of the formatting options\n desired. The interpretation of the "format_spec" argument is up to\n the type implementing "__format__()", however most classes will\n either delegate formatting to one of the built-in types, or use a\n similar formatting option syntax.\n\n See *Format Specification Mini-Language* for a description of the\n standard formatting syntax.\n\n The return value must be a string object.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n These are the so-called "rich comparison" methods. The\n correspondence between operator symbols and method names is as\n follows: "xy" calls\n "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".\n\n A rich comparison method may return the singleton "NotImplemented"\n if it does not implement the operation for a given pair of\n arguments. By convention, "False" and "True" are returned for a\n successful comparison. However, these methods can return any value,\n so if the comparison operator is used in a Boolean context (e.g.,\n in the condition of an "if" statement), Python will call "bool()"\n on the value to determine if the result is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of "x==y" does not imply that "x!=y" is false.\n Accordingly, when defining "__eq__()", one should also define\n "__ne__()" so that the operators will behave as expected. 
See the\n paragraph on "__hash__()" for some important notes on creating\n *hashable* objects which support custom comparison operations and\n are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, "__lt__()" and "__gt__()" are each other\'s\n reflection, "__le__()" and "__ge__()" are each other\'s reflection,\n and "__eq__()" and "__ne__()" are their own reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see "functools.total_ordering()".\n\nobject.__hash__(self)\n\n Called by built-in function "hash()" and for operations on members\n of hashed collections including "set", "frozenset", and "dict".\n "__hash__()" should return an integer. The only required property\n is that objects which compare equal have the same hash value; it is\n advised to somehow mix together (e.g. using exclusive or) the hash\n values for the components of the object that also play a part in\n comparison of objects.\n\n Note: "hash()" truncates the value returned from an object\'s\n custom "__hash__()" method to the size of a "Py_ssize_t". This\n is typically 8 bytes on 64-bit builds and 4 bytes on 32-bit\n builds. If an object\'s "__hash__()" must interoperate on builds\n of different bit sizes, be sure to check the width on all\n supported builds. An easy way to do this is with "python -c\n "import sys; print(sys.hash_info.width)""\n\n If a class does not define an "__eq__()" method it should not\n define a "__hash__()" operation either; if it defines "__eq__()"\n but not "__hash__()", its instances will not be usable as items in\n hashable collections. If a class defines mutable objects and\n implements an "__eq__()" method, it should not implement\n "__hash__()", since the implementation of hashable collections\n requires that a key\'s hash value is immutable (if the object\'s hash\n value changes, it will be in the wrong hash bucket).\n\n User-defined classes have "__eq__()" and "__hash__()" methods by\n default; with them, all objects compare unequal (except with\n themselves) and "x.__hash__()" returns an appropriate value such\n that "x == y" implies both that "x is y" and "hash(x) == hash(y)".\n\n A class that overrides "__eq__()" and does not define "__hash__()"\n will have its "__hash__()" implicitly set to "None". When the\n "__hash__()" method of a class is "None", instances of the class\n will raise an appropriate "TypeError" when a program attempts to\n retrieve their hash value, and will also be correctly identified as\n unhashable when checking "isinstance(obj, collections.Hashable").\n\n If a class that overrides "__eq__()" needs to retain the\n implementation of "__hash__()" from a parent class, the interpreter\n must be told this explicitly by setting "__hash__ =\n .__hash__".\n\n If a class that does not override "__eq__()" wishes to suppress\n hash support, it should include "__hash__ = None" in the class\n definition. 
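To make the "__eq__()"/"__hash__()" requirement above concrete, here is a small sketch (the Account class is invented for illustration): equal objects hash equal because both methods use the same data.

   >>> class Account:
   ...     def __init__(self, number):
   ...         self.number = number
   ...     def __eq__(self, other):
   ...         return isinstance(other, Account) and self.number == other.number
   ...     def __hash__(self):
   ...         # Objects that compare equal must have the same hash value,
   ...         # so hash the same data that __eq__() compares.
   ...         return hash(self.number)
   ...
   >>> len({Account(42), Account(42)})
   1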
A class which defines its own "__hash__()" that\n explicitly raises a "TypeError" would be incorrectly identified as\n hashable by an "isinstance(obj, collections.Hashable)" call.\n\n Note: By default, the "__hash__()" values of str, bytes and\n datetime objects are "salted" with an unpredictable random value.\n Although they remain constant within an individual Python\n process, they are not predictable between repeated invocations of\n Python.This is intended to provide protection against a denial-\n of-service caused by carefully-chosen inputs that exploit the\n worst case performance of a dict insertion, O(n^2) complexity.\n See http://www.ocert.org/advisories/ocert-2011-003.html for\n details.Changing hash values affects the iteration order of\n dicts, sets and other mappings. Python has never made guarantees\n about this ordering (and it typically varies between 32-bit and\n 64-bit builds).See also "PYTHONHASHSEED".\n\n Changed in version 3.3: Hash randomization is enabled by default.\n\nobject.__bool__(self)\n\n Called to implement truth value testing and the built-in operation\n "bool()"; should return "False" or "True". When this method is not\n defined, "__len__()" is called, if it is defined, and the object is\n considered true if its result is nonzero. If a class defines\n neither "__len__()" nor "__bool__()", all its instances are\n considered true.\n', + 'customization': '\nBasic customization\n*******************\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. "__new__()" is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). The return\n value of "__new__()" should be the new object instance (usually an\n instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s "__new__()" method using\n "super(currentclass, cls).__new__(cls[, ...])" with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If "__new__()" returns an instance of *cls*, then the new\n instance\'s "__init__()" method will be invoked like\n "__init__(self[, ...])", where *self* is the new instance and the\n remaining arguments are the same as were passed to "__new__()".\n\n If "__new__()" does not return an instance of *cls*, then the new\n instance\'s "__init__()" method will not be invoked.\n\n "__new__()" is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. If a base class has an\n "__init__()" method, the derived class\'s "__init__()" method, if\n any, must explicitly call it to ensure proper initialization of the\n base class part of the instance; for example:\n "BaseClass.__init__(self, [args...])". As a special constraint on\n constructors, no value may be returned; doing so will cause a\n "TypeError" to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. This is also\n called a destructor. 
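As an illustrative sketch of why "__new__()" matters for immutable types, as described in the entry above (the Point class is invented for the example):

   >>> class Point(tuple):
   ...     def __new__(cls, x, y):
   ...         # tuple is immutable, so the contents must be supplied to
   ...         # __new__() rather than assigned in __init__().
   ...         return super().__new__(cls, (x, y))
   ...
   >>> Point(1, 2)
   (1, 2)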
If a base class has a "__del__()" method, the\n derived class\'s "__del__()" method, if any, must explicitly call it\n to ensure proper deletion of the base class part of the instance.\n Note that it is possible (though not recommended!) for the\n "__del__()" method to postpone destruction of the instance by\n creating a new reference to it. It may then be called at a later\n time when this new reference is deleted. It is not guaranteed that\n "__del__()" methods are called for objects that still exist when\n the interpreter exits.\n\n Note: "del x" doesn\'t directly call "x.__del__()" --- the former\n decrements the reference count for "x" by one, and the latter is\n only called when "x"\'s reference count reaches zero. Some common\n situations that may prevent the reference count of an object from\n going to zero include: circular references between objects (e.g.,\n a doubly-linked list or a tree data structure with parent and\n child pointers); a reference to the object on the stack frame of\n a function that caught an exception (the traceback stored in\n "sys.exc_info()[2]" keeps the stack frame alive); or a reference\n to the object on the stack frame that raised an unhandled\n exception in interactive mode (the traceback stored in\n "sys.last_traceback" keeps the stack frame alive). The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing "None" in\n "sys.last_traceback". Circular references which are garbage are\n detected and cleaned up when the cyclic garbage collector is\n enabled (it\'s on by default). Refer to the documentation for the\n "gc" module for more information about this topic.\n\n Warning: Due to the precarious circumstances under which\n "__del__()" methods are invoked, exceptions that occur during\n their execution are ignored, and a warning is printed to\n "sys.stderr" instead. Also, when "__del__()" is invoked in\n response to a module being deleted (e.g., when execution of the\n program is done), other globals referenced by the "__del__()"\n method may already have been deleted or in the process of being\n torn down (e.g. the import machinery shutting down). For this\n reason, "__del__()" methods should do the absolute minimum needed\n to maintain external invariants. Starting with version 1.5,\n Python guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the "__del__()" method is called.\n\nobject.__repr__(self)\n\n Called by the "repr()" built-in function to compute the "official"\n string representation of an object. If at all possible, this\n should look like a valid Python expression that could be used to\n recreate an object with the same value (given an appropriate\n environment). If this is not possible, a string of the form\n "<...some useful description...>" should be returned. The return\n value must be a string object. 
If a class defines "__repr__()" but\n not "__str__()", then "__repr__()" is also used when an "informal"\n string representation of instances of that class is required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by "str(object)" and the built-in functions "format()" and\n "print()" to compute the "informal" or nicely printable string\n representation of an object. The return value must be a *string*\n object.\n\n This method differs from "object.__repr__()" in that there is no\n expectation that "__str__()" return a valid Python expression: a\n more convenient or concise representation can be used.\n\n The default implementation defined by the built-in type "object"\n calls "object.__repr__()".\n\nobject.__bytes__(self)\n\n Called by "bytes()" to compute a byte-string representation of an\n object. This should return a "bytes" object.\n\nobject.__format__(self, format_spec)\n\n Called by the "format()" built-in function (and by extension, the\n "str.format()" method of class "str") to produce a "formatted"\n string representation of an object. The "format_spec" argument is a\n string that contains a description of the formatting options\n desired. The interpretation of the "format_spec" argument is up to\n the type implementing "__format__()", however most classes will\n either delegate formatting to one of the built-in types, or use a\n similar formatting option syntax.\n\n See *Format Specification Mini-Language* for a description of the\n standard formatting syntax.\n\n The return value must be a string object.\n\n Changed in version 3.4: The __format__ method of "object" itself\n raises a "TypeError" if passed any non-empty string.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n These are the so-called "rich comparison" methods. The\n correspondence between operator symbols and method names is as\n follows: "xy" calls\n "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".\n\n A rich comparison method may return the singleton "NotImplemented"\n if it does not implement the operation for a given pair of\n arguments. By convention, "False" and "True" are returned for a\n successful comparison. However, these methods can return any value,\n so if the comparison operator is used in a Boolean context (e.g.,\n in the condition of an "if" statement), Python will call "bool()"\n on the value to determine if the result is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of "x==y" does not imply that "x!=y" is false.\n Accordingly, when defining "__eq__()", one should also define\n "__ne__()" so that the operators will behave as expected. 
See the\n paragraph on "__hash__()" for some important notes on creating\n *hashable* objects which support custom comparison operations and\n are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, "__lt__()" and "__gt__()" are each other\'s\n reflection, "__le__()" and "__ge__()" are each other\'s reflection,\n and "__eq__()" and "__ne__()" are their own reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see "functools.total_ordering()".\n\nobject.__hash__(self)\n\n Called by built-in function "hash()" and for operations on members\n of hashed collections including "set", "frozenset", and "dict".\n "__hash__()" should return an integer. The only required property\n is that objects which compare equal have the same hash value; it is\n advised to somehow mix together (e.g. using exclusive or) the hash\n values for the components of the object that also play a part in\n comparison of objects.\n\n Note: "hash()" truncates the value returned from an object\'s\n custom "__hash__()" method to the size of a "Py_ssize_t". This\n is typically 8 bytes on 64-bit builds and 4 bytes on 32-bit\n builds. If an object\'s "__hash__()" must interoperate on builds\n of different bit sizes, be sure to check the width on all\n supported builds. An easy way to do this is with "python -c\n "import sys; print(sys.hash_info.width)""\n\n If a class does not define an "__eq__()" method it should not\n define a "__hash__()" operation either; if it defines "__eq__()"\n but not "__hash__()", its instances will not be usable as items in\n hashable collections. If a class defines mutable objects and\n implements an "__eq__()" method, it should not implement\n "__hash__()", since the implementation of hashable collections\n requires that a key\'s hash value is immutable (if the object\'s hash\n value changes, it will be in the wrong hash bucket).\n\n User-defined classes have "__eq__()" and "__hash__()" methods by\n default; with them, all objects compare unequal (except with\n themselves) and "x.__hash__()" returns an appropriate value such\n that "x == y" implies both that "x is y" and "hash(x) == hash(y)".\n\n A class that overrides "__eq__()" and does not define "__hash__()"\n will have its "__hash__()" implicitly set to "None". When the\n "__hash__()" method of a class is "None", instances of the class\n will raise an appropriate "TypeError" when a program attempts to\n retrieve their hash value, and will also be correctly identified as\n unhashable when checking "isinstance(obj, collections.Hashable").\n\n If a class that overrides "__eq__()" needs to retain the\n implementation of "__hash__()" from a parent class, the interpreter\n must be told this explicitly by setting "__hash__ =\n .__hash__".\n\n If a class that does not override "__eq__()" wishes to suppress\n hash support, it should include "__hash__ = None" in the class\n definition. 
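For the "functools.total_ordering()" shortcut mentioned above, a minimal sketch (the Grade class is invented for the example); defining only "__eq__()" and "__lt__()" is enough for the decorator to supply the rest:

   >>> import functools
   >>> @functools.total_ordering
   ... class Grade:
   ...     def __init__(self, score):
   ...         self.score = score
   ...     def __eq__(self, other):
   ...         return self.score == other.score
   ...     def __lt__(self, other):
   ...         return self.score < other.score
   ...
   >>> Grade(90) >= Grade(75)   # __ge__() is filled in by total_ordering()
   True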
A class which defines its own "__hash__()" that\n explicitly raises a "TypeError" would be incorrectly identified as\n hashable by an "isinstance(obj, collections.Hashable)" call.\n\n Note: By default, the "__hash__()" values of str, bytes and\n datetime objects are "salted" with an unpredictable random value.\n Although they remain constant within an individual Python\n process, they are not predictable between repeated invocations of\n Python.This is intended to provide protection against a denial-\n of-service caused by carefully-chosen inputs that exploit the\n worst case performance of a dict insertion, O(n^2) complexity.\n See http://www.ocert.org/advisories/ocert-2011-003.html for\n details.Changing hash values affects the iteration order of\n dicts, sets and other mappings. Python has never made guarantees\n about this ordering (and it typically varies between 32-bit and\n 64-bit builds).See also "PYTHONHASHSEED".\n\n Changed in version 3.3: Hash randomization is enabled by default.\n\nobject.__bool__(self)\n\n Called to implement truth value testing and the built-in operation\n "bool()"; should return "False" or "True". When this method is not\n defined, "__len__()" is called, if it is defined, and the object is\n considered true if its result is nonzero. If a class defines\n neither "__len__()" nor "__bool__()", all its instances are\n considered true.\n', 'debugger': '\n"pdb" --- The Python Debugger\n*****************************\n\nThe module "pdb" defines an interactive source code debugger for\nPython programs. It supports setting (conditional) breakpoints and\nsingle stepping at the source line level, inspection of stack frames,\nsource code listing, and evaluation of arbitrary Python code in the\ncontext of any stack frame. It also supports post-mortem debugging\nand can be called under program control.\n\nThe debugger is extensible -- it is actually defined as the class\n"Pdb". This is currently undocumented but easily understood by reading\nthe source. The extension interface uses the modules "bdb" and "cmd".\n\nThe debugger\'s prompt is "(Pdb)". Typical usage to run a program under\ncontrol of the debugger is:\n\n >>> import pdb\n >>> import mymodule\n >>> pdb.run(\'mymodule.test()\')\n > (0)?()\n (Pdb) continue\n > (1)?()\n (Pdb) continue\n NameError: \'spam\'\n > (1)?()\n (Pdb)\n\nChanged in version 3.3: Tab-completion via the "readline" module is\navailable for commands and command arguments, e.g. the current global\nand local names are offered as arguments of the "p" command.\n\n"pdb.py" can also be invoked as a script to debug other scripts. For\nexample:\n\n python3 -m pdb myscript.py\n\nWhen invoked as a script, pdb will automatically enter post-mortem\ndebugging if the program being debugged exits abnormally. After post-\nmortem debugging (or after normal exit of the program), pdb will\nrestart the program. Automatic restarting preserves pdb\'s state (such\nas breakpoints) and in most cases is more useful than quitting the\ndebugger upon program\'s exit.\n\nNew in version 3.2: "pdb.py" now accepts a "-c" option that executes\ncommands as if given in a ".pdbrc" file, see *Debugger Commands*.\n\nThe typical usage to break into the debugger from a running program is\nto insert\n\n import pdb; pdb.set_trace()\n\nat the location you want to break into the debugger. 
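For example, a hard-coded breakpoint might look like this (the divide() function is invented for illustration; when the line is reached, execution pauses at a "(Pdb)" prompt):

   def divide(a, b):
       import pdb; pdb.set_trace()   # execution pauses here
       return a / b

   divide(6, 2)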
You can then\nstep through the code following this statement, and continue running\nwithout the debugger using the "continue" command.\n\nThe typical usage to inspect a crashed program is:\n\n >>> import pdb\n >>> import mymodule\n >>> mymodule.test()\n Traceback (most recent call last):\n File "", line 1, in ?\n File "./mymodule.py", line 4, in test\n test2()\n File "./mymodule.py", line 3, in test2\n print(spam)\n NameError: spam\n >>> pdb.pm()\n > ./mymodule.py(3)test2()\n -> print(spam)\n (Pdb)\n\nThe module defines the following functions; each enters the debugger\nin a slightly different way:\n\npdb.run(statement, globals=None, locals=None)\n\n Execute the *statement* (given as a string or a code object) under\n debugger control. The debugger prompt appears before any code is\n executed; you can set breakpoints and type "continue", or you can\n step through the statement using "step" or "next" (all these\n commands are explained below). The optional *globals* and *locals*\n arguments specify the environment in which the code is executed; by\n default the dictionary of the module "__main__" is used. (See the\n explanation of the built-in "exec()" or "eval()" functions.)\n\npdb.runeval(expression, globals=None, locals=None)\n\n Evaluate the *expression* (given as a string or a code object)\n under debugger control. When "runeval()" returns, it returns the\n value of the expression. Otherwise this function is similar to\n "run()".\n\npdb.runcall(function, *args, **kwds)\n\n Call the *function* (a function or method object, not a string)\n with the given arguments. When "runcall()" returns, it returns\n whatever the function call returned. The debugger prompt appears\n as soon as the function is entered.\n\npdb.set_trace()\n\n Enter the debugger at the calling stack frame. This is useful to\n hard-code a breakpoint at a given point in a program, even if the\n code is not otherwise being debugged (e.g. when an assertion\n fails).\n\npdb.post_mortem(traceback=None)\n\n Enter post-mortem debugging of the given *traceback* object. If no\n *traceback* is given, it uses the one of the exception that is\n currently being handled (an exception must be being handled if the\n default is to be used).\n\npdb.pm()\n\n Enter post-mortem debugging of the traceback found in\n "sys.last_traceback".\n\nThe "run*" functions and "set_trace()" are aliases for instantiating\nthe "Pdb" class and calling the method of the same name. If you want\nto access further features, you have to do this yourself:\n\nclass class pdb.Pdb(completekey=\'tab\', stdin=None, stdout=None, skip=None, nosigint=False)\n\n "Pdb" is the debugger class.\n\n The *completekey*, *stdin* and *stdout* arguments are passed to the\n underlying "cmd.Cmd" class; see the description there.\n\n The *skip* argument, if given, must be an iterable of glob-style\n module name patterns. The debugger will not step into frames that\n originate in a module that matches one of these patterns. [1]\n\n By default, Pdb sets a handler for the SIGINT signal (which is sent\n when the user presses Ctrl-C on the console) when you give a\n "continue" command. This allows you to break into the debugger\n again by pressing Ctrl-C. If you want Pdb not to touch the SIGINT\n handler, set *nosigint* tot true.\n\n Example call to enable tracing with *skip*:\n\n import pdb; pdb.Pdb(skip=[\'django.*\']).set_trace()\n\n New in version 3.1: The *skip* argument.\n\n New in version 3.2: The *nosigint* argument. 
Previously, a SIGINT\n handler was never set by Pdb.\n\n run(statement, globals=None, locals=None)\n runeval(expression, globals=None, locals=None)\n runcall(function, *args, **kwds)\n set_trace()\n\n See the documentation for the functions explained above.\n\n\nDebugger Commands\n=================\n\nThe commands recognized by the debugger are listed below. Most\ncommands can be abbreviated to one or two letters as indicated; e.g.\n"h(elp)" means that either "h" or "help" can be used to enter the help\ncommand (but not "he" or "hel", nor "H" or "Help" or "HELP").\nArguments to commands must be separated by whitespace (spaces or\ntabs). Optional arguments are enclosed in square brackets ("[]") in\nthe command syntax; the square brackets must not be typed.\nAlternatives in the command syntax are separated by a vertical bar\n("|").\n\nEntering a blank line repeats the last command entered. Exception: if\nthe last command was a "list" command, the next 11 lines are listed.\n\nCommands that the debugger doesn\'t recognize are assumed to be Python\nstatements and are executed in the context of the program being\ndebugged. Python statements can also be prefixed with an exclamation\npoint ("!"). This is a powerful way to inspect the program being\ndebugged; it is even possible to change a variable or call a function.\nWhen an exception occurs in such a statement, the exception name is\nprinted but the debugger\'s state is not changed.\n\nThe debugger supports *aliases*. Aliases can have parameters which\nallows one a certain level of adaptability to the context under\nexamination.\n\nMultiple commands may be entered on a single line, separated by ";;".\n(A single ";" is not used as it is the separator for multiple commands\nin a line that is passed to the Python parser.) No intelligence is\napplied to separating the commands; the input is split at the first\n";;" pair, even if it is in the middle of a quoted string.\n\nIf a file ".pdbrc" exists in the user\'s home directory or in the\ncurrent directory, it is read in and executed as if it had been typed\nat the debugger prompt. This is particularly useful for aliases. If\nboth files exist, the one in the home directory is read first and\naliases defined there can be overridden by the local file.\n\nChanged in version 3.2: ".pdbrc" can now contain commands that\ncontinue debugging, such as "continue" or "next". Previously, these\ncommands had no effect.\n\nh(elp) [command]\n\n Without argument, print the list of available commands. With a\n *command* as argument, print help about that command. "help pdb"\n displays the full documentation (the docstring of the "pdb"\n module). Since the *command* argument must be an identifier, "help\n exec" must be entered to get help on the "!" command.\n\nw(here)\n\n Print a stack trace, with the most recent frame at the bottom. An\n arrow indicates the current frame, which determines the context of\n most commands.\n\nd(own) [count]\n\n Move the current frame *count* (default one) levels down in the\n stack trace (to a newer frame).\n\nu(p) [count]\n\n Move the current frame *count* (default one) levels up in the stack\n trace (to an older frame).\n\nb(reak) [([filename:]lineno | function) [, condition]]\n\n With a *lineno* argument, set a break there in the current file.\n With a *function* argument, set a break at the first executable\n statement within that function. 
The line number may be prefixed\n with a filename and a colon, to specify a breakpoint in another\n file (probably one that hasn\'t been loaded yet). The file is\n searched on "sys.path". Note that each breakpoint is assigned a\n number to which all the other breakpoint commands refer.\n\n If a second argument is present, it is an expression which must\n evaluate to true before the breakpoint is honored.\n\n Without argument, list all breaks, including for each breakpoint,\n the number of times that breakpoint has been hit, the current\n ignore count, and the associated condition if any.\n\ntbreak [([filename:]lineno | function) [, condition]]\n\n Temporary breakpoint, which is removed automatically when it is\n first hit. The arguments are the same as for "break".\n\ncl(ear) [filename:lineno | bpnumber [bpnumber ...]]\n\n With a *filename:lineno* argument, clear all the breakpoints at\n this line. With a space separated list of breakpoint numbers, clear\n those breakpoints. Without argument, clear all breaks (but first\n ask confirmation).\n\ndisable [bpnumber [bpnumber ...]]\n\n Disable the breakpoints given as a space separated list of\n breakpoint numbers. Disabling a breakpoint means it cannot cause\n the program to stop execution, but unlike clearing a breakpoint, it\n remains in the list of breakpoints and can be (re-)enabled.\n\nenable [bpnumber [bpnumber ...]]\n\n Enable the breakpoints specified.\n\nignore bpnumber [count]\n\n Set the ignore count for the given breakpoint number. If count is\n omitted, the ignore count is set to 0. A breakpoint becomes active\n when the ignore count is zero. When non-zero, the count is\n decremented each time the breakpoint is reached and the breakpoint\n is not disabled and any associated condition evaluates to true.\n\ncondition bpnumber [condition]\n\n Set a new *condition* for the breakpoint, an expression which must\n evaluate to true before the breakpoint is honored. If *condition*\n is absent, any existing condition is removed; i.e., the breakpoint\n is made unconditional.\n\ncommands [bpnumber]\n\n Specify a list of commands for breakpoint number *bpnumber*. The\n commands themselves appear on the following lines. Type a line\n containing just "end" to terminate the commands. An example:\n\n (Pdb) commands 1\n (com) p some_variable\n (com) end\n (Pdb)\n\n To remove all commands from a breakpoint, type commands and follow\n it immediately with "end"; that is, give no commands.\n\n With no *bpnumber* argument, commands refers to the last breakpoint\n set.\n\n You can use breakpoint commands to start your program up again.\n Simply use the continue command, or step, or any other command that\n resumes execution.\n\n Specifying any command resuming execution (currently continue,\n step, next, return, jump, quit and their abbreviations) terminates\n the command list (as if that command was immediately followed by\n end). This is because any time you resume execution (even with a\n simple next or step), you may encounter another breakpoint--which\n could have its own command list, leading to ambiguities about which\n list to execute.\n\n If you use the \'silent\' command in the command list, the usual\n message about stopping at a breakpoint is not printed. This may be\n desirable for breakpoints that are to print a specific message and\n then continue. 
If none of the other commands print anything, you\n see no sign that the breakpoint was reached.\n\ns(tep)\n\n Execute the current line, stop at the first possible occasion\n (either in a function that is called or on the next line in the\n current function).\n\nn(ext)\n\n Continue execution until the next line in the current function is\n reached or it returns. (The difference between "next" and "step"\n is that "step" stops inside a called function, while "next"\n executes called functions at (nearly) full speed, only stopping at\n the next line in the current function.)\n\nunt(il) [lineno]\n\n Without argument, continue execution until the line with a number\n greater than the current one is reached.\n\n With a line number, continue execution until a line with a number\n greater or equal to that is reached. In both cases, also stop when\n the current frame returns.\n\n Changed in version 3.2: Allow giving an explicit line number.\n\nr(eturn)\n\n Continue execution until the current function returns.\n\nc(ont(inue))\n\n Continue execution, only stop when a breakpoint is encountered.\n\nj(ump) lineno\n\n Set the next line that will be executed. Only available in the\n bottom-most frame. This lets you jump back and execute code again,\n or jump forward to skip code that you don\'t want to run.\n\n It should be noted that not all jumps are allowed -- for instance\n it is not possible to jump into the middle of a "for" loop or out\n of a "finally" clause.\n\nl(ist) [first[, last]]\n\n List source code for the current file. Without arguments, list 11\n lines around the current line or continue the previous listing.\n With "." as argument, list 11 lines around the current line. With\n one argument, list 11 lines around at that line. With two\n arguments, list the given range; if the second argument is less\n than the first, it is interpreted as a count.\n\n The current line in the current frame is indicated by "->". 
If an\n exception is being debugged, the line where the exception was\n originally raised or propagated is indicated by ">>", if it differs\n from the current line.\n\n New in version 3.2: The ">>" marker.\n\nll | longlist\n\n List all source code for the current function or frame.\n Interesting lines are marked as for "list".\n\n New in version 3.2.\n\na(rgs)\n\n Print the argument list of the current function.\n\np expression\n\n Evaluate the *expression* in the current context and print its\n value.\n\n Note: "print()" can also be used, but is not a debugger command\n --- this executes the Python "print()" function.\n\npp expression\n\n Like the "p" command, except the value of the expression is pretty-\n printed using the "pprint" module.\n\nwhatis expression\n\n Print the type of the *expression*.\n\nsource expression\n\n Try to get source code for the given object and display it.\n\n New in version 3.2.\n\ndisplay [expression]\n\n Display the value of the expression if it changed, each time\n execution stops in the current frame.\n\n Without expression, list all display expressions for the current\n frame.\n\n New in version 3.2.\n\nundisplay [expression]\n\n Do not display the expression any more in the current frame.\n Without expression, clear all display expressions for the current\n frame.\n\n New in version 3.2.\n\ninteract\n\n Start an interative interpreter (using the "code" module) whose\n global namespace contains all the (global and local) names found in\n the current scope.\n\n New in version 3.2.\n\nalias [name [command]]\n\n Create an alias called *name* that executes *command*. The command\n must *not* be enclosed in quotes. Replaceable parameters can be\n indicated by "%1", "%2", and so on, while "%*" is replaced by all\n the parameters. If no command is given, the current alias for\n *name* is shown. If no arguments are given, all aliases are listed.\n\n Aliases may be nested and can contain anything that can be legally\n typed at the pdb prompt. Note that internal pdb commands *can* be\n overridden by aliases. Such a command is then hidden until the\n alias is removed. Aliasing is recursively applied to the first\n word of the command line; all other words in the line are left\n alone.\n\n As an example, here are two useful aliases (especially when placed\n in the ".pdbrc" file):\n\n # Print instance variables (usage "pi classInst")\n alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k])\n # Print instance variables in self\n alias ps pi self\n\nunalias name\n\n Delete the specified alias.\n\n! statement\n\n Execute the (one-line) *statement* in the context of the current\n stack frame. The exclamation point can be omitted unless the first\n word of the statement resembles a debugger command. To set a\n global variable, you can prefix the assignment command with a\n "global" statement on the same line, e.g.:\n\n (Pdb) global list_options; list_options = [\'-l\']\n (Pdb)\n\nrun [args ...]\nrestart [args ...]\n\n Restart the debugged Python program. If an argument is supplied,\n it is split with "shlex" and the result is used as the new\n "sys.argv". History, breakpoints, actions and debugger options are\n preserved. "restart" is an alias for "run".\n\nq(uit)\n\n Quit from the debugger. 
The program being executed is aborted.\n\n-[ Footnotes ]-\n\n[1] Whether a frame is considered to originate in a certain module\n is determined by the "__name__" in the frame globals.\n', 'del': '\nThe "del" statement\n*******************\n\n del_stmt ::= "del" target_list\n\nDeletion is recursively defined very similar to the way assignment is\ndefined. Rather than spelling it out in full details, here are some\nhints.\n\nDeletion of a target list recursively deletes each target, from left\nto right.\n\nDeletion of a name removes the binding of that name from the local or\nglobal namespace, depending on whether the name occurs in a "global"\nstatement in the same code block. If the name is unbound, a\n"NameError" exception will be raised.\n\nDeletion of attribute references, subscriptions and slicings is passed\nto the primary object involved; deletion of a slicing is in general\nequivalent to assignment of an empty slice of the right type (but even\nthis is determined by the sliced object).\n\nChanged in version 3.2: Previously it was illegal to delete a name\nfrom the local namespace if it occurs as a free variable in a nested\nblock.\n', 'dict': '\nDictionary displays\n*******************\n\nA dictionary display is a possibly empty series of key/datum pairs\nenclosed in curly braces:\n\n dict_display ::= "{" [key_datum_list | dict_comprehension] "}"\n key_datum_list ::= key_datum ("," key_datum)* [","]\n key_datum ::= expression ":" expression\n dict_comprehension ::= expression ":" expression comp_for\n\nA dictionary display yields a new dictionary object.\n\nIf a comma-separated sequence of key/datum pairs is given, they are\nevaluated from left to right to define the entries of the dictionary:\neach key object is used as a key into the dictionary to store the\ncorresponding datum. This means that you can specify the same key\nmultiple times in the key/datum list, and the final dictionary\'s value\nfor that key will be the last one given.\n\nA dict comprehension, in contrast to list and set comprehensions,\nneeds two expressions separated with a colon followed by the usual\n"for" and "if" clauses. When the comprehension is run, the resulting\nkey and value elements are inserted in the new dictionary in the order\nthey are produced.\n\nRestrictions on the types of the key values are listed earlier in\nsection *The standard type hierarchy*. (To summarize, the key type\nshould be *hashable*, which excludes all mutable objects.) Clashes\nbetween duplicate keys are not detected; the last datum (textually\nrightmost in the display) stored for a given key value prevails.\n', @@ -49,7 +49,7 @@ topics = {'assert': '\nThe "assert" statement\n**********************\n\nAssert 'naming': '\nNaming and binding\n******************\n\n*Names* refer to objects. Names are introduced by name binding\noperations. Each occurrence of a name in the program text refers to\nthe *binding* of that name established in the innermost function block\ncontaining the use.\n\nA *block* is a piece of Python program text that is executed as a\nunit. The following are blocks: a module, a function body, and a class\ndefinition. Each command typed interactively is a block. A script\nfile (a file given as standard input to the interpreter or specified\non the interpreter command line the first argument) is a code block.\nA script command (a command specified on the interpreter command line\nwith the \'**-c**\' option) is a code block. 
The string argument passed\nto the built-in functions "eval()" and "exec()" is a code block.\n\nA code block is executed in an *execution frame*. A frame contains\nsome administrative information (used for debugging) and determines\nwhere and how execution continues after the code block\'s execution has\ncompleted.\n\nA *scope* defines the visibility of a name within a block. If a local\nvariable is defined in a block, its scope includes that block. If the\ndefinition occurs in a function block, the scope extends to any blocks\ncontained within the defining one, unless a contained block introduces\na different binding for the name. The scope of names defined in a\nclass block is limited to the class block; it does not extend to the\ncode blocks of methods -- this includes comprehensions and generator\nexpressions since they are implemented using a function scope. This\nmeans that the following will fail:\n\n class A:\n a = 42\n b = list(a + i for i in range(10))\n\nWhen a name is used in a code block, it is resolved using the nearest\nenclosing scope. The set of all such scopes visible to a code block\nis called the block\'s *environment*.\n\nIf a name is bound in a block, it is a local variable of that block,\nunless declared as "nonlocal". If a name is bound at the module\nlevel, it is a global variable. (The variables of the module code\nblock are local and global.) If a variable is used in a code block\nbut not defined there, it is a *free variable*.\n\nWhen a name is not found at all, a "NameError" exception is raised.\nIf the name refers to a local variable that has not been bound, a\n"UnboundLocalError" exception is raised. "UnboundLocalError" is a\nsubclass of "NameError".\n\nThe following constructs bind names: formal parameters to functions,\n"import" statements, class and function definitions (these bind the\nclass or function name in the defining block), and targets that are\nidentifiers if occurring in an assignment, "for" loop header, or after\n"as" in a "with" statement or "except" clause. The "import" statement\nof the form "from ... import *" binds all names defined in the\nimported module, except those beginning with an underscore. This form\nmay only be used at the module level.\n\nA target occurring in a "del" statement is also considered bound for\nthis purpose (though the actual semantics are to unbind the name).\n\nEach assignment or import statement occurs within a block defined by a\nclass or function definition or at the module level (the top-level\ncode block).\n\nIf a name binding operation occurs anywhere within a code block, all\nuses of the name within the block are treated as references to the\ncurrent block. This can lead to errors when a name is used within a\nblock before it is bound. This rule is subtle. Python lacks\ndeclarations and allows name binding operations to occur anywhere\nwithin a code block. The local variables of a code block can be\ndetermined by scanning the entire text of the block for name binding\noperations.\n\nIf the "global" statement occurs within a block, all uses of the name\nspecified in the statement refer to the binding of that name in the\ntop-level namespace. Names are resolved in the top-level namespace by\nsearching the global namespace, i.e. the namespace of the module\ncontaining the code block, and the builtins namespace, the namespace\nof the module "builtins". The global namespace is searched first. If\nthe name is not found there, the builtins namespace is searched. 
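A short sketch of the "global" lookup and binding rules just described (the names counter and bump are invented for the example):

   counter = 0

   def bump():
       global counter      # rebind the module-level name, not a local one
       counter += 1

   bump()
   bump()
   print(counter)          # prints 2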
The\nglobal statement must precede all uses of the name.\n\nThe builtins namespace associated with the execution of a code block\nis actually found by looking up the name "__builtins__" in its global\nnamespace; this should be a dictionary or a module (in the latter case\nthe module\'s dictionary is used). By default, when in the "__main__"\nmodule, "__builtins__" is the built-in module "builtins"; when in any\nother module, "__builtins__" is an alias for the dictionary of the\n"builtins" module itself. "__builtins__" can be set to a user-created\ndictionary to create a weak form of restricted execution.\n\n**CPython implementation detail:** Users should not touch\n"__builtins__"; it is strictly an implementation detail. Users\nwanting to override values in the builtins namespace should "import"\nthe "builtins" module and modify its attributes appropriately.\n\nThe namespace for a module is automatically created the first time a\nmodule is imported. The main module for a script is always called\n"__main__".\n\nThe "global" statement has the same scope as a name binding operation\nin the same block. If the nearest enclosing scope for a free variable\ncontains a global statement, the free variable is treated as a global.\n\nA class definition is an executable statement that may use and define\nnames. These references follow the normal rules for name resolution.\nThe namespace of the class definition becomes the attribute dictionary\nof the class. Names defined at the class scope are not visible in\nmethods.\n\n\nInteraction with dynamic features\n=================================\n\nThere are several cases where Python statements are illegal when used\nin conjunction with nested scopes that contain free variables.\n\nIf a variable is referenced in an enclosing scope, it is illegal to\ndelete the name. An error will be reported at compile time.\n\nIf the wild card form of import --- "import *" --- is used in a\nfunction and the function contains or is a nested block with free\nvariables, the compiler will raise a "SyntaxError".\n\nThe "eval()" and "exec()" functions do not have access to the full\nenvironment for resolving names. Names may be resolved in the local\nand global namespaces of the caller. Free variables are not resolved\nin the nearest enclosing namespace, but in the global namespace. [1]\nThe "exec()" and "eval()" functions have optional arguments to\noverride the global and local namespace. If only one namespace is\nspecified, it is used for both.\n', 'nonlocal': '\nThe "nonlocal" statement\n************************\n\n nonlocal_stmt ::= "nonlocal" identifier ("," identifier)*\n\nThe "nonlocal" statement causes the listed identifiers to refer to\npreviously bound variables in the nearest enclosing scope. This is\nimportant because the default behavior for binding is to search the\nlocal namespace first. 
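A minimal sketch of "nonlocal" rebinding a variable in the nearest enclosing function scope (the names make_counter and increment are invented for the example):

   def make_counter():
       count = 0
       def increment():
           nonlocal count      # rebind the variable in the enclosing scope
           count += 1
           return count
       return increment

   counter = make_counter()
   print(counter(), counter())   # prints 1 2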
The statement allows encapsulated code to\nrebind variables outside of the local scope besides the global\n(module) scope.\n\nNames listed in a "nonlocal" statement, unlike to those listed in a\n"global" statement, must refer to pre-existing bindings in an\nenclosing scope (the scope in which a new binding should be created\ncannot be determined unambiguously).\n\nNames listed in a "nonlocal" statement must not collide with pre-\nexisting bindings in the local scope.\n\nSee also: **PEP 3104** - Access to Names in Outer Scopes\n\n The specification for the "nonlocal" statement.\n', 'numbers': '\nNumeric literals\n****************\n\nThere are three types of numeric literals: integers, floating point\nnumbers, and imaginary numbers. There are no complex literals\n(complex numbers can be formed by adding a real number and an\nimaginary number).\n\nNote that numeric literals do not include a sign; a phrase like "-1"\nis actually an expression composed of the unary operator \'"-"\' and the\nliteral "1".\n', - 'numeric-types': '\nEmulating numeric types\n***********************\n\nThe following methods can be defined to emulate numeric objects.\nMethods corresponding to operations that are not supported by the\nparticular kind of number implemented (e.g., bitwise operations for\nnon-integral numbers) should be left undefined.\n\nobject.__add__(self, other)\nobject.__sub__(self, other)\nobject.__mul__(self, other)\nobject.__truediv__(self, other)\nobject.__floordiv__(self, other)\nobject.__mod__(self, other)\nobject.__divmod__(self, other)\nobject.__pow__(self, other[, modulo])\nobject.__lshift__(self, other)\nobject.__rshift__(self, other)\nobject.__and__(self, other)\nobject.__xor__(self, other)\nobject.__or__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations ("+", "-", "*", "/", "//", "%", "divmod()", "pow()",\n "**", "<<", ">>", "&", "^", "|"). For instance, to evaluate the\n expression "x + y", where *x* is an instance of a class that has an\n "__add__()" method, "x.__add__(y)" is called. The "__divmod__()"\n method should be the equivalent to using "__floordiv__()" and\n "__mod__()"; it should not be related to "__truediv__()". Note\n that "__pow__()" should be defined to accept an optional third\n argument if the ternary version of the built-in "pow()" function is\n to be supported.\n\n If one of those methods does not support the operation with the\n supplied arguments, it should return "NotImplemented".\n\nobject.__radd__(self, other)\nobject.__rsub__(self, other)\nobject.__rmul__(self, other)\nobject.__rtruediv__(self, other)\nobject.__rfloordiv__(self, other)\nobject.__rmod__(self, other)\nobject.__rdivmod__(self, other)\nobject.__rpow__(self, other)\nobject.__rlshift__(self, other)\nobject.__rrshift__(self, other)\nobject.__rand__(self, other)\nobject.__rxor__(self, other)\nobject.__ror__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations ("+", "-", "*", "/", "//", "%", "divmod()", "pow()",\n "**", "<<", ">>", "&", "^", "|") with reflected (swapped) operands.\n These functions are only called if the left operand does not\n support the corresponding operation and the operands are of\n different types. 
[2] For instance, to evaluate the expression "x -\n y", where *y* is an instance of a class that has an "__rsub__()"\n method, "y.__rsub__(x)" is called if "x.__sub__(y)" returns\n *NotImplemented*.\n\n Note that ternary "pow()" will not try calling "__rpow__()" (the\n coercion rules would become too complicated).\n\n Note: If the right operand\'s type is a subclass of the left\n operand\'s type and that subclass provides the reflected method\n for the operation, this method will be called before the left\n operand\'s non-reflected method. This behavior allows subclasses\n to override their ancestors\' operations.\n\nobject.__iadd__(self, other)\nobject.__isub__(self, other)\nobject.__imul__(self, other)\nobject.__itruediv__(self, other)\nobject.__ifloordiv__(self, other)\nobject.__imod__(self, other)\nobject.__ipow__(self, other[, modulo])\nobject.__ilshift__(self, other)\nobject.__irshift__(self, other)\nobject.__iand__(self, other)\nobject.__ixor__(self, other)\nobject.__ior__(self, other)\n\n These methods are called to implement the augmented arithmetic\n assignments ("+=", "-=", "*=", "/=", "//=", "%=", "**=", "<<=",\n ">>=", "&=", "^=", "|="). These methods should attempt to do the\n operation in-place (modifying *self*) and return the result (which\n could be, but does not have to be, *self*). If a specific method\n is not defined, the augmented assignment falls back to the normal\n methods. For instance, to execute the statement "x += y", where\n *x* is an instance of a class that has an "__iadd__()" method,\n "x.__iadd__(y)" is called. If *x* is an instance of a class that\n does not define a "__iadd__()" method, "x.__add__(y)" and\n "y.__radd__(x)" are considered, as with the evaluation of "x + y".\n\nobject.__neg__(self)\nobject.__pos__(self)\nobject.__abs__(self)\nobject.__invert__(self)\n\n Called to implement the unary arithmetic operations ("-", "+",\n "abs()" and "~").\n\nobject.__complex__(self)\nobject.__int__(self)\nobject.__float__(self)\nobject.__round__(self[, n])\n\n Called to implement the built-in functions "complex()", "int()",\n "float()" and "round()". Should return a value of the appropriate\n type.\n\nobject.__index__(self)\n\n Called to implement "operator.index()", and whenever Python needs\n to losslessly convert the numeric object to an integer object (such\n as in slicing, or in the built-in "bin()", "hex()" and "oct()"\n functions). Presence of this method indicates that the numeric\n object is an integer type. 
Must return an integer.\n\n Note: When "__index__()" is defined, "__int__()" should also be\n defined, and both shuld return the same value, in order to have a\n coherent integer type class.\n', + 'numeric-types': '\nEmulating numeric types\n***********************\n\nThe following methods can be defined to emulate numeric objects.\nMethods corresponding to operations that are not supported by the\nparticular kind of number implemented (e.g., bitwise operations for\nnon-integral numbers) should be left undefined.\n\nobject.__add__(self, other)\nobject.__sub__(self, other)\nobject.__mul__(self, other)\nobject.__truediv__(self, other)\nobject.__floordiv__(self, other)\nobject.__mod__(self, other)\nobject.__divmod__(self, other)\nobject.__pow__(self, other[, modulo])\nobject.__lshift__(self, other)\nobject.__rshift__(self, other)\nobject.__and__(self, other)\nobject.__xor__(self, other)\nobject.__or__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations ("+", "-", "*", "/", "//", "%", "divmod()", "pow()",\n "**", "<<", ">>", "&", "^", "|"). For instance, to evaluate the\n expression "x + y", where *x* is an instance of a class that has an\n "__add__()" method, "x.__add__(y)" is called. The "__divmod__()"\n method should be the equivalent to using "__floordiv__()" and\n "__mod__()"; it should not be related to "__truediv__()". Note\n that "__pow__()" should be defined to accept an optional third\n argument if the ternary version of the built-in "pow()" function is\n to be supported.\n\n If one of those methods does not support the operation with the\n supplied arguments, it should return "NotImplemented".\n\nobject.__radd__(self, other)\nobject.__rsub__(self, other)\nobject.__rmul__(self, other)\nobject.__rtruediv__(self, other)\nobject.__rfloordiv__(self, other)\nobject.__rmod__(self, other)\nobject.__rdivmod__(self, other)\nobject.__rpow__(self, other)\nobject.__rlshift__(self, other)\nobject.__rrshift__(self, other)\nobject.__rand__(self, other)\nobject.__rxor__(self, other)\nobject.__ror__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations ("+", "-", "*", "/", "//", "%", "divmod()", "pow()",\n "**", "<<", ">>", "&", "^", "|") with reflected (swapped) operands.\n These functions are only called if the left operand does not\n support the corresponding operation and the operands are of\n different types. [2] For instance, to evaluate the expression "x -\n y", where *y* is an instance of a class that has an "__rsub__()"\n method, "y.__rsub__(x)" is called if "x.__sub__(y)" returns\n *NotImplemented*.\n\n Note that ternary "pow()" will not try calling "__rpow__()" (the\n coercion rules would become too complicated).\n\n Note: If the right operand\'s type is a subclass of the left\n operand\'s type and that subclass provides the reflected method\n for the operation, this method will be called before the left\n operand\'s non-reflected method. 
This behavior allows subclasses\n to override their ancestors\' operations.\n\nobject.__iadd__(self, other)\nobject.__isub__(self, other)\nobject.__imul__(self, other)\nobject.__itruediv__(self, other)\nobject.__ifloordiv__(self, other)\nobject.__imod__(self, other)\nobject.__ipow__(self, other[, modulo])\nobject.__ilshift__(self, other)\nobject.__irshift__(self, other)\nobject.__iand__(self, other)\nobject.__ixor__(self, other)\nobject.__ior__(self, other)\n\n These methods are called to implement the augmented arithmetic\n assignments ("+=", "-=", "*=", "/=", "//=", "%=", "**=", "<<=",\n ">>=", "&=", "^=", "|="). These methods should attempt to do the\n operation in-place (modifying *self*) and return the result (which\n could be, but does not have to be, *self*). If a specific method\n is not defined, the augmented assignment falls back to the normal\n methods. For instance, if *x* is an instance of a class with an\n "__iadd__()" method, "x += y" is equivalent to "x = x.__iadd__(y)"\n . Otherwise, "x.__add__(y)" and "y.__radd__(x)" are considered, as\n with the evaluation of "x + y". In certain situations, augmented\n assignment can result in unexpected errors (see *Why does\n a_tuple[i] += [\'item\'] raise an exception when the addition\n works?*), but this behavior is in fact part of the data model.\n\nobject.__neg__(self)\nobject.__pos__(self)\nobject.__abs__(self)\nobject.__invert__(self)\n\n Called to implement the unary arithmetic operations ("-", "+",\n "abs()" and "~").\n\nobject.__complex__(self)\nobject.__int__(self)\nobject.__float__(self)\nobject.__round__(self[, n])\n\n Called to implement the built-in functions "complex()", "int()",\n "float()" and "round()". Should return a value of the appropriate\n type.\n\nobject.__index__(self)\n\n Called to implement "operator.index()", and whenever Python needs\n to losslessly convert the numeric object to an integer object (such\n as in slicing, or in the built-in "bin()", "hex()" and "oct()"\n functions). Presence of this method indicates that the numeric\n object is an integer type. Must return an integer.\n\n Note: When "__index__()" is defined, "__int__()" should also be\n defined, and both shuld return the same value, in order to have a\n coherent integer type class.\n', 'objects': '\nObjects, values and types\n*************************\n\n*Objects* are Python\'s abstraction for data. All data in a Python\nprogram is represented by objects or by relations between objects. (In\na sense, and in conformance to Von Neumann\'s model of a "stored\nprogram computer," code is also represented by objects.)\n\nEvery object has an identity, a type and a value. An object\'s\n*identity* never changes once it has been created; you may think of it\nas the object\'s address in memory. The \'"is"\' operator compares the\nidentity of two objects; the "id()" function returns an integer\nrepresenting its identity.\n\n**CPython implementation detail:** For CPython, "id(x)" is the memory\naddress where "x" is stored.\n\nAn object\'s type determines the operations that the object supports\n(e.g., "does it have a length?") and also defines the possible values\nfor objects of that type. The "type()" function returns an object\'s\ntype (which is an object itself). Like its identity, an object\'s\n*type* is also unchangeable. [1]\n\nThe *value* of some objects can change. Objects whose value can\nchange are said to be *mutable*; objects whose value is unchangeable\nonce they are created are called *immutable*. 
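As an illustrative sketch of the reflected-operand protocol from the numeric-types entry above (the Money class is invented for the example; "sum()" starts from the integer 0, so the reflected method is what makes the call work):

   >>> class Money:
   ...     def __init__(self, cents):
   ...         self.cents = cents
   ...     def __add__(self, other):
   ...         if not isinstance(other, Money):
   ...             return NotImplemented
   ...         return Money(self.cents + other.cents)
   ...     def __radd__(self, other):
   ...         # Called for "0 + Money(...)" because int.__add__() returns
   ...         # NotImplemented for a Money right operand.
   ...         if other == 0:
   ...             return Money(self.cents)
   ...         return NotImplemented
   ...     def __repr__(self):
   ...         return 'Money(%d)' % self.cents
   ...
   >>> sum([Money(100), Money(250)])
   Money(350)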
(The value of an\nimmutable container object that contains a reference to a mutable\nobject can change when the latter\'s value is changed; however the\ncontainer is still considered immutable, because the collection of\nobjects it contains cannot be changed. So, immutability is not\nstrictly the same as having an unchangeable value, it is more subtle.)\nAn object\'s mutability is determined by its type; for instance,\nnumbers, strings and tuples are immutable, while dictionaries and\nlists are mutable.\n\nObjects are never explicitly destroyed; however, when they become\nunreachable they may be garbage-collected. An implementation is\nallowed to postpone garbage collection or omit it altogether --- it is\na matter of implementation quality how garbage collection is\nimplemented, as long as no objects are collected that are still\nreachable.\n\n**CPython implementation detail:** CPython currently uses a reference-\ncounting scheme with (optional) delayed detection of cyclically linked\ngarbage, which collects most objects as soon as they become\nunreachable, but is not guaranteed to collect garbage containing\ncircular references. See the documentation of the "gc" module for\ninformation on controlling the collection of cyclic garbage. Other\nimplementations act differently and CPython may change. Do not depend\non immediate finalization of objects when they become unreachable (ex:\nalways close files).\n\nNote that the use of the implementation\'s tracing or debugging\nfacilities may keep objects alive that would normally be collectable.\nAlso note that catching an exception with a \'"try"..."except"\'\nstatement may keep objects alive.\n\nSome objects contain references to "external" resources such as open\nfiles or windows. It is understood that these resources are freed\nwhen the object is garbage-collected, but since garbage collection is\nnot guaranteed to happen, such objects also provide an explicit way to\nrelease the external resource, usually a "close()" method. Programs\nare strongly recommended to explicitly close such objects. The\n\'"try"..."finally"\' statement and the \'"with"\' statement provide\nconvenient ways to do this.\n\nSome objects contain references to other objects; these are called\n*containers*. Examples of containers are tuples, lists and\ndictionaries. The references are part of a container\'s value. In\nmost cases, when we talk about the value of a container, we imply the\nvalues, not the identities of the contained objects; however, when we\ntalk about the mutability of a container, only the identities of the\nimmediately contained objects are implied. So, if an immutable\ncontainer (like a tuple) contains a reference to a mutable object, its\nvalue changes if that mutable object is changed.\n\nTypes affect almost all aspects of object behavior. Even the\nimportance of object identity is affected in some sense: for immutable\ntypes, operations that compute new values may actually return a\nreference to any existing object with the same type and value, while\nfor mutable objects this is not allowed. E.g., after "a = 1; b = 1",\n"a" and "b" may or may not refer to the same object with the value\none, depending on the implementation, but after "c = []; d = []", "c"\nand "d" are guaranteed to refer to two different, unique, newly\ncreated empty lists. 
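A short interactive sketch of the mutability and identity points made above (an immutable tuple holding a mutable list, and two separately created empty lists):

   >>> t = (1, [2, 3])      # an immutable tuple holding a mutable list
   >>> t[1].append(4)       # the contained list can still be changed ...
   >>> t                    # ... so the tuple's "value" changes too
   (1, [2, 3, 4])
   >>> c = []; d = []
   >>> c is d               # two distinct, newly created lists
   False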
(Note that "c = d = []" assigns the same object\nto both "c" and "d".)\n', 'operator-summary': '\nOperator precedence\n*******************\n\nThe following table summarizes the operator precedences in Python,\nfrom lowest precedence (least binding) to highest precedence (most\nbinding). Operators in the same box have the same precedence. Unless\nthe syntax is explicitly given, operators are binary. Operators in\nthe same box group left to right (except for comparisons, including\ntests, which all have the same precedence and chain from left to right\n--- see section *Comparisons* --- and exponentiation, which groups\nfrom right to left).\n\n+-------------------------------------------------+---------------------------------------+\n| Operator | Description |\n+=================================================+=======================================+\n| "lambda" | Lambda expression |\n+-------------------------------------------------+---------------------------------------+\n| "if" -- "else" | Conditional expression |\n+-------------------------------------------------+---------------------------------------+\n| "or" | Boolean OR |\n+-------------------------------------------------+---------------------------------------+\n| "and" | Boolean AND |\n+-------------------------------------------------+---------------------------------------+\n| "not" "x" | Boolean NOT |\n+-------------------------------------------------+---------------------------------------+\n| "in", "not in", "is", "is not", "<", "<=", ">", | Comparisons, including membership |\n| ">=", "!=", "==" | tests and identity tests |\n+-------------------------------------------------+---------------------------------------+\n| "|" | Bitwise OR |\n+-------------------------------------------------+---------------------------------------+\n| "^" | Bitwise XOR |\n+-------------------------------------------------+---------------------------------------+\n| "&" | Bitwise AND |\n+-------------------------------------------------+---------------------------------------+\n| "<<", ">>" | Shifts |\n+-------------------------------------------------+---------------------------------------+\n| "+", "-" | Addition and subtraction |\n+-------------------------------------------------+---------------------------------------+\n| "*", "/", "//", "%" | Multiplication, division, remainder |\n+-------------------------------------------------+---------------------------------------+\n| "+x", "-x", "~x" | Positive, negative, bitwise NOT |\n+-------------------------------------------------+---------------------------------------+\n| "**" | Exponentiation [6] |\n+-------------------------------------------------+---------------------------------------+\n| "x[index]", "x[index:index]", | Subscription, slicing, call, |\n| "x(arguments...)", "x.attribute" | attribute reference |\n+-------------------------------------------------+---------------------------------------+\n| "(expressions...)", "[expressions...]", "{key: | Binding or tuple display, list |\n| value...}", "{expressions...}" | display, dictionary display, set |\n+-------------------------------------------------+---------------------------------------+\n\n-[ Footnotes ]-\n\n[1] While "abs(x%y) < abs(y)" is true mathematically, for floats\n it may not be true numerically due to roundoff. 
For example, and\n assuming a platform on which a Python float is an IEEE 754 double-\n precision number, in order that "-1e-100 % 1e100" have the same\n sign as "1e100", the computed result is "-1e-100 + 1e100", which\n is numerically exactly equal to "1e100". The function\n "math.fmod()" returns a result whose sign matches the sign of the\n first argument instead, and so returns "-1e-100" in this case.\n Which approach is more appropriate depends on the application.\n\n[2] If x is very close to an exact integer multiple of y, it\'s\n possible for "x//y" to be one larger than "(x-x%y)//y" due to\n rounding. In such cases, Python returns the latter result, in\n order to preserve that "divmod(x,y)[0] * y + x % y" be very close\n to "x".\n\n[3] While comparisons between strings make sense at the byte\n level, they may be counter-intuitive to users. For example, the\n strings ""\\u00C7"" and ""\\u0327\\u0043"" compare differently, even\n though they both represent the same unicode character (LATIN\n CAPITAL LETTER C WITH CEDILLA). To compare strings in a human\n recognizable way, compare using "unicodedata.normalize()".\n\n[4] Due to automatic garbage-collection, free lists, and the\n dynamic nature of descriptors, you may notice seemingly unusual\n behaviour in certain uses of the "is" operator, like those\n involving comparisons between instance methods, or constants.\n Check their documentation for more info.\n\n[5] The "%" operator is also used for string formatting; the same\n precedence applies.\n\n[6] The power operator "**" binds less tightly than an arithmetic\n or bitwise unary operator on its right, that is, "2**-1" is "0.5".\n', 'pass': '\nThe "pass" statement\n********************\n\n pass_stmt ::= "pass"\n\n"pass" is a null operation --- when it is executed, nothing happens.\nIt is useful as a placeholder when a statement is required\nsyntactically, but no code needs to be executed, for example:\n\n def f(arg): pass # a function that does nothing (yet)\n\n class C: pass # a class with no methods (yet)\n', @@ -60,7 +60,7 @@ topics = {'assert': '\nThe "assert" statement\n**********************\n\nAssert 'shifting': '\nShifting operations\n*******************\n\nThe shifting operations have lower priority than the arithmetic\noperations:\n\n shift_expr ::= a_expr | shift_expr ( "<<" | ">>" ) a_expr\n\nThese operators accept integers as arguments. They shift the first\nargument to the left or right by the number of bits given by the\nsecond argument.\n\nA right shift by *n* bits is defined as floor division by "pow(2,n)".\nA left shift by *n* bits is defined as multiplication with "pow(2,n)".\n\nNote: In the current implementation, the right-hand operand is\n required to be at most "sys.maxsize". If the right-hand operand is\n larger than "sys.maxsize" an "OverflowError" exception is raised.\n', 'slicings': '\nSlicings\n********\n\nA slicing selects a range of items in a sequence object (e.g., a\nstring, tuple or list). Slicings may be used as expressions or as\ntargets in assignment or "del" statements. 
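Before the formal grammar, a small illustrative sketch of slicings in the three roles just mentioned (the list s is an arbitrary example value, not taken from the reference text):

    >>> s = [0, 1, 2, 3, 4, 5]
    >>> s[1:4]                  # slicing used as an expression
    [1, 2, 3]
    >>> s[1:4] = ['a', 'b']     # slicing used as an assignment target
    >>> s
    [0, 'a', 'b', 4, 5]
    >>> del s[::2]              # slicing used in a "del" statement
    >>> s
    ['a', 4]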
The syntax for a slicing:\n\n slicing ::= primary "[" slice_list "]"\n slice_list ::= slice_item ("," slice_item)* [","]\n slice_item ::= expression | proper_slice\n proper_slice ::= [lower_bound] ":" [upper_bound] [ ":" [stride] ]\n lower_bound ::= expression\n upper_bound ::= expression\n stride ::= expression\n\nThere is ambiguity in the formal syntax here: anything that looks like\nan expression list also looks like a slice list, so any subscription\ncan be interpreted as a slicing. Rather than further complicating the\nsyntax, this is disambiguated by defining that in this case the\ninterpretation as a subscription takes priority over the\ninterpretation as a slicing (this is the case if the slice list\ncontains no proper slice).\n\nThe semantics for a slicing are as follows. The primary must evaluate\nto a mapping object, and it is indexed (using the same "__getitem__()"\nmethod as normal subscription) with a key that is constructed from the\nslice list, as follows. If the slice list contains at least one\ncomma, the key is a tuple containing the conversion of the slice\nitems; otherwise, the conversion of the lone slice item is the key.\nThe conversion of a slice item that is an expression is that\nexpression. The conversion of a proper slice is a slice object (see\nsection *The standard type hierarchy*) whose "start", "stop" and\n"step" attributes are the values of the expressions given as lower\nbound, upper bound and stride, respectively, substituting "None" for\nmissing expressions.\n', 'specialattrs': '\nSpecial Attributes\n******************\n\nThe implementation adds a few special read-only attributes to several\nobject types, where they are relevant. Some of these are not reported\nby the "dir()" built-in function.\n\nobject.__dict__\n\n A dictionary or other mapping object used to store an object\'s\n (writable) attributes.\n\ninstance.__class__\n\n The class to which a class instance belongs.\n\nclass.__bases__\n\n The tuple of base classes of a class object.\n\nclass.__name__\n\n The name of the class or type.\n\nclass.__qualname__\n\n The *qualified name* of the class or type.\n\n New in version 3.3.\n\nclass.__mro__\n\n This attribute is a tuple of classes that are considered when\n looking for base classes during method resolution.\n\nclass.mro()\n\n This method can be overridden by a metaclass to customize the\n method resolution order for its instances. It is called at class\n instantiation, and its result is stored in "__mro__".\n\nclass.__subclasses__()\n\n Each class keeps a list of weak references to its immediate\n subclasses. This method returns a list of all those references\n still alive. 
Example:\n\n >>> int.__subclasses__()\n []\n\n-[ Footnotes ]-\n\n[1] Additional information on these special methods may be found\n in the Python Reference Manual (*Basic customization*).\n\n[2] As a consequence, the list "[1, 2]" is considered equal to\n "[1.0, 2.0]", and similarly for tuples.\n\n[3] They must have since the parser can\'t tell the type of the\n operands.\n\n[4] Cased characters are those with general category property\n being one of "Lu" (Letter, uppercase), "Ll" (Letter, lowercase),\n or "Lt" (Letter, titlecase).\n\n[5] To format only a tuple you should therefore provide a\n singleton tuple whose only element is the tuple to be formatted.\n', - 'specialnames': '\nSpecial method names\n********************\n\nA class can implement certain operations that are invoked by special\nsyntax (such as arithmetic operations or subscripting and slicing) by\ndefining methods with special names. This is Python\'s approach to\n*operator overloading*, allowing classes to define their own behavior\nwith respect to language operators. For instance, if a class defines\na method named "__getitem__()", and "x" is an instance of this class,\nthen "x[i]" is roughly equivalent to "type(x).__getitem__(x, i)".\nExcept where mentioned, attempts to execute an operation raise an\nexception when no appropriate method is defined (typically\n"AttributeError" or "TypeError").\n\nWhen implementing a class that emulates any built-in type, it is\nimportant that the emulation only be implemented to the degree that it\nmakes sense for the object being modelled. For example, some\nsequences may work well with retrieval of individual elements, but\nextracting a slice may not make sense. (One example of this is the\n"NodeList" interface in the W3C\'s Document Object Model.)\n\n\nBasic customization\n===================\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. "__new__()" is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). The return\n value of "__new__()" should be the new object instance (usually an\n instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s "__new__()" method using\n "super(currentclass, cls).__new__(cls[, ...])" with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If "__new__()" returns an instance of *cls*, then the new\n instance\'s "__init__()" method will be invoked like\n "__init__(self[, ...])", where *self* is the new instance and the\n remaining arguments are the same as were passed to "__new__()".\n\n If "__new__()" does not return an instance of *cls*, then the new\n instance\'s "__init__()" method will not be invoked.\n\n "__new__()" is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. 
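As a hedged sketch of the "__new__()" behaviour described above (the class Point2D is hypothetical and chosen only for illustration), an immutable tuple subclass has to fix its value in "__new__()", because the instance already exists by the time "__init__()" runs:

    class Point2D(tuple):
        """Hypothetical immutable point built on top of tuple."""
        def __new__(cls, x, y):
            # The tuple's value must be supplied here; it cannot be
            # changed later in __init__().
            return super().__new__(cls, (x, y))

        @property
        def x(self):
            return self[0]

        @property
        def y(self):
            return self[1]

    p = Point2D(3, 4)
    print(p, p.x, p.y)           # (3, 4) 3 4
    print(isinstance(p, tuple))  # True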
If a base class has an\n "__init__()" method, the derived class\'s "__init__()" method, if\n any, must explicitly call it to ensure proper initialization of the\n base class part of the instance; for example:\n "BaseClass.__init__(self, [args...])". As a special constraint on\n constructors, no value may be returned; doing so will cause a\n "TypeError" to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. This is also\n called a destructor. If a base class has a "__del__()" method, the\n derived class\'s "__del__()" method, if any, must explicitly call it\n to ensure proper deletion of the base class part of the instance.\n Note that it is possible (though not recommended!) for the\n "__del__()" method to postpone destruction of the instance by\n creating a new reference to it. It may then be called at a later\n time when this new reference is deleted. It is not guaranteed that\n "__del__()" methods are called for objects that still exist when\n the interpreter exits.\n\n Note: "del x" doesn\'t directly call "x.__del__()" --- the former\n decrements the reference count for "x" by one, and the latter is\n only called when "x"\'s reference count reaches zero. Some common\n situations that may prevent the reference count of an object from\n going to zero include: circular references between objects (e.g.,\n a doubly-linked list or a tree data structure with parent and\n child pointers); a reference to the object on the stack frame of\n a function that caught an exception (the traceback stored in\n "sys.exc_info()[2]" keeps the stack frame alive); or a reference\n to the object on the stack frame that raised an unhandled\n exception in interactive mode (the traceback stored in\n "sys.last_traceback" keeps the stack frame alive). The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing "None" in\n "sys.last_traceback". Circular references which are garbage are\n detected and cleaned up when the cyclic garbage collector is\n enabled (it\'s on by default). Refer to the documentation for the\n "gc" module for more information about this topic.\n\n Warning: Due to the precarious circumstances under which\n "__del__()" methods are invoked, exceptions that occur during\n their execution are ignored, and a warning is printed to\n "sys.stderr" instead. Also, when "__del__()" is invoked in\n response to a module being deleted (e.g., when execution of the\n program is done), other globals referenced by the "__del__()"\n method may already have been deleted or in the process of being\n torn down (e.g. the import machinery shutting down). For this\n reason, "__del__()" methods should do the absolute minimum needed\n to maintain external invariants. Starting with version 1.5,\n Python guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the "__del__()" method is called.\n\nobject.__repr__(self)\n\n Called by the "repr()" built-in function to compute the "official"\n string representation of an object. If at all possible, this\n should look like a valid Python expression that could be used to\n recreate an object with the same value (given an appropriate\n environment). If this is not possible, a string of the form\n "<...some useful description...>" should be returned. 
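For instance, a hypothetical class (the name Interval is invented for this sketch) might return a constructor-like expression from "__repr__()":

    class Interval:
        """Hypothetical example type used only to illustrate __repr__()."""
        def __init__(self, low, high):
            self.low = low
            self.high = high

        def __repr__(self):
            # Aim for something that could recreate an equal object.
            return 'Interval({!r}, {!r})'.format(self.low, self.high)

    i = Interval(1, 5)
    print(repr(i))              # Interval(1, 5)
    print([i])                  # [Interval(1, 5)] -- containers show repr() too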
The return\n value must be a string object. If a class defines "__repr__()" but\n not "__str__()", then "__repr__()" is also used when an "informal"\n string representation of instances of that class is required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by "str(object)" and the built-in functions "format()" and\n "print()" to compute the "informal" or nicely printable string\n representation of an object. The return value must be a *string*\n object.\n\n This method differs from "object.__repr__()" in that there is no\n expectation that "__str__()" return a valid Python expression: a\n more convenient or concise representation can be used.\n\n The default implementation defined by the built-in type "object"\n calls "object.__repr__()".\n\nobject.__bytes__(self)\n\n Called by "bytes()" to compute a byte-string representation of an\n object. This should return a "bytes" object.\n\nobject.__format__(self, format_spec)\n\n Called by the "format()" built-in function (and by extension, the\n "str.format()" method of class "str") to produce a "formatted"\n string representation of an object. The "format_spec" argument is a\n string that contains a description of the formatting options\n desired. The interpretation of the "format_spec" argument is up to\n the type implementing "__format__()", however most classes will\n either delegate formatting to one of the built-in types, or use a\n similar formatting option syntax.\n\n See *Format Specification Mini-Language* for a description of the\n standard formatting syntax.\n\n The return value must be a string object.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n These are the so-called "rich comparison" methods. The\n correspondence between operator symbols and method names is as\n follows: "xy" calls\n "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".\n\n A rich comparison method may return the singleton "NotImplemented"\n if it does not implement the operation for a given pair of\n arguments. By convention, "False" and "True" are returned for a\n successful comparison. However, these methods can return any value,\n so if the comparison operator is used in a Boolean context (e.g.,\n in the condition of an "if" statement), Python will call "bool()"\n on the value to determine if the result is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of "x==y" does not imply that "x!=y" is false.\n Accordingly, when defining "__eq__()", one should also define\n "__ne__()" so that the operators will behave as expected. 
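A minimal sketch of these hooks, using an invented Currency class (not part of the reference text) that returns "NotImplemented" for operand types it does not understand:

    class Currency:
        """Hypothetical value type illustrating the rich comparison methods."""
        def __init__(self, code):
            self.code = code

        def __eq__(self, other):
            if not isinstance(other, Currency):
                return NotImplemented   # let the other operand try, or fall back
            return self.code == other.code

        def __ne__(self, other):
            result = self.__eq__(other)
            if result is NotImplemented:
                return result
            return not result

    print(Currency('EUR') == Currency('EUR'))   # True
    print(Currency('EUR') != Currency('USD'))   # True
    print(Currency('EUR') == 'EUR')             # False -- falls back to identity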
See the\n paragraph on "__hash__()" for some important notes on creating\n *hashable* objects which support custom comparison operations and\n are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, "__lt__()" and "__gt__()" are each other\'s\n reflection, "__le__()" and "__ge__()" are each other\'s reflection,\n and "__eq__()" and "__ne__()" are their own reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see "functools.total_ordering()".\n\nobject.__hash__(self)\n\n Called by built-in function "hash()" and for operations on members\n of hashed collections including "set", "frozenset", and "dict".\n "__hash__()" should return an integer. The only required property\n is that objects which compare equal have the same hash value; it is\n advised to somehow mix together (e.g. using exclusive or) the hash\n values for the components of the object that also play a part in\n comparison of objects.\n\n Note: "hash()" truncates the value returned from an object\'s\n custom "__hash__()" method to the size of a "Py_ssize_t". This\n is typically 8 bytes on 64-bit builds and 4 bytes on 32-bit\n builds. If an object\'s "__hash__()" must interoperate on builds\n of different bit sizes, be sure to check the width on all\n supported builds. An easy way to do this is with "python -c\n "import sys; print(sys.hash_info.width)""\n\n If a class does not define an "__eq__()" method it should not\n define a "__hash__()" operation either; if it defines "__eq__()"\n but not "__hash__()", its instances will not be usable as items in\n hashable collections. If a class defines mutable objects and\n implements an "__eq__()" method, it should not implement\n "__hash__()", since the implementation of hashable collections\n requires that a key\'s hash value is immutable (if the object\'s hash\n value changes, it will be in the wrong hash bucket).\n\n User-defined classes have "__eq__()" and "__hash__()" methods by\n default; with them, all objects compare unequal (except with\n themselves) and "x.__hash__()" returns an appropriate value such\n that "x == y" implies both that "x is y" and "hash(x) == hash(y)".\n\n A class that overrides "__eq__()" and does not define "__hash__()"\n will have its "__hash__()" implicitly set to "None". When the\n "__hash__()" method of a class is "None", instances of the class\n will raise an appropriate "TypeError" when a program attempts to\n retrieve their hash value, and will also be correctly identified as\n unhashable when checking "isinstance(obj, collections.Hashable").\n\n If a class that overrides "__eq__()" needs to retain the\n implementation of "__hash__()" from a parent class, the interpreter\n must be told this explicitly by setting "__hash__ =\n .__hash__".\n\n If a class that does not override "__eq__()" wishes to suppress\n hash support, it should include "__hash__ = None" in the class\n definition. 
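A small hedged example of keeping "__eq__()" and "__hash__()" consistent (the Point class here is hypothetical), so that instances can serve as dictionary keys:

    class Point:
        """Hypothetical type whose equality and hash use the same components."""
        def __init__(self, x, y):
            self.x = x
            self.y = y

        def __eq__(self, other):
            if not isinstance(other, Point):
                return NotImplemented
            return (self.x, self.y) == (other.x, other.y)

        def __hash__(self):
            # Mix together exactly the components that take part in __eq__().
            return hash((self.x, self.y))

    table = {Point(1, 2): 'home'}
    print(table[Point(1, 2)])   # home -- equal objects hash alike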
A class which defines its own "__hash__()" that\n explicitly raises a "TypeError" would be incorrectly identified as\n hashable by an "isinstance(obj, collections.Hashable)" call.\n\n Note: By default, the "__hash__()" values of str, bytes and\n datetime objects are "salted" with an unpredictable random value.\n Although they remain constant within an individual Python\n process, they are not predictable between repeated invocations of\n Python.This is intended to provide protection against a denial-\n of-service caused by carefully-chosen inputs that exploit the\n worst case performance of a dict insertion, O(n^2) complexity.\n See http://www.ocert.org/advisories/ocert-2011-003.html for\n details.Changing hash values affects the iteration order of\n dicts, sets and other mappings. Python has never made guarantees\n about this ordering (and it typically varies between 32-bit and\n 64-bit builds).See also "PYTHONHASHSEED".\n\n Changed in version 3.3: Hash randomization is enabled by default.\n\nobject.__bool__(self)\n\n Called to implement truth value testing and the built-in operation\n "bool()"; should return "False" or "True". When this method is not\n defined, "__len__()" is called, if it is defined, and the object is\n considered true if its result is nonzero. If a class defines\n neither "__len__()" nor "__bool__()", all its instances are\n considered true.\n\n\nCustomizing attribute access\n============================\n\nThe following methods can be defined to customize the meaning of\nattribute access (use of, assignment to, or deletion of "x.name") for\nclass instances.\n\nobject.__getattr__(self, name)\n\n Called when an attribute lookup has not found the attribute in the\n usual places (i.e. it is not an instance attribute nor is it found\n in the class tree for "self"). "name" is the attribute name. This\n method should return the (computed) attribute value or raise an\n "AttributeError" exception.\n\n Note that if the attribute is found through the normal mechanism,\n "__getattr__()" is not called. (This is an intentional asymmetry\n between "__getattr__()" and "__setattr__()".) This is done both for\n efficiency reasons and because otherwise "__getattr__()" would have\n no way to access other attributes of the instance. Note that at\n least for instance variables, you can fake total control by not\n inserting any values in the instance attribute dictionary (but\n instead inserting them in another object). See the\n "__getattribute__()" method below for a way to actually get total\n control over attribute access.\n\nobject.__getattribute__(self, name)\n\n Called unconditionally to implement attribute accesses for\n instances of the class. If the class also defines "__getattr__()",\n the latter will not be called unless "__getattribute__()" either\n calls it explicitly or raises an "AttributeError". This method\n should return the (computed) attribute value or raise an\n "AttributeError" exception. In order to avoid infinite recursion in\n this method, its implementation should always call the base class\n method with the same name to access any attributes it needs, for\n example, "object.__getattribute__(self, name)".\n\n Note: This method may still be bypassed when looking up special\n methods as the result of implicit invocation via language syntax\n or built-in functions. See *Special method lookup*.\n\nobject.__setattr__(self, name, value)\n\n Called when an attribute assignment is attempted. This is called\n instead of the normal mechanism (i.e. 
store the value in the\n instance dictionary). *name* is the attribute name, *value* is the\n value to be assigned to it.\n\n If "__setattr__()" wants to assign to an instance attribute, it\n should call the base class method with the same name, for example,\n "object.__setattr__(self, name, value)".\n\nobject.__delattr__(self, name)\n\n Like "__setattr__()" but for attribute deletion instead of\n assignment. This should only be implemented if "del obj.name" is\n meaningful for the object.\n\nobject.__dir__(self)\n\n Called when "dir()" is called on the object. A sequence must be\n returned. "dir()" converts the returned sequence to a list and\n sorts it.\n\n\nImplementing Descriptors\n------------------------\n\nThe following methods only apply when an instance of the class\ncontaining the method (a so-called *descriptor* class) appears in an\n*owner* class (the descriptor must be in either the owner\'s class\ndictionary or in the class dictionary for one of its parents). In the\nexamples below, "the attribute" refers to the attribute whose name is\nthe key of the property in the owner class\' "__dict__".\n\nobject.__get__(self, instance, owner)\n\n Called to get the attribute of the owner class (class attribute\n access) or of an instance of that class (instance attribute\n access). *owner* is always the owner class, while *instance* is the\n instance that the attribute was accessed through, or "None" when\n the attribute is accessed through the *owner*. This method should\n return the (computed) attribute value or raise an "AttributeError"\n exception.\n\nobject.__set__(self, instance, value)\n\n Called to set the attribute on an instance *instance* of the owner\n class to a new value, *value*.\n\nobject.__delete__(self, instance)\n\n Called to delete the attribute on an instance *instance* of the\n owner class.\n\n\nInvoking Descriptors\n--------------------\n\nIn general, a descriptor is an object attribute with "binding\nbehavior", one whose attribute access has been overridden by methods\nin the descriptor protocol: "__get__()", "__set__()", and\n"__delete__()". If any of those methods are defined for an object, it\nis said to be a descriptor.\n\nThe default behavior for attribute access is to get, set, or delete\nthe attribute from an object\'s dictionary. For instance, "a.x" has a\nlookup chain starting with "a.__dict__[\'x\']", then\n"type(a).__dict__[\'x\']", and continuing through the base classes of\n"type(a)" excluding metaclasses.\n\nHowever, if the looked-up value is an object defining one of the\ndescriptor methods, then Python may override the default behavior and\ninvoke the descriptor method instead. Where this occurs in the\nprecedence chain depends on which descriptor methods were defined and\nhow they were called.\n\nThe starting point for descriptor invocation is a binding, "a.x". 
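As a concrete, purely illustrative sketch (the names Positive and Order are invented) of a data descriptor implementing the protocol above; the exact calls Python makes for each kind of binding are spelled out next:

    class Positive:
        """Hypothetical data descriptor that rejects non-positive values."""
        def __init__(self, name):
            self.name = name                # key used in the instance __dict__
        def __get__(self, instance, owner):
            if instance is None:            # attribute accessed on the class
                return self
            return instance.__dict__[self.name]
        def __set__(self, instance, value):
            if value <= 0:
                raise ValueError('%s must be positive' % self.name)
            instance.__dict__[self.name] = value

    class Order:
        quantity = Positive('quantity')     # descriptor lives in the class dict
        def __init__(self, quantity):
            self.quantity = quantity        # routed through Positive.__set__()

    o = Order(3)
    print(o.quantity)                       # 3, via Positive.__get__()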
How\nthe arguments are assembled depends on "a":\n\nDirect Call\n The simplest and least common call is when user code directly\n invokes a descriptor method: "x.__get__(a)".\n\nInstance Binding\n If binding to an object instance, "a.x" is transformed into the\n call: "type(a).__dict__[\'x\'].__get__(a, type(a))".\n\nClass Binding\n If binding to a class, "A.x" is transformed into the call:\n "A.__dict__[\'x\'].__get__(None, A)".\n\nSuper Binding\n If "a" is an instance of "super", then the binding "super(B,\n obj).m()" searches "obj.__class__.__mro__" for the base class "A"\n immediately preceding "B" and then invokes the descriptor with the\n call: "A.__dict__[\'m\'].__get__(obj, obj.__class__)".\n\nFor instance bindings, the precedence of descriptor invocation depends\non the which descriptor methods are defined. A descriptor can define\nany combination of "__get__()", "__set__()" and "__delete__()". If it\ndoes not define "__get__()", then accessing the attribute will return\nthe descriptor object itself unless there is a value in the object\'s\ninstance dictionary. If the descriptor defines "__set__()" and/or\n"__delete__()", it is a data descriptor; if it defines neither, it is\na non-data descriptor. Normally, data descriptors define both\n"__get__()" and "__set__()", while non-data descriptors have just the\n"__get__()" method. Data descriptors with "__set__()" and "__get__()"\ndefined always override a redefinition in an instance dictionary. In\ncontrast, non-data descriptors can be overridden by instances.\n\nPython methods (including "staticmethod()" and "classmethod()") are\nimplemented as non-data descriptors. Accordingly, instances can\nredefine and override methods. This allows individual instances to\nacquire behaviors that differ from other instances of the same class.\n\nThe "property()" function is implemented as a data descriptor.\nAccordingly, instances cannot override the behavior of a property.\n\n\n__slots__\n---------\n\nBy default, instances of classes have a dictionary for attribute\nstorage. This wastes space for objects having very few instance\nvariables. The space consumption can become acute when creating large\nnumbers of instances.\n\nThe default can be overridden by defining *__slots__* in a class\ndefinition. The *__slots__* declaration takes a sequence of instance\nvariables and reserves just enough space in each instance to hold a\nvalue for each variable. Space is saved because *__dict__* is not\ncreated for each instance.\n\nobject.__slots__\n\n This class variable can be assigned a string, iterable, or sequence\n of strings with variable names used by instances. If defined in a\n class, *__slots__* reserves space for the declared variables and\n prevents the automatic creation of *__dict__* and *__weakref__* for\n each instance.\n\n\nNotes on using *__slots__*\n~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n* When inheriting from a class without *__slots__*, the *__dict__*\n attribute of that class will always be accessible, so a *__slots__*\n definition in the subclass is meaningless.\n\n* Without a *__dict__* variable, instances cannot be assigned new\n variables not listed in the *__slots__* definition. Attempts to\n assign to an unlisted variable name raises "AttributeError". If\n dynamic assignment of new variables is desired, then add\n "\'__dict__\'" to the sequence of strings in the *__slots__*\n declaration.\n\n* Without a *__weakref__* variable for each instance, classes\n defining *__slots__* do not support weak references to its\n instances. 
If weak reference support is needed, then add\n "\'__weakref__\'" to the sequence of strings in the *__slots__*\n declaration.\n\n* *__slots__* are implemented at the class level by creating\n descriptors (*Implementing Descriptors*) for each variable name. As\n a result, class attributes cannot be used to set default values for\n instance variables defined by *__slots__*; otherwise, the class\n attribute would overwrite the descriptor assignment.\n\n* The action of a *__slots__* declaration is limited to the class\n where it is defined. As a result, subclasses will have a *__dict__*\n unless they also define *__slots__* (which must only contain names\n of any *additional* slots).\n\n* If a class defines a slot also defined in a base class, the\n instance variable defined by the base class slot is inaccessible\n (except by retrieving its descriptor directly from the base class).\n This renders the meaning of the program undefined. In the future, a\n check may be added to prevent this.\n\n* Nonempty *__slots__* does not work for classes derived from\n "variable-length" built-in types such as "int", "bytes" and "tuple".\n\n* Any non-string iterable may be assigned to *__slots__*. Mappings\n may also be used; however, in the future, special meaning may be\n assigned to the values corresponding to each key.\n\n* *__class__* assignment works only if both classes have the same\n *__slots__*.\n\n\nCustomizing class creation\n==========================\n\nBy default, classes are constructed using "type()". The class body is\nexecuted in a new namespace and the class name is bound locally to the\nresult of "type(name, bases, namespace)".\n\nThe class creation process can be customised by passing the\n"metaclass" keyword argument in the class definition line, or by\ninheriting from an existing class that included such an argument. In\nthe following example, both "MyClass" and "MySubclass" are instances\nof "Meta":\n\n class Meta(type):\n pass\n\n class MyClass(metaclass=Meta):\n pass\n\n class MySubclass(MyClass):\n pass\n\nAny other keyword arguments that are specified in the class definition\nare passed through to all metaclass operations described below.\n\nWhen a class definition is executed, the following steps occur:\n\n* the appropriate metaclass is determined\n\n* the class namespace is prepared\n\n* the class body is executed\n\n* the class object is created\n\n\nDetermining the appropriate metaclass\n-------------------------------------\n\nThe appropriate metaclass for a class definition is determined as\nfollows:\n\n* if no bases and no explicit metaclass are given, then "type()" is\n used\n\n* if an explicit metaclass is given and it is *not* an instance of\n "type()", then it is used directly as the metaclass\n\n* if an instance of "type()" is given as the explicit metaclass, or\n bases are defined, then the most derived metaclass is used\n\nThe most derived metaclass is selected from the explicitly specified\nmetaclass (if any) and the metaclasses (i.e. "type(cls)") of all\nspecified base classes. The most derived metaclass is one which is a\nsubtype of *all* of these candidate metaclasses. If none of the\ncandidate metaclasses meets that criterion, then the class definition\nwill fail with "TypeError".\n\n\nPreparing the class namespace\n-----------------------------\n\nOnce the appropriate metaclass has been identified, then the class\nnamespace is prepared. 
If the metaclass has a "__prepare__" attribute,\nit is called as "namespace = metaclass.__prepare__(name, bases,\n**kwds)" (where the additional keyword arguments, if any, come from\nthe class definition).\n\nIf the metaclass has no "__prepare__" attribute, then the class\nnamespace is initialised as an empty "dict()" instance.\n\nSee also: **PEP 3115** - Metaclasses in Python 3000\n\n Introduced the "__prepare__" namespace hook\n\n\nExecuting the class body\n------------------------\n\nThe class body is executed (approximately) as "exec(body, globals(),\nnamespace)". The key difference from a normal call to "exec()" is that\nlexical scoping allows the class body (including any methods) to\nreference names from the current and outer scopes when the class\ndefinition occurs inside a function.\n\nHowever, even when the class definition occurs inside the function,\nmethods defined inside the class still cannot see names defined at the\nclass scope. Class variables must be accessed through the first\nparameter of instance or class methods, and cannot be accessed at all\nfrom static methods.\n\n\nCreating the class object\n-------------------------\n\nOnce the class namespace has been populated by executing the class\nbody, the class object is created by calling "metaclass(name, bases,\nnamespace, **kwds)" (the additional keywords passed here are the same\nas those passed to "__prepare__").\n\nThis class object is the one that will be referenced by the zero-\nargument form of "super()". "__class__" is an implicit closure\nreference created by the compiler if any methods in a class body refer\nto either "__class__" or "super". This allows the zero argument form\nof "super()" to correctly identify the class being defined based on\nlexical scoping, while the class or instance that was used to make the\ncurrent call is identified based on the first argument passed to the\nmethod.\n\nAfter the class object is created, it is passed to the class\ndecorators included in the class definition (if any) and the resulting\nobject is bound in the local namespace as the defined class.\n\nSee also: **PEP 3135** - New super\n\n Describes the implicit "__class__" closure reference\n\n\nMetaclass example\n-----------------\n\nThe potential uses for metaclasses are boundless. Some ideas that have\nbeen explored include logging, interface checking, automatic\ndelegation, automatic property creation, proxies, frameworks, and\nautomatic resource locking/synchronization.\n\nHere is an example of a metaclass that uses an\n"collections.OrderedDict" to remember the order that class members\nwere defined:\n\n class OrderedClass(type):\n\n @classmethod\n def __prepare__(metacls, name, bases, **kwds):\n return collections.OrderedDict()\n\n def __new__(cls, name, bases, namespace, **kwds):\n result = type.__new__(cls, name, bases, dict(namespace))\n result.members = tuple(namespace)\n return result\n\n class A(metaclass=OrderedClass):\n def one(self): pass\n def two(self): pass\n def three(self): pass\n def four(self): pass\n\n >>> A.members\n (\'__module__\', \'one\', \'two\', \'three\', \'four\')\n\nWhen the class definition for *A* gets executed, the process begins\nwith calling the metaclass\'s "__prepare__()" method which returns an\nempty "collections.OrderedDict". That mapping records the methods and\nattributes of *A* as they are defined within the body of the class\nstatement. Once those definitions are executed, the ordered dictionary\nis fully populated and the metaclass\'s "__new__()" method gets\ninvoked. 
That method builds the new type and it saves the ordered\ndictionary keys in an attribute called "members".\n\n\nCustomizing instance and subclass checks\n========================================\n\nThe following methods are used to override the default behavior of the\n"isinstance()" and "issubclass()" built-in functions.\n\nIn particular, the metaclass "abc.ABCMeta" implements these methods in\norder to allow the addition of Abstract Base Classes (ABCs) as\n"virtual base classes" to any class or type (including built-in\ntypes), including other ABCs.\n\nclass.__instancecheck__(self, instance)\n\n Return true if *instance* should be considered a (direct or\n indirect) instance of *class*. If defined, called to implement\n "isinstance(instance, class)".\n\nclass.__subclasscheck__(self, subclass)\n\n Return true if *subclass* should be considered a (direct or\n indirect) subclass of *class*. If defined, called to implement\n "issubclass(subclass, class)".\n\nNote that these methods are looked up on the type (metaclass) of a\nclass. They cannot be defined as class methods in the actual class.\nThis is consistent with the lookup of special methods that are called\non instances, only in this case the instance is itself a class.\n\nSee also: **PEP 3119** - Introducing Abstract Base Classes\n\n Includes the specification for customizing "isinstance()" and\n "issubclass()" behavior through "__instancecheck__()" and\n "__subclasscheck__()", with motivation for this functionality in\n the context of adding Abstract Base Classes (see the "abc"\n module) to the language.\n\n\nEmulating callable objects\n==========================\n\nobject.__call__(self[, args...])\n\n Called when the instance is "called" as a function; if this method\n is defined, "x(arg1, arg2, ...)" is a shorthand for\n "x.__call__(arg1, arg2, ...)".\n\n\nEmulating container types\n=========================\n\nThe following methods can be defined to implement container objects.\nContainers usually are sequences (such as lists or tuples) or mappings\n(like dictionaries), but can represent other containers as well. The\nfirst set of methods is used either to emulate a sequence or to\nemulate a mapping; the difference is that for a sequence, the\nallowable keys should be the integers *k* for which "0 <= k < N" where\n*N* is the length of the sequence, or slice objects, which define a\nrange of items. It is also recommended that mappings provide the\nmethods "keys()", "values()", "items()", "get()", "clear()",\n"setdefault()", "pop()", "popitem()", "copy()", and "update()"\nbehaving similar to those for Python\'s standard dictionary objects.\nThe "collections" module provides a "MutableMapping" abstract base\nclass to help create those methods from a base set of "__getitem__()",\n"__setitem__()", "__delitem__()", and "keys()". Mutable sequences\nshould provide methods "append()", "count()", "index()", "extend()",\n"insert()", "pop()", "remove()", "reverse()" and "sort()", like Python\nstandard list objects. Finally, sequence types should implement\naddition (meaning concatenation) and multiplication (meaning\nrepetition) by defining the methods "__add__()", "__radd__()",\n"__iadd__()", "__mul__()", "__rmul__()" and "__imul__()" described\nbelow; they should not define other numerical operators. It is\nrecommended that both mappings and sequences implement the\n"__contains__()" method to allow efficient use of the "in" operator;\nfor mappings, "in" should search the mapping\'s keys; for sequences, it\nshould search through the values. 
It is further recommended that both\nmappings and sequences implement the "__iter__()" method to allow\nefficient iteration through the container; for mappings, "__iter__()"\nshould be the same as "keys()"; for sequences, it should iterate\nthrough the values.\n\nobject.__len__(self)\n\n Called to implement the built-in function "len()". Should return\n the length of the object, an integer ">=" 0. Also, an object that\n doesn\'t define a "__bool__()" method and whose "__len__()" method\n returns zero is considered to be false in a Boolean context.\n\nobject.__length_hint__(self)\n\n Called to implement "operator.length_hint()". Should return an\n estimated length for the object (which may be greater or less than\n the actual length). The length must be an integer ">=" 0. This\n method is purely an optimization and is never required for\n correctness.\n\n New in version 3.4.\n\nNote: Slicing is done exclusively with the following three methods.\n A call like\n\n a[1:2] = b\n\n is translated to\n\n a[slice(1, 2, None)] = b\n\n and so forth. Missing slice items are always filled in with "None".\n\nobject.__getitem__(self, key)\n\n Called to implement evaluation of "self[key]". For sequence types,\n the accepted keys should be integers and slice objects. Note that\n the special interpretation of negative indexes (if the class wishes\n to emulate a sequence type) is up to the "__getitem__()" method. If\n *key* is of an inappropriate type, "TypeError" may be raised; if of\n a value outside the set of indexes for the sequence (after any\n special interpretation of negative values), "IndexError" should be\n raised. For mapping types, if *key* is missing (not in the\n container), "KeyError" should be raised.\n\n Note: "for" loops expect that an "IndexError" will be raised for\n illegal indexes to allow proper detection of the end of the\n sequence.\n\nobject.__setitem__(self, key, value)\n\n Called to implement assignment to "self[key]". Same note as for\n "__getitem__()". This should only be implemented for mappings if\n the objects support changes to the values for keys, or if new keys\n can be added, or for sequences if elements can be replaced. The\n same exceptions should be raised for improper *key* values as for\n the "__getitem__()" method.\n\nobject.__delitem__(self, key)\n\n Called to implement deletion of "self[key]". Same note as for\n "__getitem__()". This should only be implemented for mappings if\n the objects support removal of keys, or for sequences if elements\n can be removed from the sequence. The same exceptions should be\n raised for improper *key* values as for the "__getitem__()" method.\n\nobject.__iter__(self)\n\n This method is called when an iterator is required for a container.\n This method should return a new iterator object that can iterate\n over all the objects in the container. For mappings, it should\n iterate over the keys of the container, and should also be made\n available as the method "keys()".\n\n Iterator objects also need to implement this method; they are\n required to return themselves. For more information on iterator\n objects, see *Iterator Types*.\n\nobject.__reversed__(self)\n\n Called (if present) by the "reversed()" built-in to implement\n reverse iteration. It should return a new iterator object that\n iterates over all the objects in the container in reverse order.\n\n If the "__reversed__()" method is not provided, the "reversed()"\n built-in will fall back to using the sequence protocol ("__len__()"\n and "__getitem__()"). 
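A minimal sketch of that fallback, using an invented Countdown class that defines only "__len__()" and "__getitem__()":

    class Countdown:
        """Hypothetical sequence type relying purely on the sequence protocol."""
        def __init__(self, n):
            self.n = n
        def __len__(self):
            return self.n
        def __getitem__(self, index):
            if not 0 <= index < self.n:
                raise IndexError(index)     # tells iteration where to stop
            return self.n - index

    c = Countdown(3)
    print(list(c))              # [3, 2, 1] -- iteration via __getitem__()
    print(2 in c)               # True      -- membership test by iteration
    print(list(reversed(c)))    # [1, 2, 3] -- reversed() falls back to
                                #              __len__() and __getitem__()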
Objects that support the sequence protocol\n should only provide "__reversed__()" if they can provide an\n implementation that is more efficient than the one provided by\n "reversed()".\n\nThe membership test operators ("in" and "not in") are normally\nimplemented as an iteration through a sequence. However, container\nobjects can supply the following special method with a more efficient\nimplementation, which also does not require the object be a sequence.\n\nobject.__contains__(self, item)\n\n Called to implement membership test operators. Should return true\n if *item* is in *self*, false otherwise. For mapping objects, this\n should consider the keys of the mapping rather than the values or\n the key-item pairs.\n\n For objects that don\'t define "__contains__()", the membership test\n first tries iteration via "__iter__()", then the old sequence\n iteration protocol via "__getitem__()", see *this section in the\n language reference*.\n\n\nEmulating numeric types\n=======================\n\nThe following methods can be defined to emulate numeric objects.\nMethods corresponding to operations that are not supported by the\nparticular kind of number implemented (e.g., bitwise operations for\nnon-integral numbers) should be left undefined.\n\nobject.__add__(self, other)\nobject.__sub__(self, other)\nobject.__mul__(self, other)\nobject.__truediv__(self, other)\nobject.__floordiv__(self, other)\nobject.__mod__(self, other)\nobject.__divmod__(self, other)\nobject.__pow__(self, other[, modulo])\nobject.__lshift__(self, other)\nobject.__rshift__(self, other)\nobject.__and__(self, other)\nobject.__xor__(self, other)\nobject.__or__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations ("+", "-", "*", "/", "//", "%", "divmod()", "pow()",\n "**", "<<", ">>", "&", "^", "|"). For instance, to evaluate the\n expression "x + y", where *x* is an instance of a class that has an\n "__add__()" method, "x.__add__(y)" is called. The "__divmod__()"\n method should be the equivalent to using "__floordiv__()" and\n "__mod__()"; it should not be related to "__truediv__()". Note\n that "__pow__()" should be defined to accept an optional third\n argument if the ternary version of the built-in "pow()" function is\n to be supported.\n\n If one of those methods does not support the operation with the\n supplied arguments, it should return "NotImplemented".\n\nobject.__radd__(self, other)\nobject.__rsub__(self, other)\nobject.__rmul__(self, other)\nobject.__rtruediv__(self, other)\nobject.__rfloordiv__(self, other)\nobject.__rmod__(self, other)\nobject.__rdivmod__(self, other)\nobject.__rpow__(self, other)\nobject.__rlshift__(self, other)\nobject.__rrshift__(self, other)\nobject.__rand__(self, other)\nobject.__rxor__(self, other)\nobject.__ror__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations ("+", "-", "*", "/", "//", "%", "divmod()", "pow()",\n "**", "<<", ">>", "&", "^", "|") with reflected (swapped) operands.\n These functions are only called if the left operand does not\n support the corresponding operation and the operands are of\n different types. 
[2] For instance, to evaluate the expression "x -\n y", where *y* is an instance of a class that has an "__rsub__()"\n method, "y.__rsub__(x)" is called if "x.__sub__(y)" returns\n *NotImplemented*.\n\n Note that ternary "pow()" will not try calling "__rpow__()" (the\n coercion rules would become too complicated).\n\n Note: If the right operand\'s type is a subclass of the left\n operand\'s type and that subclass provides the reflected method\n for the operation, this method will be called before the left\n operand\'s non-reflected method. This behavior allows subclasses\n to override their ancestors\' operations.\n\nobject.__iadd__(self, other)\nobject.__isub__(self, other)\nobject.__imul__(self, other)\nobject.__itruediv__(self, other)\nobject.__ifloordiv__(self, other)\nobject.__imod__(self, other)\nobject.__ipow__(self, other[, modulo])\nobject.__ilshift__(self, other)\nobject.__irshift__(self, other)\nobject.__iand__(self, other)\nobject.__ixor__(self, other)\nobject.__ior__(self, other)\n\n These methods are called to implement the augmented arithmetic\n assignments ("+=", "-=", "*=", "/=", "//=", "%=", "**=", "<<=",\n ">>=", "&=", "^=", "|="). These methods should attempt to do the\n operation in-place (modifying *self*) and return the result (which\n could be, but does not have to be, *self*). If a specific method\n is not defined, the augmented assignment falls back to the normal\n methods. For instance, to execute the statement "x += y", where\n *x* is an instance of a class that has an "__iadd__()" method,\n "x.__iadd__(y)" is called. If *x* is an instance of a class that\n does not define a "__iadd__()" method, "x.__add__(y)" and\n "y.__radd__(x)" are considered, as with the evaluation of "x + y".\n\nobject.__neg__(self)\nobject.__pos__(self)\nobject.__abs__(self)\nobject.__invert__(self)\n\n Called to implement the unary arithmetic operations ("-", "+",\n "abs()" and "~").\n\nobject.__complex__(self)\nobject.__int__(self)\nobject.__float__(self)\nobject.__round__(self[, n])\n\n Called to implement the built-in functions "complex()", "int()",\n "float()" and "round()". Should return a value of the appropriate\n type.\n\nobject.__index__(self)\n\n Called to implement "operator.index()", and whenever Python needs\n to losslessly convert the numeric object to an integer object (such\n as in slicing, or in the built-in "bin()", "hex()" and "oct()"\n functions). Presence of this method indicates that the numeric\n object is an integer type. Must return an integer.\n\n Note: When "__index__()" is defined, "__int__()" should also be\n defined, and both shuld return the same value, in order to have a\n coherent integer type class.\n\n\nWith Statement Context Managers\n===============================\n\nA *context manager* is an object that defines the runtime context to\nbe established when executing a "with" statement. The context manager\nhandles the entry into, and the exit from, the desired runtime context\nfor the execution of the block of code. Context managers are normally\ninvoked using the "with" statement (described in section *The with\nstatement*), but can also be used by directly invoking their methods.\n\nTypical uses of context managers include saving and restoring various\nkinds of global state, locking and unlocking resources, closing opened\nfiles, etc.\n\nFor more information on context managers, see *Context Manager Types*.\n\nobject.__enter__(self)\n\n Enter the runtime context related to this object. 
The "with"\n statement will bind this method\'s return value to the target(s)\n specified in the "as" clause of the statement, if any.\n\nobject.__exit__(self, exc_type, exc_value, traceback)\n\n Exit the runtime context related to this object. The parameters\n describe the exception that caused the context to be exited. If the\n context was exited without an exception, all three arguments will\n be "None".\n\n If an exception is supplied, and the method wishes to suppress the\n exception (i.e., prevent it from being propagated), it should\n return a true value. Otherwise, the exception will be processed\n normally upon exit from this method.\n\n Note that "__exit__()" methods should not reraise the passed-in\n exception; this is the caller\'s responsibility.\n\nSee also: **PEP 0343** - The "with" statement\n\n The specification, background, and examples for the Python "with"\n statement.\n\n\nSpecial method lookup\n=====================\n\nFor custom classes, implicit invocations of special methods are only\nguaranteed to work correctly if defined on an object\'s type, not in\nthe object\'s instance dictionary. That behaviour is the reason why\nthe following code raises an exception:\n\n >>> class C:\n ... pass\n ...\n >>> c = C()\n >>> c.__len__ = lambda: 5\n >>> len(c)\n Traceback (most recent call last):\n File "", line 1, in \n TypeError: object of type \'C\' has no len()\n\nThe rationale behind this behaviour lies with a number of special\nmethods such as "__hash__()" and "__repr__()" that are implemented by\nall objects, including type objects. If the implicit lookup of these\nmethods used the conventional lookup process, they would fail when\ninvoked on the type object itself:\n\n >>> 1 .__hash__() == hash(1)\n True\n >>> int.__hash__() == hash(int)\n Traceback (most recent call last):\n File "", line 1, in \n TypeError: descriptor \'__hash__\' of \'int\' object needs an argument\n\nIncorrectly attempting to invoke an unbound method of a class in this\nway is sometimes referred to as \'metaclass confusion\', and is avoided\nby bypassing the instance when looking up special methods:\n\n >>> type(1).__hash__(1) == hash(1)\n True\n >>> type(int).__hash__(int) == hash(int)\n True\n\nIn addition to bypassing any instance attributes in the interest of\ncorrectness, implicit special method lookup generally also bypasses\nthe "__getattribute__()" method even of the object\'s metaclass:\n\n >>> class Meta(type):\n ... def __getattribute__(*args):\n ... print("Metaclass getattribute invoked")\n ... return type.__getattribute__(*args)\n ...\n >>> class C(object, metaclass=Meta):\n ... def __len__(self):\n ... return 10\n ... def __getattribute__(*args):\n ... print("Class getattribute invoked")\n ... return object.__getattribute__(*args)\n ...\n >>> c = C()\n >>> c.__len__() # Explicit lookup via instance\n Class getattribute invoked\n 10\n >>> type(c).__len__(c) # Explicit lookup via type\n Metaclass getattribute invoked\n 10\n >>> len(c) # Implicit lookup\n 10\n\nBypassing the "__getattribute__()" machinery in this fashion provides\nsignificant scope for speed optimisations within the interpreter, at\nthe cost of some flexibility in the handling of special methods (the\nspecial method *must* be set on the class object itself in order to be\nconsistently invoked by the interpreter).\n\n-[ Footnotes ]-\n\n[1] It *is* possible in some cases to change an object\'s type,\n under certain controlled conditions. 
It generally isn\'t a good\n idea though, since it can lead to some very strange behaviour if\n it is handled incorrectly.\n\n[2] For operands of the same type, it is assumed that if the non-\n reflected method (such as "__add__()") fails the operation is not\n supported, which is why the reflected method is not called.\n', + 'specialnames': '\nSpecial method names\n********************\n\nA class can implement certain operations that are invoked by special\nsyntax (such as arithmetic operations or subscripting and slicing) by\ndefining methods with special names. This is Python\'s approach to\n*operator overloading*, allowing classes to define their own behavior\nwith respect to language operators. For instance, if a class defines\na method named "__getitem__()", and "x" is an instance of this class,\nthen "x[i]" is roughly equivalent to "type(x).__getitem__(x, i)".\nExcept where mentioned, attempts to execute an operation raise an\nexception when no appropriate method is defined (typically\n"AttributeError" or "TypeError").\n\nWhen implementing a class that emulates any built-in type, it is\nimportant that the emulation only be implemented to the degree that it\nmakes sense for the object being modelled. For example, some\nsequences may work well with retrieval of individual elements, but\nextracting a slice may not make sense. (One example of this is the\n"NodeList" interface in the W3C\'s Document Object Model.)\n\n\nBasic customization\n===================\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. "__new__()" is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). The return\n value of "__new__()" should be the new object instance (usually an\n instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s "__new__()" method using\n "super(currentclass, cls).__new__(cls[, ...])" with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If "__new__()" returns an instance of *cls*, then the new\n instance\'s "__init__()" method will be invoked like\n "__init__(self[, ...])", where *self* is the new instance and the\n remaining arguments are the same as were passed to "__new__()".\n\n If "__new__()" does not return an instance of *cls*, then the new\n instance\'s "__init__()" method will not be invoked.\n\n "__new__()" is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. If a base class has an\n "__init__()" method, the derived class\'s "__init__()" method, if\n any, must explicitly call it to ensure proper initialization of the\n base class part of the instance; for example:\n "BaseClass.__init__(self, [args...])". As a special constraint on\n constructors, no value may be returned; doing so will cause a\n "TypeError" to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. This is also\n called a destructor. 
If a base class has a "__del__()" method, the\n derived class\'s "__del__()" method, if any, must explicitly call it\n to ensure proper deletion of the base class part of the instance.\n Note that it is possible (though not recommended!) for the\n "__del__()" method to postpone destruction of the instance by\n creating a new reference to it. It may then be called at a later\n time when this new reference is deleted. It is not guaranteed that\n "__del__()" methods are called for objects that still exist when\n the interpreter exits.\n\n Note: "del x" doesn\'t directly call "x.__del__()" --- the former\n decrements the reference count for "x" by one, and the latter is\n only called when "x"\'s reference count reaches zero. Some common\n situations that may prevent the reference count of an object from\n going to zero include: circular references between objects (e.g.,\n a doubly-linked list or a tree data structure with parent and\n child pointers); a reference to the object on the stack frame of\n a function that caught an exception (the traceback stored in\n "sys.exc_info()[2]" keeps the stack frame alive); or a reference\n to the object on the stack frame that raised an unhandled\n exception in interactive mode (the traceback stored in\n "sys.last_traceback" keeps the stack frame alive). The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing "None" in\n "sys.last_traceback". Circular references which are garbage are\n detected and cleaned up when the cyclic garbage collector is\n enabled (it\'s on by default). Refer to the documentation for the\n "gc" module for more information about this topic.\n\n Warning: Due to the precarious circumstances under which\n "__del__()" methods are invoked, exceptions that occur during\n their execution are ignored, and a warning is printed to\n "sys.stderr" instead. Also, when "__del__()" is invoked in\n response to a module being deleted (e.g., when execution of the\n program is done), other globals referenced by the "__del__()"\n method may already have been deleted or in the process of being\n torn down (e.g. the import machinery shutting down). For this\n reason, "__del__()" methods should do the absolute minimum needed\n to maintain external invariants. Starting with version 1.5,\n Python guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the "__del__()" method is called.\n\nobject.__repr__(self)\n\n Called by the "repr()" built-in function to compute the "official"\n string representation of an object. If at all possible, this\n should look like a valid Python expression that could be used to\n recreate an object with the same value (given an appropriate\n environment). If this is not possible, a string of the form\n "<...some useful description...>" should be returned. The return\n value must be a string object. 
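Before moving on, here is a minimal sketch of the "__new__()"/"__init__()" interplay described above, using a hypothetical immutable subclass (the name "UpperStr" is invented for this illustration):

   >>> class UpperStr(str):
   ...     # __new__() runs first and actually builds the (immutable) instance
   ...     def __new__(cls, value):
   ...         return super().__new__(cls, value.upper())
   ...     # __init__() then only decorates the already-built instance
   ...     def __init__(self, value):
   ...         self.original = value
   ...
   >>> s = UpperStr('spam')
   >>> s
   'SPAM'
   >>> s.original
   'spam'

Because "__new__()" returns an instance of *cls*, "__init__()" is invoked afterwards with the same arguments.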
If a class defines "__repr__()" but\n not "__str__()", then "__repr__()" is also used when an "informal"\n string representation of instances of that class is required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by "str(object)" and the built-in functions "format()" and\n "print()" to compute the "informal" or nicely printable string\n representation of an object. The return value must be a *string*\n object.\n\n This method differs from "object.__repr__()" in that there is no\n expectation that "__str__()" return a valid Python expression: a\n more convenient or concise representation can be used.\n\n The default implementation defined by the built-in type "object"\n calls "object.__repr__()".\n\nobject.__bytes__(self)\n\n Called by "bytes()" to compute a byte-string representation of an\n object. This should return a "bytes" object.\n\nobject.__format__(self, format_spec)\n\n Called by the "format()" built-in function (and by extension, the\n "str.format()" method of class "str") to produce a "formatted"\n string representation of an object. The "format_spec" argument is a\n string that contains a description of the formatting options\n desired. The interpretation of the "format_spec" argument is up to\n the type implementing "__format__()", however most classes will\n either delegate formatting to one of the built-in types, or use a\n similar formatting option syntax.\n\n See *Format Specification Mini-Language* for a description of the\n standard formatting syntax.\n\n The return value must be a string object.\n\n Changed in version 3.4: The __format__ method of "object" itself\n raises a "TypeError" if passed any non-empty string.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n These are the so-called "rich comparison" methods. The\n correspondence between operator symbols and method names is as\n follows: "xy" calls\n "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".\n\n A rich comparison method may return the singleton "NotImplemented"\n if it does not implement the operation for a given pair of\n arguments. By convention, "False" and "True" are returned for a\n successful comparison. However, these methods can return any value,\n so if the comparison operator is used in a Boolean context (e.g.,\n in the condition of an "if" statement), Python will call "bool()"\n on the value to determine if the result is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of "x==y" does not imply that "x!=y" is false.\n Accordingly, when defining "__eq__()", one should also define\n "__ne__()" so that the operators will behave as expected. 
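As a small, purely illustrative sketch of the "__repr__()"/"__str__()" division of labour just described (the "Point" class is made up):

   >>> class Point:
   ...     def __init__(self, x, y):
   ...         self.x, self.y = x, y
   ...     def __repr__(self):
   ...         # unambiguous; looks like the expression that recreates it
   ...         return 'Point({!r}, {!r})'.format(self.x, self.y)
   ...     def __str__(self):
   ...         # concise, for display only
   ...         return '({}, {})'.format(self.x, self.y)
   ...
   >>> p = Point(1, 2)
   >>> repr(p)
   'Point(1, 2)'
   >>> print(p)
   (1, 2)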
See the\n paragraph on "__hash__()" for some important notes on creating\n *hashable* objects which support custom comparison operations and\n are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, "__lt__()" and "__gt__()" are each other\'s\n reflection, "__le__()" and "__ge__()" are each other\'s reflection,\n and "__eq__()" and "__ne__()" are their own reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see "functools.total_ordering()".\n\nobject.__hash__(self)\n\n Called by built-in function "hash()" and for operations on members\n of hashed collections including "set", "frozenset", and "dict".\n "__hash__()" should return an integer. The only required property\n is that objects which compare equal have the same hash value; it is\n advised to somehow mix together (e.g. using exclusive or) the hash\n values for the components of the object that also play a part in\n comparison of objects.\n\n Note: "hash()" truncates the value returned from an object\'s\n custom "__hash__()" method to the size of a "Py_ssize_t". This\n is typically 8 bytes on 64-bit builds and 4 bytes on 32-bit\n builds. If an object\'s "__hash__()" must interoperate on builds\n of different bit sizes, be sure to check the width on all\n supported builds. An easy way to do this is with "python -c\n "import sys; print(sys.hash_info.width)""\n\n If a class does not define an "__eq__()" method it should not\n define a "__hash__()" operation either; if it defines "__eq__()"\n but not "__hash__()", its instances will not be usable as items in\n hashable collections. If a class defines mutable objects and\n implements an "__eq__()" method, it should not implement\n "__hash__()", since the implementation of hashable collections\n requires that a key\'s hash value is immutable (if the object\'s hash\n value changes, it will be in the wrong hash bucket).\n\n User-defined classes have "__eq__()" and "__hash__()" methods by\n default; with them, all objects compare unequal (except with\n themselves) and "x.__hash__()" returns an appropriate value such\n that "x == y" implies both that "x is y" and "hash(x) == hash(y)".\n\n A class that overrides "__eq__()" and does not define "__hash__()"\n will have its "__hash__()" implicitly set to "None". When the\n "__hash__()" method of a class is "None", instances of the class\n will raise an appropriate "TypeError" when a program attempts to\n retrieve their hash value, and will also be correctly identified as\n unhashable when checking "isinstance(obj, collections.Hashable").\n\n If a class that overrides "__eq__()" needs to retain the\n implementation of "__hash__()" from a parent class, the interpreter\n must be told this explicitly by setting "__hash__ =\n .__hash__".\n\n If a class that does not override "__eq__()" wishes to suppress\n hash support, it should include "__hash__ = None" in the class\n definition. 
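A minimal sketch of the "__eq__()"/"__hash__()" contract discussed above; "functools.total_ordering()" fills in the remaining comparison methods from "__eq__()" and "__lt__()" (the "Version" class is hypothetical):

   >>> import functools
   >>> @functools.total_ordering
   ... class Version:
   ...     def __init__(self, major, minor):
   ...         self.major, self.minor = major, minor
   ...     def __eq__(self, other):
   ...         if not isinstance(other, Version):
   ...             return NotImplemented
   ...         return (self.major, self.minor) == (other.major, other.minor)
   ...     def __lt__(self, other):
   ...         if not isinstance(other, Version):
   ...             return NotImplemented
   ...         return (self.major, self.minor) < (other.major, other.minor)
   ...     def __hash__(self):
   ...         # mix the components that also take part in comparison
   ...         return hash((self.major, self.minor))
   ...
   >>> Version(3, 4) > Version(3, 3)
   True
   >>> len({Version(3, 4), Version(3, 4)})
   1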
A class which defines its own "__hash__()" that\n explicitly raises a "TypeError" would be incorrectly identified as\n hashable by an "isinstance(obj, collections.Hashable)" call.\n\n Note: By default, the "__hash__()" values of str, bytes and\n datetime objects are "salted" with an unpredictable random value.\n Although they remain constant within an individual Python\n process, they are not predictable between repeated invocations of\n Python.This is intended to provide protection against a denial-\n of-service caused by carefully-chosen inputs that exploit the\n worst case performance of a dict insertion, O(n^2) complexity.\n See http://www.ocert.org/advisories/ocert-2011-003.html for\n details.Changing hash values affects the iteration order of\n dicts, sets and other mappings. Python has never made guarantees\n about this ordering (and it typically varies between 32-bit and\n 64-bit builds).See also "PYTHONHASHSEED".\n\n Changed in version 3.3: Hash randomization is enabled by default.\n\nobject.__bool__(self)\n\n Called to implement truth value testing and the built-in operation\n "bool()"; should return "False" or "True". When this method is not\n defined, "__len__()" is called, if it is defined, and the object is\n considered true if its result is nonzero. If a class defines\n neither "__len__()" nor "__bool__()", all its instances are\n considered true.\n\n\nCustomizing attribute access\n============================\n\nThe following methods can be defined to customize the meaning of\nattribute access (use of, assignment to, or deletion of "x.name") for\nclass instances.\n\nobject.__getattr__(self, name)\n\n Called when an attribute lookup has not found the attribute in the\n usual places (i.e. it is not an instance attribute nor is it found\n in the class tree for "self"). "name" is the attribute name. This\n method should return the (computed) attribute value or raise an\n "AttributeError" exception.\n\n Note that if the attribute is found through the normal mechanism,\n "__getattr__()" is not called. (This is an intentional asymmetry\n between "__getattr__()" and "__setattr__()".) This is done both for\n efficiency reasons and because otherwise "__getattr__()" would have\n no way to access other attributes of the instance. Note that at\n least for instance variables, you can fake total control by not\n inserting any values in the instance attribute dictionary (but\n instead inserting them in another object). See the\n "__getattribute__()" method below for a way to actually get total\n control over attribute access.\n\nobject.__getattribute__(self, name)\n\n Called unconditionally to implement attribute accesses for\n instances of the class. If the class also defines "__getattr__()",\n the latter will not be called unless "__getattribute__()" either\n calls it explicitly or raises an "AttributeError". This method\n should return the (computed) attribute value or raise an\n "AttributeError" exception. In order to avoid infinite recursion in\n this method, its implementation should always call the base class\n method with the same name to access any attributes it needs, for\n example, "object.__getattribute__(self, name)".\n\n Note: This method may still be bypassed when looking up special\n methods as the result of implicit invocation via language syntax\n or built-in functions. See *Special method lookup*.\n\nobject.__setattr__(self, name, value)\n\n Called when an attribute assignment is attempted. This is called\n instead of the normal mechanism (i.e. 
store the value in the\n instance dictionary). *name* is the attribute name, *value* is the\n value to be assigned to it.\n\n If "__setattr__()" wants to assign to an instance attribute, it\n should call the base class method with the same name, for example,\n "object.__setattr__(self, name, value)".\n\nobject.__delattr__(self, name)\n\n Like "__setattr__()" but for attribute deletion instead of\n assignment. This should only be implemented if "del obj.name" is\n meaningful for the object.\n\nobject.__dir__(self)\n\n Called when "dir()" is called on the object. A sequence must be\n returned. "dir()" converts the returned sequence to a list and\n sorts it.\n\n\nImplementing Descriptors\n------------------------\n\nThe following methods only apply when an instance of the class\ncontaining the method (a so-called *descriptor* class) appears in an\n*owner* class (the descriptor must be in either the owner\'s class\ndictionary or in the class dictionary for one of its parents). In the\nexamples below, "the attribute" refers to the attribute whose name is\nthe key of the property in the owner class\' "__dict__".\n\nobject.__get__(self, instance, owner)\n\n Called to get the attribute of the owner class (class attribute\n access) or of an instance of that class (instance attribute\n access). *owner* is always the owner class, while *instance* is the\n instance that the attribute was accessed through, or "None" when\n the attribute is accessed through the *owner*. This method should\n return the (computed) attribute value or raise an "AttributeError"\n exception.\n\nobject.__set__(self, instance, value)\n\n Called to set the attribute on an instance *instance* of the owner\n class to a new value, *value*.\n\nobject.__delete__(self, instance)\n\n Called to delete the attribute on an instance *instance* of the\n owner class.\n\n\nInvoking Descriptors\n--------------------\n\nIn general, a descriptor is an object attribute with "binding\nbehavior", one whose attribute access has been overridden by methods\nin the descriptor protocol: "__get__()", "__set__()", and\n"__delete__()". If any of those methods are defined for an object, it\nis said to be a descriptor.\n\nThe default behavior for attribute access is to get, set, or delete\nthe attribute from an object\'s dictionary. For instance, "a.x" has a\nlookup chain starting with "a.__dict__[\'x\']", then\n"type(a).__dict__[\'x\']", and continuing through the base classes of\n"type(a)" excluding metaclasses.\n\nHowever, if the looked-up value is an object defining one of the\ndescriptor methods, then Python may override the default behavior and\ninvoke the descriptor method instead. Where this occurs in the\nprecedence chain depends on which descriptor methods were defined and\nhow they were called.\n\nThe starting point for descriptor invocation is a binding, "a.x". 
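As an illustration of the descriptor protocol just described, here is a small data descriptor (it defines both "__get__()" and "__set__()"); the names are invented for this sketch:

   >>> class Positive:
   ...     # A data descriptor that rejects non-positive values.
   ...     def __init__(self, name):
   ...         self.name = name            # key used in the instance dict
   ...     def __get__(self, instance, owner):
   ...         if instance is None:
   ...             return self             # accessed on the owner class itself
   ...         return instance.__dict__[self.name]
   ...     def __set__(self, instance, value):
   ...         if value <= 0:
   ...             raise ValueError('must be positive')
   ...         instance.__dict__[self.name] = value
   ...
   >>> class Order:
   ...     quantity = Positive('quantity')
   ...     def __init__(self, quantity):
   ...         self.quantity = quantity    # routed through Positive.__set__()
   ...
   >>> Order(3).quantity
   3
   >>> Order(0)
   Traceback (most recent call last):
     ...
   ValueError: must be positive

Because "Positive" defines "__set__()", it is a data descriptor and takes precedence over the value it stores in the instance dictionary.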
How\nthe arguments are assembled depends on "a":\n\nDirect Call\n The simplest and least common call is when user code directly\n invokes a descriptor method: "x.__get__(a)".\n\nInstance Binding\n If binding to an object instance, "a.x" is transformed into the\n call: "type(a).__dict__[\'x\'].__get__(a, type(a))".\n\nClass Binding\n If binding to a class, "A.x" is transformed into the call:\n "A.__dict__[\'x\'].__get__(None, A)".\n\nSuper Binding\n If "a" is an instance of "super", then the binding "super(B,\n obj).m()" searches "obj.__class__.__mro__" for the base class "A"\n immediately preceding "B" and then invokes the descriptor with the\n call: "A.__dict__[\'m\'].__get__(obj, obj.__class__)".\n\nFor instance bindings, the precedence of descriptor invocation depends\non the which descriptor methods are defined. A descriptor can define\nany combination of "__get__()", "__set__()" and "__delete__()". If it\ndoes not define "__get__()", then accessing the attribute will return\nthe descriptor object itself unless there is a value in the object\'s\ninstance dictionary. If the descriptor defines "__set__()" and/or\n"__delete__()", it is a data descriptor; if it defines neither, it is\na non-data descriptor. Normally, data descriptors define both\n"__get__()" and "__set__()", while non-data descriptors have just the\n"__get__()" method. Data descriptors with "__set__()" and "__get__()"\ndefined always override a redefinition in an instance dictionary. In\ncontrast, non-data descriptors can be overridden by instances.\n\nPython methods (including "staticmethod()" and "classmethod()") are\nimplemented as non-data descriptors. Accordingly, instances can\nredefine and override methods. This allows individual instances to\nacquire behaviors that differ from other instances of the same class.\n\nThe "property()" function is implemented as a data descriptor.\nAccordingly, instances cannot override the behavior of a property.\n\n\n__slots__\n---------\n\nBy default, instances of classes have a dictionary for attribute\nstorage. This wastes space for objects having very few instance\nvariables. The space consumption can become acute when creating large\nnumbers of instances.\n\nThe default can be overridden by defining *__slots__* in a class\ndefinition. The *__slots__* declaration takes a sequence of instance\nvariables and reserves just enough space in each instance to hold a\nvalue for each variable. Space is saved because *__dict__* is not\ncreated for each instance.\n\nobject.__slots__\n\n This class variable can be assigned a string, iterable, or sequence\n of strings with variable names used by instances. If defined in a\n class, *__slots__* reserves space for the declared variables and\n prevents the automatic creation of *__dict__* and *__weakref__* for\n each instance.\n\n\nNotes on using *__slots__*\n~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n* When inheriting from a class without *__slots__*, the *__dict__*\n attribute of that class will always be accessible, so a *__slots__*\n definition in the subclass is meaningless.\n\n* Without a *__dict__* variable, instances cannot be assigned new\n variables not listed in the *__slots__* definition. Attempts to\n assign to an unlisted variable name raises "AttributeError". If\n dynamic assignment of new variables is desired, then add\n "\'__dict__\'" to the sequence of strings in the *__slots__*\n declaration.\n\n* Without a *__weakref__* variable for each instance, classes\n defining *__slots__* do not support weak references to its\n instances. 
If weak reference support is needed, then add\n "\'__weakref__\'" to the sequence of strings in the *__slots__*\n declaration.\n\n* *__slots__* are implemented at the class level by creating\n descriptors (*Implementing Descriptors*) for each variable name. As\n a result, class attributes cannot be used to set default values for\n instance variables defined by *__slots__*; otherwise, the class\n attribute would overwrite the descriptor assignment.\n\n* The action of a *__slots__* declaration is limited to the class\n where it is defined. As a result, subclasses will have a *__dict__*\n unless they also define *__slots__* (which must only contain names\n of any *additional* slots).\n\n* If a class defines a slot also defined in a base class, the\n instance variable defined by the base class slot is inaccessible\n (except by retrieving its descriptor directly from the base class).\n This renders the meaning of the program undefined. In the future, a\n check may be added to prevent this.\n\n* Nonempty *__slots__* does not work for classes derived from\n "variable-length" built-in types such as "int", "bytes" and "tuple".\n\n* Any non-string iterable may be assigned to *__slots__*. Mappings\n may also be used; however, in the future, special meaning may be\n assigned to the values corresponding to each key.\n\n* *__class__* assignment works only if both classes have the same\n *__slots__*.\n\n\nCustomizing class creation\n==========================\n\nBy default, classes are constructed using "type()". The class body is\nexecuted in a new namespace and the class name is bound locally to the\nresult of "type(name, bases, namespace)".\n\nThe class creation process can be customised by passing the\n"metaclass" keyword argument in the class definition line, or by\ninheriting from an existing class that included such an argument. In\nthe following example, both "MyClass" and "MySubclass" are instances\nof "Meta":\n\n class Meta(type):\n pass\n\n class MyClass(metaclass=Meta):\n pass\n\n class MySubclass(MyClass):\n pass\n\nAny other keyword arguments that are specified in the class definition\nare passed through to all metaclass operations described below.\n\nWhen a class definition is executed, the following steps occur:\n\n* the appropriate metaclass is determined\n\n* the class namespace is prepared\n\n* the class body is executed\n\n* the class object is created\n\n\nDetermining the appropriate metaclass\n-------------------------------------\n\nThe appropriate metaclass for a class definition is determined as\nfollows:\n\n* if no bases and no explicit metaclass are given, then "type()" is\n used\n\n* if an explicit metaclass is given and it is *not* an instance of\n "type()", then it is used directly as the metaclass\n\n* if an instance of "type()" is given as the explicit metaclass, or\n bases are defined, then the most derived metaclass is used\n\nThe most derived metaclass is selected from the explicitly specified\nmetaclass (if any) and the metaclasses (i.e. "type(cls)") of all\nspecified base classes. The most derived metaclass is one which is a\nsubtype of *all* of these candidate metaclasses. If none of the\ncandidate metaclasses meets that criterion, then the class definition\nwill fail with "TypeError".\n\n\nPreparing the class namespace\n-----------------------------\n\nOnce the appropriate metaclass has been identified, then the class\nnamespace is prepared. 
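The "TypeError" mentioned at the end of this rule can be reproduced with two unrelated metaclasses (all names here are invented for the sketch):

   >>> class MetaA(type):
   ...     pass
   ...
   >>> class MetaB(type):
   ...     pass
   ...
   >>> class A(metaclass=MetaA):
   ...     pass
   ...
   >>> class B(metaclass=MetaB):
   ...     pass
   ...
   >>> class C(A, B):
   ...     pass
   ...
   Traceback (most recent call last):
     ...
   TypeError: metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases

Neither "MetaA" nor "MetaB" is a subclass of the other, so no most derived metaclass exists for "C".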
If the metaclass has a "__prepare__" attribute,\nit is called as "namespace = metaclass.__prepare__(name, bases,\n**kwds)" (where the additional keyword arguments, if any, come from\nthe class definition).\n\nIf the metaclass has no "__prepare__" attribute, then the class\nnamespace is initialised as an empty "dict()" instance.\n\nSee also: **PEP 3115** - Metaclasses in Python 3000\n\n Introduced the "__prepare__" namespace hook\n\n\nExecuting the class body\n------------------------\n\nThe class body is executed (approximately) as "exec(body, globals(),\nnamespace)". The key difference from a normal call to "exec()" is that\nlexical scoping allows the class body (including any methods) to\nreference names from the current and outer scopes when the class\ndefinition occurs inside a function.\n\nHowever, even when the class definition occurs inside the function,\nmethods defined inside the class still cannot see names defined at the\nclass scope. Class variables must be accessed through the first\nparameter of instance or class methods, and cannot be accessed at all\nfrom static methods.\n\n\nCreating the class object\n-------------------------\n\nOnce the class namespace has been populated by executing the class\nbody, the class object is created by calling "metaclass(name, bases,\nnamespace, **kwds)" (the additional keywords passed here are the same\nas those passed to "__prepare__").\n\nThis class object is the one that will be referenced by the zero-\nargument form of "super()". "__class__" is an implicit closure\nreference created by the compiler if any methods in a class body refer\nto either "__class__" or "super". This allows the zero argument form\nof "super()" to correctly identify the class being defined based on\nlexical scoping, while the class or instance that was used to make the\ncurrent call is identified based on the first argument passed to the\nmethod.\n\nAfter the class object is created, it is passed to the class\ndecorators included in the class definition (if any) and the resulting\nobject is bound in the local namespace as the defined class.\n\nSee also: **PEP 3135** - New super\n\n Describes the implicit "__class__" closure reference\n\n\nMetaclass example\n-----------------\n\nThe potential uses for metaclasses are boundless. Some ideas that have\nbeen explored include logging, interface checking, automatic\ndelegation, automatic property creation, proxies, frameworks, and\nautomatic resource locking/synchronization.\n\nHere is an example of a metaclass that uses an\n"collections.OrderedDict" to remember the order that class members\nwere defined:\n\n class OrderedClass(type):\n\n @classmethod\n def __prepare__(metacls, name, bases, **kwds):\n return collections.OrderedDict()\n\n def __new__(cls, name, bases, namespace, **kwds):\n result = type.__new__(cls, name, bases, dict(namespace))\n result.members = tuple(namespace)\n return result\n\n class A(metaclass=OrderedClass):\n def one(self): pass\n def two(self): pass\n def three(self): pass\n def four(self): pass\n\n >>> A.members\n (\'__module__\', \'one\', \'two\', \'three\', \'four\')\n\nWhen the class definition for *A* gets executed, the process begins\nwith calling the metaclass\'s "__prepare__()" method which returns an\nempty "collections.OrderedDict". That mapping records the methods and\nattributes of *A* as they are defined within the body of the class\nstatement. Once those definitions are executed, the ordered dictionary\nis fully populated and the metaclass\'s "__new__()" method gets\ninvoked. 
That method builds the new type and it saves the ordered\ndictionary keys in an attribute called "members".\n\n\nCustomizing instance and subclass checks\n========================================\n\nThe following methods are used to override the default behavior of the\n"isinstance()" and "issubclass()" built-in functions.\n\nIn particular, the metaclass "abc.ABCMeta" implements these methods in\norder to allow the addition of Abstract Base Classes (ABCs) as\n"virtual base classes" to any class or type (including built-in\ntypes), including other ABCs.\n\nclass.__instancecheck__(self, instance)\n\n Return true if *instance* should be considered a (direct or\n indirect) instance of *class*. If defined, called to implement\n "isinstance(instance, class)".\n\nclass.__subclasscheck__(self, subclass)\n\n Return true if *subclass* should be considered a (direct or\n indirect) subclass of *class*. If defined, called to implement\n "issubclass(subclass, class)".\n\nNote that these methods are looked up on the type (metaclass) of a\nclass. They cannot be defined as class methods in the actual class.\nThis is consistent with the lookup of special methods that are called\non instances, only in this case the instance is itself a class.\n\nSee also: **PEP 3119** - Introducing Abstract Base Classes\n\n Includes the specification for customizing "isinstance()" and\n "issubclass()" behavior through "__instancecheck__()" and\n "__subclasscheck__()", with motivation for this functionality in\n the context of adding Abstract Base Classes (see the "abc"\n module) to the language.\n\n\nEmulating callable objects\n==========================\n\nobject.__call__(self[, args...])\n\n Called when the instance is "called" as a function; if this method\n is defined, "x(arg1, arg2, ...)" is a shorthand for\n "x.__call__(arg1, arg2, ...)".\n\n\nEmulating container types\n=========================\n\nThe following methods can be defined to implement container objects.\nContainers usually are sequences (such as lists or tuples) or mappings\n(like dictionaries), but can represent other containers as well. The\nfirst set of methods is used either to emulate a sequence or to\nemulate a mapping; the difference is that for a sequence, the\nallowable keys should be the integers *k* for which "0 <= k < N" where\n*N* is the length of the sequence, or slice objects, which define a\nrange of items. It is also recommended that mappings provide the\nmethods "keys()", "values()", "items()", "get()", "clear()",\n"setdefault()", "pop()", "popitem()", "copy()", and "update()"\nbehaving similar to those for Python\'s standard dictionary objects.\nThe "collections" module provides a "MutableMapping" abstract base\nclass to help create those methods from a base set of "__getitem__()",\n"__setitem__()", "__delitem__()", and "keys()". Mutable sequences\nshould provide methods "append()", "count()", "index()", "extend()",\n"insert()", "pop()", "remove()", "reverse()" and "sort()", like Python\nstandard list objects. Finally, sequence types should implement\naddition (meaning concatenation) and multiplication (meaning\nrepetition) by defining the methods "__add__()", "__radd__()",\n"__iadd__()", "__mul__()", "__rmul__()" and "__imul__()" described\nbelow; they should not define other numerical operators. It is\nrecommended that both mappings and sequences implement the\n"__contains__()" method to allow efficient use of the "in" operator;\nfor mappings, "in" should search the mapping\'s keys; for sequences, it\nshould search through the values. 
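The "__call__()" hook documented above can be sketched as follows (the "Adder" class is invented for the example):

   >>> class Adder:
   ...     def __init__(self, n):
   ...         self.n = n
   ...     def __call__(self, x):
   ...         return self.n + x
   ...
   >>> add_five = Adder(5)
   >>> add_five(10)            # shorthand for add_five.__call__(10)
   15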
It is further recommended that both\nmappings and sequences implement the "__iter__()" method to allow\nefficient iteration through the container; for mappings, "__iter__()"\nshould be the same as "keys()"; for sequences, it should iterate\nthrough the values.\n\nobject.__len__(self)\n\n Called to implement the built-in function "len()". Should return\n the length of the object, an integer ">=" 0. Also, an object that\n doesn\'t define a "__bool__()" method and whose "__len__()" method\n returns zero is considered to be false in a Boolean context.\n\nobject.__length_hint__(self)\n\n Called to implement "operator.length_hint()". Should return an\n estimated length for the object (which may be greater or less than\n the actual length). The length must be an integer ">=" 0. This\n method is purely an optimization and is never required for\n correctness.\n\n New in version 3.4.\n\nNote: Slicing is done exclusively with the following three methods.\n A call like\n\n a[1:2] = b\n\n is translated to\n\n a[slice(1, 2, None)] = b\n\n and so forth. Missing slice items are always filled in with "None".\n\nobject.__getitem__(self, key)\n\n Called to implement evaluation of "self[key]". For sequence types,\n the accepted keys should be integers and slice objects. Note that\n the special interpretation of negative indexes (if the class wishes\n to emulate a sequence type) is up to the "__getitem__()" method. If\n *key* is of an inappropriate type, "TypeError" may be raised; if of\n a value outside the set of indexes for the sequence (after any\n special interpretation of negative values), "IndexError" should be\n raised. For mapping types, if *key* is missing (not in the\n container), "KeyError" should be raised.\n\n Note: "for" loops expect that an "IndexError" will be raised for\n illegal indexes to allow proper detection of the end of the\n sequence.\n\nobject.__setitem__(self, key, value)\n\n Called to implement assignment to "self[key]". Same note as for\n "__getitem__()". This should only be implemented for mappings if\n the objects support changes to the values for keys, or if new keys\n can be added, or for sequences if elements can be replaced. The\n same exceptions should be raised for improper *key* values as for\n the "__getitem__()" method.\n\nobject.__delitem__(self, key)\n\n Called to implement deletion of "self[key]". Same note as for\n "__getitem__()". This should only be implemented for mappings if\n the objects support removal of keys, or for sequences if elements\n can be removed from the sequence. The same exceptions should be\n raised for improper *key* values as for the "__getitem__()" method.\n\nobject.__iter__(self)\n\n This method is called when an iterator is required for a container.\n This method should return a new iterator object that can iterate\n over all the objects in the container. For mappings, it should\n iterate over the keys of the container, and should also be made\n available as the method "keys()".\n\n Iterator objects also need to implement this method; they are\n required to return themselves. For more information on iterator\n objects, see *Iterator Types*.\n\nobject.__reversed__(self)\n\n Called (if present) by the "reversed()" built-in to implement\n reverse iteration. It should return a new iterator object that\n iterates over all the objects in the container in reverse order.\n\n If the "__reversed__()" method is not provided, the "reversed()"\n built-in will fall back to using the sequence protocol ("__len__()"\n and "__getitem__()"). 
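As a concrete sketch of the sequence side of this protocol (the "Deck" class and its contents are made up), note how "reversed()" and "in" fall back to "__len__()" and "__getitem__()" when the optional methods are absent:

   >>> class Deck:
   ...     def __init__(self):
   ...         self._cards = ['2H', '3H', '4H']
   ...     def __len__(self):
   ...         return len(self._cards)
   ...     def __getitem__(self, index):
   ...         return self._cards[index]      # handles ints and slices
   ...
   >>> d = Deck()
   >>> len(d)
   3
   >>> d[0]
   '2H'
   >>> list(reversed(d))                      # sequence-protocol fallback
   ['4H', '3H', '2H']
   >>> '3H' in d                              # iteration fallback
   True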
Objects that support the sequence protocol\n should only provide "__reversed__()" if they can provide an\n implementation that is more efficient than the one provided by\n "reversed()".\n\nThe membership test operators ("in" and "not in") are normally\nimplemented as an iteration through a sequence. However, container\nobjects can supply the following special method with a more efficient\nimplementation, which also does not require the object be a sequence.\n\nobject.__contains__(self, item)\n\n Called to implement membership test operators. Should return true\n if *item* is in *self*, false otherwise. For mapping objects, this\n should consider the keys of the mapping rather than the values or\n the key-item pairs.\n\n For objects that don\'t define "__contains__()", the membership test\n first tries iteration via "__iter__()", then the old sequence\n iteration protocol via "__getitem__()", see *this section in the\n language reference*.\n\n\nEmulating numeric types\n=======================\n\nThe following methods can be defined to emulate numeric objects.\nMethods corresponding to operations that are not supported by the\nparticular kind of number implemented (e.g., bitwise operations for\nnon-integral numbers) should be left undefined.\n\nobject.__add__(self, other)\nobject.__sub__(self, other)\nobject.__mul__(self, other)\nobject.__truediv__(self, other)\nobject.__floordiv__(self, other)\nobject.__mod__(self, other)\nobject.__divmod__(self, other)\nobject.__pow__(self, other[, modulo])\nobject.__lshift__(self, other)\nobject.__rshift__(self, other)\nobject.__and__(self, other)\nobject.__xor__(self, other)\nobject.__or__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations ("+", "-", "*", "/", "//", "%", "divmod()", "pow()",\n "**", "<<", ">>", "&", "^", "|"). For instance, to evaluate the\n expression "x + y", where *x* is an instance of a class that has an\n "__add__()" method, "x.__add__(y)" is called. The "__divmod__()"\n method should be the equivalent to using "__floordiv__()" and\n "__mod__()"; it should not be related to "__truediv__()". Note\n that "__pow__()" should be defined to accept an optional third\n argument if the ternary version of the built-in "pow()" function is\n to be supported.\n\n If one of those methods does not support the operation with the\n supplied arguments, it should return "NotImplemented".\n\nobject.__radd__(self, other)\nobject.__rsub__(self, other)\nobject.__rmul__(self, other)\nobject.__rtruediv__(self, other)\nobject.__rfloordiv__(self, other)\nobject.__rmod__(self, other)\nobject.__rdivmod__(self, other)\nobject.__rpow__(self, other)\nobject.__rlshift__(self, other)\nobject.__rrshift__(self, other)\nobject.__rand__(self, other)\nobject.__rxor__(self, other)\nobject.__ror__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations ("+", "-", "*", "/", "//", "%", "divmod()", "pow()",\n "**", "<<", ">>", "&", "^", "|") with reflected (swapped) operands.\n These functions are only called if the left operand does not\n support the corresponding operation and the operands are of\n different types. 
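To make the "NotImplemented" convention concrete (the "Metres" class is purely illustrative):

   >>> class Metres:
   ...     def __init__(self, value):
   ...         self.value = value
   ...     def __add__(self, other):
   ...         if not isinstance(other, Metres):
   ...             return NotImplemented       # let the other operand try
   ...         return Metres(self.value + other.value)
   ...     def __repr__(self):
   ...         return 'Metres({})'.format(self.value)
   ...
   >>> Metres(2) + Metres(3)
   Metres(5)
   >>> Metres(2) + 'x'
   Traceback (most recent call last):
     ...
   TypeError: unsupported operand type(s) for +: 'Metres' and 'str'

When neither operand handles the operation, the interpreter raises "TypeError".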
[2] For instance, to evaluate the expression "x -\n y", where *y* is an instance of a class that has an "__rsub__()"\n method, "y.__rsub__(x)" is called if "x.__sub__(y)" returns\n *NotImplemented*.\n\n Note that ternary "pow()" will not try calling "__rpow__()" (the\n coercion rules would become too complicated).\n\n Note: If the right operand\'s type is a subclass of the left\n operand\'s type and that subclass provides the reflected method\n for the operation, this method will be called before the left\n operand\'s non-reflected method. This behavior allows subclasses\n to override their ancestors\' operations.\n\nobject.__iadd__(self, other)\nobject.__isub__(self, other)\nobject.__imul__(self, other)\nobject.__itruediv__(self, other)\nobject.__ifloordiv__(self, other)\nobject.__imod__(self, other)\nobject.__ipow__(self, other[, modulo])\nobject.__ilshift__(self, other)\nobject.__irshift__(self, other)\nobject.__iand__(self, other)\nobject.__ixor__(self, other)\nobject.__ior__(self, other)\n\n These methods are called to implement the augmented arithmetic\n assignments ("+=", "-=", "*=", "/=", "//=", "%=", "**=", "<<=",\n ">>=", "&=", "^=", "|="). These methods should attempt to do the\n operation in-place (modifying *self*) and return the result (which\n could be, but does not have to be, *self*). If a specific method\n is not defined, the augmented assignment falls back to the normal\n methods. For instance, if *x* is an instance of a class with an\n "__iadd__()" method, "x += y" is equivalent to "x = x.__iadd__(y)"\n . Otherwise, "x.__add__(y)" and "y.__radd__(x)" are considered, as\n with the evaluation of "x + y". In certain situations, augmented\n assignment can result in unexpected errors (see *Why does\n a_tuple[i] += [\'item\'] raise an exception when the addition\n works?*), but this behavior is in fact part of the data model.\n\nobject.__neg__(self)\nobject.__pos__(self)\nobject.__abs__(self)\nobject.__invert__(self)\n\n Called to implement the unary arithmetic operations ("-", "+",\n "abs()" and "~").\n\nobject.__complex__(self)\nobject.__int__(self)\nobject.__float__(self)\nobject.__round__(self[, n])\n\n Called to implement the built-in functions "complex()", "int()",\n "float()" and "round()". Should return a value of the appropriate\n type.\n\nobject.__index__(self)\n\n Called to implement "operator.index()", and whenever Python needs\n to losslessly convert the numeric object to an integer object (such\n as in slicing, or in the built-in "bin()", "hex()" and "oct()"\n functions). Presence of this method indicates that the numeric\n object is an integer type. Must return an integer.\n\n Note: When "__index__()" is defined, "__int__()" should also be\n defined, and both shuld return the same value, in order to have a\n coherent integer type class.\n\n\nWith Statement Context Managers\n===============================\n\nA *context manager* is an object that defines the runtime context to\nbe established when executing a "with" statement. The context manager\nhandles the entry into, and the exit from, the desired runtime context\nfor the execution of the block of code. 
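Returning to "__index__()" for a moment, a tiny illustration (the class name is invented): any object defining it can be used wherever an integer is required, for example as a sequence index or with "bin()":

   >>> class FileDescriptor:
   ...     def __init__(self, fd):
   ...         self.fd = fd
   ...     def __index__(self):
   ...         return self.fd
   ...
   >>> 'abcdefgh'[FileDescriptor(3)]
   'd'
   >>> bin(FileDescriptor(3))
   '0b11'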
Context managers are normally\ninvoked using the "with" statement (described in section *The with\nstatement*), but can also be used by directly invoking their methods.\n\nTypical uses of context managers include saving and restoring various\nkinds of global state, locking and unlocking resources, closing opened\nfiles, etc.\n\nFor more information on context managers, see *Context Manager Types*.\n\nobject.__enter__(self)\n\n Enter the runtime context related to this object. The "with"\n statement will bind this method\'s return value to the target(s)\n specified in the "as" clause of the statement, if any.\n\nobject.__exit__(self, exc_type, exc_value, traceback)\n\n Exit the runtime context related to this object. The parameters\n describe the exception that caused the context to be exited. If the\n context was exited without an exception, all three arguments will\n be "None".\n\n If an exception is supplied, and the method wishes to suppress the\n exception (i.e., prevent it from being propagated), it should\n return a true value. Otherwise, the exception will be processed\n normally upon exit from this method.\n\n Note that "__exit__()" methods should not reraise the passed-in\n exception; this is the caller\'s responsibility.\n\nSee also: **PEP 0343** - The "with" statement\n\n The specification, background, and examples for the Python "with"\n statement.\n\n\nSpecial method lookup\n=====================\n\nFor custom classes, implicit invocations of special methods are only\nguaranteed to work correctly if defined on an object\'s type, not in\nthe object\'s instance dictionary. That behaviour is the reason why\nthe following code raises an exception:\n\n >>> class C:\n ... pass\n ...\n >>> c = C()\n >>> c.__len__ = lambda: 5\n >>> len(c)\n Traceback (most recent call last):\n File "", line 1, in \n TypeError: object of type \'C\' has no len()\n\nThe rationale behind this behaviour lies with a number of special\nmethods such as "__hash__()" and "__repr__()" that are implemented by\nall objects, including type objects. If the implicit lookup of these\nmethods used the conventional lookup process, they would fail when\ninvoked on the type object itself:\n\n >>> 1 .__hash__() == hash(1)\n True\n >>> int.__hash__() == hash(int)\n Traceback (most recent call last):\n File "", line 1, in \n TypeError: descriptor \'__hash__\' of \'int\' object needs an argument\n\nIncorrectly attempting to invoke an unbound method of a class in this\nway is sometimes referred to as \'metaclass confusion\', and is avoided\nby bypassing the instance when looking up special methods:\n\n >>> type(1).__hash__(1) == hash(1)\n True\n >>> type(int).__hash__(int) == hash(int)\n True\n\nIn addition to bypassing any instance attributes in the interest of\ncorrectness, implicit special method lookup generally also bypasses\nthe "__getattribute__()" method even of the object\'s metaclass:\n\n >>> class Meta(type):\n ... def __getattribute__(*args):\n ... print("Metaclass getattribute invoked")\n ... return type.__getattribute__(*args)\n ...\n >>> class C(object, metaclass=Meta):\n ... def __len__(self):\n ... return 10\n ... def __getattribute__(*args):\n ... print("Class getattribute invoked")\n ... 
return object.__getattribute__(*args)\n ...\n >>> c = C()\n >>> c.__len__() # Explicit lookup via instance\n Class getattribute invoked\n 10\n >>> type(c).__len__(c) # Explicit lookup via type\n Metaclass getattribute invoked\n 10\n >>> len(c) # Implicit lookup\n 10\n\nBypassing the "__getattribute__()" machinery in this fashion provides\nsignificant scope for speed optimisations within the interpreter, at\nthe cost of some flexibility in the handling of special methods (the\nspecial method *must* be set on the class object itself in order to be\nconsistently invoked by the interpreter).\n\n-[ Footnotes ]-\n\n[1] It *is* possible in some cases to change an object\'s type,\n under certain controlled conditions. It generally isn\'t a good\n idea though, since it can lead to some very strange behaviour if\n it is handled incorrectly.\n\n[2] For operands of the same type, it is assumed that if the non-\n reflected method (such as "__add__()") fails the operation is not\n supported, which is why the reflected method is not called.\n', 'string-methods': '\nString Methods\n**************\n\nStrings implement all of the *common* sequence operations, along with\nthe additional methods described below.\n\nStrings also support two styles of string formatting, one providing a\nlarge degree of flexibility and customization (see "str.format()",\n*Format String Syntax* and *String Formatting*) and the other based on\nC "printf" style formatting that handles a narrower range of types and\nis slightly harder to use correctly, but is often faster for the cases\nit can handle (*printf-style String Formatting*).\n\nThe *Text Processing Services* section of the standard library covers\na number of other modules that provide various text related utilities\n(including regular expression support in the "re" module).\n\nstr.capitalize()\n\n Return a copy of the string with its first character capitalized\n and the rest lowercased.\n\nstr.casefold()\n\n Return a casefolded copy of the string. Casefolded strings may be\n used for caseless matching.\n\n Casefolding is similar to lowercasing but more aggressive because\n it is intended to remove all case distinctions in a string. For\n example, the German lowercase letter "\'\xc3\x9f\'" is equivalent to ""ss"".\n Since it is already lowercase, "lower()" would do nothing to "\'\xc3\x9f\'";\n "casefold()" converts it to ""ss"".\n\n The casefolding algorithm is described in section 3.13 of the\n Unicode Standard.\n\n New in version 3.3.\n\nstr.center(width[, fillchar])\n\n Return centered in a string of length *width*. Padding is done\n using the specified *fillchar* (default is a space).\n\nstr.count(sub[, start[, end]])\n\n Return the number of non-overlapping occurrences of substring *sub*\n in the range [*start*, *end*]. Optional arguments *start* and\n *end* are interpreted as in slice notation.\n\nstr.encode(encoding="utf-8", errors="strict")\n\n Return an encoded version of the string as a bytes object. Default\n encoding is "\'utf-8\'". *errors* may be given to set a different\n error handling scheme. The default for *errors* is "\'strict\'",\n meaning that encoding errors raise a "UnicodeError". Other possible\n values are "\'ignore\'", "\'replace\'", "\'xmlcharrefreplace\'",\n "\'backslashreplace\'" and any other name registered via\n "codecs.register_error()", see section *Codec Base Classes*. 
For a\n list of possible encodings, see section *Standard Encodings*.\n\n Changed in version 3.1: Support for keyword arguments added.\n\nstr.endswith(suffix[, start[, end]])\n\n Return "True" if the string ends with the specified *suffix*,\n otherwise return "False". *suffix* can also be a tuple of suffixes\n to look for. With optional *start*, test beginning at that\n position. With optional *end*, stop comparing at that position.\n\nstr.expandtabs(tabsize=8)\n\n Return a copy of the string where all tab characters are replaced\n by one or more spaces, depending on the current column and the\n given tab size. Tab positions occur every *tabsize* characters\n (default is 8, giving tab positions at columns 0, 8, 16 and so on).\n To expand the string, the current column is set to zero and the\n string is examined character by character. If the character is a\n tab ("\\t"), one or more space characters are inserted in the result\n until the current column is equal to the next tab position. (The\n tab character itself is not copied.) If the character is a newline\n ("\\n") or return ("\\r"), it is copied and the current column is\n reset to zero. Any other character is copied unchanged and the\n current column is incremented by one regardless of how the\n character is represented when printed.\n\n >>> \'01\\t012\\t0123\\t01234\'.expandtabs()\n \'01 012 0123 01234\'\n >>> \'01\\t012\\t0123\\t01234\'.expandtabs(4)\n \'01 012 0123 01234\'\n\nstr.find(sub[, start[, end]])\n\n Return the lowest index in the string where substring *sub* is\n found, such that *sub* is contained in the slice "s[start:end]".\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return "-1" if *sub* is not found.\n\n Note: The "find()" method should be used only if you need to know\n the position of *sub*. To check if *sub* is a substring or not,\n use the "in" operator:\n\n >>> \'Py\' in \'Python\'\n True\n\nstr.format(*args, **kwargs)\n\n Perform a string formatting operation. The string on which this\n method is called can contain literal text or replacement fields\n delimited by braces "{}". Each replacement field contains either\n the numeric index of a positional argument, or the name of a\n keyword argument. Returns a copy of the string where each\n replacement field is replaced with the string value of the\n corresponding argument.\n\n >>> "The sum of 1 + 2 is {0}".format(1+2)\n \'The sum of 1 + 2 is 3\'\n\n See *Format String Syntax* for a description of the various\n formatting options that can be specified in format strings.\n\nstr.format_map(mapping)\n\n Similar to "str.format(**mapping)", except that "mapping" is used\n directly and not copied to a "dict". This is useful if for example\n "mapping" is a dict subclass:\n\n >>> class Default(dict):\n ... def __missing__(self, key):\n ... return key\n ...\n >>> \'{name} was born in {country}\'.format_map(Default(name=\'Guido\'))\n \'Guido was born in country\'\n\n New in version 3.2.\n\nstr.index(sub[, start[, end]])\n\n Like "find()", but raise "ValueError" when the substring is not\n found.\n\nstr.isalnum()\n\n Return true if all characters in the string are alphanumeric and\n there is at least one character, false otherwise. A character "c"\n is alphanumeric if one of the following returns "True":\n "c.isalpha()", "c.isdecimal()", "c.isdigit()", or "c.isnumeric()".\n\nstr.isalpha()\n\n Return true if all characters in the string are alphabetic and\n there is at least one character, false otherwise. 
Alphabetic\n characters are those characters defined in the Unicode character\n database as "Letter", i.e., those with general category property\n being one of "Lm", "Lt", "Lu", "Ll", or "Lo". Note that this is\n different from the "Alphabetic" property defined in the Unicode\n Standard.\n\nstr.isdecimal()\n\n Return true if all characters in the string are decimal characters\n and there is at least one character, false otherwise. Decimal\n characters are those from general category "Nd". This category\n includes digit characters, and all characters that can be used to\n form decimal-radix numbers, e.g. U+0660, ARABIC-INDIC DIGIT ZERO.\n\nstr.isdigit()\n\n Return true if all characters in the string are digits and there is\n at least one character, false otherwise. Digits include decimal\n characters and digits that need special handling, such as the\n compatibility superscript digits. Formally, a digit is a character\n that has the property value Numeric_Type=Digit or\n Numeric_Type=Decimal.\n\nstr.isidentifier()\n\n Return true if the string is a valid identifier according to the\n language definition, section *Identifiers and keywords*.\n\n Use "keyword.iskeyword()" to test for reserved identifiers such as\n "def" and "class".\n\nstr.islower()\n\n Return true if all cased characters [4] in the string are lowercase\n and there is at least one cased character, false otherwise.\n\nstr.isnumeric()\n\n Return true if all characters in the string are numeric characters,\n and there is at least one character, false otherwise. Numeric\n characters include digit characters, and all characters that have\n the Unicode numeric value property, e.g. U+2155, VULGAR FRACTION\n ONE FIFTH. Formally, numeric characters are those with the\n property value Numeric_Type=Digit, Numeric_Type=Decimal or\n Numeric_Type=Numeric.\n\nstr.isprintable()\n\n Return true if all characters in the string are printable or the\n string is empty, false otherwise. Nonprintable characters are\n those characters defined in the Unicode character database as\n "Other" or "Separator", excepting the ASCII space (0x20) which is\n considered printable. (Note that printable characters in this\n context are those which should not be escaped when "repr()" is\n invoked on a string. It has no bearing on the handling of strings\n written to "sys.stdout" or "sys.stderr".)\n\nstr.isspace()\n\n Return true if there are only whitespace characters in the string\n and there is at least one character, false otherwise. Whitespace\n characters are those characters defined in the Unicode character\n database as "Other" or "Separator" and those with bidirectional\n property being one of "WS", "B", or "S".\n\nstr.istitle()\n\n Return true if the string is a titlecased string and there is at\n least one character, for example uppercase characters may only\n follow uncased characters and lowercase characters only cased ones.\n Return false otherwise.\n\nstr.isupper()\n\n Return true if all cased characters [4] in the string are uppercase\n and there is at least one cased character, false otherwise.\n\nstr.join(iterable)\n\n Return a string which is the concatenation of the strings in the\n *iterable* *iterable*. 
A "TypeError" will be raised if there are\n any non-string values in *iterable*, including "bytes" objects.\n The separator between elements is the string providing this method.\n\nstr.ljust(width[, fillchar])\n\n Return the string left justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to "len(s)".\n\nstr.lower()\n\n Return a copy of the string with all the cased characters [4]\n converted to lowercase.\n\n The lowercasing algorithm used is described in section 3.13 of the\n Unicode Standard.\n\nstr.lstrip([chars])\n\n Return a copy of the string with leading characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or "None", the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a prefix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.lstrip()\n \'spacious \'\n >>> \'www.example.com\'.lstrip(\'cmowz.\')\n \'example.com\'\n\nstatic str.maketrans(x[, y[, z]])\n\n This static method returns a translation table usable for\n "str.translate()".\n\n If there is only one argument, it must be a dictionary mapping\n Unicode ordinals (integers) or characters (strings of length 1) to\n Unicode ordinals, strings (of arbitrary lengths) or None.\n Character keys will then be converted to ordinals.\n\n If there are two arguments, they must be strings of equal length,\n and in the resulting dictionary, each character in x will be mapped\n to the character at the same position in y. If there is a third\n argument, it must be a string, whose characters will be mapped to\n None in the result.\n\nstr.partition(sep)\n\n Split the string at the first occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing the string itself, followed by\n two empty strings.\n\nstr.replace(old, new[, count])\n\n Return a copy of the string with all occurrences of substring *old*\n replaced by *new*. If the optional argument *count* is given, only\n the first *count* occurrences are replaced.\n\nstr.rfind(sub[, start[, end]])\n\n Return the highest index in the string where substring *sub* is\n found, such that *sub* is contained within "s[start:end]".\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return "-1" on failure.\n\nstr.rindex(sub[, start[, end]])\n\n Like "rfind()" but raises "ValueError" when the substring *sub* is\n not found.\n\nstr.rjust(width[, fillchar])\n\n Return the string right justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to "len(s)".\n\nstr.rpartition(sep)\n\n Split the string at the last occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing two empty strings, followed by\n the string itself.\n\nstr.rsplit(sep=None, maxsplit=-1)\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit* splits\n are done, the *rightmost* ones. If *sep* is not specified or\n "None", any whitespace string is a separator. 
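To make the table returned by "str.maketrans()" and the split points of "partition()"/"rpartition()" concrete (the sample strings are arbitrary):

   >>> table = str.maketrans('abc', 'xyz', 'g')   # map a->x, b->y, c->z; delete 'g'
   >>> 'cabbage'.translate(table)
   'zxyyxe'
   >>> 'python-3.4.0.tar.gz'.partition('.')
   ('python-3', '.', '4.0.tar.gz')
   >>> 'python-3.4.0.tar.gz'.rpartition('.')
   ('python-3.4.0.tar', '.', 'gz')

("str.translate()" itself is documented further below.)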
Except for splitting\n from the right, "rsplit()" behaves like "split()" which is\n described in detail below.\n\nstr.rstrip([chars])\n\n Return a copy of the string with trailing characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or "None", the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a suffix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.rstrip()\n \' spacious\'\n >>> \'mississippi\'.rstrip(\'ipz\')\n \'mississ\'\n\nstr.split(sep=None, maxsplit=-1)\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit*\n splits are done (thus, the list will have at most "maxsplit+1"\n elements). If *maxsplit* is not specified or "-1", then there is\n no limit on the number of splits (all possible splits are made).\n\n If *sep* is given, consecutive delimiters are not grouped together\n and are deemed to delimit empty strings (for example,\n "\'1,,2\'.split(\',\')" returns "[\'1\', \'\', \'2\']"). The *sep* argument\n may consist of multiple characters (for example,\n "\'1<>2<>3\'.split(\'<>\')" returns "[\'1\', \'2\', \'3\']"). Splitting an\n empty string with a specified separator returns "[\'\']".\n\n If *sep* is not specified or is "None", a different splitting\n algorithm is applied: runs of consecutive whitespace are regarded\n as a single separator, and the result will contain no empty strings\n at the start or end if the string has leading or trailing\n whitespace. Consequently, splitting an empty string or a string\n consisting of just whitespace with a "None" separator returns "[]".\n\n For example, "\' 1 2 3 \'.split()" returns "[\'1\', \'2\', \'3\']", and\n "\' 1 2 3 \'.split(None, 1)" returns "[\'1\', \'2 3 \']".\n\nstr.splitlines([keepends])\n\n Return a list of the lines in the string, breaking at line\n boundaries. This method uses the *universal newlines* approach to\n splitting lines. Line breaks are not included in the resulting list\n unless *keepends* is given and true.\n\n For example, "\'ab c\\n\\nde fg\\rkl\\r\\n\'.splitlines()" returns "[\'ab\n c\', \'\', \'de fg\', \'kl\']", while the same call with\n "splitlines(True)" returns "[\'ab c\\n\', \'\\n\', \'de fg\\r\', \'kl\\r\\n\']".\n\n Unlike "split()" when a delimiter string *sep* is given, this\n method returns an empty list for the empty string, and a terminal\n line break does not result in an extra line.\n\nstr.startswith(prefix[, start[, end]])\n\n Return "True" if string starts with the *prefix*, otherwise return\n "False". *prefix* can also be a tuple of prefixes to look for.\n With optional *start*, test string beginning at that position.\n With optional *end*, stop comparing string at that position.\n\nstr.strip([chars])\n\n Return a copy of the string with the leading and trailing\n characters removed. The *chars* argument is a string specifying the\n set of characters to be removed. If omitted or "None", the *chars*\n argument defaults to removing whitespace. The *chars* argument is\n not a prefix or suffix; rather, all combinations of its values are\n stripped:\n\n >>> \' spacious \'.strip()\n \'spacious\'\n >>> \'www.example.com\'.strip(\'cmowz.\')\n \'example\'\n\nstr.swapcase()\n\n Return a copy of the string with uppercase characters converted to\n lowercase and vice versa. 
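The tuple and slice arguments accepted by "startswith()" and "endswith()" (documented above) are easy to overlook; a quick, illustrative pair of calls:

   >>> filename = 'archive.tar.gz'
   >>> filename.endswith(('.gz', '.bz2'))     # any of several suffixes
   True
   >>> filename.startswith('archive', 0, 7)   # test only the slice [0:7]
   True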
Note that it is not necessarily true that\n "s.swapcase().swapcase() == s".\n\nstr.title()\n\n Return a titlecased version of the string where words start with an\n uppercase character and the remaining characters are lowercase.\n\n The algorithm uses a simple language-independent definition of a\n word as groups of consecutive letters. The definition works in\n many contexts but it means that apostrophes in contractions and\n possessives form word boundaries, which may not be the desired\n result:\n\n >>> "they\'re bill\'s friends from the UK".title()\n "They\'Re Bill\'S Friends From The Uk"\n\n A workaround for apostrophes can be constructed using regular\n expressions:\n\n >>> import re\n >>> def titlecase(s):\n ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n ... lambda mo: mo.group(0)[0].upper() +\n ... mo.group(0)[1:].lower(),\n ... s)\n ...\n >>> titlecase("they\'re bill\'s friends.")\n "They\'re Bill\'s Friends."\n\nstr.translate(map)\n\n Return a copy of the *s* where all characters have been mapped\n through the *map* which must be a dictionary of Unicode ordinals\n (integers) to Unicode ordinals, strings or "None". Unmapped\n characters are left untouched. Characters mapped to "None" are\n deleted.\n\n You can use "str.maketrans()" to create a translation map from\n character-to-character mappings in different formats.\n\n Note: An even more flexible approach is to create a custom\n character mapping codec using the "codecs" module (see\n "encodings.cp1251" for an example).\n\nstr.upper()\n\n Return a copy of the string with all the cased characters [4]\n converted to uppercase. Note that "str.upper().isupper()" might be\n "False" if "s" contains uncased characters or if the Unicode\n category of the resulting character(s) is not "Lu" (Letter,\n uppercase), but e.g. "Lt" (Letter, titlecase).\n\n The uppercasing algorithm used is described in section 3.13 of the\n Unicode Standard.\n\nstr.zfill(width)\n\n Return the numeric string left filled with zeros in a string of\n length *width*. A sign prefix is handled correctly. The original\n string is returned if *width* is less than or equal to "len(s)".\n', 'strings': '\nString and Bytes literals\n*************************\n\nString literals are described by the following lexical definitions:\n\n stringliteral ::= [stringprefix](shortstring | longstring)\n stringprefix ::= "r" | "u" | "R" | "U"\n shortstring ::= "\'" shortstringitem* "\'" | \'"\' shortstringitem* \'"\'\n longstring ::= "\'\'\'" longstringitem* "\'\'\'" | \'"""\' longstringitem* \'"""\'\n shortstringitem ::= shortstringchar | stringescapeseq\n longstringitem ::= longstringchar | stringescapeseq\n shortstringchar ::= <any source character except "\\" or newline or the quote>\n longstringchar ::= <any source character except "\\">\n stringescapeseq ::= "\\" <any source character>\n\n bytesliteral ::= bytesprefix(shortbytes | longbytes)\n bytesprefix ::= "b" | "B" | "br" | "Br" | "bR" | "BR" | "rb" | "rB" | "Rb" | "RB"\n shortbytes ::= "\'" shortbytesitem* "\'" | \'"\' shortbytesitem* \'"\'\n longbytes ::= "\'\'\'" longbytesitem* "\'\'\'" | \'"""\' longbytesitem* \'"""\'\n shortbytesitem ::= shortbyteschar | bytesescapeseq\n longbytesitem ::= longbyteschar | bytesescapeseq\n shortbyteschar ::= <any ASCII character except "\\" or newline or the quote>\n longbyteschar ::= <any ASCII character except "\\">\n bytesescapeseq ::= "\\" <any ASCII character>\n\nOne syntactic restriction not indicated by these productions is that\nwhitespace is not allowed between the "stringprefix" or "bytesprefix"\nand the rest of the literal. 
The source character set is defined by\nthe encoding declaration; it is UTF-8 if no encoding declaration is\ngiven in the source file; see section *Encoding declarations*.\n\nIn plain English: Both types of literals can be enclosed in matching\nsingle quotes ("\'") or double quotes ("""). They can also be enclosed\nin matching groups of three single or double quotes (these are\ngenerally referred to as *triple-quoted strings*). The backslash\n("\\") character is used to escape characters that otherwise have a\nspecial meaning, such as newline, backslash itself, or the quote\ncharacter.\n\nBytes literals are always prefixed with "\'b\'" or "\'B\'"; they produce\nan instance of the "bytes" type instead of the "str" type. They may\nonly contain ASCII characters; bytes with a numeric value of 128 or\ngreater must be expressed with escapes.\n\nAs of Python 3.3 it is possible again to prefix unicode strings with a\n"u" prefix to simplify maintenance of dual 2.x and 3.x codebases.\n\nBoth string and bytes literals may optionally be prefixed with a\nletter "\'r\'" or "\'R\'"; such strings are called *raw strings* and treat\nbackslashes as literal characters. As a result, in string literals,\n"\'\\U\'" and "\'\\u\'" escapes in raw strings are not treated specially.\nGiven that Python 2.x\'s raw unicode literals behave differently than\nPython 3.x\'s the "\'ur\'" syntax is not supported.\n\n New in version 3.3: The "\'rb\'" prefix of raw bytes literals has\n been added as a synonym of "\'br\'".\n\n New in version 3.3: Support for the unicode legacy literal\n ("u\'value\'") was reintroduced to simplify the maintenance of dual\n Python 2.x and 3.x codebases. See **PEP 414** for more information.\n\nIn triple-quoted strings, unescaped newlines and quotes are allowed\n(and are retained), except that three unescaped quotes in a row\nterminate the string. (A "quote" is the character used to open the\nstring, i.e. either "\'" or """.)\n\nUnless an "\'r\'" or "\'R\'" prefix is present, escape sequences in\nstrings are interpreted according to rules similar to those used by\nStandard C. 
The recognized escape sequences are:\n\n+-------------------+-----------------------------------+---------+\n| Escape Sequence | Meaning | Notes |\n+===================+===================================+=========+\n| "\\newline" | Backslash and newline ignored | |\n+-------------------+-----------------------------------+---------+\n| "\\\\" | Backslash ("\\") | |\n+-------------------+-----------------------------------+---------+\n| "\\\'" | Single quote ("\'") | |\n+-------------------+-----------------------------------+---------+\n| "\\"" | Double quote (""") | |\n+-------------------+-----------------------------------+---------+\n| "\\a" | ASCII Bell (BEL) | |\n+-------------------+-----------------------------------+---------+\n| "\\b" | ASCII Backspace (BS) | |\n+-------------------+-----------------------------------+---------+\n| "\\f" | ASCII Formfeed (FF) | |\n+-------------------+-----------------------------------+---------+\n| "\\n" | ASCII Linefeed (LF) | |\n+-------------------+-----------------------------------+---------+\n| "\\r" | ASCII Carriage Return (CR) | |\n+-------------------+-----------------------------------+---------+\n| "\\t" | ASCII Horizontal Tab (TAB) | |\n+-------------------+-----------------------------------+---------+\n| "\\v" | ASCII Vertical Tab (VT) | |\n+-------------------+-----------------------------------+---------+\n| "\\ooo" | Character with octal value *ooo* | (1,3) |\n+-------------------+-----------------------------------+---------+\n| "\\xhh" | Character with hex value *hh* | (2,3) |\n+-------------------+-----------------------------------+---------+\n\nEscape sequences only recognized in string literals are:\n\n+-------------------+-----------------------------------+---------+\n| Escape Sequence | Meaning | Notes |\n+===================+===================================+=========+\n| "\\N{name}" | Character named *name* in the Unicode database | (4) |\n+-------------------+-----------------------------------+---------+\n| "\\uxxxx" | Character with 16-bit hex value *xxxx* | (5) |\n+-------------------+-----------------------------------+---------+\n| "\\Uxxxxxxxx" | Character with 32-bit hex value *xxxxxxxx* | (6) |\n+-------------------+-----------------------------------+---------+\n\nNotes:\n\n1. As in Standard C, up to three octal digits are accepted.\n\n2. Unlike in Standard C, exactly two hex digits are required.\n\n3. In a bytes literal, hexadecimal and octal escapes denote the\n byte with the given value. In a string literal, these escapes\n denote a Unicode character with the given value.\n\n4. Changed in version 3.3: Support for name aliases [1] has been\n added.\n\n5. Individual code units which form parts of a surrogate pair can\n be encoded using this escape sequence. Exactly four hex digits are\n required.\n\n6. Any Unicode character can be encoded this way. Exactly eight\n hex digits are required.\n\nUnlike Standard C, all unrecognized escape sequences are left in the\nstring unchanged, i.e., *the backslash is left in the string*. (This\nbehavior is useful when debugging: if an escape sequence is mistyped,\nthe resulting output is more easily recognized as broken.) It is also\nimportant to note that the escape sequences only recognized in string\nliterals fall into the category of unrecognized escapes for bytes\nliterals.\n\nEven in a raw string, string quotes can be escaped with a backslash,\nbut the backslash remains in the string; for example, "r"\\""" is a\n valid string literal consisting of two characters: a backslash and a\ndouble quote; "r"\\"" is not a valid string literal (even a raw string\ncannot end in an odd number of backslashes). Specifically, *a raw\nstring cannot end in a single backslash* (since the backslash would\nescape the following quote character). 
Note also that a single\nbackslash followed by a newline is interpreted as those two characters\nas part of the string, *not* as a line continuation.\n', 'subscriptions': '\nSubscriptions\n*************\n\nA subscription selects an item of a sequence (string, tuple or list)\nor mapping (dictionary) object:\n\n subscription ::= primary "[" expression_list "]"\n\nThe primary must evaluate to an object that supports subscription,\ne.g. a list or dictionary. User-defined objects can support\nsubscription by defining a "__getitem__()" method.\n\nFor built-in objects, there are two types of objects that support\nsubscription:\n\nIf the primary is a mapping, the expression list must evaluate to an\nobject whose value is one of the keys of the mapping, and the\nsubscription selects the value in the mapping that corresponds to that\nkey. (The expression list is a tuple except if it has exactly one\nitem.)\n\nIf the primary is a sequence, the expression (list) must evaluate to\nan integer or a slice (as discussed in the following section).\n\nThe formal syntax makes no special provision for negative indices in\nsequences; however, built-in sequences all provide a "__getitem__()"\nmethod that interprets negative indices by adding the length of the\nsequence to the index (so that "x[-1]" selects the last item of "x").\nThe resulting value must be a nonnegative integer less than the number\nof items in the sequence, and the subscription selects the item whose\nindex is that value (counting from zero). Since the support for\nnegative indices and slicing occurs in the object\'s "__getitem__()"\nmethod, subclasses overriding this method will need to explicitly add\nthat support.\n\nA string\'s items are characters. A character is not a separate data\ntype but a string of exactly one character.\n', -- cgit v0.12 From cf1a3cd2c79212aa73678145a4119f5878724602 Mon Sep 17 00:00:00 2001 From: Larry Hastings Date: Sat, 15 Mar 2014 22:34:24 -0700 Subject: Release bump for 3.4.0 final. --- Doc/tools/sphinxext/pyspecific.py | 2 +- Include/patchlevel.h | 6 +++--- Lib/distutils/__init__.py | 2 +- Lib/idlelib/idlever.py | 2 +- Misc/RPM/python-3.4.spec | 2 +- README | 4 ++-- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Doc/tools/sphinxext/pyspecific.py b/Doc/tools/sphinxext/pyspecific.py index a7ea7df..4e41446 100644 --- a/Doc/tools/sphinxext/pyspecific.py +++ b/Doc/tools/sphinxext/pyspecific.py @@ -10,7 +10,7 @@ """ ISSUE_URI = 'http://bugs.python.org/issue%s' -SOURCE_URI = 'http://hg.python.org/cpython/file/default/%s' +SOURCE_URI = 'http://hg.python.org/cpython/file/3.4/%s' from docutils import nodes, utils diff --git a/Include/patchlevel.h b/Include/patchlevel.h index 52b9674..ae041c3 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -19,11 +19,11 @@ #define PY_MAJOR_VERSION 3 #define PY_MINOR_VERSION 4 #define PY_MICRO_VERSION 0 -#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_GAMMA -#define PY_RELEASE_SERIAL 3 +#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL +#define PY_RELEASE_SERIAL 0 /* Version as a string */ -#define PY_VERSION "3.4.0rc3+" +#define PY_VERSION "3.4.0" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py index 954b50d..04804c1 100644 --- a/Lib/distutils/__init__.py +++ b/Lib/distutils/__init__.py @@ -13,5 +13,5 @@ used from a setup script as # Updated automatically by the Python release process. 
# #--start constants-- -__version__ = "3.4.0rc3" +__version__ = "3.4.0" #--end constants-- diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py index d2df47d..efe37a1 100644 --- a/Lib/idlelib/idlever.py +++ b/Lib/idlelib/idlever.py @@ -1 +1 @@ -IDLE_VERSION = "3.4.0rc3" +IDLE_VERSION = "3.4.0" diff --git a/Misc/RPM/python-3.4.spec b/Misc/RPM/python-3.4.spec index e6987bc..60971f2 100644 --- a/Misc/RPM/python-3.4.spec +++ b/Misc/RPM/python-3.4.spec @@ -39,7 +39,7 @@ %define name python #--start constants-- -%define version 3.4.0rc3 +%define version 3.4.0 %define libvers 3.4 #--end constants-- %define release 1pydotorg diff --git a/README b/README index c55b1bc..d0c2b73 100644 --- a/README +++ b/README @@ -1,5 +1,5 @@ -This is Python version 3.4.0 release candidate 3 -================================================ +This is Python version 3.4.0 +============================ Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014 Python Software Foundation. All rights reserved. -- cgit v0.12 From 3101b7076270756f8be699358c69c5d15ea2cc48 Mon Sep 17 00:00:00 2001 From: Larry Hastings Date: Sat, 15 Mar 2014 22:43:17 -0700 Subject: Final documentation fixes for 3.4.0 final. --- Doc/license.rst | 6 +++--- Doc/whatsnew/3.4.rst | 5 +---- LICENSE | 4 ++-- 3 files changed, 6 insertions(+), 9 deletions(-) diff --git a/Doc/license.rst b/Doc/license.rst index 08e5464..6190fe0 100644 --- a/Doc/license.rst +++ b/Doc/license.rst @@ -84,9 +84,9 @@ Terms and conditions for accessing or otherwise using Python analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python |release| alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of - copyright, i.e., "Copyright © 2001-2014 Python Software Foundation; All Rights - Reserved" are retained in Python |release| alone or in any derivative version - prepared by Licensee. + copyright, i.e., "Copyright © 2001-2014 Python Software Foundation; All + Rights Reserved" are retained in Python |release| alone or in any derivative + version prepared by Licensee. #. In the event Licensee prepares a derivative work that is based on or incorporates Python |release| or any part thereof, and wants to make the diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index a4f6cb3..1b751b0 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -61,10 +61,7 @@ while :ref:`~mod.attr` will display as ``attr``. This article explains the new features in Python 3.4, compared to 3.3. - -.. Python 3.4 was released on TBD. - -For full details, see the +Python 3.4 was released on March 16, 2014. For full details, see the `changelog `_. diff --git a/LICENSE b/LICENSE index f9ca2c9..583f9f6 100644 --- a/LICENSE +++ b/LICENSE @@ -74,8 +74,8 @@ analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are retained -in Python alone or in any derivative version prepared by Licensee. +2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are +retained in Python alone or in any derivative version prepared by Licensee. 3. 
In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make -- cgit v0.12 From 84b1e7168c3b718b0598c15c751cf1b23c307660 Mon Sep 17 00:00:00 2001 From: Larry Hastings Date: Sat, 15 Mar 2014 22:43:24 -0700 Subject: Added tag v3.4.0 for changeset 04f714765c13 --- .hgtags | 1 + 1 file changed, 1 insertion(+) diff --git a/.hgtags b/.hgtags index 70bc4f9..460d724 100644 --- a/.hgtags +++ b/.hgtags @@ -130,3 +130,4 @@ a97ce3ecc96af79bd2e1ac66ce48d9138e0ca749 v3.4.0b3 5e088cea8660677969113741c1313d570d977e02 v3.4.0rc1 a300712ed38c9a242b736c44e806caea25a6dc05 v3.4.0rc2 8a81cdab3e9d521daaef989fade94b16455fc3b8 v3.4.0rc3 +04f714765c13824c3bc2835d7b008908862e083a v3.4.0 -- cgit v0.12 From e41b73caca2b543422f423a2ac07c29de834f44d Mon Sep 17 00:00:00 2001 From: Larry Hastings Date: Sun, 16 Mar 2014 20:13:07 -0700 Subject: Post-release verion bump for 3.4.0(+). --- Include/patchlevel.h | 2 +- Misc/NEWS | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/Include/patchlevel.h b/Include/patchlevel.h index ae041c3..72f540d 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -23,7 +23,7 @@ #define PY_RELEASE_SERIAL 0 /* Version as a string */ -#define PY_VERSION "3.4.0" +#define PY_VERSION "3.4.0+" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/Misc/NEWS b/Misc/NEWS index f3685ca..ff476fc 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -2,6 +2,18 @@ Python News +++++++++++ +What's New in Python 3.4.1rc1? +============================== + +Release date: TBA + +Core and Builtins +----------------- + +Library +------- + + What's New in Python 3.4.0? =========================== -- cgit v0.12
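The pydoc topic text carried in the patches above describes str.split(), str.rsplit(), str.maketrans()/str.translate(), raw string literals and str.zfill() in prose only. The following snippet is an illustrative sketch, not part of any changeset in this series; it simply exercises those documented behaviors and should run unchanged on any Python 3 interpreter:

    # Illustrative only; not taken from any patch above.

    # split(): an explicit separator keeps empty fields, while a None
    # separator collapses runs of whitespace and drops leading/trailing runs.
    assert '1,,2'.split(',') == ['1', '', '2']
    assert ' 1 2 3 '.split() == ['1', '2', '3']
    assert ' 1 2 3 '.split(None, 1) == ['1', '2 3 ']

    # rsplit() performs the rightmost splits when maxsplit is given.
    assert 'a.b.c'.rsplit('.', 1) == ['a.b', 'c']

    # maketrans()/translate(): map a->x, b->y, c->z and delete '!'.
    table = str.maketrans('abc', 'xyz', '!')
    assert 'cab!'.translate(table) == 'zxy'

    # Raw strings treat the backslash as a literal character.
    assert r'\n' == '\\' + 'n' and len(r'\n') == 2

    # zfill() pads with zeros and keeps the sign prefix in front.
    assert '-42'.zfill(6) == '-00042'

Only the str methods themselves come from the documentation quoted above; the sample values and the name "table" are arbitrary.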