-rw-r--r-- | Doc/library/importlib.rst | 10
-rw-r--r-- | Doc/whatsnew/3.5.rst | 66
-rw-r--r-- | Lib/asyncio/base_events.py | 5
-rw-r--r-- | Lib/asyncio/base_subprocess.py | 4
-rw-r--r-- | Lib/asyncio/proactor_events.py | 4
-rw-r--r-- | Lib/asyncio/selector_events.py | 4
-rw-r--r-- | Lib/asyncio/sslproto.py | 4
-rw-r--r-- | Lib/asyncio/unix_events.py | 5
-rw-r--r-- | Lib/http/cookiejar.py | 2
-rwxr-xr-x | Lib/idlelib/PyShell.py | 10
-rwxr-xr-x | Lib/test/regrtest.py | 9
-rw-r--r-- | Lib/test/test_asyncio/test_subprocess.py | 6
-rw-r--r-- | Lib/test/test_http_cookiejar.py | 9
-rw-r--r-- | Lib/test/test_typing.py | 275
-rw-r--r-- | Lib/typing.py | 223
-rw-r--r-- | Lib/urllib/request.py | 1
-rw-r--r-- | Misc/ACKS | 3
-rw-r--r-- | Misc/NEWS | 13
-rw-r--r-- | Tools/msi/make_zip.py | 12
-rw-r--r-- | Tools/msi/tcltk/tcltk.wixproj | 7
20 files changed, 289 insertions, 383 deletions
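Several of the asyncio hunks below drop their inline ``sys.version_info >= (3, 4)`` checks in favour of a shared ``compat.PY34`` flag imported via ``from . import compat``. The compat module itself is not part of this diff; a minimal sketch of what it is assumed to provide:

    # Lib/asyncio/compat.py -- assumed contents, not shown in this diff.
    # Centralizes the Python 3.4 check the transports below use to decide
    # whether to define __del__ (PEP 442 makes destructors in reference
    # cycles safe starting with 3.4).
    import sys

    PY34 = sys.version_info >= (3, 4)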
diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst index 771c4c5..632df75 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -179,11 +179,11 @@ Functions except NameError: cache = {} - It is legal though generally not very useful to reload built-in or - dynamically loaded modules (this is not true for e.g. :mod:`sys`, - :mod:`__main__`, :mod:`builtins` and other key modules where reloading is - frowned upon). In many cases, however, extension modules are not designed to - be initialized more than once, and may fail in arbitrary ways when reloaded. + It is generally not very useful to reload built-in or dynamically loaded + modules. Reloading :mod:`sys`, :mod:`__main__`, :mod:`builtins` and other + key modules is not recommended. In many cases extension modules are not + designed to be initialized more than once, and may fail in arbitrary ways + when reloaded. If a module imports objects from another module using :keyword:`from` ... :keyword:`import` ..., calling :func:`reload` for the other module does not diff --git a/Doc/whatsnew/3.5.rst b/Doc/whatsnew/3.5.rst index e7992d9..86c5b60 100644 --- a/Doc/whatsnew/3.5.rst +++ b/Doc/whatsnew/3.5.rst @@ -71,6 +71,7 @@ New syntax features: * :pep:`465`, a new matrix multiplication operator: ``a @ b``. * :pep:`492`, coroutines with async and await syntax. +* :pep:`448`, additional unpacking generalizations. New library modules: @@ -203,6 +204,71 @@ called ``@``. (Mnemonic: ``@`` is ``*`` for mATrices.) :pep:`465` -- A dedicated infix operator for matrix multiplication +PEP 448 - Additional Unpacking Generalizations +---------------------------------------------- + +This PEP proposes extended usages of the ``*`` iterable unpacking +operator and ``**`` dictionary unpacking operators +to allow unpacking in more positions, an arbitrary number of +times, and in additional circumstances. Specifically, +in function calls, in comprehensions and generator expressions, and +in displays. + +Function calls are proposed to support an arbitrary number of +unpackings rather than just one:: + + >>> print(*[1], *[2], 3) + 1 2 3 + >>> dict(**{'x': 1}, y=2, **{'z': 3}) + {'x': 1, 'y': 2, 'z': 3} + +Unpacking is proposed to be allowed inside tuple, list, set, +and dictionary displays:: + + >>> *range(4), 4 + (0, 1, 2, 3, 4) + >>> [*range(4), 4] + [0, 1, 2, 3, 4] + >>> {*range(4), 4} + {0, 1, 2, 3, 4} + >>> {'x': 1, **{'y': 2}} + {'x': 1, 'y': 2} + +In dictionaries, later values will always override earlier ones:: + + >>> {'x': 1, **{'x': 2}} + {'x': 2} + + >>> {**{'x': 2}, 'x': 1} + {'x': 1} + +.. seealso:: + + :pep:`448` -- Additional Unpacking Generalizations + + +PEP 484 - Type Hints +-------------------- + +This PEP introduces a provisional module to provide these standard +definitions and tools, along with some conventions for situations +where annotations are not available. + +For example, here is a simple function whose argument and return type +are declared in the annotations:: + + def greeting(name: str) -> str: + return 'Hello ' + name + +The type system supports unions, generic types, and a special type +named ``Any`` which is consistent with (i.e. assignable to and from) all +types. + +.. 
seealso:: + + :pep:`484` -- Type Hints + + PEP 471 - os.scandir() function -- a better and faster directory iterator ------------------------------------------------------------------------- diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 5a536a2..c205445 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -28,6 +28,7 @@ import traceback import sys import warnings +from . import compat from . import coroutines from . import events from . import futures @@ -378,7 +379,7 @@ class BaseEventLoop(events.AbstractEventLoop): # On Python 3.3 and older, objects with a destructor part of a reference # cycle are never destroyed. It's not more the case on Python 3.4 thanks # to the PEP 442. - if sys.version_info >= (3, 4): + if compat.PY34: def __del__(self): if not self.is_closed(): warnings.warn("unclosed event loop %r" % self, ResourceWarning) @@ -1205,7 +1206,7 @@ class BaseEventLoop(events.AbstractEventLoop): return enabled = bool(enabled) - if self._coroutine_wrapper_set is enabled: + if self._coroutine_wrapper_set == enabled: return wrapper = coroutines.debug_wrapper diff --git a/Lib/asyncio/base_subprocess.py b/Lib/asyncio/base_subprocess.py index a6971b1..6851cd2 100644 --- a/Lib/asyncio/base_subprocess.py +++ b/Lib/asyncio/base_subprocess.py @@ -1,8 +1,8 @@ import collections import subprocess -import sys import warnings +from . import compat from . import futures from . import protocols from . import transports @@ -116,7 +116,7 @@ class BaseSubprocessTransport(transports.SubprocessTransport): # On Python 3.3 and older, objects with a destructor part of a reference # cycle are never destroyed. It's not more the case on Python 3.4 thanks # to the PEP 442. - if sys.version_info >= (3, 4): + if compat.PY34: def __del__(self): if not self._closed: warnings.warn("unclosed transport %r" % self, ResourceWarning) diff --git a/Lib/asyncio/proactor_events.py b/Lib/asyncio/proactor_events.py index 9c2b8f1..abe4c12 100644 --- a/Lib/asyncio/proactor_events.py +++ b/Lib/asyncio/proactor_events.py @@ -7,10 +7,10 @@ proactor is only implemented on Windows with IOCP. __all__ = ['BaseProactorEventLoop'] import socket -import sys import warnings from . import base_events +from . import compat from . import constants from . import futures from . import sslproto @@ -79,7 +79,7 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin, # On Python 3.3 and older, objects with a destructor part of a reference # cycle are never destroyed. It's not more the case on Python 3.4 thanks # to the PEP 442. - if sys.version_info >= (3, 4): + if compat.PY34: def __del__(self): if self._sock is not None: warnings.warn("unclosed transport %r" % self, ResourceWarning) diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py index 7c5b9b5..4a99658 100644 --- a/Lib/asyncio/selector_events.py +++ b/Lib/asyncio/selector_events.py @@ -10,7 +10,6 @@ import collections import errno import functools import socket -import sys import warnings try: import ssl @@ -18,6 +17,7 @@ except ImportError: # pragma: no cover ssl = None from . import base_events +from . import compat from . import constants from . import events from . import futures @@ -568,7 +568,7 @@ class _SelectorTransport(transports._FlowControlMixin, # On Python 3.3 and older, objects with a destructor part of a reference # cycle are never destroyed. It's not more the case on Python 3.4 thanks # to the PEP 442. 
- if sys.version_info >= (3, 4): + if compat.PY34: def __del__(self): if self._sock is not None: warnings.warn("unclosed transport %r" % self, ResourceWarning) diff --git a/Lib/asyncio/sslproto.py b/Lib/asyncio/sslproto.py index 235855e..e566946 100644 --- a/Lib/asyncio/sslproto.py +++ b/Lib/asyncio/sslproto.py @@ -1,11 +1,11 @@ import collections -import sys import warnings try: import ssl except ImportError: # pragma: no cover ssl = None +from . import compat from . import protocols from . import transports from .log import logger @@ -317,7 +317,7 @@ class _SSLProtocolTransport(transports._FlowControlMixin, # On Python 3.3 and older, objects with a destructor part of a reference # cycle are never destroyed. It's not more the case on Python 3.4 thanks # to the PEP 442. - if sys.version_info >= (3, 4): + if compat.PY34: def __del__(self): if not self._closed: warnings.warn("unclosed transport %r" % self, ResourceWarning) diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index 75e7c9c..bf3b084 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -13,6 +13,7 @@ import warnings from . import base_events from . import base_subprocess +from . import compat from . import constants from . import coroutines from . import events @@ -370,7 +371,7 @@ class _UnixReadPipeTransport(transports.ReadTransport): # On Python 3.3 and older, objects with a destructor part of a reference # cycle are never destroyed. It's not more the case on Python 3.4 thanks # to the PEP 442. - if sys.version_info >= (3, 4): + if compat.PY34: def __del__(self): if self._pipe is not None: warnings.warn("unclosed transport %r" % self, ResourceWarning) @@ -555,7 +556,7 @@ class _UnixWritePipeTransport(transports._FlowControlMixin, # On Python 3.3 and older, objects with a destructor part of a reference # cycle are never destroyed. It's not more the case on Python 3.4 thanks # to the PEP 442. - if sys.version_info >= (3, 4): + if compat.PY34: def __del__(self): if self._pipe is not None: warnings.warn("unclosed transport %r" % self, ResourceWarning) diff --git a/Lib/http/cookiejar.py b/Lib/http/cookiejar.py index d54f58a..b1ba72e 100644 --- a/Lib/http/cookiejar.py +++ b/Lib/http/cookiejar.py @@ -758,7 +758,7 @@ class Cookie: ): if version is not None: version = int(version) - if expires is not None: expires = int(expires) + if expires is not None: expires = int(float(expires)) if port is None and port_specified is True: raise ValueError("if port is None, port_specified must be false") diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py index 4f7a6de..3869d45 100755 --- a/Lib/idlelib/PyShell.py +++ b/Lib/idlelib/PyShell.py @@ -23,16 +23,6 @@ except ImportError: "Your Python may not be configured for Tk. **", file=sys.__stderr__) sys.exit(1) import tkinter.messagebox as tkMessageBox -try: - from tkinter import ttk -except: - root = Tk() - root.withdraw() - tkMessageBox.showerror("Idle Cannot Start", - "Idle now requires the tkinter.ttk module from tcl/tk 8.5+.\n" - + "It found tk %s and no ttk." 
% TkVersion, - parent=root) - sys.exit(1) from idlelib.EditorWindow import EditorWindow, fixwordbreaks from idlelib.FileList import FileList diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py index 5b1fcc6..5650be0 100755 --- a/Lib/test/regrtest.py +++ b/Lib/test/regrtest.py @@ -810,7 +810,7 @@ def main(tests=None, **kwargs): if ns.verbose2 and bad: print("Re-running failed tests in verbose mode") - for test in bad: + for test in bad[:]: print("Re-running test %r in verbose mode" % test) sys.stdout.flush() try: @@ -821,6 +821,13 @@ def main(tests=None, **kwargs): # print a newline separate from the ^C print() break + else: + if ok[0] in {PASSED, ENV_CHANGED, SKIPPED, RESOURCE_DENIED}: + bad.remove(test) + else: + if bad: + print(count(len(bad), 'test'), "failed again:") + printlist(bad) if ns.single: if next_single_test: diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py index 38f0cee..d138c26 100644 --- a/Lib/test/test_asyncio/test_subprocess.py +++ b/Lib/test/test_asyncio/test_subprocess.py @@ -417,11 +417,7 @@ class SubprocessMixin: def test_popen_error(self): # Issue #24763: check that the subprocess transport is closed # when BaseSubprocessTransport fails - if sys.platform == 'win32': - target = 'asyncio.windows_utils.Popen' - else: - target = 'subprocess.Popen' - with mock.patch(target) as popen: + with mock.patch('subprocess.Popen') as popen: exc = ZeroDivisionError popen.side_effect = exc diff --git a/Lib/test/test_http_cookiejar.py b/Lib/test/test_http_cookiejar.py index e9f0356..50260ff 100644 --- a/Lib/test/test_http_cookiejar.py +++ b/Lib/test/test_http_cookiejar.py @@ -566,6 +566,15 @@ class CookieTests(unittest.TestCase): self.assertEqual(len(c), 1) self.assertIn('spam="bar"', h) + # test if fractional expiry is accepted + cookie = Cookie(0, "name", "value", + None, False, "www.python.org", + True, False, "/", + False, False, "1444312383.018307", + False, None, None, + {}) + self.assertEqual(cookie.expires, 1444312383) + # XXX RFC 2965 expiry rules (some apply to V0 too) def test_default_path(self): diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index c37e113..b34007d 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -41,11 +41,9 @@ class ManagingFounder(Manager, Founder): class AnyTests(TestCase): - def test_any_instance(self): - self.assertIsInstance(Employee(), Any) - self.assertIsInstance(42, Any) - self.assertIsInstance(None, Any) - self.assertIsInstance(object(), Any) + def test_any_instance_type_error(self): + with self.assertRaises(TypeError): + isinstance(42, Any) def test_any_subclass(self): self.assertTrue(issubclass(Employee, Any)) @@ -109,9 +107,6 @@ class TypeVarTests(TestCase): def test_basic_plain(self): T = TypeVar('T') - # Nothing is an instance if T. - with self.assertRaises(TypeError): - isinstance('', T) # Every class is a subclass of T. assert issubclass(int, T) assert issubclass(str, T) @@ -119,12 +114,16 @@ class TypeVarTests(TestCase): assert T == T # T is a subclass of itself. assert issubclass(T, T) + # T is an instance of TypeVar + assert isinstance(T, TypeVar) + + def test_typevar_instance_type_error(self): + T = TypeVar('T') + with self.assertRaises(TypeError): + isinstance(42, T) def test_basic_constrained(self): A = TypeVar('A', str, bytes) - # Nothing is an instance of A. - with self.assertRaises(TypeError): - isinstance('', A) # Only str and bytes are subclasses of A. 
assert issubclass(str, A) assert issubclass(bytes, A) @@ -213,8 +212,6 @@ class UnionTests(TestCase): def test_basics(self): u = Union[int, float] self.assertNotEqual(u, Union) - self.assertIsInstance(42, u) - self.assertIsInstance(3.14, u) self.assertTrue(issubclass(int, u)) self.assertTrue(issubclass(float, u)) @@ -247,7 +244,6 @@ class UnionTests(TestCase): def test_subclass(self): u = Union[int, Employee] - self.assertIsInstance(Manager(), u) self.assertTrue(issubclass(Manager, u)) def test_self_subclass(self): @@ -256,7 +252,6 @@ class UnionTests(TestCase): def test_multiple_inheritance(self): u = Union[int, Employee] - self.assertIsInstance(ManagingFounder(), u) self.assertTrue(issubclass(ManagingFounder, u)) def test_single_class_disappears(self): @@ -309,9 +304,6 @@ class UnionTests(TestCase): o = Optional[int] u = Union[int, None] self.assertEqual(o, u) - self.assertIsInstance(42, o) - self.assertIsInstance(None, o) - self.assertNotIsInstance(3.14, o) def test_empty(self): with self.assertRaises(TypeError): @@ -321,11 +313,9 @@ class UnionTests(TestCase): assert issubclass(Union[int, str], Union) assert not issubclass(int, Union) - def test_isinstance_union(self): - # Nothing is an instance of bare Union. - assert not isinstance(42, Union) - assert not isinstance(int, Union) - assert not isinstance(Union[int, str], Union) + def test_union_instance_type_error(self): + with self.assertRaises(TypeError): + isinstance(42, Union[int, str]) class TypeVarUnionTests(TestCase): @@ -352,22 +342,11 @@ class TypeVarUnionTests(TestCase): TU = TypeVar('TU', Union[int, float], None) assert issubclass(int, TU) assert issubclass(float, TU) - with self.assertRaises(TypeError): - isinstance(42, TU) - with self.assertRaises(TypeError): - isinstance('', TU) class TupleTests(TestCase): def test_basics(self): - self.assertIsInstance((42, 3.14, ''), Tuple) - self.assertIsInstance((42, 3.14, ''), Tuple[int, float, str]) - self.assertIsInstance((42,), Tuple[int]) - self.assertNotIsInstance((3.14,), Tuple[int]) - self.assertNotIsInstance((42, 3.14), Tuple[int, float, str]) - self.assertNotIsInstance((42, 3.14, 100), Tuple[int, float, str]) - self.assertNotIsInstance((42, 3.14, 100), Tuple[int, float]) self.assertTrue(issubclass(Tuple[int, str], Tuple)) self.assertTrue(issubclass(Tuple[int, str], Tuple[int, str])) self.assertFalse(issubclass(int, Tuple)) @@ -382,14 +361,11 @@ class TupleTests(TestCase): pass self.assertTrue(issubclass(MyTuple, Tuple)) - def test_tuple_ellipsis(self): - t = Tuple[int, ...] 
- assert isinstance((), t) - assert isinstance((1,), t) - assert isinstance((1, 2), t) - assert isinstance((1, 2, 3), t) - assert not isinstance((3.14,), t) - assert not isinstance((1, 2, 3.14,), t) + def test_tuple_instance_type_error(self): + with self.assertRaises(TypeError): + isinstance((0, 0), Tuple[int, int]) + with self.assertRaises(TypeError): + isinstance((0, 0), Tuple) def test_tuple_ellipsis_subclass(self): @@ -419,18 +395,6 @@ class TupleTests(TestCase): class CallableTests(TestCase): - def test_basics(self): - c = Callable[[int, float], str] - - def flub(a: int, b: float) -> str: - return str(a * b) - - def flob(a: int, b: int) -> str: - return str(a * b) - - self.assertIsInstance(flub, c) - self.assertNotIsInstance(flob, c) - def test_self_subclass(self): self.assertTrue(issubclass(Callable[[int], int], Callable)) self.assertFalse(issubclass(Callable, Callable[[int], int])) @@ -453,91 +417,6 @@ class CallableTests(TestCase): self.assertNotEqual(Callable[[int], int], Callable[[], int]) self.assertNotEqual(Callable[[int], int], Callable) - def test_with_none(self): - c = Callable[[None], None] - - def flub(self: None) -> None: - pass - - def flab(self: Any) -> None: - pass - - def flob(self: None) -> Any: - pass - - self.assertIsInstance(flub, c) - self.assertIsInstance(flab, c) - self.assertNotIsInstance(flob, c) # Test contravariance. - - def test_with_subclasses(self): - c = Callable[[Employee, Manager], Employee] - - def flub(a: Employee, b: Employee) -> Manager: - return Manager() - - def flob(a: Manager, b: Manager) -> Employee: - return Employee() - - self.assertIsInstance(flub, c) - self.assertNotIsInstance(flob, c) - - def test_with_default_args(self): - c = Callable[[int], int] - - def flub(a: int, b: float = 3.14) -> int: - return a - - def flab(a: int, *, b: float = 3.14) -> int: - return a - - def flob(a: int = 42) -> int: - return a - - self.assertIsInstance(flub, c) - self.assertIsInstance(flab, c) - self.assertIsInstance(flob, c) - - def test_with_varargs(self): - c = Callable[[int], int] - - def flub(*args) -> int: - return 42 - - def flab(*args: int) -> int: - return 42 - - def flob(*args: float) -> int: - return 42 - - self.assertIsInstance(flub, c) - self.assertIsInstance(flab, c) - self.assertNotIsInstance(flob, c) - - def test_with_method(self): - - class C: - - def imethod(self, arg: int) -> int: - self.last_arg = arg - return arg + 1 - - @classmethod - def cmethod(cls, arg: int) -> int: - cls.last_cls_arg = arg - return arg + 1 - - @staticmethod - def smethod(arg: int) -> int: - return arg + 1 - - ct = Callable[[int], int] - self.assertIsInstance(C().imethod, ct) - self.assertIsInstance(C().cmethod, ct) - self.assertIsInstance(C.cmethod, ct) - self.assertIsInstance(C().smethod, ct) - self.assertIsInstance(C.smethod, ct) - self.assertIsInstance(C.imethod, Callable[[Any, int], int]) - def test_cannot_subclass(self): with self.assertRaises(TypeError): @@ -556,21 +435,21 @@ class CallableTests(TestCase): with self.assertRaises(TypeError): c() - def test_varargs(self): - ct = Callable[..., int] - - def foo(a, b) -> int: - return 42 - - def bar(a=42) -> int: - return a - - def baz(*, x, y, z) -> int: - return 100 + def test_callable_instance_works(self): + f = lambda: None + assert isinstance(f, Callable) + assert not isinstance(None, Callable) - self.assertIsInstance(foo, ct) - self.assertIsInstance(bar, ct) - self.assertIsInstance(baz, ct) + def test_callable_instance_type_error(self): + f = lambda: None + with self.assertRaises(TypeError): + assert 
isinstance(f, Callable[[], None]) + with self.assertRaises(TypeError): + assert isinstance(f, Callable[[], Any]) + with self.assertRaises(TypeError): + assert not isinstance(None, Callable[[], None]) + with self.assertRaises(TypeError): + assert not isinstance(None, Callable[[], Any]) def test_repr(self): ct0 = Callable[[], bool] @@ -580,6 +459,14 @@ class CallableTests(TestCase): ctv = Callable[..., str] self.assertEqual(repr(ctv), 'typing.Callable[..., str]') + def test_callable_with_ellipsis(self): + + def foo(a: Callable[..., T]): + pass + + self.assertEqual(get_type_hints(foo, globals(), locals()), + {'a': Callable[..., T]}) + XK = TypeVar('XK', str, bytes) XV = TypeVar('XV') @@ -659,6 +546,10 @@ class ProtocolTests(TestCase): assert issubclass(list, typing.Reversible) assert not issubclass(int, typing.Reversible) + def test_protocol_instance_type_error(self): + with self.assertRaises(TypeError): + isinstance([], typing.Reversible) + class GenericTests(TestCase): @@ -672,6 +563,14 @@ class GenericTests(TestCase): with self.assertRaises(TypeError): Y[str, bytes] + def test_init(self): + T = TypeVar('T') + S = TypeVar('S') + with self.assertRaises(TypeError): + Generic[T, T] + with self.assertRaises(TypeError): + Generic[T, S, T] + def test_repr(self): self.assertEqual(repr(SimpleMapping), __name__ + '.' + 'SimpleMapping[~XK, ~XV]') @@ -824,11 +723,11 @@ class VarianceTests(TestCase): typing.Sequence[Manager]) def test_covariance_mapping(self): - # Ditto for Mapping (a generic class with two parameters). + # Ditto for Mapping (covariant in the value, invariant in the key). assert issubclass(typing.Mapping[Employee, Manager], typing.Mapping[Employee, Employee]) - assert issubclass(typing.Mapping[Manager, Employee], - typing.Mapping[Employee, Employee]) + assert not issubclass(typing.Mapping[Manager, Employee], + typing.Mapping[Employee, Employee]) assert not issubclass(typing.Mapping[Employee, Manager], typing.Mapping[Manager, Manager]) assert not issubclass(typing.Mapping[Manager, Employee], @@ -889,6 +788,11 @@ class ForwardRefTests(TestCase): right_hints = get_type_hints(t.add_right, globals(), locals()) assert right_hints['node'] == Optional[Node[T]] + def test_forwardref_instance_type_error(self): + fr = typing._ForwardRef('int') + with self.assertRaises(TypeError): + isinstance(42, fr) + def test_union_forward(self): def foo(a: Union['T']): @@ -913,6 +817,14 @@ class ForwardRefTests(TestCase): self.assertEqual(get_type_hints(foo, globals(), locals()), {'a': Callable[[T], T]}) + def test_callable_with_ellipsis_forward(self): + + def foo(a: 'Callable[..., T]'): + pass + + self.assertEqual(get_type_hints(foo, globals(), locals()), + {'a': Callable[..., T]}) + def test_syntax_error(self): with self.assertRaises(SyntaxError): @@ -1069,50 +981,17 @@ class CollectionsAbcTests(TestCase): def test_list(self): assert issubclass(list, typing.List) - assert isinstance([], typing.List) - assert not isinstance((), typing.List) - t = typing.List[int] - assert isinstance([], t) - assert isinstance([42], t) - assert not isinstance([''], t) def test_set(self): assert issubclass(set, typing.Set) assert not issubclass(frozenset, typing.Set) - assert isinstance(set(), typing.Set) - assert not isinstance({}, typing.Set) - t = typing.Set[int] - assert isinstance(set(), t) - assert isinstance({42}, t) - assert not isinstance({''}, t) def test_frozenset(self): assert issubclass(frozenset, typing.FrozenSet) assert not issubclass(set, typing.FrozenSet) - assert isinstance(frozenset(), typing.FrozenSet) - 
assert not isinstance({}, typing.FrozenSet) - t = typing.FrozenSet[int] - assert isinstance(frozenset(), t) - assert isinstance(frozenset({42}), t) - assert not isinstance(frozenset({''}), t) - assert not isinstance({42}, t) - - def test_mapping_views(self): - # TODO: These tests are kind of lame. - assert isinstance({}.keys(), typing.KeysView) - assert isinstance({}.items(), typing.ItemsView) - assert isinstance({}.values(), typing.ValuesView) def test_dict(self): assert issubclass(dict, typing.Dict) - assert isinstance({}, typing.Dict) - assert not isinstance([], typing.Dict) - t = typing.Dict[int, str] - assert isinstance({}, t) - assert isinstance({42: ''}, t) - assert not isinstance({42: 42}, t) - assert not isinstance({'': 42}, t) - assert not isinstance({'': ''}, t) def test_no_list_instantiation(self): with self.assertRaises(TypeError): @@ -1191,8 +1070,6 @@ class CollectionsAbcTests(TestCase): yield 42 g = foo() assert issubclass(type(g), typing.Generator) - assert isinstance(g, typing.Generator) - assert not isinstance(foo, typing.Generator) assert issubclass(typing.Generator[Manager, Employee, Manager], typing.Generator[Employee, Manager, Employee]) assert not issubclass(typing.Generator[Manager, Manager, Manager], @@ -1228,12 +1105,6 @@ class CollectionsAbcTests(TestCase): assert len(MMB[str, str]()) == 0 assert len(MMB[KT, VT]()) == 0 - def test_recursive_dict(self): - D = typing.Dict[int, 'D'] # Uses a _ForwardRef - assert isinstance({}, D) # Easy - assert isinstance({0: {}}, D) # Touches _ForwardRef - assert isinstance({0: {0: {}}}, D) # Etc... - class NamedTupleTests(TestCase): @@ -1294,8 +1165,6 @@ class RETests(TestCase): def test_basics(self): pat = re.compile('[a-z]+', re.I) assert issubclass(pat.__class__, Pattern) - assert isinstance(pat, Pattern[str]) - assert not isinstance(pat, Pattern[bytes]) assert issubclass(type(pat), Pattern) assert issubclass(type(pat), Pattern[str]) @@ -1307,12 +1176,10 @@ class RETests(TestCase): assert issubclass(type(mat), Match[str]) p = Pattern[Union[str, bytes]] - assert isinstance(pat, p) assert issubclass(Pattern[str], Pattern) assert issubclass(Pattern[str], p) m = Match[Union[bytes, str]] - assert isinstance(mat, m) assert issubclass(Match[bytes], Match) assert issubclass(Match[bytes], m) @@ -1327,6 +1194,12 @@ class RETests(TestCase): with self.assertRaises(TypeError): # Too complicated? m[str] + with self.assertRaises(TypeError): + # We don't support isinstance(). + isinstance(42, Pattern) + with self.assertRaises(TypeError): + # We don't support isinstance(). + isinstance(42, Pattern[str]) def test_repr(self): assert repr(Pattern) == 'Pattern[~AnyStr]' diff --git a/Lib/typing.py b/Lib/typing.py index 38e07ad..ddaec3e 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -128,6 +128,8 @@ class TypingMeta(type): class Final: """Mix-in class to prevent instantiation.""" + __slots__ = () + def __new__(self, *args, **kwds): raise TypeError("Cannot instantiate %r" % self.__class__) @@ -176,6 +178,9 @@ class _ForwardRef(TypingMeta): self.__forward_evaluated__ = True return self.__forward_value__ + def __instancecheck__(self, obj): + raise TypeError("Forward references cannot be used with isinstance().") + def __subclasscheck__(self, cls): if not self.__forward_evaluated__: globalns = self.__forward_frame__.f_globals @@ -186,16 +191,6 @@ class _ForwardRef(TypingMeta): return False # Too early. 
return issubclass(cls, self.__forward_value__) - def __instancecheck__(self, obj): - if not self.__forward_evaluated__: - globalns = self.__forward_frame__.f_globals - localns = self.__forward_frame__.f_locals - try: - self._eval_type(globalns, localns) - except NameError: - return False # Too early. - return isinstance(obj, self.__forward_value__) - def __repr__(self): return '_ForwardRef(%r)' % (self.__forward_arg__,) @@ -211,6 +206,8 @@ class _TypeAlias: False. """ + __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') + def __new__(cls, *args, **kwds): """Constructor. @@ -259,8 +256,7 @@ class _TypeAlias: self.impl_type, self.type_checker) def __instancecheck__(self, obj): - return (isinstance(obj, self.impl_type) and - isinstance(self.type_checker(obj), self.type_var)) + raise TypeError("Type aliases cannot be used with isinstance().") def __subclasscheck__(self, cls): if cls is Any: @@ -332,8 +328,8 @@ class AnyMeta(TypingMeta): self = super().__new__(cls, name, bases, namespace, _root=_root) return self - def __instancecheck__(self, instance): - return True + def __instancecheck__(self, obj): + raise TypeError("Any cannot be used with isinstance().") def __subclasscheck__(self, cls): if not isinstance(cls, type): @@ -349,6 +345,8 @@ class Any(Final, metaclass=AnyMeta, _root=True): - As a special case, Any and object are subclasses of each other. """ + __slots__ = () + class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True): """Type variable. @@ -447,7 +445,6 @@ KT = TypeVar('KT') # Key type. VT = TypeVar('VT') # Value type. T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. -KT_co = TypeVar('KT_co', covariant=True) # Key type covariant containers. VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. @@ -548,9 +545,8 @@ class UnionMeta(TypingMeta): def __hash__(self): return hash(self.__union_set_params__) - def __instancecheck__(self, instance): - return (self.__union_set_params__ is not None and - any(isinstance(instance, t) for t in self.__union_params__)) + def __instancecheck__(self, obj): + raise TypeError("Unions cannot be used with isinstance().") def __subclasscheck__(self, cls): if cls is Any: @@ -645,6 +641,8 @@ class Optional(Final, metaclass=OptionalMeta, _root=True): Optional[X] is equivalent to Union[X, type(None)]. """ + __slots__ = () + class TupleMeta(TypingMeta): """Metaclass for Tuple.""" @@ -709,18 +707,8 @@ class TupleMeta(TypingMeta): def __hash__(self): return hash(self.__tuple_params__) - def __instancecheck__(self, t): - if not isinstance(t, tuple): - return False - if self.__tuple_params__ is None: - return True - if self.__tuple_use_ellipsis__: - p = self.__tuple_params__[0] - return all(isinstance(x, p) for x in t) - else: - return (len(t) == len(self.__tuple_params__) and - all(isinstance(x, p) - for x, p in zip(t, self.__tuple_params__))) + def __instancecheck__(self, obj): + raise TypeError("Tuples cannot be used with isinstance().") def __subclasscheck__(self, cls): if cls is Any: @@ -754,6 +742,8 @@ class Tuple(Final, metaclass=TupleMeta, _root=True): To specify a variable-length tuple of homogeneous type, use Sequence[T]. 
""" + __slots__ = () + class CallableMeta(TypingMeta): """Metaclass for Callable.""" @@ -787,7 +777,10 @@ class CallableMeta(TypingMeta): def _eval_type(self, globalns, localns): if self.__args__ is None and self.__result__ is None: return self - args = [_eval_type(t, globalns, localns) for t in self.__args__] + if self.__args__ is Ellipsis: + args = self.__args__ + else: + args = [_eval_type(t, globalns, localns) for t in self.__args__] result = _eval_type(self.__result__, globalns, localns) if args == self.__args__ and result == self.__result__: return self @@ -826,57 +819,14 @@ class CallableMeta(TypingMeta): def __hash__(self): return hash(self.__args__) ^ hash(self.__result__) - def __instancecheck__(self, instance): - if not callable(instance): - return False + def __instancecheck__(self, obj): + # For unparametrized Callable we allow this, because + # typing.Callable should be equivalent to + # collections.abc.Callable. if self.__args__ is None and self.__result__ is None: - return True - assert self.__args__ is not None - assert self.__result__ is not None - my_args, my_result = self.__args__, self.__result__ - import inspect # TODO: Avoid this import. - # Would it be better to use Signature objects? - try: - (args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, - annotations) = inspect.getfullargspec(instance) - except TypeError: - return False # We can't find the signature. Give up. - msg = ("When testing isinstance(<callable>, Callable[...], " - "<calleble>'s annotations must be types.") - if my_args is not Ellipsis: - if kwonlyargs and (not kwonlydefaults or - len(kwonlydefaults) < len(kwonlyargs)): - return False - if isinstance(instance, types.MethodType): - # For methods, getfullargspec() includes self/cls, - # but it's not part of the call signature, so drop it. - del args[0] - min_call_args = len(args) - if defaults: - min_call_args -= len(defaults) - if varargs: - max_call_args = 999999999 - if len(args) < len(my_args): - args += [varargs] * (len(my_args) - len(args)) - else: - max_call_args = len(args) - if not min_call_args <= len(my_args) <= max_call_args: - return False - for my_arg_type, name in zip(my_args, args): - if name in annotations: - annot_type = _type_check(annotations[name], msg) - else: - annot_type = Any - if not issubclass(my_arg_type, annot_type): - return False - # TODO: If mutable type, check invariance? - if 'return' in annotations: - annot_return_type = _type_check(annotations['return'], msg) - # Note contravariance here! - if not issubclass(annot_return_type, my_result): - return False - # Can't find anything wrong... - return True + return isinstance(obj, collections_abc.Callable) + else: + raise TypeError("Callable[] cannot be used with isinstance().") def __subclasscheck__(self, cls): if cls is Any: @@ -900,6 +850,8 @@ class Callable(Final, metaclass=CallableMeta, _root=True): such function types are rarely used as callback types. """ + __slots__ = () + def _gorg(a): """Return the farthest origin of a generic class.""" @@ -1010,6 +962,8 @@ class GenericMeta(TypingMeta, abc.ABCMeta): if not isinstance(p, TypeVar): raise TypeError("Initial parameters must be " "type variables; got %s" % p) + if len(set(params)) != len(params): + raise TypeError("All type variables in Generic[...] 
must be distinct.") else: if len(params) != len(self.__parameters__): raise TypeError("Cannot change parameter count from %d to %d" % @@ -1073,13 +1027,6 @@ class GenericMeta(TypingMeta, abc.ABCMeta): return False return issubclass(cls, self.__extra__) - def __instancecheck__(self, obj): - if super().__instancecheck__(obj): - return True - if self.__extra__ is None: - return False - return isinstance(obj, self.__extra__) - class Generic(metaclass=GenericMeta): """Abstract base class for generic types. @@ -1109,6 +1056,8 @@ class Generic(metaclass=GenericMeta): # Same body as above. """ + __slots__ = () + def __new__(cls, *args, **kwds): next_in_mro = object # Look for the last occurrence of Generic or Generic[...]. @@ -1234,6 +1183,9 @@ class _ProtocolMeta(GenericMeta): from Generic. """ + def __instancecheck__(self, obj): + raise TypeError("Protocols cannot be used with isinstance().") + def __subclasscheck__(self, cls): if not self._is_protocol: # No structural checks since this isn't a protocol. @@ -1272,6 +1224,7 @@ class _ProtocolMeta(GenericMeta): attr != '__abstractmethods__' and attr != '_is_protocol' and attr != '__dict__' and + attr != '__slots__' and attr != '_get_protocol_attrs' and attr != '__parameters__' and attr != '__origin__' and @@ -1289,6 +1242,8 @@ class _Protocol(metaclass=_ProtocolMeta): such as Hashable). """ + __slots__ = () + _is_protocol = True @@ -1299,14 +1254,15 @@ Hashable = collections_abc.Hashable # Not generic. class Iterable(Generic[T_co], extra=collections_abc.Iterable): - pass + __slots__ = () class Iterator(Iterable[T_co], extra=collections_abc.Iterator): - pass + __slots__ = () class SupportsInt(_Protocol): + __slots__ = () @abstractmethod def __int__(self) -> int: @@ -1314,6 +1270,7 @@ class SupportsInt(_Protocol): class SupportsFloat(_Protocol): + __slots__ = () @abstractmethod def __float__(self) -> float: @@ -1321,6 +1278,7 @@ class SupportsFloat(_Protocol): class SupportsComplex(_Protocol): + __slots__ = () @abstractmethod def __complex__(self) -> complex: @@ -1328,30 +1286,34 @@ class SupportsComplex(_Protocol): class SupportsBytes(_Protocol): + __slots__ = () @abstractmethod def __bytes__(self) -> bytes: pass -class SupportsAbs(_Protocol[T]): +class SupportsAbs(_Protocol[T_co]): + __slots__ = () @abstractmethod - def __abs__(self) -> T: + def __abs__(self) -> T_co: pass -class SupportsRound(_Protocol[T]): +class SupportsRound(_Protocol[T_co]): + __slots__ = () @abstractmethod - def __round__(self, ndigits: int = 0) -> T: + def __round__(self, ndigits: int = 0) -> T_co: pass -class Reversible(_Protocol[T]): +class Reversible(_Protocol[T_co]): + __slots__ = () @abstractmethod - def __reversed__(self) -> 'Iterator[T]': + def __reversed__(self) -> 'Iterator[T_co]': pass @@ -1359,7 +1321,7 @@ Sized = collections_abc.Sized # Not generic. class Container(Generic[T_co], extra=collections_abc.Container): - pass + __slots__ = () # Callable was defined earlier. @@ -1374,7 +1336,8 @@ class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): pass -class Mapping(Sized, Iterable[KT_co], Container[KT_co], Generic[KT_co, VT_co], +# NOTE: Only the value type is covariant. 
+class Mapping(Sized, Iterable[KT], Container[KT], Generic[VT_co], extra=collections_abc.Mapping): pass @@ -1399,19 +1362,7 @@ class ByteString(Sequence[int], extra=collections_abc.ByteString): ByteString.register(type(memoryview(b''))) -class _ListMeta(GenericMeta): - - def __instancecheck__(self, obj): - if not super().__instancecheck__(obj): - return False - itemtype = self.__parameters__[0] - for x in obj: - if not isinstance(x, itemtype): - return False - return True - - -class List(list, MutableSequence[T], metaclass=_ListMeta): +class List(list, MutableSequence[T]): def __new__(cls, *args, **kwds): if _geqv(cls, List): @@ -1420,19 +1371,7 @@ class List(list, MutableSequence[T], metaclass=_ListMeta): return list.__new__(cls, *args, **kwds) -class _SetMeta(GenericMeta): - - def __instancecheck__(self, obj): - if not super().__instancecheck__(obj): - return False - itemtype = self.__parameters__[0] - for x in obj: - if not isinstance(x, itemtype): - return False - return True - - -class Set(set, MutableSet[T], metaclass=_SetMeta): +class Set(set, MutableSet[T]): def __new__(cls, *args, **kwds): if _geqv(cls, Set): @@ -1441,7 +1380,7 @@ class Set(set, MutableSet[T], metaclass=_SetMeta): return set.__new__(cls, *args, **kwds) -class _FrozenSetMeta(_SetMeta): +class _FrozenSetMeta(GenericMeta): """This metaclass ensures set is not a subclass of FrozenSet. Without this metaclass, set would be considered a subclass of @@ -1454,13 +1393,9 @@ class _FrozenSetMeta(_SetMeta): return False return super().__subclasscheck__(cls) - def __instancecheck__(self, obj): - if issubclass(obj.__class__, Set): - return False - return super().__instancecheck__(obj) - class FrozenSet(frozenset, AbstractSet[T_co], metaclass=_FrozenSetMeta): + __slots__ = () def __new__(cls, *args, **kwds): if _geqv(cls, FrozenSet): @@ -1473,13 +1408,13 @@ class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): pass -class KeysView(MappingView[KT_co], AbstractSet[KT_co], +class KeysView(MappingView[KT], AbstractSet[KT], extra=collections_abc.KeysView): pass -# TODO: Enable Set[Tuple[KT_co, VT_co]] instead of Generic[KT_co, VT_co]. -class ItemsView(MappingView, Generic[KT_co, VT_co], +# TODO: Enable Set[Tuple[KT, VT_co]] instead of Generic[KT, VT_co]. +class ItemsView(MappingView, Generic[KT, VT_co], extra=collections_abc.ItemsView): pass @@ -1488,20 +1423,7 @@ class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): pass -class _DictMeta(GenericMeta): - - def __instancecheck__(self, obj): - if not super().__instancecheck__(obj): - return False - keytype, valuetype = self.__parameters__ - for key, value in obj.items(): - if not (isinstance(key, keytype) and - isinstance(value, valuetype)): - return False - return True - - -class Dict(dict, MutableMapping[KT, VT], metaclass=_DictMeta): +class Dict(dict, MutableMapping[KT, VT]): def __new__(cls, *args, **kwds): if _geqv(cls, Dict): @@ -1521,6 +1443,7 @@ else: class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], extra=_G_base): + __slots__ = () def __new__(cls, *args, **kwds): if _geqv(cls, Generator): @@ -1564,6 +1487,8 @@ class IO(Generic[AnyStr]): way to track the other distinctions in the type system. 
""" + __slots__ = () + @abstractproperty def mode(self) -> str: pass @@ -1648,6 +1573,8 @@ class IO(Generic[AnyStr]): class BinaryIO(IO[bytes]): """Typed version of the return of open() in binary mode.""" + __slots__ = () + @abstractmethod def write(self, s: Union[bytes, bytearray]) -> int: pass @@ -1660,6 +1587,8 @@ class BinaryIO(IO[bytes]): class TextIO(IO[str]): """Typed version of the return of open() in text mode.""" + __slots__ = () + @abstractproperty def buffer(self) -> BinaryIO: pass diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py index eada0a9..a7fd017 100644 --- a/Lib/urllib/request.py +++ b/Lib/urllib/request.py @@ -230,6 +230,7 @@ def urlretrieve(url, filename=None, reporthook=None, data=None): return result def urlcleanup(): + """Clean up temporary files from urlretrieve calls.""" for temp_file in _url_tempfiles: try: os.unlink(temp_file) @@ -518,6 +518,7 @@ Duncan Grisby Olivier Grisel Fabian Groffen Eric Groo +Daniel Andrade Groppe Dag Gruneau Filip Gruszczyński Thomas Guettler @@ -860,6 +861,7 @@ Anne Lord Tom Loredo Justin Love Ned Jackson Lovely +Peter Lovett Chalmer Lowe Jason Lowe Tony Lownds @@ -1125,6 +1127,7 @@ Paul Prescod Donovan Preston Paul Price Iuliia Proskurnia +Dorian Pula Jyrki Pulliainen Steve Purcell Eduardo Pérez @@ -13,6 +13,10 @@ Core and Builtins Library ------- +- Issue #23973: Update typing.py from GitHub repo. + +- Issue #23888: Handle fractional time in cookie expiry. Patch by ssh. + - Issue #23652: Make it possible to compile the select module against the libc headers from the Linux Standard Base, which do not include some EPOLL macros. Patch by Matt Frank. @@ -36,11 +40,20 @@ Library Documentation ------------- +- Issue #20769: Improve reload() docs. Patch by Dorian Pula. + - Issue #23589: Remove duplicate sentence from the FAQ. Patch by Yongzhi Pan. - Issue #24729: Correct IO tutorial to match implementation regarding encoding parameter to open function. +Tests +----- + +- Issue #24751: When running regrtest with the ``-w`` command line option, + a test run is no longer marked as a failure if all tests succeed when + re-run. + What's New in Python 3.5.0 beta 4? 
================================== diff --git a/Tools/msi/make_zip.py b/Tools/msi/make_zip.py index 521ba93..bace19a 100644 --- a/Tools/msi/make_zip.py +++ b/Tools/msi/make_zip.py @@ -1,4 +1,5 @@ import argparse +import py_compile import re import sys import shutil @@ -82,7 +83,16 @@ def copy_to_layout(target, rel_sources): with ZipFile(str(target), 'w', ZIP_DEFLATED) as f: for s, rel in rel_sources: - f.write(str(s), str(rel)) + if rel.suffix.lower() == '.py': + pyc = Path(tempfile.gettempdir()) / rel.with_suffix('.pyc').name + try: + py_compile.compile(str(s), str(pyc), str(rel), doraise=True, optimize=2) + except py_compile.PyCompileError: + f.write(str(s), str(rel)) + else: + f.write(str(pyc), str(rel.with_suffix('.pyc'))) + else: + f.write(str(s), str(rel)) count += 1 else: diff --git a/Tools/msi/tcltk/tcltk.wixproj b/Tools/msi/tcltk/tcltk.wixproj index f66fc14..e1addd9 100644 --- a/Tools/msi/tcltk/tcltk.wixproj +++ b/Tools/msi/tcltk/tcltk.wixproj @@ -27,6 +27,13 @@ <Target_>DLLs\</Target_> <Group>tcltk_dlls</Group> </InstallFiles> + <InstallFiles Include="$(VCInstallDir)redist\$(Platform)\Microsoft.VC$(PlatformToolset.Substring(1)).CRT\vcruntime$(PlatformToolset.Substring(1)).dll"> + <SourceBase>$(VCInstallDir)redist\$(Platform)\</SourceBase> + <Source>$(VCInstallDir)redist\$(Platform)\</Source> + <TargetBase>$(VCInstallDir)redist\$(Platform)\</TargetBase> + <Target_>DLLs\</Target_> + <Group>tcltk_dlls</Group> + </InstallFiles> <InstallFiles Include="$(tcltkDir)lib\**\*"> <SourceBase>$(tcltkDir)</SourceBase> |
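The typing.py and test_typing.py hunks above replace the old structural ``isinstance()`` checks with a ``TypeError``, while ``issubclass()`` keeps working; bare ``typing.Callable`` still supports ``isinstance()`` because it delegates to ``collections.abc.Callable``. A short behaviour sketch, assuming the updated Lib/typing.py from this commit:

    # Behaviour sketch for the updated typing module (this commit's Lib/typing.py).
    from typing import Any, Callable, Tuple, Union

    assert issubclass(int, Union[int, str])   # subclass checks are still supported
    assert isinstance(len, Callable)          # bare Callable uses collections.abc.Callable

    # Parametrized and special forms now refuse isinstance() outright.
    for tp in (Any, Union[int, str], Tuple[int, int], Callable[[], None]):
        try:
            isinstance(42, tp)
        except TypeError:
            pass  # expected: "... cannot be used with isinstance()."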
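The Tools/msi/make_zip.py hunk byte-compiles each .py source into the layout at optimization level 2 and falls back to shipping the source when compilation fails. Restated as standalone code; the source path is illustrative only:

    # Standalone restatement of the copy_to_layout fallback added above.
    import py_compile
    import tempfile
    from pathlib import Path

    src = Path("Lib/typing.py")   # illustrative source file
    pyc = Path(tempfile.gettempdir()) / src.with_suffix(".pyc").name
    try:
        py_compile.compile(str(src), str(pyc), str(src), doraise=True, optimize=2)
        ship, arcname = pyc, src.with_suffix(".pyc")   # ship the compiled .pyc
    except py_compile.PyCompileError:
        ship, arcname = src, src                       # fall back to the plain .py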