author | Ivan Levkivskyi <levkivskyi@gmail.com> | 2017-09-13 23:25:15 (GMT) |
---|---|---|
committer | Łukasz Langa <lukasz@langa.pl> | 2017-09-13 23:25:15 (GMT) |
commit | 65bc62052fe5d550cb14c0033e8a2550618fb7b9 (patch) | |
tree | 6980d9a0af62cb7793b5da83fb33511ae31514c9 /Lib | |
parent | f6e61df01536493f1280cd07639c7ff9bffb2cdc (diff) | |
bpo-28556: Minor updates to typing module (#3550)
* Copy changes to typing from upstream repo
* Add NEWS entry
Diffstat (limited to 'Lib')
-rw-r--r-- | Lib/test/test_typing.py | 7 |
-rw-r--r-- | Lib/typing.py | 79 |
2 files changed, 36 insertions, 50 deletions
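Most of the typing.py churn below is mechanical: the module-level `_gorg()` and `_geqv()` helpers are removed, and `GenericMeta.__new__` instead caches a `_gorg` attribute on every generic class, so former `_geqv(cls, X)` checks become plain `cls._gorg is X` identity tests. A minimal sketch of that idea, using a toy stand-in rather than typing's real metaclass (`ToyGeneric` and its attributes are illustrative only, not typing internals):

```python
class ToyGeneric:
    """Stand-in for a class produced by GenericMeta.__new__ (illustrative only)."""

    def __init__(self, name, origin=None):
        self.__name__ = name
        self.__origin__ = origin
        # New approach: cache the farthest origin once, at creation time.
        self._gorg = self if origin is None else origin._gorg


def _gorg(a):
    """Old helper: walk the __origin__ chain on every call."""
    while a.__origin__ is not None:
        a = a.__origin__
    return a


def _geqv(a, b):
    """Old helper: two generics are 'equivalent' when they share an origin."""
    return _gorg(a) is _gorg(b)


base = ToyGeneric('List')                             # plays the role of List
parameterized = ToyGeneric('List[int]', origin=base)  # plays the role of List[int]

# The old and new spellings agree, but the new one is a single attribute load.
assert _geqv(parameterized, base)
assert parameterized._gorg is base
```

This also explains the smaller edits in the diff: `_ProtocolMeta` now skips `_gorg` when collecting protocol attributes, and `GenericMeta.__new__` stores it via `super().__setattr__`, presumably to bypass `GenericMeta.__setattr__`'s redirection of non-dunder attributes to the origin class.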
diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py
index fd2d93c..a351be1 100644
--- a/Lib/test/test_typing.py
+++ b/Lib/test/test_typing.py
@@ -1069,6 +1069,13 @@ class GenericTests(BaseTestCase):
         for t in things + [Any]:
             self.assertEqual(t, copy(t))
             self.assertEqual(t, deepcopy(t))
+            if sys.version_info >= (3, 3):
+                # From copy module documentation:
+                # It does "copy" functions and classes (shallow and deeply), by returning
+                # the original object unchanged; this is compatible with the way these
+                # are treated by the pickle module.
+                self.assertTrue(t is copy(t))
+                self.assertTrue(t is deepcopy(t))
 
     def test_weakref_all(self):
         T = TypeVar('T')
diff --git a/Lib/typing.py b/Lib/typing.py
index c487afc..609f813 100644
--- a/Lib/typing.py
+++ b/Lib/typing.py
@@ -376,7 +376,7 @@ def _type_check(arg, msg):
     if (
         type(arg).__name__ in ('_Union', '_Optional') and
         not getattr(arg, '__origin__', None) or
-        isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol)
+        isinstance(arg, TypingMeta) and arg._gorg in (Generic, _Protocol)
     ):
         raise TypeError("Plain %s is not valid as type argument" % arg)
     return arg
@@ -849,29 +849,6 @@ class _Optional(_FinalTypingBase, _root=True):
 Optional = _Optional(_root=True)
 
 
-def _gorg(a):
-    """Return the farthest origin of a generic class (internal helper)."""
-    assert isinstance(a, GenericMeta)
-    while a.__origin__ is not None:
-        a = a.__origin__
-    return a
-
-
-def _geqv(a, b):
-    """Return whether two generic classes are equivalent (internal helper).
-
-    The intention is to consider generic class X and any of its
-    parameterized forms (X[T], X[int], etc.) as equivalent.
-
-    However, X is not equivalent to a subclass of X.
-
-    The relation is reflexive, symmetric and transitive.
-    """
-    assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta)
-    # Reduce each to its origin.
-    return _gorg(a) is _gorg(b)
-
-
 def _next_in_mro(cls):
     """Helper for Generic.__new__.
 
@@ -881,7 +858,7 @@ def _next_in_mro(cls):
     next_in_mro = object
     # Look for the last occurrence of Generic or Generic[...].
     for i, c in enumerate(cls.__mro__[:-1]):
-        if isinstance(c, GenericMeta) and _gorg(c) is Generic:
+        if isinstance(c, GenericMeta) and c._gorg is Generic:
             next_in_mro = cls.__mro__[i + 1]
     return next_in_mro
 
@@ -991,14 +968,15 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
         initial_bases = bases
         if extra is not None and type(extra) is abc.ABCMeta and extra not in bases:
             bases = (extra,) + bases
-        bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases)
+        bases = tuple(b._gorg if isinstance(b, GenericMeta) else b for b in bases)
 
         # remove bare Generic from bases if there are other generic bases
         if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
             bases = tuple(b for b in bases if b is not Generic)
         namespace.update({'__origin__': origin, '__extra__': extra})
         self = super().__new__(cls, name, bases, namespace, _root=True)
-
+        super(GenericMeta, self).__setattr__('_gorg',
+                                             self if not origin else origin._gorg)
         self.__parameters__ = tvars
         # Be prepared that GenericMeta will be subclassed by TupleMeta
         # and CallableMeta, those two allow ..., (), or [] in __args___.
@@ -1041,7 +1019,7 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
     def _abc_negative_cache(self):
         if isinstance(self.__extra__, abc.ABCMeta):
             return self.__extra__._abc_negative_cache
-        return _gorg(self)._abc_generic_negative_cache
+        return self._gorg._abc_generic_negative_cache
 
     @_abc_negative_cache.setter
     def _abc_negative_cache(self, value):
@@ -1055,7 +1033,7 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
     def _abc_negative_cache_version(self):
         if isinstance(self.__extra__, abc.ABCMeta):
             return self.__extra__._abc_negative_cache_version
-        return _gorg(self)._abc_generic_negative_cache_version
+        return self._gorg._abc_generic_negative_cache_version
 
     @_abc_negative_cache_version.setter
     def _abc_negative_cache_version(self, value):
@@ -1105,7 +1083,7 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
         if self.__origin__ is None:
             return self
         tree_args = _subs_tree(self, tvars, args)
-        return (_gorg(self),) + tuple(tree_args)
+        return (self._gorg,) + tuple(tree_args)
 
     def __eq__(self, other):
         if not isinstance(other, GenericMeta):
@@ -1121,7 +1099,7 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
     def __getitem__(self, params):
         if not isinstance(params, tuple):
             params = (params,)
-        if not params and not _gorg(self) is Tuple:
+        if not params and self._gorg is not Tuple:
             raise TypeError(
                 "Parameter list to %s[...] cannot be empty" % _qualname(self))
         msg = "Parameters to generic types must be types."
@@ -1189,14 +1167,14 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
                               self.__extra__, self.__orig_bases__)
 
     def __setattr__(self, attr, value):
-        # We consider all the subscripted genrics as proxies for original class
+        # We consider all the subscripted generics as proxies for original class
         if (
             attr.startswith('__') and attr.endswith('__') or
             attr.startswith('_abc_')
         ):
             super(GenericMeta, self).__setattr__(attr, value)
         else:
-            super(GenericMeta, _gorg(self)).__setattr__(attr, value)
+            super(GenericMeta, self._gorg).__setattr__(attr, value)
 
 
 # Prevent checks for Generic to crash when defining Generic.
@@ -1209,7 +1187,7 @@ def _generic_new(base_cls, cls, *args, **kwds):
     if cls.__origin__ is None:
         return base_cls.__new__(cls)
     else:
-        origin = _gorg(cls)
+        origin = cls._gorg
         obj = base_cls.__new__(origin)
         try:
             obj.__orig_class__ = cls
@@ -1243,7 +1221,7 @@ class Generic(metaclass=GenericMeta):
     __slots__ = ()
 
     def __new__(cls, *args, **kwds):
-        if _geqv(cls, Generic):
+        if cls._gorg is Generic:
             raise TypeError("Type Generic cannot be instantiated; "
                             "it can be used only as a base class")
         return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
@@ -1265,7 +1243,7 @@ class TupleMeta(GenericMeta):
 
     @_tp_cache
     def __getitem__(self, parameters):
-        if self.__origin__ is not None or not _geqv(self, Tuple):
+        if self.__origin__ is not None or self._gorg is not Tuple:
             # Normal generic rules apply if this is not the first subscription
             # or a subscription of a subclass.
             return super().__getitem__(parameters)
@@ -1307,7 +1285,7 @@ class Tuple(tuple, extra=tuple, metaclass=TupleMeta):
     __slots__ = ()
 
     def __new__(cls, *args, **kwds):
-        if _geqv(cls, Tuple):
+        if cls._gorg is Tuple:
             raise TypeError("Type Tuple cannot be instantiated; "
                             "use tuple() instead")
         return _generic_new(tuple, cls, *args, **kwds)
@@ -1322,7 +1300,7 @@ class CallableMeta(GenericMeta):
         return self._tree_repr(self._subs_tree())
 
     def _tree_repr(self, tree):
-        if _gorg(self) is not Callable:
+        if self._gorg is not Callable:
             return super()._tree_repr(tree)
         # For actual Callable (not its subclass) we override
         # super()._tree_repr() for nice formatting.
@@ -1342,7 +1320,7 @@ class CallableMeta(GenericMeta):
         with hashable arguments to improve speed.
         """
 
-        if self.__origin__ is not None or not _geqv(self, Callable):
+        if self.__origin__ is not None or self._gorg is not Callable:
             return super().__getitem__(parameters)
         if not isinstance(parameters, tuple) or len(parameters) != 2:
             raise TypeError("Callable must be used as "
@@ -1384,7 +1362,7 @@ class Callable(extra=collections_abc.Callable, metaclass=CallableMeta):
     __slots__ = ()
 
     def __new__(cls, *args, **kwds):
-        if _geqv(cls, Callable):
+        if cls._gorg is Callable:
             raise TypeError("Type Callable cannot be instantiated; "
                             "use a non-abstract subclass instead")
         return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
@@ -1568,7 +1546,7 @@ def no_type_check(arg):
     if isinstance(arg, type):
         arg_attrs = arg.__dict__.copy()
         for attr, val in arg.__dict__.items():
-            if val in arg.__bases__:
+            if val in arg.__bases__ + (arg,):
                 arg_attrs.pop(attr)
         for obj in arg_attrs.values():
             if isinstance(obj, types.FunctionType):
@@ -1687,6 +1665,7 @@ class _ProtocolMeta(GenericMeta):
                         attr != '__annotations__' and
                         attr != '__weakref__' and
                         attr != '_is_protocol' and
+                        attr != '_gorg' and
                         attr != '__dict__' and
                         attr != '__args__' and
                         attr != '__slots__' and
@@ -1892,7 +1871,7 @@ class List(list, MutableSequence[T], extra=list):
     __slots__ = ()
 
     def __new__(cls, *args, **kwds):
-        if _geqv(cls, List):
+        if cls._gorg is List:
            raise TypeError("Type List cannot be instantiated; "
                             "use list() instead")
         return _generic_new(list, cls, *args, **kwds)
@@ -1903,7 +1882,7 @@ class Deque(collections.deque, MutableSequence[T], extra=collections.deque):
     __slots__ = ()
 
    def __new__(cls, *args, **kwds):
-        if _geqv(cls, Deque):
+        if cls._gorg is Deque:
            return collections.deque(*args, **kwds)
        return _generic_new(collections.deque, cls, *args, **kwds)
 
@@ -1913,7 +1892,7 @@ class Set(set, MutableSet[T], extra=set):
     __slots__ = ()
 
     def __new__(cls, *args, **kwds):
-        if _geqv(cls, Set):
+        if cls._gorg is Set:
             raise TypeError("Type Set cannot be instantiated; "
                             "use set() instead")
         return _generic_new(set, cls, *args, **kwds)
@@ -1923,7 +1902,7 @@ class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset):
     __slots__ = ()
 
     def __new__(cls, *args, **kwds):
-        if _geqv(cls, FrozenSet):
+        if cls._gorg is FrozenSet:
             raise TypeError("Type FrozenSet cannot be instantiated; "
                             "use frozenset() instead")
         return _generic_new(frozenset, cls, *args, **kwds)
@@ -2014,7 +1993,7 @@ class Dict(dict, MutableMapping[KT, VT], extra=dict):
     __slots__ = ()
 
     def __new__(cls, *args, **kwds):
-        if _geqv(cls, Dict):
+        if cls._gorg is Dict:
             raise TypeError("Type Dict cannot be instantiated; "
                             "use dict() instead")
         return _generic_new(dict, cls, *args, **kwds)
@@ -2026,7 +2005,7 @@ class DefaultDict(collections.defaultdict, MutableMapping[KT, VT],
     __slots__ = ()
 
     def __new__(cls, *args, **kwds):
-        if _geqv(cls, DefaultDict):
+        if cls._gorg is DefaultDict:
             return collections.defaultdict(*args, **kwds)
         return _generic_new(collections.defaultdict, cls, *args, **kwds)
 
@@ -2036,7 +2015,7 @@ class Counter(collections.Counter, Dict[T, int], extra=collections.Counter):
     __slots__ = ()
 
     def __new__(cls, *args, **kwds):
-        if _geqv(cls, Counter):
+        if cls._gorg is Counter:
             return collections.Counter(*args, **kwds)
         return _generic_new(collections.Counter, cls, *args, **kwds)
 
@@ -2051,7 +2030,7 @@ if hasattr(collections, 'ChainMap'):
         __slots__ = ()
 
         def __new__(cls, *args, **kwds):
-            if _geqv(cls, ChainMap):
+            if cls._gorg is ChainMap:
                 return collections.ChainMap(*args, **kwds)
             return _generic_new(collections.ChainMap, cls, *args, **kwds)
 
@@ -2070,7 +2049,7 @@ class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co],
     __slots__ = ()
 
     def __new__(cls, *args, **kwds):
-        if _geqv(cls, Generator):
+        if cls._gorg is Generator:
             raise TypeError("Type Generator cannot be instantiated; "
                             "create a subclass instead")
         return _generic_new(_G_base, cls, *args, **kwds)
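The assertions added to `GenericTests` pin down behavior that follows from the copy-module rule quoted in the test comment: classes are "copied" by returning the original object, and at this point in typing's history subscripted generics such as `List[int]` are classes (instances of `GenericMeta`). A small standalone illustration of the identities the test now checks, written against the typing module as it stands in this commit (`Plain` is just an arbitrary example class, not part of the patch):

```python
from copy import copy, deepcopy
from typing import Any, List


class Plain:
    """Arbitrary example class, only here to show the copy-module rule."""


# Documented copy-module behavior: (deep)copying a class returns the class
# itself rather than constructing a new object.
assert copy(Plain) is Plain
assert deepcopy(Plain) is Plain

# The added test lines tighten the existing equality checks to identity
# checks for typing objects; for example:
for t in (List[int], Any):
    assert copy(t) is t
    assert deepcopy(t) is t
```

The `sys.version_info >= (3, 3)` guard is inherited from the upstream typing repository (see the commit message), which also targets older interpreters; in CPython's Lib the branch is always taken.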