Diffstat (limited to 'Lib/sets.py')
 -rw-r--r--  Lib/sets.py | 44
 1 file changed, 11 insertions(+), 33 deletions(-)
diff --git a/Lib/sets.py b/Lib/sets.py
index b1743da..fe31a0b 100644
--- a/Lib/sets.py
+++ b/Lib/sets.py
@@ -54,29 +54,7 @@ what's tested is actually `z in y'.
 # - Raymond Hettinger added a number of speedups and other
 #   improvements.
 
-from __future__ import generators
-try:
-    from itertools import ifilter, ifilterfalse
-except ImportError:
-    # Code to make the module run under Py2.2
-    def ifilter(predicate, iterable):
-        if predicate is None:
-            def predicate(x):
-                return x
-        for x in iterable:
-            if predicate(x):
-                yield x
-    def ifilterfalse(predicate, iterable):
-        if predicate is None:
-            def predicate(x):
-                return x
-        for x in iterable:
-            if not predicate(x):
-                yield x
-    try:
-        True, False
-    except NameError:
-        True, False = (0==0, 0!=0)
+from itertools import ifilter, ifilterfalse
 
 __all__ = ['BaseSet', 'Set', 'ImmutableSet']
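
The dropped fallback only mattered on Python 2.2, where itertools did not exist and the True/False built-ins might not be defined. For readers unfamiliar with the two itertools functions, a minimal sketch of their behaviour; the names items and is_even below are illustrative only:

    from itertools import ifilter, ifilterfalse

    # ifilter keeps the elements for which the predicate is true,
    # ifilterfalse keeps the ones for which it is false.
    items = [1, 2, 3, 4, 5]
    is_even = lambda x: x % 2 == 0

    assert list(ifilter(is_even, items)) == [2, 4]
    assert list(ifilterfalse(is_even, items)) == [1, 3, 5]
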
@@ -235,7 +213,7 @@ class BaseSet(object):
             little, big = self, other
         else:
             little, big = other, self
-        common = ifilter(big._data.has_key, little)
+        common = ifilter(big._data.__contains__, little)
         return self.__class__(common)
 
     def __xor__(self, other):
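
Both dict.has_key and dict.__contains__ answer the same membership question, so either bound method works as the filter predicate; __contains__ is the special method behind the in operator and survives into Python 3, while has_key does not. A small sketch of the idea with made-up data (big_data and little are illustrative stand-ins for big._data and the smaller operand):

    from itertools import ifilter

    big_data = {'a': 1, 'b': 2, 'c': 3}      # stands in for big._data
    little = ['b', 'c', 'x']                 # stands in for the smaller operand

    # The bound method big_data.__contains__ is a plain callable:
    # big_data.__contains__('b') is the same test as 'b' in big_data.
    assert list(ifilter(big_data.__contains__, little)) == ['b', 'c']
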
@@ -260,9 +238,9 @@ class BaseSet(object):
             otherdata = other._data
         except AttributeError:
             otherdata = Set(other)._data
-        for elt in ifilterfalse(otherdata.has_key, selfdata):
+        for elt in ifilterfalse(otherdata.__contains__, selfdata):
             data[elt] = value
-        for elt in ifilterfalse(selfdata.has_key, otherdata):
+        for elt in ifilterfalse(selfdata.__contains__, otherdata):
             data[elt] = value
         return result
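
The two ifilterfalse passes build the symmetric difference directly: first the elements of self that are missing from other, then the elements of other missing from self. A toy illustration with plain dicts standing in for the _data mappings (the variable names mirror the code above; the literal values are made up):

    from itertools import ifilterfalse

    selfdata = {1: True, 2: True, 3: True}
    otherdata = {3: True, 4: True}

    data = {}
    for elt in ifilterfalse(otherdata.__contains__, selfdata):   # in self, not in other
        data[elt] = True
    for elt in ifilterfalse(selfdata.__contains__, otherdata):   # in other, not in self
        data[elt] = True

    assert sorted(data) == [1, 2, 4]
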
@@ -287,7 +265,7 @@ class BaseSet(object):
         except AttributeError:
             otherdata = Set(other)._data
         value = True
-        for elt in ifilterfalse(otherdata.has_key, self):
+        for elt in ifilterfalse(otherdata.__contains__, self):
             data[elt] = value
         return result
@@ -313,7 +291,7 @@ class BaseSet(object):
         self._binary_sanity_check(other)
         if len(self) > len(other):  # Fast check for obvious cases
             return False
-        for elt in ifilterfalse(other._data.has_key, self):
+        for elt in ifilterfalse(other._data.__contains__, self):
             return False
         return True
@@ -322,7 +300,7 @@ class BaseSet(object):
         self._binary_sanity_check(other)
         if len(self) < len(other):  # Fast check for obvious cases
             return False
-        for elt in ifilterfalse(self._data.has_key, other):
+        for elt in ifilterfalse(self._data.__contains__, other):
             return False
         return True
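
In both issubset and issuperset the loop body is a bare return False, so the ifilterfalse generator is asked for at most one element: the first member missing from the other set ends the test immediately, and an empty result means the relation holds. Roughly, as a hypothetical standalone helper (is_subset is not part of the module):

    from itertools import ifilterfalse

    def is_subset(a, b):
        # a is a subset of b unless some element of a is missing from b;
        # the generator yields at most one element before we return.
        for elt in ifilterfalse(b.__contains__, a):
            return False
        return True

    smaller = {1: True, 2: True}
    bigger = {1: True, 2: True, 3: True}
    assert is_subset(smaller, bigger) and not is_subset(bigger, smaller)
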
@@ -338,6 +316,9 @@ class BaseSet(object):
         self._binary_sanity_check(other)
         return len(self) > len(other) and self.issuperset(other)
 
+    # We inherit object.__hash__, so we must deny this explicitly
+    __hash__ = None
+
     # Assorted helpers
 
     def _binary_sanity_check(self, other):
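
Moving __hash__ = None from Set up to BaseSet blocks the inherited object.__hash__ for every subclass that does not define its own hash, so mutable Set instances stay unhashable while ImmutableSet, which provides __hash__, still works as a dictionary key. Expected behaviour, assuming an interpreter that honours __hash__ = None (2.6 and later):

    from sets import Set, ImmutableSet

    s = Set([1, 2])
    try:
        hash(s)                          # mutable sets must not work as dict keys
    except TypeError:
        pass
    else:
        raise AssertionError("expected Set to be unhashable")

    d = {ImmutableSet([1, 2]): "ok"}     # ImmutableSet defines __hash__
    assert d[ImmutableSet([1, 2])] == "ok"
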
@@ -439,9 +420,6 @@ class Set(BaseSet):
     def __setstate__(self, data):
         self._data, = data
 
-    # We inherit object.__hash__, so we must deny this explicitly
-    __hash__ = None
-
     # In-place union, intersection, differences.
     # Subtle: The xyz_update() functions deliberately return None,
     # as do all mutating operations on built-in container types.
@@ -503,7 +481,7 @@ class Set(BaseSet):
             other = Set(other)
         if self is other:
             self.clear()
-        for elt in ifilter(data.has_key, other):
+        for elt in ifilter(data.__contains__, other):
             del data[elt]
 
     # Python dict-like mass mutations: update, clear
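
Note the direction of the filter in difference_update: the loop iterates over other and keeps only the elements already present in data, so every del is guaranteed to succeed and the dict being modified is never the one being iterated. A standalone sketch of the same pattern (the literal values are made up):

    from itertools import ifilter

    data = {1: True, 2: True, 3: True}
    other = [2, 3, 4]              # 4 is not in data and is simply skipped

    for elt in ifilter(data.__contains__, other):
        del data[elt]              # elt is known to be present, so no KeyError

    assert sorted(data) == [1]
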