author     Christian Heimes <christian@cheimes.de>  2013-07-30 13:33:30 (GMT)
committer  Christian Heimes <christian@cheimes.de>  2013-07-30 13:33:30 (GMT)
commit     65aa573082b64023c8bd0f33c655c7f02830cf97 (patch)
tree       d9f710c39e835d6e27a2eccf2b2d1bc86fc74523 /Lib/test
parent     4fec4314cf24d78a498c85e4dc039df5ef7fcc54 (diff)
download   cpython-65aa573082b64023c8bd0f33c655c7f02830cf97.zip
           cpython-65aa573082b64023c8bd0f33c655c7f02830cf97.tar.gz
           cpython-65aa573082b64023c8bd0f33c655c7f02830cf97.tar.bz2
Add more tests for hashlib and hash object attributes
Diffstat (limited to 'Lib/test')
-rw-r--r--  Lib/test/test_hashlib.py  75
1 file changed, 62 insertions, 13 deletions
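The new assertions walk every registered constructor and cross-check the hash object's public attributes. As a minimal standalone sketch (not part of the patch, using only the documented hashlib API), the relationships being asserted look like this:

    import hashlib

    h = hashlib.sha256(b"abc")
    assert h.name.lower() == "sha256"                  # algorithm name matches
    assert h.digest_size == 32                         # length of digest() in bytes
    assert h.block_size == 64                          # internal block size in bytes
    assert len(h.digest()) == h.digest_size
    assert h.digest() == bytes.fromhex(h.hexdigest())  # raw and hex digests agree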
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
index 32f85e9..f3385f6 100644
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -96,10 +96,14 @@ class HashLibTestCase(unittest.TestCase):
super(HashLibTestCase, self).__init__(*args, **kwargs)
+ @property
+ def hash_constructors(self):
+ constructors = self.constructors_to_test.values()
+ return itertools.chain.from_iterable(constructors)
+
def test_hash_array(self):
a = array.array("b", range(10))
- constructors = self.constructors_to_test.values()
- for cons in itertools.chain.from_iterable(constructors):
+ for cons in self.hash_constructors:
c = cons(a)
c.hexdigest()
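The hash_constructors property added in this hunk simply flattens the per-algorithm constructor lists. A hedged illustration with hypothetical dictionary contents (the real test builds constructors_to_test dynamically during setup):

    import hashlib
    import itertools

    # Illustrative stand-in for self.constructors_to_test: each value is a
    # list of callables that build a hash object for one algorithm.
    constructors_to_test = {
        'md5':  [hashlib.md5,  lambda data=b'': hashlib.new('md5', data)],
        'sha1': [hashlib.sha1, lambda data=b'': hashlib.new('sha1', data)],
    }

    # One flat iterable over every constructor, regardless of algorithm,
    # which is what the property hands back to the test methods.
    for cons in itertools.chain.from_iterable(constructors_to_test.values()):
        print(cons(b'abc').hexdigest())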
@@ -136,39 +140,57 @@ class HashLibTestCase(unittest.TestCase):
self.assertRaises(TypeError, get_builtin_constructor, 3)
def test_hexdigest(self):
- for name in self.supported_hash_names:
- h = hashlib.new(name)
+ for cons in self.hash_constructors:
+ h = cons()
assert isinstance(h.digest(), bytes), name
self.assertEqual(hexstr(h.digest()), h.hexdigest())
-
def test_large_update(self):
aas = b'a' * 128
bees = b'b' * 127
cees = b'c' * 126
+ dees = b'd' * 2048 # HASHLIB_GIL_MINSIZE
- for name in self.supported_hash_names:
- m1 = hashlib.new(name)
+ for cons in self.hash_constructors:
+ m1 = cons()
m1.update(aas)
m1.update(bees)
m1.update(cees)
+ m1.update(dees)
- m2 = hashlib.new(name)
- m2.update(aas + bees + cees)
+ m2 = cons()
+ m2.update(aas + bees + cees + dees)
self.assertEqual(m1.digest(), m2.digest())
- def check(self, name, data, digest):
+ m3 = cons(aas + bees + cees + dees)
+ self.assertEqual(m1.digest(), m3.digest())
+
+ # verify copy() doesn't touch original
+ m4 = cons(aas + bees + cees)
+ m4_digest = m4.digest()
+ m4_copy = m4.copy()
+ m4_copy.update(dees)
+ self.assertEqual(m1.digest(), m4_copy.digest())
+ self.assertEqual(m4.digest(), m4_digest)
+
+ def check(self, name, data, hexdigest):
+ hexdigest = hexdigest.lower()
constructors = self.constructors_to_test[name]
# 2 is for hashlib.name(...) and hashlib.new(name, ...)
self.assertGreaterEqual(len(constructors), 2)
for hash_object_constructor in constructors:
- computed = hash_object_constructor(data).hexdigest()
+ m = hash_object_constructor(data)
+ computed = m.hexdigest()
self.assertEqual(
- computed, digest,
+ computed, hexdigest,
"Hash algorithm %s constructed using %s returned hexdigest"
" %r for %d byte input data that should have hashed to %r."
% (name, hash_object_constructor,
- computed, len(data), digest))
+ computed, len(data), hexdigest))
+ computed = m.digest()
+ digest = bytes.fromhex(hexdigest)
+ self.assertEqual(computed, digest)
+ self.assertEqual(len(digest), m.digest_size)
def check_no_unicode(self, algorithm_name):
# Unicode objects are not allowed as input.
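The block added to test_large_update in the hunk above also pins down copy() semantics: updating a copy must leave the original digest untouched. A self-contained sketch of that contract (not part of the patch):

    import hashlib

    m = hashlib.sha1(b"abc")
    snapshot = m.digest()
    clone = m.copy()
    clone.update(b"def")
    assert m.digest() == snapshot                              # original unchanged
    assert clone.digest() == hashlib.sha1(b"abcdef").digest()  # copy advanced independently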
@@ -184,6 +206,24 @@ class HashLibTestCase(unittest.TestCase):
self.check_no_unicode('sha384')
self.check_no_unicode('sha512')
+ def check_blocksize_name(self, name, block_size=0, digest_size=0):
+ constructors = self.constructors_to_test[name]
+ for hash_object_constructor in constructors:
+ m = hash_object_constructor()
+ self.assertEqual(m.block_size, block_size)
+ self.assertEqual(m.digest_size, digest_size)
+ self.assertEqual(len(m.digest()), digest_size)
+ self.assertEqual(m.name.lower(), name.lower())
+ self.assertIn(name.split("_")[0], repr(m).lower())
+
+ def test_blocksize_name(self):
+ self.check_blocksize_name('md5', 64, 16)
+ self.check_blocksize_name('sha1', 64, 20)
+ self.check_blocksize_name('sha224', 64, 28)
+ self.check_blocksize_name('sha256', 64, 32)
+ self.check_blocksize_name('sha384', 128, 48)
+ self.check_blocksize_name('sha512', 128, 64)
+
def test_case_md5_0(self):
self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e')
@@ -323,6 +363,15 @@ class HashLibTestCase(unittest.TestCase):
# for multithreaded operation (which is hardwired to 2048).
gil_minsize = 2048
+ for cons in self.hash_constructors:
+ m = cons()
+ m.update(b'1')
+ m.update(b'#' * gil_minsize)
+ m.update(b'1')
+
+ m = cons(b'x' * gil_minsize)
+ m.update(b'1')
+
m = hashlib.md5()
m.update(b'1')
m.update(b'#' * gil_minsize)
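The loop added above pushes more than HASHLIB_GIL_MINSIZE (2048 bytes, per the test's comment) through every constructor so the GIL-releasing update path is exercised. A sketch of the invariant that path has to preserve, namely that chunked and single-shot updates hash identically (the 2048 value here mirrors the test's constant):

    import hashlib

    gil_minsize = 2048
    data = b'1' + b'#' * gil_minsize + b'1'

    chunked = hashlib.md5()
    chunked.update(b'1')
    chunked.update(b'#' * gil_minsize)
    chunked.update(b'1')

    assert chunked.digest() == hashlib.md5(data).digest()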