author     Gregory P. Smith <greg@mad-scientist.com>   2010-01-03 00:29:15 (GMT)
committer  Gregory P. Smith <greg@mad-scientist.com>   2010-01-03 00:29:15 (GMT)
commit     cd54e546bb70e5e9abeb2b6d2ca9031507015f60 (patch)
tree       d2a19fc4410792bf8f808341fba9af4c7fd54a9b
parent     072090214484f13661e1a28ab7418a0a424449eb (diff)
Merged revisions 77257 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/trunk
........
r77257 | gregory.p.smith | 2010-01-02 16:19:04 -0800 (Sat, 02 Jan 2010) | 8 lines
Import all implementations of the hash algorithms (OpenSSL & builtin) and run
the test suite across all that are available.  The test warns about extension
modules that could not be imported when Python was compiled with Py_DEBUG.
That warning could be made fatal, but I didn't want to do that initially, as
I suspect non-setup.py-based build processes (Windows, any others?) won't
compile them all conditionally based on the Py_DEBUG setting today.
........
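
The pattern the log message describes can be sketched in isolation roughly as
follows. This is a minimal illustration, not the patch itself: it relies on the
CPython convention that sys.gettotalrefcount exists only in --with-pydebug
builds, it uses importlib.import_module in place of the exec()-based import in
the actual test code, and the conditional_import helper name is made up for
this sketch.

    import importlib
    import sys
    import warnings

    # True only in a --with-pydebug (Py_DEBUG) build of CPython.
    COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')

    def conditional_import(module_name):
        """Return the named module, or None if it cannot be imported."""
        try:
            return importlib.import_module(module_name)
        except ImportError as error:
            if COMPILED_WITH_PYDEBUG:
                # In a debug build a missing extension usually means it failed
                # to compile, so surface that instead of silently skipping it.
                warnings.warn('Did a C extension fail to compile? %s' % error)
            return None

    # Register only the implementations that are actually available.
    _hashlib = conditional_import('_hashlib')   # OpenSSL-backed constructors
    _md5 = conditional_import('_md5')           # builtin MD5, if compiled

Modules that fail to import simply come back as None, so a caller can register
only the hash constructors that are actually present in the build.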
-rw-r--r--  Lib/test/test_hashlib.py | 85
1 file changed, 77 insertions(+), 8 deletions(-)
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
index d0ad07d..9f702e9 100644
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -8,14 +8,20 @@
 import hashlib
 from io import StringIO
+import sys
 try:
     import threading
 except ImportError:
     threading = None
 import unittest
+import warnings
 from test import support
 from test.support import _4G, precisionbigmemtest

+# Were we compiled --with-pydebug or with #define Py_DEBUG?
+COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')
+
+
 def hexstr(s):
     assert isinstance(s, bytes), repr(s)
     h = "0123456789abcdef"
@@ -30,6 +36,63 @@ class HashLibTestCase(unittest.TestCase):
                             'sha224', 'SHA224', 'sha256', 'SHA256',
                             'sha384', 'SHA384', 'sha512', 'SHA512' )

+    _warn_on_extension_import = COMPILED_WITH_PYDEBUG
+
+    def _conditional_import_module(self, module_name):
+        """Import a module and return a reference to it or None on failure."""
+        try:
+            exec('import '+module_name)
+        except ImportError as error:
+            if self._warn_on_extension_import:
+                warnings.warn('Did a C extension fail to compile? %s' % error)
+        return locals().get(module_name)
+
+    def __init__(self, *args, **kwargs):
+        algorithms = set()
+        for algorithm in self.supported_hash_names:
+            algorithms.add(algorithm.lower())
+        self.constructors_to_test = {}
+        for algorithm in algorithms:
+            self.constructors_to_test[algorithm] = set()
+
+        # For each algorithm, test the direct constructor and the use
+        # of hashlib.new given the algorithm name.
+        for algorithm, constructors in self.constructors_to_test.items():
+            constructors.add(getattr(hashlib, algorithm))
+            def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm):
+                if data is None:
+                    return hashlib.new(_alg)
+                return hashlib.new(_alg, data)
+            constructors.add(_test_algorithm_via_hashlib_new)
+
+        _hashlib = self._conditional_import_module('_hashlib')
+        if _hashlib:
+            # These two algorithms should always be present when this module
+            # is compiled.  If not, something was compiled wrong.
+            assert hasattr(_hashlib, 'openssl_md5')
+            assert hasattr(_hashlib, 'openssl_sha1')
+            for algorithm, constructors in self.constructors_to_test.items():
+                constructor = getattr(_hashlib, 'openssl_'+algorithm, None)
+                if constructor:
+                    constructors.add(constructor)
+
+        _md5 = self._conditional_import_module('_md5')
+        if _md5:
+            self.constructors_to_test['md5'].add(_md5.new)
+        _sha = self._conditional_import_module('_sha')
+        if _sha:
+            self.constructors_to_test['sha1'].add(_sha.new)
+        _sha256 = self._conditional_import_module('_sha256')
+        if _sha256:
+            self.constructors_to_test['sha224'].add(_sha256.sha224)
+            self.constructors_to_test['sha256'].add(_sha256.sha256)
+        _sha512 = self._conditional_import_module('_sha512')
+        if _sha512:
+            self.constructors_to_test['sha384'].add(_sha512.sha384)
+            self.constructors_to_test['sha512'].add(_sha512.sha512)
+
+        super(HashLibTestCase, self).__init__(*args, **kwargs)
+
     def test_unknown_hash(self):
         try:
             hashlib.new('spam spam spam spam spam')
@@ -61,17 +124,23 @@ class HashLibTestCase(unittest.TestCase):
         self.assertEqual(m1.digest(), m2.digest())

     def check(self, name, data, digest):
-        # test the direct constructors
-        computed = getattr(hashlib, name)(data).hexdigest()
-        self.assertEqual(computed, digest)
-        # test the general new() interface
-        computed = hashlib.new(name, data).hexdigest()
-        self.assertEqual(computed, digest)
+        constructors = self.constructors_to_test[name]
+        # 2 is for hashlib.name(...) and hashlib.new(name, ...)
+        self.assertGreaterEqual(len(constructors), 2)
+        for hash_object_constructor in constructors:
+            computed = hash_object_constructor(data).hexdigest()
+            self.assertEqual(
+                    computed, digest,
+                    "Hash algorithm %s constructed using %s returned hexdigest"
+                    " %r for %d byte input data that should have hashed to %r."
+                    % (name, hash_object_constructor,
+                       computed, len(data), digest))

     def check_no_unicode(self, algorithm_name):
         # Unicode objects are not allowed as input.
-        self.assertRaises(TypeError, getattr(hashlib, algorithm_name), 'spam')
-        self.assertRaises(TypeError, hashlib.new, algorithm_name, 'spam')
+        constructors = self.constructors_to_test[algorithm_name]
+        for hash_object_constructor in constructors:
+            self.assertRaises(TypeError, hash_object_constructor, 'spam')

     def test_no_unicode(self):
         self.check_no_unicode('md5')
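
As a rough illustration of what the reworked check() now verifies, every
constructor registered for an algorithm has to agree on the digest for the same
input. The snippet below is a standalone sketch, not part of the patch; it
assumes an MD5-enabled build, and the OpenSSL-backed _hashlib.openssl_md5
constructor is only exercised when that extension module happens to be
importable.

    import hashlib

    data = b'abc'
    expected = '900150983cd24fb0d6963f7d28e17f72'   # well-known MD5 of b'abc'

    # The two constructors the old test already covered.
    constructors = [hashlib.md5, lambda d: hashlib.new('md5', d)]
    try:
        import _hashlib                         # OpenSSL-backed accelerator
        constructors.append(_hashlib.openssl_md5)
    except ImportError:
        pass                                    # test only what is available

    for constructor in constructors:
        assert constructor(data).hexdigest() == expected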