author     Gregory P. Smith <greg@mad-scientist.com>    2009-05-04 00:45:33 (GMT)
committer  Gregory P. Smith <greg@mad-scientist.com>    2009-05-04 00:45:33 (GMT)
commit     3f61d61b353c08525348a018294fa05bba1bdfe2 (patch)
tree       25258dd4c53962cd25ae584b079839369465df74 /Lib
parent     c1651a0b968390ef6b722d3c2e1ca72c5a7c9cec (diff)
Merge the refactoring I did when committing r72267 to trunk into the
already committed issue4751 support in py3k (r68411).
Diffstat (limited to 'Lib')
-rw-r--r--  Lib/test/test_hashlib.py  |  46
1 file changed, 45 insertions(+), 1 deletion(-)
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
index 9b51459..594f5dd 100644
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -2,11 +2,16 @@
 #
 # $Id$
 #
-# Copyright (C) 2005 Gregory P. Smith (greg@krypto.org)
+# Copyright (C) 2005-2009 Gregory P. Smith (greg@krypto.org)
 # Licensed to PSF under a Contributor Agreement.
 #
 
 import hashlib
+from io import StringIO
+try:
+    import threading
+except ImportError:
+    threading = None
 import unittest
 from test import support
 from test.support import _4G, precisionbigmemtest
@@ -224,6 +229,45 @@ class HashLibTestCase(unittest.TestCase):
         m = hashlib.md5(b'x' * gil_minsize)
         self.assertEquals(m.hexdigest(), 'cfb767f225d58469c5de3632a8803958')
 
+    def test_threaded_hashing(self):
+        if not threading:
+            raise unittest.SkipTest('No threading module.')
+
+        # Updating the same hash object from several threads at once
+        # using data chunk sizes containing the same byte sequences.
+        #
+        # If the internal locks are working to prevent multiple
+        # updates on the same object from running at once, the resulting
+        # hash will be the same as doing it single threaded upfront.
+        hasher = hashlib.sha1()
+        num_threads = 5
+        smallest_data = b'swineflu'
+        data = smallest_data*200000
+        expected_hash = hashlib.sha1(data*num_threads).hexdigest()
+
+        def hash_in_chunks(chunk_size, event):
+            index = 0
+            while index < len(data):
+                hasher.update(data[index:index+chunk_size])
+                index += chunk_size
+            event.set()
+
+        events = []
+        for threadnum in range(num_threads):
+            chunk_size = len(data) // (10**threadnum)
+            assert chunk_size > 0
+            assert chunk_size % len(smallest_data) == 0
+            event = threading.Event()
+            events.append(event)
+            threading.Thread(target=hash_in_chunks,
+                             args=(chunk_size, event)).start()
+
+        for event in events:
+            event.wait()
+
+        self.assertEqual(expected_hash, hasher.hexdigest())
+
+
 def test_main():
     support.run_unittest(HashLibTestCase)
 
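The new test leans on two ideas: the internal per-object lock must keep concurrent update() calls on the same hash object from running at once (the issue4751 behavior), and because every chunk size is a multiple of len(b'swineflu'), any interleaving of whole chunks produces the same byte stream as hashing data*num_threads sequentially, so the shared object's digest must match the single-threaded one. Below is a rough standalone sketch of that same idea outside the unittest harness; it is not part of the commit, assumes Python 3 with the threading module available, and uses plain Thread.join() instead of the Event objects the test uses.

import hashlib
import threading

# Each thread pushes the same data into one shared hash object, but in a
# different chunk size.  All chunk sizes are multiples of len(b'swineflu'),
# so any interleaving of whole chunks is byte-for-byte identical to hashing
# data * num_threads in a single thread.
num_threads = 5
data = b'swineflu' * 200000
expected = hashlib.sha1(data * num_threads).hexdigest()

hasher = hashlib.sha1()

def feed(chunk_size):
    # Push the whole of `data` into the shared hasher, chunk by chunk.
    for start in range(0, len(data), chunk_size):
        hasher.update(data[start:start + chunk_size])

threads = [threading.Thread(target=feed, args=(len(data) // (10 ** n),))
           for n in range(num_threads)]
for t in threads:
    t.start()
for t in threads:
    t.join()

# If hashlib's per-object locking works, the digests match.
assert hasher.hexdigest() == expected

The two asserts in the committed test (chunk_size > 0 and chunk_size % len(smallest_data) == 0) guard exactly that interleaving argument: only whole multiples of the repeated byte sequence may be fed at a time, otherwise different thread schedules could legitimately produce different digests.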