author    Serhiy Storchaka <storchaka@gmail.com>    2015-06-08 08:14:31 (GMT)
committer Serhiy Storchaka <storchaka@gmail.com>    2015-06-08 08:14:31 (GMT)
commit    77cb197aaa289d1815b9b476f48ebe86d9b75a72 (patch)
tree      49912c3b3616328b2416fd90c8045724ab2a4030 /Lib/test/test_functools.py
parent    93cfeb93ac07033b84cee71ce3fac9d9f5d5e60b (diff)
Issue #14373: Fixed threaded test for lru_cache(). Added new threaded test.
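The fix makes the threaded test's cache statistics checkable: a shared threading.Event now gates the worker threads so they all start calling the cached function concurrently, the miss count is only bounded with assertLessEqual, and the expected hit count is derived from the observed misses (m*n - misses). The new test_lru_cache_threaded2 instead coordinates its threads with three threading.Barrier objects so that all n threads call the cached function with the same argument at the same moment; its cache_info() assertions expect n misses and no hits for each distinct argument. Below is a minimal standalone sketch (not part of the commit; the worker helper and the printed summary are illustrative only) of the Event-gated pattern:

# Illustrative sketch only -- not part of the commit.  It mirrors the
# Event-gated pattern used by test_lru_cache_threaded: n worker threads
# each call the cached function m times, and none of them starts until
# the main thread sets the event, so the calls are genuinely concurrent.
import threading
from functools import lru_cache

n, m = 5, 11                      # same shape as the test: n threads, m calls each

@lru_cache(maxsize=n * m)
def cached(x, y):
    return 3 * x + y

start = threading.Event()

def worker(x, y):
    start.wait(10)                # block until the main thread releases the workers
    for _ in range(m):
        cached(x, y)              # first call per key is a miss, the rest are hits

threads = [threading.Thread(target=worker, args=(k, k)) for k in range(n)]
for t in threads:
    t.start()
start.set()                       # release all workers at once
for t in threads:
    t.join()

# Each thread uses its own key, so only n distinct values are ever computed.
print(cached.cache_info())        # e.g. hits=50, misses=5, maxsize=55, currsize=5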
Diffstat (limited to 'Lib/test/test_functools.py')
-rw-r--r--    Lib/test/test_functools.py    61
1 file changed, 47 insertions, 14 deletions
diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py
index e8cf848..632b2d3 100644
--- a/Lib/test/test_functools.py
+++ b/Lib/test/test_functools.py
@@ -1102,43 +1102,76 @@ class TestLRU:
     @unittest.skipUnless(threading, 'This test requires threading.')
     def test_lru_cache_threaded(self):
+        n, m = 5, 11
         def orig(x, y):
             return 3 * x + y
-        f = self.module.lru_cache(maxsize=20)(orig)
+        f = self.module.lru_cache(maxsize=n*m)(orig)
         hits, misses, maxsize, currsize = f.cache_info()
         self.assertEqual(currsize, 0)
+        start = threading.Event()
         def full(f, *args):
-            for _ in range(10):
+            start.wait(10)
+            for _ in range(m):
                 f(*args)
-        def clear(f):
-            for _ in range(10):
+        def clear():
+            start.wait(10)
+            for _ in range(2*m):
                 f.cache_clear()
         orig_si = sys.getswitchinterval()
         sys.setswitchinterval(1e-6)
         try:
-            # create 5 threads in order to fill cache
+            # create n threads in order to fill cache
             threads = [threading.Thread(target=full, args=[f, k, k])
-                       for k in range(5)]
+                       for k in range(n)]
             with support.start_threads(threads):
-                pass
+                start.set()
             hits, misses, maxsize, currsize = f.cache_info()
-            self.assertEqual(hits, 45)
-            self.assertEqual(misses, 5)
-            self.assertEqual(currsize, 5)
+            self.assertLessEqual(misses, n)
+            self.assertEqual(hits, m*n - misses)
+            self.assertEqual(currsize, n)
-            # create 5 threads in order to fill cache and 1 to clear it
-            threads = [threading.Thread(target=clear, args=[f])]
+            # create n threads in order to fill cache and 1 to clear it
+            threads = [threading.Thread(target=clear)]
             threads += [threading.Thread(target=full, args=[f, k, k])
-                        for k in range(5)]
+                        for k in range(n)]
+            start.clear()
             with support.start_threads(threads):
-                pass
+                start.set()
         finally:
             sys.setswitchinterval(orig_si)
+    @unittest.skipUnless(threading, 'This test requires threading.')
+    def test_lru_cache_threaded2(self):
+        # Simultaneous call with the same arguments
+        n, m = 5, 7
+        start = threading.Barrier(n+1)
+        pause = threading.Barrier(n+1)
+        stop = threading.Barrier(n+1)
+        @self.module.lru_cache(maxsize=m*n)
+        def f(x):
+            pause.wait(10)
+            return 3 * x
+        self.assertEqual(f.cache_info(), (0, 0, m*n, 0))
+        def test():
+            for i in range(m):
+                start.wait(10)
+                self.assertEqual(f(i), 3 * i)
+                stop.wait(10)
+        threads = [threading.Thread(target=test) for k in range(n)]
+        with support.start_threads(threads):
+            for i in range(m):
+                start.wait(10)
+                stop.reset()
+                pause.wait(10)
+                start.reset()
+                stop.wait(10)
+                pause.reset()
+                self.assertEqual(f.cache_info(), (0, (i+1)*n, m*n, i+1))
+
     def test_need_for_rlock(self):
         # This will deadlock on an LRU cache that uses a regular lock