author     Victor Stinner <victor.stinner@gmail.com>  2017-07-24 23:55:54 (GMT)
committer  GitHub <noreply@github.com>                2017-07-24 23:55:54 (GMT)
commit     ffb49408f0780ae80a553208aa133bc5bb3ba129 (patch)
tree       c1d29d1be3aa593af611fdc7b6982e5afe587bb2 /Lib/test/_test_multiprocessing.py
parent     d7e64d9934d86aa6173229de5af5fe908662a33a (diff)
test_multiprocessing detects dangling processes and threads per test case (#2841)
bpo-26762: test_multiprocessing now detects dangling processes and threads per test case class:
* setUpClass()/tearDownClass() of the mixin classes now check whether multiprocessing.process._dangling or threading._dangling has been modified, in order to detect "dangling" processes and threads.
* ManagerMixin.tearDownClass() now also emits a warning if it still has more than one active child process after 5 seconds.
* tearDownModule() now checks for dangling processes and threads before sleeping for 500 ms, and it now only sleeps if there is at least one dangling process or thread.
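The per-class detection pattern described above can be sketched in isolation as follows. This is a simplified illustration, not the patch itself: LeakCheckMixin and DemoTest are hypothetical names, and the real change adds the equivalent logic to BaseMixin in Lib/test/_test_multiprocessing.py (which additionally runs test.support.gc_collect() first to break reference cycles).

import sys
import time
import threading
import unittest

import multiprocessing.process


class LeakCheckMixin(object):
    """Hypothetical mixin mirroring the dangling-object check added by the patch."""

    @classmethod
    def setUpClass(cls):
        # Snapshot the private "dangling" registries before any test in the
        # class runs.
        cls.dangling = (multiprocessing.process._dangling.copy(),
                        threading._dangling.copy())

    @classmethod
    def tearDownClass(cls):
        # Anything new in the registries is a process or thread the tests
        # created but never cleaned up; report it as a warning.
        processes = set(multiprocessing.process._dangling) - set(cls.dangling[0])
        if processes:
            print('Warning -- Dangling processes: %s' % processes,
                  file=sys.stderr)

        threads = set(threading._dangling) - set(cls.dangling[1])
        if threads:
            print('Warning -- Dangling threads: %s' % threads,
                  file=sys.stderr)


class DemoTest(LeakCheckMixin, unittest.TestCase):
    def test_leaves_thread_behind(self):
        # Deliberately leak a running thread so tearDownClass() warns.
        threading.Thread(target=time.sleep, args=(1,)).start()


if __name__ == '__main__':
    unittest.main()

Note that, as in the patch, the check only prints a warning to stderr rather than failing the test, since some tests legitimately finish before their children have fully exited.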
Diffstat (limited to 'Lib/test/_test_multiprocessing.py')
-rw-r--r--  Lib/test/_test_multiprocessing.py  81
1 file changed, 67 insertions, 14 deletions
diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py
index 126725f..a87b028 100644
--- a/Lib/test/_test_multiprocessing.py
+++ b/Lib/test/_test_multiprocessing.py
@@ -4303,7 +4303,32 @@ class TestSimpleQueue(unittest.TestCase):
# Mixins
#
-class ProcessesMixin(object):
+class BaseMixin(object):
+ @classmethod
+ def setUpClass(cls):
+ cls.dangling = (multiprocessing.process._dangling.copy(),
+ threading._dangling.copy())
+
+ @classmethod
+ def tearDownClass(cls):
+ # bpo-26762: Some multiprocessing objects like Pool create reference
+ # cycles. Trigger a garbage collection to break these cycles.
+ test.support.gc_collect()
+
+ processes = set(multiprocessing.process._dangling) - set(cls.dangling[0])
+ if processes:
+ print('Warning -- Dangling processes: %s' % processes,
+ file=sys.stderr)
+ processes = None
+
+ threads = set(threading._dangling) - set(cls.dangling[1])
+ if threads:
+ print('Warning -- Dangling threads: %s' % threads,
+ file=sys.stderr)
+ threads = None
+
+
+class ProcessesMixin(BaseMixin):
TYPE = 'processes'
Process = multiprocessing.Process
connection = multiprocessing.connection
@@ -4326,7 +4351,7 @@ class ProcessesMixin(object):
RawArray = staticmethod(multiprocessing.RawArray)
-class ManagerMixin(object):
+class ManagerMixin(BaseMixin):
TYPE = 'manager'
Process = multiprocessing.Process
Queue = property(operator.attrgetter('manager.Queue'))
@@ -4350,6 +4375,7 @@ class ManagerMixin(object):
@classmethod
def setUpClass(cls):
+ super().setUpClass()
cls.manager = multiprocessing.Manager()
@classmethod
@@ -4357,23 +4383,35 @@ class ManagerMixin(object):
# only the manager process should be returned by active_children()
# but this can take a bit on slow machines, so wait a few seconds
# if there are other children too (see #17395)
+ start_time = time.monotonic()
t = 0.01
- while len(multiprocessing.active_children()) > 1 and t < 5:
+ while len(multiprocessing.active_children()) > 1:
time.sleep(t)
t *= 2
+ dt = time.monotonic() - start_time
+ if dt >= 5.0:
+ print("Warning -- multiprocessing.Manager still has %s active "
+ "children after %s seconds"
+ % (multiprocessing.active_children(), dt),
+ file=sys.stderr)
+ break
+
gc.collect() # do garbage collection
if cls.manager._number_of_objects() != 0:
# This is not really an error since some tests do not
# ensure that all processes which hold a reference to a
# managed object have been joined.
- print('Shared objects which still exist at manager shutdown:')
+ print('Warning -- Shared objects which still exist at manager '
+ 'shutdown:')
print(cls.manager._debug_info())
cls.manager.shutdown()
cls.manager.join()
cls.manager = None
+ super().tearDownClass()
+
-class ThreadsMixin(object):
+class ThreadsMixin(BaseMixin):
TYPE = 'threads'
Process = multiprocessing.dummy.Process
connection = multiprocessing.dummy.connection
@@ -4450,18 +4488,33 @@ def install_tests_in_module_dict(remote_globs, start_method):
multiprocessing.get_logger().setLevel(LOG_LEVEL)
def tearDownModule():
+ need_sleep = False
+
+ # bpo-26762: Some multiprocessing objects like Pool create reference
+ # cycles. Trigger a garbage collection to break these cycles.
+ test.support.gc_collect()
+
multiprocessing.set_start_method(old_start_method[0], force=True)
# pause a bit so we don't get warning about dangling threads/processes
- time.sleep(0.5)
+ processes = set(multiprocessing.process._dangling) - set(dangling[0])
+ if processes:
+ need_sleep = True
+ print('Warning -- Dangling processes: %s' % processes,
+ file=sys.stderr)
+ processes = None
+
+ threads = set(threading._dangling) - set(dangling[1])
+ if threads:
+ need_sleep = True
+ print('Warning -- Dangling threads: %s' % threads,
+ file=sys.stderr)
+ threads = None
+
+ # Sleep 500 ms to give time to child processes to complete.
+ if need_sleep:
+ time.sleep(0.5)
multiprocessing.process._cleanup()
- gc.collect()
- tmp = set(multiprocessing.process._dangling) - set(dangling[0])
- if tmp:
- print('Dangling processes:', tmp, file=sys.stderr)
- del tmp
- tmp = set(threading._dangling) - set(dangling[1])
- if tmp:
- print('Dangling threads:', tmp, file=sys.stderr)
+ test.support.gc_collect()
remote_globs['setUpModule'] = setUpModule
remote_globs['tearDownModule'] = tearDownModule