author     Benjamin Peterson <benjamin@python.org>  2008-06-11 19:14:14 (GMT)
committer  Benjamin Peterson <benjamin@python.org>  2008-06-11 19:14:14 (GMT)
commit     672b8031a803fa420cac91cdaab02130c1f8bed0 (patch)
tree       cff73aa339853806887dd85bf1df34f088532efe /Lib/multiprocessing
parent     559e5d7f4d1155e95fb6f925c927a263f9196935 (diff)
download   cpython-672b8031a803fa420cac91cdaab02130c1f8bed0.zip
           cpython-672b8031a803fa420cac91cdaab02130c1f8bed0.tar.gz
           cpython-672b8031a803fa420cac91cdaab02130c1f8bed0.tar.bz2
Merged revisions 64125 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/trunk

........
  r64125 | benjamin.peterson | 2008-06-11 12:27:50 -0500 (Wed, 11 Jun 2008) | 2 lines

  give the threading API PEP 8 names
........
Diffstat (limited to 'Lib/multiprocessing')
-rw-r--r--  Lib/multiprocessing/dummy/__init__.py  29
-rw-r--r--  Lib/multiprocessing/managers.py         22
-rw-r--r--  Lib/multiprocessing/pool.py             14
-rw-r--r--  Lib/multiprocessing/queues.py            2
-rw-r--r--  Lib/multiprocessing/reduction.py         2
-rw-r--r--  Lib/multiprocessing/synchronize.py       8
6 files changed, 35 insertions, 42 deletions
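
Note: the patch is purely a rename. Every camelCase threading call used inside multiprocessing (currentThread, isAlive, setDaemon, getName/setName, notifyAll) is switched to the PEP 8 spelling introduced by r64125. The sketch below is only an illustration of those spellings as they behave in current Python, not part of the patch; it assumes a throwaway thread name 'pep8-demo', and it uses the daemon and name attributes because the set_daemon()/get_name() accessors added in this revision were themselves later replaced by attributes.

    import threading

    cond = threading.Condition()

    def worker():
        with cond:
            cond.notify_all()          # notify_all() replaces notifyAll()

    t = threading.Thread(target=worker, name='pep8-demo')
    t.daemon = True                    # spelled t.set_daemon(True) in this revision;
                                       # the daemon attribute is the spelling that stuck
    with cond:
        t.start()
        cond.wait(timeout=1)

    print(threading.current_thread().name)   # current_thread() replaces currentThread()
    print(t.is_alive())                       # is_alive() replaces isAlive()

The dummy/__init__.py hunk that follows simply re-points its Process-style aliases (is_alive, get_name, set_name, is_daemon, set_daemon) at these renamed Thread methods.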
diff --git a/Lib/multiprocessing/dummy/__init__.py b/Lib/multiprocessing/dummy/__init__.py
index 841d831..fe4ef96 100644
--- a/Lib/multiprocessing/dummy/__init__.py
+++ b/Lib/multiprocessing/dummy/__init__.py
@@ -48,24 +48,17 @@ class DummyProcess(threading.Thread):
threading.Thread.start(self)
def get_exitcode(self):
- if self._start_called and not self.isAlive():
+ if self._start_called and not self.is_alive():
return 0
else:
return None
- # XXX
- if sys.version_info < (3, 0):
- is_alive = threading.Thread.isAlive.__func__
- get_name = threading.Thread.getName.__func__
- set_name = threading.Thread.setName.__func__
- is_daemon = threading.Thread.isDaemon.__func__
- set_daemon = threading.Thread.setDaemon.__func__
- else:
- is_alive = threading.Thread.isAlive
- get_name = threading.Thread.getName
- set_name = threading.Thread.setName
- is_daemon = threading.Thread.isDaemon
- set_daemon = threading.Thread.setDaemon
+
+ is_alive = threading.Thread.is_alive
+ get_name = threading.Thread.get_name
+ set_name = threading.Thread.set_name
+ is_daemon = threading.Thread.is_daemon
+ set_daemon = threading.Thread.set_daemon
#
#
@@ -74,22 +67,22 @@ class DummyProcess(threading.Thread):
class Condition(threading._Condition):
# XXX
if sys.version_info < (3, 0):
- notify_all = threading._Condition.notifyAll.__func__
+ notify_all = threading._Condition.notify_all.__func__
else:
- notify_all = threading._Condition.notifyAll
+ notify_all = threading._Condition.notify_all
#
#
#
Process = DummyProcess
-current_process = threading.currentThread
+current_process = threading.current_thread
current_process()._children = weakref.WeakKeyDictionary()
def active_children():
children = current_process()._children
for p in list(children):
- if not p.isAlive():
+ if not p.is_alive():
children.pop(p, None)
return list(children)
diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py
index ecad563..2deee8c 100644
--- a/Lib/multiprocessing/managers.py
+++ b/Lib/multiprocessing/managers.py
@@ -169,7 +169,7 @@ class Server(object):
except (OSError, IOError):
continue
t = threading.Thread(target=self.handle_request, args=(c,))
- t.setDaemon(True)
+ t.set_daemon(True)
t.start()
except (KeyboardInterrupt, SystemExit):
pass
@@ -216,7 +216,7 @@ class Server(object):
Handle requests from the proxies in a particular process/thread
'''
util.debug('starting server thread to service %r',
- threading.currentThread().getName())
+ threading.current_thread().get_name())
recv = conn.recv
send = conn.send
@@ -266,7 +266,7 @@ class Server(object):
except EOFError:
util.debug('got EOF -- exiting thread serving %r',
- threading.currentThread().getName())
+ threading.current_thread().get_name())
sys.exit(0)
except Exception:
@@ -279,7 +279,7 @@ class Server(object):
send(('#UNSERIALIZABLE', repr(msg)))
except Exception as e:
util.info('exception in thread serving %r',
- threading.currentThread().getName())
+ threading.current_thread().get_name())
util.info(' ... message was %r', msg)
util.info(' ... exception was %r', e)
conn.close()
@@ -401,7 +401,7 @@ class Server(object):
'''
Spawn a new thread to serve this connection
'''
- threading.currentThread().setName(name)
+ threading.current_thread().set_name(name)
c.send(('#RETURN', None))
self.serve_client(c)
@@ -715,8 +715,8 @@ class BaseProxy(object):
def _connect(self):
util.debug('making connection to manager')
name = current_process().get_name()
- if threading.currentThread().getName() != 'MainThread':
- name += '|' + threading.currentThread().getName()
+ if threading.current_thread().get_name() != 'MainThread':
+ name += '|' + threading.current_thread().get_name()
conn = self._Client(self._token.address, authkey=self._authkey)
dispatch(conn, None, 'accept_connection', (name,))
self._tls.connection = conn
@@ -729,7 +729,7 @@ class BaseProxy(object):
conn = self._tls.connection
except AttributeError:
util.debug('thread %r does not own a connection',
- threading.currentThread().getName())
+ threading.current_thread().get_name())
self._connect()
conn = self._tls.connection
@@ -790,7 +790,7 @@ class BaseProxy(object):
# the process owns no more references to objects for this manager
if not idset and hasattr(tls, 'connection'):
util.debug('thread %r has no more proxies so closing conn',
- threading.currentThread().getName())
+ threading.current_thread().get_name())
tls.connection.close()
del tls.connection
@@ -969,13 +969,13 @@ class AcquirerProxy(BaseProxy):
class ConditionProxy(AcquirerProxy):
# XXX will Condition.notifyAll() name be available in Py3.0?
- _exposed_ = ('acquire', 'release', 'wait', 'notify', 'notifyAll')
+ _exposed_ = ('acquire', 'release', 'wait', 'notify', 'notify_all')
def wait(self, timeout=None):
return self._callmethod('wait', (timeout,))
def notify(self):
return self._callmethod('notify')
def notify_all(self):
- return self._callmethod('notifyAll')
+ return self._callmethod('notify_all')
class EventProxy(BaseProxy):
# XXX will Event.isSet name be available in Py3.0?
diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py
index 3d5d275..d7425d5 100644
--- a/Lib/multiprocessing/pool.py
+++ b/Lib/multiprocessing/pool.py
@@ -107,7 +107,7 @@ class Pool(object):
target=Pool._handle_tasks,
args=(self._taskqueue, self._quick_put, self._outqueue, self._pool)
)
- self._task_handler.setDaemon(True)
+ self._task_handler.set_daemon(True)
self._task_handler._state = RUN
self._task_handler.start()
@@ -115,7 +115,7 @@ class Pool(object):
target=Pool._handle_results,
args=(self._outqueue, self._quick_get, self._cache)
)
- self._result_handler.setDaemon(True)
+ self._result_handler.set_daemon(True)
self._result_handler._state = RUN
self._result_handler.start()
@@ -213,7 +213,7 @@ class Pool(object):
@staticmethod
def _handle_tasks(taskqueue, put, outqueue, pool):
- thread = threading.currentThread()
+ thread = threading.current_thread()
for taskseq, set_length in iter(taskqueue.get, None):
i = -1
@@ -252,7 +252,7 @@ class Pool(object):
@staticmethod
def _handle_results(outqueue, get, cache):
- thread = threading.currentThread()
+ thread = threading.current_thread()
while 1:
try:
@@ -346,7 +346,7 @@ class Pool(object):
# task_handler may be blocked trying to put items on inqueue
debug('removing tasks from inqueue until task handler finished')
inqueue._rlock.acquire()
- while task_handler.isAlive() and inqueue._reader.poll():
+ while task_handler.is_alive() and inqueue._reader.poll():
inqueue._reader.recv()
time.sleep(0)
@@ -362,7 +362,7 @@ class Pool(object):
debug('helping task handler/workers to finish')
cls._help_stuff_finish(inqueue, task_handler, len(pool))
- assert result_handler.isAlive() or len(cache) == 0
+ assert result_handler.is_alive() or len(cache) == 0
result_handler._state = TERMINATE
outqueue.put(None) # sentinel
@@ -591,6 +591,6 @@ class ThreadPool(Pool):
try:
inqueue.queue.clear()
inqueue.queue.extend([None] * size)
- inqueue.not_empty.notifyAll()
+ inqueue.not_empty.notify_all()
finally:
inqueue.not_empty.release()
diff --git a/Lib/multiprocessing/queues.py b/Lib/multiprocessing/queues.py
index fb6cb6d..1c32dde 100644
--- a/Lib/multiprocessing/queues.py
+++ b/Lib/multiprocessing/queues.py
@@ -155,7 +155,7 @@ class Queue(object):
self._wlock, self._writer.close),
name='QueueFeederThread'
)
- self._thread.setDaemon(True)
+ self._thread.set_daemon(True)
debug('doing self._thread.start()')
self._thread.start()
diff --git a/Lib/multiprocessing/reduction.py b/Lib/multiprocessing/reduction.py
index 818d1a9..194bb17 100644
--- a/Lib/multiprocessing/reduction.py
+++ b/Lib/multiprocessing/reduction.py
@@ -84,7 +84,7 @@ def _get_listener():
debug('starting listener and thread for sending handles')
_listener = Listener(authkey=current_process().get_authkey())
t = threading.Thread(target=_serve)
- t.setDaemon(True)
+ t.set_daemon(True)
t.start()
finally:
_lock.release()
diff --git a/Lib/multiprocessing/synchronize.py b/Lib/multiprocessing/synchronize.py
index 44b1171..628792e 100644
--- a/Lib/multiprocessing/synchronize.py
+++ b/Lib/multiprocessing/synchronize.py
@@ -109,8 +109,8 @@ class Lock(SemLock):
try:
if self._semlock._is_mine():
name = current_process().get_name()
- if threading.currentThread().getName() != 'MainThread':
- name += '|' + threading.currentThread().getName()
+ if threading.current_thread().get_name() != 'MainThread':
+ name += '|' + threading.current_thread().get_name()
elif self._semlock._get_value() == 1:
name = 'None'
elif self._semlock._count() > 0:
@@ -134,8 +134,8 @@ class RLock(SemLock):
try:
if self._semlock._is_mine():
name = current_process().get_name()
- if threading.currentThread().getName() != 'MainThread':
- name += '|' + threading.currentThread().getName()
+ if threading.current_thread().get_name() != 'MainThread':
+ name += '|' + threading.current_thread().get_name()
count = self._semlock._count()
elif self._semlock._get_value() == 1:
name, count = 'None', 0