author     Benjamin Peterson <benjamin@python.org>   2008-08-19 19:17:39 (GMT)
committer  Benjamin Peterson <benjamin@python.org>   2008-08-19 19:17:39 (GMT)
commit     58ea9fedc825a91a3b153898afade19512bbde85 (patch)
tree       97f626c59079f769595e8382f330511378beb168 /Lib/multiprocessing
parent     be2c2b23137e93f163c87fb27c65593516f153c2 (diff)
download   cpython-58ea9fedc825a91a3b153898afade19512bbde85.zip
           cpython-58ea9fedc825a91a3b153898afade19512bbde85.tar.gz
           cpython-58ea9fedc825a91a3b153898afade19512bbde85.tar.bz2
Merged revisions 65864 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/trunk

........
  r65864 | jesse.noller | 2008-08-19 14:06:19 -0500 (Tue, 19 Aug 2008) | 2 lines

  issue3352: clean up the multiprocessing API to remove many get_/set_
  methods and convert them to properties. Update the docs and the examples
  included.
........
Diffstat (limited to 'Lib/multiprocessing')
-rw-r--r--  Lib/multiprocessing/dummy/__init__.py    3
-rw-r--r--  Lib/multiprocessing/forking.py           4
-rw-r--r--  Lib/multiprocessing/managers.py         10
-rw-r--r--  Lib/multiprocessing/pool.py              2
-rw-r--r--  Lib/multiprocessing/process.py          39
-rw-r--r--  Lib/multiprocessing/reduction.py         6
-rw-r--r--  Lib/multiprocessing/synchronize.py      12
-rw-r--r--  Lib/multiprocessing/util.py              4
8 files changed, 40 insertions, 40 deletions
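As a quick illustration of the change described in r65864, here is a minimal
sketch (not part of the patch) of how caller code moves from the removed
get_/set_ methods to plain attributes; the worker function and names are
placeholders.

    import time
    from multiprocessing import Process

    def worker():
        time.sleep(0.1)

    if __name__ == '__main__':
        p = Process(target=worker)

        # Old style, removed by this patch:
        #   p.set_name('my-worker'); p.set_daemon(True)
        #   print(p.get_name(), p.get_exitcode())

        # New style: plain properties.
        p.name = 'my-worker'
        p.daemon = True          # must be set before start()
        p.start()
        p.join()
        print(p.name, p.pid, p.exitcode)   # exitcode is 0 after a clean run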
diff --git a/Lib/multiprocessing/dummy/__init__.py b/Lib/multiprocessing/dummy/__init__.py
index 48ca75b..da18877 100644
--- a/Lib/multiprocessing/dummy/__init__.py
+++ b/Lib/multiprocessing/dummy/__init__.py
@@ -47,7 +47,8 @@ class DummyProcess(threading.Thread):
self._parent._children[self] = None
threading.Thread.start(self)
- def get_exitcode(self):
+ @property
+ def exitcode(self):
if self._start_called and not self.is_alive():
return 0
else:
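The same change seen from multiprocessing.dummy, where Process is backed by a
thread: a small sketch, assuming only the exitcode semantics shown in the hunk
above (0 once start() has been called and the thread has finished, None
otherwise).

    import multiprocessing.dummy as dummy

    def task():
        pass

    if __name__ == '__main__':
        p = dummy.Process(target=task)
        print(p.exitcode)   # None: start() has not been called yet
        p.start()
        p.join()
        print(p.exitcode)   # 0: the underlying thread has run to completion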
diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py
index b14143b..47d54f2 100644
--- a/Lib/multiprocessing/forking.py
+++ b/Lib/multiprocessing/forking.py
@@ -315,7 +315,7 @@ else:
sys_argv=sys.argv,
log_to_stderr=_log_to_stderr,
orig_dir=process.ORIGINAL_DIR,
- authkey=process.current_process().get_authkey(),
+ authkey=process.current_process().authkey,
)
if _logger is not None:
@@ -363,7 +363,7 @@ def prepare(data):
old_main_modules.append(sys.modules['__main__'])
if 'name' in data:
- process.current_process().set_name(data['name'])
+ process.current_process().name = data['name']
if 'authkey' in data:
process.current_process()._authkey = data['authkey']
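Note the asymmetry this hunk keeps: prepare() still writes the private
_authkey slot directly, while user code now goes through the authkey property,
whose setter (see the process.py hunk below) wraps the value in
AuthenticationString. A small sketch of the property path, with a made-up key:

    from multiprocessing import current_process

    if __name__ == '__main__':
        current_process().name = 'restored-name'       # was set_name(...)
        current_process().authkey = b'not-a-real-key'  # was set_authkey(...)
        # The setter stores an AuthenticationString, not the raw bytes.
        print(type(current_process().authkey).__name__)
        print(current_process().name)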
diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py
index d7558c7..d6b16e5 100644
--- a/Lib/multiprocessing/managers.py
+++ b/Lib/multiprocessing/managers.py
@@ -450,7 +450,7 @@ class BaseManager(object):
def __init__(self, address=None, authkey=None, serializer='pickle'):
if authkey is None:
- authkey = current_process().get_authkey()
+ authkey = current_process().authkey
self._address = address # XXX not final address if eg ('', 0)
self._authkey = AuthenticationString(authkey)
self._state = State()
@@ -495,7 +495,7 @@ class BaseManager(object):
self._serializer, writer),
)
ident = ':'.join(str(i) for i in self._process._identity)
- self._process.set_name(type(self).__name__ + '-' + ident)
+ self._process.name = type(self).__name__ + '-' + ident
self._process.start()
# get address of server
@@ -696,7 +696,7 @@ class BaseProxy(object):
elif self._manager is not None:
self._authkey = self._manager._authkey
else:
- self._authkey = current_process().get_authkey()
+ self._authkey = current_process().authkey
if incref:
self._incref()
@@ -705,7 +705,7 @@ class BaseProxy(object):
def _connect(self):
util.debug('making connection to manager')
- name = current_process().get_name()
+ name = current_process().name
if threading.current_thread().name != 'MainThread':
name += '|' + threading.current_thread().name
conn = self._Client(self._token.address, authkey=self._authkey)
@@ -886,7 +886,7 @@ def AutoProxy(token, serializer, manager=None, authkey=None,
if authkey is None and manager is not None:
authkey = manager._authkey
if authkey is None:
- authkey = current_process().get_authkey()
+ authkey = current_process().authkey
ProxyType = MakeProxyType('AutoProxy[%s]' % token.typeid, exposed)
proxy = ProxyType(token, serializer, manager=manager, authkey=authkey,
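A small sketch of the first hunk's effect, using only what the diff shows:
with authkey=None, BaseManager now falls back to the current process's authkey
property and stores it in its private _authkey attribute (read here purely for
illustration).

    from multiprocessing import current_process
    from multiprocessing.managers import BaseManager

    if __name__ == '__main__':
        m = BaseManager()   # address=None, authkey=None
        # The constructor fell back to current_process().authkey.
        assert m._authkey == current_process().authkey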
diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py
index cb0e49f..90fd178 100644
--- a/Lib/multiprocessing/pool.py
+++ b/Lib/multiprocessing/pool.py
@@ -99,7 +99,7 @@ class Pool(object):
args=(self._inqueue, self._outqueue, initializer, initargs)
)
self._pool.append(w)
- w.name = w.get_name().replace('Process', 'PoolWorker')
+ w.name = w.name.replace('Process', 'PoolWorker')
w.daemon = True
w.start()
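The renaming the Pool performs above becomes a plain read-modify-write on the
name property. A standalone sketch of the same pattern (the work function is a
placeholder):

    from multiprocessing import Process

    def work():
        pass

    if __name__ == '__main__':
        w = Process(target=work)        # default name is e.g. 'Process-1'
        w.name = w.name.replace('Process', 'PoolWorker')
        w.daemon = True
        w.start()
        w.join()
        print(w.name)                   # e.g. 'PoolWorker-1'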
diff --git a/Lib/multiprocessing/process.py b/Lib/multiprocessing/process.py
index 1f89dba..e21d2f0 100644
--- a/Lib/multiprocessing/process.py
+++ b/Lib/multiprocessing/process.py
@@ -132,45 +132,43 @@ class Process(object):
self._popen.poll()
return self._popen.returncode is None
- def get_name(self):
- '''
- Return name of process
- '''
+ @property
+ def name(self):
return self._name
- def set_name(self, name):
- '''
- Set name of process
- '''
+ @name.setter
+ def name(self, name):
assert isinstance(name, str), 'name must be a string'
self._name = name
- def is_daemon(self):
+ @property
+ def daemon(self):
'''
Return whether process is a daemon
'''
return self._daemonic
- def set_daemon(self, daemonic):
+ @daemon.setter
+ def daemon(self, daemonic):
'''
Set whether process is a daemon
'''
assert self._popen is None, 'process has already started'
self._daemonic = daemonic
- def get_authkey(self):
- '''
- Return authorization key of process
- '''
+ @property
+ def authkey(self):
return self._authkey
- def set_authkey(self, authkey):
+ @authkey.setter
+ def authkey(self, authkey):
'''
Set authorization key of process
'''
self._authkey = AuthenticationString(authkey)
- def get_exitcode(self):
+ @property
+ def exitcode(self):
'''
Return exit code of process or `None` if it has yet to stop
'''
@@ -178,7 +176,8 @@ class Process(object):
return self._popen
return self._popen.poll()
- def get_ident(self):
+ @property
+ def ident(self):
'''
Return identifier (PID) of process or `None` if it has yet to start
'''
@@ -187,7 +186,7 @@ class Process(object):
else:
return self._popen and self._popen.pid
- pid = property(get_ident)
+ pid = ident
def __repr__(self):
if self is _current_process:
@@ -198,7 +197,7 @@ class Process(object):
status = 'initial'
else:
if self._popen.poll() is not None:
- status = self.get_exitcode()
+ status = self.exitcode
else:
status = 'started'
@@ -245,7 +244,7 @@ class Process(object):
except:
exitcode = 1
import traceback
- sys.stderr.write('Process %s:\n' % self.get_name())
+ sys.stderr.write('Process %s:\n' % self.name)
sys.stderr.flush()
traceback.print_exc()
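The mechanics behind this hunk, reduced to a toy class (names here are
illustrative, not taken from the patch): each read accessor becomes @property,
each setter becomes @name.setter, and `pid = property(get_ident)` collapses to
`pid = ident` because the property object is an ordinary class attribute that
can simply be bound to a second name.

    class Toy:
        def __init__(self, name='Toy-1'):
            self._name = name
            self._popen = None          # stands in for the started-process handle

        @property
        def name(self):
            return self._name

        @name.setter
        def name(self, name):
            assert isinstance(name, str), 'name must be a string'
            self._name = name

        @property
        def ident(self):
            # None before start; the real Process returns the child PID here.
            return self._popen and self._popen.pid

        pid = ident                     # alias: same property under a second name

    t = Toy()
    t.name = 'Toy-2'
    print(t.name, t.pid)                # Toy-2 None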
diff --git a/Lib/multiprocessing/reduction.py b/Lib/multiprocessing/reduction.py
index 194bb17..010d871 100644
--- a/Lib/multiprocessing/reduction.py
+++ b/Lib/multiprocessing/reduction.py
@@ -82,9 +82,9 @@ def _get_listener():
try:
if _listener is None:
debug('starting listener and thread for sending handles')
- _listener = Listener(authkey=current_process().get_authkey())
+ _listener = Listener(authkey=current_process().authkey)
t = threading.Thread(target=_serve)
- t.set_daemon(True)
+ t.daemon = True
t.start()
finally:
_lock.release()
@@ -127,7 +127,7 @@ def rebuild_handle(pickled_data):
if inherited:
return handle
sub_debug('rebuilding handle %d', handle)
- conn = Client(address, authkey=current_process().get_authkey())
+ conn = Client(address, authkey=current_process().authkey)
conn.send((handle, os.getpid()))
new_handle = recv_handle(conn)
conn.close()
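The removed t.set_daemon(True) was the matching threading call; the daemon
flag on a Thread is now a plain attribute that must be set before start(). A
minimal sketch, with a placeholder serve loop:

    import threading
    import time

    def serve():
        time.sleep(60)   # stand-in for the handle-serving loop

    if __name__ == '__main__':
        t = threading.Thread(target=serve)
        t.daemon = True      # was t.set_daemon(True); set before start()
        t.start()
        # The interpreter may exit without joining a daemon thread.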
diff --git a/Lib/multiprocessing/synchronize.py b/Lib/multiprocessing/synchronize.py
index 628792e..be56a5b 100644
--- a/Lib/multiprocessing/synchronize.py
+++ b/Lib/multiprocessing/synchronize.py
@@ -108,9 +108,9 @@ class Lock(SemLock):
def __repr__(self):
try:
if self._semlock._is_mine():
- name = current_process().get_name()
- if threading.current_thread().get_name() != 'MainThread':
- name += '|' + threading.current_thread().get_name()
+ name = current_process().name
+ if threading.current_thread().name != 'MainThread':
+ name += '|' + threading.current_thread().name
elif self._semlock._get_value() == 1:
name = 'None'
elif self._semlock._count() > 0:
@@ -133,9 +133,9 @@ class RLock(SemLock):
def __repr__(self):
try:
if self._semlock._is_mine():
- name = current_process().get_name()
- if threading.current_thread().get_name() != 'MainThread':
- name += '|' + threading.current_thread().get_name()
+ name = current_process().name
+ if threading.current_thread().name != 'MainThread':
+ name += '|' + threading.current_thread().name
count = self._semlock._count()
elif self._semlock._get_value() == 1:
name, count = 'None', 0
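Both __repr__ methods now build the owner label the same way; a small helper
sketch (owner_label is a made-up name) showing the value they embed:

    import threading
    from multiprocessing import current_process

    def owner_label():
        # Process name, plus '|<thread name>' when not on the main thread.
        name = current_process().name
        if threading.current_thread().name != 'MainThread':
            name += '|' + threading.current_thread().name
        return name

    if __name__ == '__main__':
        print(owner_label())                    # e.g. 'MainProcess'
        t = threading.Thread(target=lambda: print(owner_label()),
                             name='worker')
        t.start()
        t.join()                                # prints 'MainProcess|worker'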
diff --git a/Lib/multiprocessing/util.py b/Lib/multiprocessing/util.py
index 8aff4f4..aae38c7 100644
--- a/Lib/multiprocessing/util.py
+++ b/Lib/multiprocessing/util.py
@@ -274,11 +274,11 @@ def _exit_function():
for p in active_children():
if p._daemonic:
- info('calling terminate() for daemon %s', p.get_name())
+ info('calling terminate() for daemon %s', p.name)
p._popen.terminate()
for p in active_children():
- info('calling join() for process %s', p.get_name())
+ info('calling join() for process %s', p.name)
p.join()
debug('running the remaining "atexit" finalizers')
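A standalone sketch of the same shutdown shape _exit_function() uses, assuming
a couple of sleeping daemon children (time.sleep as the target is just a
placeholder):

    import time
    from multiprocessing import Process, active_children

    if __name__ == '__main__':
        for _ in range(2):
            p = Process(target=time.sleep, args=(30,))
            p.daemon = True                       # was set_daemon(True)
            p.start()

        for p in active_children():
            print('calling terminate() for daemon %s' % p.name)
            p.terminate()
        for p in active_children():
            print('calling join() for process %s' % p.name)
            p.join()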