diff options
author | Nick Coghlan <ncoghlan@gmail.com> | 2012-10-16 12:50:04 (GMT) |
---|---|---|
committer | Nick Coghlan <ncoghlan@gmail.com> | 2012-10-16 12:50:04 (GMT) |
commit | f06ea25df2e7840d00a3ad624446c47f93aad1d4 (patch) | |
tree | 8eba914b7e3486af8d6b1dd3b643997d1b35c2aa /Doc | |
parent | c101bf32c4011a3c7f3249b8c15bcf95b944095b (diff) | |
download | cpython-f06ea25df2e7840d00a3ad624446c47f93aad1d4.zip cpython-f06ea25df2e7840d00a3ad624446c47f93aad1d4.tar.gz cpython-f06ea25df2e7840d00a3ad624446c47f93aad1d4.tar.bz2 |
Tweak the threaded example in concurrent.futures
Diffstat (limited to 'Doc')
-rw-r--r-- | Doc/library/concurrent.futures.rst | 25 |
1 file changed, 15 insertions, 10 deletions
diff --git a/Doc/library/concurrent.futures.rst b/Doc/library/concurrent.futures.rst
index 8908542..70b0fd1 100644
--- a/Doc/library/concurrent.futures.rst
+++ b/Doc/library/concurrent.futures.rst
@@ -136,20 +136,25 @@ ThreadPoolExecutor Example
            'http://www.bbc.co.uk/',
            'http://some-made-up-domain.com/']

+   # Retrieve a single page and report the url and contents
    def load_url(url, timeout):
-       return urllib.request.urlopen(url, timeout=timeout).read()
+       conn = urllib.request.urlopen(url, timeout=timeout)
+       return conn.readall()

+   # We can use a with statement to ensure threads are cleaned up promptly
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
-       future_to_url = dict((executor.submit(load_url, url, 60), url)
-                            for url in URLS)
-
-       for future in concurrent.futures.as_completed(future_to_url):
-           url = future_to_url[future]
-           if future.exception() is not None:
-               print('%r generated an exception: %s' % (url,
-                                                        future.exception()))
+       # Start the load operations and mark each future with its URL
+       load_urls = [executor.submit(load_url, url, 60) for url in URLS]
+       for future, url in zip(load_urls, URLS):
+           future.url = url
+       for future in concurrent.futures.as_completed(load_urls):
+           url = future.url
+           try:
+               data = future.result()
+           except Exception as exc:
+               print('%r generated an exception: %s' % (url, exc))
            else:
-               print('%r page is %d bytes' % (url, len(future.result())))
+               print('%r page is %d bytes' % (url, len(data)))

 ProcessPoolExecutor