path: root/Lib/test/test_robotparser.py
author    Antoine Pitrou <solipsis@pitrou.net>  2010-09-07 21:40:25 (GMT)
committer Antoine Pitrou <solipsis@pitrou.net>  2010-09-07 21:40:25 (GMT)
commit    c818ed4d61b12a702b2af813cd0ac4839faf497f (patch)
tree      ba4a83dcee870d847a7f0f8c2a91200c4b26304a /Lib/test/test_robotparser.py
parent    d47a68716e7dfdf79a19129a9a8369b41a173f27 (diff)
Merged revisions 84597-84599 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/branches/py3k

........
  r84597 | antoine.pitrou | 2010-09-07 22:42:19 +0200 (Tue, 07 Sep 2010) | 5 lines

  Issue #8574: better implementation of test.support.transient_internet().
  Original patch by Victor.
........
  r84598 | antoine.pitrou | 2010-09-07 23:05:49 +0200 (Tue, 07 Sep 2010) | 6 lines

  Issue #9792: In case of connection failure, socket.create_connection()
  would swallow the exception and raise a new one, making it impossible
  to fetch the original errno, or to filter timeout errors.  Now the
  original error is re-raised.
........
  r84599 | antoine.pitrou | 2010-09-07 23:09:09 +0200 (Tue, 07 Sep 2010) | 4 lines

  Improve transient_internet() again to detect more network errors,
  and use it in test_robotparser.  Fixes #8574.
........
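For context, the r84598 change makes socket.create_connection() remember the
last connection error and re-raise it unchanged, instead of raising a fresh
socket.error that discards the original errno. A simplified sketch of that
pattern (condensed, not the exact stdlib code):

import socket

def create_connection(address, timeout=None):
    # Sketch of the Issue #9792 pattern: keep the last error around and
    # re-raise it as-is, so callers still see the original errno and can
    # tell timeouts apart from other failures.
    host, port = address
    err = None
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)
            if timeout is not None:
                sock.settimeout(timeout)
            sock.connect(sa)
            return sock
        except socket.error as e:
            err = e                       # remember the original exception
            if sock is not None:
                sock.close()
    if err is not None:
        raise err                         # re-raise with errno intact
    raise socket.error("getaddrinfo returns an empty list")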
Diffstat (limited to 'Lib/test/test_robotparser.py')
-rw-r--r--  Lib/test/test_robotparser.py | 29
1 file changed, 15 insertions(+), 14 deletions(-)
diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py
index aa73ec5..3376a8a 100644
--- a/Lib/test/test_robotparser.py
+++ b/Lib/test/test_robotparser.py
@@ -232,23 +232,24 @@ class NetworkTestCase(unittest.TestCase):
     def testPasswordProtectedSite(self):
         test_support.requires('network')
-        # XXX it depends on an external resource which could be unavailable
-        url = 'http://mueblesmoraleda.com'
-        parser = robotparser.RobotFileParser()
-        parser.set_url(url)
-        try:
-            parser.read()
-        except IOError:
-            self.skipTest('%s is unavailable' % url)
-        self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)
+        with test_support.transient_internet('mueblesmoraleda.com'):
+            url = 'http://mueblesmoraleda.com'
+            parser = robotparser.RobotFileParser()
+            parser.set_url(url)
+            try:
+                parser.read()
+            except IOError:
+                self.skipTest('%s is unavailable' % url)
+            self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)
 
     def testPythonOrg(self):
         test_support.requires('network')
-        parser = robotparser.RobotFileParser(
-            "http://www.python.org/robots.txt")
-        parser.read()
-        self.assertTrue(parser.can_fetch("*",
-                                         "http://www.python.org/robots.txt"))
+        with test_support.transient_internet('www.python.org'):
+            parser = robotparser.RobotFileParser(
+                "http://www.python.org/robots.txt")
+            parser.read()
+            self.assertTrue(
+                parser.can_fetch("*", "http://www.python.org/robots.txt"))
 
 def test_main():
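
For readers unfamiliar with it, test_support.transient_internet()
(test.support.transient_internet() on the py3k branch) is a context manager
that turns transient network failures into test skips rather than test errors.
A minimal sketch of the idea, assuming a hand-picked errno list rather than
the real implementation's more thorough detection:

import contextlib
import errno
import socket
import unittest

@contextlib.contextmanager
def transient_internet(resource_name):
    # Minimal sketch only -- the real test.support version detects more
    # error conditions. The errno list here is an illustrative assumption.
    transient = (errno.ECONNREFUSED, errno.ECONNRESET,
                 errno.EHOSTUNREACH, errno.ENETUNREACH, errno.ETIMEDOUT)
    try:
        yield
    except socket.timeout as err:
        # Catch timeouts first: socket.timeout subclasses socket.error.
        raise unittest.SkipTest('%s timed out: %s' % (resource_name, err))
    except (socket.error, IOError) as err:
        if getattr(err, 'errno', None) in transient:
            raise unittest.SkipTest('%s unavailable: %s'
                                    % (resource_name, err))
        raise                             # genuine test failure: propagate

Wrapping a test body in this context manager, as the diff above does, lets a
flaky external host skip the test while real assertion failures still surface.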