author    | Antoine Pitrou <solipsis@pitrou.net> | 2010-09-07 21:09:09 (GMT)
committer | Antoine Pitrou <solipsis@pitrou.net> | 2010-09-07 21:09:09 (GMT)
commit    | 8bc09039ed7a2aa9d878e82419baf3402c48600d (patch)
tree      | a7684250e1ec70d80b90d9b6a500bee22c5ed29c /Lib/test/test_robotparser.py
parent    | 4b92b5fad3baaa22a3ab198556e1adf5a2df7d9c (diff)
Improve transient_internet() again to detect more network errors,
and use it in test_robotparser. Fixes #8574.
Diffstat (limited to 'Lib/test/test_robotparser.py')
-rw-r--r-- | Lib/test/test_robotparser.py | 29
1 file changed, 15 insertions(+), 14 deletions(-)
diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py
index fd00706..2a6d047 100644
--- a/Lib/test/test_robotparser.py
+++ b/Lib/test/test_robotparser.py
@@ -235,23 +235,24 @@ class NetworkTestCase(unittest.TestCase):
 
     def testPasswordProtectedSite(self):
         support.requires('network')
-        # XXX it depends on an external resource which could be unavailable
-        url = 'http://mueblesmoraleda.com'
-        parser = urllib.robotparser.RobotFileParser()
-        parser.set_url(url)
-        try:
-            parser.read()
-        except URLError:
-            self.skipTest('%s is unavailable' % url)
-        self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)
+        with support.transient_internet('mueblesmoraleda.com'):
+            url = 'http://mueblesmoraleda.com'
+            parser = urllib.robotparser.RobotFileParser()
+            parser.set_url(url)
+            try:
+                parser.read()
+            except URLError:
+                self.skipTest('%s is unavailable' % url)
+            self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)
 
     def testPythonOrg(self):
         support.requires('network')
-        parser = urllib.robotparser.RobotFileParser(
-            "http://www.python.org/robots.txt")
-        parser.read()
-        self.assertTrue(parser.can_fetch("*",
-                        "http://www.python.org/robots.txt"))
+        with support.transient_internet('www.python.org'):
+            parser = urllib.robotparser.RobotFileParser(
+                "http://www.python.org/robots.txt")
+            parser.read()
+            self.assertTrue(
+                parser.can_fetch("*", "http://www.python.org/robots.txt"))
 
 def test_main():
     support.run_unittest(NetworkTestCase)
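The diffstat above is limited to Lib/test/test_robotparser.py, so the companion change that makes transient_internet() "detect more network errors" in Lib/test/support.py is not shown here. For orientation only, the following is a minimal, hypothetical sketch of what a transient_internet-style context manager does; the helper name, timeout handling, and errno list are assumptions for illustration, not the actual support code.

```python
# Hypothetical, simplified sketch of a transient_internet-style helper
# (NOT the actual Lib/test/support implementation): it turns common
# network failures into a skipped test instead of a test failure.
import contextlib
import errno
import socket
import unittest
import urllib.error


@contextlib.contextmanager
def transient_internet_sketch(resource_name, timeout=30.0):
    """Skip the enclosing test when the named resource is unreachable."""
    default_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
        yield
    except (socket.timeout, urllib.error.URLError) as exc:
        raise unittest.SkipTest(
            "transient network issue reaching %r: %s" % (resource_name, exc))
    except OSError as exc:
        # "Detect more network errors": treat unreachable-host style errnos
        # as transient conditions rather than as failures of the test itself.
        if exc.errno in (errno.ECONNREFUSED, errno.ECONNRESET,
                         errno.EHOSTUNREACH, errno.ENETUNREACH,
                         errno.ETIMEDOUT):
            raise unittest.SkipTest(
                "transient network issue reaching %r: %s" % (resource_name, exc))
        raise
    finally:
        socket.setdefaulttimeout(default_timeout)
```

A network test would then wrap its I/O in `with transient_internet_sketch('www.python.org'): ...`, which is the same pattern the converted tests above use with the real support.transient_internet().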