diff options
author | Antoine Pitrou <solipsis@pitrou.net> | 2011-07-08 17:44:55 (GMT) |
---|---|---|
committer | Antoine Pitrou <solipsis@pitrou.net> | 2011-07-08 17:44:55 (GMT) |
commit | 4a183b47f353a25b76aaaf1b7043458dbcab8890 (patch) | |
tree | 5870dffffe385f3a89d077cc9b8b1d7a911609e3 | |
parent | 88fcf1bcab11e4db00215339748fa36bcf4a16b6 (diff) | |
parent | 95531ea2f1c69eed6b8a5d964f0fd47c124ccca8 (diff) | |
download | cpython-4a183b47f353a25b76aaaf1b7043458dbcab8890.zip cpython-4a183b47f353a25b76aaaf1b7043458dbcab8890.tar.gz cpython-4a183b47f353a25b76aaaf1b7043458dbcab8890.tar.bz2 |
Avoid failing in test_robotparser when mueblesmoraleda.com is flaky and
an overzealous DNS service (e.g. OpenDNS) redirects to a placeholder
Web site.
-rw-r--r-- | Lib/test/test_robotparser.py | 19 | ||||
-rw-r--r-- | Misc/NEWS | 4 |
2 files changed, 21 insertions, 2 deletions
diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py
index 2a6d047..178761d 100644
--- a/Lib/test/test_robotparser.py
+++ b/Lib/test/test_robotparser.py
@@ -1,7 +1,8 @@
 import io
 import unittest
 import urllib.robotparser
-from urllib.error import URLError
+from urllib.error import URLError, HTTPError
+from urllib.request import urlopen
 from test import support
 
 class RobotTestCase(unittest.TestCase):
@@ -237,13 +238,27 @@ class NetworkTestCase(unittest.TestCase):
         support.requires('network')
         with support.transient_internet('mueblesmoraleda.com'):
             url = 'http://mueblesmoraleda.com'
+            robots_url = url + "/robots.txt"
+            # First check the URL is usable for our purposes, since the
+            # test site is a bit flaky.
+            try:
+                urlopen(robots_url)
+            except HTTPError as e:
+                if e.code not in {401, 403}:
+                    self.skipTest(
+                        "%r should return a 401 or 403 HTTP error, not %r"
+                        % (robots_url, e.code))
+            else:
+                self.skipTest(
+                    "%r should return a 401 or 403 HTTP error, not succeed"
+                    % (robots_url))
             parser = urllib.robotparser.RobotFileParser()
             parser.set_url(url)
             try:
                 parser.read()
             except URLError:
                 self.skipTest('%s is unavailable' % url)
-        self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)
+        self.assertEqual(parser.can_fetch("*", robots_url), False)
 
     def testPythonOrg(self):
         support.requires('network')
diff --git a/Misc/NEWS b/Misc/NEWS
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -1004,6 +1004,10 @@ Extension Modules
 Tests
 -----
 
+- Avoid failing in test_robotparser when mueblesmoraleda.com is flaky and
+  an overzealous DNS service (e.g. OpenDNS) redirects to a placeholder
+  Web site.
+
 - Avoid failing in test_urllibnet.test_bad_address when some overzealous
   DNS service (e.g. OpenDNS) resolves a non-existent domain name.  The test
   is now skipped instead.