author     Antoine Pitrou <solipsis@pitrou.net>  2011-07-08 17:43:51 (GMT)
committer  Antoine Pitrou <solipsis@pitrou.net>  2011-07-08 17:43:51 (GMT)
commit     95531ea2f1c69eed6b8a5d964f0fd47c124ccca8
tree       f17ece654d4df717ec7c45bd2d84b98ebed82b7d
parent     72fff046a6d96131d2a929699e6fc8875f57e452
Avoid failing in test_robotparser when mueblesmoraleda.com is flaky and
an overzealous DNS service (e.g. OpenDNS) redirects to a placeholder Web site.
-rw-r--r--  Lib/test/test_robotparser.py  19
-rw-r--r--  Misc/NEWS                      4
2 files changed, 21 insertions, 2 deletions
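
The change below follows a probe-and-skip pattern: before exercising RobotFileParser against the live site, the test fetches robots.txt directly and skips itself unless the server answers with a genuine 401 or 403, which a password-protected site should return and a DNS placeholder page will not. A minimal, standalone sketch of that pattern follows; only the URL and the 401/403 expectation come from the patch, while the class and method names and the extra URLError branch are illustrative.

# Minimal sketch of the probe-and-skip pattern applied in the diff below.
# Only the URL and the 401/403 expectation come from the patch; everything
# else (names, the URLError branch) is illustrative.
import unittest
from urllib.error import HTTPError, URLError
from urllib.request import urlopen


class PasswordProtectedSiteSketch(unittest.TestCase):

    def test_probe_then_exercise_parser(self):
        robots_url = "http://mueblesmoraleda.com/robots.txt"
        try:
            urlopen(robots_url)
        except HTTPError as e:
            # A genuinely password-protected site answers 401 or 403 here;
            # any other HTTP error means it is not in a testable state.
            if e.code not in {401, 403}:
                self.skipTest("%r returned HTTP %r, not 401/403"
                              % (robots_url, e.code))
        except URLError:
            self.skipTest("%r is unreachable" % robots_url)
        else:
            # A DNS placeholder page typically serves a 200 response, so a
            # successful fetch also means the real assertions are pointless.
            self.skipTest("%r succeeded instead of returning 401/403"
                          % robots_url)
        # ...the real test goes on to exercise urllib.robotparser here...


if __name__ == "__main__":
    unittest.main()
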
diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py
index 2a6d047..178761d 100644
--- a/Lib/test/test_robotparser.py
+++ b/Lib/test/test_robotparser.py
@@ -1,7 +1,8 @@
 import io
 import unittest
 import urllib.robotparser
-from urllib.error import URLError
+from urllib.error import URLError, HTTPError
+from urllib.request import urlopen
 from test import support
 
 class RobotTestCase(unittest.TestCase):
@@ -237,13 +238,27 @@ class NetworkTestCase(unittest.TestCase):
         support.requires('network')
         with support.transient_internet('mueblesmoraleda.com'):
             url = 'http://mueblesmoraleda.com'
+            robots_url = url + "/robots.txt"
+            # First check the URL is usable for our purposes, since the
+            # test site is a bit flaky.
+            try:
+                urlopen(robots_url)
+            except HTTPError as e:
+                if e.code not in {401, 403}:
+                    self.skipTest(
+                        "%r should return a 401 or 403 HTTP error, not %r"
+                        % (robots_url, e.code))
+            else:
+                self.skipTest(
+                    "%r should return a 401 or 403 HTTP error, not succeed"
+                    % (robots_url))
             parser = urllib.robotparser.RobotFileParser()
             parser.set_url(url)
             try:
                 parser.read()
             except URLError:
                 self.skipTest('%s is unavailable' % url)
-            self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)
+            self.assertEqual(parser.can_fetch("*", robots_url), False)
 
     def testPythonOrg(self):
         support.requires('network')
diff --git a/Misc/NEWS b/Misc/NEWS
index e415b4a..53b7562 100644
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -47,6 +47,10 @@ C-API
 Tests
 -----
 
+- Avoid failing in test_robotparser when mueblesmoraleda.com is flaky and
+  an overzealous DNS service (e.g. OpenDNS) redirects to a placeholder
+  Web site.
+
 - Avoid failing in test_urllibnet.test_bad_address when some overzealous
   DNS service (e.g. OpenDNS) resolves a non-existent domain name. The test
   is now skipped instead.
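
The pre-existing entry above points at a related pattern in test_urllibnet.test_bad_address: skip rather than fail when the local resolver answers for names that should not exist. A rough sketch of that check, using a made-up hostname and test names that are not the real ones, could look like this:

# Rough sketch of skipping when an "overzealous" DNS service resolves
# names that should not exist.  The hostname and names below are made up
# and are not the ones used by test_urllibnet.
import socket
import unittest


class BadAddressSketch(unittest.TestCase):

    def test_bad_address(self):
        bogus_host = "no-such-host.this-domain-should-not-exist.net"
        try:
            socket.gethostbyname(bogus_host)
        except socket.gaierror:
            pass  # expected: the name does not resolve
        else:
            # Some DNS services (e.g. OpenDNS) resolve any name to a
            # placeholder address, which would make the real assertion
            # meaningless; skip instead of failing.
            self.skipTest("DNS service resolves non-existent domains")
        # ...the real test would now assert that opening the URL fails...


if __name__ == "__main__":
    unittest.main()
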