path: root/Lib/test/test_robotparser.py
author    Florent Xicluna <florent.xicluna@gmail.com>    2010-04-03 00:45:27 (GMT)
committer Florent Xicluna <florent.xicluna@gmail.com>    2010-04-03 00:45:27 (GMT)
commit    05609eff8a6d02753b8a177560998034f28efb5d (patch)
tree      647af88c07cab58df1ed10afd17b02a276eff1e0 /Lib/test/test_robotparser.py
parent    aeac26b9eadc17840857799f668341f508314518 (diff)
Merged revisions 79605 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/branches/py3k

........
  r79605 | antoine.pitrou | 2010-04-02 19:12:12 +0200 (Fri, 02 Apr 2010) | 3 lines

  Furniture is not very reliable these days (buildbot failures).
........
Diffstat (limited to 'Lib/test/test_robotparser.py')
-rw-r--r--  Lib/test/test_robotparser.py | 14 ++++++++------
1 file changed, 8 insertions(+), 6 deletions(-)
diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py
index cc8b390..4c3b536 100644
--- a/Lib/test/test_robotparser.py
+++ b/Lib/test/test_robotparser.py
@@ -1,6 +1,7 @@
 import io
 import unittest
 import urllib.robotparser
+from urllib.error import URLError
 from test import support
 
 class RobotTestCase(unittest.TestCase):
@@ -208,18 +209,19 @@ RobotTest(13, doc, good, bad, agent="googlebot")
 class NetworkTestCase(unittest.TestCase):
 
     def testPasswordProtectedSite(self):
-        if not support.is_resource_enabled('network'):
-            return
-        # whole site is password-protected.
+        support.requires('network')
+        # XXX it depends on an external resource which could be unavailable
         url = 'http://mueblesmoraleda.com'
         parser = urllib.robotparser.RobotFileParser()
         parser.set_url(url)
-        parser.read()
+        try:
+            parser.read()
+        except URLError:
+            self.skipTest('%s is unavailable' % url)
         self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)
 
     def testPythonOrg(self):
-        if not support.is_resource_enabled('network'):
-            return
+        support.requires('network')
         parser = urllib.robotparser.RobotFileParser(
             "http://www.python.org/robots.txt")
         parser.read()
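
For context, the pattern this commit introduces, gating a network test on support.requires() and turning an external outage into a skip rather than a failure, looks like this when lifted into a standalone module. This is a minimal sketch, not code from the commit: the test class, test name, and example.com URL are hypothetical stand-ins, and it assumes a CPython checkout where the test.support helpers are importable.

import unittest
import urllib.robotparser
from urllib.error import URLError

from test import support


class NetworkSkipExample(unittest.TestCase):

    def test_flaky_external_site(self):
        # Under regrtest, support.requires() raises ResourceDenied (a
        # unittest.SkipTest subclass) unless the run enabled the 'network'
        # resource with -u network; when the module is run directly, it
        # allows everything.
        support.requires('network')
        url = 'http://example.com'  # hypothetical stand-in for the real host
        parser = urllib.robotparser.RobotFileParser()
        parser.set_url(url)
        try:
            parser.read()
        except URLError:
            # Third-party host down or unreachable: report a skip instead
            # of a failure, so buildbots stay green during outages.
            self.skipTest('%s is unavailable' % url)
        # Only assert once the fetch actually succeeded.
        self.assertIsInstance(parser.can_fetch('*', url + '/robots.txt'), bool)


if __name__ == '__main__':
    unittest.main()

The substantive change over the removed idiom is visibility: is_resource_enabled() followed by a bare return made a disabled test look like a pass, whereas support.requires() and skipTest() both surface as skips in the unittest report.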