diff options
author | Jeremy Hylton <jeremy@alum.mit.edu> | 2008-06-18 20:49:58 (GMT) |
---|---|---|
committer | Jeremy Hylton <jeremy@alum.mit.edu> | 2008-06-18 20:49:58 (GMT) |
commit | 1afc1696167547a5fa101c53e5a3ab4717f8852c (patch) | |
tree | e989e72e71530d892214562785df7e11f84c1111 /Lib/test/test_robotparser.py | |
parent | a656d2cd8984f1ecb5a7e2cd09a18f72452f2b78 (diff) | |
download | cpython-1afc1696167547a5fa101c53e5a3ab4717f8852c.zip cpython-1afc1696167547a5fa101c53e5a3ab4717f8852c.tar.gz cpython-1afc1696167547a5fa101c53e5a3ab4717f8852c.tar.bz2 |
Make a new urllib package.
It consists of code from urllib, urllib2, urlparse, and robotparser.
The old modules have all been removed. The new package has five
submodules: urllib.parse, urllib.request, urllib.response,
urllib.error, and urllib.robotparser. The urllib.request.urlopen()
function uses the url opener from urllib2.
Note that the unittests have not been renamed for the
beta, but they will be renamed in the future.
Joint work with Senthil Kumaran.
Diffstat (limited to 'Lib/test/test_robotparser.py')
-rw-r--r-- | Lib/test/test_robotparser.py | 7 |
1 file changed, 4 insertions, 3 deletions
diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py index 4e530f0..fbb02bc 100644 --- a/Lib/test/test_robotparser.py +++ b/Lib/test/test_robotparser.py @@ -1,5 +1,6 @@ -import unittest, robotparser import io +import unittest +import urllib.robotparser from test import support class RobotTestCase(unittest.TestCase): @@ -34,7 +35,7 @@ def RobotTest(index, robots_txt, good_urls, bad_urls, agent="test_robotparser"): lines = io.StringIO(robots_txt).readlines() - parser = robotparser.RobotFileParser() + parser = urllib.robotparser.RobotFileParser() parser.parse(lines) for url in good_urls: tests.addTest(RobotTestCase(index, parser, url, 1, agent)) @@ -140,7 +141,7 @@ class TestCase(unittest.TestCase): support.requires('network') # whole site is password-protected. url = 'http://mueblesmoraleda.com' - parser = robotparser.RobotFileParser() + parser = urllib.robotparser.RobotFileParser() parser.set_url(url) parser.read() self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False) |