author     Raymond Hettinger <python@rcn.com>    2004-03-13 20:27:23 (GMT)
committer  Raymond Hettinger <python@rcn.com>    2004-03-13 20:27:23 (GMT)
commit     2d95f1ad570791a5dffb16addd74b35b46f77fdc (patch)
tree       6e0cddf34d3727863d9481c8cbbb20a1b27f4a62 /Lib
parent     3aa82c07f709a532de28a39308c23757d3b9c91b (diff)
SF patch #911431: robot.txt must be robots.txt
(Contributed by George Yoshida.)
Diffstat (limited to 'Lib')
-rw-r--r--  Lib/robotparser.py  4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Lib/robotparser.py b/Lib/robotparser.py
index e2af545..6b23188 100644
--- a/Lib/robotparser.py
+++ b/Lib/robotparser.py
@@ -83,7 +83,7 @@ class RobotFileParser:
             self.entries.append(entry)
 
     def parse(self, lines):
-        """parse the input lines from a robot.txt file.
+        """parse the input lines from a robots.txt file.
            We allow that a user-agent: line is not preceded by
            one or more blank lines."""
         state = 0
@@ -148,7 +148,7 @@ class RobotFileParser:
 
     def can_fetch(self, useragent, url):
         """using the parsed robots.txt decide if useragent can fetch url"""
-        _debug("Checking robot.txt allowance for:\n user agent: %s\n url: %s" %
+        _debug("Checking robots.txt allowance for:\n user agent: %s\n url: %s" %
                (useragent, url))
         if self.disallow_all:
             return False
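
For orientation, the patch only corrects the spelling "robot.txt" to "robots.txt" in a docstring and a debug message; behaviour is unchanged. Below is a minimal usage sketch of the module the patch touches, assuming the Python 2-era robotparser module (later moved to urllib.robotparser in Python 3); the host name is a placeholder.

import robotparser  # Python 2 module name; Python 3 uses `from urllib import robotparser`

rp = robotparser.RobotFileParser()
# Point the parser at a site's robots.txt (placeholder URL).
rp.set_url("http://www.example.com/robots.txt")
rp.read()  # fetches the file and feeds its lines to parse()

# can_fetch() applies the parsed rules to decide whether the given
# user agent may retrieve the URL.
print(rp.can_fetch("*", "http://www.example.com/some/page.html"))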