From d0aa2457135a0a362808b40437b768a532033f9d Mon Sep 17 00:00:00 2001
From: Raymond Hettinger
Date: Sat, 13 Mar 2004 20:31:33 +0000
Subject: SF patch #911431: robot.txt must be robots.txt

(Contributed by George Yoshida.)
---
 Lib/robotparser.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Lib/robotparser.py b/Lib/robotparser.py
index e2af545..6b23188 100644
--- a/Lib/robotparser.py
+++ b/Lib/robotparser.py
@@ -83,7 +83,7 @@ class RobotFileParser:
             self.entries.append(entry)
 
     def parse(self, lines):
-        """parse the input lines from a robot.txt file.
+        """parse the input lines from a robots.txt file.
            We allow that a user-agent: line is not preceded by
            one or more blank lines."""
         state = 0
@@ -148,7 +148,7 @@ class RobotFileParser:
 
     def can_fetch(self, useragent, url):
         """using the parsed robots.txt decide if useragent can fetch url"""
-        _debug("Checking robot.txt allowance for:\n user agent: %s\n url: %s" %
+        _debug("Checking robots.txt allowance for:\n user agent: %s\n url: %s" %
                (useragent, url))
         if self.disallow_all:
             return False
-- 
cgit v0.12
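
For context, a minimal usage sketch of the module this patch touches, assuming
Python 2's robotparser module as it stood at the time; the URL and user agent
below are illustrative, not taken from the patch:

    import robotparser

    rp = robotparser.RobotFileParser()
    rp.set_url("http://www.example.com/robots.txt")
    rp.read()   # fetch the file and feed its lines to parse()
    # can_fetch() is the method whose debug message this patch corrects:
    # it consults the parsed rules to decide whether the given user agent
    # may retrieve the given URL.
    print(rp.can_fetch("*", "http://www.example.com/private/page.html"))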