author    Skip Montanaro <skip@pobox.com>  2003-07-14 17:05:19 (GMT)
committer Skip Montanaro <skip@pobox.com>  2003-07-14 17:05:19 (GMT)
commit    34166684998ad922768127da19402e86f56d5c32 (patch)
tree      f1df305d8624d6746119ae2da78f852c9357bef1
parent    c52b0366af88c3e64a817e62c3b46ea45bd5d6c6 (diff)
correct link
 Doc/lib/librobotparser.tex | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Doc/lib/librobotparser.tex b/Doc/lib/librobotparser.tex
index 0008dde..4c0b933 100644
--- a/Doc/lib/librobotparser.tex
+++ b/Doc/lib/librobotparser.tex
@@ -15,7 +15,7 @@ This module provides a single class, \class{RobotFileParser}, which answers
questions about whether or not a particular user agent can fetch a URL on
the Web site that published the \file{robots.txt} file. For more details on
the structure of \file{robots.txt} files, see
-\url{http://info.webcrawler.com/mak/projects/robots/norobots.html}.
+\url{http://www.robotstxt.org/wc/norobots.html}.
\begin{classdesc}{RobotFileParser}{}
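
For context, a minimal usage sketch of the robotparser module this documentation describes (the module was later renamed urllib.robotparser in Python 3; the host and paths below are purely illustrative):

    import robotparser  # urllib.robotparser in later Python versions

    rp = robotparser.RobotFileParser()
    rp.set_url("http://www.example.com/robots.txt")  # illustrative URL
    rp.read()  # fetch and parse the robots.txt file
    # Ask whether any user agent ("*") may fetch the given URL
    allowed = rp.can_fetch("*", "http://www.example.com/private/page.html")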