summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorSkip Montanaro <skip@pobox.com>2003-07-14 17:04:50 (GMT)
committerSkip Montanaro <skip@pobox.com>2003-07-14 17:04:50 (GMT)
commit72331179608221081ca8e2af2b6fd68946ba9ed6 (patch)
treed84d8d91351f926f868a4b0d5f0cc2b6dc412704
parent6d06815b562e1540ee334d30953354210dfebd24 (diff)
downloadcpython-72331179608221081ca8e2af2b6fd68946ba9ed6.zip
cpython-72331179608221081ca8e2af2b6fd68946ba9ed6.tar.gz
cpython-72331179608221081ca8e2af2b6fd68946ba9ed6.tar.bz2
update norobots link
-rw-r--r--Doc/lib/librobotparser.tex2
1 file changed, 1 insertion, 1 deletion
diff --git a/Doc/lib/librobotparser.tex b/Doc/lib/librobotparser.tex
index 8bf1ae8..5eac528 100644
--- a/Doc/lib/librobotparser.tex
+++ b/Doc/lib/librobotparser.tex
@@ -15,7 +15,7 @@ This module provides a single class, \class{RobotFileParser}, which answers
questions about whether or not a particular user agent can fetch a URL on
the Web site that published the \file{robots.txt} file. For more details on
the structure of \file{robots.txt} files, see
-\url{http://info.webcrawler.com/mak/projects/robots/norobots.html}.
+\url{http://www.robotstxt.org/wc/norobots.html}.
\begin{classdesc}{RobotFileParser}{}