author     guido@google.com <guido@google.com>    2011-03-29 18:41:02 (GMT)
committer  guido@google.com <guido@google.com>    2011-03-29 18:41:02 (GMT)
commit     a119df91f33724f64e6bc1ecb484eeaa30ace014 (patch)
tree       be27f880b0ed6fdf79367fddc1c58019f07ca4ac /Lib
parent     b938c8c25316b69f1d5df2c7880a9f6b87e7c2fa (diff)
Issue 11662: Fix vulnerability in urllib/urllib2.
(This version is a cleaned-up backport of a fix by Senthil Kumaran.)
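In short, both redirect paths in urllib.request now refuse to follow a Location header whose scheme is not http, https or ftp, so a malicious server can no longer bounce a client to a file:// (or other locally handled) URL. The snippet below is a minimal standalone sketch of that guard, not part of the commit: the names check_redirect_target and ALLOWED_REDIRECT_SCHEMES are made up for illustration, and ValueError stands in for the urllib.error.HTTPError that the patch itself raises (which needs the response's headers and file object).

# Standalone sketch of the scheme whitelist added by this patch; the helper
# name is hypothetical and ValueError stands in for HTTPError.
from urllib.parse import urlparse

ALLOWED_REDIRECT_SCHEMES = ('http', 'https', 'ftp')

def check_redirect_target(newurl):
    """Refuse redirect targets whose scheme is not http, https or ftp."""
    scheme = urlparse(newurl).scheme
    if scheme not in ALLOWED_REDIRECT_SCHEMES:
        raise ValueError("Redirection to url '%s' is not allowed" % newurl)
    return newurl

check_redirect_target("https://example.com/b.html")      # accepted
check_redirect_target("ftp://example.com/pub/file.txt")  # accepted
try:
    check_redirect_target("file://guidocomputer.athome.com:/python/license")
except ValueError as exc:
    print(exc)   # rejected before any request to the target is issued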
Diffstat (limited to 'Lib')
-rw-r--r--    Lib/test/test_urllib.py     16
-rw-r--r--    Lib/test/test_urllib2.py    24
-rw-r--r--    Lib/urllib/request.py       27
3 files changed, 67 insertions, 0 deletions
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index 422831e..2b88521 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -2,6 +2,7 @@
 
 import urllib.parse
 import urllib.request
+import urllib.error
 import http.client
 import email.message
 import io
@@ -183,6 +184,21 @@ Content-Type: text/html; charset=iso-8859-1
         finally:
             self.unfakehttp()
 
+    def test_invalid_redirect(self):
+        # urlopen() should raise IOError for many error codes.
+        self.fakehttp(b'''HTTP/1.1 302 Found
+Date: Wed, 02 Jan 2008 03:03:54 GMT
+Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
+Location: file://guidocomputer.athome.com:/python/license
+Connection: close
+Content-Type: text/html; charset=iso-8859-1
+''')
+        try:
+            self.assertRaises(urllib.error.HTTPError, urlopen,
+                              "http://python.org/")
+        finally:
+            self.unfakehttp()
+
     def test_empty_socket(self):
         # urlopen() raises IOError if the underlying socket does not send any
         # data. (#1680230)
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 1704683..3fd7baa 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -9,6 +9,7 @@ import urllib.request
 # The proxy bypass method imported below has logic specific to the OSX
 # proxy config data structure but is testable on all platforms.
 from urllib.request import Request, OpenerDirector, _proxy_bypass_macosx_sysconf
+import urllib.error
 
 # XXX
 # Request
@@ -985,6 +986,29 @@ class HandlerTests(unittest.TestCase):
             self.assertEqual(count,
                              urllib.request.HTTPRedirectHandler.max_redirections)
 
+
+    def test_invalid_redirect(self):
+        from_url = "http://example.com/a.html"
+        valid_schemes = ['http','https','ftp']
+        invalid_schemes = ['file','imap','ldap']
+        schemeless_url = "example.com/b.html"
+        h = urllib.request.HTTPRedirectHandler()
+        o = h.parent = MockOpener()
+        req = Request(from_url)
+        req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
+
+        for scheme in invalid_schemes:
+            invalid_url = scheme + '://' + schemeless_url
+            self.assertRaises(urllib.error.HTTPError, h.http_error_302,
+                              req, MockFile(), 302, "Security Loophole",
+                              MockHeaders({"location": invalid_url}))
+
+        for scheme in valid_schemes:
+            valid_url = scheme + '://' + schemeless_url
+            h.http_error_302(req, MockFile(), 302, "That's fine",
+                             MockHeaders({"location": valid_url}))
+            self.assertEqual(o.req.get_full_url(), valid_url)
+
     def test_cookie_redirect(self):
         # cookies shouldn't leak into redirected requests
         from http.cookiejar import CookieJar
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index 087e9a6..220dfe4 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -528,6 +528,17 @@ class HTTPRedirectHandler(BaseHandler):
 
         # fix a possible malformed URL
         urlparts = urlparse(newurl)
+
+        # For security reasons we don't allow redirection to anything other
+        # than http, https or ftp.
+
+        if not urlparts.scheme in ('http', 'https', 'ftp'):
+            raise HTTPError(newurl, code,
+                            msg +
+                            " - Redirection to url '%s' is not allowed" %
+                            newurl,
+                            headers, fp)
+
         if not urlparts.path:
             urlparts = list(urlparts)
             urlparts[2] = "/"
@@ -1864,8 +1875,24 @@ class FancyURLopener(URLopener):
             return
         void = fp.read()
         fp.close()
+
         # In case the server sent a relative URL, join with original:
         newurl = urljoin(self.type + ":" + url, newurl)
+
+        urlparts = urlparse(newurl)
+
+        # For security reasons, we don't allow redirection to anything other
+        # than http, https and ftp.
+
+        # We are using newer HTTPError with older redirect_internal method
+        # This older method will get deprecated in 3.3
+
+        if not urlparts.scheme in ('http', 'https', 'ftp'):
+            raise HTTPError(newurl, errcode,
+                            errmsg +
+                            " Redirection to url '%s' is not allowed." % newurl,
+                            headers, fp)
+
         return self.open(newurl)
 
     def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
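For reference, the patched behaviour can also be observed outside the test suite with a short, self-contained snippet such as the sketch below (not part of the commit). io.BytesIO and a plain dict stand in for the MockFile() and MockHeaders() helpers used in test_urllib2.py above, and the file:// URL is the one from the new test_urllib test; with the fix applied, http_error_302 raises urllib.error.HTTPError before any request to the redirect target is made.

import io
import urllib.error
import urllib.request

handler = urllib.request.HTTPRedirectHandler()
req = urllib.request.Request("http://example.com/a.html")
# Plain substitutes for the test suite's MockFile()/MockHeaders() helpers.
fake_body = io.BytesIO(b"")
fake_headers = {"location": "file://guidocomputer.athome.com:/python/license"}

try:
    handler.http_error_302(req, fake_body, 302, "Found", fake_headers)
except urllib.error.HTTPError as exc:
    # The disallowed file:// scheme is refused before the redirect is
    # followed; no parent opener is consulted and no request goes out.
    print("redirect refused:", exc)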