Diffstat (limited to 'Lib')
-rwxr-xr-x  Lib/cgi.py                 85
-rw-r--r--  Lib/test/test_cgi.py       22
-rw-r--r--  Lib/test/test_urlparse.py  22
-rw-r--r--  Lib/urlparse.py            88
4 files changed, 127 insertions, 90 deletions
diff --git a/Lib/cgi.py b/Lib/cgi.py
index dd11389..373ba51 100755
--- a/Lib/cgi.py
+++ b/Lib/cgi.py
@@ -39,7 +39,9 @@ import sys
import os
import urllib
import UserDict
-from warnings import filterwarnings, catch_warnings
+import urlparse
+
+from warnings import filterwarnings, catch_warnings, warn
with catch_warnings():
    if sys.py3kwarning:
        filterwarnings("ignore", ".*mimetools has been removed",
@@ -173,72 +175,21 @@ def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
    return parse_qs(qs, keep_blank_values, strict_parsing)
-def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
- """Parse a query given as a string argument.
-
- Arguments:
-
- qs: URL-encoded query string to be parsed
+# The query-string parsing functions now live in urlparse; these wrappers
+# are kept here for backward compatibility.
-        keep_blank_values: flag indicating whether blank values in
-            URL encoded queries should be treated as blank strings.
-            A true value indicates that blanks should be retained as
-            blank strings. The default false value indicates that
-            blank values are to be ignored and treated as if they were
-            not included.
+def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
+    """Parse a query given as a string argument."""
+    warn("cgi.parse_qs is deprecated, use urlparse.parse_qs instead",
+         PendingDeprecationWarning)
+    return urlparse.parse_qs(qs, keep_blank_values, strict_parsing)
-        strict_parsing: flag indicating what to do with parsing errors.
-            If false (the default), errors are silently ignored.
-            If true, errors raise a ValueError exception.
-    """
-    dict = {}
-    for name, value in parse_qsl(qs, keep_blank_values, strict_parsing):
-        if name in dict:
-            dict[name].append(value)
-        else:
-            dict[name] = [value]
-    return dict
def parse_qsl(qs, keep_blank_values=0, strict_parsing=0):
- """Parse a query given as a string argument.
-
- Arguments:
-
- qs: URL-encoded query string to be parsed
-
- keep_blank_values: flag indicating whether blank values in
- URL encoded queries should be treated as blank strings. A
- true value indicates that blanks should be retained as blank
- strings. The default false value indicates that blank values
- are to be ignored and treated as if they were not included.
-
- strict_parsing: flag indicating what to do with parsing errors. If
- false (the default), errors are silently ignored. If true,
- errors raise a ValueError exception.
-
- Returns a list, as G-d intended.
- """
-    pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
-    r = []
-    for name_value in pairs:
-        if not name_value and not strict_parsing:
-            continue
-        nv = name_value.split('=', 1)
-        if len(nv) != 2:
-            if strict_parsing:
-                raise ValueError, "bad query field: %r" % (name_value,)
-            # Handle case of a control-name with no equal sign
-            if keep_blank_values:
-                nv.append('')
-            else:
-                continue
-        if len(nv[1]) or keep_blank_values:
-            name = urllib.unquote(nv[0].replace('+', ' '))
-            value = urllib.unquote(nv[1].replace('+', ' '))
-            r.append((name, value))
-
-    return r
-
+ """Parse a query given as a string argument."""
+ warn("cgi.parse_qsl is deprecated, use urlparse.parse_qsl instead",
+ PendingDeprecationWarning)
+ return urlparse.parse_qs(qs, keep_blank_values, strict_parsing)
def parse_multipart(fp, pdict):
"""Parse multipart input.
@@ -645,8 +596,8 @@ class FieldStorage:
        if self.qs_on_post:
            qs += '&' + self.qs_on_post
        self.list = list = []
-        for key, value in parse_qsl(qs, self.keep_blank_values,
-                                    self.strict_parsing):
+        for key, value in urlparse.parse_qsl(qs, self.keep_blank_values,
+                                             self.strict_parsing):
            list.append(MiniFieldStorage(key, value))
        self.skip_lines()
@@ -659,8 +610,8 @@ class FieldStorage:
            raise ValueError, 'Invalid boundary in multipart form: %r' % (ib,)
        self.list = []
        if self.qs_on_post:
-            for key, value in parse_qsl(self.qs_on_post, self.keep_blank_values,
-                                        self.strict_parsing):
+            for key, value in urlparse.parse_qsl(self.qs_on_post,
+                                self.keep_blank_values, self.strict_parsing):
+                self.list.append(MiniFieldStorage(key, value))
FieldStorageClass = None
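
With this patch applied, the old cgi helpers keep working but emit a PendingDeprecationWarning and delegate to urlparse. A minimal sketch of what a caller now sees (assumes a Python 2.6-era interpreter carrying this change; the query string is only illustrative):

    # The shim warns and defers to urlparse, so both paths return the same dict.
    import warnings
    import cgi
    import urlparse

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")             # make the pending warning visible
        via_cgi = cgi.parse_qs("a=1&a=2")

    print via_cgi == urlparse.parse_qs("a=1&a=2")   # True: identical result
    print caught[0].category.__name__               # PendingDeprecationWarning
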
diff --git a/Lib/test/test_cgi.py b/Lib/test/test_cgi.py
index 042e507..79bca4e 100644
--- a/Lib/test/test_cgi.py
+++ b/Lib/test/test_cgi.py
@@ -55,23 +55,6 @@ def do_test(buf, method):
    except StandardError, err:
        return ComparableException(err)
-# A list of test cases. Each test case is a a two-tuple that contains
-# a string with the query and a dictionary with the expected result.
-
-parse_qsl_test_cases = [
-    ("", []),
-    ("&", []),
-    ("&&", []),
-    ("=", [('', '')]),
-    ("=a", [('', 'a')]),
-    ("a", [('a', '')]),
-    ("a=", [('a', '')]),
-    ("a=", [('a', '')]),
-    ("&a=b", [('a', 'b')]),
-    ("a=a+b&b=b+c", [('a', 'a b'), ('b', 'b c')]),
-    ("a=1&a=2", [('a', '1'), ('a', '2')]),
-]
-
parse_strict_test_cases = [
("", ValueError("bad query field: ''")),
("&", ValueError("bad query field: ''")),
@@ -143,11 +126,6 @@ def gen_result(data, environ):
class CgiTests(unittest.TestCase):
-    def test_qsl(self):
-        for orig, expect in parse_qsl_test_cases:
-            result = cgi.parse_qsl(orig, keep_blank_values=True)
-            self.assertEqual(result, expect, "Error parsing %s" % repr(orig))
-
    def test_strict(self):
        for orig, expect in parse_strict_test_cases:
            # Test basic parsing
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
index e7d9e5a..fcd1989 100644
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -8,6 +8,23 @@ RFC1808_BASE = "http://a/b/c/d;p?q#f"
RFC2396_BASE = "http://a/b/c/d;p?q"
RFC3986_BASE = "http://a/b/c/d;p?q"
+# A list of test cases. Each test case is a two-tuple that contains
+# a string with the query and a list with the expected result.
+
+parse_qsl_test_cases = [
+    ("", []),
+    ("&", []),
+    ("&&", []),
+    ("=", [('', '')]),
+    ("=a", [('', 'a')]),
+    ("a", [('a', '')]),
+    ("a=", [('a', '')]),
+    ("a=", [('a', '')]),
+    ("&a=b", [('a', 'b')]),
+    ("a=a+b&b=b+c", [('a', 'a b'), ('b', 'b c')]),
+    ("a=1&a=2", [('a', '1'), ('a', '2')]),
+]
+
class UrlParseTestCase(unittest.TestCase):
    def checkRoundtrips(self, url, parsed, split):
@@ -61,6 +78,11 @@ class UrlParseTestCase(unittest.TestCase):
        self.assertEqual(result3.hostname, result.hostname)
        self.assertEqual(result3.port, result.port)
+    def test_qsl(self):
+        for orig, expect in parse_qsl_test_cases:
+            result = urlparse.parse_qsl(orig, keep_blank_values=True)
+            self.assertEqual(result, expect, "Error parsing %s" % repr(orig))
+
    def test_roundtrips(self):
        testcases = [
            ('file:///tmp/junk.txt',
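
The moved test cases above pin down parse_qsl's behaviour. As a quick sketch of what they assert, run against the patched urlparse (the blank-value queries below are illustrative, not taken from the test table):

    import urlparse

    # '+' decodes to a space; repeated keys keep their order.
    print urlparse.parse_qsl("a=a+b&b=b+c")    # [('a', 'a b'), ('b', 'b c')]
    print urlparse.parse_qsl("a=1&a=2")        # [('a', '1'), ('a', '2')]

    # Blank values are dropped unless keep_blank_values is true.
    print urlparse.parse_qsl("a=&b=1")                          # [('b', '1')]
    print urlparse.parse_qsl("a=&b=1", keep_blank_values=True)  # [('a', ''), ('b', '1')]
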
diff --git a/Lib/urlparse.py b/Lib/urlparse.py
index 1914304..c56d883 100644
--- a/Lib/urlparse.py
+++ b/Lib/urlparse.py
@@ -5,7 +5,7 @@ UC Irvine, June 1995.
"""
__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
- "urlsplit", "urlunsplit"]
+ "urlsplit", "urlunsplit", "parse_qs", "parse_qsl"]
# A classification of schemes ('' means apply by default)
uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap',
@@ -267,6 +267,92 @@ def urldefrag(url):
    else:
        return url, ''
+# unquote method for parse_qs and parse_qsl
+# Cannot use directly from urllib as it would create a circular import:
+# urllib uses urlparse methods (e.g. urljoin).
+
+_hextochr = dict(('%02x' % i, chr(i)) for i in range(256))
+_hextochr.update(('%02X' % i, chr(i)) for i in range(256))
+
+def unquote(s):
+    """unquote('abc%20def') -> 'abc def'."""
+    res = s.split('%')
+    for i in xrange(1, len(res)):
+        item = res[i]
+        try:
+            res[i] = _hextochr[item[:2]] + item[2:]
+        except KeyError:
+            res[i] = '%' + item
+        except UnicodeDecodeError:
+            res[i] = unichr(int(item[:2], 16)) + item[2:]
+    return "".join(res)
+
+def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
+    """Parse a query given as a string argument.
+
+        Arguments:
+
+        qs: URL-encoded query string to be parsed
+
+        keep_blank_values: flag indicating whether blank values in
+            URL encoded queries should be treated as blank strings.
+            A true value indicates that blanks should be retained as
+            blank strings. The default false value indicates that
+            blank values are to be ignored and treated as if they were
+            not included.
+
+        strict_parsing: flag indicating what to do with parsing errors.
+            If false (the default), errors are silently ignored.
+            If true, errors raise a ValueError exception.
+    """
+    dict = {}
+    for name, value in parse_qsl(qs, keep_blank_values, strict_parsing):
+        if name in dict:
+            dict[name].append(value)
+        else:
+            dict[name] = [value]
+    return dict
+
+def parse_qsl(qs, keep_blank_values=0, strict_parsing=0):
+    """Parse a query given as a string argument.
+
+    Arguments:
+
+    qs: URL-encoded query string to be parsed
+
+    keep_blank_values: flag indicating whether blank values in
+        URL encoded queries should be treated as blank strings. A
+        true value indicates that blanks should be retained as blank
+        strings. The default false value indicates that blank values
+        are to be ignored and treated as if they were not included.
+
+    strict_parsing: flag indicating what to do with parsing errors. If
+        false (the default), errors are silently ignored. If true,
+        errors raise a ValueError exception.
+
+    Returns a list, as G-d intended.
+    """
+    pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
+    r = []
+    for name_value in pairs:
+        if not name_value and not strict_parsing:
+            continue
+        nv = name_value.split('=', 1)
+        if len(nv) != 2:
+            if strict_parsing:
+                raise ValueError, "bad query field: %r" % (name_value,)
+            # Handle case of a control-name with no equal sign
+            if keep_blank_values:
+                nv.append('')
+            else:
+                continue
+        if len(nv[1]) or keep_blank_values:
+            name = unquote(nv[0].replace('+', ' '))
+            value = unquote(nv[1].replace('+', ' '))
+            r.append((name, value))
+
+    return r
+
test_input = """
http://a/b/c/d
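
For completeness, a short sketch of the relocated functions in their new home (the %20 query exercises the module-local unquote copy; both examples assume the patched urlparse):

    import urlparse

    # parse_qs folds repeated fields into lists and percent-decodes values.
    print urlparse.parse_qs("a=1&a=2&b=%20x")   # {'a': ['1', '2'], 'b': [' x']}

    # strict_parsing turns malformed fields into a ValueError instead of skipping them.
    try:
        urlparse.parse_qs("a", strict_parsing=True)
    except ValueError, err:
        print err                               # bad query field: 'a'
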