commit    8956271f11a135e33b0ba99bac48118e337304f6
tree      e418df28d919de55f50b4d8dc45f1af880b55da5
parent    02a004ea1de27a456cc1471b4d13a9ba8a0c7ce6
author    Antoine Pitrou <solipsis@pitrou.net>  2010-05-07 17:04:02 (GMT)
committer Antoine Pitrou <solipsis@pitrou.net>  2010-05-07 17:04:02 (GMT)
Merged revisions 80926 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/trunk
........
r80926 | antoine.pitrou | 2010-05-07 18:50:34 +0200 (Fri, 07 May 2010) | 5 lines
Issue #8571: Fix an internal error when compressing or decompressing a
chunk larger than 1GB with the zlib module's compressor and decompressor
objects.
........
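For context, the failure mode can be sketched as a short reproducer. This is a hedged reconstruction from the issue title, not part of the commit; it needs a machine with roughly 3GB of free RAM, and on an unpatched build the compress() call raised an internal zlib error instead of succeeding:

# Hedged reproducer sketch for issue #8571 (not part of the commit).
# Before the fix, zlibmodule.c tracked its output buffer length in a
# C `int`, so a single chunk larger than 1GB made the buffer-growing
# logic overflow and the call failed with an internal error.
import zlib

chunk = b'x' * ((1 << 30) + 1)   # just over 1GB of input
c = zlib.compressobj(1)          # level 1: fastest, smallest working set
compressed = c.compress(chunk) + c.flush()
print(len(compressed))           # succeeds once the length is Py_ssize_t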
 Lib/test/test_zlib.py |   63
 Misc/NEWS             |    4
 Modules/zlibmodule.c  |    7
 3 files changed, 68 insertions(+), 6 deletions(-)
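A note on the test machinery in the patch below: precisionbigmemtest(size=..., memuse=...) comes from test.support and passes `size` into the decorated test method, running the test at full size only when the memory limit declared to the test suite (regrtest's -M option) allows roughly size * memuse bytes. The memuse values chosen here (3 for compression, 2 for decompression) are the patch's peak-footprint estimates, counting input, output, and overallocation. A minimal sketch of the pattern (the class and method names are illustrative, not from the commit):

# Minimal sketch of the bigmem test pattern this patch relies on.
# `BigBufferRoundTrip` and `test_round_trip` are illustrative names.
import unittest
import zlib
from test.support import precisionbigmemtest, _1G

class BigBufferRoundTrip(unittest.TestCase):
    # Run at full size only when ~3 * (1GB + 1MB) of memory is allowed;
    # the decorator then calls the method with size = _1G + 1024 * 1024.
    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=3)
    def test_round_trip(self, size):
        data = b'x' * size
        compressed = zlib.compress(data, 1)
        self.assertEqual(zlib.decompress(compressed), data)

if __name__ == '__main__':
    unittest.main()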
diff --git a/Lib/test/test_zlib.py b/Lib/test/test_zlib.py
index d639dd7..9b7c135 100644
--- a/Lib/test/test_zlib.py
+++ b/Lib/test/test_zlib.py
@@ -2,6 +2,7 @@ import unittest
 from test import support
 import binascii
 import random
+from test.support import precisionbigmemtest, _1G
 
 zlib = support.import_module('zlib')
 
@@ -93,8 +94,39 @@ class ExceptionTestCase(unittest.TestCase):
         self.assertRaises(ValueError, zlib.decompressobj().flush, -1)
 
 
-class CompressTestCase(unittest.TestCase):
+class BaseCompressTestCase(object):
+    def check_big_compress_buffer(self, size, compress_func):
+        _1M = 1024 * 1024
+        fmt = "%%0%dx" % (2 * _1M)
+        # Generate 10MB worth of random, and expand it by repeating it.
+        # The assumption is that zlib's memory is not big enough to exploit
+        # such spread out redundancy.
+        data = b''.join([random.getrandbits(8 * _1M).to_bytes(_1M, 'little')
+                        for i in range(10)])
+        data = data * (size // len(data) + 1)
+        try:
+            compress_func(data)
+        finally:
+            # Release memory
+            data = None
+
+    def check_big_decompress_buffer(self, size, decompress_func):
+        data = b'x' * size
+        try:
+            compressed = zlib.compress(data, 1)
+        finally:
+            # Release memory
+            data = None
+        data = decompress_func(compressed)
+        # Sanity check
+        try:
+            self.assertEqual(len(data), size)
+            self.assertEqual(len(data.strip(b'x')), 0)
+        finally:
+            data = None
+
+
+class CompressTestCase(BaseCompressTestCase, unittest.TestCase):
     # Test compression in one go (whole message compression)
     def test_speech(self):
         x = zlib.compress(HAMLET_SCENE)
@@ -108,9 +140,19 @@ class CompressTestCase(unittest.TestCase):
         for ob in x, bytearray(x):
             self.assertEqual(zlib.decompress(ob), data)
 
+    # Memory use of the following functions takes into account overallocation
+
+    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=3)
+    def test_big_compress_buffer(self, size):
+        compress = lambda s: zlib.compress(s, 1)
+        self.check_big_compress_buffer(size, compress)
+
+    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=2)
+    def test_big_decompress_buffer(self, size):
+        self.check_big_decompress_buffer(size, zlib.decompress)
 
-class CompressObjectTestCase(unittest.TestCase):
+
+class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):
     # Test compression object
     def test_pair(self):
         # straightforward compress/decompress objects
@@ -399,6 +441,21 @@ class CompressObjectTestCase(unittest.TestCase):
         d.flush()
         self.assertRaises(ValueError, d.copy)
 
+    # Memory use of the following functions takes into account overallocation
+
+    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=3)
+    def test_big_compress_buffer(self, size):
+        c = zlib.compressobj(1)
+        compress = lambda s: c.compress(s) + c.flush()
+        self.check_big_compress_buffer(size, compress)
+
+    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=2)
+    def test_big_decompress_buffer(self, size):
+        d = zlib.decompressobj()
+        decompress = lambda s: d.decompress(s) + d.flush()
+        self.check_big_decompress_buffer(size, decompress)
+
+
 def genblock(seed, length, step=1024, generator=random):
     """length-byte stream of random data from a seed (in step-byte blocks)."""
     if seed is not None:
diff --git a/Misc/NEWS b/Misc/NEWS
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -348,6 +348,10 @@ C-API
 Library
 -------
 
+- Issue #8571: Fix an internal error when compressing or decompressing a
+  chunk larger than 1GB with the zlib module's compressor and decompressor
+  objects.
+
 - Issue #8603: Support bytes environmental variables on Unix: Add
   os.environb mapping and os.getenvb() function. os.unsetenv() encodes str
   argument to the file system encoding with the surrogateescape error
   handler (instead of
diff --git a/Modules/zlibmodule.c b/Modules/zlibmodule.c
index 2f2e214..353d11a 100644
--- a/Modules/zlibmodule.c
+++ b/Modules/zlibmodule.c
@@ -396,7 +396,8 @@ PyDoc_STRVAR(comp_compress__doc__,
 static PyObject *
 PyZlib_objcompress(compobject *self, PyObject *args)
 {
-    int err, inplen, length = DEFAULTALLOC;
+    int err, inplen;
+    Py_ssize_t length = DEFAULTALLOC;
     PyObject *RetVal;
     Py_buffer pinput;
     Byte *input;
@@ -477,8 +478,8 @@ PyDoc_STRVAR(decomp_decompress__doc__,
 static PyObject *
 PyZlib_objdecompress(compobject *self, PyObject *args)
 {
-    int err, inplen, old_length, length = DEFAULTALLOC;
-    int max_length = 0;
+    int err, inplen, max_length = 0;
+    Py_ssize_t old_length, length = DEFAULTALLOC;
     PyObject *RetVal;
     Py_buffer pinput;
     Byte *input;
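The zlibmodule.c hunks are the heart of the fix. The module grows its output buffer by doubling, and the buffer length was declared `int`: once the buffer reaches 1GB, the next doubling exceeds INT_MAX and wraps negative, so the resize request fails and the call errors out. Switching `length` (and `old_length`) to Py_ssize_t keeps the arithmetic in a pointer-sized type on 64-bit builds. A rough Python model of the failing arithmetic (the 16KB starting size stands in for DEFAULTALLOC and is an assumption):

# Rough model of the 32-bit overflow fixed by this commit; the 16KB
# starting size is an illustrative stand-in for DEFAULTALLOC.
INT_MAX = 2**31 - 1

def grow_to(needed, length=16 * 1024):
    """Double `length` until it covers `needed`, wrapping like a C int."""
    while length < needed:
        length <<= 1
        if length > INT_MAX:
            # A C `int` wraps here, so the pre-patch code handed a
            # negative size to the buffer resize and failed.
            return length - 2**32
    return length

print(grow_to(2**30 + 1))   # -> -2147483648: the pre-patch failure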