Diffstat (limited to 'Lib/test/test_gzip.py')
 -rw-r--r--  Lib/test/test_gzip.py | 40
 1 file changed, 36 insertions(+), 4 deletions(-)
diff --git a/Lib/test/test_gzip.py b/Lib/test/test_gzip.py
index b417044..3c51673 100644
--- a/Lib/test/test_gzip.py
+++ b/Lib/test/test_gzip.py
@@ -3,9 +3,11 @@

 import unittest
 from test import support
+from test.support import bigmemtest, _4G
 import os
 import io
 import struct
+import array
 gzip = support.import_module('gzip')

 data1 = b"""  int length=DEFAULTALLOC, err = Z_OK;
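
The two new imports serve the tests added further down in this patch: bigmemtest and _4G gate the over-4-GiB read test, and array supplies a bytes-like type for the new write test. A minimal sketch (not part of the patch) of why array.array qualifies as bytes-like:

    import array

    a = array.array('I', [1, 2, 3])
    # array.array exposes the buffer protocol, so it is accepted anywhere a
    # bytes-like object is expected (memoryview, GzipFile.write, ...).
    assert bytes(memoryview(a)) == a.tobytes()
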
@@ -77,15 +79,18 @@ class TestGzip(BaseTest):
     def test_write_bytearray(self):
         self.write_and_read_back(bytearray(data1 * 50))

+    def test_write_array(self):
+        self.write_and_read_back(array.array('I', data1 * 40))
+
     def test_write_incompatible_type(self):
         # Test that non-bytes-like types raise TypeError.
         # Issue #21560: attempts to write incompatible types
         # should not affect the state of the fileobject
         with gzip.GzipFile(self.filename, 'wb') as f:
             with self.assertRaises(TypeError):
-                f.write('a')
+                f.write('')
             with self.assertRaises(TypeError):
-                f.write([1])
+                f.write([])
             f.write(data1)
         with gzip.GzipFile(self.filename, 'rb') as f:
             self.assertEqual(f.read(), data1)
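
The new test_write_array relies on GzipFile.write() accepting arbitrary bytes-like objects, while str and list still raise TypeError without disturbing the stream (Issue #21560). A standalone sketch of that behaviour, not part of the patch:

    import array
    import gzip
    import io

    payload = array.array('I', [1, 2, 3, 4])    # bytes-like, like bytearray
    buf = io.BytesIO()

    with gzip.GzipFile(fileobj=buf, mode='wb') as f:
        f.write(payload)                        # accepted: buffer protocol
        try:
            f.write('text')                     # str is rejected ...
        except TypeError:
            pass                                # ... and the file stays usable

    with gzip.GzipFile(fileobj=io.BytesIO(buf.getvalue()), mode='rb') as f:
        assert f.read() == payload.tobytes()
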
@@ -112,6 +117,14 @@ class TestGzip(BaseTest):
                 self.assertEqual(f.tell(), nread)
         self.assertEqual(b''.join(blocks), data1 * 50)

+    @bigmemtest(size=_4G, memuse=1)
+    def test_read_large(self, size):
+        # Read chunk size over UINT_MAX should be supported, despite zlib's
+        # limitation per low-level call
+        compressed = gzip.compress(data1, compresslevel=1)
+        f = gzip.GzipFile(fileobj=io.BytesIO(compressed), mode='rb')
+        self.assertEqual(f.read(size), data1)
+
     def test_io_on_closed_object(self):
         # Test that I/O operations on closed GzipFile objects raise a
         # ValueError, just like the corresponding functions on file objects.
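
test_read_large requests more than UINT_MAX bytes in a single read(), which zlib cannot honour in one low-level call, so GzipFile has to split the request internally; the @bigmemtest(size=_4G, memuse=1) guard is needed because fulfilling such a read really does take several gigabytes. A scaled-down sketch of the oversized-read behaviour, with a toy size in place of _4G:

    import gzip
    import io

    data = b'x' * 1000
    compressed = gzip.compress(data, compresslevel=1)

    with gzip.GzipFile(fileobj=io.BytesIO(compressed), mode='rb') as f:
        # Asking for far more than is available is fine: read() simply
        # returns everything up to end-of-stream.
        assert f.read(10 * len(data)) == data
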
@@ -119,7 +132,10 @@
         # Write to a file, open it for reading, then close it.
         self.test_write()
         f = gzip.GzipFile(self.filename, 'r')
+        fileobj = f.fileobj
+        self.assertFalse(fileobj.closed)
         f.close()
+        self.assertTrue(fileobj.closed)
         with self.assertRaises(ValueError):
             f.read(1)
         with self.assertRaises(ValueError):
@@ -128,7 +144,10 @@
             f.tell()
         # Open the file for writing, then close it.
         f = gzip.GzipFile(self.filename, 'w')
+        fileobj = f.fileobj
+        self.assertFalse(fileobj.closed)
         f.close()
+        self.assertTrue(fileobj.closed)
         with self.assertRaises(ValueError):
             f.write(b'')
         with self.assertRaises(ValueError):
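
The assertions added in the two hunks above pin down ownership of the underlying file: constructed from a filename, GzipFile opens the file itself (exposed as the fileobj attribute) and close() must close it. A minimal sketch, assuming current CPython behaviour, including the contrasting caller-supplied fileobj case, which close() deliberately leaves open:

    import gzip
    import io
    import os
    import tempfile

    path = os.path.join(tempfile.mkdtemp(), 'sample.gz')

    # Filename constructor: GzipFile owns the file it opened.
    g = gzip.GzipFile(path, 'wb')
    inner = g.fileobj
    g.write(b'payload')
    g.close()
    assert inner.closed

    # Caller-supplied fileobj: GzipFile.close() leaves it open.
    buf = io.BytesIO()
    g = gzip.GzipFile(fileobj=buf, mode='wb')
    g.write(b'payload')
    g.close()
    assert not buf.closed
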
@@ -267,9 +286,10 @@
         with gzip.GzipFile(self.filename, 'w', mtime = mtime) as fWrite:
             fWrite.write(data1)
         with gzip.GzipFile(self.filename) as fRead:
+            self.assertTrue(hasattr(fRead, 'mtime'))
+            self.assertIsNone(fRead.mtime)
             dataRead = fRead.read()
             self.assertEqual(dataRead, data1)
-            self.assertTrue(hasattr(fRead, 'mtime'))
             self.assertEqual(fRead.mtime, mtime)

     def test_metadata(self):
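
Moving the hasattr check ahead of the read, and adding assertIsNone, documents when mtime becomes meaningful: it is None until the gzip header has been parsed (which now happens lazily, on the first read), after which it holds the timestamp stored in the header. A standalone sketch, not part of the patch:

    import gzip
    import io

    mtime = 123456789
    buf = io.BytesIO()
    with gzip.GzipFile(fileobj=buf, mode='wb', mtime=mtime) as f:
        f.write(b'payload')

    with gzip.GzipFile(fileobj=io.BytesIO(buf.getvalue()), mode='rb') as f:
        assert f.mtime is None          # header not read yet
        assert f.read() == b'payload'
        assert f.mtime == mtime         # taken from the header
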
@@ -412,6 +432,18 @@
         with gzip.GzipFile(str_filename, "rb") as f:
             self.assertEqual(f.read(), data1 * 50)

+    def test_decompress_limited(self):
+        """Decompressed data buffering should be limited"""
+        bomb = gzip.compress(bytes(int(2e6)), compresslevel=9)
+        self.assertLess(len(bomb), io.DEFAULT_BUFFER_SIZE)
+
+        bomb = io.BytesIO(bomb)
+        decomp = gzip.GzipFile(fileobj=bomb)
+        self.assertEqual(bytes(1), decomp.read(1))
+        max_decomp = 1 + io.DEFAULT_BUFFER_SIZE
+        self.assertLessEqual(decomp._buffer.raw.tell(), max_decomp,
+                             "Excessive amount of data was decompressed")
+
     # Testing compress/decompress shortcut functions

     def test_compress(self):
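
test_decompress_limited guards against decompression bombs: two megabytes of zeros compress to only a couple of kilobytes, and reading a single byte must not inflate the whole stream into memory. The decomp._buffer.raw.tell() probe is private and reflects the new internal layout (an io.BufferedReader over a raw decompressing reader). Restricted to public APIs, the size asymmetry looks like this (a sketch, not part of the patch):

    import gzip
    import io

    bomb = gzip.compress(bytes(2 * 10**6), compresslevel=9)   # 2 MB of zeros
    assert len(bomb) < io.DEFAULT_BUFFER_SIZE                  # only a few KiB

    f = gzip.GzipFile(fileobj=io.BytesIO(bomb))
    assert f.read(1) == b'\x00'   # one byte out; the rest stays compressed
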
@@ -459,7 +491,7 @@
         with gzip.open(self.filename, "wb") as f:
             f.write(data1)
         with gzip.open(self.filename, "rb") as f:
-            f.fileobj.prepend()
+            f._buffer.raw._fp.prepend()

 class TestOpen(BaseTest):
     def test_binary_modes(self):