@@ -122,11 +122,17 @@ def test_decompressobj_badflush(self):
122122 self .assertRaises (ValueError , zlib .decompressobj ().flush , 0 )
123123 self .assertRaises (ValueError , zlib .decompressobj ().flush , - 1 )
124124
125+ @support .cpython_only
126+ def test_overflow (self ):
127+ with self .assertRaisesRegex (OverflowError , 'int too large' ):
128+ zlib .decompress (b'' , 15 , sys .maxsize + 1 )
129+ with self .assertRaisesRegex (OverflowError , 'int too large' ):
130+ zlib .decompressobj ().flush (sys .maxsize + 1 )
131+
125132
126133class BaseCompressTestCase (object ):
127134 def check_big_compress_buffer (self , size , compress_func ):
128135 _1M = 1024 * 1024
129- fmt = "%%0%dx" % (2 * _1M )
130136 # Generate 10MB worth of random, and expand it by repeating it.
131137 # The assumption is that zlib's memory is not big enough to exploit
132138 # such spread out redundancy.
@@ -196,6 +202,18 @@ def test_length_overflow(self, size):
196202 finally :
197203 data = None
198204
@bigmemtest(size=_4G, memuse=1)
def test_large_bufsize(self, size):
    """decompress() must accept a bufsize above the internal UINT_MAX limit."""
    original = HAMLET_SCENE * 10
    deflated = zlib.compress(original, 1)
    inflated = zlib.decompress(deflated, 15, size)
    self.assertEqual(inflated, original)
def test_custom_bufsize(self):
    """The bufsize argument may be any object convertible to an integer."""
    original = HAMLET_SCENE * 10
    deflated = zlib.compress(original, 1)
    self.assertEqual(zlib.decompress(deflated, 15, CustomInt()), original)
199217
200218class CompressObjectTestCase (BaseCompressTestCase , unittest .TestCase ):
201219 # Test compression object
@@ -364,6 +382,21 @@ def test_maxlenmisc(self):
364382 self .assertRaises (ValueError , dco .decompress , b"" , - 1 )
365383 self .assertEqual (b'' , dco .unconsumed_tail )
366384
def test_maxlen_large(self):
    """max_length values up to sys.maxsize are accepted, even though zlib
    itself only expresses sizes with an unsigned int internally."""
    original = HAMLET_SCENE * 10
    # The payload must exceed the default buffer so max_length matters.
    self.assertGreater(len(original), zlib.DEF_BUF_SIZE)
    decomp = zlib.decompressobj()
    produced = decomp.decompress(zlib.compress(original, 1), sys.maxsize)
    self.assertEqual(produced, original)
def test_maxlen_custom(self):
    """max_length may be any object convertible to an integer (here 100),
    truncating the output accordingly."""
    original = HAMLET_SCENE * 10
    decomp = zlib.decompressobj()
    produced = decomp.decompress(zlib.compress(original, 1), CustomInt())
    self.assertEqual(produced, original[:100])
367400 def test_clear_unconsumed_tail (self ):
368401 # Issue #12050: calling decompress() without providing max_length
369402 # should clear the unconsumed_tail attribute.
@@ -537,6 +570,22 @@ def test_flush_with_freed_input(self):
537570 data = zlib .compress (input2 )
538571 self .assertEqual (dco .flush (), input1 [1 :])
539572
@bigmemtest(size=_4G, memuse=1)
def test_flush_large_length(self, size):
    """flush(length) must accept values above the internal UINT_MAX limit."""
    expected = HAMLET_SCENE * 10
    decomp = zlib.decompressobj()
    # Consume only one byte of output first so flush() has work left to do.
    decomp.decompress(zlib.compress(expected, 1), 1)
    self.assertEqual(decomp.flush(size), expected[1:])
def test_flush_custom_length(self):
    """flush() length may be any object convertible to an integer."""
    expected = HAMLET_SCENE * 10
    decomp = zlib.decompressobj()
    # Consume only one byte of output first so flush() has work left to do.
    decomp.decompress(zlib.compress(expected, 1), 1)
    self.assertEqual(decomp.flush(CustomInt()), expected[1:])
540589 @requires_Compress_copy
541590 def test_compresscopy (self ):
542591 # Test copying a compression object
@@ -725,5 +774,10 @@ def choose_lines(source, number, seed=None, generator=random):
725774"""
726775
727776
class CustomInt:
    """Helper whose __int__ yields 100.

    Used by the size/length tests above to verify that size-like arguments
    (bufsize, max_length, flush length) accept arbitrary objects that are
    convertible to an integer, not just real ints.
    """

    def __int__(self):
        return 100
# Allow running this test file directly: execute the full unittest suite.
if __name__ == "__main__":
    unittest.main()
0 commit comments