# Tests for the zlib extension module: checksums (crc32/adler32) and
# one-shot plus incremental compression/decompression.
import unittest
from test import support
import binascii
import random
import sys
from test.support import bigmemtest, _1G, _4G

# Skip the whole test module if the interpreter was built without zlib.
zlib = support.import_module('zlib')

10

11 12 13
class VersionTestCase(unittest.TestCase):
    """Check that the zlib library linked at runtime is compatible."""

    def test_library_version(self):
        # Test that the major version of the actual library in use matches the
        # major version that we were compiled against. We can't guarantee that
        # the minor versions will match (even on the machine on which the module
        # was compiled), and the API is stable between minor versions, so
        # testing only the major versions avoids spurious failures.
        self.assertEqual(zlib.ZLIB_RUNTIME_VERSION[0], zlib.ZLIB_VERSION[0])
20 21


22 23 24
class ChecksumTestCase(unittest.TestCase):
    """Checksum (crc32 / adler32) test cases."""

    def test_crc32start(self):
        # crc32's default start value is 0.
        self.assertEqual(zlib.crc32(b""), zlib.crc32(b"", 0))
        self.assertTrue(zlib.crc32(b"abc", 0xffffffff))

    def test_crc32empty(self):
        # An empty buffer leaves the start value unchanged.
        self.assertEqual(zlib.crc32(b"", 0), 0)
        self.assertEqual(zlib.crc32(b"", 1), 1)
        self.assertEqual(zlib.crc32(b"", 432), 432)

    def test_adler32start(self):
        # adler32's default start value is 1.
        self.assertEqual(zlib.adler32(b""), zlib.adler32(b"", 1))
        self.assertTrue(zlib.adler32(b"abc", 0xffffffff))

    def test_adler32empty(self):
        # An empty buffer leaves the start value unchanged.
        self.assertEqual(zlib.adler32(b"", 0), 0)
        self.assertEqual(zlib.adler32(b"", 1), 1)
        self.assertEqual(zlib.adler32(b"", 432), 432)

    def assertEqual32(self, seen, expected):
        # 32-bit values masked -- checksums on 32- vs 64- bit machines
        # This is important if bit 31 (0x08000000L) is set.
        self.assertEqual(seen & 0x0FFFFFFFF, expected & 0x0FFFFFFFF)

    def test_penguins(self):
        self.assertEqual32(zlib.crc32(b"penguin", 0), 0x0e5c1a120)
        self.assertEqual32(zlib.crc32(b"penguin", 1), 0x43b6aa94)
        self.assertEqual32(zlib.adler32(b"penguin", 0), 0x0bcf02f6)
        self.assertEqual32(zlib.adler32(b"penguin", 1), 0x0bd602f7)

        self.assertEqual(zlib.crc32(b"penguin"), zlib.crc32(b"penguin", 0))
        self.assertEqual(zlib.adler32(b"penguin"), zlib.adler32(b"penguin", 1))

    def test_crc32_adler32_unsigned(self):
        foo = b'abcdefghijklmnop'
        # explicitly test unsigned behavior (results are non-negative ints)
        self.assertEqual(zlib.crc32(foo), 2486878355)
        self.assertEqual(zlib.crc32(b'spam'), 1138425661)
        self.assertEqual(zlib.adler32(foo + foo), 3573550353)
        self.assertEqual(zlib.adler32(b'spam'), 72286642)

    def test_same_as_binascii_crc32(self):
        # zlib.crc32 and binascii.crc32 must agree.
        foo = b'abcdefghijklmnop'
        crc = 2486878355
        self.assertEqual(binascii.crc32(foo), crc)
        self.assertEqual(zlib.crc32(foo), crc)
        self.assertEqual(binascii.crc32(b'spam'), zlib.crc32(b'spam'))
70 71


72 73 74
# Issue #10276 - check that inputs >=4GB are handled correctly.
class ChecksumBigBufferTestCase(unittest.TestCase):

    @bigmemtest(size=_4G + 4, memuse=1)
    def test_big_buffer(self, size):
        # The checksum of a buffer larger than 4 GiB must not be computed
        # on a truncated length.
        data = b"nyan" * (_1G + 1)
        self.assertEqual(zlib.crc32(data), 1044521549)
        self.assertEqual(zlib.adler32(data), 2256789997)
80

Christian Heimes's avatar
Christian Heimes committed
81

82 83
class ExceptionTestCase(unittest.TestCase):
    """Make sure we generate some expected errors."""

    def test_badlevel(self):
        # specifying compression level out of range causes an error
        # (but -1 is Z_DEFAULT_COMPRESSION and apparently the zlib
        # accepts 0 too)
        self.assertRaises(zlib.error, zlib.compress, b'ERROR', 10)

    def test_badargs(self):
        # Missing arguments and non-bytes arguments raise TypeError.
        self.assertRaises(TypeError, zlib.adler32)
        self.assertRaises(TypeError, zlib.crc32)
        self.assertRaises(TypeError, zlib.compress)
        self.assertRaises(TypeError, zlib.decompress)
        for arg in (42, None, '', 'abc', (), []):
            self.assertRaises(TypeError, zlib.adler32, arg)
            self.assertRaises(TypeError, zlib.crc32, arg)
            self.assertRaises(TypeError, zlib.compress, arg)
            self.assertRaises(TypeError, zlib.decompress, arg)

    def test_badcompressobj(self):
        # verify failure on building compress object with bad params
        self.assertRaises(ValueError, zlib.compressobj, 1, zlib.DEFLATED, 0)
        # specifying total bits too large causes an error
        self.assertRaises(ValueError,
                zlib.compressobj, 1, zlib.DEFLATED, zlib.MAX_WBITS + 1)

    def test_baddecompressobj(self):
        # verify failure on building decompress object with bad params
        self.assertRaises(ValueError, zlib.decompressobj, -1)

    def test_decompressobj_badflush(self):
        # verify failure on calling decompressobj.flush with bad params
        self.assertRaises(ValueError, zlib.decompressobj().flush, 0)
        self.assertRaises(ValueError, zlib.decompressobj().flush, -1)

117

118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150
class BaseCompressTestCase(object):
    """Mixin providing helpers for very large compress/decompress buffers."""

    def check_big_compress_buffer(self, size, compress_func):
        _1M = 1024 * 1024
        # Generate 10MB worth of random, and expand it by repeating it.
        # The assumption is that zlib's memory is not big enough to exploit
        # such spread out redundancy.
        data = b''.join([random.getrandbits(8 * _1M).to_bytes(_1M, 'little')
                        for i in range(10)])
        data = data * (size // len(data) + 1)
        try:
            compress_func(data)
        finally:
            # Release memory
            data = None

    def check_big_decompress_buffer(self, size, decompress_func):
        data = b'x' * size
        try:
            compressed = zlib.compress(data, 1)
        finally:
            # Release memory
            data = None
        data = decompress_func(compressed)
        # Sanity check: the round trip restores exactly `size` b'x' bytes.
        try:
            self.assertEqual(len(data), size)
            self.assertEqual(len(data.strip(b'x')), 0)
        finally:
            data = None


class CompressTestCase(BaseCompressTestCase, unittest.TestCase):
    # Test compression in one go (whole message compression)

    def test_speech(self):
        x = zlib.compress(HAMLET_SCENE)
        self.assertEqual(zlib.decompress(x), HAMLET_SCENE)

    def test_speech128(self):
        # compress more data
        data = HAMLET_SCENE * 128
        x = zlib.compress(data)
        # bytearray input must compress identically to bytes input
        self.assertEqual(zlib.compress(bytearray(data)), x)
        for ob in x, bytearray(x):
            self.assertEqual(zlib.decompress(ob), data)

    def test_incomplete_stream(self):
        # A useful error message is given
        x = zlib.compress(HAMLET_SCENE)
        self.assertRaisesRegex(zlib.error,
            "Error -5 while decompressing data: incomplete or truncated stream",
            zlib.decompress, x[:-1])

    # Memory use of the following functions takes into account overallocation

    @bigmemtest(size=_1G + 1024 * 1024, memuse=3)
    def test_big_compress_buffer(self, size):
        compress = lambda s: zlib.compress(s, 1)
        self.check_big_compress_buffer(size, compress)

    @bigmemtest(size=_1G + 1024 * 1024, memuse=2)
    def test_big_decompress_buffer(self, size):
        self.check_big_decompress_buffer(size, zlib.decompress)

    @bigmemtest(size=_4G + 100, memuse=1)
    def test_length_overflow(self, size):
        if size < _4G + 100:
            self.skipTest("not enough free memory, need at least 4 GB")
        data = b'x' * size
        try:
            self.assertRaises(OverflowError, zlib.compress, data, 1)
            self.assertRaises(OverflowError, zlib.decompress, data)
        finally:
            data = None

193 194

class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):
    # Test compression object
    def test_pair(self):
        # straightforward compress/decompress objects
        datasrc = HAMLET_SCENE * 128
        datazip = zlib.compress(datasrc)
        # should compress both bytes and bytearray data
        for data in (datasrc, bytearray(datasrc)):
            co = zlib.compressobj()
            x1 = co.compress(data)
            x2 = co.flush()
            self.assertRaises(zlib.error, co.flush) # second flush should not work
            self.assertEqual(x1 + x2, datazip)
        # NOTE: `data` deliberately carries over from the last loop iteration.
        for v1, v2 in ((x1, x2), (bytearray(x1), bytearray(x2))):
            dco = zlib.decompressobj()
            y1 = dco.decompress(v1 + v2)
            y2 = dco.flush()
            self.assertEqual(data, y1 + y2)
            self.assertIsInstance(dco.unconsumed_tail, bytes)
            self.assertIsInstance(dco.unused_data, bytes)

    def test_compressoptions(self):
        # specify lots of options to compressobj()
        level = 2
        method = zlib.DEFLATED
        wbits = -12
        memlevel = 9
        strategy = zlib.Z_FILTERED
        co = zlib.compressobj(level, method, wbits, memlevel, strategy)
        x1 = co.compress(HAMLET_SCENE)
        x2 = co.flush()
        dco = zlib.decompressobj(wbits)
        y1 = dco.decompress(x1 + x2)
        y2 = dco.flush()
        self.assertEqual(HAMLET_SCENE, y1 + y2)

    def test_compressincremental(self):
        # compress object in steps, decompress object as one-shot
        data = HAMLET_SCENE * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), 256):
            bufs.append(co.compress(data[i:i+256]))
        bufs.append(co.flush())
        combuf = b''.join(bufs)

        dco = zlib.decompressobj()
        y1 = dco.decompress(b''.join(bufs))
        y2 = dco.flush()
        self.assertEqual(data, y1 + y2)

    def test_decompinc(self, flush=False, source=None, cx=256, dcx=64):
        # compress object in steps, decompress object in steps
        source = source or HAMLET_SCENE
        data = source * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), cx):
            bufs.append(co.compress(data[i:i+cx]))
        bufs.append(co.flush())
        combuf = b''.join(bufs)

        decombuf = zlib.decompress(combuf)
        # Test type of return value
        self.assertIsInstance(decombuf, bytes)

        self.assertEqual(data, decombuf)

        dco = zlib.decompressobj()
        bufs = []
        for i in range(0, len(combuf), dcx):
            bufs.append(dco.decompress(combuf[i:i+dcx]))
            self.assertEqual(b'', dco.unconsumed_tail, ########
                             "(A) uct should be b'': not %d long" %
                                       len(dco.unconsumed_tail))
            self.assertEqual(b'', dco.unused_data)
        if flush:
            bufs.append(dco.flush())
        else:
            while True:
                chunk = dco.decompress(b'')
                if chunk:
                    bufs.append(chunk)
                else:
                    break
        self.assertEqual(b'', dco.unconsumed_tail, ########
                         "(B) uct should be b'': not %d long" %
                                       len(dco.unconsumed_tail))
        self.assertEqual(b'', dco.unused_data)
        self.assertEqual(data, b''.join(bufs))
        # Failure means: "decompressobj with init options failed"

    def test_decompincflush(self):
        self.test_decompinc(flush=True)

    def test_decompimax(self, source=None, cx=256, dcx=64):
        # compress in steps, decompress in length-restricted steps
        source = source or HAMLET_SCENE
        # Check a decompression object with max_length specified
        data = source * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), cx):
            bufs.append(co.compress(data[i:i+cx]))
        bufs.append(co.flush())
        combuf = b''.join(bufs)
        self.assertEqual(data, zlib.decompress(combuf),
                         'compressed data failure')

        dco = zlib.decompressobj()
        bufs = []
        cb = combuf
        while cb:
            #max_length = 1 + len(cb)//10
            chunk = dco.decompress(cb, dcx)
            self.assertFalse(len(chunk) > dcx,
                    'chunk too big (%d>%d)' % (len(chunk), dcx))
            bufs.append(chunk)
            cb = dco.unconsumed_tail
        bufs.append(dco.flush())
        self.assertEqual(data, b''.join(bufs), 'Wrong data retrieved')

    def test_decompressmaxlen(self, flush=False):
        # Check a decompression object with max_length specified
        data = HAMLET_SCENE * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), 256):
            bufs.append(co.compress(data[i:i+256]))
        bufs.append(co.flush())
        combuf = b''.join(bufs)
        self.assertEqual(data, zlib.decompress(combuf),
                         'compressed data failure')

        dco = zlib.decompressobj()
        bufs = []
        cb = combuf
        while cb:
            max_length = 1 + len(cb)//10
            chunk = dco.decompress(cb, max_length)
            self.assertFalse(len(chunk) > max_length,
                        'chunk too big (%d>%d)' % (len(chunk),max_length))
            bufs.append(chunk)
            cb = dco.unconsumed_tail
        if flush:
            bufs.append(dco.flush())
        else:
            while chunk:
                chunk = dco.decompress(b'', max_length)
                self.assertFalse(len(chunk) > max_length,
                            'chunk too big (%d>%d)' % (len(chunk),max_length))
                bufs.append(chunk)
        self.assertEqual(data, b''.join(bufs), 'Wrong data retrieved')

    def test_decompressmaxlenflush(self):
        self.test_decompressmaxlen(flush=True)

    def test_maxlenmisc(self):
        # Misc tests of max_length
        dco = zlib.decompressobj()
        self.assertRaises(ValueError, dco.decompress, b"", -1)
        self.assertEqual(b'', dco.unconsumed_tail)

    def test_clear_unconsumed_tail(self):
        # Issue #12050: calling decompress() without providing max_length
        # should clear the unconsumed_tail attribute.
        cdata = b"x\x9cKLJ\x06\x00\x02M\x01"    # "abc"
        dco = zlib.decompressobj()
        ddata = dco.decompress(cdata, 1)
        ddata += dco.decompress(dco.unconsumed_tail)
        self.assertEqual(dco.unconsumed_tail, b"")

    def test_flushes(self):
        # Test flush() with the various options, using all the
        # different levels in order to provide more variations.
        sync_opt = ['Z_NO_FLUSH', 'Z_SYNC_FLUSH', 'Z_FULL_FLUSH']
        sync_opt = [getattr(zlib, opt) for opt in sync_opt
                    if hasattr(zlib, opt)]
        data = HAMLET_SCENE * 8

        for sync in sync_opt:
            for level in range(10):
                obj = zlib.compressobj(level)
                a = obj.compress(data[:3000])
                b = obj.flush(sync)
                c = obj.compress(data[3000:])
                d = obj.flush()
                self.assertEqual(zlib.decompress(b''.join([a, b, c, d])),
                                 data, ("Decompress failed: flush "
                                        "mode=%i, level=%i") % (sync, level))
                del obj

    def test_odd_flush(self):
        # Test for odd flushing bugs noted in 2.0, and hopefully fixed in 2.1
        import random

        if hasattr(zlib, 'Z_SYNC_FLUSH'):
            # Testing on 17K of "random" data

            # Create compressor and decompressor objects
            co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
            dco = zlib.decompressobj()

            # Try 17K of data
            # generate random data stream
            try:
                # In 2.3 and later, WichmannHill is the RNG of the bug report
                gen = random.WichmannHill()
            except AttributeError:
                try:
                    # 2.2 called it Random
                    gen = random.Random()
                except AttributeError:
                    # others might simply have a single RNG
                    gen = random
            gen.seed(1)
            data = genblock(1, 17 * 1024, generator=gen)

            # compress, sync-flush, and decompress
            first = co.compress(data)
            second = co.flush(zlib.Z_SYNC_FLUSH)
            expanded = dco.decompress(first + second)

            # if decompressed data is different from the input data, choke.
            self.assertEqual(expanded, data, "17K random source doesn't match")

    def test_empty_flush(self):
        # Test that calling .flush() on unused objects works.
        # (Bug #1083110 -- calling .flush() on decompress objects
        # caused a core dump.)

        co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
        self.assertTrue(co.flush())  # Returns a zlib header
        dco = zlib.decompressobj()
        self.assertEqual(dco.flush(), b"") # Returns nothing

    def test_decompress_incomplete_stream(self):
        # This is 'foo', deflated
        x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E'
        # For the record
        self.assertEqual(zlib.decompress(x), b'foo')
        self.assertRaises(zlib.error, zlib.decompress, x[:-5])
        # Omitting the stream end works with decompressor objects
        # (see issue #8672).
        dco = zlib.decompressobj()
        y = dco.decompress(x[:-5])
        y += dco.flush()
        self.assertEqual(y, b'foo')

    def test_decompress_eof(self):
        # The eof attribute flips to True once the end of stream is seen.
        x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E'  # 'foo'
        dco = zlib.decompressobj()
        self.assertFalse(dco.eof)
        dco.decompress(x[:-5])
        self.assertFalse(dco.eof)
        dco.decompress(x[-5:])
        self.assertTrue(dco.eof)
        dco.flush()
        self.assertTrue(dco.eof)

    def test_decompress_eof_incomplete_stream(self):
        # eof stays False when the stream end was never reached.
        x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E'  # 'foo'
        dco = zlib.decompressobj()
        self.assertFalse(dco.eof)
        dco.decompress(x[:-5])
        self.assertFalse(dco.eof)
        dco.flush()
        self.assertFalse(dco.eof)

    if hasattr(zlib.compressobj(), "copy"):
        def test_compresscopy(self):
            # Test copying a compression object
            data0 = HAMLET_SCENE
            data1 = bytes(str(HAMLET_SCENE, "ascii").swapcase(), "ascii")
            c0 = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
            bufs0 = []
            bufs0.append(c0.compress(data0))

            c1 = c0.copy()
            bufs1 = bufs0[:]

            bufs0.append(c0.compress(data0))
            bufs0.append(c0.flush())
            s0 = b''.join(bufs0)

            bufs1.append(c1.compress(data1))
            bufs1.append(c1.flush())
            s1 = b''.join(bufs1)

            self.assertEqual(zlib.decompress(s0), data0 + data0)
            self.assertEqual(zlib.decompress(s1), data0 + data1)

        def test_badcompresscopy(self):
            # Test copying a compression object in an inconsistent state
            c = zlib.compressobj()
            c.compress(HAMLET_SCENE)
            c.flush()
            self.assertRaises(ValueError, c.copy)

    if hasattr(zlib.decompressobj(), "copy"):
        def test_decompresscopy(self):
            # Test copying a decompression object
            data = HAMLET_SCENE
            comp = zlib.compress(data)
            # Test type of return value
            self.assertIsInstance(comp, bytes)

            d0 = zlib.decompressobj()
            bufs0 = []
            bufs0.append(d0.decompress(comp[:32]))

            d1 = d0.copy()
            bufs1 = bufs0[:]

            bufs0.append(d0.decompress(comp[32:]))
            s0 = b''.join(bufs0)

            bufs1.append(d1.decompress(comp[32:]))
            s1 = b''.join(bufs1)

            self.assertEqual(s0, s1)
            self.assertEqual(s0, data)

        def test_baddecompresscopy(self):
            # Test copying a decompression object in an inconsistent state
            data = zlib.compress(HAMLET_SCENE)
            d = zlib.decompressobj()
            d.decompress(data)
            d.flush()
            self.assertRaises(ValueError, d.copy)

    # Memory use of the following functions takes into account overallocation

    @bigmemtest(size=_1G + 1024 * 1024, memuse=3)
    def test_big_compress_buffer(self, size):
        c = zlib.compressobj(1)
        compress = lambda s: c.compress(s) + c.flush()
        self.check_big_compress_buffer(size, compress)

    @bigmemtest(size=_1G + 1024 * 1024, memuse=2)
    def test_big_decompress_buffer(self, size):
        d = zlib.decompressobj()
        decompress = lambda s: d.decompress(s) + d.flush()
        self.check_big_decompress_buffer(size, decompress)

    @bigmemtest(size=_4G + 100, memuse=1)
    def test_length_overflow(self, size):
        if size < _4G + 100:
            self.skipTest("not enough free memory, need at least 4 GB")
        data = b'x' * size
        c = zlib.compressobj(1)
        d = zlib.decompressobj()
        try:
            self.assertRaises(OverflowError, c.compress, data)
            self.assertRaises(OverflowError, d.decompress, data)
        finally:
            data = None

552

553 554 555 556 557 558 559
def genblock(seed, length, step=1024, generator=random):
    """length-byte stream of random data from a seed (in step-byte blocks)."""
    if seed is not None:
        generator.seed(seed)
    randint = generator.randint
    if length < step or step < 2:
        step = length
    # Accumulate chunks in a list and join once, instead of the quadratic
    # bytes-concatenation loop; output is byte-identical.
    blocks = []
    for i in range(0, length, step):
        blocks.append(bytes(randint(0, 255) for x in range(step)))
    return b''.join(blocks)
564 565 566 567 568 569 570 571 572 573 574 575



def choose_lines(source, number, seed=None, generator=random):
    """Return a list of number lines randomly chosen from the source"""
    if seed is not None:
        generator.seed(seed)
    sources = source.split('\n')
    return [generator.choice(sources) for n in range(number)]



576
# Test fixture: a scene from Hamlet, used as compressible sample data.
HAMLET_SCENE = b"""
LAERTES

       O, fear me not.
       I stay too long: but here my father comes.

       Enter POLONIUS

       A double blessing is a double grace,
       Occasion smiles upon a second leave.

LORD POLONIUS

       Yet here, Laertes! aboard, aboard, for shame!
       The wind sits in the shoulder of your sail,
       And you are stay'd for. There; my blessing with thee!
       And these few precepts in thy memory
       See thou character. Give thy thoughts no tongue,
       Nor any unproportioned thought his act.
       Be thou familiar, but by no means vulgar.
       Those friends thou hast, and their adoption tried,
       Grapple them to thy soul with hoops of steel;
       But do not dull thy palm with entertainment
       Of each new-hatch'd, unfledged comrade. Beware
       Of entrance to a quarrel, but being in,
       Bear't that the opposed may beware of thee.
       Give every man thy ear, but few thy voice;
       Take each man's censure, but reserve thy judgment.
       Costly thy habit as thy purse can buy,
       But not express'd in fancy; rich, not gaudy;
       For the apparel oft proclaims the man,
       And they in France of the best rank and station
       Are of a most select and generous chief in that.
       Neither a borrower nor a lender be;
       For loan oft loses both itself and friend,
       And borrowing dulls the edge of husbandry.
       This above all: to thine ownself be true,
       And it must follow, as the night the day,
       Thou canst not then be false to any man.
       Farewell: my blessing season this in thee!

LAERTES

       Most humbly do I take my leave, my lord.

LORD POLONIUS

       The time invites you; go; your servants tend.

LAERTES

       Farewell, Ophelia; and remember well
       What I have said to you.

OPHELIA

       'Tis in my memory lock'd,
       And you yourself shall keep the key of it.

LAERTES

       Farewell.
"""
639 640 641


def test_main():
    """Run every test case in this module through test.support."""
    support.run_unittest(
        VersionTestCase,
        ChecksumTestCase,
        ChecksumBigBufferTestCase,
        ExceptionTestCase,
        CompressTestCase,
        CompressObjectTestCase
    )

if __name__ == "__main__":
    unittest.main() # XXX
    ###test_main()