import unittest
from test import support
import binascii
import random
import sys
from test.support import precisionbigmemtest, _1G, _4G

zlib = support.import_module('zlib')

try:
    import mmap
except ImportError:
    mmap = None

class ChecksumTestCase(unittest.TestCase):
    # Tests for the zlib.crc32() and zlib.adler32() checksum helpers.

    def test_crc32start(self):
        # The default start value for crc32 is 0.
        self.assertEqual(zlib.crc32(b""), zlib.crc32(b"", 0))
        self.assertTrue(zlib.crc32(b"abc", 0xffffffff))

    def test_crc32empty(self):
        # An empty input leaves any start value unchanged.
        for start in (0, 1, 432):
            self.assertEqual(zlib.crc32(b"", start), start)

    def test_adler32start(self):
        # The default start value for adler32 is 1.
        self.assertEqual(zlib.adler32(b""), zlib.adler32(b"", 1))
        self.assertTrue(zlib.adler32(b"abc", 0xffffffff))

    def test_adler32empty(self):
        # An empty input leaves any start value unchanged.
        for start in (0, 1, 432):
            self.assertEqual(zlib.adler32(b"", start), start)

    def assertEqual32(self, seen, expected):
        # 32-bit values masked -- checksums on 32- vs 64- bit machines
        # This is important if bit 31 (0x08000000L) is set.
        self.assertEqual(seen & 0x0FFFFFFFF, expected & 0x0FFFFFFFF)

    def test_penguins(self):
        # Known-answer checks with both possible start values.
        self.assertEqual32(zlib.crc32(b"penguin", 0), 0x0e5c1a120)
        self.assertEqual32(zlib.crc32(b"penguin", 1), 0x43b6aa94)
        self.assertEqual32(zlib.adler32(b"penguin", 0), 0x0bcf02f6)
        self.assertEqual32(zlib.adler32(b"penguin", 1), 0x0bd602f7)

        self.assertEqual(zlib.crc32(b"penguin"), zlib.crc32(b"penguin", 0))
        self.assertEqual(zlib.adler32(b"penguin"), zlib.adler32(b"penguin", 1))

    def test_crc32_adler32_unsigned(self):
        payload = b'abcdefghijklmnop'
        # explicitly test that the results are unsigned integers
        self.assertEqual(zlib.crc32(payload), 2486878355)
        self.assertEqual(zlib.crc32(b'spam'), 1138425661)
        self.assertEqual(zlib.adler32(payload + payload), 3573550353)
        self.assertEqual(zlib.adler32(b'spam'), 72286642)

    def test_same_as_binascii_crc32(self):
        payload = b'abcdefghijklmnop'
        expected = 2486878355
        self.assertEqual(binascii.crc32(payload), expected)
        self.assertEqual(zlib.crc32(payload), expected)
        self.assertEqual(binascii.crc32(b'spam'), zlib.crc32(b'spam'))

# Issue #10276 - check that inputs >=4GB are handled correctly.
# Skips are class-level: setUp/tearDown also rely on mmap and on being
# allowed to create a >4 GiB file, so no fixture work should happen at
# all on systems where the test cannot run.
@unittest.skipUnless(mmap, "mmap() is not available.")
@unittest.skipUnless(sys.maxsize > _4G, "Can't run on a 32-bit system.")
@unittest.skipUnless(support.is_resource_enabled("largefile"),
                     "May use lots of disk space.")
class ChecksumBigBufferTestCase(unittest.TestCase):

    def setUp(self):
        # Create a sparse file slightly larger than 4 GiB and map it
        # read-only.  If anything below raises, unittest will NOT call
        # tearDown(), so remove the (potentially huge) file ourselves
        # before re-raising to avoid leaking it on disk.
        try:
            with open(support.TESTFN, "wb+") as f:
                f.seek(_4G)
                f.write(b"asdf")
                f.flush()
                self.mapping = mmap.mmap(f.fileno(), 0,
                                         access=mmap.ACCESS_READ)
        except BaseException:
            support.unlink(support.TESTFN)
            raise

    def tearDown(self):
        self.mapping.close()
        support.unlink(support.TESTFN)

    def test_big_buffer(self):
        # Known-answer checksums for the 4 GiB + 4 byte mapping.
        self.assertEqual(zlib.crc32(self.mapping), 3058686908)
        self.assertEqual(zlib.adler32(self.mapping), 82837919)


class ExceptionTestCase(unittest.TestCase):
    # Make sure the expected errors are raised for bad arguments.

    def test_badlevel(self):
        # specifying compression level out of range causes an error
        # (but -1 is Z_DEFAULT_COMPRESSION and apparently the zlib
        # accepts 0 too)
        self.assertRaises(zlib.error, zlib.compress, b'ERROR', 10)

    def test_badargs(self):
        funcs = (zlib.adler32, zlib.crc32, zlib.compress, zlib.decompress)
        # Calling with no argument at all is a TypeError ...
        for func in funcs:
            self.assertRaises(TypeError, func)
        # ... and so is any non-bytes-like first argument.
        for bad in (42, None, '', 'abc', (), []):
            for func in funcs:
                self.assertRaises(TypeError, func, bad)

    def test_badcompressobj(self):
        # verify failure on building compress object with bad params
        self.assertRaises(ValueError, zlib.compressobj, 1, zlib.DEFLATED, 0)
        # specifying total bits too large causes an error
        self.assertRaises(ValueError,
                zlib.compressobj, 1, zlib.DEFLATED, zlib.MAX_WBITS + 1)

    def test_baddecompressobj(self):
        # verify failure on building decompress object with bad params
        self.assertRaises(ValueError, zlib.decompressobj, -1)

    def test_decompressobj_badflush(self):
        # verify failure on calling decompressobj.flush with bad params
        self.assertRaises(ValueError, zlib.decompressobj().flush, 0)
        self.assertRaises(ValueError, zlib.decompressobj().flush, -1)

class BaseCompressTestCase(object):
    """Mixin with helpers shared by the big-memory compression tests."""

    def check_big_compress_buffer(self, size, compress_func):
        _1M = 1024 * 1024
        # Generate 10MB worth of random, and expand it by repeating it.
        # The assumption is that zlib's memory is not big enough to exploit
        # such spread out redundancy.
        # (The unused `fmt` scratch variable from the original was removed.)
        data = b''.join([random.getrandbits(8 * _1M).to_bytes(_1M, 'little')
                        for i in range(10)])
        data = data * (size // len(data) + 1)
        try:
            compress_func(data)
        finally:
            # Release memory
            data = None

    def check_big_decompress_buffer(self, size, decompress_func):
        data = b'x' * size
        try:
            compressed = zlib.compress(data, 1)
        finally:
            # Release memory
            data = None
        data = decompress_func(compressed)
        # Sanity check
        try:
            self.assertEqual(len(data), size)
            self.assertEqual(len(data.strip(b'x')), 0)
        finally:
            data = None


class CompressTestCase(BaseCompressTestCase, unittest.TestCase):
    # Test compression in one go (whole message compression)

    def test_speech(self):
        # A single compress/decompress round-trip.
        compressed = zlib.compress(HAMLET_SCENE)
        self.assertEqual(zlib.decompress(compressed), HAMLET_SCENE)

    def test_speech128(self):
        # compress more data
        data = HAMLET_SCENE * 128
        compressed = zlib.compress(data)
        self.assertEqual(zlib.compress(bytearray(data)), compressed)
        for buf in compressed, bytearray(compressed):
            self.assertEqual(zlib.decompress(buf), data)

    def test_incomplete_stream(self):
        # A useful error message is given
        compressed = zlib.compress(HAMLET_SCENE)
        self.assertRaisesRegex(zlib.error,
            "Error -5 while decompressing data: incomplete or truncated stream",
            zlib.decompress, compressed[:-1])

    # Memory use of the following functions takes into account overallocation

    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=3)
    def test_big_compress_buffer(self, size):
        self.check_big_compress_buffer(size, lambda s: zlib.compress(s, 1))

    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=2)
    def test_big_decompress_buffer(self, size):
        self.check_big_decompress_buffer(size, zlib.decompress)

    @precisionbigmemtest(size=_4G + 100, memuse=1)
    def test_length_overflow(self, size):
        if size < _4G + 100:
            self.skipTest("not enough free memory, need at least 4 GB")
        data = b'x' * size
        try:
            self.assertRaises(OverflowError, zlib.compress, data, 1)
            self.assertRaises(OverflowError, zlib.decompress, data)
        finally:
            data = None

class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):
    # Test compression object
    def test_pair(self):
        # straightforward compress/decompress objects
        datasrc = HAMLET_SCENE * 128
        datazip = zlib.compress(datasrc)
        # should compress both bytes and bytearray data
        for data in (datasrc, bytearray(datasrc)):
            co = zlib.compressobj()
            x1 = co.compress(data)
            x2 = co.flush()
            self.assertRaises(zlib.error, co.flush) # second flush should not work
            self.assertEqual(x1 + x2, datazip)
        for v1, v2 in ((x1, x2), (bytearray(x1), bytearray(x2))):
            dco = zlib.decompressobj()
            y1 = dco.decompress(v1 + v2)
            y2 = dco.flush()
            self.assertEqual(data, y1 + y2)
            self.assertIsInstance(dco.unconsumed_tail, bytes)
            self.assertIsInstance(dco.unused_data, bytes)

    def test_compressoptions(self):
        # specify lots of options to compressobj()
        level = 2
        method = zlib.DEFLATED
        wbits = -12
        memlevel = 9
        strategy = zlib.Z_FILTERED
        co = zlib.compressobj(level, method, wbits, memlevel, strategy)
        x1 = co.compress(HAMLET_SCENE)
        x2 = co.flush()
        dco = zlib.decompressobj(wbits)
        y1 = dco.decompress(x1 + x2)
        y2 = dco.flush()
        self.assertEqual(HAMLET_SCENE, y1 + y2)

    def test_compressincremental(self):
        # compress object in steps, decompress object as one-shot
        data = HAMLET_SCENE * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), 256):
            bufs.append(co.compress(data[i:i+256]))
        bufs.append(co.flush())
        combuf = b''.join(bufs)

        dco = zlib.decompressobj()
        # Use combuf directly rather than re-joining bufs a second time.
        y1 = dco.decompress(combuf)
        y2 = dco.flush()
        self.assertEqual(data, y1 + y2)

    def test_decompinc(self, flush=False, source=None, cx=256, dcx=64):
        # compress object in steps, decompress object in steps
        source = source or HAMLET_SCENE
        data = source * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), cx):
            bufs.append(co.compress(data[i:i+cx]))
        bufs.append(co.flush())
        combuf = b''.join(bufs)

        decombuf = zlib.decompress(combuf)
        # Test type of return value
        self.assertIsInstance(decombuf, bytes)

        self.assertEqual(data, decombuf)

        dco = zlib.decompressobj()
        bufs = []
        for i in range(0, len(combuf), dcx):
            bufs.append(dco.decompress(combuf[i:i+dcx]))
            self.assertEqual(b'', dco.unconsumed_tail, ########
                             "(A) uct should be b'': not %d long" %
                                       len(dco.unconsumed_tail))
            self.assertEqual(b'', dco.unused_data)
        if flush:
            bufs.append(dco.flush())
        else:
            while True:
                chunk = dco.decompress(b'')
                if chunk:
                    bufs.append(chunk)
                else:
                    break
        self.assertEqual(b'', dco.unconsumed_tail, ########
                         "(B) uct should be b'': not %d long" %
                                       len(dco.unconsumed_tail))
        self.assertEqual(b'', dco.unused_data)
        self.assertEqual(data, b''.join(bufs))
        # Failure means: "decompressobj with init options failed"

    def test_decompincflush(self):
        self.test_decompinc(flush=True)

    def test_decompimax(self, source=None, cx=256, dcx=64):
        # compress in steps, decompress in length-restricted steps
        source = source or HAMLET_SCENE
        # Check a decompression object with max_length specified
        data = source * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), cx):
            bufs.append(co.compress(data[i:i+cx]))
        bufs.append(co.flush())
        combuf = b''.join(bufs)
        self.assertEqual(data, zlib.decompress(combuf),
                         'compressed data failure')

        dco = zlib.decompressobj()
        bufs = []
        cb = combuf
        while cb:
            #max_length = 1 + len(cb)//10
            chunk = dco.decompress(cb, dcx)
            self.assertFalse(len(chunk) > dcx,
                    'chunk too big (%d>%d)' % (len(chunk), dcx))
            bufs.append(chunk)
            cb = dco.unconsumed_tail
        bufs.append(dco.flush())
        self.assertEqual(data, b''.join(bufs), 'Wrong data retrieved')

    def test_decompressmaxlen(self, flush=False):
        # Check a decompression object with max_length specified
        data = HAMLET_SCENE * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), 256):
            bufs.append(co.compress(data[i:i+256]))
        bufs.append(co.flush())
        combuf = b''.join(bufs)
        self.assertEqual(data, zlib.decompress(combuf),
                         'compressed data failure')

        dco = zlib.decompressobj()
        bufs = []
        cb = combuf
        while cb:
            max_length = 1 + len(cb)//10
            chunk = dco.decompress(cb, max_length)
            self.assertFalse(len(chunk) > max_length,
                        'chunk too big (%d>%d)' % (len(chunk),max_length))
            bufs.append(chunk)
            cb = dco.unconsumed_tail
        if flush:
            bufs.append(dco.flush())
        else:
            while chunk:
                chunk = dco.decompress(b'', max_length)
                self.assertFalse(len(chunk) > max_length,
                            'chunk too big (%d>%d)' % (len(chunk),max_length))
                bufs.append(chunk)
        self.assertEqual(data, b''.join(bufs), 'Wrong data retrieved')

    def test_decompressmaxlenflush(self):
        self.test_decompressmaxlen(flush=True)

    def test_maxlenmisc(self):
        # Misc tests of max_length
        dco = zlib.decompressobj()
        self.assertRaises(ValueError, dco.decompress, b"", -1)
        self.assertEqual(b'', dco.unconsumed_tail)

    def test_clear_unconsumed_tail(self):
        # Issue #12050: calling decompress() without providing max_length
        # should clear the unconsumed_tail attribute.
        cdata = b"x\x9cKLJ\x06\x00\x02M\x01"    # "abc"
        dco = zlib.decompressobj()
        ddata = dco.decompress(cdata, 1)
        ddata += dco.decompress(dco.unconsumed_tail)
        self.assertEqual(dco.unconsumed_tail, b"")

    def test_flushes(self):
        # Test flush() with the various options, using all the
        # different levels in order to provide more variations.
        sync_opt = ['Z_NO_FLUSH', 'Z_SYNC_FLUSH', 'Z_FULL_FLUSH']
        sync_opt = [getattr(zlib, opt) for opt in sync_opt
                    if hasattr(zlib, opt)]
        data = HAMLET_SCENE * 8

        for sync in sync_opt:
            for level in range(10):
                obj = zlib.compressobj( level )
                a = obj.compress( data[:3000] )
                b = obj.flush( sync )
                c = obj.compress( data[3000:] )
                d = obj.flush()
                self.assertEqual(zlib.decompress(b''.join([a,b,c,d])),
                                 data, ("Decompress failed: flush "
                                        "mode=%i, level=%i") % (sync, level))
                del obj

    def test_odd_flush(self):
        # Test for odd flushing bugs noted in 2.0, and hopefully fixed in 2.1
        # (uses the module-level `random` import; the redundant local
        # `import random` was removed)
        if hasattr(zlib, 'Z_SYNC_FLUSH'):
            # Testing on 17K of "random" data

            # Create compressor and decompressor objects
            co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
            dco = zlib.decompressobj()

            # Try 17K of data
            # generate random data stream
            try:
                # In 2.3 and later, WichmannHill is the RNG of the bug report
                gen = random.WichmannHill()
            except AttributeError:
                try:
                    # 2.2 called it Random
                    gen = random.Random()
                except AttributeError:
                    # others might simply have a single RNG
                    gen = random
            gen.seed(1)
            data = genblock(1, 17 * 1024, generator=gen)

            # compress, sync-flush, and decompress
            first = co.compress(data)
            second = co.flush(zlib.Z_SYNC_FLUSH)
            expanded = dco.decompress(first + second)

            # if decompressed data is different from the input data, choke.
            self.assertEqual(expanded, data, "17K random source doesn't match")

    def test_empty_flush(self):
        # Test that calling .flush() on unused objects works.
        # (Bug #1083110 -- calling .flush() on decompress objects
        # caused a core dump.)
        co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
        self.assertTrue(co.flush())  # Returns a zlib header
        dco = zlib.decompressobj()
        self.assertEqual(dco.flush(), b"") # Returns nothing

    def test_decompress_incomplete_stream(self):
        # This is 'foo', deflated
        x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E'
        # For the record
        self.assertEqual(zlib.decompress(x), b'foo')
        self.assertRaises(zlib.error, zlib.decompress, x[:-5])
        # Omitting the stream end works with decompressor objects
        # (see issue #8672).
        dco = zlib.decompressobj()
        y = dco.decompress(x[:-5])
        y += dco.flush()
        self.assertEqual(y, b'foo')

    if hasattr(zlib.compressobj(), "copy"):
        def test_compresscopy(self):
            # Test copying a compression object
            data0 = HAMLET_SCENE
            data1 = bytes(str(HAMLET_SCENE, "ascii").swapcase(), "ascii")
            c0 = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
            bufs0 = []
            bufs0.append(c0.compress(data0))

            c1 = c0.copy()
            bufs1 = bufs0[:]

            bufs0.append(c0.compress(data0))
            bufs0.append(c0.flush())
            s0 = b''.join(bufs0)

            bufs1.append(c1.compress(data1))
            bufs1.append(c1.flush())
            s1 = b''.join(bufs1)

            self.assertEqual(zlib.decompress(s0),data0+data0)
            self.assertEqual(zlib.decompress(s1),data0+data1)

        def test_badcompresscopy(self):
            # Test copying a compression object in an inconsistent state
            c = zlib.compressobj()
            c.compress(HAMLET_SCENE)
            c.flush()
            self.assertRaises(ValueError, c.copy)

    if hasattr(zlib.decompressobj(), "copy"):
        def test_decompresscopy(self):
            # Test copying a decompression object
            data = HAMLET_SCENE
            comp = zlib.compress(data)
            # Test type of return value
            self.assertIsInstance(comp, bytes)

            d0 = zlib.decompressobj()
            bufs0 = []
            bufs0.append(d0.decompress(comp[:32]))

            d1 = d0.copy()
            bufs1 = bufs0[:]

            bufs0.append(d0.decompress(comp[32:]))
            s0 = b''.join(bufs0)

            bufs1.append(d1.decompress(comp[32:]))
            s1 = b''.join(bufs1)

            self.assertEqual(s0,s1)
            self.assertEqual(s0,data)

        def test_baddecompresscopy(self):
            # Test copying a decompression object in an inconsistent state
            data = zlib.compress(HAMLET_SCENE)
            d = zlib.decompressobj()
            d.decompress(data)
            d.flush()
            self.assertRaises(ValueError, d.copy)

    # Memory use of the following functions takes into account overallocation

    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=3)
    def test_big_compress_buffer(self, size):
        c = zlib.compressobj(1)
        compress = lambda s: c.compress(s) + c.flush()
        self.check_big_compress_buffer(size, compress)

    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=2)
    def test_big_decompress_buffer(self, size):
        d = zlib.decompressobj()
        decompress = lambda s: d.decompress(s) + d.flush()
        self.check_big_decompress_buffer(size, decompress)


527 528 529 530 531 532 533
def genblock(seed, length, step=1024, generator=random):
    """length-byte stream of random data from a seed (in step-byte blocks)."""
    if seed is not None:
        generator.seed(seed)
    randint = generator.randint
    if length < step or step < 2:
        step = length
534
    blocks = bytes()
535
    for i in range(0, length, step):
536 537
        blocks += bytes(randint(0, 255) for x in range(step))
    return blocks
538 539 540 541 542 543 544 545 546 547 548 549



def choose_lines(source, number, seed=None, generator=random):
    """Return a list of `number` lines randomly chosen from `source`."""
    if seed is not None:
        generator.seed(seed)
    candidates = source.split('\n')
    return [generator.choice(candidates) for _ in range(number)]


# Sample text used as compressible test payload throughout this module.
HAMLET_SCENE = b"""
LAERTES

       O, fear me not.
       I stay too long: but here my father comes.

       Enter POLONIUS

       A double blessing is a double grace,
       Occasion smiles upon a second leave.

LORD POLONIUS

       Yet here, Laertes! aboard, aboard, for shame!
       The wind sits in the shoulder of your sail,
       And you are stay'd for. There; my blessing with thee!
       And these few precepts in thy memory
       See thou character. Give thy thoughts no tongue,
       Nor any unproportioned thought his act.
       Be thou familiar, but by no means vulgar.
       Those friends thou hast, and their adoption tried,
       Grapple them to thy soul with hoops of steel;
       But do not dull thy palm with entertainment
       Of each new-hatch'd, unfledged comrade. Beware
       Of entrance to a quarrel, but being in,
       Bear't that the opposed may beware of thee.
       Give every man thy ear, but few thy voice;
       Take each man's censure, but reserve thy judgment.
       Costly thy habit as thy purse can buy,
       But not express'd in fancy; rich, not gaudy;
       For the apparel oft proclaims the man,
       And they in France of the best rank and station
       Are of a most select and generous chief in that.
       Neither a borrower nor a lender be;
       For loan oft loses both itself and friend,
       And borrowing dulls the edge of husbandry.
       This above all: to thine ownself be true,
       And it must follow, as the night the day,
       Thou canst not then be false to any man.
       Farewell: my blessing season this in thee!

LAERTES

       Most humbly do I take my leave, my lord.

LORD POLONIUS

       The time invites you; go; your servants tend.

LAERTES

       Farewell, Ophelia; and remember well
       What I have said to you.

OPHELIA

       'Tis in my memory lock'd,
       And you yourself shall keep the key of it.

LAERTES

       Farewell.
"""


def test_main():
    # Run every test case defined in this module.
    support.run_unittest(
        ChecksumTestCase,
        ChecksumBigBufferTestCase,
        ExceptionTestCase,
        CompressTestCase,
        CompressObjectTestCase,
    )

if __name__ == "__main__":
    unittest.main() # XXX
    ###test_main()