Commit 94190bb6 authored by Antoine Pitrou

Start fixing test_bigmem:

- the old bigmemtest decorator is replaced by precisionbigmemtest, which now takes over the bigmemtest name (usage sketch below)
- add a poor man's watchdog thread to print memory consumption
parent ffd41d9f
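For reference, after this commit a bigmem test declares a target size and a per-unit memory estimate, and the decorator hands the size to use to the test method. A minimal usage sketch, assuming only the decorator signature shown in the diff below (the class, test name and payload are made up for illustration):

    import unittest
    from test.support import bigmemtest, _2G

    class ExampleBigmemTests(unittest.TestCase):
        # 'size' is the test's size parameter (here a byte count) and 'memuse'
        # the estimated bytes consumed per unit of size; dry_run=False makes
        # the test skip entirely when no -M memory limit was specified.
        @bigmemtest(size=_2G, memuse=2, dry_run=False)
        def test_huge_payload(self, size):
            data = b"a" * size          # roughly 2 GB of raw bytes
            try:
                self.assertEqual(len(data), size)
            finally:
                data = None             # release the memory promptly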
@@ -8,7 +8,7 @@ from http.cookies import SimpleCookie
 from test.support import (
     TestFailed, TESTFN, run_with_locale,
-    _2G, _4G, precisionbigmemtest,
+    _2G, _4G, bigmemtest,
     )
 from pickle import bytes_types
@@ -1159,7 +1159,7 @@ class BigmemPickleTests(unittest.TestCase):
     # Binary protocols can serialize longs of up to 2GB-1
-    @precisionbigmemtest(size=_2G, memuse=1 + 1, dry_run=False)
+    @bigmemtest(size=_2G, memuse=1 + 1, dry_run=False)
     def test_huge_long_32b(self, size):
         data = 1 << (8 * size)
         try:
@@ -1175,7 +1175,7 @@ class BigmemPickleTests(unittest.TestCase):
     # (older protocols don't have a dedicated opcode for bytes and are
     # too inefficient)
-    @precisionbigmemtest(size=_2G, memuse=1 + 1, dry_run=False)
+    @bigmemtest(size=_2G, memuse=1 + 1, dry_run=False)
     def test_huge_bytes_32b(self, size):
         data = b"abcd" * (size // 4)
         try:
@@ -1191,7 +1191,7 @@ class BigmemPickleTests(unittest.TestCase):
         finally:
             data = None
-    @precisionbigmemtest(size=_4G, memuse=1 + 1, dry_run=False)
+    @bigmemtest(size=_4G, memuse=1 + 1, dry_run=False)
     def test_huge_bytes_64b(self, size):
         data = b"a" * size
         try:
@@ -1206,7 +1206,7 @@ class BigmemPickleTests(unittest.TestCase):
     # All protocols use 1-byte per printable ASCII character; we add another
     # byte because the encoded form has to be copied into the internal buffer.
-    @precisionbigmemtest(size=_2G, memuse=2 + character_size, dry_run=False)
+    @bigmemtest(size=_2G, memuse=2 + character_size, dry_run=False)
     def test_huge_str_32b(self, size):
         data = "abcd" * (size // 4)
         try:
@@ -1223,7 +1223,7 @@ class BigmemPickleTests(unittest.TestCase):
     # BINUNICODE (protocols 1, 2 and 3) cannot carry more than
     # 2**32 - 1 bytes of utf-8 encoded unicode.
-    @precisionbigmemtest(size=_4G, memuse=1 + character_size, dry_run=False)
+    @bigmemtest(size=_4G, memuse=1 + character_size, dry_run=False)
     def test_huge_str_64b(self, size):
         data = "a" * size
         try:
...
@@ -1053,43 +1053,52 @@ def set_memlimit(limit):
         raise ValueError('Memory limit %r too low to be useful' % (limit,))
     max_memuse = memlimit
 
-def bigmemtest(minsize, memuse):
+def _memory_watchdog(start_evt, finish_evt, period=10.0):
+    """A function which periodically watches the process' memory consumption
+    and prints it out.
+    """
+    # XXX: because of the GIL, and because the very long operations tested
+    # in most bigmem tests are uninterruptible, the loop below gets woken up
+    # much less often than expected.
+    # The polling code should be rewritten in raw C, without holding the GIL,
+    # and push results onto an anonymous pipe.
+    try:
+        page_size = os.sysconf('SC_PAGESIZE')
+    except (ValueError, AttributeError):
+        try:
+            page_size = os.sysconf('SC_PAGE_SIZE')
+        except (ValueError, AttributeError):
+            page_size = 4096
+    procfile = '/proc/{pid}/statm'.format(pid=os.getpid())
+    try:
+        f = open(procfile, 'rb')
+    except IOError as e:
+        warnings.warn('/proc not available for stats: {}'.format(e),
+                      RuntimeWarning)
+        sys.stderr.flush()
+        return
+    with f:
+        start_evt.set()
+        old_data = -1
+        while not finish_evt.wait(period):
+            f.seek(0)
+            statm = f.read().decode('ascii')
+            data = int(statm.split()[5])
+            if data != old_data:
+                old_data = data
+                print(" ... process data size: {data:.1f}G"
+                      .format(data=data * page_size / (1024 ** 3)))
+
+def bigmemtest(size, memuse, dry_run=True):
     """Decorator for bigmem tests.
 
     'minsize' is the minimum useful size for the test (in arbitrary,
     test-interpreted units.) 'memuse' is the number of 'bytes per size' for
     the test, or a good estimate of it.
 
-    The decorator tries to guess a good value for 'size' and passes it to
-    the decorated test function. If minsize * memuse is more than the
-    allowed memory use (as defined by max_memuse), the test is skipped.
-    Otherwise, minsize is adjusted upward to use up to max_memuse.
+    if 'dry_run' is False, it means the test doesn't support dummy runs
+    when -M is not specified.
     """
-    def decorator(f):
-        def wrapper(self):
-            # Retrieve values in case someone decided to adjust them
-            minsize = wrapper.minsize
-            memuse = wrapper.memuse
-            if not max_memuse:
-                # If max_memuse is 0 (the default),
-                # we still want to run the tests with size set to a few kb,
-                # to make sure they work. We still want to avoid using
-                # too much memory, though, but we do that noisily.
-                maxsize = 5147
-                self.assertFalse(maxsize * memuse > 20 * _1M)
-            else:
-                maxsize = int(max_memuse / memuse)
-                if maxsize < minsize:
-                    raise unittest.SkipTest(
-                        "not enough memory: %.1fG minimum needed"
-                        % (minsize * memuse / (1024 ** 3)))
-            return f(self, maxsize)
-        wrapper.minsize = minsize
-        wrapper.memuse = memuse
-        return wrapper
-    return decorator
-
-def precisionbigmemtest(size, memuse, dry_run=True):
     def decorator(f):
         def wrapper(self):
             size = wrapper.size
@@ -1105,7 +1114,28 @@ def precisionbigmemtest(size, memuse, dry_run=True):
                     "not enough memory: %.1fG minimum needed"
                     % (size * memuse / (1024 ** 3)))
 
-            return f(self, maxsize)
+            if real_max_memuse and verbose and threading:
+                print()
+                print(" ... expected peak memory use: {peak:.1f}G"
+                      .format(peak=size * memuse / (1024 ** 3)))
+                sys.stdout.flush()
+                start_evt = threading.Event()
+                finish_evt = threading.Event()
+                t = threading.Thread(target=_memory_watchdog,
+                                     args=(start_evt, finish_evt, 0.5))
+                t.daemon = True
+                t.start()
+                start_evt.set()
+            else:
+                t = None
+            try:
+                return f(self, maxsize)
+            finally:
+                if t:
+                    finish_evt.set()
+                    t.join()
         wrapper.size = size
         wrapper.memuse = memuse
         return wrapper
...
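The watchdog added above is Linux-specific: it polls /proc/<pid>/statm, whose sixth whitespace-separated field is the size of the process data segment (data + stack) in pages. A minimal standalone sketch of the same measurement, assuming a Linux /proc layout (the helper name is made up):

    import os

    def data_segment_gb():
        # statm fields, all in pages: size resident shared text lib data dt
        with open('/proc/{}/statm'.format(os.getpid()), 'rb') as f:
            data_pages = int(f.read().split()[5])
        page_size = os.sysconf('SC_PAGESIZE')   # typically 4096 bytes
        return data_pages * page_size / (1024 ** 3)

    print("process data size: {:.1f}G".format(data_segment_gb()))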
This diff is collapsed.
@@ -17,7 +17,7 @@ except ImportError:
 import unittest
 import warnings
 from test import support
-from test.support import _4G, precisionbigmemtest
+from test.support import _4G, bigmemtest
 # Were we compiled --with-pydebug or with #define Py_DEBUG?
 COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')
@@ -196,7 +196,7 @@ class HashLibTestCase(unittest.TestCase):
           b'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
           'd174ab98d277d9f5a5611c2c9f419d9f')
-    @precisionbigmemtest(size=_4G + 5, memuse=1)
+    @bigmemtest(size=_4G + 5, memuse=1)
     def test_case_md5_huge(self, size):
         if size == _4G + 5:
             try:
@@ -204,7 +204,7 @@ class HashLibTestCase(unittest.TestCase):
             except OverflowError:
                 pass # 32-bit arch
-    @precisionbigmemtest(size=_4G - 1, memuse=1)
+    @bigmemtest(size=_4G - 1, memuse=1)
     def test_case_md5_uintmax(self, size):
         if size == _4G - 1:
             try:
...
 # xml.etree test for cElementTree
 from test import support
-from test.support import precisionbigmemtest, _2G
+from test.support import bigmemtest, _2G
 import unittest
 cET = support.import_module('xml.etree.cElementTree')
@@ -35,7 +35,7 @@ def sanity():
 class MiscTests(unittest.TestCase):
     # Issue #8651.
-    @support.precisionbigmemtest(size=support._2G + 100, memuse=1)
+    @support.bigmemtest(size=support._2G + 100, memuse=1)
     def test_length_overflow(self, size):
         if size < support._2G + 100:
             self.skipTest("not enough free memory, need at least 2 GB")
...
@@ -3,7 +3,7 @@ from test import support
 import binascii
 import random
 import sys
-from test.support import precisionbigmemtest, _1G, _4G
+from test.support import bigmemtest, _1G, _4G
 zlib = support.import_module('zlib')
@@ -177,16 +177,16 @@ class CompressTestCase(BaseCompressTestCase, unittest.TestCase):
     # Memory use of the following functions takes into account overallocation
-    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=3)
+    @bigmemtest(size=_1G + 1024 * 1024, memuse=3)
     def test_big_compress_buffer(self, size):
         compress = lambda s: zlib.compress(s, 1)
         self.check_big_compress_buffer(size, compress)
-    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=2)
+    @bigmemtest(size=_1G + 1024 * 1024, memuse=2)
     def test_big_decompress_buffer(self, size):
         self.check_big_decompress_buffer(size, zlib.decompress)
-    @precisionbigmemtest(size=_4G + 100, memuse=1)
+    @bigmemtest(size=_4G + 100, memuse=1)
     def test_length_overflow(self, size):
         if size < _4G + 100:
             self.skipTest("not enough free memory, need at least 4 GB")
@@ -511,19 +511,19 @@ class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):
     # Memory use of the following functions takes into account overallocation
-    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=3)
+    @bigmemtest(size=_1G + 1024 * 1024, memuse=3)
     def test_big_compress_buffer(self, size):
         c = zlib.compressobj(1)
         compress = lambda s: c.compress(s) + c.flush()
         self.check_big_compress_buffer(size, compress)
-    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=2)
+    @bigmemtest(size=_1G + 1024 * 1024, memuse=2)
     def test_big_decompress_buffer(self, size):
         d = zlib.decompressobj()
         decompress = lambda s: d.decompress(s) + d.flush()
         self.check_big_decompress_buffer(size, decompress)
-    @precisionbigmemtest(size=_4G + 100, memuse=1)
+    @bigmemtest(size=_4G + 100, memuse=1)
     def test_length_overflow(self, size):
         if size < _4G + 100:
             self.skipTest("not enough free memory, need at least 4 GB")
...