Commit 3fb774ec authored by Jesus Cea's avatar Jesus Cea

Closes #15910: MD5 and SHA1 crash when "updated" with strings bigger than 2**32 bytes

parent 03a9d2a2
......@@ -167,6 +167,21 @@ class HashLibTestCase(unittest.TestCase):
% (name, hash_object_constructor,
computed, len(data), digest))
def check_update(self, name, data, digest):
constructors = self.constructors_to_test[name]
# 2 is for hashlib.name(...) and hashlib.new(name, ...)
self.assertGreaterEqual(len(constructors), 2)
for hash_object_constructor in constructors:
h = hash_object_constructor()
h.update(data)
computed = h.hexdigest()
self.assertEqual(
computed, digest,
"Hash algorithm %s using %s when updated returned hexdigest"
" %r for %d byte input data that should have hashed to %r."
% (name, hash_object_constructor,
computed, len(data), digest))
def check_unicode(self, algorithm_name):
# Unicode objects are not allowed as input.
expected = hashlib.new(algorithm_name, str(u'spam')).hexdigest()
......@@ -200,6 +215,15 @@ class HashLibTestCase(unittest.TestCase):
except OverflowError:
pass # 32-bit arch
@precisionbigmemtest(size=_4G + 5, memuse=1)
def test_case_md5_huge_update(self, size):
    # Exercise md5 update() on a single buffer larger than 2**32 bytes;
    # the bigmem decorator may hand us a smaller size when memory is scarce.
    if size != _4G + 5:
        return
    try:
        self.check_update('md5', 'A'*size,
                          'c9af2dff37468ce5dfee8f2cfc0a9c6d')
    except OverflowError:
        # 32-bit builds cannot allocate a buffer this large at all.
        pass
@precisionbigmemtest(size=_4G - 1, memuse=1)
def test_case_md5_uintmax(self, size):
if size == _4G - 1:
......@@ -237,6 +261,15 @@ class HashLibTestCase(unittest.TestCase):
except OverflowError:
pass # 32-bit arch
@precisionbigmemtest(size=_4G + 5, memuse=1)
def test_case_sha1_huge_update(self, size):
    # Exercise sha1 update() on a single buffer larger than 2**32 bytes;
    # the bigmem decorator may hand us a smaller size when memory is scarce.
    if size != _4G + 5:
        return
    try:
        self.check_update('sha1', 'A'*size,
                          '87d745c50e6b2879ffa0fb2c930e9fbfe0dc9a5b')
    except OverflowError:
        # 32-bit builds cannot allocate a buffer this large at all.
        pass
# use the examples from Federal Information Processing Standards
# Publication 180-2, Secure Hash Standard, 2002 August 1
# http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf
......
......@@ -247,6 +247,9 @@ Library
- Issue #15908: Fix misbehaviour of the sha1 module when called on data
larger than 2**32 bytes.
- Issue #15910: Fix misbehaviour of _md5 and sha1 modules when "updating"
on data larger than 2**32 bytes.
- Issue #14875: Use float('inf') instead of float('1e66666') in the json module.
- Issue #14572: Prevent build failures with pre-3.5.0 versions of
......
......@@ -51,12 +51,25 @@ static PyObject *
md5_update(md5object *self, PyObject *args)
{
    Py_buffer view;
    Py_ssize_t n;
    unsigned char *buf;

    if (!PyArg_ParseTuple(args, "s*:update", &view))
        return NULL;

    /* md5_append() takes an unsigned int length, so feed the buffer to it
       in chunks of at most INT_MAX bytes.  A single call on the whole view
       used to truncate inputs larger than 2**32 bytes (issue #15910); the
       stale pre-patch md5_append(view.len) call must NOT remain here, or
       the data would be hashed twice and still overflow. */
    n = view.len;
    buf = (unsigned char *) view.buf;
    while (n > 0) {
        Py_ssize_t nbytes;
        if (n > INT_MAX)
            nbytes = INT_MAX;
        else
            nbytes = n;
        md5_append(&self->md5, buf,
                   Py_SAFE_DOWNCAST(nbytes, Py_ssize_t, unsigned int));
        buf += nbytes;
        n -= nbytes;
    }
    PyBuffer_Release(&view);
    Py_RETURN_NONE;
......
......@@ -429,12 +429,25 @@ static PyObject *
SHA_update(SHAobject *self, PyObject *args)
{
    Py_buffer view;
    Py_ssize_t n;
    unsigned char *buf;

    if (!PyArg_ParseTuple(args, "s*:update", &view))
        return NULL;

    /* sha_update() takes an unsigned int length, so feed the buffer to it
       in chunks of at most INT_MAX bytes.  A single call on the whole view
       used to truncate inputs larger than 2**32 bytes (issue #15910); the
       stale pre-patch sha_update(view.len) call must NOT remain here, or
       the data would be hashed twice and still overflow. */
    n = view.len;
    buf = (unsigned char *) view.buf;
    while (n > 0) {
        Py_ssize_t nbytes;
        if (n > INT_MAX)
            nbytes = INT_MAX;
        else
            nbytes = n;
        sha_update(self, buf,
                   Py_SAFE_DOWNCAST(nbytes, Py_ssize_t, unsigned int));
        buf += nbytes;
        n -= nbytes;
    }
    PyBuffer_Release(&view);
    Py_RETURN_NONE;
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment