Commit fb1a5eb1, authored Sep 11, 2008 by Amaury Forgeot d'Arc
#3640: Correct a crash in cPickle on 64bit platforms, in the case of deeply nested lists or dicts.
Reviewed by Martin von Loewis.
parent 06974bb1
Showing 2 changed files with 138 additions and 62 deletions:

    Misc/find_recursionlimit.py    +20 / -0
    Modules/_pickle.c              +118 / -62
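The failure mode concerns data shaped like the structure below: a list (or dict) nested many levels deep, handed to the C pickler. The following is a minimal sketch for orientation only, not part of the patch; the depth of 500 is an arbitrary value that stays well inside the default recursion limit:

import io
import pickle

nested = None
for _ in range(500):
    nested = [nested]        # wrap the list one level deeper each pass

# save() in Modules/_pickle.c recurses once per nesting level; before this fix,
# sufficiently deep inputs could crash the interpreter on 64-bit platforms
# instead of failing cleanly.
pickle.Pickler(io.BytesIO(), protocol=-1).dump(nested)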
Misc/find_recursionlimit.py
@@ -20,6 +20,7 @@ MemoryError.
 """
 
 import sys
+import itertools
 
 class RecursiveBlowup1:
     def __init__(self):
@@ -59,6 +60,24 @@ def test_getitem():
 def test_recurse():
     return test_recurse()
 
+def test_cpickle(_cache={}):
+    import io
+    try:
+        import _pickle
+    except ImportError:
+        print("cannot import _pickle, skipped!")
+        return
+    l = None
+    for n in itertools.count():
+        try:
+            l = _cache[n]
+            continue  # Already tried and it works, let's save some time
+        except KeyError:
+            for i in range(100):
+                l = [l]
+        _pickle.Pickler(io.BytesIO(), protocol=-1).dump(l)
+        _cache[n] = l
+
 def check_limit(n, test_func_name):
     sys.setrecursionlimit(n)
     if test_func_name.startswith("test_"):
@@ -81,5 +100,6 @@ while 1:
     check_limit(limit, "test_init")
     check_limit(limit, "test_getattr")
     check_limit(limit, "test_getitem")
+    check_limit(limit, "test_cpickle")
    print("Limit of %d is fine" % limit)
    limit = limit + 100
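A note on the new test's design: the mutable default argument `_cache={}` is a memo that persists across calls, so nesting depths that already pickled successfully under a smaller recursion limit are skipped when check_limit retries the test with a larger one. A standalone sketch of the same idiom, with hypothetical names that are not part of the patch:

def probe(n, _cache={}):          # the default dict is created once and shared by all calls
    try:
        return _cache[n]          # depth n was already handled on an earlier call
    except KeyError:
        result = sum(range(n))    # stand-in for the real work (build and pickle a nested list)
        _cache[n] = result
        return result

probe(10)   # computed and cached
probe(10)   # served from _cache without recomputation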
Modules/_pickle.c
@@ -1353,8 +1353,8 @@ save_tuple(PicklerObject *self, PyObject *obj)
 static int
 batch_list(PicklerObject *self, PyObject *iter)
 {
-    PyObject *obj;
-    PyObject *slice[BATCHSIZE];
+    PyObject *obj = NULL;
+    PyObject *firstitem = NULL;
     int i, n;
 
     const char mark_op = MARK;
@@ -1389,44 +1389,69 @@ batch_list(PicklerObject *self, PyObject *iter)
 
     /* proto > 0: write in batches of BATCHSIZE. */
     do {
-        /* Get next group of (no more than) BATCHSIZE elements. */
-        for (n = 0; n < BATCHSIZE; n++) {
-            obj = PyIter_Next(iter);
-            if (obj == NULL) {
-                if (PyErr_Occurred())
-                    goto error;
-                break;
-            }
-            slice[n] = obj;
-        }
-
-        if (n > 1) {
-            /* Pump out MARK, slice[0:n], APPENDS. */
-            if (pickler_write(self, &mark_op, 1) < 0)
-                goto error;
-            for (i = 0; i < n; i++) {
-                if (save(self, slice[i], 0) < 0)
-                    goto error;
-            }
-            if (pickler_write(self, &appends_op, 1) < 0)
-                goto error;
-        }
-        else if (n == 1) {
-            if (save(self, slice[0], 0) < 0 ||
-                pickler_write(self, &append_op, 1) < 0)
-                goto error;
-        }
-
-        for (i = 0; i < n; i++) {
-            Py_DECREF(slice[i]);
-        }
+        /* Get first item */
+        firstitem = PyIter_Next(iter);
+        if (firstitem == NULL) {
+            if (PyErr_Occurred())
+                goto error;
+
+            /* nothing more to add */
+            break;
+        }
+
+        /* Try to get a second item */
+        obj = PyIter_Next(iter);
+        if (obj == NULL) {
+            if (PyErr_Occurred())
+                goto error;
+
+            /* Only one item to write */
+            if (save(self, firstitem, 0) < 0)
+                goto error;
+            if (pickler_write(self, &append_op, 1) < 0)
+                goto error;
+            Py_CLEAR(firstitem);
+            break;
+        }
+
+        /* More than one item to write */
+
+        /* Pump out MARK, items, APPENDS. */
+        if (pickler_write(self, &mark_op, 1) < 0)
+            goto error;
+
+        if (save(self, firstitem, 0) < 0)
+            goto error;
+        Py_CLEAR(firstitem);
+        n = 1;
+
+        /* Fetch and save up to BATCHSIZE items */
+        while (obj) {
+            if (save(self, obj, 0) < 0)
+                goto error;
+            Py_CLEAR(obj);
+            n += 1;
+
+            if (n == BATCHSIZE)
+                break;
+
+            obj = PyIter_Next(iter);
+            if (obj == NULL) {
+                if (PyErr_Occurred())
+                    goto error;
+                break;
+            }
+        }
+
+        if (pickler_write(self, &appends_op, 1) < 0)
+            goto error;
+
     } while (n == BATCHSIZE);
     return 0;
 
   error:
-    while (--n >= 0) {
-        Py_DECREF(slice[n]);
-    }
+    Py_XDECREF(firstitem);
+    Py_XDECREF(obj);
    return -1;
 }
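For protocol > 0 the rewritten batch_list still produces the same stream layout as before: a lone element is written with a bare APPEND (the "only one item to write" branch), while two or more elements are grouped as MARK, items, APPENDS. A quick way to observe this from Python, as an illustrative check rather than part of the commit (pickle.Pickler is the C pickler in a default build):

import io
import pickle
import pickletools

def opcode_names(value):
    buf = io.BytesIO()
    pickle.Pickler(buf, protocol=2).dump(value)
    return [op.name for op, arg, pos in pickletools.genops(buf.getvalue())]

print(opcode_names([1]))        # contains 'APPEND': the single-item branch
print(opcode_names([1, 2, 3]))  # contains 'MARK', ..., 'APPENDS': the batched branch

batch_dict, rewritten the same way below, does the analogous thing with SETITEM and SETITEMS for dictionary entries.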
@@ -1496,8 +1521,8 @@ save_list(PicklerObject *self, PyObject *obj)
 static int
 batch_dict(PicklerObject *self, PyObject *iter)
 {
-    PyObject *obj;
-    PyObject *slice[BATCHSIZE];
+    PyObject *obj = NULL;
+    PyObject *firstitem = NULL;
     int i, n;
 
     const char mark_op = MARK;
@@ -1534,53 +1559,84 @@ batch_dict(PicklerObject *self, PyObject *iter)
 
     /* proto > 0: write in batches of BATCHSIZE. */
     do {
-        /* Get next group of (no more than) BATCHSIZE elements. */
-        for (n = 0; n < BATCHSIZE; n++) {
-            obj = PyIter_Next(iter);
-            if (obj == NULL) {
-                if (PyErr_Occurred())
-                    goto error;
-                break;
-            }
-            if (!PyTuple_Check(obj) || PyTuple_Size(obj) != 2) {
-                PyErr_SetString(PyExc_TypeError, "dict items "
-                                "iterator must return 2-tuples");
-                goto error;
-            }
-            slice[n] = obj;
-        }
-
-        if (n > 1) {
-            /* Pump out MARK, slice[0:n], SETITEMS. */
-            if (pickler_write(self, &mark_op, 1) < 0)
-                goto error;
-            for (i = 0; i < n; i++) {
-                obj = slice[i];
-                if (save(self, PyTuple_GET_ITEM(obj, 0), 0) < 0 ||
-                    save(self, PyTuple_GET_ITEM(obj, 1), 0) < 0)
-                    goto error;
-            }
-            if (pickler_write(self, &setitems_op, 1) < 0)
-                goto error;
-        }
-        else if (n == 1) {
-            obj = slice[0];
-            if (save(self, PyTuple_GET_ITEM(obj, 0), 0) < 0 ||
-                save(self, PyTuple_GET_ITEM(obj, 1), 0) < 0 ||
-                pickler_write(self, &setitem_op, 1) < 0)
-                goto error;
-        }
-
-        for (i = 0; i < n; i++) {
-            Py_DECREF(slice[i]);
-        }
+        /* Get first item */
+        firstitem = PyIter_Next(iter);
+        if (firstitem == NULL) {
+            if (PyErr_Occurred())
+                goto error;
+
+            /* nothing more to add */
+            break;
+        }
+        if (!PyTuple_Check(firstitem) || PyTuple_Size(firstitem) != 2) {
+            PyErr_SetString(PyExc_TypeError, "dict items "
+                            "iterator must return 2-tuples");
+            goto error;
+        }
+
+        /* Try to get a second item */
+        obj = PyIter_Next(iter);
+        if (obj == NULL) {
+            if (PyErr_Occurred())
+                goto error;
+
+            /* Only one item to write */
+            if (save(self, PyTuple_GET_ITEM(firstitem, 0), 0) < 0)
+                goto error;
+            if (save(self, PyTuple_GET_ITEM(firstitem, 1), 0) < 0)
+                goto error;
+            if (pickler_write(self, &setitem_op, 1) < 0)
+                goto error;
+            Py_CLEAR(firstitem);
+            break;
+        }
+
+        /* More than one item to write */
+
+        /* Pump out MARK, items, SETITEMS. */
+        if (pickler_write(self, &mark_op, 1) < 0)
+            goto error;
+
+        if (save(self, PyTuple_GET_ITEM(firstitem, 0), 0) < 0)
+            goto error;
+        if (save(self, PyTuple_GET_ITEM(firstitem, 1), 0) < 0)
+            goto error;
+        Py_CLEAR(firstitem);
+        n = 1;
+
+        /* Fetch and save up to BATCHSIZE items */
+        while (obj) {
+            if (!PyTuple_Check(obj) || PyTuple_Size(obj) != 2) {
+                PyErr_SetString(PyExc_TypeError, "dict items "
+                                "iterator must return 2-tuples");
+                goto error;
+            }
+            if (save(self, PyTuple_GET_ITEM(obj, 0), 0) < 0 ||
+                save(self, PyTuple_GET_ITEM(obj, 1), 0) < 0 ||
+                pickler_write(self, &setitem_op, 1) < 0)
+                goto error;
+            Py_CLEAR(obj);
+            n += 1;
+
+            if (n == BATCHSIZE)
+                break;
+
+            obj = PyIter_Next(iter);
+            if (obj == NULL) {
+                if (PyErr_Occurred())
+                    goto error;
+                break;
+            }
+        }
+
+        if (pickler_write(self, &setitems_op, 1) < 0)
+            goto error;
+
     } while (n == BATCHSIZE);
     return 0;
 
   error:
-    while (--n >= 0) {
-        Py_DECREF(slice[n]);
-    }
+    Py_XDECREF(firstitem);
+    Py_XDECREF(obj);
    return -1;
 }
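The same kind of spot check for the dict path handled by batch_dict: a single key/value pair is written with SETITEM, several pairs are grouped under MARK, pairs, SETITEMS. Again, this is only an illustrative sketch, not part of the commit:

import io
import pickle
import pickletools

def opcode_names(value):
    buf = io.BytesIO()
    pickle.Pickler(buf, protocol=2).dump(value)
    return [op.name for op, arg, pos in pickletools.genops(buf.getvalue())]

print(opcode_names({"a": 1}))          # contains 'SETITEM': the single-pair branch
print(opcode_names({"a": 1, "b": 2}))  # contains 'MARK', ..., 'SETITEMS': the batched branch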