Commit 62f7f2f4 authored by Kevin Modzelewski

Update exceptions_ubench to make it harder for us

Add some stack frames before where all the exceptions happen.
This makes us take some obscenely large amount of time, particularly
since we currently generate a traceback for the entire stack when
an exception is thrown.
parent 239b08c3
......@@ -977,7 +977,7 @@ $(call make_target,_prof)
$(call make_target,_gcc)
runpy_% pyrun_%: %.py ext_python
PYTHONPATH=test/test_extension/build/lib.linux-x86_64-2.7 python $<
$(VERB) PYTHONPATH=test/test_extension/build/lib.linux-x86_64-2.7 zsh -c 'time python $<'
$(call make_search,runpy_%)
$(call make_search,pyrun_%)
......
......@@ -3,9 +3,15 @@ def f():
from __builtin__ import Exception
e = Exception()
for i in xrange(100000):
for i in xrange(20000):
try:
raise e
except Exception:
pass
f()
def recurse(n):
if n:
recurse(n - 1)
else:
f()
recurse(100)
......@@ -478,6 +478,7 @@ static const LineInfo* lineInfoForFrame(PythonFrameIterator& frame_it) {
return new LineInfo(current_stmt->lineno, current_stmt->col_offset, source->parent_module->fn, source->getName());
}
static StatCounter us_gettraceback("us_gettraceback");
BoxedTraceback* getTraceback() {
if (!ENABLE_FRAME_INTROSPECTION) {
static bool printed_warning = false;
......@@ -488,6 +489,8 @@ BoxedTraceback* getTraceback() {
return new BoxedTraceback();
}
Timer _t("getTraceback");
std::vector<const LineInfo*> entries;
for (auto& frame_info : unwindPythonFrames()) {
const LineInfo* line_info = lineInfoForFrame(frame_info);
......@@ -496,6 +499,10 @@ BoxedTraceback* getTraceback() {
}
std::reverse(entries.begin(), entries.end());
long us = _t.end();
us_gettraceback.log(us);
return new BoxedTraceback(std::move(entries));
}
......
......@@ -1845,10 +1845,6 @@ extern "C" void setattr(Box* obj, const char* attr, Box* attr_val) {
static Box* object_setattr = object_cls->getattr("__setattr__");
assert(object_setattr);
if (DEBUG >= 2) {
assert((typeLookup(obj->cls, setattr_str, NULL) == object_setattr) == (tp_setattro == PyObject_GenericSetAttr));
}
// I guess this check makes it ok for us to just rely on having guarded on the value of setattr without
// invalidating on deallocation, since we assume that object.__setattr__ will never get deallocated.
if (tp_setattro == PyObject_GenericSetAttr) {
......@@ -2993,9 +2989,12 @@ Box* runtimeCallInternal(Box* obj, CallRewriteArgs* rewrite_args, ArgPassSpec ar
if (obj->cls != function_cls && obj->cls != builtin_function_or_method_cls && obj->cls != instancemethod_cls) {
Box* rtn;
if (DEBUG >= 2) {
assert((obj->cls->tp_call == NULL) == (typeLookup(obj->cls, call_str, NULL) == NULL));
}
if (rewrite_args) {
// TODO is this ok?
// rewrite_args->rewriter->trap();
rtn = callattrInternal(obj, &call_str, CLASS_ONLY, rewrite_args, argspec, arg1, arg2, arg3, args,
keyword_names);
} else {
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment