Commit b8d99c33 authored by Mark Florisson's avatar Mark Florisson

merge

parents 1489a615 24065537
......@@ -744,7 +744,7 @@ typedef struct {
__Pyx_StructField root;
__Pyx_BufFmt_StackElem* head;
size_t fmt_offset;
int new_count, enc_count;
size_t new_count, enc_count;
int is_complex;
char enc_type;
char new_packmode;
......@@ -794,8 +794,8 @@ static int __Pyx_BufFmt_ParseNumber(const char** ts) {
}
static void __Pyx_BufFmt_RaiseUnexpectedChar(char ch) {
char msg[] = {ch, 0};
PyErr_Format(PyExc_ValueError, "Unexpected format string character: '%s'", msg);
PyErr_Format(PyExc_ValueError,
"Unexpected format string character: '%c'", ch);
}
static const char* __Pyx_BufFmt_DescribeTypeChar(char ch, int is_complex) {
......@@ -868,7 +868,7 @@ typedef struct { char c; double x; } __Pyx_st_double;
typedef struct { char c; long double x; } __Pyx_st_longdouble;
typedef struct { char c; void *x; } __Pyx_st_void_p;
#ifdef HAVE_LONG_LONG
typedef struct { char c; PY_LONG_LONG x; } __Pyx_s_long_long;
typedef struct { char c; PY_LONG_LONG x; } __Pyx_st_longlong;
#endif
static size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, int is_complex) {
......@@ -878,7 +878,7 @@ static size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, int is_complex) {
case 'i': case 'I': return sizeof(__Pyx_st_int) - sizeof(int);
case 'l': case 'L': return sizeof(__Pyx_st_long) - sizeof(long);
#ifdef HAVE_LONG_LONG
case 'q': case 'Q': return sizeof(__Pyx_s_long_long) - sizeof(PY_LONG_LONG);
case 'q': case 'Q': return sizeof(__Pyx_st_longlong) - sizeof(PY_LONG_LONG);
#endif
case 'f': return sizeof(__Pyx_st_float) - sizeof(float);
case 'd': return sizeof(__Pyx_st_double) - sizeof(double);
......@@ -890,7 +890,7 @@ static size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, int is_complex) {
}
}
static size_t __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) {
static char __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) {
switch (ch) {
case 'c': case 'b': case 'h': case 'i': case 'l': case 'q': return 'I';
case 'B': case 'H': case 'I': case 'L': case 'Q': return 'U';
......@@ -944,8 +944,8 @@ static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) {
size = __Pyx_BufFmt_TypeCharToStandardSize(ctx->enc_type, ctx->is_complex);
}
if (ctx->enc_packmode == '@') {
int align_at = __Pyx_BufFmt_TypeCharToAlignment(ctx->enc_type, ctx->is_complex);
int align_mod_offset;
size_t align_at = __Pyx_BufFmt_TypeCharToAlignment(ctx->enc_type, ctx->is_complex);
size_t align_mod_offset;
if (align_at == 0) return -1;
align_mod_offset = ctx->fmt_offset % align_at;
if (align_mod_offset > 0) ctx->fmt_offset += align_at - align_mod_offset;
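The two lines above implement the usual round-up-to-alignment step for native ('@') packing. A worked example with illustrative values (not from the commit):

fmt_offset, align_at = 5, 4                    # e.g. an int field following a lone char
align_mod_offset = fmt_offset % align_at       # 1
if align_mod_offset > 0:
    fmt_offset += align_at - align_mod_offset
assert fmt_offset == 8                         # padded up to the next multiple of 4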
......@@ -968,8 +968,8 @@ static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) {
offset = ctx->head->parent_offset + field->offset;
if (ctx->fmt_offset != offset) {
PyErr_Format(PyExc_ValueError,
"Buffer dtype mismatch; next field is at offset %"PY_FORMAT_SIZE_T"d "
"but %"PY_FORMAT_SIZE_T"d expected", (Py_ssize_t)ctx->fmt_offset, (Py_ssize_t)offset);
"Buffer dtype mismatch; next field is at offset %"PY_FORMAT_SIZE_T"d but %"PY_FORMAT_SIZE_T"d expected",
(Py_ssize_t)ctx->fmt_offset, (Py_ssize_t)offset);
return -1;
}
......@@ -1054,9 +1054,8 @@ static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const cha
break;
case 'T': /* substruct */
{
int i;
const char* ts_after_sub;
int struct_count = ctx->new_count;
size_t i, struct_count = ctx->new_count;
ctx->new_count = 1;
++ts;
if (*ts != '{') {
......@@ -1118,15 +1117,14 @@ static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const cha
break;
default:
{
ctx->new_count = __Pyx_BufFmt_ParseNumber(&ts);
if (ctx->new_count == -1) { /* First char was not a digit */
char msg[2] = { *ts, 0 };
int number = __Pyx_BufFmt_ParseNumber(&ts);
if (number == -1) { /* First char was not a digit */
PyErr_Format(PyExc_ValueError,
"Does not understand character buffer dtype format string ('%s')", msg);
"Does not understand character buffer dtype format string ('%c')", *ts);
return NULL;
}
ctx->new_count = (size_t)number;
}
}
}
}
......@@ -1140,7 +1138,7 @@ static CYTHON_INLINE void __Pyx_ZeroBuffer(Py_buffer* buf) {
}
static CYTHON_INLINE int __Pyx_GetBufferAndValidate(Py_buffer* buf, PyObject* obj, __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack) {
if (obj == Py_None) {
if (obj == Py_None || obj == NULL) {
__Pyx_ZeroBuffer(buf);
return 0;
}
......@@ -1161,8 +1159,7 @@ static CYTHON_INLINE int __Pyx_GetBufferAndValidate(Py_buffer* buf, PyObject* ob
PyErr_Format(PyExc_ValueError,
"Item size of buffer (%"PY_FORMAT_SIZE_T"d byte%s) does not match size of '%s' (%"PY_FORMAT_SIZE_T"d byte%s)",
buf->itemsize, (buf->itemsize > 1) ? "s" : "",
dtype->name,
(Py_ssize_t)dtype->size, (dtype->size > 1) ? "s" : "");
dtype->name, (Py_ssize_t)dtype->size, (dtype->size > 1) ? "s" : "");
goto fail;
}
if (buf->suboffsets == NULL) buf->suboffsets = __Pyx_minusones;
......
......@@ -7,6 +7,7 @@ from Code import UtilityCode
from TypeSlots import Signature
import PyrexTypes
import Naming
import Options
# C-level implementations of builtin types, functions and methods
......@@ -76,25 +77,6 @@ bad:
}
""")
hasattr_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); /*proto*/
""",
impl = """
static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) {
PyObject *v = PyObject_GetAttr(o, n);
if (v) {
Py_DECREF(v);
return 1;
}
if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
PyErr_Clear();
return 0;
}
return -1;
}
""")
globals_utility_code = UtilityCode(
# This is a stub implementation until we have something more complete.
# Currently, we only handle the most common case of a read-only dict
......@@ -432,10 +414,7 @@ builtin_function_table = [
utility_code = getattr3_utility_code),
BuiltinFunction('getattr3', "OOO", "O", "__Pyx_GetAttr3", "getattr",
utility_code = getattr3_utility_code), # Pyrex compatibility
BuiltinFunction('globals', "", "O", "__Pyx_Globals",
utility_code = globals_utility_code),
BuiltinFunction('hasattr', "OO", "b", "__Pyx_HasAttr",
utility_code = hasattr_utility_code),
BuiltinFunction('hasattr', "OO", "b", "PyObject_HasAttr"),
BuiltinFunction('hash', "O", "h", "PyObject_Hash"),
#('hex', "", "", ""),
#('id', "", "", ""),
......@@ -482,6 +461,11 @@ builtin_function_table = [
BuiltinFunction('__Pyx_PyObject_Append', "OO", "O", "__Pyx_PyObject_Append"),
]
if not Options.old_style_globals:
builtin_function_table.append(
BuiltinFunction('globals', "", "O", "__Pyx_Globals",
utility_code = globals_utility_code))
# Builtin types
# bool
# buffer
......@@ -544,10 +528,14 @@ builtin_types_table = [
]),
# ("file", "PyFile_Type", []), # not in Py3
("set", "PySet_Type", [BuiltinMethod("clear", "T", "r", "PySet_Clear"),
BuiltinMethod("discard", "TO", "r", "PySet_Discard"),
BuiltinMethod("add", "TO", "r", "PySet_Add"),
BuiltinMethod("pop", "T", "O", "PySet_Pop")]),
("set", "PySet_Type", [BuiltinMethod("clear", "T", "r", "PySet_Clear",
utility_code = py23_set_utility_code),
BuiltinMethod("discard", "TO", "r", "PySet_Discard",
utility_code = py23_set_utility_code),
BuiltinMethod("add", "TO", "r", "PySet_Add",
utility_code = py23_set_utility_code),
BuiltinMethod("pop", "T", "O", "PySet_Pop",
utility_code = py23_set_utility_code)]),
("frozenset", "PyFrozenSet_Type", []),
]
......
......@@ -39,6 +39,7 @@ Options:
-3 Compile based on Python-3 syntax and code semantics.
--fast-fail Abort the compilation on the first error
--warning-errors, -Werror Make all warnings into errors
--warning-extra, -Wextra Enable extra warnings
-X, --directive <name>=<value>[,<name>=<value>,...] Overrides a compiler directive
"""
......@@ -132,8 +133,12 @@ def parse_command_line(args):
Options.fast_fail = True
elif option in ('-Werror', '--warning-errors'):
Options.warning_errors = True
elif option in ('-Wextra', '--warning-extra'):
options.compiler_directives.update(Options.extra_warnings)
elif option == "--disable-function-redefinition":
Options.disable_function_redefinition = True
elif option == "--old-style-globals":
Options.old_style_globals = True
elif option == "--directive" or option.startswith('-X'):
if option.startswith('-X') and option[2:].strip():
x_args = option[2:]
......
......@@ -1187,6 +1187,8 @@ class CCodeWriter(object):
entry.cname, dll_linkage = dll_linkage))
if entry.init is not None:
self.put_safe(" = %s" % entry.type.literal_code(entry.init))
elif entry.type.is_pyobject:
self.put(" = NULL")
self.putln(";")
def put_temp_declarations(self, func_context):
......@@ -1290,10 +1292,7 @@ class CCodeWriter(object):
def put_var_decref(self, entry):
if entry.type.is_pyobject:
if entry.init_to_none is False: # FIXME: 0 and False are treated differently???
self.putln("__Pyx_XDECREF(%s);" % self.entry_as_pyobject(entry))
else:
self.putln("__Pyx_DECREF(%s);" % self.entry_as_pyobject(entry))
def put_var_decref_clear(self, entry):
if entry.type.is_pyobject:
......@@ -1420,6 +1419,19 @@ class CCodeWriter(object):
# return self.putln("if (unlikely(%s < 0)) %s" % (value, self.error_goto(pos))) # TODO this path is almost _never_ taken, yet this macro makes it slower!
return self.putln("if (%s < 0) %s" % (value, self.error_goto(pos)))
def put_error_if_unbound(self, pos, entry):
import ExprNodes
if entry.from_closure:
func = '__Pyx_RaiseClosureNameError'
self.globalstate.use_utility_code(
ExprNodes.raise_closure_name_error_utility_code)
else:
func = '__Pyx_RaiseUnboundLocalError'
self.globalstate.use_utility_code(
ExprNodes.raise_unbound_local_error_utility_code)
self.put('if (unlikely(!%s)) { %s("%s"); %s }' % (
entry.cname, func, entry.name, self.error_goto(pos)))
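For a plain local variable the new put_error_if_unbound helper emits a guard of the following shape; the entry names below are hypothetical and the error_goto() output is simplified:

cname, name = '__pyx_v_x', 'x'
print('if (unlikely(!%s)) { %s("%s"); %s }' % (
    cname, '__Pyx_RaiseUnboundLocalError', name, 'goto __pyx_L1_error;'))
# -> if (unlikely(!__pyx_v_x)) { __Pyx_RaiseUnboundLocalError("x"); goto __pyx_L1_error; }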
def set_error_info(self, pos):
self.funcstate.should_declare_error_indicator = True
if self.c_line_in_traceback:
......
import bisect, sys
# This module keeps track of arbitrary "states" at any point of the code.
# A state is considered known if every path to the given point agrees on
# its state, otherwise it is None (i.e. unknown).
# It might be useful to be able to "freeze" the set of states by pushing
# all state changes to the tips of the trees for fast reading. Perhaps this
# could be done on get_state, clearing the cache on set_state (assuming
# incoming is immutable).
# This module still needs a lot of work, and probably should totally be
# redesigned. It doesn't take return, raise, continue, or break into
# account.
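A minimal sketch of the "known only if every path agrees" rule described above (concept illustration only, not part of this module):

def merge_states(path_states):
    # a state is known only when every incoming path reports the same value;
    # any disagreement collapses it to None (unknown)
    first = path_states[0]
    return first if all(s == first for s in path_states[1:]) else None

assert merge_states(['initialized', 'initialized']) == 'initialized'
assert merge_states(['initialized', None]) is None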
from Cython.Compiler.Scanning import StringSourceDescriptor
try:
_END_POS = (StringSourceDescriptor(unichr(sys.maxunicode)*10, ''),
sys.maxint, sys.maxint)
except AttributeError: # Py3
_END_POS = (StringSourceDescriptor(unichr(sys.maxunicode)*10, ''),
sys.maxsize, sys.maxsize)
class ControlFlow(object):
def __init__(self, start_pos, incoming, parent):
self.start_pos = start_pos
self.incoming = incoming
if parent is None and incoming is not None:
parent = incoming.parent
self.parent = parent
self.tip = {}
self.end_pos = _END_POS
def start_branch(self, pos):
self.end_pos = pos
branch_point = BranchingControlFlow(pos, self)
if self.parent is not None:
self.parent.branches[-1] = branch_point
return branch_point.branches[0]
def next_branch(self, pos):
self.end_pos = pos
return self.parent.new_branch(pos)
def finish_branch(self, pos):
self.end_pos = pos
self.parent.end_pos = pos
return LinearControlFlow(pos, self.parent)
def get_state(self, item, pos=_END_POS):
return self.get_pos_state(item, pos)[1]
def get_pos_state(self, item, pos=_END_POS):
# do some caching
if pos > self.end_pos:
try:
return self.tip[item]
except KeyError:
pass
pos_state = self._get_pos_state(item, pos)
if pos > self.end_pos:
self.tip[item] = pos_state
return pos_state
def _get_pos_state(self, item, pos):
current = self
while current is not None and pos <= current.start_pos:
current = current.incoming
if current is None:
return (None, None)
state = current._get_pos_state_local(item, pos)
while (state is None or state == (None, None)) and current.incoming is not None:
current = current.incoming
state = current._get_pos_state_local(item, pos)
if state is None:
return (None, None)
return state
def set_state(self, pos, item, state):
if item in self.tip:
del self.tip[item]
current = self
while pos < current.start_pos and current.incoming is not None:
current = current.incoming
if item in current.tip:
del current.tip[item]
current._set_state_local(pos, item, state)
class LinearControlFlow(ControlFlow):
def __init__(self, start_pos=(), incoming=None, parent=None):
ControlFlow.__init__(self, start_pos, incoming, parent)
self.events = {}
def _set_state_local(self, pos, item, state):
if item in self.events:
event_list = self.events[item]
else:
event_list = []
self.events[item] = event_list
bisect.insort(event_list, (pos, state))
def _get_pos_state_local(self, item, pos):
if item in self.events:
event_list = self.events[item]
for event in event_list[::-1]:
if event[0] < pos:
return event
return None
def to_string(self, indent='', limit=None):
if len(self.events) == 0:
s = indent + "[no state changes]"
else:
all = []
for item, event_list in self.events.items():
for pos, state in event_list:
all.append((indent, pos, item, state))
all.sort()
all = ["%s%s: %s <- %s" % data for data in all]
s = "\n".join(all)
if self.incoming is not limit and self.incoming is not None:
s = "%s\n%s" % (self.incoming.to_string(indent, limit=limit), s)
return s
class BranchingControlFlow(ControlFlow):
def __init__(self, start_pos, incoming, parent=None):
ControlFlow.__init__(self, start_pos, incoming, parent)
self.branches = [LinearControlFlow(start_pos, incoming, parent=self)]
self.branch_starts = [start_pos]
def _set_state_local(self, pos, item, state):
for branch_pos, branch in zip(self.branch_starts[::-1], self.branches[::-1]):
if pos >= branch_pos:
branch._set_state_local(pos, item, state)
return
def _get_pos_state_local(self, item, pos, stop_at=None):
if pos < self.end_pos:
for branch_pos, branch in zip(self.branch_starts[::-1], self.branches[::-1]):
if pos >= branch_pos:
return branch._get_pos_state_local(item, pos)
else:
state = self.branches[0]._get_pos_state_local(item, pos)
if state is None:
return None, None
last_pos, last_state = state
if last_state is None:
return None, None
for branch in self.branches[1:]:
state = branch._get_pos_state_local(item, pos)
if state is None:
return None, None
other_pos, other_state = state
if other_state != last_state:
return None, None
elif last_pos is not other_pos:
last_pos = max(last_pos, other_pos)
return last_pos, last_state
return None
def new_branch(self, pos):
self.branches.append(LinearControlFlow(pos, self.incoming, parent=self))
self.branch_starts.append(pos)
return self.branches[-1]
def to_string(self, indent='', limit=None):
join = "\n%sor\n" % indent
s = join.join([branch.to_string(indent+" ", limit=self.incoming) for branch in self.branches])
if self.incoming is not limit and self.incoming is not None:
s = "%s\n%s" % (self.incoming.to_string(indent, limit=limit), s)
return s
......@@ -20,7 +20,7 @@ def context(position):
assert not (isinstance(source, unicode) or isinstance(source, str)), (
"Please replace filename strings with Scanning.FileSourceDescriptor instances %r" % source)
try:
F = list(source.get_lines())
F = source.get_lines()
except UnicodeDecodeError:
# file has an encoding problem
s = u"[unprintable code]\n"
......
......@@ -702,11 +702,11 @@ class ExprNode(Node):
def as_cython_attribute(self):
return None
def as_none_safe_node(self, message, error="PyExc_TypeError"):
def as_none_safe_node(self, message, error="PyExc_TypeError", format_args=()):
# Wraps the node in a NoneCheckNode if it is not known to be
# not-None (e.g. because it is a Python literal).
if self.may_be_none():
return NoneCheckNode(self, error, message)
return NoneCheckNode(self, error, message, format_args)
else:
return self
......@@ -1288,14 +1288,20 @@ class NameNode(AtomicExprNode):
# name string Python name of the variable
# entry Entry Symbol table entry
# type_entry Entry For extension type names, the original type entry
# cf_is_null boolean Is uninitialized before this node
# cf_maybe_null boolean Maybe uninitialized before this node
# allow_null boolean Don't raise UnboundLocalError
is_name = True
is_cython_module = False
cython_attribute = None
lhs_of_first_assignment = False
lhs_of_first_assignment = False # TODO: remove me
is_used_as_rvalue = 0
entry = None
type_entry = None
cf_maybe_null = True
cf_is_null = False
allow_null = False
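A condensed sketch of how these new control-flow flags are consumed when loading a name (it mirrors the rule applied in generate_result_code below; the helper name is hypothetical):

def needs_unbound_check(node):
    # emit an UnboundLocalError/NameError guard unless the value is provably
    # initialized, or the node explicitly allows NULL (e.g. after 'del')
    return (node.cf_maybe_null or node.cf_is_null) and not node.allow_null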
def create_analysed_rvalue(pos, env, entry):
node = NameNode(pos)
......@@ -1407,8 +1413,6 @@ class NameNode(AtomicExprNode):
else:
type = py_object_type
self.entry = env.declare_var(self.name, type, self.pos)
env.control_flow.set_state(self.pos, (self.name, 'initialized'), True)
env.control_flow.set_state(self.pos, (self.name, 'source'), 'assignment')
if self.entry.is_declared_generic:
self.result_ctype = py_object_type
......@@ -1578,15 +1582,11 @@ class NameNode(AtomicExprNode):
code.error_goto_if_null(self.result(), self.pos)))
code.put_gotref(self.py_result())
elif entry.is_local and False:
# control flow not good enough yet
assigned = entry.scope.control_flow.get_state((entry.name, 'initialized'), self.pos)
if assigned is False:
error(self.pos, "local variable '%s' referenced before assignment" % entry.name)
elif not Options.init_local_none and assigned is None:
code.putln('if (%s == 0) { PyErr_SetString(PyExc_UnboundLocalError, "%s"); %s }' %
(entry.cname, entry.name, code.error_goto(self.pos)))
entry.scope.control_flow.set_state(self.pos, (entry.name, 'initialized'), True)
elif entry.is_local or entry.in_closure or entry.from_closure:
if entry.type.is_pyobject:
if (self.cf_maybe_null or self.cf_is_null) \
and not self.allow_null:
code.put_error_if_unbound(self.pos, entry)
def generate_assignment_code(self, rhs, code):
#print "NameNode.generate_assignment_code:", self.name ###
......@@ -1655,14 +1655,17 @@ class NameNode(AtomicExprNode):
if self.use_managed_ref:
rhs.make_owned_reference(code)
is_external_ref = entry.is_cglobal or self.entry.in_closure or self.entry.from_closure
if not self.lhs_of_first_assignment:
if is_external_ref:
if not self.cf_is_null:
if self.cf_maybe_null:
code.put_xgotref(self.py_result())
else:
code.put_gotref(self.py_result())
if entry.is_local and not Options.init_local_none:
initialized = entry.scope.control_flow.get_state((entry.name, 'initialized'), self.pos)
if initialized is True:
if entry.is_cglobal:
code.put_decref(self.result(), self.ctype())
elif initialized is None:
else:
if not self.cf_is_null:
if self.cf_maybe_null:
code.put_xdecref(self.result(), self.ctype())
else:
code.put_decref(self.result(), self.ctype())
......@@ -1704,18 +1707,22 @@ class NameNode(AtomicExprNode):
return # There was an error earlier
elif self.entry.is_pyclass_attr:
namespace = self.entry.scope.namespace_cname
interned_cname = code.intern_identifier(self.entry.name)
code.put_error_if_neg(self.pos,
'PyMapping_DelItemString(%s, "%s")' % (
'PyMapping_DelItem(%s, %s)' % (
namespace,
self.entry.name))
interned_cname))
elif self.entry.is_pyglobal:
code.put_error_if_neg(self.pos,
'__Pyx_DelAttrString(%s, "%s")' % (
Naming.module_cname,
self.entry.name))
elif self.entry.type.is_pyobject:
# Fake it until we can do it for real...
self.generate_assignment_code(NoneNode(self.pos), code)
if not self.cf_is_null:
if self.cf_maybe_null:
code.put_error_if_unbound(self.pos, self.entry)
code.put_decref(self.result(), self.ctype())
code.putln('%s = NULL;' % self.result())
else:
error(self.pos, "Deletion of C names not supported")
......@@ -3210,8 +3217,9 @@ class SimpleCallNode(CallNode):
self_arg = func_type.args[0]
if self_arg.not_none: # C methods must do the None test for self at *call* time
self.self = self.self.as_none_safe_node(
"'NoneType' object has no attribute '%s'" % self.function.entry.name,
'PyExc_AttributeError')
"'NoneType' object has no attribute '%s'",
error = 'PyExc_AttributeError',
format_args = [self.function.entry.name])
expected_type = self_arg.type
self.coerced_self = CloneNode(self.self).coerce_to(
expected_type, env)
......@@ -4642,8 +4650,6 @@ class ScopedExprNode(ExprNode):
generate_inner_evaluation_code(code)
code.putln('} /* exit inner scope */')
return
for entry in py_entries:
code.put_init_var_to_py_none(entry)
# must free all local Python references at each exit point
old_loop_labels = tuple(code.new_loop_labels())
......@@ -4889,11 +4895,13 @@ class DictNode(ExprNode):
# Dictionary constructor.
#
# key_value_pairs [DictItemNode]
# exclude_null_values [boolean] Do not add NULL values to dict
#
# obj_conversion_errors [PyrexError] used internally
subexprs = ['key_value_pairs']
is_temp = 1
exclude_null_values = False
type = dict_type
obj_conversion_errors = []
......@@ -4981,11 +4989,15 @@ class DictNode(ExprNode):
for item in self.key_value_pairs:
item.generate_evaluation_code(code)
if self.type.is_pyobject:
if self.exclude_null_values:
code.putln('if (%s) {' % item.value.py_result())
code.put_error_if_neg(self.pos,
"PyDict_SetItem(%s, %s, %s)" % (
self.result(),
item.key.py_result(),
item.value.py_result()))
if self.exclude_null_values:
code.putln('}')
else:
code.putln("%s.%s = %s;" % (
self.result(),
......@@ -5908,8 +5920,8 @@ class TypecastNode(ExprNode):
self.type = self.operand.type
def is_simple(self):
# either temp or a C cast => no side effects
return True
# either temp or a C cast => no side effects other than the operand's
return self.operand.is_simple()
def nonlocally_immutable(self):
return self.operand.nonlocally_immutable()
......@@ -7139,7 +7151,9 @@ class CmpNode(object):
contains_utility_code = UtilityCode(
proto="""
static CYTHON_INLINE long __Pyx_NegateNonNeg(long b) { return unlikely(b < 0) ? b : !b; }
static CYTHON_INLINE int __Pyx_NegateNonNeg(int b) {
return unlikely(b < 0) ? b : !b;
}
static CYTHON_INLINE PyObject* __Pyx_PyBoolOrNull_FromLong(long b) {
return unlikely(b < 0) ? NULL : __Pyx_PyBool_FromLong(b);
}
......@@ -7264,7 +7278,7 @@ static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int eq
else
return (PyBytes_AS_STRING(s1)[0] != PyBytes_AS_STRING(s2)[0]);
} else {
int result = memcmp(PyBytes_AS_STRING(s1), PyBytes_AS_STRING(s2), PyBytes_GET_SIZE(s1));
int result = memcmp(PyBytes_AS_STRING(s1), PyBytes_AS_STRING(s2), (size_t)PyBytes_GET_SIZE(s1));
return (equals == Py_EQ) ? (result == 0) : (result != 0);
}
} else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) {
......@@ -7711,12 +7725,14 @@ class NoneCheckNode(CoercionNode):
# raises an appropriate exception (as specified by the creating
# transform).
def __init__(self, arg, exception_type_cname, exception_message):
def __init__(self, arg, exception_type_cname, exception_message,
exception_format_args):
CoercionNode.__init__(self, arg)
self.type = arg.type
self.result_ctype = arg.ctype()
self.exception_type_cname = exception_type_cname
self.exception_message = exception_message
self.exception_format_args = tuple(exception_format_args or ())
def analyse_types(self, env):
pass
......@@ -7736,10 +7752,19 @@ class NoneCheckNode(CoercionNode):
def generate_result_code(self, code):
code.putln(
"if (unlikely(%s == Py_None)) {" % self.arg.py_result())
code.putln('PyErr_SetString(%s, "%s"); %s ' % (
escape = StringEncoding.escape_byte_string
if self.exception_format_args:
code.putln('PyErr_Format(%s, "%s", %s); %s ' % (
self.exception_type_cname,
StringEncoding.escape_byte_string(
self.exception_message.encode('UTF-8')),
', '.join([ '"%s"' % escape(str(arg).encode('UTF-8'))
for arg in self.exception_format_args ]),
code.error_goto(self.pos)))
else:
code.putln('PyErr_SetString(%s, "%s"); %s ' % (
self.exception_type_cname,
escape(self.exception_message.encode('UTF-8')),
code.error_goto(self.pos)))
code.putln("}")
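With format arguments present, the node now emits PyErr_Format instead of PyErr_SetString. A sketch of the generated line for hypothetical argument values (error_goto() output simplified):

exception_type_cname = 'PyExc_AttributeError'
exception_message = "'NoneType' object has no attribute '%s'"
format_args = ('append',)
print('PyErr_Format(%s, "%s", %s); %s ' % (
    exception_type_cname, exception_message,
    ', '.join(['"%s"' % arg for arg in format_args]),
    'goto __pyx_L1_error;'))
# -> PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%s'", "append"); goto __pyx_L1_error;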
......@@ -8414,19 +8439,34 @@ cpp_exception_utility_code = UtilityCode(
proto = """
#ifndef __Pyx_CppExn2PyErr
static void __Pyx_CppExn2PyErr() {
// Catch a handful of different errors here and turn them into the
// equivalent Python errors.
try {
if (PyErr_Occurred())
; // let the latest Python exn pass through and ignore the current one
else
throw;
} catch (const std::bad_alloc& exn) {
PyErr_SetString(PyExc_MemoryError, exn.what());
} catch (const std::bad_cast& exn) {
PyErr_SetString(PyExc_TypeError, exn.what());
} catch (const std::domain_error& exn) {
PyErr_SetString(PyExc_ValueError, exn.what());
} catch (const std::invalid_argument& exn) {
// Catch a handful of different errors here and turn them into the
// equivalent Python errors.
// Change invalid_argument to ValueError
PyErr_SetString(PyExc_ValueError, exn.what());
} catch (const std::ios_base::failure& exn) {
// Unfortunately, in standard C++ we have no way of distinguishing EOF
// from other errors here; be careful with the exception mask
PyErr_SetString(PyExc_IOError, exn.what());
} catch (const std::out_of_range& exn) {
// Change out_of_range to IndexError
PyErr_SetString(PyExc_IndexError, exn.what());
} catch (const std::overflow_error& exn) {
PyErr_SetString(PyExc_OverflowError, exn.what());
} catch (const std::range_error& exn) {
PyErr_SetString(PyExc_ArithmeticError, exn.what());
} catch (const std::underflow_error& exn) {
PyErr_SetString(PyExc_ArithmeticError, exn.what());
} catch (const std::exception& exn) {
PyErr_SetString(PyExc_RuntimeError, exn.what());
}
......@@ -8491,6 +8531,26 @@ static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) {
}
''')
raise_unbound_local_error_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname);
""",
impl = """
static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname) {
PyErr_Format(PyExc_UnboundLocalError, "local variable '%s' referenced before assignment", varname);
}
""")
raise_closure_name_error_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE void __Pyx_RaiseClosureNameError(const char *varname);
""",
impl = """
static CYTHON_INLINE void __Pyx_RaiseClosureNameError(const char *varname) {
PyErr_Format(PyExc_NameError, "free variable '%s' referenced before assignment in enclosing scope", varname);
}
""")
#------------------------------------------------------------------------------------
getitem_dict_utility_code = UtilityCode(
......@@ -8684,11 +8744,7 @@ static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected);
impl = '''
static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) {
PyErr_Format(PyExc_ValueError,
#if PY_VERSION_HEX < 0x02050000
"too many values to unpack (expected %d)", (int)expected);
#else
"too many values to unpack (expected %zd)", expected);
#endif
"too many values to unpack (expected %"PY_FORMAT_SIZE_T"d)", expected);
}
''')
......@@ -8699,12 +8755,8 @@ static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index);
impl = '''
static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) {
PyErr_Format(PyExc_ValueError,
#if PY_VERSION_HEX < 0x02050000
"need more than %d value%s to unpack", (int)index,
#else
"need more than %zd value%s to unpack", index,
#endif
(index == 1) ? "" : "s");
"need more than %"PY_FORMAT_SIZE_T"d value%s to unpack",
index, (index == 1) ? "" : "s");
}
''')
......
cimport cython
cdef class ControlBlock:
cdef public set children
cdef public set parents
cdef public set positions
cdef public list stats
cdef public dict gen
cdef public set bounded
cdef public dict input
cdef public dict output
# Big integer bitsets
cdef public object i_input
cdef public object i_output
cdef public object i_gen
cdef public object i_kill
cdef public object i_state
cpdef bint empty(self)
cpdef detach(self)
cpdef add_child(self, block)
cdef class ExitBlock(ControlBlock):
cpdef bint empty(self)
cdef class NameAssignment:
cdef public bint is_arg
cdef public object lhs
cdef public object rhs
cdef public object entry
cdef public object pos
cdef public set refs
cdef public object bit
cdef class AssignmentList:
cdef public object bit
cdef public object mask
cdef public list stats
cdef class ControlFlow:
cdef public set blocks
cdef public set entries
cdef public list loops
cdef public list exceptions
cdef public ControlBlock entry_point
cdef public ExitBlock exit_point
cdef public ControlBlock block
cdef public dict assmts
cpdef newblock(self, parent=*)
cpdef nextblock(self, parent=*)
cpdef bint is_tracked(self, entry)
cpdef mark_position(self, node)
cpdef mark_assignment(self, lhs, rhs, entry=*)
cpdef mark_argument(self, lhs, rhs, entry)
cpdef mark_deletion(self, node, entry)
cpdef mark_reference(self, node, entry)
cpdef normalize(self)
@cython.locals(offset=object, assmts=AssignmentList,
block=ControlBlock)
cpdef initialize(self)
@cython.locals(assmts=AssignmentList, assmt=NameAssignment)
cpdef set map_one(self, istate, entry)
@cython.locals(block=ControlBlock, parent=ControlBlock)
cdef reaching_definitions(self)
cdef class Uninitialized:
pass
@cython.locals(dirty=bint, block=ControlBlock, parent=ControlBlock)
cdef check_definitions(ControlFlow flow, dict compiler_directives)
import cython
cython.declare(PyrexTypes=object, Naming=object, ExprNodes=object, Nodes=object,
Options=object, UtilNodes=object, ModuleNode=object,
LetNode=object, LetRefNode=object, TreeFragment=object,
TemplateTransform=object, EncodedString=object,
error=object, warning=object, copy=object)
import Builtin
import ExprNodes
import Nodes
from PyrexTypes import py_object_type, unspecified_type
from Visitor import TreeVisitor, CythonTransform
from Errors import error, warning, CompileError, InternalError
from cython import set
class TypedExprNode(ExprNodes.ExprNode):
# Used for declaring assignments of a specified type without a known entry.
def __init__(self, type):
self.type = type
object_expr = TypedExprNode(py_object_type)
class ControlBlock(object):
"""Control flow graph node. Sequence of assignments and name references.
children set of children nodes
parents set of parent nodes
positions set of position markers
stats list of block statements
gen dict of assignments generated by this block
bounded set of entries that are definitely bound in this block
Example:
a = 1
b = a + c # 'c' is already bound here, or an exception is raised
stats = [Assignment(a), NameReference(a), NameReference(c),
Assignment(b)]
gen = {Entry(a): Assignment(a), Entry(b): Assignment(b)}
bounded = set([Entry(a), Entry(c)])
"""
def __init__(self):
self.children = set()
self.parents = set()
self.positions = set()
self.stats = []
self.gen = {}
self.bounded = set()
self.i_input = 0
self.i_output = 0
self.i_gen = 0
self.i_kill = 0
self.i_state = 0
def empty(self):
return (not self.stats and not self.positions)
def detach(self):
"""Detach block from parents and children."""
for child in self.children:
child.parents.remove(self)
for parent in self.parents:
parent.children.remove(self)
self.parents.clear()
self.children.clear()
def add_child(self, block):
self.children.add(block)
block.parents.add(self)
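A minimal usage sketch for the block primitives above (hypothetical, not part of the commit):

entry = ControlBlock()
body = ControlBlock()
entry.add_child(body)                  # links both directions
assert body in entry.children and entry in body.parents
body.detach()                          # unlink from parents and children
assert not entry.children and not body.parents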
class ExitBlock(ControlBlock):
"""Non-empty exit point block."""
def empty(self):
return False
class AssignmentList:
def __init__(self):
self.stats = []
class ControlFlow(object):
"""Control-flow graph.
entry_point ControlBlock entry point for this graph
exit_point ControlBlock normal exit point
block ControlBlock current block
blocks set children nodes
entries set tracked entries
loops list stack for loop descriptors
exceptions list stack for exception descriptors
"""
def __init__(self):
self.blocks = set()
self.entries = set()
self.loops = []
self.exceptions = []
self.entry_point = ControlBlock()
self.exit_point = ExitBlock()
self.blocks.add(self.exit_point)
self.block = self.entry_point
def newblock(self, parent=None):
"""Create floating block linked to `parent` if given.
NOTE: Block is NOT added to self.blocks
"""
block = ControlBlock()
self.blocks.add(block)
if parent:
parent.add_child(block)
return block
def nextblock(self, parent=None):
"""Create child block linked to current block or `parent` if given.
NOTE: Block is added to self.blocks
"""
block = ControlBlock()
self.blocks.add(block)
if parent:
parent.add_child(block)
elif self.block:
self.block.add_child(block)
self.block = block
return self.block
def is_tracked(self, entry):
if entry.is_anonymous:
return False
if entry.type.is_array or entry.type.is_struct_or_union:
return False
return (entry.is_local or entry.is_pyclass_attr or entry.is_arg or
entry.from_closure or entry.in_closure)
def mark_position(self, node):
"""Mark position, will be used to draw graph nodes."""
if self.block:
self.block.positions.add(node.pos[:2])
def mark_assignment(self, lhs, rhs, entry=None):
if self.block:
if entry is None:
entry = lhs.entry
if not self.is_tracked(entry):
return
assignment = NameAssignment(lhs, rhs, entry)
self.block.stats.append(assignment)
self.block.gen[entry] = assignment
self.entries.add(entry)
def mark_argument(self, lhs, rhs, entry):
if self.block and self.is_tracked(entry):
assignment = Argument(lhs, rhs, entry)
self.block.stats.append(assignment)
self.block.gen[entry] = assignment
self.entries.add(entry)
def mark_deletion(self, node, entry):
if self.block and self.is_tracked(entry):
assignment = NameAssignment(node, None, entry)
self.block.stats.append(assignment)
self.block.gen[entry] = Uninitialized
self.entries.add(entry)
def mark_reference(self, node, entry):
if self.block and self.is_tracked(entry):
self.block.stats.append(NameReference(node, entry))
# Local variable is definitely bound after this reference
if not node.allow_null:
self.block.bounded.add(entry)
self.entries.add(entry)
def normalize(self):
"""Delete unreachable and orphan blocks."""
queue = set([self.entry_point])
visited = set()
while queue:
root = queue.pop()
visited.add(root)
for child in root.children:
if child not in visited:
queue.add(child)
unreachable = self.blocks - visited
for block in unreachable:
block.detach()
visited.remove(self.entry_point)
for block in visited:
if block.empty():
for parent in block.parents: # Re-parent
for child in block.children:
parent.add_child(child)
block.detach()
unreachable.add(block)
self.blocks -= unreachable
def initialize(self):
"""Set initial state, map assignments to bits."""
self.assmts = {}
offset = 0
for entry in self.entries:
assmts = AssignmentList()
assmts.bit = 1 << offset
assmts.mask = assmts.bit
self.assmts[entry] = assmts
offset += 1
for block in self.blocks:
for stat in block.stats:
if isinstance(stat, NameAssignment):
stat.bit = 1 << offset
assmts = self.assmts[stat.entry]
assmts.stats.append(stat)
assmts.mask |= stat.bit
offset += 1
for block in self.blocks:
for entry, stat in block.gen.items():
assmts = self.assmts[entry]
if stat is Uninitialized:
block.i_gen |= assmts.bit
else:
block.i_gen |= stat.bit
block.i_kill |= assmts.mask
block.i_output = block.i_gen
for entry in block.bounded:
block.i_kill |= self.assmts[entry].bit
for assmts in self.assmts.itervalues():
self.entry_point.i_gen |= assmts.bit
self.entry_point.i_output = self.entry_point.i_gen
def map_one(self, istate, entry):
ret = set()
assmts = self.assmts[entry]
if istate & assmts.bit:
ret.add(Uninitialized)
for assmt in assmts.stats:
if istate & assmt.bit:
ret.add(assmt)
return ret
def reaching_definitions(self):
"""Per-block reaching definitions analysis."""
dirty = True
while dirty:
dirty = False
for block in self.blocks:
i_input = 0
for parent in block.parents:
i_input |= parent.i_output
i_output = (i_input & ~block.i_kill) | block.i_gen
if i_output != block.i_output:
dirty = True
block.i_input = i_input
block.i_output = i_output
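A worked example of the transfer function above with hand-picked bit values (illustrative only):

# one tracked entry: bit 0b001 marks "possibly uninitialized",
# bits 0b010 and 0b100 mark its two assignments (assmts.mask == 0b111)
i_input = 0b001 | 0b100      # the uninitialized state and the second assignment reach us
i_kill  = 0b111              # this block assigns the entry: kill everything...
i_gen   = 0b010              # ...and generate its own assignment bit
i_output = (i_input & ~i_kill) | i_gen
assert i_output == 0b010     # only this block's assignment leaves the block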
class LoopDescr(object):
def __init__(self, next_block, loop_block):
self.next_block = next_block
self.loop_block = loop_block
self.exceptions = []
class ExceptionDescr(object):
"""Exception handling helper.
entry_point ControlBlock Exception handling entry point
finally_enter ControlBlock Normal finally clause entry point
finally_exit ControlBlock Normal finally clause exit point
"""
def __init__(self, entry_point, finally_enter=None, finally_exit=None):
self.entry_point = entry_point
self.finally_enter = finally_enter
self.finally_exit = finally_exit
class NameAssignment(object):
def __init__(self, lhs, rhs, entry):
if lhs.cf_state is None:
lhs.cf_state = set()
self.lhs = lhs
self.rhs = rhs
self.entry = entry
self.pos = lhs.pos
self.refs = set()
self.is_arg = False
def __repr__(self):
return '%s(entry=%r)' % (self.__class__.__name__, self.entry)
class Argument(NameAssignment):
def __init__(self, lhs, rhs, entry):
NameAssignment.__init__(self, lhs, rhs, entry)
self.is_arg = True
class Uninitialized(object):
pass
class NameReference(object):
def __init__(self, node, entry):
if node.cf_state is None:
node.cf_state = set()
self.node = node
self.entry = entry
self.pos = node.pos
def __repr__(self):
return '%s(entry=%r)' % (self.__class__.__name__, self.entry)
class GVContext(object):
"""Graphviz subgraph object."""
def __init__(self):
self.blockids = {}
self.nextid = 0
self.children = []
self.sources = {}
def add(self, child):
self.children.append(child)
def nodeid(self, block):
if block not in self.blockids:
self.blockids[block] = 'block%d' % self.nextid
self.nextid += 1
return self.blockids[block]
def extract_sources(self, block):
if not block.positions:
return ''
start = min(block.positions)
stop = max(block.positions)
srcdescr = start[0]
if not srcdescr in self.sources:
self.sources[srcdescr] = list(srcdescr.get_lines())
lines = self.sources[srcdescr]
return '\\n'.join([l.strip() for l in lines[start[1] - 1:stop[1]]])
def render(self, fp, name, annotate_defs=False):
"""Render graphviz dot graph"""
fp.write('digraph %s {\n' % name)
fp.write(' node [shape=box];\n')
for child in self.children:
child.render(fp, self, annotate_defs)
fp.write('}\n')
def escape(self, text):
return text.replace('"', '\\"').replace('\n', '\\n')
class GV(object):
"""Graphviz DOT renderer."""
def __init__(self, name, flow):
self.name = name
self.flow = flow
def render(self, fp, ctx, annotate_defs=False):
fp.write(' subgraph %s {\n' % self.name)
for block in self.flow.blocks:
label = ctx.extract_sources(block)
if annotate_defs:
for stat in block.stats:
if isinstance(stat, NameAssignment):
label += '\n %s [definition]' % stat.entry.name
elif isinstance(stat, NameReference):
if stat.entry:
label += '\n %s [reference]' % stat.entry.name
if not label:
label = 'empty'
pid = ctx.nodeid(block)
fp.write(' %s [label="%s"];\n' % (pid, ctx.escape(label)))
for block in self.flow.blocks:
pid = ctx.nodeid(block)
for child in block.children:
fp.write(' %s -> %s;\n' % (pid, ctx.nodeid(child)))
fp.write(' }\n')
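Hypothetical usage of the two renderer classes, mirroring visit_ModuleNode further down; it assumes an existing ControlFlow instance named flow:

gv_ctx = GVContext()
gv_ctx.add(GV('my_func', flow))
fp = open('flow.dot', 'wt')
try:
    gv_ctx.render(fp, 'module', annotate_defs=True)   # writes "digraph module { ... }"
finally:
    fp.close()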
class MessageCollection:
"""Collect error/warning messages first, then sort them"""
def __init__(self):
self.messages = []
def error(self, pos, message):
self.messages.append((pos, True, message))
def warning(self, pos, message):
self.messages.append((pos, False, message))
def report(self):
self.messages.sort()
for pos, is_error, message in self.messages:
if is_error:
error(pos, message)
else:
warning(pos, message, 2)
def check_definitions(flow, compiler_directives):
flow.initialize()
flow.reaching_definitions()
# Track down state
assignments = set()
# Node to entry map
references = {}
assmt_nodes = set()
for block in flow.blocks:
i_state = block.i_input
for stat in block.stats:
i_assmts = flow.assmts[stat.entry]
state = flow.map_one(i_state, stat.entry)
if isinstance(stat, NameAssignment):
stat.lhs.cf_state.update(state)
assmt_nodes.add(stat.lhs)
i_state = i_state & ~i_assmts.mask
if stat.rhs:
i_state |= stat.bit
else:
i_state |= i_assmts.bit
assignments.add(stat)
stat.entry.cf_assignments.append(stat)
elif isinstance(stat, NameReference):
references[stat.node] = stat.entry
stat.entry.cf_references.append(stat)
stat.node.cf_state.update(state)
if not stat.node.allow_null:
i_state &= ~i_assmts.bit
state.discard(Uninitialized)
for assmt in state:
assmt.refs.add(stat)
# Check variable usage
warn_maybe_uninitialized = compiler_directives['warn.maybe_uninitialized']
warn_unused_result = compiler_directives['warn.unused_result']
warn_unused = compiler_directives['warn.unused']
warn_unused_arg = compiler_directives['warn.unused_arg']
messages = MessageCollection()
# assignment hints
for node in assmt_nodes:
if Uninitialized in node.cf_state:
node.cf_maybe_null = True
if len(node.cf_state) == 1:
node.cf_is_null = True
else:
node.cf_is_null = False
else:
node.cf_is_null = False
node.cf_maybe_null = False
# Find uninitialized references and cf-hints
for node, entry in references.iteritems():
if Uninitialized in node.cf_state:
node.cf_maybe_null = True
if not entry.from_closure and len(node.cf_state) == 1:
node.cf_is_null = True
if node.allow_null or entry.from_closure:
pass # Can be uninitialized here
elif node.cf_is_null:
if entry.type.is_pyobject or entry.type.is_unspecified:
messages.error(
node.pos,
"local variable '%s' referenced before assignment"
% entry.name)
else:
messages.warning(
node.pos,
"local variable '%s' referenced before assignment"
% entry.name)
elif warn_maybe_uninitialized:
messages.warning(
node.pos,
"local variable '%s' might be referenced before assignment"
% entry.name)
else:
node.cf_is_null = False
node.cf_maybe_null = False
# Unused result
for assmt in assignments:
if not assmt.refs and not assmt.entry.is_pyclass_attr \
and not assmt.entry.in_closure:
if assmt.entry.cf_references and warn_unused_result:
if assmt.is_arg:
messages.warning(assmt.pos, "Unused argument value '%s'" % assmt.entry.name)
else:
messages.warning(assmt.pos, "Unused result in '%s'" % assmt.entry.name)
assmt.lhs.cf_used = False
# Unused entries
for entry in flow.entries:
if not entry.cf_references and not entry.is_pyclass_attr and not entry.in_closure:
# TODO: starred args entries are not marked with is_arg flag
for assmt in entry.cf_assignments:
if assmt.is_arg:
is_arg = True
break
else:
is_arg = False
if is_arg:
if warn_unused_arg:
messages.warning(entry.pos, "Unused argument '%s'" % entry.name)
# TODO: handle unused arguments
entry.cf_used = True
else:
if warn_unused:
messages.warning(entry.pos, "Unused entry '%s'" % entry.name)
entry.cf_used = False
messages.report()
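A condensed sketch of how the reference loop above classifies each name reference (the helper is hypothetical; Uninitialized is this module's marker class):

def classify_reference(cf_state, from_closure=False):
    maybe_null = Uninitialized in cf_state
    is_null = maybe_null and not from_closure and len(cf_state) == 1
    # is_null    -> error "referenced before assignment" (for Python objects)
    # maybe_null -> warning when warn.maybe_uninitialized is enabled
    return is_null, maybe_null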
class AssignmentCollector(TreeVisitor):
def __init__(self):
super(AssignmentCollector, self).__init__()
self.assignments = []
def visit_Node(self, node):
self.visitchildren(node)
def visit_SingleAssignmentNode(self, node):
self.assignments.append((node.lhs, node.rhs))
def visit_CascadedAssignmentNode(self, node):
for lhs in node.lhs_list:
self.assignments.append((lhs, node.rhs))
class CreateControlFlowGraph(CythonTransform):
"""Create NameNode use and assignment graph."""
def visit_ModuleNode(self, node):
self.gv_ctx = GVContext()
self.env_stack = []
self.env = node.scope
self.stack = []
self.flow = ControlFlow()
self.visitchildren(node)
dot_output = self.current_directives['control_flow.dot_output']
if dot_output:
annotate_defs = self.current_directives['control_flow.dot_annotate_defs']
fp = open(dot_output, 'wt')
try:
self.gv_ctx.render(fp, 'module', annotate_defs=annotate_defs)
finally:
fp.close()
return node
def visit_FuncDefNode(self, node):
self.env_stack.append(self.env)
self.env = node.local_scope
self.stack.append(self.flow)
self.flow = ControlFlow()
# Collect all entries
for entry in node.local_scope.entries.values():
if self.flow.is_tracked(entry):
self.flow.entries.add(entry)
self.mark_position(node)
# Function body block
self.flow.nextblock()
if node.star_arg:
self.flow.mark_argument(node.star_arg,
TypedExprNode(Builtin.tuple_type),
node.star_arg.entry)
if node.starstar_arg:
self.flow.mark_argument(node.starstar_arg,
TypedExprNode(Builtin.dict_type),
node.starstar_arg.entry)
self.visitchildren(node)
# Workaround for generators
if node.is_generator:
self.visit(node.gbody.body)
# Exit point
if self.flow.block:
self.flow.block.add_child(self.flow.exit_point)
# Cleanup graph
self.flow.normalize()
check_definitions(self.flow, self.current_directives)
self.flow.blocks.add(self.flow.entry_point)
self.gv_ctx.add(GV(node.local_scope.name, self.flow))
self.flow = self.stack.pop()
self.env = self.env_stack.pop()
return node
def visit_DefNode(self, node):
## XXX: no target name node here
node.used = True
self.flow.mark_assignment(node, object_expr, self.env.lookup(node.name))
return self.visit_FuncDefNode(node)
def visit_GeneratorBodyDefNode(self, node):
return node
def visit_CTypeDefNode(self, node):
return node
def mark_assignment(self, lhs, rhs=None):
if not self.flow.block:
return
if self.flow.exceptions:
exc_descr = self.flow.exceptions[-1]
self.flow.block.add_child(exc_descr.entry_point)
self.flow.nextblock()
if isinstance(lhs, (ExprNodes.AttributeNode, ExprNodes.IndexNode)):
self.visit(lhs)
return
if not rhs:
rhs = object_expr
if lhs.is_name:
if lhs.entry is None:
# TODO: This shouldn't happen...
return
self.flow.mark_assignment(lhs, rhs)
elif isinstance(lhs, ExprNodes.SequenceNode):
for arg in lhs.args:
self.mark_assignment(arg)
else:
# Could use this info to infer cdef class attributes...
pass
if self.flow.exceptions:
exc_descr = self.flow.exceptions[-1]
self.flow.block.add_child(exc_descr.entry_point)
self.flow.nextblock()
def mark_position(self, node):
"""Mark position if DOT output is enabled."""
if self.current_directives['control_flow.dot_output']:
self.flow.mark_position(node)
def visit_FromImportStatNode(self, node):
for name, target in node.items:
if name != "*":
self.mark_assignment(target)
self.visitchildren(node)
return node
def visit_AssignmentNode(self, node):
raise InternalError, "Unhandled assignment node"
def visit_SingleAssignmentNode(self, node):
self.visit(node.rhs)
self.mark_assignment(node.lhs, node.rhs)
return node
def visit_CascadedAssignmentNode(self, node):
self.visit(node.rhs)
for lhs in node.lhs_list:
self.mark_assignment(lhs, node.rhs)
return node
def visit_ParallelAssignmentNode(self, node):
collector = AssignmentCollector()
collector.visitchildren(node)
for lhs, rhs in collector.assignments:
self.visit(rhs)
for lhs, rhs in collector.assignments:
self.mark_assignment(lhs, rhs)
return node
def visit_InPlaceAssignmentNode(self, node):
self.visitchildren(node)
self.mark_assignment(node.lhs, node.create_binop_node())
return node
def visit_DelStatNode(self, node):
for arg in node.args:
if arg.is_name:
entry = arg.entry or self.env.lookup(arg.name)
if entry.in_closure or entry.from_closure:
error(arg.pos, "can not delete variable '%s' referenced in nested scope" % entry.name)
# Mark reference
self.visit(arg)
self.flow.mark_deletion(arg, entry)
return node
def visit_CArgDeclNode(self, node):
entry = self.env.lookup(node.name)
if entry:
self.flow.mark_argument(node, TypedExprNode(entry.type), entry)
return node
def visit_NameNode(self, node):
if self.flow.block:
entry = node.entry or self.env.lookup(node.name)
if entry:
self.flow.mark_reference(node, entry)
return node
def visit_StatListNode(self, node):
if self.flow.block:
for stat in node.stats:
self.visit(stat)
if not self.flow.block:
stat.is_terminator = True
break
return node
def visit_Node(self, node):
self.visitchildren(node)
self.mark_position(node)
return node
def visit_IfStatNode(self, node):
next_block = self.flow.newblock()
parent = self.flow.block
# If clauses
for clause in node.if_clauses:
parent = self.flow.nextblock(parent)
self.visit(clause.condition)
self.flow.nextblock()
self.visit(clause.body)
if self.flow.block:
self.flow.block.add_child(next_block)
# Else clause
if node.else_clause:
self.flow.nextblock(parent=parent)
self.visit(node.else_clause)
if self.flow.block:
self.flow.block.add_child(next_block)
else:
parent.add_child(next_block)
if next_block.parents:
self.flow.block = next_block
else:
self.flow.block = None
return node
def visit_WhileStatNode(self, node):
condition_block = self.flow.nextblock()
next_block = self.flow.newblock()
# Condition block
self.flow.loops.append(LoopDescr(next_block, condition_block))
self.visit(node.condition)
# Body block
self.flow.nextblock()
self.visit(node.body)
self.flow.loops.pop()
# Loop it
if self.flow.block:
self.flow.block.add_child(condition_block)
self.flow.block.add_child(next_block)
# Else clause
if node.else_clause:
self.flow.nextblock(parent=condition_block)
self.visit(node.else_clause)
if self.flow.block:
self.flow.block.add_child(next_block)
else:
condition_block.add_child(next_block)
if next_block.parents:
self.flow.block = next_block
else:
self.flow.block = None
return node
def visit_ForInStatNode(self, node):
condition_block = self.flow.nextblock()
next_block = self.flow.newblock()
# Condition with iterator
self.flow.loops.append(LoopDescr(next_block, condition_block))
self.visit(node.iterator)
# Target assignment
self.flow.nextblock()
self.mark_assignment(node.target)
# Body block
self.flow.nextblock()
self.visit(node.body)
self.flow.loops.pop()
# Loop it
if self.flow.block:
self.flow.block.add_child(condition_block)
# Else clause
if node.else_clause:
self.flow.nextblock(parent=condition_block)
self.visit(node.else_clause)
if self.flow.block:
self.flow.block.add_child(next_block)
else:
condition_block.add_child(next_block)
if next_block.parents:
self.flow.block = next_block
else:
self.flow.block = None
return node
def visit_ForFromStatNode(self, node):
condition_block = self.flow.nextblock()
next_block = self.flow.newblock()
# Condition with iterator
self.flow.loops.append(LoopDescr(next_block, condition_block))
self.visit(node.bound1)
self.visit(node.bound2)
if node.step:
self.visit(node.step)
# Target assignment
self.flow.nextblock()
self.mark_assignment(node.target)
# Body block
self.flow.nextblock()
self.visit(node.body)
self.flow.loops.pop()
# Loop it
if self.flow.block:
self.flow.block.add_child(condition_block)
# Else clause
if node.else_clause:
self.flow.nextblock(parent=condition_block)
self.visit(node.else_clause)
if self.flow.block:
self.flow.block.add_child(next_block)
else:
condition_block.add_child(next_block)
if next_block.parents:
self.flow.block = next_block
else:
self.flow.block = None
return node
def visit_LoopNode(self, node):
raise InternalError, "Generic loops are not supported"
def visit_WithTargetAssignmentStatNode(self, node):
self.mark_assignment(node.lhs)
return node
def visit_WithStatNode(self, node):
self.visit(node.manager)
self.visit(node.body)
return node
def visit_TryExceptStatNode(self, node):
# After exception handling
next_block = self.flow.newblock()
# Body block
self.flow.newblock()
# Exception entry point
entry_point = self.flow.newblock()
self.flow.exceptions.append(ExceptionDescr(entry_point))
self.flow.nextblock()
## XXX: links to exception handling point should be added by
## XXX: children nodes
self.flow.block.add_child(entry_point)
self.visit(node.body)
self.flow.exceptions.pop()
# After exception
if self.flow.block:
if node.else_clause:
self.flow.nextblock()
self.visit(node.else_clause)
if self.flow.block:
self.flow.block.add_child(next_block)
for clause in node.except_clauses:
self.flow.block = entry_point
if clause.pattern:
for pattern in clause.pattern:
self.visit(pattern)
else:
# TODO: handle * pattern
pass
if clause.target:
self.mark_assignment(clause.target)
entry_point = self.flow.newblock(parent=self.flow.block)
self.flow.nextblock()
self.visit(clause.body)
if self.flow.block:
self.flow.block.add_child(next_block)
if self.flow.exceptions:
entry_point.add_child(self.flow.exceptions[-1].entry_point)
if next_block.parents:
self.flow.block = next_block
else:
self.flow.block = None
return node
def visit_TryFinallyStatNode(self, node):
body_block = self.flow.nextblock()
# Exception entry point
entry_point = self.flow.newblock()
self.flow.block = entry_point
self.visit(node.finally_clause)
if self.flow.block and self.flow.exceptions:
self.flow.block.add_child(self.flow.exceptions[-1].entry_point)
# Normal execution
finally_enter = self.flow.newblock()
self.flow.block = finally_enter
self.visit(node.finally_clause)
finally_exit = self.flow.block
descr = ExceptionDescr(entry_point, finally_enter, finally_exit)
self.flow.exceptions.append(descr)
if self.flow.loops:
self.flow.loops[-1].exceptions.append(descr)
self.flow.block = body_block
## XXX: Is it still required
body_block.add_child(entry_point)
self.visit(node.body)
self.flow.exceptions.pop()
if self.flow.loops:
self.flow.loops[-1].exceptions.pop()
if self.flow.block:
self.flow.block.add_child(finally_enter)
if finally_exit:
self.flow.block = self.flow.nextblock(parent=finally_exit)
else:
self.flow.block = None
return node
def visit_RaiseStatNode(self, node):
self.mark_position(node)
self.visitchildren(node)
if self.flow.exceptions:
self.flow.block.add_child(self.flow.exceptions[-1].entry_point)
self.flow.block = None
return node
def visit_ReraiseStatNode(self, node):
self.mark_position(node)
if self.flow.exceptions:
self.flow.block.add_child(self.flow.exceptions[-1].entry_point)
self.flow.block = None
return node
def visit_ReturnStatNode(self, node):
self.mark_position(node)
self.visitchildren(node)
for exception in self.flow.exceptions[::-1]:
if exception.finally_enter:
self.flow.block.add_child(exception.finally_enter)
if exception.finally_exit:
exception.finally_exit.add_child(self.flow.exit_point)
break
else:
if self.flow.block:
self.flow.block.add_child(self.flow.exit_point)
self.flow.block = None
return node
def visit_BreakStatNode(self, node):
if not self.flow.loops:
#error(node.pos, "break statement not inside loop")
return node
loop = self.flow.loops[-1]
self.mark_position(node)
for exception in loop.exceptions[::-1]:
if exception.finally_enter:
self.flow.block.add_child(exception.finally_enter)
if exception.finally_exit:
exception.finally_exit.add_child(loop.next_block)
break
else:
self.flow.block.add_child(loop.next_block)
self.flow.block = None
return node
def visit_ContinueStatNode(self, node):
if not self.flow.loops:
#error(node.pos, "continue statement not inside loop")
return node
loop = self.flow.loops[-1]
self.mark_position(node)
for exception in loop.exceptions[::-1]:
if exception.finally_enter:
self.flow.block.add_child(exception.finally_enter)
if exception.finally_exit:
exception.finally_exit.add_child(loop.loop_block)
break
else:
self.flow.block.add_child(loop.loop_block)
self.flow.block = None
return node
def visit_ComprehensionNode(self, node):
if node.expr_scope:
self.env_stack.append(self.env)
self.env = node.expr_scope
# Skip append node here
self.visit(node.target)
self.visit(node.loop)
if node.expr_scope:
self.env = self.env_stack.pop()
return node
def visit_ScopedExprNode(self, node):
if node.expr_scope:
self.env_stack.append(self.env)
self.env = node.expr_scope
self.visitchildren(node)
if node.expr_scope:
self.env = self.env_stack.pop()
return node
def visit_PyClassDefNode(self, node):
self.flow.mark_assignment(node.target,
object_expr, self.env.lookup(node.name))
# TODO: add negative attribute list to "visitchildren"?
self.visitchildren(node, attrs=['dict', 'metaclass', 'mkw', 'bases', 'classobj'])
self.env_stack.append(self.env)
self.env = node.scope
self.flow.nextblock()
self.visitchildren(node, attrs=['body'])
self.flow.nextblock()
self.env = self.env_stack.pop()
return node
......@@ -31,6 +31,7 @@ from Cython import Utils
from Cython.Utils import open_new_file, replace_suffix
import CythonScope
import DebugFlags
import Options
module_name_pattern = re.compile(r"[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)*$")
......@@ -73,7 +74,7 @@ class Context(object):
# future_directives [object]
# language_level int currently 2 or 3 for Python 2/3
def __init__(self, include_directories, compiler_directives, cpp=False, language_level=2):
def __init__(self, include_directories, compiler_directives, cpp=False, language_level=2, options=None):
import Builtin, CythonScope
self.modules = {"__builtin__" : Builtin.builtin_scope}
self.modules["cython"] = CythonScope.create_cython_scope(self)
......@@ -81,6 +82,7 @@ class Context(object):
self.future_directives = set()
self.compiler_directives = compiler_directives
self.cpp = cpp
self.options = options
self.pxds = {} # full name -> node tree
......@@ -103,13 +105,15 @@ class Context(object):
def create_pipeline(self, pxd, py=False):
from Visitor import PrintTree
from ParseTreeTransforms import WithTransform, NormalizeTree, PostParse, PxdPostParse
from ParseTreeTransforms import AnalyseDeclarationsTransform, AnalyseExpressionsTransform
from ParseTreeTransforms import ForwardDeclareTypes, AnalyseDeclarationsTransform
from ParseTreeTransforms import AnalyseExpressionsTransform
from ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
from ParseTreeTransforms import InterpretCompilerDirectives, TransformBuiltinMethods
from ParseTreeTransforms import ExpandInplaceOperators, ParallelRangeTransform
from TypeInference import MarkAssignments, MarkOverflowingArithmetic
from ParseTreeTransforms import AdjustDefByDirectives, AlignFunctionDefinitions
from ParseTreeTransforms import RemoveUnreachableCode, GilCheck
from FlowControl import CreateControlFlowGraph
from AnalysedTreeTransforms import AutoTestDictTransform
from AutoDocTransforms import EmbedSignature
from Optimize import FlattenInListTransform, SwitchTransform, IterationTransform
......@@ -145,15 +149,16 @@ class Context(object):
FlattenInListTransform(),
WithTransform(self),
DecoratorTransform(self),
# PrintTree(),
ForwardDeclareTypes(self),
AnalyseDeclarationsTransform(self),
# PrintTree(),
AutoTestDictTransform(self),
EmbedSignature(self),
EarlyReplaceBuiltinCalls(self), ## Necessary?
TransformBuiltinMethods(self), ## Necessary?
CreateControlFlowGraph(self),
RemoveUnreachableCode(self),
MarkAssignments(self),
MarkOverflowingArithmetic(self),
TransformBuiltinMethods(self), ## Necessary?
IntroduceBufferAuxiliaryVars(self),
_check_c_declarations,
AnalyseExpressionsTransform(self),
......@@ -165,7 +170,6 @@ class Context(object):
DropRefcountingTransform(),
FinalOptimizePhase(self),
GilCheck(),
# PrintTree(),
]
def create_pyx_pipeline(self, options, result, py=False):
......@@ -229,8 +233,41 @@ class Context(object):
def create_py_pipeline(self, options, result):
return self.create_pyx_pipeline(options, result, py=True)
def create_pyx_as_pxd_pipeline(self, source):
from ParseTreeTransforms import (AlignFunctionDefinitions,
MarkClosureVisitor, WithTransform, AnalyseDeclarationsTransform)
from Optimize import ConstantFolding, FlattenInListTransform
from Nodes import StatListNode
pipeline = []
result = create_default_resultobj(source, self.options)
pyx_pipeline = self.create_pyx_pipeline(self.options, result)
for stage in pyx_pipeline:
if stage.__class__ in [
AlignFunctionDefinitions,
MarkClosureVisitor,
ConstantFolding,
FlattenInListTransform,
WithTransform,
]:
# Skip these unnecessary stages.
continue
pipeline.append(stage)
if isinstance(stage, AnalyseDeclarationsTransform):
# This is the last stage we need.
break
def fake_pxd(root):
for entry in root.scope.entries.values():
entry.defined_in_pxd = 1
return StatListNode(root.pos, stats=[]), root.scope
pipeline.append(fake_pxd)
return pipeline
def process_pxd(self, source_desc, scope, module_name):
if isinstance(source_desc, FileSourceDescriptor) and source_desc._file_type == 'pyx':
source = CompilationSource(source_desc, module_name, os.getcwd())
pipeline = self.create_pyx_as_pxd_pipeline(source)
result = self.run_pipeline(pipeline, source)
else:
pipeline = self.create_pxd_pipeline(scope, module_name)
result = self.run_pipeline(pipeline, source_desc)
return result
......@@ -363,6 +400,8 @@ class Context(object):
warning(pos, "'%s' is deprecated, use 'libc.%s'" % (name, name), 1)
elif name in ('stl',):
warning(pos, "'%s' is deprecated, use 'libcpp.*.*'" % name, 1)
if pxd is None and Options.cimport_from_pyx:
return self.find_pyx_file(qualified_name, pos)
return pxd
def find_pyx_file(self, qualified_name, pos):
......@@ -570,7 +609,9 @@ def create_parse(context):
source_desc = compsrc.source_desc
full_module_name = compsrc.full_module_name
initial_pos = (source_desc, 1, 0)
saved_cimport_from_pyx, Options.cimport_from_pyx = Options.cimport_from_pyx, False
scope = context.find_module(full_module_name, pos = initial_pos, need_pxd = 0)
Options.cimport_from_pyx = saved_cimport_from_pyx
tree = context.parse(source_desc, scope, pxd = 0, full_module_name = full_module_name)
tree.compilation_source = compsrc
tree.scope = scope
......@@ -684,7 +725,7 @@ class CompilationOptions(object):
def create_context(self):
return Context(self.include_path, self.compiler_directives,
self.cplus, self.language_level)
self.cplus, self.language_level, options=self)
class CompilationResult(object):
......
......@@ -296,7 +296,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
self.generate_cached_builtins_decls(env, code)
self.generate_lambda_definitions(env, code)
# generate normal function definitions
# generate normal variable and function definitions
self.generate_variable_definitions(env, code)
self.body.generate_function_definitions(env, code)
code.mark_pos(None)
self.generate_typeobj_definitions(env, code)
......@@ -416,6 +417,9 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
return (vtab_list, vtabslot_list)
def generate_type_definitions(self, env, modules, vtab_list, vtabslot_list, code):
# TODO: Why are these separated out?
for entry in vtabslot_list:
self.generate_objstruct_predeclaration(entry.type, code)
vtabslot_entries = set(vtabslot_list)
for module in modules:
definition = module is env
......@@ -426,18 +430,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
for entry in module.type_entries:
if entry.defined_in_pxd:
type_entries.append(entry)
for entry in type_entries:
if not entry.in_cinclude:
#print "generate_type_header_code:", entry.name, repr(entry.type) ###
type = entry.type
if type.is_typedef: # Must test this first!
self.generate_typedef(entry, code)
elif type.is_struct_or_union:
self.generate_struct_union_definition(entry, code)
elif type.is_enum:
self.generate_enum_definition(entry, code)
elif type.is_extension_type and entry not in vtabslot_entries:
self.generate_objstruct_definition(type, code)
type_entries = [t for t in type_entries if t not in vtabslot_entries]
self.generate_type_header_code(type_entries, code)
for entry in vtabslot_list:
self.generate_objstruct_definition(entry.type, code)
self.generate_typeobj_predeclaration(entry, code)
......@@ -699,10 +693,12 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln("")
self.generate_extern_c_macro_definition(code)
code.putln("")
code.putln("#if defined(WIN32) || defined(MS_WINDOWS)")
code.putln("#define _USE_MATH_DEFINES")
code.putln("#endif")
code.putln("#include <math.h>")
code.putln("#define %s" % Naming.h_guard_prefix + self.api_name(env))
code.putln("#define %s" % Naming.api_guard_prefix + self.api_name(env))
self.generate_includes(env, cimported_modules, code)
......@@ -711,6 +707,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln("#define CYTHON_WITHOUT_ASSERTIONS")
code.putln("#endif")
code.putln("")
if env.directives['ccomplex']:
code.putln("")
code.putln("#if !defined(CYTHON_CCOMPLEX)")
......@@ -778,17 +775,28 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
def generate_type_header_code(self, type_entries, code):
# Generate definitions of structs/unions/enums/typedefs/objstructs.
#self.generate_gcc33_hack(env, code) # Is this still needed?
#for entry in env.type_entries:
# Forward declarations
for entry in type_entries:
if not entry.in_cinclude:
#print "generate_type_header_code:", entry.name, repr(entry.type) ###
type = entry.type
if type.is_typedef: # Must test this first!
self.generate_typedef(entry, code)
pass
elif type.is_struct_or_union:
self.generate_struct_union_definition(entry, code)
self.generate_struct_union_predeclaration(entry, code)
elif type.is_extension_type:
self.generate_objstruct_predeclaration(type, code)
# Actual declarations
for entry in type_entries:
if not entry.in_cinclude:
#print "generate_type_header_code:", entry.name, repr(entry.type) ###
type = entry.type
if type.is_typedef: # Must test this first!
self.generate_typedef(entry, code)
elif type.is_enum:
self.generate_enum_definition(entry, code)
elif type.is_struct_or_union:
self.generate_struct_union_definition(entry, code)
elif type.is_extension_type:
self.generate_objstruct_definition(type, code)
......@@ -818,11 +826,19 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
writer.mark_pos(entry.pos)
writer.putln("typedef %s;" % base_type.declaration_code(entry.cname))
def sue_header_footer(self, type, kind, name):
def sue_predeclaration(self, type, kind, name):
if type.typedef_flag:
header = "typedef %s {" % kind
footer = "} %s;" % name
return "%s %s;\ntypedef %s %s %s;" % (
kind, name,
kind, name, name)
else:
return "%s %s;" % (kind, name)
def generate_struct_union_predeclaration(self, entry, code):
type = entry.type
code.putln(self.sue_predeclaration(type, type.kind, type.cname))
def sue_header_footer(self, type, kind, name):
header = "%s %s {" % (kind, name)
footer = "};"
return header, footer
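# A standalone sketch of what the two helpers above produce (the type names
# "Foo" and "bar" are invented for illustration, not taken from generated
# output): sue_predeclaration() emits the forward declaration, optionally with
# a typedef, while sue_header_footer() wraps the later full definition.
def _sue_predeclaration_sketch(typedef_flag, kind, name):
    if typedef_flag:
        return "%s %s;\ntypedef %s %s %s;" % (kind, name, kind, name, name)
    return "%s %s;" % (kind, name)

print(_sue_predeclaration_sketch(True, "struct", "Foo"))
# struct Foo;
# typedef struct Foo Foo;
print(_sue_predeclaration_sketch(False, "union", "bar"))
# union bar;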
......@@ -893,6 +909,9 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
value_code += ","
code.putln(value_code)
code.putln(footer)
if entry.type.typedef_flag:
# Not pre-declared.
code.putln("typedef enum %s %s;" % (name, name))
def generate_typeobj_predeclaration(self, entry, code):
code.putln("")
......@@ -942,6 +961,11 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
type.vtabstruct_cname,
type.vtabptr_cname))
def generate_objstruct_predeclaration(self, type, code):
if not type.scope:
return
code.putln(self.sue_predeclaration(type, "struct", type.objstruct_cname))
def generate_objstruct_definition(self, type, code):
code.mark_pos(type.pos)
# Generate object struct definition for an
......@@ -1001,25 +1025,25 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
storage_class = Naming.extern_c_macro
dll_linkage = "DL_IMPORT"
elif entry.visibility == 'public':
storage_class = Naming.extern_c_macro
if definition:
dll_linkage = "DL_EXPORT"
else:
storage_class = Naming.extern_c_macro
dll_linkage = "DL_IMPORT"
elif entry.visibility == 'private':
storage_class = "static"
dll_linkage = None
if entry.init is not None:
init = entry.type.literal_code(entry.init)
type = entry.type
cname = entry.cname
if entry.defined_in_pxd and not definition:
type = CPtrType(entry.type)
storage_class = "static"
dll_linkage = None
type = CPtrType(type)
cname = env.mangle(Naming.varptr_prefix, entry.name)
init = 0
else:
type = entry.type
cname = entry.cname
if entry.init is not None:
init = type.literal_code(entry.init)
if storage_class:
code.put("%s " % storage_class)
......@@ -1045,7 +1069,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
storage_class = "%s " % Naming.extern_c_macro
dll_linkage = "DL_IMPORT"
elif entry.visibility == 'public':
storage_class = ""
storage_class = "%s " % Naming.extern_c_macro
dll_linkage = "DL_EXPORT"
elif entry.visibility == 'private':
storage_class = "static "
......@@ -1055,7 +1079,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
dll_linkage = None
type = entry.type
if not definition and entry.defined_in_pxd:
if entry.defined_in_pxd and not definition:
storage_class = "static "
dll_linkage = None
type = CPtrType(type)
......@@ -1071,6 +1095,16 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
modifiers,
header))
def generate_variable_definitions(self, env, code):
for entry in env.var_entries:
if (not entry.in_cinclude and
entry.visibility == "public"):
code.put(entry.type.declaration_code(entry.cname))
if entry.init is not None:
init = entry.type.literal_code(entry.init)
code.put_safe(" = %s" % init)
code.putln(";")
def generate_typeobj_definitions(self, env, code):
full_module_name = env.qualified_name
for entry in env.c_class_entries:
......@@ -2088,7 +2122,9 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
# Generate code to create PyCFunction wrappers for exported C functions.
entries = []
for entry in env.var_entries:
if entry.api or entry.defined_in_pxd:
if (entry.api
or entry.defined_in_pxd
or (Options.cimport_from_pyx and not entry.visibility == 'extern')):
entries.append(entry)
if entries:
env.use_utility_code(voidptr_export_utility_code)
......@@ -2102,7 +2138,9 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
# Generate code to create PyCFunction wrappers for exported C functions.
entries = []
for entry in env.cfunc_entries:
if entry.api or entry.defined_in_pxd:
if (entry.api
or entry.defined_in_pxd
or (Options.cimport_from_pyx and not entry.visibility == 'extern')):
entries.append(entry)
if entries:
env.use_utility_code(function_export_utility_code)
......@@ -2424,13 +2462,13 @@ bad:
type_import_utility_code = UtilityCode(
proto = """
static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, long size, int strict); /*proto*/
static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict); /*proto*/
""",
impl = """
#ifndef __PYX_HAVE_RT_ImportType
#define __PYX_HAVE_RT_ImportType
static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name,
long size, int strict)
size_t size, int strict)
{
PyObject *py_module = 0;
PyObject *result = 0;
......@@ -2460,17 +2498,17 @@ static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class
module_name, class_name);
goto bad;
}
if (!strict && ((PyTypeObject *)result)->tp_basicsize > size) {
if (!strict && ((PyTypeObject *)result)->tp_basicsize > (Py_ssize_t)size) {
PyOS_snprintf(warning, sizeof(warning),
"%s.%s size changed, may indicate binary incompatibility",
module_name, class_name);
#if PY_VERSION_HEX < 0x02050000
PyErr_Warn(NULL, warning);
if (PyErr_Warn(NULL, warning) < 0) goto bad;
#else
PyErr_WarnEx(NULL, warning, 0);
if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad;
#endif
}
else if (((PyTypeObject *)result)->tp_basicsize != size) {
else if (((PyTypeObject *)result)->tp_basicsize != (Py_ssize_t)size) {
PyErr_Format(PyExc_ValueError,
"%s.%s has the wrong size, try recompiling",
module_name, class_name);
......@@ -2480,7 +2518,7 @@ static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class
bad:
Py_XDECREF(py_module);
Py_XDECREF(result);
return 0;
return NULL;
}
#endif
""")
......@@ -2759,7 +2797,11 @@ __Pyx_import_all_from(PyObject *locals, PyObject *v)
"from-import-* object has no __dict__ and no __all__");
return -1;
}
#if PY_MAJOR_VERSION < 3
all = PyObject_CallMethod(dict, (char *)"keys", NULL);
#else
all = PyMapping_Keys(dict);
#endif
Py_DECREF(dict);
if (all == NULL)
return -1;
......
......@@ -117,5 +117,7 @@ h_guard_prefix = "__PYX_HAVE__"
api_guard_prefix = "__PYX_HAVE_API__"
api_func_guard = "__PYX_HAVE_API_FUNC_"
PYX_NAN = "__PYX_NAN"
def py_version_hex(major, minor=0, micro=0, release_level=0, release_serial=0):
return (major << 24) | (minor << 16) | (micro << 8) | (release_level << 4) | (release_serial)
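# A quick, illustrative check of the packing above (plain Python; the expected
# values follow CPython's PY_VERSION_HEX layout, which the generated C code
# compares against with constants such as 0x02050000 and 0x02060000):
assert py_version_hex(2, 5) == 0x02050000
assert py_version_hex(2, 6, 1) == 0x02060100
assert py_version_hex(3, 0, 1, 0xA, 2) == 0x030001A2   # 3.0.1a2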
......@@ -26,7 +26,6 @@ from Cython.Utils import open_new_file, replace_suffix
from Code import UtilityCode, ClosureTempAllocator
from StringEncoding import EncodedString, escape_byte_string, split_string_literal
import Options
import ControlFlow
import DebugFlags
from Cython.Compiler import Errors
......@@ -138,6 +137,9 @@ class Node(object):
# can either contain a single node or a list of nodes. See Visitor.py.
child_attrs = None
cf_state = None
def __init__(self, pos, **kw):
self.pos = pos
self.__dict__.update(kw)
......@@ -173,19 +175,15 @@ class Node(object):
#
# There are 4 phases of parse tree processing, applied in order to
# There are 3 phases of parse tree processing, applied in order to
# all the statements in a given scope-block:
#
# (0) analyse_control_flow
# Create the control flow tree into which state can be asserted and
# queried.
#
# (1) analyse_declarations
# (0) analyse_declarations
# Make symbol table entries for all declarations at the current
# level, both explicit (def, cdef, etc.) and implicit (assignment
# to an otherwise undeclared name).
#
# (2) analyse_expressions
# (1) analyse_expressions
# Determine the result types of expressions and fill in the
# 'type' attribute of each ExprNode. Insert coercion nodes into the
# tree where needed to convert to and from Python objects.
......@@ -193,15 +191,12 @@ class Node(object):
# in the 'result_code' attribute of each ExprNode with a C code
# fragment.
#
# (3) generate_code
# (2) generate_code
# Emit C code for all declarations, statements and expressions.
# Recursively applies the 3 processing phases to the bodies of
# functions.
#
def analyse_control_flow(self, env):
pass
def analyse_declarations(self, env):
pass
......@@ -277,12 +272,6 @@ class CompilerDirectivesNode(Node):
# body Node
child_attrs = ["body"]
def analyse_control_flow(self, env):
old = env.directives
env.directives = self.directives
self.body.analyse_control_flow(env)
env.directives = old
def analyse_declarations(self, env):
old = env.directives
env.directives = self.directives
......@@ -338,10 +327,6 @@ class StatListNode(Node):
return node # No node-specific analysis necessary
create_analysed = staticmethod(create_analysed)
def analyse_control_flow(self, env):
for stat in self.stats:
stat.analyse_control_flow(env)
def analyse_declarations(self, env):
#print "StatListNode.analyse_declarations" ###
for stat in self.stats:
......@@ -578,7 +563,10 @@ class CFuncDeclaratorNode(CDeclaratorNode):
exc_val = None
exc_check = 0
if self.exception_check == '+':
env.add_include_file('ios') # for std::ios_base::failure
env.add_include_file('new') # for std::bad_alloc
env.add_include_file('stdexcept')
env.add_include_file('typeinfo') # for std::bad_cast
if return_type.is_pyobject \
and (self.exception_value or self.exception_check) \
and self.exception_check != '+':
......@@ -1070,44 +1058,31 @@ class CStructOrUnionDefNode(StatNode):
child_attrs = ["attributes"]
def analyse_declarations(self, env):
scope = None
if self.visibility == 'extern' and self.packed:
def declare(self, env, scope=None):
if self.visibility == 'extern' and self.packed and not scope:
error(self.pos, "Cannot declare extern struct as 'packed'")
if self.attributes is not None:
scope = StructOrUnionScope(self.name)
self.entry = env.declare_struct_or_union(
self.name, self.kind, scope, self.typedef_flag, self.pos,
self.cname, visibility = self.visibility, api = self.api,
packed = self.packed)
def analyse_declarations(self, env):
scope = None
if self.attributes is not None:
scope = StructOrUnionScope(self.name)
self.declare(env, scope)
if self.attributes is not None:
if self.in_pxd and not env.in_cinclude:
self.entry.defined_in_pxd = 1
for attr in self.attributes:
attr.analyse_declarations(env, scope)
if self.visibility != 'extern':
need_typedef_indirection = False
for attr in scope.var_entries:
type = attr.type
while type.is_array:
type = type.base_type
if type == self.entry.type:
error(attr.pos, "Struct cannot contain itself as a member.")
if self.typedef_flag:
while type.is_ptr:
type = type.base_type
if type == self.entry.type:
need_typedef_indirection = True
if need_typedef_indirection:
# C can't handle typedef structs that refer to themselves.
struct_entry = self.entry
self.entry = env.declare_typedef(
self.name, struct_entry.type, self.pos,
cname = self.cname, visibility='ignore')
struct_entry.type.typedef_flag = False
# FIXME: this might be considered a hack ;-)
struct_entry.cname = struct_entry.type.cname = \
'_' + self.entry.type.typedef_cname
def analyse_expressions(self, env):
pass
......@@ -1127,6 +1102,15 @@ class CppClassNode(CStructOrUnionDefNode):
# base_classes [string]
# templates [string] or None
def declare(self, env):
if self.templates is None:
template_types = None
else:
template_types = [PyrexTypes.TemplatePlaceholderType(template_name) for template_name in self.templates]
self.entry = env.declare_cpp_class(
self.name, None, self.pos,
self.cname, base_classes = [], visibility = self.visibility, templates = template_types)
def analyse_declarations(self, env):
scope = None
if self.attributes is not None:
......@@ -1168,10 +1152,12 @@ class CEnumDefNode(StatNode):
child_attrs = ["items"]
def analyse_declarations(self, env):
def declare(self, env):
self.entry = env.declare_enum(self.name, self.pos,
cname = self.cname, typedef_flag = self.typedef_flag,
visibility = self.visibility, api = self.api)
def analyse_declarations(self, env):
if self.items is not None:
if self.in_pxd and not env.in_cinclude:
self.entry.defined_in_pxd = 1
......@@ -1257,6 +1243,8 @@ class FuncDefNode(StatNode, BlockNode):
# needs_closure boolean Whether or not this function has inner functions/classes/yield
# needs_outer_scope boolean Whether or not this function requires outer scope
# directive_locals { string : NameNode } locals defined by cython.locals(...)
# star_arg PyArgDeclNode or None * argument
# starstar_arg PyArgDeclNode or None ** argument
# has_fused_arguments boolean
# Whether this cdef function has fused parameters. This is needed
......@@ -1271,6 +1259,8 @@ class FuncDefNode(StatNode, BlockNode):
is_generator_body = False
modifiers = []
has_fused_arguments = False
star_arg = None
starstar_arg = None
def analyse_default_values(self, env):
genv = env.global_scope()
......@@ -1495,10 +1485,6 @@ class FuncDefNode(StatNode, BlockNode):
if entry.type.is_pyobject:
if (acquire_gil or entry.assignments) and not entry.in_closure:
code.put_var_incref(entry)
# ----- Initialise local variables
for entry in lenv.var_entries:
if entry.type.is_pyobject and entry.init_to_none and entry.used:
code.put_init_var_to_py_none(entry)
# ----- Initialise local buffer auxiliary variables
for entry in lenv.var_entries + lenv.arg_entries:
if entry.type.is_buffer and entry.buffer_aux.buffer_info_var.used:
......@@ -1560,9 +1546,14 @@ class FuncDefNode(StatNode, BlockNode):
else:
warning(self.entry.pos, "Unraisable exception in function '%s'." \
% self.entry.qualified_name, 0)
format_tuple = (
self.entry.qualified_name,
Naming.clineno_cname,
Naming.lineno_cname,
Naming.filename_cname,
)
code.putln(
'__Pyx_WriteUnraisable("%s");' %
self.entry.qualified_name)
'__Pyx_WriteUnraisable("%s", %s, %s, %s);' % format_tuple)
env.use_utility_code(unraisable_exception_utility_code)
env.use_utility_code(restore_exception_utility_code)
default_retval = self.return_type.default_value
......@@ -1589,10 +1580,6 @@ class FuncDefNode(StatNode, BlockNode):
self.getbuffer_normal_cleanup(code)
# ----- Return cleanup for both error and no-error return
code.put_label(code.return_from_error_cleanup_label)
if not Options.init_local_none:
for entry in lenv.var_entries:
if lenv.control_flow.get_state((entry.name, 'initialized')) is not True:
entry.xdecref_cleanup = 1
for entry in lenv.var_entries:
if entry.type.is_pyobject:
......@@ -2339,8 +2326,6 @@ class DefNode(FuncDefNode):
# lambda_name string the internal name of a lambda 'function'
# decorators [DecoratorNode] list of decorators
# args [CArgDeclNode] formal arguments
# star_arg PyArgDeclNode or None * argument
# starstar_arg PyArgDeclNode or None ** argument
# doc EncodedString or None
# body StatListNode
# return_type_annotation
......@@ -2369,8 +2354,6 @@ class DefNode(FuncDefNode):
entry = None
acquire_gil = 0
self_in_stararg = 0
star_arg = None
starstar_arg = None
doc = None
fused_py_func = False
......@@ -2660,14 +2643,10 @@ class DefNode(FuncDefNode):
for arg in self.args:
if not arg.name:
error(arg.pos, "Missing argument name")
else:
env.control_flow.set_state((), (arg.name, 'source'), 'arg')
env.control_flow.set_state((), (arg.name, 'initialized'), True)
if arg.needs_conversion:
arg.entry = env.declare_var(arg.name, arg.type, arg.pos)
if arg.type.is_pyobject:
arg.entry.init = "0"
arg.entry.init_to_none = 0
else:
arg.entry = self.declare_argument(env, arg)
arg.entry.used = 1
......@@ -2688,10 +2667,8 @@ class DefNode(FuncDefNode):
entry = env.declare_var(arg.name, type, arg.pos)
entry.used = 1
entry.init = "0"
entry.init_to_none = 0
entry.xdecref_cleanup = 1
arg.entry = entry
env.control_flow.set_state((), (arg.name, 'initialized'), True)
def analyse_expressions(self, env):
self.local_scope.directives = env.directives
......@@ -2735,6 +2712,8 @@ class DefNode(FuncDefNode):
if env.is_py_class_scope:
if not self.is_staticmethod and not self.is_classmethod:
rhs.binding = True
else:
rhs.binding = False
self.assmt = SingleAssignmentNode(self.pos,
lhs = ExprNodes.NameNode(self.pos, name = self.name),
......@@ -3793,18 +3772,46 @@ class CClassDefNode(ClassDefNode):
decorators = None
shadow = False
def analyse_declarations(self, env):
#print "CClassDefNode.analyse_declarations:", self.class_name
#print "...visibility =", self.visibility
#print "...module_name =", self.module_name
def buffer_defaults(self, env):
if not hasattr(self, '_buffer_defaults'):
import Buffer
if self.buffer_defaults_node:
buffer_defaults = Buffer.analyse_buffer_options(self.buffer_defaults_pos,
self._buffer_defaults = Buffer.analyse_buffer_options(
self.buffer_defaults_pos,
env, [], self.buffer_defaults_node,
need_complete=False)
else:
buffer_defaults = None
self._buffer_defaults = None
return self._buffer_defaults
def declare(self, env):
if self.module_name and self.visibility != 'extern':
module_path = self.module_name.split(".")
home_scope = env.find_imported_module(module_path, self.pos)
if not home_scope:
return None
else:
home_scope = env
self.entry = home_scope.declare_c_class(
name = self.class_name,
pos = self.pos,
defining = 0,
implementing = 0,
module_name = self.module_name,
base_type = None,
objstruct_cname = self.objstruct_name,
typeobj_cname = self.typeobj_name,
visibility = self.visibility,
typedef_flag = self.typedef_flag,
api = self.api,
buffer_defaults = self.buffer_defaults(env),
shadow = self.shadow)
def analyse_declarations(self, env):
#print "CClassDefNode.analyse_declarations:", self.class_name
#print "...visibility =", self.visibility
#print "...module_name =", self.module_name
if env.in_cinclude and not self.objstruct_name:
error(self.pos, "Object struct name specification required for "
......@@ -3882,7 +3889,7 @@ class CClassDefNode(ClassDefNode):
visibility = self.visibility,
typedef_flag = self.typedef_flag,
api = self.api,
buffer_defaults = buffer_defaults,
buffer_defaults = self.buffer_defaults(env),
shadow = self.shadow)
if self.shadow:
home_scope.lookup(self.class_name).as_variable = self.entry
......@@ -4784,15 +4791,6 @@ class IfStatNode(StatNode):
child_attrs = ["if_clauses", "else_clause"]
def analyse_control_flow(self, env):
env.start_branching(self.pos)
for if_clause in self.if_clauses:
if_clause.analyse_control_flow(env)
env.next_branch(if_clause.end_pos())
if self.else_clause:
self.else_clause.analyse_control_flow(env)
env.finish_branching(self.end_pos())
def analyse_declarations(self, env):
for if_clause in self.if_clauses:
if_clause.analyse_declarations(env)
......@@ -4837,9 +4835,6 @@ class IfClauseNode(Node):
child_attrs = ["condition", "body"]
def analyse_control_flow(self, env):
self.body.analyse_control_flow(env)
def analyse_declarations(self, env):
self.body.analyse_declarations(env)
......@@ -4935,14 +4930,7 @@ class SwitchStatNode(StatNode):
self.else_clause.annotate(code)
class LoopNode(object):
def analyse_control_flow(self, env):
env.start_branching(self.pos)
self.body.analyse_control_flow(env)
env.next_branch(self.body.end_pos())
if self.else_clause:
self.else_clause.analyse_control_flow(env)
env.finish_branching(self.end_pos())
pass
class WhileStatNode(LoopNode, StatNode):
......@@ -5481,24 +5469,6 @@ class TryExceptStatNode(StatNode):
child_attrs = ["body", "except_clauses", "else_clause"]
def analyse_control_flow(self, env):
env.start_branching(self.pos)
self.body.analyse_control_flow(env)
successful_try = env.control_flow # grab this for later
env.next_branch(self.body.end_pos())
env.finish_branching(self.body.end_pos())
env.start_branching(self.except_clauses[0].pos)
for except_clause in self.except_clauses:
except_clause.analyse_control_flow(env)
env.next_branch(except_clause.end_pos())
# the else clause is executed only when the try clause finishes
env.control_flow.incoming = successful_try
if self.else_clause:
self.else_clause.analyse_control_flow(env)
env.finish_branching(self.end_pos())
def analyse_declarations(self, env):
self.body.analyse_declarations(env)
for except_clause in self.except_clauses:
......@@ -5819,13 +5789,6 @@ class TryFinallyStatNode(StatNode):
return node
create_analysed = staticmethod(create_analysed)
def analyse_control_flow(self, env):
env.start_branching(self.pos)
self.body.analyse_control_flow(env)
env.next_branch(self.body.end_pos())
env.finish_branching(self.body.end_pos())
self.finally_clause.analyse_control_flow(env)
def analyse_declarations(self, env):
self.body.analyse_declarations(env)
self.finally_clause.analyse_declarations(env)
......@@ -6295,6 +6258,10 @@ class ParallelStatNode(StatNode, ParallelNode):
privatization_insertion_point a code insertion point used to make temps
private (esp. the "nsteps" temp)
args tuple the arguments passed to the parallel construct
kwargs DictNode the keyword arguments passed to the parallel
construct (replaced by its compile time value)
"""
child_attrs = ['body']
......@@ -6319,8 +6286,21 @@ class ParallelStatNode(StatNode, ParallelNode):
def analyse_declarations(self, env):
self.body.analyse_declarations(env)
if self.kwargs:
self.kwargs = self.kwargs.compile_time_value(env)
else:
self.kwargs = {}
for kw, val in self.kwargs.iteritems():
if kw not in self.valid_keyword_arguments:
error(self.pos, "Invalid keyword argument: %s" % kw)
else:
setattr(self, kw, val)
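# A minimal usage sketch (illustrative only, relying on the documented
# cython.parallel API): the keyword arguments below are exactly what the
# validation above checks against each node's valid_keyword_arguments list;
# an unknown keyword is reported as "Invalid keyword argument: ...".
from cython.parallel import prange

def parallel_loop(n):
    # 'nogil', 'schedule' and 'num_threads' must be compile-time values.
    for i in prange(n, nogil=True, schedule='static', num_threads=4):
        pass  # loop body goes here; 'i' is thread-private
    return n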
def analyse_expressions(self, env):
self.body.analyse_expressions(env)
self.analyse_sharing_attributes(env)
self.check_independent_iterations()
def analyse_sharing_attributes(self, env):
"""
......@@ -6426,6 +6406,64 @@ class ParallelStatNode(StatNode, ParallelNode):
code.putln("%s = %s;" % (cname, entry.cname))
entry.cname = cname
def check_independent_iterations(self):
"""
This checks for uninitialized thread-private variables. It is far from
foolproof, as it takes neither control flow into account nor assignments
where a variable appears on both the lhs and rhs. So it detects only
cases like this:
for i in prange(10, nogil=True):
var = x # error, x is private and read before assigned
x = i
Fortunately, it doesn't need to be perfect, as we still initialize
private variables to "invalid" values, such as NULL or NaN whenever
possible.
"""
from Cython.Compiler import ParseTreeTransforms
transform = ParseTreeTransforms.FindUninitializedParallelVars()
transform(self.body)
for entry, pos in transform.used_vars:
if entry in self.privates:
assignment_pos, op = self.assignments[entry]
# Reading reduction variables is valid (in fact necessary)
# before assignment
if not op and pos < assignment_pos:
if self.is_prange:
error(pos, "Expression value depends on previous loop "
"iteration, cannot execute in parallel")
else:
error(pos, "Expression depends on an uninitialized "
"thread-private variable")
def initialize_privates_to_nan(self, code, exclude=None):
code.putln("/* Initialize private variables to invalid values */")
for entry, op in self.privates.iteritems():
if not op and (not exclude or entry != exclude):
invalid_value = entry.type.invalid_value()
if invalid_value:
code.globalstate.use_utility_code(
invalid_values_utility_code)
code.putln("%s = %s;" % (entry.cname,
entry.type.cast_code(invalid_value)))
def put_num_threads(self, code):
"""
Write the num_threads OpenMP clause if self.num_threads is set
"""
if self.num_threads is not None:
if isinstance(self.num_threads, (int, long)):
code.put(" num_threads(%d)" % (self.num_threads,))
else:
error(self.pos, "Invalid value for num_threads argument, "
"expected an int")
def declare_closure_privates(self, code):
"""
Set self.privates to a dict mapping C variable names that are to be
......@@ -6463,9 +6501,15 @@ class ParallelWithBlockNode(ParallelStatNode):
nogil_check = None
def analyse_expressions(self, env):
super(ParallelWithBlockNode, self).analyse_expressions(env)
self.analyse_sharing_attributes(env)
valid_keyword_arguments = ['num_threads']
num_threads = None
def analyse_declarations(self, env):
super(ParallelWithBlockNode, self).analyse_declarations(env)
if self.args:
error(self.pos, "cython.parallel.parallel() does not take "
"positional arguments")
def generate_execution_code(self, code):
self.declare_closure_privates(code)
......@@ -6478,11 +6522,13 @@ class ParallelWithBlockNode(ParallelStatNode):
'private(%s)' % ', '.join([e.cname for e in self.privates]))
self.privatization_insertion_point = code.insertion_point()
self.put_num_threads(code)
code.putln("")
code.putln("#endif /* _OPENMP */")
code.begin_block()
self.initialize_privates_to_nan(code)
self.body.generate_execution_code(code)
code.end_block()
......@@ -6495,11 +6541,6 @@ class ParallelRangeNode(ParallelStatNode):
target NameNode the target iteration variable
else_clause Node or None the else clause of this loop
args tuple the arguments passed to prange()
kwargs DictNode the keyword arguments passed to prange()
(replaced by its compile time value)
is_nogil bool indicates whether this is a nogil prange() node
"""
child_attrs = ['body', 'target', 'else_clause', 'args']
......@@ -6509,7 +6550,12 @@ class ParallelRangeNode(ParallelStatNode):
start = stop = step = None
is_prange = True
is_nogil = False
nogil = False
schedule = None
num_threads = None
valid_keyword_arguments = ['schedule', 'nogil', 'num_threads']
def analyse_declarations(self, env):
super(ParallelRangeNode, self).analyse_declarations(env)
......@@ -6528,14 +6574,6 @@ class ParallelRangeNode(ParallelStatNode):
else:
self.start, self.stop, self.step = self.args
if self.kwargs:
self.kwargs = self.kwargs.compile_time_value(env)
else:
self.kwargs = {}
self.is_nogil = self.kwargs.pop('nogil', False)
self.schedule = self.kwargs.pop('schedule', None)
if hasattr(self.schedule, 'decode'):
self.schedule = self.schedule.decode('ascii')
......@@ -6544,9 +6582,6 @@ class ParallelRangeNode(ParallelStatNode):
error(self.pos, "Invalid schedule argument to prange: %s" %
(self.schedule,))
for kw in self.kwargs:
error(self.pos, "Invalid keyword argument to prange: %s" % kw)
def analyse_expressions(self, env):
if self.target is None:
error(self.pos, "prange() can only be used as part of a for loop")
......@@ -6587,7 +6622,6 @@ class ParallelRangeNode(ParallelStatNode):
self.index_type = PyrexTypes.widest_numeric_type(
self.index_type, node.type)
super(ParallelRangeNode, self).analyse_expressions(env)
if self.else_clause is not None:
self.else_clause.analyse_expressions(env)
......@@ -6600,6 +6634,7 @@ class ParallelRangeNode(ParallelStatNode):
self.assignments[self.target.entry] = self.target.pos, None
self.analyse_sharing_attributes(env)
super(ParallelRangeNode, self).analyse_expressions(env)
def nogil_check(self, env):
names = 'start', 'stop', 'step', 'target'
......@@ -6734,6 +6769,8 @@ class ParallelRangeNode(ParallelStatNode):
c = self.parent.privatization_insertion_point
c.put(" private(%(nsteps)s)" % fmt_dict)
self.put_num_threads(code)
self.privatization_insertion_point = code.insertion_point()
code.putln("")
......@@ -6742,6 +6779,9 @@ class ParallelRangeNode(ParallelStatNode):
code.put("for (%(i)s = 0; %(i)s < %(nsteps)s; %(i)s++)" % fmt_dict)
code.begin_block()
code.putln("%(target)s = %(start)s + %(step)s * %(i)s;" % fmt_dict)
self.initialize_privates_to_nan(code, exclude=self.target.entry)
self.body.generate_execution_code(code)
code.end_block()
......@@ -6790,17 +6830,17 @@ if Options.gcc_branch_hints:
branch_prediction_macros = \
"""
#ifdef __GNUC__
/* Test for GCC > 2.95 */
#if __GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))
#define likely(x) __builtin_expect(!!(x), 1)
#define unlikely(x) __builtin_expect(!!(x), 0)
#else /* __GNUC__ > 2 ... */
#define likely(x) (x)
#define unlikely(x) (x)
#endif /* __GNUC__ > 2 ... */
/* Test for GCC > 2.95 */
#if __GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))
#define likely(x) __builtin_expect(!!(x), 1)
#define unlikely(x) __builtin_expect(!!(x), 0)
#else /* __GNUC__ > 2 ... */
#define likely(x) (x)
#define unlikely(x) (x)
#endif /* __GNUC__ > 2 ... */
#else /* __GNUC__ */
#define likely(x) (x)
#define unlikely(x) (x)
#define likely(x) (x)
#define unlikely(x) (x)
#endif /* __GNUC__ */
"""
else:
......@@ -7361,7 +7401,7 @@ static void __Pyx_RaiseArgtupleInvalid(
Py_ssize_t num_found)
{
Py_ssize_t num_expected;
const char *number, *more_or_less;
const char *more_or_less;
if (num_found < num_min) {
num_expected = num_min;
......@@ -7373,14 +7413,10 @@ static void __Pyx_RaiseArgtupleInvalid(
if (exact) {
more_or_less = "exactly";
}
number = (num_expected == 1) ? "" : "s";
PyErr_Format(PyExc_TypeError,
#if PY_VERSION_HEX < 0x02050000
"%s() takes %s %d positional argument%s (%d given)",
#else
"%s() takes %s %zd positional argument%s (%zd given)",
#endif
func_name, more_or_less, num_expected, number, num_found);
"%s() takes %s %"PY_FORMAT_SIZE_T"d positional argument%s (%"PY_FORMAT_SIZE_T"d given)",
func_name, more_or_less, num_expected,
(num_expected == 1) ? "" : "s", num_found);
}
""")
......@@ -7671,10 +7707,12 @@ bad:
unraisable_exception_utility_code = UtilityCode(
proto = """
static void __Pyx_WriteUnraisable(const char *name); /*proto*/
static void __Pyx_WriteUnraisable(const char *name, int clineno,
int lineno, const char *filename); /*proto*/
""",
impl = """
static void __Pyx_WriteUnraisable(const char *name) {
static void __Pyx_WriteUnraisable(const char *name, int clineno,
int lineno, const char *filename) {
PyObject *old_exc, *old_val, *old_tb;
PyObject *ctx;
__Pyx_ErrFetch(&old_exc, &old_val, &old_tb);
......@@ -7943,3 +7981,22 @@ bad:
'EMPTY_BYTES' : Naming.empty_bytes,
"MODULE": Naming.module_cname,
})
################ Utility code for cython.parallel stuff ################
invalid_values_utility_code = UtilityCode(
proto="""\
#include <string.h>
void __pyx_init_nan(void);
static float %(PYX_NAN)s;
""" % vars(Naming),
init="""
/* Initialize NaN. The sign is irrelevant: an exponent with all bits set and
a nonzero mantissa means NaN. Setting every mantissa bit gives a quiet NaN
on common IEEE 754 platforms. */
memset(&%(PYX_NAN)s, 0xFF, sizeof(%(PYX_NAN)s));
""" % vars(Naming))
......@@ -2129,14 +2129,31 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
return test_node
def _handle_simple_function_ord(self, node, pos_args):
"""Unpack ord(Py_UNICODE).
"""Unpack ord(Py_UNICODE) and ord('X').
"""
if len(pos_args) != 1:
return node
arg = pos_args[0]
if isinstance(arg, ExprNodes.CoerceToPyTypeNode):
if arg.arg.type.is_unicode_char:
return arg.arg.coerce_to(node.type, self.current_env())
return ExprNodes.TypecastNode(
arg.pos, operand=arg.arg, type=PyrexTypes.c_int_type
).coerce_to(node.type, self.current_env())
elif isinstance(arg, ExprNodes.UnicodeNode):
if len(arg.value) == 1:
return ExprNodes.IntNode(
arg.pos, type=PyrexTypes.c_int_type,
value=str(ord(arg.value)),
constant_result=ord(arg.value)
).coerce_to(node.type, self.current_env())
elif isinstance(arg, ExprNodes.StringNode):
if arg.unicode_value and len(arg.unicode_value) == 1 \
and ord(arg.unicode_value) <= 255: # Py2/3 portability
return ExprNodes.IntNode(
arg.pos, type=PyrexTypes.c_int_type,
value=str(ord(arg.unicode_value)),
constant_result=ord(arg.unicode_value)
).coerce_to(node.type, self.current_env())
return node
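# For reference (plain Python, illustrative): the values that the folding
# above turns into C integer constants for one-character literals.
assert ord(u'A') == 65
assert ord(u'\n') == 10
# Longer strings, or str literals above 255, fail the length/range checks and
# fall through to the generic ord() call.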
### special methods
......@@ -2396,9 +2413,9 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
node, "PyUnicode_Split", self.PyUnicode_Split_func_type,
'split', is_unbound_method, args)
PyUnicode_Tailmatch_func_type = PyrexTypes.CFuncType(
PyString_Tailmatch_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_bint_type, [
PyrexTypes.CFuncTypeArg("str", Builtin.unicode_type, None),
PyrexTypes.CFuncTypeArg("str", PyrexTypes.py_object_type, None), # bytes/str/unicode
PyrexTypes.CFuncTypeArg("substring", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("start", PyrexTypes.c_py_ssize_t_type, None),
PyrexTypes.CFuncTypeArg("end", PyrexTypes.c_py_ssize_t_type, None),
......@@ -2407,20 +2424,22 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
exception_value = '-1')
def _handle_simple_method_unicode_endswith(self, node, args, is_unbound_method):
return self._inject_unicode_tailmatch(
node, args, is_unbound_method, 'endswith', +1)
return self._inject_tailmatch(
node, args, is_unbound_method, 'unicode', 'endswith',
unicode_tailmatch_utility_code, +1)
def _handle_simple_method_unicode_startswith(self, node, args, is_unbound_method):
return self._inject_unicode_tailmatch(
node, args, is_unbound_method, 'startswith', -1)
return self._inject_tailmatch(
node, args, is_unbound_method, 'unicode', 'startswith',
unicode_tailmatch_utility_code, -1)
def _inject_unicode_tailmatch(self, node, args, is_unbound_method,
method_name, direction):
def _inject_tailmatch(self, node, args, is_unbound_method, type_name,
method_name, utility_code, direction):
"""Replace unicode.startswith(...) and unicode.endswith(...)
by a direct call to the corresponding C-API function.
"""
if len(args) not in (2,3,4):
self._error_wrong_arg_count('unicode.%s' % method_name, node, args, "2-4")
self._error_wrong_arg_count('%s.%s' % (type_name, method_name), node, args, "2-4")
return node
self._inject_int_default_argument(
node, args, 2, PyrexTypes.c_py_ssize_t_type, "0")
......@@ -2430,9 +2449,10 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
node.pos, value=str(direction), type=PyrexTypes.c_int_type))
method_call = self._substitute_method_call(
node, "__Pyx_PyUnicode_Tailmatch", self.PyUnicode_Tailmatch_func_type,
node, "__Pyx_Py%s_Tailmatch" % type_name.capitalize(),
self.PyString_Tailmatch_func_type,
method_name, is_unbound_method, args,
utility_code = unicode_tailmatch_utility_code)
utility_code = utility_code)
return method_call.coerce_to(Builtin.bool_type, self.current_env())
PyUnicode_Find_func_type = PyrexTypes.CFuncType(
......@@ -2760,6 +2780,25 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
return (encoding, encoding_node, error_handling, error_handling_node)
def _handle_simple_method_str_endswith(self, node, args, is_unbound_method):
return self._inject_tailmatch(
node, args, is_unbound_method, 'str', 'endswith',
str_tailmatch_utility_code, +1)
def _handle_simple_method_str_startswith(self, node, args, is_unbound_method):
return self._inject_tailmatch(
node, args, is_unbound_method, 'str', 'startswith',
str_tailmatch_utility_code, -1)
def _handle_simple_method_bytes_endswith(self, node, args, is_unbound_method):
return self._inject_tailmatch(
node, args, is_unbound_method, 'bytes', 'endswith',
bytes_tailmatch_utility_code, +1)
def _handle_simple_method_bytes_startswith(self, node, args, is_unbound_method):
return self._inject_tailmatch(
node, args, is_unbound_method, 'bytes', 'startswith',
bytes_tailmatch_utility_code, -1)
### helpers
......@@ -2772,12 +2811,13 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
self_arg = args[0]
if is_unbound_method:
self_arg = self_arg.as_none_safe_node(
"descriptor '%s' requires a '%s' object but received a 'NoneType'" % (
attr_name, node.function.obj.name))
"descriptor '%s' requires a '%s' object but received a 'NoneType'",
format_args = [attr_name, node.function.obj.name])
else:
self_arg = self_arg.as_none_safe_node(
"'NoneType' object has no attribute '%s'" % attr_name,
error = "PyExc_AttributeError")
"'NoneType' object has no attribute '%s'",
error = "PyExc_AttributeError",
format_args = [attr_name])
args[0] = self_arg
return ExprNodes.PythonCapiCallNode(
node.pos, name, func_type,
......@@ -2853,6 +2893,121 @@ static int __Pyx_PyUnicode_Tailmatch(PyObject* s, PyObject* substr,
''',
)
bytes_tailmatch_utility_code = UtilityCode(
proto="""
static int __Pyx_PyBytes_Tailmatch(PyObject* self, PyObject* arg, Py_ssize_t start,
Py_ssize_t end, int direction);
""",
impl = """
static int __Pyx_PyBytes_SingleTailmatch(PyObject* self, PyObject* arg, Py_ssize_t start,
Py_ssize_t end, int direction)
{
const char* self_ptr = PyBytes_AS_STRING(self);
Py_ssize_t self_len = PyBytes_GET_SIZE(self);
const char* sub_ptr;
Py_ssize_t sub_len;
int retval;
#if PY_VERSION_HEX >= 0x02060000
Py_buffer view;
view.obj = NULL;
#endif
if ( PyBytes_Check(arg) ) {
sub_ptr = PyBytes_AS_STRING(arg);
sub_len = PyBytes_GET_SIZE(arg);
}
#if PY_MAJOR_VERSION < 3
// Python 2.x allows mixing unicode and str
else if ( PyUnicode_Check(arg) ) {
return PyUnicode_Tailmatch(self, arg, start, end, direction);
}
#endif
else {
#if PY_VERSION_HEX < 0x02060000
if (unlikely(PyObject_AsCharBuffer(arg, &sub_ptr, &sub_len)))
return -1;
#else
if (unlikely(PyObject_GetBuffer(arg, &view, PyBUF_SIMPLE) == -1))
return -1;
sub_ptr = (const char*) view.buf;
sub_len = view.len;
#endif
}
if (end > self_len)
end = self_len;
else if (end < 0)
end += self_len;
if (end < 0)
end = 0;
if (start < 0)
start += self_len;
if (start < 0)
start = 0;
if (direction > 0) {
/* endswith */
if (end-sub_len > start)
start = end - sub_len;
}
if (start + sub_len <= end)
retval = !memcmp(self_ptr+start, sub_ptr, sub_len);
else
retval = 0;
#if PY_VERSION_HEX >= 0x02060000
if (view.obj)
PyBuffer_Release(&view);
#endif
return retval;
}
static int __Pyx_PyBytes_Tailmatch(PyObject* self, PyObject* substr, Py_ssize_t start,
Py_ssize_t end, int direction)
{
if (unlikely(PyTuple_Check(substr))) {
int result;
Py_ssize_t i;
for (i = 0; i < PyTuple_GET_SIZE(substr); i++) {
result = __Pyx_PyBytes_SingleTailmatch(self, PyTuple_GET_ITEM(substr, i),
start, end, direction);
if (result) {
return result;
}
}
return 0;
}
return __Pyx_PyBytes_SingleTailmatch(self, substr, start, end, direction);
}
""")
str_tailmatch_utility_code = UtilityCode(
proto = '''
static CYTHON_INLINE int __Pyx_PyStr_Tailmatch(PyObject* self, PyObject* arg, Py_ssize_t start,
Py_ssize_t end, int direction);
''',
# We do not use a C compiler macro here to avoid "unused function"
# warnings for the *_Tailmatch() function that is not being used in
# the specific CPython version. The C compiler will generate the same
# code anyway, and will usually just remove the unused function.
impl = '''
static CYTHON_INLINE int __Pyx_PyStr_Tailmatch(PyObject* self, PyObject* arg, Py_ssize_t start,
Py_ssize_t end, int direction)
{
if (PY_MAJOR_VERSION < 3)
return __Pyx_PyBytes_Tailmatch(self, arg, start, end, direction);
else
return __Pyx_PyUnicode_Tailmatch(self, arg, start, end, direction);
}
''',
requires=[unicode_tailmatch_utility_code, bytes_tailmatch_utility_code]
)
dict_getitem_default_utility_code = UtilityCode(
proto = '''
static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value) {
......@@ -3287,10 +3442,6 @@ class FinalOptimizePhase(Visitor.CythonTransform):
if node.first:
lhs = node.lhs
lhs.lhs_of_first_assignment = True
if isinstance(lhs, ExprNodes.NameNode) and lhs.entry.type.is_pyobject:
# Have variable initialized to 0 rather than None
lhs.entry.init_to_none = False
lhs.entry.init = 0
return node
def visit_SimpleCallNode(self, node):
......
......@@ -49,12 +49,6 @@ convert_range = True
# If this is 0 it simply creates a wrapper.
lookup_module_cpdef = False
# This will set local variables to None rather than NULL, which may suppress
# what would be an UnboundLocalError in pure Python, but eliminates checking
# for NULL on every use and allows decref rather than xdecref at the end.
# WARNING: This is a work in progress, may currently segfault.
init_local_none = True
# Whether or not to embed the Python interpreter, for use in making a
# standalone executable or calling from external libraries.
# This will provide a method which initializes the interpreter and
......@@ -65,6 +59,14 @@ embed = None
# module creation time. For legacy code only, needed for some circular imports.
disable_function_redefinition = False
# In previous iterations of Cython, globals() gave the first non-Cython module
# globals in the call stack. Sage relies on this behavior for variable injection.
old_style_globals = False
# Allows cimporting from a pyx file without a pxd file.
cimport_from_pyx = False
# Declare compiler directives
directive_defaults = {
......@@ -95,10 +97,19 @@ directive_defaults = {
'warn': None,
'warn.undeclared': False,
'warn.unreachable': True,
'warn.maybe_uninitialized': False,
'warn.unreachable': True,
'warn.unused': False,
'warn.unused_arg': False,
'warn.unused_result': False,
# remove unreachable code
'remove_unreachable': True,
# control flow debug directives
'control_flow.dot_output': "", # Graphviz output filename
'control_flow.dot_annotate_defs': False, # Annotate definitions
# test support
'test_assert_path_exists' : [],
'test_fail_if_path_exists' : [],
......@@ -107,6 +118,13 @@ directive_defaults = {
'binding': False,
}
# Extra warning directives
extra_warnings = {
'warn.maybe_uninitialized': True,
'warn.unreachable': True,
'warn.unused': True,
}
# Override types possibilities above, if needed
directive_types = {
'final' : bool, # final cdef classes and methods
......
......@@ -639,7 +639,8 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
'is not allowed in %s scope' % (directive, scope)))
return False
else:
if directive not in Options.directive_defaults:
if (directive not in Options.directive_defaults
and directive not in Options.directive_types):
error(pos, "Invalid directive: '%s'." % (directive,))
return True
......@@ -1264,6 +1265,44 @@ class DecoratorTransform(CythonTransform, SkipDeclarations):
return [node, reassignment]
class ForwardDeclareTypes(CythonTransform):
def visit_CompilerDirectivesNode(self, node):
env = self.module_scope
old = env.directives
env.directives = node.directives
self.visitchildren(node)
env.directives = old
return node
def visit_ModuleNode(self, node):
self.module_scope = node.scope
self.module_scope.directives = node.directives
self.visitchildren(node)
return node
def visit_CDefExternNode(self, node):
old_cinclude_flag = self.module_scope.in_cinclude
self.module_scope.in_cinclude = 1
self.visitchildren(node)
self.module_scope.in_cinclude = old_cinclude_flag
return node
def visit_CEnumDefNode(self, node):
node.declare(self.module_scope)
return node
def visit_CStructOrUnionDefNode(self, node):
if node.name not in self.module_scope.entries:
node.declare(self.module_scope)
return node
def visit_CClassDefNode(self, node):
if node.class_name not in self.module_scope.entries:
node.declare(self.module_scope)
return node
class AnalyseDeclarationsTransform(CythonTransform):
basic_property = TreeFragment(u"""
......@@ -1362,7 +1401,6 @@ if VALUE is not None:
"""
self.seen_vars_stack.append(cython.set())
lenv = node.local_scope
node.body.analyse_control_flow(lenv) # this will be totally refactored
node.declare_arguments(lenv)
for var, type_node in node.directive_locals.items():
if not lenv.lookup_here(var): # don't redeclare args
......@@ -1556,8 +1594,6 @@ if VALUE is not None:
type_name = entry.type.module_name + '.' + type_name
if entry.init is not None:
default_value = ' = ' + entry.init
elif entry.init_to_none:
default_value = ' = ' + repr(None)
docstring = attr_name + ': ' + type_name + default_value
property.doc = EncodedString(docstring)
# ---------------------------------------
......@@ -2132,8 +2168,8 @@ class GilCheck(VisitorTransform):
return node
def visit_ParallelRangeNode(self, node):
if node.is_nogil:
node.is_nogil = False
if node.nogil:
node.nogil = False
node = Nodes.GILStatNode(node.pos, state='nogil', body=node)
return self.visit_GILStatNode(node)
......@@ -2240,9 +2276,9 @@ class TransformBuiltinMethods(EnvTransform):
return node # nothing to do
items = [ ExprNodes.DictItemNode(pos,
key=ExprNodes.StringNode(pos, value=var),
value=ExprNodes.NameNode(pos, name=var))
value=ExprNodes.NameNode(pos, name=var, allow_null=True))
for var in lenv.entries ]
return ExprNodes.DictNode(pos, key_value_pairs=items)
return ExprNodes.DictNode(pos, key_value_pairs=items, exclude_null_values=True)
else: # dir()
if len(node.args) > 1:
error(self.pos, "Builtin 'dir()' called with wrong number of args, expected 0-1, got %d"
......@@ -2400,6 +2436,24 @@ class ReplaceFusedTypeChecks(VisitorTransform):
return node
class FindUninitializedParallelVars(CythonTransform, SkipDeclarations):
"""
This transform isn't part of the pipeline; it simply finds all references
to variables in parallel blocks.
"""
def __init__(self):
CythonTransform.__init__(self, None)
self.used_vars = []
def visit_ParallelStatNode(self, node):
return node
def visit_NameNode(self, node):
self.used_vars.append((node.entry, node.pos))
return node
class DebugTransform(CythonTransform):
"""
Write debug information for this Cython module.
......
......@@ -85,6 +85,13 @@ class BaseType(object):
return index_name
def invalid_value(self):
"""
Returns the most invalid value an object of this type can assume as a
C expression string. Returns None if no such value exists.
"""
class PyrexType(BaseType):
#
# Base class for all Pyrex types.
......@@ -264,6 +271,9 @@ class CTypedefType(BaseType):
self.typedef_base_type = base_type
self.typedef_is_external = is_external
def invalid_value(self):
return self.typedef_base_type.invalid_value()
def resolve(self):
return self.typedef_base_type.resolve()
......@@ -453,6 +463,9 @@ class PyObjectType(PyrexType):
"""
return False
def invalid_value(self):
return "1"
class BuiltinObjectType(PyObjectType):
# objstruct_cname string Name of PyObject struct
......@@ -885,9 +898,9 @@ static CYTHON_INLINE %(type)s __Pyx_PyInt_As%(SignWord)s%(TypeName)s(PyObject* x
"can't convert negative value to %(type)s");
return (%(type)s)-1;
}
return PyLong_AsUnsigned%(TypeName)s(x);
return (%(type)s)PyLong_AsUnsigned%(TypeName)s(x);
} else {
return PyLong_As%(TypeName)s(x);
return (%(type)s)PyLong_As%(TypeName)s(x);
}
} else {
%(type)s val;
......@@ -1039,6 +1052,14 @@ class CIntType(CNumericType):
def assignable_from_resolved_type(self, src_type):
return src_type.is_int or src_type.is_enum or src_type is error_type
def invalid_value(self):
if rank_to_type_name[self.rank] == 'char':
return "'?'"
else:
# We do not really know the size of the type, so return
# a 32-bit literal and rely on casting to final type. It will
# be negative for signed ints, which is good.
return "0xbad0bad0";
class CAnonEnumType(CIntType):
......@@ -1116,13 +1137,8 @@ static CYTHON_INLINE Py_UCS4 __Pyx_PyObject_AsPy_UCS4(PyObject* x) {
}
#endif
PyErr_Format(PyExc_ValueError,
"only single character unicode strings can be converted to Py_UCS4, got length "
#if PY_VERSION_HEX < 0x02050000
"%d",
#else
"%zd",
#endif
PyUnicode_GET_SIZE(x));
"only single character unicode strings can be converted to Py_UCS4, "
"got length %"PY_FORMAT_SIZE_T"d", PyUnicode_GET_SIZE(x));
return (Py_UCS4)-1;
}
ival = __Pyx_PyInt_AsLong(x);
......@@ -1172,13 +1188,8 @@ static CYTHON_INLINE Py_UNICODE __Pyx_PyObject_AsPy_UNICODE(PyObject* x) {
if (PyUnicode_Check(x)) {
if (unlikely(PyUnicode_GET_SIZE(x) != 1)) {
PyErr_Format(PyExc_ValueError,
"only single character unicode strings can be converted to Py_UNICODE, got length "
#if PY_VERSION_HEX < 0x02050000
"%d",
#else
"%zd",
#endif
PyUnicode_GET_SIZE(x));
"only single character unicode strings can be converted to Py_UNICODE, "
"got length %"PY_FORMAT_SIZE_T"d", PyUnicode_GET_SIZE(x));
return (Py_UNICODE)-1;
}
return PyUnicode_AS_UNICODE(x)[0];
......@@ -1249,6 +1260,8 @@ class CFloatType(CNumericType):
def assignable_from_resolved_type(self, src_type):
return (src_type.is_numeric and not src_type.is_complex) or src_type is error_type
def invalid_value(self):
return Naming.PYX_NAN
class CComplexType(CNumericType):
......@@ -1769,6 +1782,8 @@ class CPtrType(CType):
else:
return CPtrType(base_type)
def invalid_value(self):
return "1"
class CNullPtrType(CPtrType):
......@@ -2496,6 +2511,12 @@ class TemplatePlaceholderType(CType):
else:
return cmp(type(self), type(other))
def __eq__(self, other):
if isinstance(other, TemplatePlaceholderType):
return self.name == other.name
else:
return False
class CEnumType(CType):
# name string
# cname string or None
......
......@@ -172,13 +172,34 @@ class FileSourceDescriptor(SourceDescriptor):
self.filename = filename
self.set_file_type_from_name(filename)
self._cmp_name = filename
self._lines = {}
def get_lines(self, encoding=None, error_handling=None):
return Utils.open_source_file(
# we cache the lines only the second time this is called, in
# order to save memory when they are only used once
key = (encoding, error_handling)
try:
lines = self._lines[key]
if lines is not None:
return lines
except KeyError:
pass
f = Utils.open_source_file(
self.filename, encoding=encoding,
error_handling=error_handling,
# newline normalisation is costly before Py2.6
require_normalised_newlines=False)
try:
lines = list(f)
finally:
f.close()
if key in self._lines:
self._lines[key] = lines
else:
# do not cache the first access, but remember that we
# already read it once
self._lines[key] = None
return lines
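# The caching strategy above in a nutshell (standalone sketch with invented
# names): the first access only records that the key was seen, and a repeated
# access for the same key is what actually keeps the data in memory.
class _CacheOnSecondUse(object):
    def __init__(self, read):
        self._read = read        # callable that loads the data for a key
        self._cache = {}
    def get(self, key):
        cached = self._cache.get(key)
        if cached is not None:
            return cached        # second or later access: serve from cache
        data = self._read(key)
        # keep the data only if this key was already requested once before
        self._cache[key] = data if key in self._cache else None
        return data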
def get_description(self):
return self.path_description
......
......@@ -13,7 +13,6 @@ import TypeSlots
from TypeSlots import \
pyfunction_signature, pymethod_signature, \
get_special_method_signature, get_property_accessor_signature
import ControlFlow
import Code
import __builtin__ as builtins
try:
......@@ -96,7 +95,6 @@ class Entry(object):
# holding its home namespace
# pymethdef_cname string PyMethodDef structure
# signature Signature Arg & return types for Python func
# init_to_none boolean True if initial value should be None
# as_variable Entry Alternative interpretation of extension
# type name or builtin C function as a variable
# xdecref_cleanup boolean Use Py_XDECREF for error cleanup
......@@ -157,7 +155,6 @@ class Entry(object):
func_cname = None
func_modifiers = []
doc = None
init_to_none = 0
as_variable = None
xdecref_cleanup = 0
in_cinclude = 0
......@@ -187,6 +184,8 @@ class Entry(object):
self.init = init
self.overloaded_alternatives = []
self.assignments = []
self.cf_assignments = []
self.cf_references = []
def __repr__(self):
return "Entry(name=%s, type=%s)" % (self.name, self.type)
......@@ -225,7 +224,6 @@ class Scope(object):
# qualified_name string "modname" or "modname.classname"
# pystring_entries [Entry] String const entries newly used as
# Python strings in this scope
# control_flow ControlFlow Used for keeping track of environment state
# nogil boolean In a nogil section
# directives dict Helper variable for the recursive
# analysis, contains directive values.
......@@ -278,22 +276,12 @@ class Scope(object):
self.pystring_entries = []
self.buffer_entries = []
self.lambda_defs = []
self.control_flow = ControlFlow.LinearControlFlow()
self.return_type = None
self.id_counters = {}
def __deepcopy__(self, memo):
return self
def start_branching(self, pos):
self.control_flow = self.control_flow.start_branch(pos)
def next_branch(self, pos):
self.control_flow = self.control_flow.next_branch(pos)
def finish_branching(self, pos):
self.control_flow = self.control_flow.finish_branch(pos)
def __str__(self):
return "<%s %s>" % (self.__class__.__name__, self.qualified_name)
......@@ -446,8 +434,6 @@ class Scope(object):
if scope:
entry.type.scope = scope
self.type_entries.append(entry)
if not scope and not entry.type.scope:
self.check_for_illegal_incomplete_ctypedef(typedef_flag, pos)
return entry
def declare_cpp_class(self, name, scope,
......@@ -473,13 +459,24 @@ class Scope(object):
if scope:
entry.type.scope = scope
self.type_entries.append(entry)
if templates is not None:
if base_classes:
if entry.type.base_classes and not entry.type.base_classes == base_classes:
error(pos, "Base type does not match previous declaration")
else:
entry.type.base_classes = base_classes
if templates or entry.type.templates:
if templates != entry.type.templates:
error(pos, "Template parameters do not match previous declaration")
if templates is not None and entry.type.scope is not None:
for T in templates:
template_entry = entry.type.scope.declare(T.name, T.name, T, None, 'extern')
template_entry.is_type = 1
def declare_inherited_attributes(entry, base_classes):
for base_class in base_classes:
if base_class.scope is None:
error(pos, "Cannot inherit from incomplete type")
else:
declare_inherited_attributes(entry, base_class.base_classes)
entry.type.scope.declare_inherited_cpp_attributes(base_class.scope)
if entry.type.scope:
......@@ -536,7 +533,6 @@ class Scope(object):
if api:
entry.api = 1
entry.used = 1
self.control_flow.set_state((), (name, 'initialized'), False)
return entry
def declare_builtin(self, name, pos):
......@@ -1094,6 +1090,8 @@ class ModuleScope(Scope):
self.var_entries.append(entry)
else:
entry.is_pyglobal = 1
if Options.cimport_from_pyx:
entry.used = 1
return entry
def declare_cfunction(self, name, type, pos,
......@@ -1196,8 +1194,6 @@ class ModuleScope(Scope):
scope.declare_inherited_c_attributes(base_type.scope)
type.set_scope(scope)
self.type_entries.append(entry)
else:
self.check_for_illegal_incomplete_ctypedef(typedef_flag, pos)
else:
if defining and type.scope.defined:
error(pos, "C class '%s' already defined" % name)
......@@ -1228,10 +1224,6 @@ class ModuleScope(Scope):
#
return entry
def check_for_illegal_incomplete_ctypedef(self, typedef_flag, pos):
if typedef_flag and not self.in_cinclude:
error(pos, "Forward-referenced type must use 'cdef', not 'ctypedef'")
def allocate_vtable_names(self, entry):
# If extension type has a vtable, allocate vtable struct and
# slot names for it.
......@@ -1386,7 +1378,6 @@ class LocalScope(Scope):
entry.is_arg = 1
#entry.borrowed = 1 # Not using borrowed arg refs for now
self.arg_entries.append(entry)
self.control_flow.set_state((), (name, 'source'), 'arg')
return entry
def declare_var(self, name, type, pos,
......@@ -1398,9 +1389,8 @@ class LocalScope(Scope):
entry = Scope.declare_var(self, name, type, pos,
cname=cname, visibility=visibility,
api=api, in_pxd=in_pxd, is_cdef=is_cdef)
if type.is_pyobject and not Options.init_local_none:
if type.is_pyobject:
entry.init = "0"
entry.init_to_none = (type.is_pyobject or type.is_unspecified) and Options.init_local_none
entry.is_local = 1
entry.in_with_gil_block = self._in_with_gil_block
......
......@@ -39,7 +39,24 @@ class MarkAssignments(CythonTransform):
if self.parallel_block_stack:
parallel_node = self.parallel_block_stack[-1]
parallel_node.assignments[lhs.entry] = (lhs.pos, inplace_op)
previous_assignment = parallel_node.assignments.get(lhs.entry)
# If there was a previous assignment to the variable, keep the
# previous assignment position
if previous_assignment:
pos, previous_inplace_op = previous_assignment
if (inplace_op and previous_inplace_op and
inplace_op != previous_inplace_op):
# x += y; x *= y
t = (inplace_op, previous_inplace_op)
error(lhs.pos,
"Reduction operator '%s' is inconsistent "
"with previous reduction operator '%s'" % t)
else:
pos = lhs.pos
parallel_node.assignments[lhs.entry] = (pos, inplace_op)
elif isinstance(lhs, ExprNodes.SequenceNode):
for arg in lhs.args:
......
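The check added above keeps the position of the first assignment and rejects mixing different in-place operators on the same variable inside a parallel block, since the operator determines the reduction. A minimal Cython sketch of what it accepts and what it would flag (not part of this change)::

    from cython.parallel import prange

    def reduce_sum(int n):
        cdef int i, total = 0
        for i in prange(n, nogil=True):
            total += i      # '+' marks 'total' as a sum reduction
            # total *= i    # would be rejected: '*' is inconsistent with the earlier '+'
        return total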
......@@ -100,9 +100,6 @@ class TempsBlockNode(Node):
code.put_decref_clear(handle.temp, handle.type)
code.funcstate.release_temp(handle.temp)
def analyse_control_flow(self, env):
self.body.analyse_control_flow(env)
def analyse_declarations(self, env):
self.body.analyse_declarations(env)
......@@ -290,9 +287,6 @@ class LetNode(Nodes.StatNode, LetNodeMixin):
self.pos = body.pos
self.body = body
def analyse_control_flow(self, env):
self.body.analyse_control_flow(env)
def analyse_declarations(self, env):
self.temp_expression.analyse_declarations(env)
self.body.analyse_declarations(env)
......
......@@ -31,6 +31,7 @@ cdef extern from "string.h" nogil:
int strcoll (const_char *S1, const_char *S2)
size_t strxfrm (char *TO, const_char *FROM, size_t SIZE)
char *strerror (int ERRNUM)
char *strchr (const_char *STRING, int C)
char *strrchr (const_char *STRING, int C)
......
......@@ -81,6 +81,8 @@ cdef extern from "numpy/arrayobject.h":
NPY_COMPLEX256
NPY_COMPLEX512
NPY_INTP
ctypedef enum NPY_ORDER:
NPY_ANYORDER
NPY_CORDER
......@@ -215,9 +217,9 @@ cdef extern from "numpy/arrayobject.h":
info.buf = PyArray_DATA(self)
info.ndim = ndim
if copy_shape:
# Allocate new buffer for strides and shape info. This is allocated
# as one block, strides first.
info.strides = <Py_ssize_t*>stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2)
# Allocate new buffer for strides and shape info.
# This is allocated as one block, strides first.
info.strides = <Py_ssize_t*>stdlib.malloc(sizeof(Py_ssize_t) * <size_t>ndim * 2)
info.shape = info.strides + ndim
for i in range(ndim):
info.strides[i] = PyArray_STRIDES(self)[i]
......
......@@ -8,42 +8,42 @@ cdef extern from "omp.h":
omp_sched_guided = 3,
omp_sched_auto = 4
extern void omp_set_num_threads(int)
extern int omp_get_num_threads()
extern int omp_get_max_threads()
extern int omp_get_thread_num()
extern int omp_get_num_procs()
extern int omp_in_parallel()
extern void omp_set_dynamic(int)
extern int omp_get_dynamic()
extern void omp_set_nested(int)
extern int omp_get_nested()
extern void omp_init_lock(omp_lock_t *)
extern void omp_destroy_lock(omp_lock_t *)
extern void omp_set_lock(omp_lock_t *)
extern void omp_unset_lock(omp_lock_t *)
extern int omp_test_lock(omp_lock_t *)
extern void omp_init_nest_lock(omp_nest_lock_t *)
extern void omp_destroy_nest_lock(omp_nest_lock_t *)
extern void omp_set_nest_lock(omp_nest_lock_t *)
extern void omp_unset_nest_lock(omp_nest_lock_t *)
extern int omp_test_nest_lock(omp_nest_lock_t *)
extern double omp_get_wtime()
extern double omp_get_wtick()
void omp_set_schedule(omp_sched_t, int)
void omp_get_schedule(omp_sched_t *, int *)
int omp_get_thread_limit()
void omp_set_max_active_levels(int)
int omp_get_max_active_levels()
int omp_get_level()
int omp_get_ancestor_thread_num(int)
int omp_get_team_size(int)
int omp_get_active_level()
extern void omp_set_num_threads(int) nogil
extern int omp_get_num_threads() nogil
extern int omp_get_max_threads() nogil
extern int omp_get_thread_num() nogil
extern int omp_get_num_procs() nogil
extern int omp_in_parallel() nogil
extern void omp_set_dynamic(int) nogil
extern int omp_get_dynamic() nogil
extern void omp_set_nested(int) nogil
extern int omp_get_nested() nogil
extern void omp_init_lock(omp_lock_t *) nogil
extern void omp_destroy_lock(omp_lock_t *) nogil
extern void omp_set_lock(omp_lock_t *) nogil
extern void omp_unset_lock(omp_lock_t *) nogil
extern int omp_test_lock(omp_lock_t *) nogil
extern void omp_init_nest_lock(omp_nest_lock_t *) nogil
extern void omp_destroy_nest_lock(omp_nest_lock_t *) nogil
extern void omp_set_nest_lock(omp_nest_lock_t *) nogil
extern void omp_unset_nest_lock(omp_nest_lock_t *) nogil
extern int omp_test_nest_lock(omp_nest_lock_t *) nogil
extern double omp_get_wtime() nogil
extern double omp_get_wtick() nogil
void omp_set_schedule(omp_sched_t, int) nogil
void omp_get_schedule(omp_sched_t *, int *) nogil
int omp_get_thread_limit() nogil
void omp_set_max_active_levels(int) nogil
int omp_get_max_active_levels() nogil
int omp_get_level() nogil
int omp_get_ancestor_thread_num(int) nogil
int omp_get_team_size(int) nogil
int omp_get_active_level() nogil
......@@ -337,7 +337,8 @@ class CythonDotParallel(object):
__all__ = ['parallel', 'prange', 'threadid']
parallel = nogil
def parallel(self, num_threads=None):
return nogil
def prange(self, start=0, stop=None, step=1, schedule=None, nogil=False):
if stop is None:
......
def f(x):
return x**2-x
def integrate_f(a, b, N):
s = 0.0
dx = (b-a)/N
for i in range(N):
s += f(a+i*dx)
return s * dx
def f(x):
return x**2-x
def integrate_f(a, b, N):
s = 0.0
dx = (b-a)/N
for i in range(N):
s += f(a+i*dx)
return s * dx
cdef double f(double x) except? -2:
return x**2-x
def integrate_f(double a, double b, int N):
cdef int i
s = 0.0
dx = (b-a)/N
for i in range(N):
s += f(a+i*dx)
return s * dx
import timeit
import integrate0, integrate1, integrate2
number = 10
py_time = None
for m in ('integrate0', 'integrate1', 'integrate2'):
print m
t = min(timeit.repeat("integrate_f(0.0, 10.0, 10000000)", "from %s import integrate_f" % m, number=number))
if py_time is None:
py_time = t
print " ", t / number, "s"
print " ", py_time / t
# Run as:
# python setup.py build_ext --inplace
import sys
sys.path.insert(0, "..")
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
......
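For reference, a complete minimal build script for the tutorial module usually looks like the following; this is a sketch only, and the module name is assumed from the integrate files above::

    # Run as: python setup.py build_ext --inplace
    from distutils.core import setup
    from Cython.Build import cythonize

    setup(
        ext_modules = cythonize("integrate2.pyx"),
    )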
......@@ -80,7 +80,7 @@ argument in order to pass it.
Therefore Cython provides a syntax for declaring a C-style function,
the cdef keyword::
cdef double f(double) except? -2:
cdef double f(double x) except? -2:
return x**2-x
Some form of except-modifier should usually be added, otherwise Cython
......
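To make the role of the except-modifier concrete, here is a hedged sketch (not taken from the documentation) of a cdef function whose Python exceptions propagate through the ``-2`` error return::

    cdef double f(double x) except? -2:
        if x < 0:
            raise ValueError("negative input")
        return x**2 - x

    def call_f(double x):
        # if f() raises, the -2 sentinel plus a follow-up error check propagate it
        return f(x)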
......@@ -359,9 +359,37 @@ exception and converting it into a Python exception. For example, ::
cdef extern from "some_file.h":
cdef int foo() except +
This will translate try and the C++ error into an appropriate Python exception
(currently an IndexError on std::out_of_range and a RuntimeError otherwise
(preserving the what() message). ::
This will translate the C++ error into an appropriate Python exception.
The translation is performed according to the following table
(the ``std::`` prefix is omitted from the C++ identifiers):
+-----------------------+---------------------+
| C++ | Python |
+=======================+=====================+
| ``bad_alloc`` | ``MemoryError`` |
+-----------------------+---------------------+
| ``bad_cast`` | ``TypeError`` |
+-----------------------+---------------------+
| ``domain_error`` | ``ValueError`` |
+-----------------------+---------------------+
| ``invalid_argument`` | ``ValueError`` |
+-----------------------+---------------------+
| ``ios_base::failure`` | ``IOError`` |
+-----------------------+---------------------+
| ``out_of_range`` | ``IndexError`` |
+-----------------------+---------------------+
| ``overflow_error`` | ``OverflowError`` |
+-----------------------+---------------------+
| ``range_error`` | ``ArithmeticError`` |
+-----------------------+---------------------+
| ``underflow_error`` | ``ArithmeticError`` |
+-----------------------+---------------------+
| (all others) | ``RuntimeError`` |
+-----------------------+---------------------+
The ``what()`` message, if any, is preserved. Note that a C++
``ios_base::failure`` can denote EOF, but does not carry enough information
for Cython to discern that, so watch out with exception masks on IO streams. ::
cdef int bar() except +MemoryError
......
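A hedged sketch of the kind of declarations the table applies to; the header and all names are assumed, and a C++ build is required::

    cdef extern from "queue_impl.h":              # hypothetical header
        cdef cppclass Queue:
            void push(int value) except +         # e.g. bad_alloc becomes MemoryError
            int pop() except +                    # e.g. out_of_range becomes IndexError
        int checked_alloc(size_t n) except +MemoryError   # any C++ exception -> MemoryError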
......@@ -109,6 +109,7 @@ def get_openmp_compiler_flags(language):
cc = sysconfig.get_config_var('CXX')
else:
cc = sysconfig.get_config_var('CC')
if not cc: return None # Windows?
# For some reason, cc can be e.g. 'gcc -pthread'
cc = cc.split()[0]
......@@ -140,8 +141,10 @@ def get_openmp_compiler_flags(language):
if compiler_version and compiler_version.split('.') >= ['4', '2']:
return '-fopenmp', '-fopenmp'
locale.setlocale(locale.LC_ALL, '')
try:
locale.setlocale(locale.LC_ALL, '')
except locale.Error:
pass
OPENMP_C_COMPILER_FLAGS = get_openmp_compiler_flags('c')
OPENMP_CPP_COMPILER_FLAGS = get_openmp_compiler_flags('cpp')
......@@ -174,6 +177,7 @@ VER_DEP_MODULES = {
'run.purecdef',
]),
(2,7) : (operator.lt, lambda x: x in ['run.withstat_py', # multi context with statement
'run.yield_inside_lambda',
]),
# The next line should start (3,); but this is a dictionary, so
# we can only have one (3,) key. Since 2.7 is supposed to be the
......@@ -199,6 +203,9 @@ KEEP_2X_FILES = [
COMPILER = None
INCLUDE_DIRS = [ d for d in os.getenv('INCLUDE', '').split(os.pathsep) if d ]
CFLAGS = os.getenv('CFLAGS', '').split()
CCACHE = os.getenv('CYTHON_RUNTESTS_CCACHE', '').split()
BACKENDS = ['c', 'cpp']
def memoize(f):
uncomputed = object()
......@@ -236,13 +243,15 @@ list_unchanging_dir = memoize(lambda x: os.listdir(x))
class build_ext(_build_ext):
def build_extension(self, ext):
if ext.language == 'c++':
try:
try: # Py2.7+ & Py3.2+
compiler_obj = self.compiler_obj
except AttributeError:
compiler_obj = self.compiler
if ext.language == 'c++':
compiler_obj.compiler_so.remove('-Wstrict-prototypes')
if CCACHE:
compiler_obj.compiler_so = CCACHE + compiler_obj.compiler_so
except Exception:
pass
_build_ext.build_extension(self, ext)
......@@ -1245,12 +1254,15 @@ def main():
help="do not run the Cython compiler, only the C compiler")
parser.add_option("--compiler", dest="compiler", default=None,
help="C compiler type")
backend_list = ','.join(BACKENDS)
parser.add_option("--backends", dest="backends", default=backend_list,
help="select backends to test (default: %s)" % backend_list)
parser.add_option("--no-c", dest="use_c",
action="store_false", default=True,
help="do not test C compilation")
help="do not test C compilation backend")
parser.add_option("--no-cpp", dest="use_cpp",
action="store_false", default=True,
help="do not test C++ compilation")
help="do not test C++ compilation backend")
parser.add_option("--no-unit", dest="unittests",
action="store_false", default=True,
help="do not run the unit tests")
......@@ -1399,8 +1411,6 @@ def main():
if WITH_CYTHON and options.language_level == 3:
sys.stderr.write("Using Cython language level 3.\n")
sys.stderr.write("\n")
test_bugs = False
if options.tickets:
for ticket_number in options.tickets:
......@@ -1435,11 +1445,23 @@ def main():
global COMPILER
if options.compiler:
COMPILER = options.compiler
languages = []
if options.use_c:
languages.append('c')
if options.use_cpp:
languages.append('cpp')
selected_backends = [ name.strip() for name in options.backends.split(',') if name.strip() ]
backends = []
for backend in selected_backends:
if backend == 'c' and not options.use_c:
continue
elif backend == 'cpp' and not options.use_cpp:
continue
elif backend not in BACKENDS:
sys.stderr.write("Unknown backend requested: '%s' not one of [%s]\n" % (
backend, ','.join(BACKENDS)))
sys.exit(1)
backends.append(backend)
sys.stderr.write("Backends: %s\n" % ','.join(backends))
languages = backends
sys.stderr.write("\n")
test_suite = unittest.TestSuite()
......
......@@ -99,8 +99,10 @@ def compile_cython_modules(profile=False, compile_more=False, cython_with_refnan
"Cython.Compiler.Scanning",
"Cython.Compiler.Parsing",
"Cython.Compiler.Visitor",
"Cython.Compiler.FlowControl",
"Cython.Compiler.Code",
"Cython.Runtime.refnanny",]
"Cython.Runtime.refnanny",
]
if compile_more:
compiled_modules.extend([
"Cython.Compiler.ParseTreeTransforms",
......
......@@ -3,7 +3,7 @@
cdef void spam():
cdef long long L
cdef unsigned long long U
cdef object x
cdef object x = object()
L = x
x = L
U = x
......
# mode: compile
cdef int f() except -1:
cdef object x, y, z, w
cdef object x, y = 0, z = 0, w = 0
cdef int i
x = abs(y)
delattr(x, 'spam')
......
......@@ -14,7 +14,7 @@ cdef class SuperSpam(Spam):
cdef void tomato():
cdef Spam spam
cdef SuperSpam superspam
cdef SuperSpam superspam = SuperSpam()
spam = superspam
spam.add_tons(42)
superspam.add_tons(1764)
......
......@@ -10,8 +10,8 @@ cdef class Swallow:
def f(Grail g):
cdef int i = 0
cdef Swallow s
cdef object x
cdef Swallow s = Swallow()
cdef object x = Grail()
g = x
x = g
g = i
......
# mode: compile
ctypedef enum MyEnum:
Value1
Value2
Value3 = 100
cdef MyEnum my_enum = Value3
ctypedef struct StructA:
StructA *a
StructB *b
cdef struct StructB:
StructA *a
StructB *b
cdef class ClassA:
cdef ClassA a
cdef ClassB b
ctypedef public class ClassB [ object ClassB, type TypeB ]:
cdef ClassA a
cdef ClassB b
cdef StructA struct_a
cdef StructB struct_b
struct_a.a = &struct_a
struct_a.b = &struct_b
struct_b.a = &struct_a
struct_b.b = &struct_b
cdef ClassA class_a = ClassA()
cdef ClassB class_b = ClassB()
class_a.a = class_a
class_a.b = class_b
class_b.a = class_a
class_b.b = class_b
# ticket: 444
# mode: compile
# mode: error
def test():
cdef object[int] not_assigned_to
not_assigned_to[2] = 3
_ERRORS = """
6:20: local variable 'not_assigned_to' referenced before assignment
"""
# mode: error
ctypedef struct Spam
cdef extern from *:
ctypedef struct Ham
ctypedef struct Spam:
int i
ctypedef struct Spam
_ERRORS = u"""
3:0: Forward-referenced type must use 'cdef', not 'ctypedef'
"""
......@@ -39,6 +39,32 @@ with nogil, cython.parallel.parallel():
with nogil, cython.parallel.parallel:
pass
cdef int y
# this is not valid
for i in prange(10, nogil=True):
i = y * 4
y = i
# this is valid
for i in prange(10, nogil=True):
y = i
i = y * 4
y = i
with nogil, cython.parallel.parallel():
i = y
y = i
for i in prange(10, nogil=True):
y += i
y *= i
with nogil, cython.parallel.parallel("invalid"):
pass
with nogil, cython.parallel.parallel(invalid=True):
pass
_ERRORS = u"""
e_cython_parallel.pyx:3:8: cython.parallel.parallel is not a module
e_cython_parallel.pyx:4:0: No such directive: cython.parallel.something
......@@ -53,4 +79,9 @@ e_cython_parallel.pyx:30:9: Can only iterate over an iteration variable
e_cython_parallel.pyx:33:10: Must be of numeric type, not int *
e_cython_parallel.pyx:36:33: Closely nested 'with parallel:' blocks are disallowed
e_cython_parallel.pyx:39:12: The parallel directive must be called
e_cython_parallel.pyx:45:10: Expression value depends on previous loop iteration, cannot execute in parallel
e_cython_parallel.pyx:55:9: Expression depends on an uninitialized thread-private variable
e_cython_parallel.pyx:60:6: Reduction operator '*' is inconsistent with previous reduction operator '+'
e_cython_parallel.pyx:62:36: cython.parallel.parallel() does not take positional arguments
e_cython_parallel.pyx:65:36: Invalid keyword argument: invalid
"""
......@@ -12,9 +12,18 @@ def f(a):
del j # error: deletion of non-Python object
del x[i] # error: deletion of non-Python object
del s.m # error: deletion of non-Python object
def outer(a):
def inner():
print a
del a
return inner()
_ERRORS = u"""
10:6: Cannot assign to or delete this
11:45: Deletion of non-Python, non-C++ object
13:6: Deletion of non-Python, non-C++ object
14:6: Deletion of non-Python, non-C++ object
10:9: Cannot assign to or delete this
11:48: Deletion of non-Python, non-C++ object
13:9: Deletion of non-Python, non-C++ object
14:9: Deletion of non-Python, non-C++ object
19:9: can not delete variable 'a' referenced in nested scope
"""
......@@ -10,7 +10,7 @@ cdef class E:
cdef readonly object __weakref__
cdef void f():
cdef C c
cdef C c = C()
cdef object x
x = c.__weakref__
c.__weakref__ = x
......
# ticket: 692
# mode: error
def func((a, b)):
return a + b
_ERRORS = u"""
4:9: Missing argument name
5:13: undeclared name not builtin: a
5:16: undeclared name not builtin: b
"""
......@@ -18,7 +18,7 @@ cdef void r() nogil:
q()
cdef object m():
cdef object x, y, obj
cdef object x, y = 0, obj
cdef int i, j, k
global fred
q()
......
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror
def simple():
print a
a = 0
def simple2(arg):
if arg > 0:
a = 1
return a
def simple_pos(arg):
if arg > 0:
a = 1
else:
a = 0
return a
def ifelif(c1, c2):
if c1 == 1:
if c2:
a = 1
else:
a = 2
elif c1 == 2:
a = 3
return a
def nowimpossible(a):
if a:
b = 1
if a:
print b
def fromclosure():
def bar():
print a
a = 1
return bar
# Should work ok in both py2 and py3
def list_comp(a):
return [i for i in a]
def set_comp(a):
return set(i for i in a)
def dict_comp(a):
return {i: j for i, j in a}
# args and kwargs
def generic_args_call(*args, **kwargs):
return args, kwargs
def cascaded(x):
print a, b
a = b = x
def from_import():
print bar
from foo import bar
def regular_import():
print foo
import foo
def raise_stat():
try:
raise exc, msg
except:
pass
exc = ValueError
msg = 'dummy'
_ERRORS = """
6:11: local variable 'a' referenced before assignment
12:12: local variable 'a' might be referenced before assignment
29:12: local variable 'a' might be referenced before assignment
35:15: local variable 'b' might be referenced before assignment
58:11: local variable 'a' referenced before assignment
58:14: local variable 'b' referenced before assignment
62:13: local variable 'bar' referenced before assignment
66:13: local variable 'foo' referenced before assignment
71:17: local variable 'exc' referenced before assignment
71:22: local variable 'msg' referenced before assignment
"""
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror
# class scope
def foo(c):
class Foo(object):
if c > 0:
b = 1
print a, b
a = 1
return Foo
_ERRORS = """
10:15: local variable 'a' referenced before assignment
10:18: local variable 'b' might be referenced before assignment
"""
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror
def foo(x):
a = 1
del a, b
b = 2
return a, b
_ERRORS = """
7:9: Deletion of non-Python, non-C++ object
7:12: local variable 'b' referenced before assignment
7:12: Deletion of non-Python, non-C++ object
9:12: local variable 'a' referenced before assignment
"""
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror
def exc_target():
try:
{}['foo']
except KeyError, e:
pass
except IndexError, i:
pass
return e, i
def exc_body():
try:
a = 1
except Exception:
pass
return a
def exc_else_pos():
try:
pass
except Exception, e:
pass
else:
e = 1
return e
def exc_body_pos(d):
try:
a = d['foo']
except KeyError:
a = None
return a
def exc_pos():
try:
a = 1
except Exception:
a = 1
return a
def exc_finally():
try:
a = 1
finally:
pass
return a
def exc_finally2():
try:
pass
finally:
a = 1
return a
def exc_assmt_except(a):
try:
x = a
except:
return x
def exc_assmt_finaly(a):
try:
x = a
except:
return x
def raise_stat(a):
try:
if a < 0:
raise IndexError
except IndexError:
oops = 1
print oops
def try_loop(args):
try:
x = 0
for i in args:
if i is 0:
continue
elif i is None:
break
elif i is False:
return
i()
except ValueError:
x = 1
finally:
return x
def try_finally(a):
try:
for i in a:
if i > 0:
x = 1
finally:
return x
def try_finally_nested(m):
try:
try:
try:
f = m()
except:
pass
finally:
pass
except:
print f
_ERRORS = """
12:12: local variable 'e' might be referenced before assignment
12:15: local variable 'i' might be referenced before assignment
19:12: local variable 'a' might be referenced before assignment
63:16: local variable 'x' might be referenced before assignment
69:16: local variable 'x' might be referenced before assignment
77:14: local variable 'oops' might be referenced before assignment
93:16: local variable 'x' might be referenced before assignment
101:16: local variable 'x' might be referenced before assignment
113:15: local variable 'f' might be referenced before assignment
"""
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror
def simple_for(n):
for i in n:
a = 1
return a
def simple_for_break(n):
for i in n:
a = 1
break
return a
def simple_for_pos(n):
for i in n:
a = 1
else:
a = 0
return a
def simple_target(n):
for i in n:
pass
return i
def simple_target_f(n):
for i in n:
i *= i
return i
def simple_for_from(n):
for i from 0 <= i <= n:
x = i
else:
return x
def for_continue(l):
for i in l:
if i > 0:
continue
x = i
print x
def for_break(l):
for i in l:
if i > 0:
break
x = i
print x
def for_finally_continue(f):
for i in f:
try:
x = i()
finally:
print x
continue
def for_finally_break(f):
for i in f:
try:
x = i()
finally:
print x
break
def for_finally_outer(p, f):
x = 1
try:
for i in f:
print x
x = i()
if x > 0:
continue
if x < 0:
break
finally:
del x
_ERRORS = """
8:12: local variable 'a' might be referenced before assignment
14:12: local variable 'a' might be referenced before assignment
26:12: local variable 'i' might be referenced before assignment
31:12: local variable 'i' might be referenced before assignment
37:16: local variable 'x' might be referenced before assignment
44:11: local variable 'x' might be referenced before assignment
51:11: local variable 'x' might be referenced before assignment
58:19: local variable 'x' might be referenced before assignment
66:19: local variable 'x' might be referenced before assignment
"""
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror
def unbound_inside_generator(*args):
for i in args:
yield x
x = i + i
_ERRORS = """
7:15: local variable 'x' might be referenced before assignment
"""
# cython: language_level=2, warn.maybe_uninitialized=True
# mode: error
# tag: werror
def list_comp(a):
r = [i for i in a]
return i
# dict comps are a py3 feature and don't leak their loop variables here
def dict_comp(a):
r = {i: j for i, j in a}
return i, j
def dict_comp2(a):
r = {i: j for i, j in a}
print i, j
i, j = 0, 0
_ERRORS = """
7:12: local variable 'i' might be referenced before assignment
12:12: undeclared name not builtin: i
12:15: undeclared name not builtin: j
16:11: local variable 'i' referenced before assignment
16:14: local variable 'j' referenced before assignment
"""
# cython: language_level=3, warn.maybe_uninitialized=True
# mode: error
# tag: werror
def ref(obj):
pass
def list_comp(a):
r = [i for i in a]
ref(i)
i = 0
return r
def dict_comp(a):
r = {i: j for i, j in a}
ref(i)
i = 0
return r
_ERRORS = """
10:9: local variable 'i' referenced before assignment
16:9: local variable 'i' referenced before assignment
"""
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror
def simple_while(n):
while n > 0:
n -= 1
a = 0
return a
def simple_while_break(n):
while n > 0:
n -= 1
break
else:
a = 1
return a
def simple_while_pos(n):
while n > 0:
n -= 1
a = 0
else:
a = 1
return a
def while_finally_continue(p, f):
while p():
try:
x = f()
finally:
print x
continue
def while_finally_break(p, f):
while p():
try:
x = f()
finally:
print x
break
def while_finally_outer(p, f):
x = 1
try:
while p():
print x
x = f()
if x > 0:
continue
if x < 0:
break
finally:
del x
_ERRORS = """
9:12: local variable 'a' might be referenced before assignment
17:12: local variable 'a' might be referenced before assignment
32:19: local variable 'x' might be referenced before assignment
40:19: local variable 'x' might be referenced before assignment
"""
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror
def with_no_target(m):
with m:
print a
a = 1
def unbound_manager(m1):
with m2:
pass
m2 = m1
def with_target(m):
with m as f:
print(f)
def with_mgr(m):
try:
with m() as f:
pass
except:
print f
_ERRORS = """
7:15: local variable 'a' referenced before assignment
11:11: local variable 'm2' referenced before assignment
24:15: local variable 'f' might be referenced before assignment
"""
# mode: error
# tag: werror, unreachable, control-flow
def try_finally():
try:
return
finally:
return
print 'oops'
def try_return():
try:
return
except:
return
print 'oops'
def for_return(a):
for i in a:
return
else:
return
print 'oops'
def while_return(a):
while a:
return
else:
return
print 'oops'
def forfrom_return(a):
for i from 0 <= i <= a:
return
else:
return
print 'oops'
_ERRORS = """
9:4: Unreachable code
16:4: Unreachable code
23:4: Unreachable code
30:4: Unreachable code
37:4: Unreachable code
"""
# cython: warn.unused=True, warn.unused_arg=True, warn.unused_result=True
# mode: error
# tag: werror
def unused_variable():
a = 1
def unused_cascade(arg):
a, b = arg.split()
return a
def unused_arg(arg):
pass
def unused_result():
r = 1 + 1
r = 2
return r
def unused_nested():
def unused_one():
pass
def unused_class():
class Unused:
pass
# this should not generate warning
def used(x, y):
x.y = 1
y[0] = 1
lambda x: x
def unused_and_unassigned():
cdef object foo
cdef int i
_ERRORS = """
6:6: Unused entry 'a'
9:9: Unused entry 'b'
12:15: Unused argument 'arg'
16:6: Unused result in 'r'
21:4: Unused entry 'unused_one'
25:4: Unused entry 'Unused'
35:16: Unused entry 'foo'
36:13: Unused entry 'i'
"""
......@@ -47,15 +47,6 @@ def nousage():
"""
cdef object[int, ndim=2] buf
def printbuf():
"""
Just compilation.
"""
cdef object[int, ndim=2] buf
print buf
return
buf[0,0] = 0
@testcase
def acquire_release(o1, o2):
"""
......@@ -681,20 +672,20 @@ def mixed_get(object[int] buf, int unsafe_idx, int safe_idx):
#
# Coercions
#
@testcase
def coercions(object[unsigned char] uc):
"""
TODO
"""
print type(uc[0])
uc[0] = -1
print uc[0]
uc[0] = <int>3.14
print uc[0]
cdef char* ch = b"asfd"
cdef object[object] objbuf
objbuf[3] = ch
## @testcase
## def coercions(object[unsigned char] uc):
## """
## TODO
## """
## print type(uc[0])
## uc[0] = -1
## print uc[0]
## uc[0] = <int>3.14
## print uc[0]
## cdef char* ch = b"asfd"
## cdef object[object] objbuf
## objbuf[3] = ch
#
......@@ -1064,7 +1055,8 @@ cdef class MockBuffer:
stdlib.free(self.buffer)
cdef void* create_buffer(self, data):
cdef char* buf = <char*>stdlib.malloc(len(data) * self.itemsize)
cdef size_t n = <size_t>(len(data) * self.itemsize)
cdef char* buf = <char*>stdlib.malloc(n)
cdef char* it = buf
for value in data:
self.write(it, value)
......@@ -1072,19 +1064,22 @@ cdef class MockBuffer:
return buf
cdef void* create_indirect_buffer(self, data, shape):
cdef size_t n = 0
cdef void** buf
assert shape[0] == len(data)
if len(shape) == 1:
return self.create_buffer(data)
else:
shape = shape[1:]
buf = <void**>stdlib.malloc(len(data) * sizeof(void*))
n = <size_t>len(data) * sizeof(void*)
buf = <void**>stdlib.malloc(n)
for idx, subdata in enumerate(data):
buf[idx] = self.create_indirect_buffer(subdata, shape)
return buf
cdef Py_ssize_t* list_to_sizebuf(self, l):
cdef Py_ssize_t* buf = <Py_ssize_t*>stdlib.malloc(len(l) * sizeof(Py_ssize_t))
cdef size_t n = <size_t>len(l) * sizeof(Py_ssize_t)
cdef Py_ssize_t* buf = <Py_ssize_t*>stdlib.malloc(n)
for i, x in enumerate(l):
buf[i] = x
return buf
......@@ -1137,7 +1132,7 @@ cdef class MockBuffer:
cdef class CharMockBuffer(MockBuffer):
cdef int write(self, char* buf, object value) except -1:
(<char*>buf)[0] = <int>value
(<char*>buf)[0] = <char>value
return 0
cdef get_itemsize(self): return sizeof(char)
cdef get_default_format(self): return b"@b"
......@@ -1172,7 +1167,7 @@ cdef class UnsignedShortMockBuffer(MockBuffer):
cdef class FloatMockBuffer(MockBuffer):
cdef int write(self, char* buf, object value) except -1:
(<float*>buf)[0] = <float>value
(<float*>buf)[0] = <float>(<double>value)
return 0
cdef get_itemsize(self): return sizeof(float)
cdef get_default_format(self): return b"f"
......
cimport cython
uspace = u' '
ustring_with_a = u'abcdefg'
ustring_without_a = u'bcdefg'
@cython.test_fail_if_path_exists('//SimpleCallNode')
def ord_Py_UNICODE(unicode s):
"""
>>> ord_Py_UNICODE(uspace)
32
"""
cdef Py_UNICODE u
u = s[0]
return ord(u)
@cython.test_assert_path_exists('//IntNode')
@cython.test_fail_if_path_exists('//SimpleCallNode')
def ord_const():
"""
>>> ord_const()
32
"""
return ord(u' ')
@cython.test_assert_path_exists('//PrimaryCmpNode//IntNode')
@cython.test_fail_if_path_exists('//SimpleCallNode')
def unicode_for_loop_ord(unicode s):
"""
......@@ -13,6 +34,6 @@ def unicode_for_loop_ord(unicode s):
False
"""
for c in s:
if ord(c) == u'a':
if ord(c) == ord(u'a'):
return True
return False
cimport cython
b_a = b'a'
b_b = b'b'
@cython.test_assert_path_exists(
"//PythonCapiCallNode")
def bytes_startswith(bytes s, sub, start=None, stop=None):
"""
>>> bytes_startswith(b_a, b_a)
True
>>> bytes_startswith(b_a+b_b, b_a)
True
>>> bytes_startswith(b_a, b_b)
False
>>> bytes_startswith(b_a+b_b, b_b)
False
>>> bytes_startswith(b_a, (b_a, b_b))
True
>>> bytes_startswith(b_a, b_a, 1)
False
>>> bytes_startswith(b_a, b_a, 0, 0)
False
"""
if start is None:
return s.startswith(sub)
elif stop is None:
return s.startswith(sub, start)
else:
return s.startswith(sub, start, stop)
@cython.test_assert_path_exists(
"//PythonCapiCallNode")
def bytes_endswith(bytes s, sub, start=None, stop=None):
"""
>>> bytes_endswith(b_a, b_a)
True
>>> bytes_endswith(b_b+b_a, b_a)
True
>>> bytes_endswith(b_a, b_b)
False
>>> bytes_endswith(b_b+b_a, b_b)
False
>>> bytes_endswith(b_a, (b_a, b_b))
True
>>> bytes_endswith(b_a, b_a, 1)
False
>>> bytes_endswith(b_a, b_a, 0, 0)
False
"""
if start is None:
return s.endswith(sub)
elif stop is None:
return s.endswith(sub, start)
else:
return s.endswith(sub, start, stop)
# mode: run
# ticket: 466
# extension to T409
......@@ -20,3 +21,49 @@ def simple_parallel_int_mix():
cdef object ao, bo
ai, bi = al, bl = ao, bo = c = d = [1,2]
return ao, bo, ai, bi, al, bl, c, d
cdef int called = 0
cdef char* get_string():
global called
called += 1
return "abcdefg"
def non_simple_rhs():
"""
>>> non_simple_rhs()
1
"""
cdef char *a, *b
cdef int orig_called = called
a = b = <char*>get_string()
assert a is b
return called - orig_called
from libc.stdlib cimport malloc, free
def non_simple_rhs_malloc():
"""
>>> non_simple_rhs_malloc()
"""
cdef char *a, *b, **c
c = &b
c[0] = a = <char*>malloc(2)
a[0] = c'X'
b[1] = c'\0'
# copy from different pointers to make sure they all point to the
# same memory
cdef char[2] x
x[0] = b[0]
x[1] = a[1]
# clean up
free(a)
if b is not a: # shouldn't happen
free(b)
# check copied values
assert x[0] == c'X'
assert x[1] == c'\0'
PYTHON setup.py build_ext --inplace
PYTHON -c "import a"
######## setup.py ########
from Cython.Build.Dependencies import cythonize
import Cython.Compiler.Options
Cython.Compiler.Options.cimport_from_pyx = True
from distutils.core import setup
setup(
ext_modules = cythonize("*.pyx"),
)
######## a.pyx ########
from b cimport Bclass, Bfunc, Bstruct, Benum, Benum_value, Btypedef
cdef Bclass b = Bclass(5)
assert Bfunc(&b.value) == b.value
assert b.asStruct().value == b.value
cdef Btypedef b_type = &b.value
cdef Benum b_enum = Benum_value
#from c cimport ClassC
#cdef ClassC c = ClassC()
#print c.value
######## b.pyx ########
cdef enum Benum:
Benum_value
cdef struct Bstruct:
int value
ctypedef long *Btypedef
cdef class Bclass:
cdef long value
def __init__(self, value):
self.value = value
cdef Bstruct asStruct(self):
return Bstruct(value=self.value)
cdef long Bfunc(Btypedef x):
return x[0]
######## c.pxd ########
cdef class ClassC:
cdef int value
......@@ -12,12 +12,93 @@ cdef extern from "cpp_exceptions_helper.h":
cdef int raise_index_value "raise_index"(bint fire) except +ValueError
cdef int raise_index_custom "raise_index"(bint fire) except +raise_py_error
cdef void raise_domain_error() except +
cdef void raise_ios_failure() except +
cdef void raise_memory() except +
cdef void raise_overflow() except +
cdef void raise_range_error() except +
cdef void raise_typeerror() except +
cdef void raise_underflow() except +
cdef cppclass Foo:
int bar_raw "bar"(bint fire) except +
int bar_value "bar"(bint fire) except +ValueError
int bar_custom "bar"(bint fire) except +raise_py_error
def test_domain_error():
"""
>>> test_domain_error()
Traceback (most recent call last):
...
ValueError: domain_error
"""
raise_domain_error()
def test_ios_failure():
"""
>>> test_ios_failure()
Traceback (most recent call last):
...
IOError: iostream failure
"""
raise_ios_failure()
def test_memory():
"""
>>> test_memory()
Traceback (most recent call last):
...
MemoryError
"""
# Re-raise the exception without a description string because we can't
# rely on the implementation-defined value of what() in the doctest.
try:
raise_memory()
except MemoryError:
raise MemoryError
def test_overflow():
"""
>>> test_overflow()
Traceback (most recent call last):
...
OverflowError: overflow_error
"""
raise_overflow()
def test_range_error():
"""
>>> test_range_error()
Traceback (most recent call last):
...
ArithmeticError: range_error
"""
raise_range_error()
def test_typeerror():
"""
>>> test_typeerror()
Traceback (most recent call last):
...
TypeError
"""
# Re-raise the exception without a description string because we can't
# rely on the implementation-defined value of what() in the doctest.
try:
raise_typeerror()
except TypeError:
raise TypeError
def test_underflow():
"""
>>> test_underflow()
Traceback (most recent call last):
...
ArithmeticError: underflow_error
"""
raise_underflow()
def test_int_raw(bint fire):
"""
>>> test_int_raw(False)
......
#include <ios>
#include <new>
#include <stdexcept>
int raise_int(int fire) {
......@@ -23,3 +25,39 @@ class Foo {
return 0;
}
};
void raise_domain_error() {
throw std::domain_error("domain_error");
}
void raise_ios_failure() {
throw std::ios_base::failure("iostream failure");
}
void raise_memory() {
// std::bad_alloc can only be default constructed,
// so we have no control over the error message
throw std::bad_alloc();
}
void raise_overflow() {
throw std::overflow_error("overflow_error");
}
void raise_range_error() {
throw std::range_error("range_error");
}
struct Base { virtual ~Base() {} };
struct Derived : Base { void use() const { abort(); } };
void raise_typeerror() {
Base foo;
Base &bar = foo; // prevents "dynamic_cast can never succeed" warning
Derived &baz = dynamic_cast<Derived &>(bar);
baz.use(); // not reached; prevents "unused variable" warning
}
void raise_underflow() {
throw std::underflow_error("underflow_error");
}
......@@ -86,13 +86,13 @@ def del_local(a):
>>> del_local(object())
"""
del a
assert a is None # Until we have unbound locals...
assert 'a' not in locals()
def del_seq(a, b, c):
"""
>>> del_seq(1, 2, 3)
"""
del a, (b, c)
assert a is None # Until we have unbound locals...
assert b is None # Until we have unbound locals...
assert c is None # Until we have unbound locals...
assert 'a' not in locals()
assert 'b' not in locals()
assert 'c' not in locals()
......@@ -305,19 +305,6 @@ def test_nested_yield():
"""
yield (yield (yield 1))
def test_inside_lambda():
"""
>>> obj = test_inside_lambda()()
>>> next(obj)
1
>>> next(obj)
2
>>> next(obj)
Traceback (most recent call last):
StopIteration
"""
return lambda:((yield 1), (yield 2))
def test_nested_gen(n):
"""
>>> [list(a) for a in test_nested_gen(5)]
......
......@@ -21,9 +21,11 @@ def wrap_hasattr(obj, name):
False
>>> wrap_hasattr(Foo(), "bar")
False
>>> wrap_hasattr(Foo(), "baz") #doctest: +ELLIPSIS
>>> Foo().baz #doctest: +ELLIPSIS
Traceback (most recent call last):
...
ZeroDivisionError: ...
>>> wrap_hasattr(Foo(), "baz")
False
"""
return hasattr(obj, name)
......@@ -8,7 +8,7 @@ __doc__ = u"""
[('args', (2, 3)), ('kwds', {'k': 5}), ('x', 1), ('y', 'hi'), ('z', 5)]
>>> sorted(get_locals_items_listcomp(1,2,3, k=5))
[('args', (2, 3)), ('item', None), ('kwds', {'k': 5}), ('x', 1), ('y', 'hi'), ('z', 5)]
[('args', (2, 3)), ('kwds', {'k': 5}), ('x', 1), ('y', 'hi'), ('z', 5)]
"""
def get_locals(x, *args, **kwds):
......
cdef extern from *:
bint FALSE "0"
void import_array()
void import_umath()
if 0:
if FALSE:
import_array()
import_umath()
......@@ -4,6 +4,7 @@
cimport cython
from cython.parallel import prange
cimport numpy as np
include "numpy_common.pxi"
@cython.boundscheck(False)
......
......@@ -24,4 +24,22 @@ def test_parallel():
free(buf)
def test_num_threads():
"""
>>> test_num_threads()
1
"""
cdef int dyn = openmp.omp_get_dynamic()
cdef int num_threads
cdef int *p = &num_threads
openmp.omp_set_dynamic(0)
with nogil, cython.parallel.parallel(num_threads=1):
p[0] = openmp.omp_get_num_threads()
openmp.omp_set_dynamic(dyn)
return num_threads
include "sequential_parallel.pyx"
......@@ -171,6 +171,64 @@ def test_pure_mode():
for i in pure_parallel.prange(4, -1, -1, schedule='dynamic', nogil=True):
print i
with pure_parallel.parallel:
with pure_parallel.parallel():
print pure_parallel.threadid()
cdef extern from "types.h":
ctypedef short actually_long_t
ctypedef long actually_short_t
ctypedef int myint_t
def test_nan_init():
"""
>>> test_nan_init()
"""
cdef int mybool = 0
cdef int err = 0
cdef int *errp = &err
cdef signed char a1 = 10
cdef unsigned char a2 = 10
cdef short b1 = 10
cdef unsigned short b2 = 10
cdef int c1 = 10
cdef unsigned int c2 = 10
cdef long d1 = 10
cdef unsigned long d2 = 10
cdef long long e1 = 10
cdef unsigned long long e2 = 10
cdef actually_long_t miss1 = 10
cdef actually_short_t miss2 = 10
cdef myint_t typedef1 = 10
cdef float f = 10.0
cdef double g = 10.0
cdef long double h = 10.0
cdef void *p = <void *> 10
with nogil, cython.parallel.parallel():
# First, trick the error checking to make it believe these variables
# are initialized after this if
if mybool: # mybool is always false!
a1 = a2 = b1 = b2 = c1 = c2 = d1 = d2 = e1 = e2 = 0
f = g = h = 0.0
p = NULL
miss1 = miss2 = typedef1 = 0
if (a1 == 10 or a2 == 10 or
b1 == 10 or b2 == 10 or
c1 == 10 or c2 == 10 or
d1 == 10 or d2 == 10 or
e1 == 10 or e2 == 10 or
f == 10.0 or g == 10.0 or h == 10.0 or
p == <void *> 10 or miss1 == 10 or miss2 == 10
or typedef1 == 10):
errp[0] = 1
if err:
raise Exception("One of the values was not initialized to a maximum "
"or NaN value")
......@@ -25,3 +25,21 @@ class class4:
@staticmethod
def plus1(a):
return a + 1
def nested_class():
"""
>>> cls = nested_class()
>>> cls.plus1(1)
2
>>> obj = cls()
>>> obj.plus1(1)
2
"""
class class5(object):
def __new__(cls): # implicit staticmethod
return object.__new__(cls)
@staticmethod
def plus1(a):
return a + 1
return class5
cimport cython
@cython.test_assert_path_exists(
"//PythonCapiCallNode")
def str_startswith(str s, sub, start=None, stop=None):
"""
>>> str_startswith('a', 'a')
True
>>> str_startswith('ab', 'a')
True
>>> str_startswith('a', 'b')
False
>>> str_startswith('ab', 'b')
False
>>> str_startswith('a', ('a', 'b'))
True
>>> str_startswith('a', 'a', 1)
False
>>> str_startswith('a', 'a', 0, 0)
False
"""
if start is None:
return s.startswith(sub)
elif stop is None:
return s.startswith(sub, start)
else:
return s.startswith(sub, start, stop)
@cython.test_assert_path_exists(
"//PythonCapiCallNode")
def str_endswith(str s, sub, start=None, stop=None):
"""
>>> str_endswith('a', 'a')
True
>>> str_endswith('ba', 'a')
True
>>> str_endswith('a', 'b')
False
>>> str_endswith('ba', 'b')
False
>>> str_endswith('a', ('a', 'b'))
True
>>> str_endswith('a', 'a', 1)
False
>>> str_endswith('a', 'a', 0, 0)
False
"""
if start is None:
return s.endswith(sub)
elif stop is None:
return s.endswith(sub, start)
else:
return s.endswith(sub, start, stop)
/*
This header is present to test effects of misdeclaring
types Cython-side.
*/
typedef long actually_long_t;
typedef short actually_short_t;
# mode: run
# tag: control-flow, uninitialized
def conditional(cond):
"""
>>> conditional(True)
[]
>>> conditional(False)
Traceback (most recent call last):
...
UnboundLocalError: local variable 'a' referenced before assignment
"""
if cond:
a = []
return a
def inside_loop(iter):
"""
>>> inside_loop([1,2,3])
3
>>> inside_loop([])
Traceback (most recent call last):
...
UnboundLocalError: local variable 'i' referenced before assignment
"""
for i in iter:
pass
return i
def try_except(cond):
"""
>>> try_except(True)
[]
>>> try_except(False)
Traceback (most recent call last):
...
UnboundLocalError: local variable 'a' referenced before assignment
"""
try:
if cond:
a = []
raise ValueError
except ValueError:
return a
def try_finally(cond):
"""
>>> try_finally(True)
[]
>>> try_finally(False)
Traceback (most recent call last):
...
UnboundLocalError: local variable 'a' referenced before assignment
"""
try:
if cond:
a = []
raise ValueError
finally:
return a
def deleted(cond):
"""
>>> deleted(False)
{}
>>> deleted(True)
Traceback (most recent call last):
...
UnboundLocalError: local variable 'a' referenced before assignment
"""
a = {}
if cond:
del a
return a
def test_nested(cond):
"""
>>> test_nested(True)
>>> test_nested(False)
Traceback (most recent call last):
...
UnboundLocalError: local variable 'a' referenced before assignment
"""
if cond:
def a():
pass
return a()
def test_outer(cond):
"""
>>> test_outer(True)
{}
>>> test_outer(False)
Traceback (most recent call last):
...
UnboundLocalError: local variable 'a' referenced before assignment
"""
if cond:
a = {}
def inner():
return a
return a
def test_inner(cond):
"""
>>> test_inner(True)
{}
>>> test_inner(False)
Traceback (most recent call last):
...
NameError: free variable 'a' referenced before assignment in enclosing scope
"""
if cond:
a = {}
def inner():
return a
return inner()
def test_class(cond):
"""
>>> test_class(True)
1
>>> test_class(False)
Traceback (most recent call last):
...
UnboundLocalError: local variable 'A' referenced before assignment
"""
if cond:
class A:
x = 1
return A.x
......@@ -24,6 +24,15 @@ cdef void puts(char *string) with gil:
"""
print string.decode('ascii')
class ExceptionWithMsg(Exception):
"""
In python2.4 Exception is formatted as <exceptions.Exception
instance at 0x1b8f948> when swallowed.
"""
def __repr__(self):
return "ExceptionWithMsg(%r)" % self.args
# Start with some normal Python functions
......@@ -139,42 +148,46 @@ def test_restore_exception():
finally:
raise Exception("Override the raised exception")
def test_declared_variables():
"""
>>> test_declared_variables()
None
None
['s', 'p', 'a', 'm']
['s', 'p', 'a', 'm']
"""
cdef object somevar
print somevar
with nogil:
with gil:
print somevar
somevar = list("spam")
print somevar
print somevar
def test_undeclared_variables():
"""
>>> test_undeclared_variables()
None
None
['s', 'p', 'a', 'm']
['s', 'p', 'a', 'm']
"""
print somevar
with nogil:
with gil:
print somevar
somevar = list("spam")
print somevar
print somevar
### DISABLED: this cannot work with flow control analysis
##
## def test_declared_variables():
## """
## >>> test_declared_variables()
## None
## None
## ['s', 'p', 'a', 'm']
## ['s', 'p', 'a', 'm']
## """
## cdef object somevar
##
## print somevar
##
## with nogil:
## with gil:
## print somevar
## somevar = list("spam")
## print somevar
##
## print somevar
### DISABLED: this cannot work with flow control analysis
##
## def test_undeclared_variables():
## """
## >>> test_undeclared_variables()
## None
## None
## ['s', 'p', 'a', 'm']
## ['s', 'p', 'a', 'm']
## """
## print somevar
## with nogil:
## with gil:
## print somevar
## somevar = list("spam")
## print somevar
##
## print somevar
def test_loops_and_boxing():
"""
......@@ -247,7 +260,7 @@ cpdef test_cpdef():
cdef void void_nogil_ignore_exception() nogil:
with gil:
raise Exception("This is swallowed")
raise ExceptionWithMsg("This is swallowed")
puts("unreachable")
with gil:
......@@ -259,16 +272,16 @@ cdef void void_nogil_nested_gil() nogil:
with gil:
print 'Inner gil section'
puts("nogil section")
raise Exception("Swallow this")
raise ExceptionWithMsg("Swallow this")
puts("Don't print this")
def test_nogil_void_funcs_with_gil():
"""
>>> redirect_stderr(test_nogil_void_funcs_with_gil)
Exception Exception: Exception('This is swallowed',) in 'with_gil.void_nogil_ignore_exception' ignored
Exception with_gil.ExceptionWithMsg: ExceptionWithMsg('This is swallowed') in 'with_gil.void_nogil_ignore_exception' ignored
Inner gil section
nogil section
Exception Exception: Exception('Swallow this',) in 'with_gil.void_nogil_nested_gil' ignored
Exception with_gil.ExceptionWithMsg: ExceptionWithMsg('Swallow this') in 'with_gil.void_nogil_nested_gil' ignored
"""
void_nogil_ignore_exception()
void_nogil_nested_gil()
......@@ -276,10 +289,10 @@ def test_nogil_void_funcs_with_gil():
def test_nogil_void_funcs_with_nogil():
"""
>>> redirect_stderr(test_nogil_void_funcs_with_nogil)
Exception Exception: Exception('This is swallowed',) in 'with_gil.void_nogil_ignore_exception' ignored
Exception with_gil.ExceptionWithMsg: ExceptionWithMsg('This is swallowed') in 'with_gil.void_nogil_ignore_exception' ignored
Inner gil section
nogil section
Exception Exception: Exception('Swallow this',) in 'with_gil.void_nogil_nested_gil' ignored
Exception with_gil.ExceptionWithMsg: ExceptionWithMsg('Swallow this') in 'with_gil.void_nogil_nested_gil' ignored
"""
with nogil:
void_nogil_ignore_exception()
......
# mode: run
# tag: generators, lambda
try:
from builtins import next # Py3k
except ImportError:
def next(it):
return it.next()
def test_inside_lambda():
"""
>>> obj = test_inside_lambda()()
>>> next(obj)
1
>>> next(obj)
2
>>> next(obj)
Traceback (most recent call last):
StopIteration
"""
return lambda:((yield 1), (yield 2))