Commit b8d99c33 authored by Mark Florisson

merge

parents 1489a615 24065537
......@@ -744,7 +744,7 @@ typedef struct {
__Pyx_StructField root;
__Pyx_BufFmt_StackElem* head;
size_t fmt_offset;
int new_count, enc_count;
size_t new_count, enc_count;
int is_complex;
char enc_type;
char new_packmode;
......@@ -794,8 +794,8 @@ static int __Pyx_BufFmt_ParseNumber(const char** ts) {
}
static void __Pyx_BufFmt_RaiseUnexpectedChar(char ch) {
char msg[] = {ch, 0};
PyErr_Format(PyExc_ValueError, "Unexpected format string character: '%s'", msg);
PyErr_Format(PyExc_ValueError,
"Unexpected format string character: '%c'", ch);
}
static const char* __Pyx_BufFmt_DescribeTypeChar(char ch, int is_complex) {
......@@ -868,7 +868,7 @@ typedef struct { char c; double x; } __Pyx_st_double;
typedef struct { char c; long double x; } __Pyx_st_longdouble;
typedef struct { char c; void *x; } __Pyx_st_void_p;
#ifdef HAVE_LONG_LONG
typedef struct { char c; PY_LONG_LONG x; } __Pyx_s_long_long;
typedef struct { char c; PY_LONG_LONG x; } __Pyx_st_longlong;
#endif
static size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, int is_complex) {
......@@ -878,7 +878,7 @@ static size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, int is_complex) {
case 'i': case 'I': return sizeof(__Pyx_st_int) - sizeof(int);
case 'l': case 'L': return sizeof(__Pyx_st_long) - sizeof(long);
#ifdef HAVE_LONG_LONG
case 'q': case 'Q': return sizeof(__Pyx_s_long_long) - sizeof(PY_LONG_LONG);
case 'q': case 'Q': return sizeof(__Pyx_st_longlong) - sizeof(PY_LONG_LONG);
#endif
case 'f': return sizeof(__Pyx_st_float) - sizeof(float);
case 'd': return sizeof(__Pyx_st_double) - sizeof(double);
......@@ -890,7 +890,7 @@ static size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, int is_complex) {
}
}
static size_t __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) {
static char __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) {
switch (ch) {
case 'c': case 'b': case 'h': case 'i': case 'l': case 'q': return 'I';
case 'B': case 'H': case 'I': case 'L': case 'Q': return 'U';
......@@ -944,8 +944,8 @@ static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) {
size = __Pyx_BufFmt_TypeCharToStandardSize(ctx->enc_type, ctx->is_complex);
}
if (ctx->enc_packmode == '@') {
int align_at = __Pyx_BufFmt_TypeCharToAlignment(ctx->enc_type, ctx->is_complex);
int align_mod_offset;
size_t align_at = __Pyx_BufFmt_TypeCharToAlignment(ctx->enc_type, ctx->is_complex);
size_t align_mod_offset;
if (align_at == 0) return -1;
align_mod_offset = ctx->fmt_offset % align_at;
if (align_mod_offset > 0) ctx->fmt_offset += align_at - align_mod_offset;
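The '@' packmode branch above rounds fmt_offset up to the next multiple of the native alignment, which __Pyx_BufFmt_TypeCharToAlignment derives from the sizeof(struct { char c; T x; }) - sizeof(T) trick shown in the earlier hunks. A minimal Python sketch of the same arithmetic, using ctypes to reproduce the sizeof difference (the concrete numbers depend on the platform ABI):

import ctypes

# sizeof(struct {char c; double x;}) - sizeof(double) == native alignment of double
class _st_double(ctypes.Structure):
    _fields_ = [('c', ctypes.c_char), ('x', ctypes.c_double)]

align_at = ctypes.sizeof(_st_double) - ctypes.sizeof(ctypes.c_double)
assert align_at == ctypes.alignment(ctypes.c_double)

fmt_offset = 5                           # hypothetical running offset
align_mod_offset = fmt_offset % align_at
if align_mod_offset > 0:                 # round up, as in the C code above
    fmt_offset += align_at - align_mod_offset
print(align_at, fmt_offset)              # typically 8, 8 on common 64-bit ABIs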
......@@ -968,8 +968,8 @@ static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) {
offset = ctx->head->parent_offset + field->offset;
if (ctx->fmt_offset != offset) {
PyErr_Format(PyExc_ValueError,
"Buffer dtype mismatch; next field is at offset %"PY_FORMAT_SIZE_T"d "
"but %"PY_FORMAT_SIZE_T"d expected", (Py_ssize_t)ctx->fmt_offset, (Py_ssize_t)offset);
"Buffer dtype mismatch; next field is at offset %"PY_FORMAT_SIZE_T"d but %"PY_FORMAT_SIZE_T"d expected",
(Py_ssize_t)ctx->fmt_offset, (Py_ssize_t)offset);
return -1;
}
......@@ -1054,9 +1054,8 @@ static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const cha
break;
case 'T': /* substruct */
{
int i;
const char* ts_after_sub;
int struct_count = ctx->new_count;
size_t i, struct_count = ctx->new_count;
ctx->new_count = 1;
++ts;
if (*ts != '{') {
......@@ -1118,15 +1117,14 @@ static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const cha
break;
default:
{
ctx->new_count = __Pyx_BufFmt_ParseNumber(&ts);
if (ctx->new_count == -1) { /* First char was not a digit */
char msg[2] = { *ts, 0 };
int number = __Pyx_BufFmt_ParseNumber(&ts);
if (number == -1) { /* First char was not a digit */
PyErr_Format(PyExc_ValueError,
"Does not understand character buffer dtype format string ('%s')", msg);
"Does not understand character buffer dtype format string ('%c')", *ts);
return NULL;
}
ctx->new_count = (size_t)number;
}
}
}
}
......@@ -1140,7 +1138,7 @@ static CYTHON_INLINE void __Pyx_ZeroBuffer(Py_buffer* buf) {
}
static CYTHON_INLINE int __Pyx_GetBufferAndValidate(Py_buffer* buf, PyObject* obj, __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack) {
if (obj == Py_None) {
if (obj == Py_None || obj == NULL) {
__Pyx_ZeroBuffer(buf);
return 0;
}
......@@ -1161,8 +1159,7 @@ static CYTHON_INLINE int __Pyx_GetBufferAndValidate(Py_buffer* buf, PyObject* ob
PyErr_Format(PyExc_ValueError,
"Item size of buffer (%"PY_FORMAT_SIZE_T"d byte%s) does not match size of '%s' (%"PY_FORMAT_SIZE_T"d byte%s)",
buf->itemsize, (buf->itemsize > 1) ? "s" : "",
dtype->name,
(Py_ssize_t)dtype->size, (dtype->size > 1) ? "s" : "");
dtype->name, (Py_ssize_t)dtype->size, (dtype->size > 1) ? "s" : "");
goto fail;
}
if (buf->suboffsets == NULL) buf->suboffsets = __Pyx_minusones;
......
......@@ -7,6 +7,7 @@ from Code import UtilityCode
from TypeSlots import Signature
import PyrexTypes
import Naming
import Options
# C-level implementations of builtin types, functions and methods
......@@ -76,25 +77,6 @@ bad:
}
""")
hasattr_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); /*proto*/
""",
impl = """
static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) {
PyObject *v = PyObject_GetAttr(o, n);
if (v) {
Py_DECREF(v);
return 1;
}
if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
PyErr_Clear();
return 0;
}
return -1;
}
""")
globals_utility_code = UtilityCode(
# This is a stub implementation until we have something more complete.
# Currently, we only handle the most common case of a read-only dict
......@@ -432,10 +414,7 @@ builtin_function_table = [
utility_code = getattr3_utility_code),
BuiltinFunction('getattr3', "OOO", "O", "__Pyx_GetAttr3", "getattr",
utility_code = getattr3_utility_code), # Pyrex compatibility
BuiltinFunction('globals', "", "O", "__Pyx_Globals",
utility_code = globals_utility_code),
BuiltinFunction('hasattr', "OO", "b", "__Pyx_HasAttr",
utility_code = hasattr_utility_code),
BuiltinFunction('hasattr', "OO", "b", "PyObject_HasAttr"),
BuiltinFunction('hash', "O", "h", "PyObject_Hash"),
#('hex', "", "", ""),
#('id', "", "", ""),
......@@ -482,6 +461,11 @@ builtin_function_table = [
BuiltinFunction('__Pyx_PyObject_Append', "OO", "O", "__Pyx_PyObject_Append"),
]
if not Options.old_style_globals:
builtin_function_table.append(
BuiltinFunction('globals', "", "O", "__Pyx_Globals",
utility_code = globals_utility_code))
# Builtin types
# bool
# buffer
......@@ -544,10 +528,14 @@ builtin_types_table = [
]),
# ("file", "PyFile_Type", []), # not in Py3
("set", "PySet_Type", [BuiltinMethod("clear", "T", "r", "PySet_Clear"),
BuiltinMethod("discard", "TO", "r", "PySet_Discard"),
BuiltinMethod("add", "TO", "r", "PySet_Add"),
BuiltinMethod("pop", "T", "O", "PySet_Pop")]),
("set", "PySet_Type", [BuiltinMethod("clear", "T", "r", "PySet_Clear",
utility_code = py23_set_utility_code),
BuiltinMethod("discard", "TO", "r", "PySet_Discard",
utility_code = py23_set_utility_code),
BuiltinMethod("add", "TO", "r", "PySet_Add",
utility_code = py23_set_utility_code),
BuiltinMethod("pop", "T", "O", "PySet_Pop",
utility_code = py23_set_utility_code)]),
("frozenset", "PyFrozenSet_Type", []),
]
......
......@@ -39,6 +39,7 @@ Options:
-3 Compile based on Python-3 syntax and code semantics.
--fast-fail Abort the compilation on the first error
--warning-error, -Werror Make all warnings into errors
--warning-extra, -Wextra Enable extra warnings
-X, --directive <name>=<value>[,<name=value,...] Overrides a compiler directive
"""
......@@ -132,8 +133,12 @@ def parse_command_line(args):
Options.fast_fail = True
elif option in ('-Werror', '--warning-errors'):
Options.warning_errors = True
elif option in ('-Wextra', '--warning-extra'):
options.compiler_directives.update(Options.extra_warnings)
elif option == "--disable-function-redefinition":
Options.disable_function_redefinition = True
elif option == "--old-style-globals":
Options.old_style_globals = True
elif option == "--directive" or option.startswith('-X'):
if option.startswith('-X') and option[2:].strip():
x_args = option[2:]
......
......@@ -1187,6 +1187,8 @@ class CCodeWriter(object):
entry.cname, dll_linkage = dll_linkage))
if entry.init is not None:
self.put_safe(" = %s" % entry.type.literal_code(entry.init))
elif entry.type.is_pyobject:
self.put(" = NULL");
self.putln(";")
def put_temp_declarations(self, func_context):
......@@ -1290,10 +1292,7 @@ class CCodeWriter(object):
def put_var_decref(self, entry):
if entry.type.is_pyobject:
if entry.init_to_none is False: # FIXME: 0 and False are treated differently???
self.putln("__Pyx_XDECREF(%s);" % self.entry_as_pyobject(entry))
else:
self.putln("__Pyx_DECREF(%s);" % self.entry_as_pyobject(entry))
def put_var_decref_clear(self, entry):
if entry.type.is_pyobject:
......@@ -1420,6 +1419,19 @@ class CCodeWriter(object):
# return self.putln("if (unlikely(%s < 0)) %s" % (value, self.error_goto(pos))) # TODO this path is almost _never_ taken, yet this macro makes it slower!
return self.putln("if (%s < 0) %s" % (value, self.error_goto(pos)))
def put_error_if_unbound(self, pos, entry):
import ExprNodes
if entry.from_closure:
func = '__Pyx_RaiseClosureNameError'
self.globalstate.use_utility_code(
ExprNodes.raise_closure_name_error_utility_code)
else:
func = '__Pyx_RaiseUnboundLocalError'
self.globalstate.use_utility_code(
ExprNodes.raise_unbound_local_error_utility_code)
self.put('if (unlikely(!%s)) { %s("%s"); %s }' % (
entry.cname, func, entry.name, self.error_goto(pos)))
def set_error_info(self, pos):
self.funcstate.should_declare_error_indicator = True
if self.c_line_in_traceback:
......
import bisect, sys
# This module keeps track of arbitrary "states" at any point of the code.
# A state is considered known if every path to the given point agrees on
# its state, otherwise it is None (i.e. unknown).
# It might be useful to be able to "freeze" the set of states by pushing
# all state changes to the tips of the trees for fast reading. Perhaps this
# could be done on get_state, clearing the cache on set_state (assuming
# incoming is immutable).
# This module still needs a lot of work, and probably should totally be
# redesigned. It doesn't take return, raise, continue, or break into
# account.
from Cython.Compiler.Scanning import StringSourceDescriptor
try:
_END_POS = (StringSourceDescriptor(unichr(sys.maxunicode)*10, ''),
sys.maxint, sys.maxint)
except AttributeError: # Py3
_END_POS = (StringSourceDescriptor(unichr(sys.maxunicode)*10, ''),
sys.maxsize, sys.maxsize)
class ControlFlow(object):
def __init__(self, start_pos, incoming, parent):
self.start_pos = start_pos
self.incoming = incoming
if parent is None and incoming is not None:
parent = incoming.parent
self.parent = parent
self.tip = {}
self.end_pos = _END_POS
def start_branch(self, pos):
self.end_pos = pos
branch_point = BranchingControlFlow(pos, self)
if self.parent is not None:
self.parent.branches[-1] = branch_point
return branch_point.branches[0]
def next_branch(self, pos):
self.end_pos = pos
return self.parent.new_branch(pos)
def finish_branch(self, pos):
self.end_pos = pos
self.parent.end_pos = pos
return LinearControlFlow(pos, self.parent)
def get_state(self, item, pos=_END_POS):
return self.get_pos_state(item, pos)[1]
def get_pos_state(self, item, pos=_END_POS):
# do some caching
if pos > self.end_pos:
try:
return self.tip[item]
except KeyError:
pass
pos_state = self._get_pos_state(item, pos)
if pos > self.end_pos:
self.tip[item] = pos_state
return pos_state
def _get_pos_state(self, item, pos):
current = self
while current is not None and pos <= current.start_pos:
current = current.incoming
if current is None:
return (None, None)
state = current._get_pos_state_local(item, pos)
while (state is None or state == (None, None)) and current.incoming is not None:
current = current.incoming
state = current._get_pos_state_local(item, pos)
if state is None:
return (None, None)
return state
def set_state(self, pos, item, state):
if item in self.tip:
del self.tip[item]
current = self
while pos < current.start_pos and current.incoming is not None:
current = current.incoming
if item in current.tip:
del current.tip[item]
current._set_state_local(pos, item, state)
class LinearControlFlow(ControlFlow):
def __init__(self, start_pos=(), incoming=None, parent=None):
ControlFlow.__init__(self, start_pos, incoming, parent)
self.events = {}
def _set_state_local(self, pos, item, state):
if item in self.events:
event_list = self.events[item]
else:
event_list = []
self.events[item] = event_list
bisect.insort(event_list, (pos, state))
def _get_pos_state_local(self, item, pos):
if item in self.events:
event_list = self.events[item]
for event in event_list[::-1]:
if event[0] < pos:
return event
return None
def to_string(self, indent='', limit=None):
if len(self.events) == 0:
s = indent + "[no state changes]"
else:
all = []
for item, event_list in self.events.items():
for pos, state in event_list:
all.append((indent, pos, item, state))
all.sort()
all = ["%s%s: %s <- %s" % data for data in all]
s = "\n".join(all)
if self.incoming is not limit and self.incoming is not None:
s = "%s\n%s" % (self.incoming.to_string(indent, limit=limit), s)
return s
class BranchingControlFlow(ControlFlow):
def __init__(self, start_pos, incoming, parent=None):
ControlFlow.__init__(self, start_pos, incoming, parent)
self.branches = [LinearControlFlow(start_pos, incoming, parent=self)]
self.branch_starts = [start_pos]
def _set_state_local(self, pos, item, state):
for branch_pos, branch in zip(self.branch_starts[::-1], self.branches[::-1]):
if pos >= branch_pos:
branch._set_state_local(pos, item, state)
return
def _get_pos_state_local(self, item, pos, stop_at=None):
if pos < self.end_pos:
for branch_pos, branch in zip(self.branch_starts[::-1], self.branches[::-1]):
if pos >= branch_pos:
return branch._get_pos_state_local(item, pos)
else:
state = self.branches[0]._get_pos_state_local(item, pos)
if state is None:
return None, None
last_pos, last_state = state
if last_state is None:
return None, None
for branch in self.branches[1:]:
state = branch._get_pos_state_local(item, pos)
if state is None:
return None, None
other_pos, other_state = state
if other_state != last_state:
return None, None
elif last_pos is not other_pos:
last_pos = max(last_pos, other_pos)
return last_pos, last_state
return None
def new_branch(self, pos):
self.branches.append(LinearControlFlow(pos, self.incoming, parent=self))
self.branch_starts.append(pos)
return self.branches[-1]
def to_string(self, indent='', limit=None):
join = "\n%sor\n" % indent
s = join.join([branch.to_string(indent+" ", limit=self.incoming) for branch in self.branches])
if self.incoming is not limit and self.incoming is not None:
s = "%s\n%s" % (self.incoming.to_string(indent, limit=limit), s)
return s
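As the header comment of this module says, a state is only considered known at a point if every path to that point agrees on it; otherwise it is reported as unknown (None). A small self-contained sketch of that merge rule (an illustration only, not this module's actual API):

def merge_branch_states(branch_states):
    # branch_states: one {item: state} dict per incoming branch
    merged = {}
    for item in set().union(*branch_states):
        values = [branch.get(item) for branch in branch_states]
        merged[item] = values[0] if all(v == values[0] for v in values) else None
    return merged

# 'x' is set on both branches of an if/else, 'y' only on one of them:
print(merge_branch_states([{'x': 'initialized', 'y': 'initialized'},
                           {'x': 'initialized'}]))
# {'x': 'initialized', 'y': None}  (key order may vary)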
......@@ -20,7 +20,7 @@ def context(position):
assert not (isinstance(source, unicode) or isinstance(source, str)), (
"Please replace filename strings with Scanning.FileSourceDescriptor instances %r" % source)
try:
F = list(source.get_lines())
F = source.get_lines()
except UnicodeDecodeError:
# file has an encoding problem
s = u"[unprintable code]\n"
......
cimport cython
cdef class ControlBlock:
cdef public set children
cdef public set parents
cdef public set positions
cdef public list stats
cdef public dict gen
cdef public set bounded
cdef public dict input
cdef public dict output
# Big integer bitsets
cdef public object i_input
cdef public object i_output
cdef public object i_gen
cdef public object i_kill
cdef public object i_state
cpdef bint empty(self)
cpdef detach(self)
cpdef add_child(self, block)
cdef class ExitBlock(ControlBlock):
cpdef bint empty(self)
cdef class NameAssignment:
cdef public bint is_arg
cdef public object lhs
cdef public object rhs
cdef public object entry
cdef public object pos
cdef public set refs
cdef public object bit
cdef class AssignmentList:
cdef public object bit
cdef public object mask
cdef public list stats
cdef class ControlFlow:
cdef public set blocks
cdef public set entries
cdef public list loops
cdef public list exceptions
cdef public ControlBlock entry_point
cdef public ExitBlock exit_point
cdef public ControlBlock block
cdef public dict assmts
cpdef newblock(self, parent=*)
cpdef nextblock(self, parent=*)
cpdef bint is_tracked(self, entry)
cpdef mark_position(self, node)
cpdef mark_assignment(self, lhs, rhs, entry=*)
cpdef mark_argument(self, lhs, rhs, entry)
cpdef mark_deletion(self, node, entry)
cpdef mark_reference(self, node, entry)
cpdef normalize(self)
@cython.locals(offset=object, assmts=AssignmentList,
block=ControlBlock)
cpdef initialize(self)
@cython.locals(assmts=AssignmentList, assmt=NameAssignment)
cpdef set map_one(self, istate, entry)
@cython.locals(block=ControlBlock, parent=ControlBlock)
cdef reaching_definitions(self)
cdef class Uninitialized:
pass
@cython.locals(dirty=bint, block=ControlBlock, parent=ControlBlock)
cdef check_definitions(ControlFlow flow, dict compiler_directives)
......@@ -31,6 +31,7 @@ from Cython import Utils
from Cython.Utils import open_new_file, replace_suffix
import CythonScope
import DebugFlags
import Options
module_name_pattern = re.compile(r"[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)*$")
......@@ -73,7 +74,7 @@ class Context(object):
# future_directives [object]
# language_level int currently 2 or 3 for Python 2/3
def __init__(self, include_directories, compiler_directives, cpp=False, language_level=2):
def __init__(self, include_directories, compiler_directives, cpp=False, language_level=2, options=None):
import Builtin, CythonScope
self.modules = {"__builtin__" : Builtin.builtin_scope}
self.modules["cython"] = CythonScope.create_cython_scope(self)
......@@ -81,6 +82,7 @@ class Context(object):
self.future_directives = set()
self.compiler_directives = compiler_directives
self.cpp = cpp
self.options = options
self.pxds = {} # full name -> node tree
......@@ -103,13 +105,15 @@ class Context(object):
def create_pipeline(self, pxd, py=False):
from Visitor import PrintTree
from ParseTreeTransforms import WithTransform, NormalizeTree, PostParse, PxdPostParse
from ParseTreeTransforms import AnalyseDeclarationsTransform, AnalyseExpressionsTransform
from ParseTreeTransforms import ForwardDeclareTypes, AnalyseDeclarationsTransform
from ParseTreeTransforms import AnalyseExpressionsTransform
from ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
from ParseTreeTransforms import InterpretCompilerDirectives, TransformBuiltinMethods
from ParseTreeTransforms import ExpandInplaceOperators, ParallelRangeTransform
from TypeInference import MarkAssignments, MarkOverflowingArithmetic
from ParseTreeTransforms import AdjustDefByDirectives, AlignFunctionDefinitions
from ParseTreeTransforms import RemoveUnreachableCode, GilCheck
from FlowControl import CreateControlFlowGraph
from AnalysedTreeTransforms import AutoTestDictTransform
from AutoDocTransforms import EmbedSignature
from Optimize import FlattenInListTransform, SwitchTransform, IterationTransform
......@@ -145,15 +149,16 @@ class Context(object):
FlattenInListTransform(),
WithTransform(self),
DecoratorTransform(self),
# PrintTree(),
ForwardDeclareTypes(self),
AnalyseDeclarationsTransform(self),
# PrintTree(),
AutoTestDictTransform(self),
EmbedSignature(self),
EarlyReplaceBuiltinCalls(self), ## Necessary?
TransformBuiltinMethods(self), ## Necessary?
CreateControlFlowGraph(self),
RemoveUnreachableCode(self),
MarkAssignments(self),
MarkOverflowingArithmetic(self),
TransformBuiltinMethods(self), ## Necessary?
IntroduceBufferAuxiliaryVars(self),
_check_c_declarations,
AnalyseExpressionsTransform(self),
......@@ -165,7 +170,6 @@ class Context(object):
DropRefcountingTransform(),
FinalOptimizePhase(self),
GilCheck(),
# PrintTree(),
]
def create_pyx_pipeline(self, options, result, py=False):
......@@ -229,8 +233,41 @@ class Context(object):
def create_py_pipeline(self, options, result):
return self.create_pyx_pipeline(options, result, py=True)
def create_pyx_as_pxd_pipeline(self, source):
from ParseTreeTransforms import (AlignFunctionDefinitions,
MarkClosureVisitor, WithTransform, AnalyseDeclarationsTransform)
from Optimize import ConstantFolding, FlattenInListTransform
from Nodes import StatListNode
pipeline = []
result = create_default_resultobj(source, self.options)
pyx_pipeline = self.create_pyx_pipeline(self.options, result)
for stage in pyx_pipeline:
if stage.__class__ in [
AlignFunctionDefinitions,
MarkClosureVisitor,
ConstantFolding,
FlattenInListTransform,
WithTransform,
]:
# Skip these unnecessary stages.
continue
pipeline.append(stage)
if isinstance(stage, AnalyseDeclarationsTransform):
# This is the last stage we need.
break
def fake_pxd(root):
for entry in root.scope.entries.values():
entry.defined_in_pxd = 1
return StatListNode(root.pos, stats=[]), root.scope
pipeline.append(fake_pxd)
return pipeline
def process_pxd(self, source_desc, scope, module_name):
if isinstance(source_desc, FileSourceDescriptor) and source_desc._file_type == 'pyx':
source = CompilationSource(source_desc, module_name, os.getcwd())
pipeline = self.create_pyx_as_pxd_pipeline(source)
result = self.run_pipeline(pipeline, source)
else:
pipeline = self.create_pxd_pipeline(scope, module_name)
result = self.run_pipeline(pipeline, source_desc)
return result
......@@ -363,6 +400,8 @@ class Context(object):
warning(pos, "'%s' is deprecated, use 'libc.%s'" % (name, name), 1)
elif name in ('stl'):
warning(pos, "'%s' is deprecated, use 'libcpp.*.*'" % name, 1)
if pxd is None and Options.cimport_from_pyx:
return self.find_pyx_file(qualified_name, pos)
return pxd
def find_pyx_file(self, qualified_name, pos):
......@@ -570,7 +609,9 @@ def create_parse(context):
source_desc = compsrc.source_desc
full_module_name = compsrc.full_module_name
initial_pos = (source_desc, 1, 0)
saved_cimport_from_pyx, Options.cimport_from_pyx = Options.cimport_from_pyx, False
scope = context.find_module(full_module_name, pos = initial_pos, need_pxd = 0)
Options.cimport_from_pyx = saved_cimport_from_pyx
tree = context.parse(source_desc, scope, pxd = 0, full_module_name = full_module_name)
tree.compilation_source = compsrc
tree.scope = scope
......@@ -684,7 +725,7 @@ class CompilationOptions(object):
def create_context(self):
return Context(self.include_path, self.compiler_directives,
self.cplus, self.language_level)
self.cplus, self.language_level, options=self)
class CompilationResult(object):
......
......@@ -117,5 +117,7 @@ h_guard_prefix = "__PYX_HAVE__"
api_guard_prefix = "__PYX_HAVE_API__"
api_func_guard = "__PYX_HAVE_API_FUNC_"
PYX_NAN = "__PYX_NAN"
def py_version_hex(major, minor=0, micro=0, release_level=0, release_serial=0):
return (major << 24) | (minor << 16) | (micro << 8) | (release_level << 4) | (release_serial)
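For reference, the helper added above mirrors CPython's PY_VERSION_HEX layout (one byte each for major, minor and micro, then a nibble for the release level and one for the release serial). Assuming the py_version_hex definition just above:

# 2.7.1 final (release level 0xF, serial 0) encodes as 0x020701F0
assert py_version_hex(2, 7, 1, 0xF, 0) == 0x020701F0
# a bare major/minor pair compares against values like 0x03020000
assert py_version_hex(3, 2) == 0x03020000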
......@@ -49,12 +49,6 @@ convert_range = True
# If this is 0 it simply creates a wrapper.
lookup_module_cpdef = False
# This will set local variables to None rather than NULL, which may
# suppress what would be an UnboundLocalError in pure Python, but it eliminates
# checking for NULL on every use, and can decref rather than xdecref at the end.
# WARNING: This is a work in progress, may currently segfault.
init_local_none = True
# Whether or not to embed the Python interpreter, for use in making a
# standalone executable or calling from external libraries.
# This will provide a method which initializes the interpreter and
......@@ -65,6 +59,14 @@ embed = None
# module creation time. For legacy code only, needed for some circular imports.
disable_function_redefinition = False
# In previous iterations of Cython, globals() gave the first non-Cython module
# globals in the call stack. Sage relies on this behavior for variable injection.
old_style_globals = False
# Allows cimporting from a pyx file without a pxd file.
cimport_from_pyx = False
# Declare compiler directives
directive_defaults = {
......@@ -95,10 +97,19 @@ directive_defaults = {
'warn': None,
'warn.undeclared': False,
'warn.unreachable': True,
'warn.maybe_uninitialized': False,
'warn.unreachable': True,
'warn.unused': False,
'warn.unused_arg': False,
'warn.unused_result': False,
# remove unreachable code
'remove_unreachable': True,
# control flow debug directives
'control_flow.dot_output': "", # Graphviz output filename
'control_flow.dot_annotate_defs': False, # Annotate definitions
# test support
'test_assert_path_exists' : [],
'test_fail_if_path_exists' : [],
......@@ -107,6 +118,13 @@ directive_defaults = {
'binding': False,
}
# Extra warning directives
extra_warnings = {
'warn.maybe_uninitialized': True,
'warn.unreachable': True,
'warn.unused': True,
}
# Override types possibilities above, if needed
directive_types = {
'final' : bool, # final cdef classes and methods
......
......@@ -639,7 +639,8 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
'is not allowed in %s scope' % (directive, scope)))
return False
else:
if directive not in Options.directive_defaults:
if (directive not in Options.directive_defaults
and directive not in Options.directive_types):
error(pos, "Invalid directive: '%s'." % (directive,))
return True
......@@ -1264,6 +1265,44 @@ class DecoratorTransform(CythonTransform, SkipDeclarations):
return [node, reassignment]
class ForwardDeclareTypes(CythonTransform):
def visit_CompilerDirectivesNode(self, node):
env = self.module_scope
old = env.directives
env.directives = node.directives
self.visitchildren(node)
env.directives = old
return node
def visit_ModuleNode(self, node):
self.module_scope = node.scope
self.module_scope.directives = node.directives
self.visitchildren(node)
return node
def visit_CDefExternNode(self, node):
old_cinclude_flag = self.module_scope.in_cinclude
self.module_scope.in_cinclude = 1
self.visitchildren(node)
self.module_scope.in_cinclude = old_cinclude_flag
return node
def visit_CEnumDefNode(self, node):
node.declare(self.module_scope)
return node
def visit_CStructOrUnionDefNode(self, node):
if node.name not in self.module_scope.entries:
node.declare(self.module_scope)
return node
def visit_CClassDefNode(self, node):
if node.class_name not in self.module_scope.entries:
node.declare(self.module_scope)
return node
class AnalyseDeclarationsTransform(CythonTransform):
basic_property = TreeFragment(u"""
......@@ -1362,7 +1401,6 @@ if VALUE is not None:
"""
self.seen_vars_stack.append(cython.set())
lenv = node.local_scope
node.body.analyse_control_flow(lenv) # this will be totally refactored
node.declare_arguments(lenv)
for var, type_node in node.directive_locals.items():
if not lenv.lookup_here(var): # don't redeclare args
......@@ -1556,8 +1594,6 @@ if VALUE is not None:
type_name = entry.type.module_name + '.' + type_name
if entry.init is not None:
default_value = ' = ' + entry.init
elif entry.init_to_none:
default_value = ' = ' + repr(None)
docstring = attr_name + ': ' + type_name + default_value
property.doc = EncodedString(docstring)
# ---------------------------------------
......@@ -2132,8 +2168,8 @@ class GilCheck(VisitorTransform):
return node
def visit_ParallelRangeNode(self, node):
if node.is_nogil:
node.is_nogil = False
if node.nogil:
node.nogil = False
node = Nodes.GILStatNode(node.pos, state='nogil', body=node)
return self.visit_GILStatNode(node)
......@@ -2240,9 +2276,9 @@ class TransformBuiltinMethods(EnvTransform):
return node # nothing to do
items = [ ExprNodes.DictItemNode(pos,
key=ExprNodes.StringNode(pos, value=var),
value=ExprNodes.NameNode(pos, name=var))
value=ExprNodes.NameNode(pos, name=var, allow_null=True))
for var in lenv.entries ]
return ExprNodes.DictNode(pos, key_value_pairs=items)
return ExprNodes.DictNode(pos, key_value_pairs=items, exclude_null_values=True)
else: # dir()
if len(node.args) > 1:
error(self.pos, "Builtin 'dir()' called with wrong number of args, expected 0-1, got %d"
......@@ -2400,6 +2436,24 @@ class ReplaceFusedTypeChecks(VisitorTransform):
return node
class FindUninitializedParallelVars(CythonTransform, SkipDeclarations):
"""
This transform isn't part of the pipeline; it simply finds all references
to variables in parallel blocks.
"""
def __init__(self):
CythonTransform.__init__(self, None)
self.used_vars = []
def visit_ParallelStatNode(self, node):
return node
def visit_NameNode(self, node):
self.used_vars.append((node.entry, node.pos))
return node
class DebugTransform(CythonTransform):
"""
Write debug information for this Cython module.
......
......@@ -85,6 +85,13 @@ class BaseType(object):
return index_name
def invalid_value(self):
"""
Returns the most invalid value an object of this type can assume as a
C expression string. Returns None if no such value exists.
"""
class PyrexType(BaseType):
#
# Base class for all Pyrex types.
......@@ -264,6 +271,9 @@ class CTypedefType(BaseType):
self.typedef_base_type = base_type
self.typedef_is_external = is_external
def invalid_value(self):
return self.typedef_base_type.invalid_value()
def resolve(self):
return self.typedef_base_type.resolve()
......@@ -453,6 +463,9 @@ class PyObjectType(PyrexType):
"""
return False
def invalid_value(self):
return "1"
class BuiltinObjectType(PyObjectType):
# objstruct_cname string Name of PyObject struct
......@@ -885,9 +898,9 @@ static CYTHON_INLINE %(type)s __Pyx_PyInt_As%(SignWord)s%(TypeName)s(PyObject* x
"can't convert negative value to %(type)s");
return (%(type)s)-1;
}
return PyLong_AsUnsigned%(TypeName)s(x);
return (%(type)s)PyLong_AsUnsigned%(TypeName)s(x);
} else {
return PyLong_As%(TypeName)s(x);
return (%(type)s)PyLong_As%(TypeName)s(x);
}
} else {
%(type)s val;
......@@ -1039,6 +1052,14 @@ class CIntType(CNumericType):
def assignable_from_resolved_type(self, src_type):
return src_type.is_int or src_type.is_enum or src_type is error_type
def invalid_value(self):
if rank_to_type_name[self.rank] == 'char':
return "'?'"
else:
# We do not really know the size of the type, so return
# a 32-bit literal and rely on casting to final type. It will
# be negative for signed ints, which is good.
return "0xbad0bad0";
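A quick check of the comment above, reinterpreting the 32-bit literal with the struct module (illustration only; the generated C code simply relies on the cast):

import struct

as_signed = struct.unpack('<i', struct.pack('<I', 0xbad0bad0))[0]
print(as_signed < 0)   # True: negative once viewed as a signed 32-bit int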
class CAnonEnumType(CIntType):
......@@ -1116,13 +1137,8 @@ static CYTHON_INLINE Py_UCS4 __Pyx_PyObject_AsPy_UCS4(PyObject* x) {
}
#endif
PyErr_Format(PyExc_ValueError,
"only single character unicode strings can be converted to Py_UCS4, got length "
#if PY_VERSION_HEX < 0x02050000
"%d",
#else
"%zd",
#endif
PyUnicode_GET_SIZE(x));
"only single character unicode strings can be converted to Py_UCS4, "
"got length %"PY_FORMAT_SIZE_T"d", PyUnicode_GET_SIZE(x));
return (Py_UCS4)-1;
}
ival = __Pyx_PyInt_AsLong(x);
......@@ -1172,13 +1188,8 @@ static CYTHON_INLINE Py_UNICODE __Pyx_PyObject_AsPy_UNICODE(PyObject* x) {
if (PyUnicode_Check(x)) {
if (unlikely(PyUnicode_GET_SIZE(x) != 1)) {
PyErr_Format(PyExc_ValueError,
"only single character unicode strings can be converted to Py_UNICODE, got length "
#if PY_VERSION_HEX < 0x02050000
"%d",
#else
"%zd",
#endif
PyUnicode_GET_SIZE(x));
"only single character unicode strings can be converted to Py_UNICODE, "
"got length %"PY_FORMAT_SIZE_T"d", PyUnicode_GET_SIZE(x));
return (Py_UNICODE)-1;
}
return PyUnicode_AS_UNICODE(x)[0];
......@@ -1249,6 +1260,8 @@ class CFloatType(CNumericType):
def assignable_from_resolved_type(self, src_type):
return (src_type.is_numeric and not src_type.is_complex) or src_type is error_type
def invalid_value(self):
return Naming.PYX_NAN
class CComplexType(CNumericType):
......@@ -1769,6 +1782,8 @@ class CPtrType(CType):
else:
return CPtrType(base_type)
def invalid_value(self):
return "1"
class CNullPtrType(CPtrType):
......@@ -2496,6 +2511,12 @@ class TemplatePlaceholderType(CType):
else:
return cmp(type(self), type(other))
def __eq__(self, other):
if isinstance(other, TemplatePlaceholderType):
return self.name == other.name
else:
return False
class CEnumType(CType):
# name string
# cname string or None
......
......@@ -172,13 +172,34 @@ class FileSourceDescriptor(SourceDescriptor):
self.filename = filename
self.set_file_type_from_name(filename)
self._cmp_name = filename
self._lines = {}
def get_lines(self, encoding=None, error_handling=None):
return Utils.open_source_file(
# we cache the lines only the second time this is called, in
# order to save memory when they are only used once
key = (encoding, error_handling)
try:
lines = self._lines[key]
if lines is not None:
return lines
except KeyError:
pass
f = Utils.open_source_file(
self.filename, encoding=encoding,
error_handling=error_handling,
# newline normalisation is costly before Py2.6
require_normalised_newlines=False)
try:
lines = list(f)
finally:
f.close()
if key in self._lines:
self._lines[key] = lines
else:
# do not cache the first access, but remember that we
# already read it once
self._lines[key] = None
return lines
def get_description(self):
return self.path_description
......
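The get_lines() change above caches the file contents only on the second request for a given (encoding, error_handling) key, so a file read just once is not kept in memory. A stripped-down sketch of that pattern with made-up names (not Cython's API):

class SecondAccessCache(object):
    """Cache a loader's result only once a key has been requested twice."""
    def __init__(self, loader):
        self.loader = loader
        self._seen = {}          # key -> None (seen once) or the cached value

    def get(self, key):
        cached = self._seen.get(key)
        if cached is not None:
            return cached
        value = self.loader(key)
        # keep the value only if this is a repeat access
        self._seen[key] = value if key in self._seen else None
        return value

cache = SecondAccessCache(lambda name: ['<contents of %s>\n' % name])
cache.get('spam.pyx')            # first access: loaded, but not cached
lines = cache.get('spam.pyx')    # second access: loaded again and now cached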
......@@ -13,7 +13,6 @@ import TypeSlots
from TypeSlots import \
pyfunction_signature, pymethod_signature, \
get_special_method_signature, get_property_accessor_signature
import ControlFlow
import Code
import __builtin__ as builtins
try:
......@@ -96,7 +95,6 @@ class Entry(object):
# holding its home namespace
# pymethdef_cname string PyMethodDef structure
# signature Signature Arg & return types for Python func
# init_to_none boolean True if initial value should be None
# as_variable Entry Alternative interpretation of extension
# type name or builtin C function as a variable
# xdecref_cleanup boolean Use Py_XDECREF for error cleanup
......@@ -157,7 +155,6 @@ class Entry(object):
func_cname = None
func_modifiers = []
doc = None
init_to_none = 0
as_variable = None
xdecref_cleanup = 0
in_cinclude = 0
......@@ -187,6 +184,8 @@ class Entry(object):
self.init = init
self.overloaded_alternatives = []
self.assignments = []
self.cf_assignments = []
self.cf_references = []
def __repr__(self):
return "Entry(name=%s, type=%s)" % (self.name, self.type)
......@@ -225,7 +224,6 @@ class Scope(object):
# qualified_name string "modname" or "modname.classname"
# pystring_entries [Entry] String const entries newly used as
# Python strings in this scope
# control_flow ControlFlow Used for keeping track of environment state
# nogil boolean In a nogil section
# directives dict Helper variable for the recursive
# analysis, contains directive values.
......@@ -278,22 +276,12 @@ class Scope(object):
self.pystring_entries = []
self.buffer_entries = []
self.lambda_defs = []
self.control_flow = ControlFlow.LinearControlFlow()
self.return_type = None
self.id_counters = {}
def __deepcopy__(self, memo):
return self
def start_branching(self, pos):
self.control_flow = self.control_flow.start_branch(pos)
def next_branch(self, pos):
self.control_flow = self.control_flow.next_branch(pos)
def finish_branching(self, pos):
self.control_flow = self.control_flow.finish_branch(pos)
def __str__(self):
return "<%s %s>" % (self.__class__.__name__, self.qualified_name)
......@@ -446,8 +434,6 @@ class Scope(object):
if scope:
entry.type.scope = scope
self.type_entries.append(entry)
if not scope and not entry.type.scope:
self.check_for_illegal_incomplete_ctypedef(typedef_flag, pos)
return entry
def declare_cpp_class(self, name, scope,
......@@ -473,13 +459,24 @@ class Scope(object):
if scope:
entry.type.scope = scope
self.type_entries.append(entry)
if templates is not None:
if base_classes:
if entry.type.base_classes and not entry.type.base_classes == base_classes:
error(pos, "Base type does not match previous declaration")
else:
entry.type.base_classes = base_classes
if templates or entry.type.templates:
if templates != entry.type.templates:
error(pos, "Template parameters do not match previous declaration")
if templates is not None and entry.type.scope is not None:
for T in templates:
template_entry = entry.type.scope.declare(T.name, T.name, T, None, 'extern')
template_entry.is_type = 1
def declare_inherited_attributes(entry, base_classes):
for base_class in base_classes:
if base_class.scope is None:
error(pos, "Cannot inherit from incomplete type")
else:
declare_inherited_attributes(entry, base_class.base_classes)
entry.type.scope.declare_inherited_cpp_attributes(base_class.scope)
if entry.type.scope:
......@@ -536,7 +533,6 @@ class Scope(object):
if api:
entry.api = 1
entry.used = 1
self.control_flow.set_state((), (name, 'initialized'), False)
return entry
def declare_builtin(self, name, pos):
......@@ -1094,6 +1090,8 @@ class ModuleScope(Scope):
self.var_entries.append(entry)
else:
entry.is_pyglobal = 1
if Options.cimport_from_pyx:
entry.used = 1
return entry
def declare_cfunction(self, name, type, pos,
......@@ -1196,8 +1194,6 @@ class ModuleScope(Scope):
scope.declare_inherited_c_attributes(base_type.scope)
type.set_scope(scope)
self.type_entries.append(entry)
else:
self.check_for_illegal_incomplete_ctypedef(typedef_flag, pos)
else:
if defining and type.scope.defined:
error(pos, "C class '%s' already defined" % name)
......@@ -1228,10 +1224,6 @@ class ModuleScope(Scope):
#
return entry
def check_for_illegal_incomplete_ctypedef(self, typedef_flag, pos):
if typedef_flag and not self.in_cinclude:
error(pos, "Forward-referenced type must use 'cdef', not 'ctypedef'")
def allocate_vtable_names(self, entry):
# If extension type has a vtable, allocate vtable struct and
# slot names for it.
......@@ -1386,7 +1378,6 @@ class LocalScope(Scope):
entry.is_arg = 1
#entry.borrowed = 1 # Not using borrowed arg refs for now
self.arg_entries.append(entry)
self.control_flow.set_state((), (name, 'source'), 'arg')
return entry
def declare_var(self, name, type, pos,
......@@ -1398,9 +1389,8 @@ class LocalScope(Scope):
entry = Scope.declare_var(self, name, type, pos,
cname=cname, visibility=visibility,
api=api, in_pxd=in_pxd, is_cdef=is_cdef)
if type.is_pyobject and not Options.init_local_none:
if type.is_pyobject:
entry.init = "0"
entry.init_to_none = (type.is_pyobject or type.is_unspecified) and Options.init_local_none
entry.is_local = 1
entry.in_with_gil_block = self._in_with_gil_block
......
......@@ -39,7 +39,24 @@ class MarkAssignments(CythonTransform):
if self.parallel_block_stack:
parallel_node = self.parallel_block_stack[-1]
parallel_node.assignments[lhs.entry] = (lhs.pos, inplace_op)
previous_assignment = parallel_node.assignments.get(lhs.entry)
# If there was a previous assignment to the variable, keep the
# previous assignment position
if previous_assignment:
pos, previous_inplace_op = previous_assignment
if (inplace_op and previous_inplace_op and
inplace_op != previous_inplace_op):
# x += y; x *= y
t = (inplace_op, previous_inplace_op)
error(lhs.pos,
"Reduction operator '%s' is inconsistent "
"with previous reduction operator '%s'" % t)
else:
pos = lhs.pos
parallel_node.assignments[lhs.entry] = (pos, inplace_op)
elif isinstance(lhs, ExprNodes.SequenceNode):
for arg in lhs.args:
......
......@@ -100,9 +100,6 @@ class TempsBlockNode(Node):
code.put_decref_clear(handle.temp, handle.type)
code.funcstate.release_temp(handle.temp)
def analyse_control_flow(self, env):
self.body.analyse_control_flow(env)
def analyse_declarations(self, env):
self.body.analyse_declarations(env)
......@@ -290,9 +287,6 @@ class LetNode(Nodes.StatNode, LetNodeMixin):
self.pos = body.pos
self.body = body
def analyse_control_flow(self, env):
self.body.analyse_control_flow(env)
def analyse_declarations(self, env):
self.temp_expression.analyse_declarations(env)
self.body.analyse_declarations(env)
......
......@@ -31,6 +31,7 @@ cdef extern from "string.h" nogil:
int strcoll (const_char *S1, const_char *S2)
size_t strxfrm (char *TO, const_char *FROM, size_t SIZE)
char *strerror (int ERRNUM)
char *strchr (const_char *STRING, int C)
char *strrchr (const_char *STRING, int C)
......
......@@ -81,6 +81,8 @@ cdef extern from "numpy/arrayobject.h":
NPY_COMPLEX256
NPY_COMPLEX512
NPY_INTP
ctypedef enum NPY_ORDER:
NPY_ANYORDER
NPY_CORDER
......@@ -215,9 +217,9 @@ cdef extern from "numpy/arrayobject.h":
info.buf = PyArray_DATA(self)
info.ndim = ndim
if copy_shape:
# Allocate new buffer for strides and shape info. This is allocated
# as one block, strides first.
info.strides = <Py_ssize_t*>stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2)
# Allocate new buffer for strides and shape info.
# This is allocated as one block, strides first.
info.strides = <Py_ssize_t*>stdlib.malloc(sizeof(Py_ssize_t) * <size_t>ndim * 2)
info.shape = info.strides + ndim
for i in range(ndim):
info.strides[i] = PyArray_STRIDES(self)[i]
......
......@@ -8,42 +8,42 @@ cdef extern from "omp.h":
omp_sched_guided = 3,
omp_sched_auto = 4
extern void omp_set_num_threads(int)
extern int omp_get_num_threads()
extern int omp_get_max_threads()
extern int omp_get_thread_num()
extern int omp_get_num_procs()
extern int omp_in_parallel()
extern void omp_set_dynamic(int)
extern int omp_get_dynamic()
extern void omp_set_nested(int)
extern int omp_get_nested()
extern void omp_init_lock(omp_lock_t *)
extern void omp_destroy_lock(omp_lock_t *)
extern void omp_set_lock(omp_lock_t *)
extern void omp_unset_lock(omp_lock_t *)
extern int omp_test_lock(omp_lock_t *)
extern void omp_init_nest_lock(omp_nest_lock_t *)
extern void omp_destroy_nest_lock(omp_nest_lock_t *)
extern void omp_set_nest_lock(omp_nest_lock_t *)
extern void omp_unset_nest_lock(omp_nest_lock_t *)
extern int omp_test_nest_lock(omp_nest_lock_t *)
extern double omp_get_wtime()
extern double omp_get_wtick()
void omp_set_schedule(omp_sched_t, int)
void omp_get_schedule(omp_sched_t *, int *)
int omp_get_thread_limit()
void omp_set_max_active_levels(int)
int omp_get_max_active_levels()
int omp_get_level()
int omp_get_ancestor_thread_num(int)
int omp_get_team_size(int)
int omp_get_active_level()
extern void omp_set_num_threads(int) nogil
extern int omp_get_num_threads() nogil
extern int omp_get_max_threads() nogil
extern int omp_get_thread_num() nogil
extern int omp_get_num_procs() nogil
extern int omp_in_parallel() nogil
extern void omp_set_dynamic(int) nogil
extern int omp_get_dynamic() nogil
extern void omp_set_nested(int) nogil
extern int omp_get_nested() nogil
extern void omp_init_lock(omp_lock_t *) nogil
extern void omp_destroy_lock(omp_lock_t *) nogil
extern void omp_set_lock(omp_lock_t *) nogil
extern void omp_unset_lock(omp_lock_t *) nogil
extern int omp_test_lock(omp_lock_t *) nogil
extern void omp_init_nest_lock(omp_nest_lock_t *) nogil
extern void omp_destroy_nest_lock(omp_nest_lock_t *) nogil
extern void omp_set_nest_lock(omp_nest_lock_t *) nogil
extern void omp_unset_nest_lock(omp_nest_lock_t *) nogil
extern int omp_test_nest_lock(omp_nest_lock_t *) nogil
extern double omp_get_wtime() nogil
extern double omp_get_wtick() nogil
void omp_set_schedule(omp_sched_t, int) nogil
void omp_get_schedule(omp_sched_t *, int *) nogil
int omp_get_thread_limit() nogil
void omp_set_max_active_levels(int) nogil
int omp_get_max_active_levels() nogil
int omp_get_level() nogil
int omp_get_ancestor_thread_num(int) nogil
int omp_get_team_size(int) nogil
int omp_get_active_level() nogil
......@@ -337,7 +337,8 @@ class CythonDotParallel(object):
__all__ = ['parallel', 'prange', 'threadid']
parallel = nogil
def parallel(self, num_threads=None):
return nogil
def prange(self, start=0, stop=None, step=1, schedule=None, nogil=False):
if stop is None:
......
def f(x):
return x**2-x
def integrate_f(a, b, N):
s = 0.0
dx = (b-a)/N
for i in range(N):
s += f(a+i*dx)
return s * dx
def f(x):
return x**2-x
def integrate_f(a, b, N):
s = 0.0
dx = (b-a)/N
for i in range(N):
s += f(a+i*dx)
return s * dx
cdef double f(double x) except? -2:
return x**2-x
def integrate_f(double a, double b, int N):
cdef int i
s = 0.0
dx = (b-a)/N
for i in range(N):
s += f(a+i*dx)
return s * dx
import timeit
import integrate0, integrate1, integrate2
number = 10
py_time = None
for m in ('integrate0', 'integrate1', 'integrate2'):
print m
t = min(timeit.repeat("integrate_f(0.0, 10.0, 10000000)", "from %s import integrate_f" % m, number=number))
if py_time is None:
py_time = t
print " ", t / number, "s"
print " ", py_time / t
# Run as:
# python setup.py build_ext --inplace
import sys
sys.path.insert(0, "..")
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
......
......@@ -80,7 +80,7 @@ argument in order to pass it.
Therefore Cython provides a syntax for declaring a C-style function,
the cdef keyword::
cdef double f(double) except? -2:
cdef double f(double x) except? -2:
return x**2-x
Some form of except-modifier should usually be added, otherwise Cython
......
......@@ -359,9 +359,37 @@ exception and converting it into a Python exception. For example, ::
cdef extern from "some_file.h":
cdef int foo() except +
This will translate try and the C++ error into an appropriate Python exception
(currently an IndexError on std::out_of_range and a RuntimeError otherwise
(preserving the what() message). ::
This will catch the C++ error and translate it into an appropriate Python exception.
The translation is performed according to the following table
(the ``std::`` prefix is omitted from the C++ identifiers):
+-----------------------+---------------------+
| C++ | Python |
+=======================+=====================+
| ``bad_alloc`` | ``MemoryError`` |
+-----------------------+---------------------+
| ``bad_cast`` | ``TypeError`` |
+-----------------------+---------------------+
| ``domain_error`` | ``ValueError`` |
+-----------------------+---------------------+
| ``invalid_argument`` | ``ValueError`` |
+-----------------------+---------------------+
| ``ios_base::failure`` | ``IOError`` |
+-----------------------+---------------------+
| ``out_of_range`` | ``IndexError`` |
+-----------------------+---------------------+
| ``overflow_error`` | ``OverflowError`` |
+-----------------------+---------------------+
| ``range_error`` | ``ArithmeticError`` |
+-----------------------+---------------------+
| ``underflow_error`` | ``ArithmeticError`` |
+-----------------------+---------------------+
| (all others) | ``RuntimeError`` |
+-----------------------+---------------------+
The ``what()`` message, if any, is preserved. Note that a C++
``ios_base::failure`` can denote EOF, but does not carry enough information
for Cython to discern that, so watch out with exception masks on IO streams. ::
cdef int bar() except +MemoryError
......
......@@ -109,6 +109,7 @@ def get_openmp_compiler_flags(language):
cc = sysconfig.get_config_var('CXX')
else:
cc = sysconfig.get_config_var('CC')
if not cc: return None # Windows?
# For some reason, cc can be e.g. 'gcc -pthread'
cc = cc.split()[0]
......@@ -140,8 +141,10 @@ def get_openmp_compiler_flags(language):
if compiler_version and compiler_version.split('.') >= ['4', '2']:
return '-fopenmp', '-fopenmp'
locale.setlocale(locale.LC_ALL, '')
try:
locale.setlocale(locale.LC_ALL, '')
except locale.Error:
pass
OPENMP_C_COMPILER_FLAGS = get_openmp_compiler_flags('c')
OPENMP_CPP_COMPILER_FLAGS = get_openmp_compiler_flags('cpp')
......@@ -174,6 +177,7 @@ VER_DEP_MODULES = {
'run.purecdef',
]),
(2,7) : (operator.lt, lambda x: x in ['run.withstat_py', # multi context with statement
'run.yield_inside_lambda',
]),
# The next line should start (3,); but this is a dictionary, so
# we can only have one (3,) key. Since 2.7 is supposed to be the
......@@ -199,6 +203,9 @@ KEEP_2X_FILES = [
COMPILER = None
INCLUDE_DIRS = [ d for d in os.getenv('INCLUDE', '').split(os.pathsep) if d ]
CFLAGS = os.getenv('CFLAGS', '').split()
CCACHE = os.getenv('CYTHON_RUNTESTS_CCACHE', '').split()
BACKENDS = ['c', 'cpp']
def memoize(f):
uncomputed = object()
......@@ -236,13 +243,15 @@ list_unchanging_dir = memoize(lambda x: os.listdir(x))
class build_ext(_build_ext):
def build_extension(self, ext):
if ext.language == 'c++':
try:
try: # Py2.7+ & Py3.2+
compiler_obj = self.compiler_obj
except AttributeError:
compiler_obj = self.compiler
if ext.language == 'c++':
compiler_obj.compiler_so.remove('-Wstrict-prototypes')
if CCACHE:
compiler_obj.compiler_so = CCACHE + compiler_obj.compiler_so
except Exception:
pass
_build_ext.build_extension(self, ext)
......@@ -1245,12 +1254,15 @@ def main():
help="do not run the Cython compiler, only the C compiler")
parser.add_option("--compiler", dest="compiler", default=None,
help="C compiler type")
backend_list = ','.join(BACKENDS)
parser.add_option("--backends", dest="backends", default=backend_list,
help="select backends to test (default: %s)" % backend_list)
parser.add_option("--no-c", dest="use_c",
action="store_false", default=True,
help="do not test C compilation")
help="do not test C compilation backend")
parser.add_option("--no-cpp", dest="use_cpp",
action="store_false", default=True,
help="do not test C++ compilation")
help="do not test C++ compilation backend")
parser.add_option("--no-unit", dest="unittests",
action="store_false", default=True,
help="do not run the unit tests")
......@@ -1399,8 +1411,6 @@ def main():
if WITH_CYTHON and options.language_level == 3:
sys.stderr.write("Using Cython language level 3.\n")
sys.stderr.write("\n")
test_bugs = False
if options.tickets:
for ticket_number in options.tickets:
......@@ -1435,11 +1445,23 @@ def main():
global COMPILER
if options.compiler:
COMPILER = options.compiler
languages = []
if options.use_c:
languages.append('c')
if options.use_cpp:
languages.append('cpp')
selected_backends = [ name.strip() for name in options.backends.split(',') if name.strip() ]
backends = []
for backend in selected_backends:
if backend == 'c' and not options.use_c:
continue
elif backend == 'cpp' and not options.use_cpp:
continue
elif backend not in BACKENDS:
sys.stderr.write("Unknown backend requested: '%s' not one of [%s]\n" % (
backend, ','.join(BACKENDS)))
sys.exit(1)
backends.append(backend)
sys.stderr.write("Backends: %s\n" % ','.join(backends))
languages = backends
sys.stderr.write("\n")
test_suite = unittest.TestSuite()
......
......@@ -99,8 +99,10 @@ def compile_cython_modules(profile=False, compile_more=False, cython_with_refnan
"Cython.Compiler.Scanning",
"Cython.Compiler.Parsing",
"Cython.Compiler.Visitor",
"Cython.Compiler.FlowControl",
"Cython.Compiler.Code",
"Cython.Runtime.refnanny",]
"Cython.Runtime.refnanny",
]
if compile_more:
compiled_modules.extend([
"Cython.Compiler.ParseTreeTransforms",
......
......@@ -3,7 +3,7 @@
cdef void spam():
cdef long long L
cdef unsigned long long U
cdef object x
cdef object x = object()
L = x
x = L
U = x
......
# mode: compile
cdef int f() except -1:
cdef object x, y, z, w
cdef object x, y = 0, z = 0, w = 0
cdef int i
x = abs(y)
delattr(x, 'spam')
......
......@@ -14,7 +14,7 @@ cdef class SuperSpam(Spam):
cdef void tomato():
cdef Spam spam
cdef SuperSpam superspam
cdef SuperSpam superspam = SuperSpam()
spam = superspam
spam.add_tons(42)
superspam.add_tons(1764)
......
......@@ -10,8 +10,8 @@ cdef class Swallow:
def f(Grail g):
cdef int i = 0
cdef Swallow s
cdef object x
cdef Swallow s = Swallow()
cdef object x = Grail()
g = x
x = g
g = i
......
# mode: compile
ctypedef enum MyEnum:
Value1
Value2
Value3 = 100
cdef MyEnum my_enum = Value3
ctypedef struct StructA:
StructA *a
StructB *b
cdef struct StructB:
StructA *a
StructB *b
cdef class ClassA:
cdef ClassA a
cdef ClassB b
ctypedef public class ClassB [ object ClassB, type TypeB ]:
cdef ClassA a
cdef ClassB b
cdef StructA struct_a
cdef StructB struct_b
struct_a.a = &struct_a
struct_a.b = &struct_b
struct_b.a = &struct_a
struct_b.b = &struct_b
cdef ClassA class_a = ClassA()
cdef ClassB class_b = ClassB()
class_a.a = class_a
class_a.b = class_b
class_b.a = class_a
class_b.b = class_b
# ticket: 444
# mode: compile
# mode: error
def test():
cdef object[int] not_assigned_to
not_assigned_to[2] = 3
_ERRORS = """
6:20: local variable 'not_assigned_to' referenced before assignment
"""
# mode: error
ctypedef struct Spam
cdef extern from *:
ctypedef struct Ham
ctypedef struct Spam:
int i
ctypedef struct Spam
_ERRORS = u"""
3:0: Forward-referenced type must use 'cdef', not 'ctypedef'
"""
......@@ -39,6 +39,32 @@ with nogil, cython.parallel.parallel():
with nogil, cython.parallel.parallel:
pass
cdef int y
# this is not valid
for i in prange(10, nogil=True):
i = y * 4
y = i
# this is valid
for i in prange(10, nogil=True):
y = i
i = y * 4
y = i
with nogil, cython.parallel.parallel():
i = y
y = i
for i in prange(10, nogil=True):
y += i
y *= i
with nogil, cython.parallel.parallel("invalid"):
pass
with nogil, cython.parallel.parallel(invalid=True):
pass
_ERRORS = u"""
e_cython_parallel.pyx:3:8: cython.parallel.parallel is not a module
e_cython_parallel.pyx:4:0: No such directive: cython.parallel.something
......@@ -53,4 +79,9 @@ e_cython_parallel.pyx:30:9: Can only iterate over an iteration variable
e_cython_parallel.pyx:33:10: Must be of numeric type, not int *
e_cython_parallel.pyx:36:33: Closely nested 'with parallel:' blocks are disallowed
e_cython_parallel.pyx:39:12: The parallel directive must be called
e_cython_parallel.pyx:45:10: Expression value depends on previous loop iteration, cannot execute in parallel
e_cython_parallel.pyx:55:9: Expression depends on an uninitialized thread-private variable
e_cython_parallel.pyx:60:6: Reduction operator '*' is inconsistent with previous reduction operator '+'
e_cython_parallel.pyx:62:36: cython.parallel.parallel() does not take positional arguments
e_cython_parallel.pyx:65:36: Invalid keyword argument: invalid
"""
......@@ -12,9 +12,18 @@ def f(a):
del j # error: deletion of non-Python object
del x[i] # error: deletion of non-Python object
del s.m # error: deletion of non-Python object
def outer(a):
def inner():
print a
del a
return inner()
_ERRORS = u"""
10:6: Cannot assign to or delete this
11:45: Deletion of non-Python, non-C++ object
13:6: Deletion of non-Python, non-C++ object
14:6: Deletion of non-Python, non-C++ object
10:9: Cannot assign to or delete this
11:48: Deletion of non-Python, non-C++ object
13:9: Deletion of non-Python, non-C++ object
14:9: Deletion of non-Python, non-C++ object
19:9: can not delete variable 'a' referenced in nested scope
"""
......@@ -10,7 +10,7 @@ cdef class E:
cdef readonly object __weakref__
cdef void f():
cdef C c
cdef C c = C()
cdef object x
x = c.__weakref__
c.__weakref__ = x
......
# ticket: 692
# mode: error
def func((a, b)):
return a + b
_ERRORS = u"""
4:9: Missing argument name
5:13: undeclared name not builtin: a
5:16: undeclared name not builtin: b
"""
......@@ -18,7 +18,7 @@ cdef void r() nogil:
q()
cdef object m():
cdef object x, y, obj
cdef object x, y = 0, obj
cdef int i, j, k
global fred
q()
......
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror
def simple():
print a
a = 0
def simple2(arg):
if arg > 0:
a = 1
return a
def simple_pos(arg):
if arg > 0:
a = 1
else:
a = 0
return a
def ifelif(c1, c2):
if c1 == 1:
if c2:
a = 1
else:
a = 2
elif c1 == 2:
a = 3
return a
def nowimpossible(a):
if a:
b = 1
if a:
print b
def fromclosure():
def bar():
print a
a = 1
return bar
# Should work ok in both py2 and py3
def list_comp(a):
return [i for i in a]
def set_comp(a):
return set(i for i in a)
def dict_comp(a):
return {i: j for i, j in a}
# args and kwargs
def generic_args_call(*args, **kwargs):
return args, kwargs
def cascaded(x):
print a, b
a = b = x
def from_import():
print bar
from foo import bar
def regular_import():
print foo
import foo
def raise_stat():
try:
raise exc, msg
except:
pass
exc = ValueError
msg = 'dummy'
_ERRORS = """
6:11: local variable 'a' referenced before assignment
12:12: local variable 'a' might be referenced before assignment
29:12: local variable 'a' might be referenced before assignment
35:15: local variable 'b' might be referenced before assignment
58:11: local variable 'a' referenced before assignment
58:14: local variable 'b' referenced before assignment
62:13: local variable 'bar' referenced before assignment
66:13: local variable 'foo' referenced before assignment
71:17: local variable 'exc' referenced before assignment
71:22: local variable 'msg' referenced before assignment
"""
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror
# class scope
def foo(c):
class Foo(object):
if c > 0:
b = 1
print a, b
a = 1
return Foo
_ERRORS = """
10:15: local variable 'a' referenced before assignment
10:18: local variable 'b' might be referenced before assignment
"""
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror
def foo(x):
a = 1
del a, b
b = 2
return a, b
_ERRORS = """
7:9: Deletion of non-Python, non-C++ object
7:12: local variable 'b' referenced before assignment
7:12: Deletion of non-Python, non-C++ object
9:12: local variable 'a' referenced before assignment
"""
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror

def exc_target():
    try:
        {}['foo']
    except KeyError, e:
        pass
    except IndexError, i:
        pass
    return e, i

def exc_body():
    try:
        a = 1
    except Exception:
        pass
    return a

def exc_else_pos():
    try:
        pass
    except Exception, e:
        pass
    else:
        e = 1
    return e

def exc_body_pos(d):
    try:
        a = d['foo']
    except KeyError:
        a = None
    return a

def exc_pos():
    try:
        a = 1
    except Exception:
        a = 1
    return a

def exc_finally():
    try:
        a = 1
    finally:
        pass
    return a

def exc_finally2():
    try:
        pass
    finally:
        a = 1
    return a


def exc_assmt_except(a):
    try:
        x = a
    except:
        return x

def exc_assmt_finaly(a):
    try:
        x = a
    except:
        return x

def raise_stat(a):
    try:
        if a < 0:
            raise IndexError
    except IndexError:
        oops = 1
    print oops

def try_loop(args):
    try:
        x = 0
        for i in args:
            if i is 0:
                continue
            elif i is None:
                break
            elif i is False:
                return
            i()
    except ValueError:
        x = 1
    finally:
        return x

def try_finally(a):
    try:
        for i in a:
            if i > 0:
                x = 1
    finally:
        return x

def try_finally_nested(m):
    try:
        try:
            try:
                f = m()
            except:
                pass
        finally:
            pass
    except:
        print f

_ERRORS = """
12:12: local variable 'e' might be referenced before assignment
12:15: local variable 'i' might be referenced before assignment
19:12: local variable 'a' might be referenced before assignment
63:16: local variable 'x' might be referenced before assignment
69:16: local variable 'x' might be referenced before assignment
77:14: local variable 'oops' might be referenced before assignment
93:16: local variable 'x' might be referenced before assignment
101:16: local variable 'x' might be referenced before assignment
113:15: local variable 'f' might be referenced before assignment
"""
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror

def simple_for(n):
    for i in n:
        a = 1
    return a

def simple_for_break(n):
    for i in n:
        a = 1
        break
    return a

def simple_for_pos(n):
    for i in n:
        a = 1
    else:
        a = 0
    return a

def simple_target(n):
    for i in n:
        pass
    return i

def simple_target_f(n):
    for i in n:
        i *= i
    return i

def simple_for_from(n):
    for i from 0 <= i <= n:
        x = i
    else:
        return x

def for_continue(l):
    for i in l:
        if i > 0:
            continue
        x = i
    print x

def for_break(l):
    for i in l:
        if i > 0:
            break
        x = i
    print x

def for_finally_continue(f):
    for i in f:
        try:
            x = i()
        finally:
            print x
            continue

def for_finally_break(f):
    for i in f:
        try:
            x = i()
        finally:
            print x
            break

def for_finally_outer(p, f):
    x = 1
    try:
        for i in f:
            print x
            x = i()
            if x > 0:
                continue
            if x < 0:
                break
    finally:
        del x

_ERRORS = """
8:12: local variable 'a' might be referenced before assignment
14:12: local variable 'a' might be referenced before assignment
26:12: local variable 'i' might be referenced before assignment
31:12: local variable 'i' might be referenced before assignment
37:16: local variable 'x' might be referenced before assignment
44:11: local variable 'x' might be referenced before assignment
51:11: local variable 'x' might be referenced before assignment
58:19: local variable 'x' might be referenced before assignment
66:19: local variable 'x' might be referenced before assignment
"""
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror

def unbound_inside_generator(*args):
    for i in args:
        yield x
        x = i + i

_ERRORS = """
7:15: local variable 'x' might be referenced before assignment
"""
# cython: language_level=2, warn.maybe_uninitialized=True
# mode: error
# tag: werror

def list_comp(a):
    r = [i for i in a]
    return i

# dict comp is a py3 feature and its loop variables don't leak here
def dict_comp(a):
    r = {i: j for i, j in a}
    return i, j

def dict_comp2(a):
    r = {i: j for i, j in a}
    print i, j
    i, j = 0, 0

_ERRORS = """
7:12: local variable 'i' might be referenced before assignment
12:12: undeclared name not builtin: i
12:15: undeclared name not builtin: j
16:11: local variable 'i' referenced before assignment
16:14: local variable 'j' referenced before assignment
"""
# cython: language_level=3, warn.maybe_uninitialized=True
# mode: error
# tag: werror

def ref(obj):
    pass

def list_comp(a):
    r = [i for i in a]
    ref(i)
    i = 0
    return r

def dict_comp(a):
    r = {i: j for i, j in a}
    ref(i)
    i = 0
    return r

_ERRORS = """
10:9: local variable 'i' referenced before assignment
16:9: local variable 'i' referenced before assignment
"""
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror

def simple_while(n):
    while n > 0:
        n -= 1
        a = 0
    return a

def simple_while_break(n):
    while n > 0:
        n -= 1
        break
    else:
        a = 1
    return a

def simple_while_pos(n):
    while n > 0:
        n -= 1
        a = 0
    else:
        a = 1
    return a

def while_finally_continue(p, f):
    while p():
        try:
            x = f()
        finally:
            print x
            continue

def while_finally_break(p, f):
    while p():
        try:
            x = f()
        finally:
            print x
            break

def while_finally_outer(p, f):
    x = 1
    try:
        while p():
            print x
            x = f()
            if x > 0:
                continue
            if x < 0:
                break
    finally:
        del x

_ERRORS = """
9:12: local variable 'a' might be referenced before assignment
17:12: local variable 'a' might be referenced before assignment
32:19: local variable 'x' might be referenced before assignment
40:19: local variable 'x' might be referenced before assignment
"""
# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror

def with_no_target(m):
    with m:
        print a
        a = 1

def unbound_manager(m1):
    with m2:
        pass
    m2 = m1

def with_target(m):
    with m as f:
        print(f)

def with_mgr(m):
    try:
        with m() as f:
            pass
    except:
        print f

_ERRORS = """
7:15: local variable 'a' referenced before assignment
11:11: local variable 'm2' referenced before assignment
24:15: local variable 'f' might be referenced before assignment
"""
# mode: error
# tag: werror, unreachable, control-flow

def try_finally():
    try:
        return
    finally:
        return
    print 'oops'

def try_return():
    try:
        return
    except:
        return
    print 'oops'

def for_return(a):
    for i in a:
        return
    else:
        return
    print 'oops'

def while_return(a):
    while a:
        return
    else:
        return
    print 'oops'

def forfrom_return(a):
    for i from 0 <= i <= a:
        return
    else:
        return
    print 'oops'

_ERRORS = """
9:4: Unreachable code
16:4: Unreachable code
23:4: Unreachable code
30:4: Unreachable code
37:4: Unreachable code
"""
# cython: warn.unused=True, warn.unused_arg=True, warn.unused_result=True
# mode: error
# tag: werror

def unused_variable():
    a = 1

def unused_cascade(arg):
    a, b = arg.split()
    return a

def unused_arg(arg):
    pass

def unused_result():
    r = 1 + 1
    r = 2
    return r

def unused_nested():
    def unused_one():
        pass

def unused_class():
    class Unused:
        pass

# this should not generate warning
def used(x, y):
    x.y = 1
    y[0] = 1
    lambda x: x

def unused_and_unassigned():
    cdef object foo
    cdef int i

_ERRORS = """
6:6: Unused entry 'a'
9:9: Unused entry 'b'
12:15: Unused argument 'arg'
16:6: Unused result in 'r'
21:4: Unused entry 'unused_one'
25:4: Unused entry 'Unused'
35:16: Unused entry 'foo'
36:13: Unused entry 'i'
"""
......@@ -47,15 +47,6 @@ def nousage():
"""
cdef object[int, ndim=2] buf
def printbuf():
"""
Just compilation.
"""
cdef object[int, ndim=2] buf
print buf
return
buf[0,0] = 0
@testcase
def acquire_release(o1, o2):
"""
......@@ -681,20 +672,20 @@ def mixed_get(object[int] buf, int unsafe_idx, int safe_idx):
#
# Coercions
#
@testcase
def coercions(object[unsigned char] uc):
    """
    TODO
    """
    print type(uc[0])
    uc[0] = -1
    print uc[0]
    uc[0] = <int>3.14
    print uc[0]
    cdef char* ch = b"asfd"
    cdef object[object] objbuf
    objbuf[3] = ch
## @testcase
## def coercions(object[unsigned char] uc):
## """
## TODO
## """
## print type(uc[0])
## uc[0] = -1
## print uc[0]
## uc[0] = <int>3.14
## print uc[0]
## cdef char* ch = b"asfd"
## cdef object[object] objbuf
## objbuf[3] = ch
#
......@@ -1064,7 +1055,8 @@ cdef class MockBuffer:
        stdlib.free(self.buffer)

    cdef void* create_buffer(self, data):
        cdef char* buf = <char*>stdlib.malloc(len(data) * self.itemsize)
        cdef size_t n = <size_t>(len(data) * self.itemsize)
        cdef char* buf = <char*>stdlib.malloc(n)
        cdef char* it = buf
        for value in data:
            self.write(it, value)
......@@ -1072,19 +1064,22 @@ cdef class MockBuffer:
        return buf

    cdef void* create_indirect_buffer(self, data, shape):
        cdef size_t n = 0
        cdef void** buf
        assert shape[0] == len(data)
        if len(shape) == 1:
            return self.create_buffer(data)
        else:
            shape = shape[1:]
            buf = <void**>stdlib.malloc(len(data) * sizeof(void*))
            n = <size_t>len(data) * sizeof(void*)
            buf = <void**>stdlib.malloc(n)
            for idx, subdata in enumerate(data):
                buf[idx] = self.create_indirect_buffer(subdata, shape)
        return buf

    cdef Py_ssize_t* list_to_sizebuf(self, l):
        cdef Py_ssize_t* buf = <Py_ssize_t*>stdlib.malloc(len(l) * sizeof(Py_ssize_t))
        cdef size_t n = <size_t>len(l) * sizeof(Py_ssize_t)
        cdef Py_ssize_t* buf = <Py_ssize_t*>stdlib.malloc(n)
        for i, x in enumerate(l):
            buf[i] = x
        return buf
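Both hunks above apply the same pattern: the byte count is computed into a size_t variable first and only then passed to stdlib.malloc(), rather than handing malloc() the Python-level product directly. A standalone sketch of the idea (names assumed, not taken from this commit):

# Hypothetical helper illustrating the malloc() pattern used above:
# compute the size as a C size_t before the allocation call.
from libc.stdlib cimport malloc

cdef double* alloc_doubles(Py_ssize_t count):
    cdef size_t n = <size_t>count * sizeof(double)
    return <double*>malloc(n)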
......@@ -1137,7 +1132,7 @@ cdef class MockBuffer:
cdef class CharMockBuffer(MockBuffer):
    cdef int write(self, char* buf, object value) except -1:
        (<char*>buf)[0] = <int>value
        (<char*>buf)[0] = <char>value
        return 0
    cdef get_itemsize(self): return sizeof(char)
    cdef get_default_format(self): return b"@b"
......@@ -1172,7 +1167,7 @@ cdef class UnsignedShortMockBuffer(MockBuffer):
cdef class FloatMockBuffer(MockBuffer):
    cdef int write(self, char* buf, object value) except -1:
        (<float*>buf)[0] = <float>value
        (<float*>buf)[0] = <float>(<double>value)
        return 0
    cdef get_itemsize(self): return sizeof(float)
    cdef get_default_format(self): return b"f"
......
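The FloatMockBuffer change above routes the value through an explicit <double> cast before narrowing to <float>, instead of casting the Python object straight to <float>. A rough standalone illustration (assumed names; one plausible motivation is simply to make the double-to-float narrowing an explicit, separate step):

# Hypothetical sketch of the two-step conversion used in FloatMockBuffer.write:
# the Python number becomes a C double first, then is narrowed to float.
def as_c_float(object value):
    cdef double d = <double>value   # Python object -> C double
    cdef float f = <float>d         # explicit narrowing
    return f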
cimport cython

uspace = u' '
ustring_with_a = u'abcdefg'
ustring_without_a = u'bcdefg'

@cython.test_fail_if_path_exists('//SimpleCallNode')
def ord_Py_UNICODE(unicode s):
    """
    >>> ord_Py_UNICODE(uspace)
    32
    """
    cdef Py_UNICODE u
    u = s[0]
    return ord(u)

@cython.test_assert_path_exists('//IntNode')
@cython.test_fail_if_path_exists('//SimpleCallNode')
def ord_const():
    """
    >>> ord_const()
    32
    """
    return ord(u' ')

@cython.test_assert_path_exists('//PrimaryCmpNode//IntNode')
@cython.test_fail_if_path_exists('//SimpleCallNode')
def unicode_for_loop_ord(unicode s):
    """
......@@ -13,6 +34,6 @@ def unicode_for_loop_ord(unicode s):
    False
    """
    for c in s:
        if ord(c) == u'a':
        if ord(c) == ord(u'a'):
            return True
    return False
......@@ -86,13 +86,13 @@ def del_local(a):
    >>> del_local(object())
    """
    del a
    assert a is None # Until we have unbound locals...
    assert 'a' not in locals()

def del_seq(a, b, c):
    """
    >>> del_seq(1, 2, 3)
    """
    del a, (b, c)
    assert a is None # Until we have unbound locals...
    assert b is None # Until we have unbound locals...
    assert c is None # Until we have unbound locals...
    assert 'a' not in locals()
    assert 'b' not in locals()
    assert 'c' not in locals()
......@@ -21,9 +21,11 @@ def wrap_hasattr(obj, name):
    False
    >>> wrap_hasattr(Foo(), "bar")
    False
    >>> wrap_hasattr(Foo(), "baz") #doctest: +ELLIPSIS
    >>> Foo().baz #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    ZeroDivisionError: ...
    >>> wrap_hasattr(Foo(), "baz")
    False
    """
    return hasattr(obj, name)
/*
This header is present to test effects of misdeclaring
types Cython-side.
*/
typedef long actually_long_t;
typedef short actually_short_t;
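The header above only supplies the typedefs; the misdeclaration itself happens on the Cython side of the test. A made-up sketch of what such a deliberate mismatch could look like (the header filename and the declarations are assumptions, not taken from this commit):

# Hypothetical .pxd-style sketch: declare the header's typedefs with
# deliberately wrong underlying C types to provoke a size mismatch.
cdef extern from "mismatch_header.h":
    ctypedef int actually_long_t    # really 'long' in the header
    ctypedef int actually_short_t   # really 'short' in the header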