Commit 8cd31587 authored by Mark Florisson

Merge branch 'master' into fusedmerge

Conflicts:
	Cython/Compiler/MemoryView.py
	Cython/Compiler/Naming.py
	Cython/Compiler/Nodes.py
	Cython/Compiler/Parsing.py
parents d16da160 5008e863
......@@ -97,7 +97,7 @@ globals_utility_code = UtilityCode(
# of Python names. Supporting cdef names in the module and write
# access requires a rewrite as a dedicated class.
proto = """
static PyObject* __Pyx_Globals(); /*proto*/
static PyObject* __Pyx_Globals(void); /*proto*/
""",
impl = '''
static PyObject* __Pyx_Globals() {
......
......@@ -317,6 +317,8 @@ class ContentHashingUtilityCode(UtilityCode):
return hash((self.proto, self.impl))
def __eq__(self, other):
if self is other:
return True
if not isinstance(other, type(self)):
return False
......
......@@ -3,7 +3,7 @@ from Cython.Compiler.ModuleNode import ModuleNode
from Cython.Compiler.Nodes import *
from Cython.Compiler.ExprNodes import *
class ExtractPxdCode(CythonTransform):
class ExtractPxdCode(VisitorTransform):
"""
Finds nodes in a pxd file that should generate code, and
returns them in a StatListNode.
......@@ -28,3 +28,7 @@ class ExtractPxdCode(CythonTransform):
# Do not visit children, nested funcdefnodes will
# also be moved by this action...
return node
def visit_Node(self, node):
self.visitchildren(node)
return node
......@@ -925,11 +925,12 @@ class CreateControlFlowGraph(CythonTransform):
raise InternalError, "Generic loops are not supported"
def visit_WithTargetAssignmentStatNode(self, node):
self.mark_assignment(node.lhs)
self.mark_assignment(node.lhs, node.rhs)
return node
def visit_WithStatNode(self, node):
self.visit(node.manager)
self.visit(node.enter_call)
self.visit(node.body)
return node
......
......@@ -93,6 +93,7 @@ frame_cname = pyrex_prefix + "frame"
frame_code_cname = pyrex_prefix + "frame_code"
binding_cfunc = pyrex_prefix + "binding_PyCFunctionType"
fused_func_prefix = pyrex_prefix + 'fuse_'
quick_temp_cname = pyrex_prefix + "temp" # temp variable for quick'n'dirty temping
genexpr_id_ref = 'genexpr'
......
......@@ -1916,6 +1916,14 @@ class CFuncDefNode(FuncDefNode):
if type_arg.type.is_buffer and 'inline' in self.modifiers:
warning(formal_arg.pos, "Buffer unpacking not optimized away.", 1)
if type_arg.type.is_buffer:
if self.type.nogil:
error(formal_arg.pos,
"Buffer may not be acquired without the GIL. "
"Consider using memoryview slices instead.")
elif 'inline' in self.modifiers:
warning(formal_arg.pos, "Buffer unpacking not optimized away.", 1)
self._validate_type_visibility(type.return_type, self.pos, env)
name = name_declarator.name
......@@ -3964,9 +3972,12 @@ class PyClassDefNode(ClassDefNode):
# find metaclass" dance at runtime
self.metaclass = item.value
del keyword_args.key_value_pairs[i]
if starstar_arg or (keyword_args and keyword_args.key_value_pairs):
if starstar_arg:
self.mkw = ExprNodes.KeywordArgsNode(
pos, keyword_args = keyword_args, starstar_arg = starstar_arg)
pos, keyword_args = keyword_args and keyword_args.key_value_pairs or [],
starstar_arg = starstar_arg)
elif keyword_args and keyword_args.key_value_pairs:
self.mkw = keyword_args
else:
self.mkw = ExprNodes.NullNode(pos)
if self.metaclass is None:
......@@ -5731,21 +5742,26 @@ class WithStatNode(StatNode):
# manager The with statement manager object
# target ExprNode the target lhs of the __enter__() call
# body StatNode
# enter_call ExprNode the call to the __enter__() method
# exit_var String the cname of the __exit__() method reference
child_attrs = ["manager", "target", "body"]
child_attrs = ["manager", "enter_call", "target", "body"]
has_target = False
enter_call = None
def analyse_declarations(self, env):
self.manager.analyse_declarations(env)
self.enter_call.analyse_declarations(env)
self.body.analyse_declarations(env)
def analyse_expressions(self, env):
self.manager.analyse_types(env)
self.enter_call.analyse_types(env)
self.body.analyse_expressions(env)
def generate_function_definitions(self, env, code):
self.manager.generate_function_definitions(env, code)
self.enter_call.generate_function_definitions(env, code)
self.body.generate_function_definitions(env, code)
def generate_execution_code(self, code):
......@@ -5764,34 +5780,22 @@ class WithStatNode(StatNode):
old_error_label = code.new_error_label()
intermediate_error_label = code.error_label
enter_func = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
code.putln("%s = PyObject_GetAttr(%s, %s); %s" % (
enter_func,
self.manager.py_result(),
code.get_py_string_const(EncodedString('__enter__'), identifier=True),
code.error_goto_if_null(enter_func, self.pos),
))
code.put_gotref(enter_func)
self.enter_call.generate_evaluation_code(code)
if not self.target:
self.enter_call.generate_disposal_code(code)
self.enter_call.free_temps(code)
else:
# Otherwise, the node will be cleaned up by the
# WithTargetAssignmentStatNode after assigning its result
# to the target of the 'with' statement.
pass
self.manager.generate_disposal_code(code)
self.manager.free_temps(code)
self.target_temp.allocate(code)
code.putln('%s = PyObject_Call(%s, ((PyObject *)%s), NULL); %s' % (
self.target_temp.result(),
enter_func,
Naming.empty_tuple,
code.error_goto_if_null(self.target_temp.result(), self.pos),
))
code.put_gotref(self.target_temp.result())
code.put_decref_clear(enter_func, py_object_type)
code.funcstate.release_temp(enter_func)
if not self.has_target:
code.put_decref_clear(self.target_temp.result(), type=py_object_type)
self.target_temp.release(code)
# otherwise, WithTargetAssignmentStatNode will do it for us
code.error_label = old_error_label
self.body.generate_execution_code(code)
if code.label_used(intermediate_error_label):
step_over_label = code.new_label()
code.put_goto(step_over_label)
code.put_label(intermediate_error_label)
......@@ -5810,27 +5814,43 @@ class WithTargetAssignmentStatNode(AssignmentNode):
# and frees its temp.
#
# lhs ExprNode the assignment target
# rhs TempNode the return value of the __enter__() call
# rhs CloneNode a (coerced) CloneNode for the orig_rhs (not owned by this node)
# orig_rhs ExprNode the original ExprNode of the rhs. this node will clean up the
# temps of the orig_rhs. basically, it takes ownership of the node
# when the WithStatNode is done with it.
child_attrs = ["lhs", "rhs"]
child_attrs = ["lhs"]
def analyse_declarations(self, env):
self.lhs.analyse_target_declaration(env)
def analyse_types(self, env):
def analyse_expressions(self, env):
self.rhs.analyse_types(env)
self.lhs.analyse_target_types(env)
self.lhs.gil_assignment_check(env)
self.orig_rhs = self.rhs
self.rhs = self.rhs.coerce_to(self.lhs.type, env)
def generate_execution_code(self, code):
if self.orig_rhs.type.is_pyobject:
# make sure rhs gets freed on errors, see below
old_error_label = code.new_error_label()
intermediate_error_label = code.error_label
self.rhs.generate_evaluation_code(code)
self.lhs.generate_assignment_code(self.rhs, code)
self.orig_rhs.release(code)
def generate_function_definitions(self, env, code):
self.rhs.generate_function_definitions(env, code)
if self.orig_rhs.type.is_pyobject:
self.orig_rhs.generate_disposal_code(code)
code.error_label = old_error_label
if code.label_used(intermediate_error_label):
step_over_label = code.new_label()
code.put_goto(step_over_label)
code.put_label(intermediate_error_label)
self.orig_rhs.generate_disposal_code(code)
code.put_goto(old_error_label)
code.put_label(step_over_label)
self.orig_rhs.free_temps(code)
def annotate(self, code):
self.lhs.annotate(code)
......@@ -6574,6 +6594,8 @@ class FromImportStatNode(StatNode):
else:
coerced_item = self.item.coerce_to(target.type, env)
self.interned_items.append((name, target, coerced_item))
if self.interned_items:
env.use_utility_code(raise_import_error_utility_code)
def generate_execution_code(self, code):
self.module.generate_evaluation_code(code)
......@@ -6588,11 +6610,16 @@ class FromImportStatNode(StatNode):
for name, target, coerced_item in self.interned_items:
cname = code.intern_identifier(name)
code.putln(
'%s = PyObject_GetAttr(%s, %s); %s' % (
'%s = PyObject_GetAttr(%s, %s);' % (
item_temp,
self.module.py_result(),
cname,
code.error_goto_if_null(item_temp, self.pos)))
cname))
code.putln('if (%s == NULL) {' % item_temp)
code.putln(
'if (PyErr_ExceptionMatches(PyExc_AttributeError)) '
'__Pyx_RaiseImportError(%s);' % cname)
code.putln(code.error_goto_if_null(item_temp, self.pos))
code.putln('}')
code.put_gotref(item_temp)
if coerced_item is None:
target.generate_assignment_code(self.item, code)
......@@ -8922,3 +8949,19 @@ init="""
memset(&%(PYX_NAN)s, 0xFF, sizeof(%(PYX_NAN)s));
""" % vars(Naming))
#------------------------------------------------------------------------------------
raise_import_error_utility_code = UtilityCode(
proto = '''
static CYTHON_INLINE void __Pyx_RaiseImportError(PyObject *name);
''',
impl = '''
static CYTHON_INLINE void __Pyx_RaiseImportError(PyObject *name) {
#if PY_MAJOR_VERSION < 3
PyErr_Format(PyExc_ImportError, "cannot import name %.230s",
PyString_AsString(name));
#else
PyErr_Format(PyExc_ImportError, "cannot import name %S", name);
#endif
}
''')
......@@ -1165,16 +1165,20 @@ class WithTransform(CythonTransform, SkipDeclarations):
self.visitchildren(node, 'body')
pos = node.pos
body, target, manager = node.body, node.target, node.manager
node.target_temp = ExprNodes.TempNode(pos, type=PyrexTypes.py_object_type)
node.enter_call = ExprNodes.SimpleCallNode(
pos, function = ExprNodes.AttributeNode(
pos, obj = ExprNodes.CloneNode(manager),
attribute = EncodedString('__enter__')),
args = [],
is_temp = True)
if target is not None:
node.has_target = True
body = Nodes.StatListNode(
pos, stats = [
Nodes.WithTargetAssignmentStatNode(
pos, lhs = target, rhs = node.target_temp),
body
])
node.target = None
pos, lhs = target,
rhs = ResultRefNode(node.enter_call),
orig_rhs = node.enter_call),
body])
excinfo_target = ResultRefNode(
pos=pos, type=Builtin.tuple_type, may_hold_none=False)
......@@ -2334,6 +2338,17 @@ class TransformBuiltinMethods(EnvTransform):
error(node.pos, u"'%s' not a valid cython attribute or is being used incorrectly" % attribute)
return node
def visit_ExecStatNode(self, node):
lenv = self.current_env()
self.visitchildren(node)
if len(node.args) == 1:
node.args.append(ExprNodes.GlobalsExprNode(node.pos))
if not lenv.is_module_scope:
node.args.append(
ExprNodes.LocalsExprNode(
node.pos, self.current_scope_node(), lenv))
return node
def _inject_locals(self, node, func_name):
# locals()/dir()/vars() builtins
lenv = self.current_env()
......@@ -2342,7 +2357,6 @@ class TransformBuiltinMethods(EnvTransform):
# not the builtin
return node
pos = node.pos
local_names = [ var.name for var in lenv.entries.values() if var.name ]
if func_name in ('locals', 'vars'):
if func_name == 'locals' and len(node.args) > 0:
error(self.pos, "Builtin 'locals()' called with wrong number of args, expected 0, got %d"
......@@ -2354,11 +2368,7 @@ class TransformBuiltinMethods(EnvTransform):
% len(node.args))
if len(node.args) > 0:
return node # nothing to do
items = [ ExprNodes.DictItemNode(pos,
key=ExprNodes.IdentifierStringNode(pos, value=var),
value=ExprNodes.NameNode(pos, name=var, allow_null=True))
for var in local_names ]
return ExprNodes.DictNode(pos, key_value_pairs=items, exclude_null_values=True)
return ExprNodes.LocalsExprNode(pos, self.current_scope_node(), lenv)
else: # dir()
if len(node.args) > 1:
error(self.pos, "Builtin 'dir()' called with wrong number of args, expected 0-1, got %d"
......@@ -2366,16 +2376,36 @@ class TransformBuiltinMethods(EnvTransform):
if len(node.args) > 0:
# optimised in Builtin.py
return node
if lenv.is_py_class_scope or lenv.is_module_scope:
if lenv.is_py_class_scope:
pyclass = self.current_scope_node()
locals_dict = ExprNodes.CloneNode(pyclass.dict)
else:
locals_dict = ExprNodes.GlobalsExprNode(pos)
return ExprNodes.SimpleCallNode(
pos,
function=ExprNodes.AttributeNode(
pos, obj=locals_dict, attribute="keys"),
args=[])
local_names = [ var.name for var in lenv.entries.values() if var.name ]
items = [ ExprNodes.IdentifierStringNode(pos, value=var)
for var in local_names ]
return ExprNodes.ListNode(pos, args=items)
def visit_SimpleCallNode(self, node):
if isinstance(node.function, ExprNodes.NameNode):
func_name = node.function.name
if func_name in ('dir', 'locals', 'vars'):
return self._inject_locals(node, func_name)
def _inject_eval(self, node, func_name):
lenv = self.current_env()
entry = lenv.lookup_here(func_name)
if entry or len(node.args) != 1:
return node
# Inject globals and locals
node.args.append(ExprNodes.GlobalsExprNode(node.pos))
if not lenv.is_module_scope:
node.args.append(
ExprNodes.LocalsExprNode(
node.pos, self.current_scope_node(), lenv))
return node
def visit_SimpleCallNode(self, node):
# cython.foo
function = node.function.as_cython_attribute()
if function:
......@@ -2428,6 +2458,13 @@ class TransformBuiltinMethods(EnvTransform):
u"'%s' not a valid cython language construct" % function)
self.visitchildren(node)
if isinstance(node, ExprNodes.SimpleCallNode) and node.function.is_name:
func_name = node.function.name
if func_name in ('dir', 'locals', 'vars'):
return self._inject_locals(node, func_name)
if func_name == 'eval':
return self._inject_eval(node, func_name)
return node
......
......@@ -46,7 +46,7 @@ cdef p_power(PyrexScanner s)
cdef p_new_expr(PyrexScanner s)
cdef p_trailer(PyrexScanner s, node1)
cpdef p_call_parse_args(PyrexScanner s, bint allow_genexp = *)
cdef p_call_build_packed_args(pos, positional_args, keyword_args, star_arg)
cdef p_call_build_packed_args(pos, positional_args, keyword_args, star_arg, starstar_arg)
cdef p_call(PyrexScanner s, function)
cdef p_index(PyrexScanner s, base)
cdef p_subscript_list(PyrexScanner s)
......
......@@ -440,7 +440,8 @@ def p_call_parse_args(s, allow_genexp = True):
s.expect(')')
return positional_args, keyword_args, star_arg, starstar_arg
def p_call_build_packed_args(pos, positional_args, keyword_args, star_arg):
def p_call_build_packed_args(pos, positional_args, keyword_args,
star_arg, starstar_arg):
arg_tuple = None
keyword_dict = None
if positional_args or not star_arg:
......@@ -454,11 +455,17 @@ def p_call_build_packed_args(pos, positional_args, keyword_args, star_arg):
operand2 = star_arg_tuple)
else:
arg_tuple = star_arg_tuple
if keyword_args:
if keyword_args or starstar_arg:
keyword_args = [ExprNodes.DictItemNode(pos=key.pos, key=key, value=value)
for key, value in keyword_args]
keyword_dict = ExprNodes.DictNode(pos,
key_value_pairs = keyword_args)
if starstar_arg:
keyword_dict = ExprNodes.KeywordArgsNode(
pos,
starstar_arg = starstar_arg,
keyword_args = keyword_args)
else:
keyword_dict = ExprNodes.DictNode(
pos, key_value_pairs = keyword_args)
return arg_tuple, keyword_dict
def p_call(s, function):
......@@ -474,12 +481,11 @@ def p_call(s, function):
args = positional_args)
else:
arg_tuple, keyword_dict = p_call_build_packed_args(
pos, positional_args, keyword_args, star_arg)
pos, positional_args, keyword_args, star_arg, starstar_arg)
return ExprNodes.GeneralCallNode(pos,
function = function,
positional_args = arg_tuple,
keyword_args = keyword_dict,
starstar_arg = starstar_arg)
keyword_args = keyword_dict)
#lambdef: 'lambda' [varargslist] ':' test
......@@ -1130,8 +1136,6 @@ def p_exec_statement(s):
if s.sy == ',':
s.next()
args.append(p_test(s))
else:
error(pos, "'exec' currently requires a target mapping (globals/locals)")
return Nodes.ExecStatNode(pos, args = args)
def p_del_statement(s):
......@@ -2738,9 +2742,6 @@ def p_ctypedef_statement(s, ctx):
return p_fused_definition(s, pos, ctx)
else:
base_type = p_c_base_type(s, nonempty = 1)
if base_type.name is None:
s.error("Syntax error in ctypedef statement")
declarator = p_c_declarator(s, ctx, is_type = 1, nonempty = 1)
s.expect_newline("Syntax error in ctypedef statement")
return Nodes.CTypeDefNode(
......@@ -2829,7 +2830,7 @@ def p_class_statement(s, decorators):
positional_args, keyword_args, star_arg, starstar_arg = \
p_call_parse_args(s, allow_genexp = False)
arg_tuple, keyword_dict = p_call_build_packed_args(
pos, positional_args, keyword_args, star_arg)
pos, positional_args, keyword_args, star_arg, None)
if arg_tuple is None:
# XXX: empty arg_tuple
arg_tuple = ExprNodes.TupleNode(pos, args = [])
......
......@@ -240,7 +240,7 @@ def create_pxd_pipeline(context, scope, module_name):
return [
parse_pxd_stage_factory(context, scope, module_name)
] + create_pipeline(context, 'pxd') + [
ExtractPxdCode(context)
ExtractPxdCode()
]
def create_py_pipeline(context, options, result):
......
......@@ -1843,7 +1843,7 @@ class CClassScope(ClassScope):
if defining and entry.func_cname:
error(pos, "'%s' already defined" % name)
#print "CClassScope.declare_cfunction: checking signature" ###
if entry.is_final_cmethod:
if entry.is_final_cmethod and entry.is_inherited:
error(pos, "Overriding final methods is not allowed")
elif type.same_c_signature_as(entry.type, as_cmethod = 1) and type.nogil == entry.type.nogil:
pass
......
......@@ -65,6 +65,11 @@ class MarkAssignments(CythonTransform):
# Could use this info to infer cdef class attributes...
pass
def visit_WithTargetAssignmentStatNode(self, node):
self.mark_assignment(node.lhs, node.rhs)
self.visitchildren(node)
return node
def visit_SingleAssignmentNode(self, node):
self.mark_assignment(node.lhs, node.rhs)
self.visitchildren(node)
......
......@@ -138,15 +138,23 @@ class ResultRefNode(AtomicExprNode):
# nothing to do here
return self
def type_dependencies(self, env):
if self.expression:
return self.expression.type_dependencies(env)
else:
return ()
def analyse_types(self, env):
if self.expression is not None:
self.type = self.expression.type
def infer_type(self, env):
if self.expression is not None:
return self.expression.infer_type(env)
if self.type is not None:
return self.type
if self.expression is not None:
if self.expression.type is not None:
return self.expression.type
return self.expression.infer_type(env)
assert False, "cannot infer type of ResultRefNode"
def may_be_none(self):
......
......@@ -319,18 +319,36 @@ class EnvTransform(CythonTransform):
This transformation keeps a stack of the environments.
"""
def __call__(self, root):
self.env_stack = [root.scope]
self.env_stack = [(root, root.scope)]
return super(EnvTransform, self).__call__(root)
def current_env(self):
return self.env_stack[-1]
return self.env_stack[-1][1]
def current_scope_node(self):
return self.env_stack[-1][0]
def visit_FuncDefNode(self, node):
self.env_stack.append(node.local_scope)
self.env_stack.append((node, node.local_scope))
self.visitchildren(node)
self.env_stack.pop()
return node
def visit_ClassDefNode(self, node):
self.env_stack.append((node, node.scope))
self.visitchildren(node)
self.env_stack.pop()
return node
def visit_ScopedExprNode(self, node):
if node.expr_scope:
self.env_stack.append((node, node.expr_scope))
self.visitchildren(node)
self.env_stack.pop()
else:
self.visitchildren(node)
return node
class RecursiveNodeReplacer(VisitorTransform):
"""
......
cdef extern from "omp.h":
ctypedef struct omp_lock_t
ctypedef struct omp_nest_lock_t
ctypedef struct omp_lock_t:
pass
ctypedef struct omp_nest_lock_t:
pass
ctypedef enum omp_sched_t:
omp_sched_static = 1,
......
cimport cython
@cython.final
cdef class Packet:
cdef public object link
cdef public object ident
......@@ -12,20 +13,24 @@ cdef class Packet:
cdef class TaskRec:
pass
@cython.final
cdef class DeviceTaskRec(TaskRec):
cdef public object pending
@cython.final
cdef class IdleTaskRec(TaskRec):
cdef public long control
cdef public Py_ssize_t count
@cython.final
cdef class HandlerTaskRec(TaskRec):
cdef public object work_in # = None
cdef public object device_in # = None
cpdef workInAdd(self,p)
cpdef deviceInAdd(self,p)
cpdef workInAdd(self, Packet p)
cpdef deviceInAdd(self, Packet p)
@cython.final
cdef class WorkerTaskRec(TaskRec):
cdef public object destination # = I_HANDLERA
cdef public Py_ssize_t count
......@@ -60,7 +65,7 @@ cdef class Task(TaskState):
cdef public object input # = w
cdef public object handle # = r
cpdef addPacket(self,Packet p,old)
cpdef addPacket(self,Packet p,Task old)
cpdef runTask(self)
cpdef waitTask(self)
cpdef hold(self)
......@@ -70,19 +75,19 @@ cdef class Task(TaskState):
cdef class DeviceTask(Task):
@cython.locals(d=DeviceTaskRec)
cpdef fn(self,Packet pkt,r)
cpdef fn(self,Packet pkt,DeviceTaskRec r)
cdef class HandlerTask(Task):
@cython.locals(h=HandlerTaskRec)
cpdef fn(self,Packet pkt,r)
cpdef fn(self,Packet pkt,HandlerTaskRec r)
cdef class IdleTask(Task):
@cython.locals(i=IdleTaskRec)
cpdef fn(self,Packet pkt,r)
cpdef fn(self,Packet pkt,IdleTaskRec r)
cdef class WorkTask(Task):
@cython.locals(w=WorkerTaskRec)
cpdef fn(self,Packet pkt,r)
cpdef fn(self,Packet pkt,WorkerTaskRec r)
@cython.locals(t=Task)
cpdef schedule()
......
......@@ -2,13 +2,14 @@ Cython's entire documentation suite is currently being overhauled.
For the time being, I'll use this page to post notes.
The previous Cython documentation files are hosted at http://hg.cython.org/cython-docs
The previous Cython documentation files are hosted at
http://hg.cython.org/cython-docs
Notes
=======
1) Some css work should definately be done.
1) Some css work should definitely be done.
2) Use local 'top-of-page' contents rather than the sidebar, imo.
3) Provide a link from each (sub)section to the TOC of the page.
4) Fix cython highlighter for cdef blocks
Background
----------
[brain dump]
The "Old Cython Users Guide" is a derivative of the old Pyrex documentation.
It underwent substantial editing by Peter Alexandar
to become the Reference Guide, which is oriented around bullet points
and lists rather than prose. This transition was incomplete.
At nearly the same time, Robert, Dag, and Stefan wrote a tutorial as
part of the SciPy proceedings. It was felt that the content there was
cleaner and more up to date than anything else, and this became the
basis for the "Getting Started" and "Tutorials" sections. However,
it simply doesn't have as much content as the old documentation used to.
Eventually, it seems all of the old users manual could be whittled
down into independent tutorial topics. Much discussion of what we'd
like to see is at
http://www.mail-archive.com/cython-dev@codespeak.net/msg06945.html
There is currently a huge amount of redundancy, but no one section has
it all.
Also, we should go through the wiki enhancement proposal list
and make sure to transfer the (done) ones into the user manual.
......@@ -420,6 +420,8 @@ Cython provides facilities for releasing the Global Interpreter Lock (GIL)
before calling C code, and for acquiring the GIL in functions that are to be
called back from C code that is executed without the GIL.
.. _nogil:
Releasing the GIL
^^^^^^^^^^^^^^^^^
......
......@@ -7,8 +7,11 @@ Using Parallelism
**********************************
Cython supports native parallelism through the :py:mod:`cython.parallel`
module. To use this kind of parallelism, the GIL must be released. It
currently supports OpenMP, but later on more backends might be supported.
module. To use this kind of parallelism, the GIL must be released
(see :ref:`Releasing the GIL <nogil>`).
It currently supports OpenMP, but later on more backends might be supported.
__ nogil_
.. function:: prange([start,] stop[, step], nogil=False, schedule=None)
......@@ -59,11 +62,11 @@ currently supports OpenMP, but later on more backends might be supported.
+-----------------+------------------------------------------------------+
The default schedule is implementation defined. For more information consult
the OpenMP specification: [#]_.
the OpenMP specification [#]_.
Example with a reduction::
from cython.parallel import prange, parallel, threadid
from cython.parallel import prange
cdef int i
cdef int sum = 0
......@@ -75,7 +78,7 @@ currently supports OpenMP, but later on more backends might be supported.
Example with a shared numpy array::
from cython.parallel import *
from cython.parallel import prange
def func(np.ndarray[double] x, double alpha):
cdef Py_ssize_t i
......@@ -94,7 +97,7 @@ currently supports OpenMP, but later on more backends might be supported.
Example with thread-local buffers::
from cython.parallel import *
from cython.parallel import parallel, prange
from libc.stdlib cimport abort, malloc, free
cdef Py_ssize_t idx, i, n = 100
......@@ -175,12 +178,20 @@ particular order::
In the example above it is undefined whether an exception shall be raised,
whether it will simply break or whether it will return 2.
Nested Parallelism
==================
Nested parallelism is currently disabled due to a bug in gcc 4.5 [#]_. However,
you can freely call functions with parallel sections from a parallel section.
Using OpenMP Functions
======================
OpenMP functions can be used by cimporting ``openmp``::
from cython.parallel cimport parallel
cimport openmp
cdef int num_threads
openmp.omp_set_dynamic(1)
with nogil, parallel():
num_threads = openmp.omp_get_num_threads()
...
.. rubric:: References
.. [#] http://www.openmp.org/mp-documents/spec30.pdf
.. [#] http://gcc.gnu.org/bugzilla/show_bug.cgi?id=49897
Background
----------
[brain dump]
The "Old Cython Users Guide" is a derivative of the old Pyrex documentation. It underwent substantial editing by Peter Alexandar
to become the Reference Guide, which is oriented around bullet points
and lists rather than prose. This transition was incomplete.
At nearly the same time, Robert, Dag, and Stefan wrote a tutorial as
part of the SciPy proceedings. It was felt that the content there was
cleaner and more up to date than anything else, and this became the
basis for the "Getting Started" and "Tutorials" sections. However,
it simply doesn't have as much content as the old documentation used to.
Eventually, it seems all of the old users manual could be whittled
down into independent tutorial topics. Much discussion of what we'd
like to see is at
http://www.mail-archive.com/cython-dev@codespeak.net/msg06945.html
There is currently a huge amount of redundancy, but no one section has
it all.
Also, we should go through the wiki enhancement proposal list and make sure to transfer the (done) ones into the user manual.
......@@ -183,7 +183,12 @@ def load_module(name, pyxfilename, pyxbuild_dir=None):
so_path = build_module(name, pyxfilename, pyxbuild_dir)
mod = imp.load_dynamic(name, so_path)
assert mod.__file__ == so_path, (mod.__file__, so_path)
except Exception, e:
except Exception:
if pyxargs.load_py_module_on_import_failure and pyxfilename.endswith('.py'):
# try to fall back to normal import
mod = imp.load_source(name, pyxfilename)
assert mod.__file__ in (pyxfilename, pyxfilename+'c', pyxfilename+'o'), (mod.__file__, pyxfilename)
else:
import traceback
raise ImportError("Building module failed: %s" %
traceback.format_exception_only(*sys.exc_info()[:2])),None,sys.exc_info()[2]
......@@ -345,7 +350,8 @@ class PyxArgs(object):
##pyxargs=None
def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
setup_args={}, reload_support=False ):
setup_args={}, reload_support=False,
load_py_module_on_import_failure=False):
"""Main entry point. Call this to install the .pyx import hook in
your meta-path for a single Python process. If you want it to be
installed whenever you use Python, add it to your sitecustomize
......@@ -374,6 +380,15 @@ def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
reload(<pyxmodulename>), e.g. after a change in the Cython code.
Additional files <so_path>.reloadNN may arise on that account, when
the previously loaded module file cannot be overwritten.
``load_py_module_on_import_failure``: If the compilation of a .py
file succeeds, but the subsequent import fails for some reason,
retry the import with the normal .py module instead of the
compiled module. Note that this may lead to unpredictable results
for modules that change the system state during their import, as
the second import will rerun these modifications in whatever state
the system was left after the import of the compiled module
failed.
"""
if not build_dir:
build_dir = os.path.expanduser('~/.pyxbld')
......@@ -384,6 +399,7 @@ def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
pyxargs.build_in_temp = build_in_temp
pyxargs.setup_args = (setup_args or {}).copy()
pyxargs.reload_support = reload_support
pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure
has_py_importer = False
has_pyx_importer = False
......
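For context (not part of the diff): a minimal usage sketch of the new ``load_py_module_on_import_failure`` option, following the ``install()`` signature and the import style shown elsewhere in this commit::

    from pyximport import pyximport

    # If compiling/importing a .py module fails, fall back to the
    # plain Python import of that module instead of raising.
    pyximport.install(pyimport=True, load_py_module_on_import_failure=True)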
......@@ -285,8 +285,8 @@ class ErrorWriter(object):
class TestBuilder(object):
def __init__(self, rootdir, workdir, selectors, exclude_selectors, annotate,
cleanup_workdir, cleanup_sharedlibs, with_pyregr, cython_only,
languages, test_bugs, fork, language_level):
cleanup_workdir, cleanup_sharedlibs, cleanup_failures,
with_pyregr, cython_only, languages, test_bugs, fork, language_level):
self.rootdir = rootdir
self.workdir = workdir
self.selectors = selectors
......@@ -294,6 +294,7 @@ class TestBuilder(object):
self.annotate = annotate
self.cleanup_workdir = cleanup_workdir
self.cleanup_sharedlibs = cleanup_sharedlibs
self.cleanup_failures = cleanup_failures
self.with_pyregr = with_pyregr
self.cython_only = cython_only
self.languages = languages
......@@ -410,6 +411,7 @@ class TestBuilder(object):
annotate=self.annotate,
cleanup_workdir=self.cleanup_workdir,
cleanup_sharedlibs=self.cleanup_sharedlibs,
cleanup_failures=self.cleanup_failures,
cython_only=self.cython_only,
fork=self.fork,
language_level=self.language_level,
......@@ -418,8 +420,8 @@ class TestBuilder(object):
class CythonCompileTestCase(unittest.TestCase):
def __init__(self, test_directory, workdir, module, language='c',
expect_errors=False, annotate=False, cleanup_workdir=True,
cleanup_sharedlibs=True, cython_only=False, fork=True,
language_level=2, warning_errors=False):
cleanup_sharedlibs=True, cleanup_failures=True, cython_only=False,
fork=True, language_level=2, warning_errors=False):
self.test_directory = test_directory
self.workdir = workdir
self.module = module
......@@ -428,6 +430,7 @@ class CythonCompileTestCase(unittest.TestCase):
self.annotate = annotate
self.cleanup_workdir = cleanup_workdir
self.cleanup_sharedlibs = cleanup_sharedlibs
self.cleanup_failures = cleanup_failures
self.cython_only = cython_only
self.fork = fork
self.language_level = language_level
......@@ -461,16 +464,17 @@ class CythonCompileTestCase(unittest.TestCase):
del sys.modules[self.module]
except KeyError:
pass
cleanup_c_files = WITH_CYTHON and self.cleanup_workdir
cleanup_lib_files = self.cleanup_sharedlibs
cleanup = self.cleanup_failures or self.success
cleanup_c_files = WITH_CYTHON and self.cleanup_workdir and cleanup
cleanup_lib_files = self.cleanup_sharedlibs and cleanup
if os.path.exists(self.workdir):
for rmfile in os.listdir(self.workdir):
if not cleanup_c_files:
if rmfile[-2:] in (".c", ".h") or rmfile[-4:] == ".cpp":
if (rmfile[-2:] in (".c", ".h") or
rmfile[-4:] == ".cpp" or
rmfile.endswith(".html")):
continue
if not cleanup_lib_files and rmfile.endswith(".so") or rmfile.endswith(".dll"):
continue
if self.annotate and rmfile.endswith(".html"):
if not cleanup_lib_files and (rmfile.endswith(".so") or rmfile.endswith(".dll")):
continue
try:
rmfile = os.path.join(self.workdir, rmfile)
......@@ -484,7 +488,9 @@ class CythonCompileTestCase(unittest.TestCase):
os.makedirs(self.workdir)
def runTest(self):
self.success = False
self.runCompileTest()
self.success = True
def runCompileTest(self):
self.compile(self.test_directory, self.module, self.workdir,
......@@ -676,8 +682,13 @@ class CythonRunTestCase(CythonCompileTestCase):
try:
self.setUp()
try:
self.success = False
self.runCompileTest()
failures, errors = len(result.failures), len(result.errors)
self.run_tests(result)
if failures == len(result.failures) and errors == len(result.errors):
# No new errors...
self.success = True
finally:
check_thread_termination()
except Exception:
......@@ -1032,6 +1043,7 @@ class EndToEndTest(unittest.TestCase):
os.chdir(self.old_dir)
def runTest(self):
self.success = False
commands = (self.commands
.replace("CYTHON", "PYTHON %s" % os.path.join(self.cython_root, 'cython.py'))
.replace("PYTHON", sys.executable))
......@@ -1055,6 +1067,7 @@ class EndToEndTest(unittest.TestCase):
os.environ['PYTHONPATH'] = old_path
else:
del os.environ['PYTHONPATH']
self.success = True
# TODO: Support cython_freeze needed here as well.
......@@ -1278,6 +1291,9 @@ def main():
parser.add_option("--no-cleanup-sharedlibs", dest="cleanup_sharedlibs",
action="store_false", default=True,
help="do not delete the generated shared libary files (allows manual module experimentation)")
parser.add_option("--no-cleanup-failures", dest="cleanup_failures",
action="store_false", default=True,
help="enable --no-cleanup and --no-cleanup-sharedlibs for failed tests only")
parser.add_option("--no-cython", dest="with_cython",
action="store_false", default=True,
help="do not run the Cython compiler, only the C compiler")
......@@ -1354,6 +1370,8 @@ def main():
help="working directory")
parser.add_option("--debug", dest="for_debugging", default=False, action="store_true",
help="configure for easier use with a debugger (e.g. gdb)")
parser.add_option("--pyximport-py", dest="pyximport_py", default=False, action="store_true",
help="use pyximport to automatically compile imported .pyx and .py files")
options, cmd_args = parser.parse_args()
......@@ -1509,7 +1527,8 @@ def main():
if options.filetests and languages:
filetests = TestBuilder(ROOTDIR, WORKDIR, selectors, exclude_selectors,
options.annotate_source, options.cleanup_workdir,
options.cleanup_sharedlibs, options.pyregr,
options.cleanup_sharedlibs, options.cleanup_failures,
options.pyregr,
options.cython_only, languages, test_bugs,
options.fork, options.language_level)
test_suite.addTest(filetests.build_suite())
......@@ -1519,7 +1538,8 @@ def main():
if os.path.isdir(sys_pyregr_dir):
filetests = TestBuilder(ROOTDIR, WORKDIR, selectors, exclude_selectors,
options.annotate_source, options.cleanup_workdir,
options.cleanup_sharedlibs, True,
options.cleanup_sharedlibs, options.cleanup_failures,
True,
options.cython_only, languages, test_bugs,
options.fork, sys.version_info[0])
sys.stderr.write("Including CPython regression tests in %s\n" % sys_pyregr_dir)
......@@ -1532,6 +1552,11 @@ def main():
else:
test_runner = unittest.TextTestRunner(verbosity=options.verbosity)
if options.pyximport_py:
from pyximport import pyximport
pyximport.install(pyimport=True, build_dir=os.path.join(WORKDIR, '_pyximport'),
load_py_module_on_import_failure=True)
result = test_runner.run(test_suite)
if options.coverage or options.coverage_xml or options.coverage_html:
......
......@@ -28,6 +28,7 @@ pyregr.test_socket
pyregr.test_threading
pyregr.test_sys
pyregr.test_pep3131
pyregr.test_multiprocessing
# CPython regression tests that don't make sense
pyregr.test_gdb
......
......@@ -31,3 +31,7 @@ cdef char f = d.getValue2()
f = e.getValue2()
del b, e
ctypedef TemplateTest1[int] TemplateTest1_int
cdef TemplateTest1_int aa
......@@ -4,5 +4,6 @@
ctypedef object[float] mybuffer
_ERRORS = u"""
4:23: Syntax error in ctypedef statement
1:0: Buffer vars not allowed in module scope
4:0: Buffer types only allowed as function local variables
"""
......@@ -40,6 +40,9 @@ cdef int[:, ::view.contiguous, ::view.indirect_contiguous] a6
#cdef int[::view.generic_contiguous, ::view.contiguous] a7
#cdef int[::view.contiguous, ::view.generic_contiguous] a8
ctypedef int *intp
cdef intp[:, :] myarray
# These are VALID
cdef int[::view.indirect_contiguous, ::view.contiguous] a9
......@@ -61,4 +64,5 @@ _ERRORS = u'''
31:9: Dimension may not be contiguous
37:9: Only one direct contiguous axis may be specified.
38:9:Only dimensions 3 and 2 may be contiguous and direct
44:10: Invalid base type for memoryview slice
'''
# mode: error
cimport numpy as np
cdef void func(np.ndarray[np.double_t, ndim=1] myarray) nogil:
pass
_ERRORS = u"""
5:15: Buffer may not be acquired without the GIL. Consider using memoryview slices instead.
"""
......@@ -12,6 +12,15 @@ max_long_long = 2 ** (sizeof(long long) * 8 - 1) - 1
cimport cython
def abs_as_name():
"""
>>> _abs = abs_as_name()
>>> _abs(-5)
5
"""
x = abs
return x
def py_abs(a):
"""
>>> py_abs(-5)
......
......@@ -8,7 +8,7 @@ DEF INT_VAL = 1
def _func(a,b,c):
return a+b+c
@cython.test_fail_if_path_exists("//BinopNode")
@cython.test_fail_if_path_exists("//AddNode")
def add():
"""
>>> add() == 1+2+3+4
......@@ -16,7 +16,7 @@ def add():
"""
return 1+2+3+4
@cython.test_fail_if_path_exists("//BinopNode")
#@cython.test_fail_if_path_exists("//AddNode")
def add_var(a):
"""
>>> add_var(10) == 1+2+10+3+4
......@@ -24,7 +24,7 @@ def add_var(a):
"""
return 1+2 +a+ 3+4
@cython.test_fail_if_path_exists("//BinopNode")
@cython.test_fail_if_path_exists("//AddNode", "//SubNode")
def neg():
"""
>>> neg() == -1 -2 - (-3+4)
......@@ -32,7 +32,7 @@ def neg():
"""
return -1 -2 - (-3+4)
@cython.test_fail_if_path_exists("//BinopNode")
@cython.test_fail_if_path_exists("//AddNode", "//MulNode", "//DivNode")
def long_int_mix():
"""
>>> long_int_mix() == 1 + (2 * 3) // 2
......@@ -43,7 +43,7 @@ def long_int_mix():
"""
return 1L + (2 * 3L) // 2
@cython.test_fail_if_path_exists("//BinopNode")
@cython.test_fail_if_path_exists("//AddNode", "//MulNode", "//DivNode")
def char_int_mix():
"""
>>> char_int_mix() == 1 + (ord(' ') * 3) // 2 + ord('A')
......@@ -51,7 +51,7 @@ def char_int_mix():
"""
return 1L + (c' ' * 3L) // 2 + c'A'
@cython.test_fail_if_path_exists("//BinopNode")
@cython.test_fail_if_path_exists("//AddNode", "//MulNode")
def int_cast():
"""
>>> int_cast() == 1 + 2 * 6000
......@@ -59,7 +59,7 @@ def int_cast():
"""
return <int>(1 + 2 * 6000)
@cython.test_fail_if_path_exists("//BinopNode")
@cython.test_fail_if_path_exists("//MulNode")
def mul():
"""
>>> mul() == 1*60*1000
......@@ -67,7 +67,7 @@ def mul():
"""
return 1*60*1000
@cython.test_fail_if_path_exists("//BinopNode")
@cython.test_fail_if_path_exists("//AddNode", "//MulNode")
def arithm():
"""
>>> arithm() == 9*2+3*8//6-10
......@@ -75,7 +75,7 @@ def arithm():
"""
return 9*2+3*8//6-10
@cython.test_fail_if_path_exists("//BinopNode")
@cython.test_fail_if_path_exists("//AddNode", "//MulNode")
def parameters():
"""
>>> parameters() == _func(-1 -2, - (-3+4), 1*2*3)
......@@ -83,7 +83,7 @@ def parameters():
"""
return _func(-1 -2, - (-3+4), 1*2*3)
@cython.test_fail_if_path_exists("//BinopNode")
#@cython.test_fail_if_path_exists("//AddNode")
def lists():
"""
>>> lists() == [1,2,3] + [4,5,6]
......@@ -91,6 +91,176 @@ def lists():
"""
return [1,2,3] + [4,5,6]
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_right_len1():
"""
>>> multiplied_lists_right_len1() == [1] * 5
True
"""
return [1] * 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_right():
"""
>>> multiplied_lists_right() == [1,2,3] * 5
True
"""
return [1,2,3] * 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_left():
"""
>>> multiplied_lists_left() == [1,2,3] * 5
True
"""
return 5 * [1,2,3]
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_neg():
"""
>>> multiplied_lists_neg() == [1,2,3] * -5
True
"""
return [1,2,3] * -5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_nonconst(x):
"""
>>> multiplied_lists_nonconst(5) == [1,2,3] * 5
True
>>> multiplied_lists_nonconst(-5) == [1,2,3] * -5
True
>>> multiplied_lists_nonconst(0) == [1,2,3] * 0
True
>>> [1,2,3] * 'abc' # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
>>> multiplied_nonconst_tuple_arg('abc') # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
>>> [1,2,3] * 1.0 # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
>>> multiplied_nonconst_tuple_arg(1.0) # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
"""
return [1,2,3] * x
@cython.test_assert_path_exists("//MulNode")
def multiplied_lists_nonconst_left(x):
"""
>>> multiplied_lists_nonconst_left(5) == 5 * [1,2,3]
True
>>> multiplied_lists_nonconst_left(-5) == -5 * [1,2,3]
True
>>> multiplied_lists_nonconst_left(0) == 0 * [1,2,3]
True
"""
return x * [1,2,3]
@cython.test_fail_if_path_exists("//MulNode//ListNode")
@cython.test_assert_path_exists("//MulNode")
def multiplied_lists_nonconst_expression(x):
"""
>>> multiplied_lists_nonconst_expression(5) == [1,2,3] * (5 * 2)
True
>>> multiplied_lists_nonconst_expression(-5) == [1,2,3] * (-5 * 2)
True
>>> multiplied_lists_nonconst_expression(0) == [1,2,3] * (0 * 2)
True
"""
return [1,2,3] * (x*2)
cdef side_effect(int x):
print x
return x
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_with_side_effects():
"""
>>> multiplied_lists_with_side_effects() == [1,2,3] * 5
1
2
3
True
"""
return [side_effect(1), side_effect(2), side_effect(3)] * 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_nonconst_with_side_effects(x):
"""
>>> multiplied_lists_nonconst_with_side_effects(5) == [1,2,3] * 5
1
2
3
True
"""
return [side_effect(1), side_effect(2), side_effect(3)] * x
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_nonconst_tuple_arg(x):
"""
>>> multiplied_nonconst_tuple_arg(5) == (1,2) * 5
True
>>> multiplied_nonconst_tuple_arg(-5) == (1,2) * -5
True
>>> multiplied_nonconst_tuple_arg(0) == (1,2) * 0
True
>>> (1,2) * 'abc' # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
>>> multiplied_nonconst_tuple_arg('abc') # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
>>> (1,2) * 1.0 # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
>>> multiplied_nonconst_tuple_arg(1.0) # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
"""
return (1,2) * x
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_nonconst_tuple(x):
"""
>>> multiplied_nonconst_tuple(5) == (1,2) * (5+1)
True
"""
return (1,2) * (x + 1)
MULT = 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_global_nonconst_tuple():
"""
>>> multiplied_global_nonconst_tuple() == (1,2,3) * 5
1
2
3
True
"""
return (side_effect(1), side_effect(2), side_effect(3)) * MULT
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_const_tuple():
"""
>>> multiplied_const_tuple() == (1,2) * 5
True
"""
return (1,2) * 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_const_tuple_len1():
"""
>>> multiplied_const_tuple_len1() == (1,) * 5
True
"""
return (1,) * 5
@cython.test_fail_if_path_exists("//PrimaryCmpNode")
def compile_time_DEF():
"""
......
# mode: run
# tags: kwargs, call
# ticket: 717
def f(**kwargs):
return sorted(kwargs.items())
def test_call(kwargs):
"""
>>> kwargs = {'b' : 2}
>>> f(a=1, **kwargs)
[('a', 1), ('b', 2)]
>>> test_call(kwargs)
[('a', 1), ('b', 2)]
>>> kwargs = {'a' : 2}
>>> f(a=1, **kwargs)
Traceback (most recent call last):
TypeError: f() got multiple values for keyword argument 'a'
FIXME: remove ellipsis, fix function name
>>> test_call(kwargs) # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: ...() got multiple values for keyword argument 'a'
"""
return f(a=1, **kwargs)
# mode: run
# tags: eval
GLOBAL = 123
def eval_simple(local):
"""
>>> eval_simple(321)
(123, 321)
"""
return eval('GLOBAL, local')
def eval_class_scope():
"""
>>> eval_class_scope().c
3
"""
class TestClassScope:
a = 1
b = 2
c = eval('a + b')
return TestClassScope
def eval_locals(a, b):
"""
>>> eval_locals(1, 2)
(1, 2)
"""
return eval('a, b', {}, locals())
# mode: run
# tags: exec
exec "GLOBAL = 1234"
def exec_module_scope():
"""
>>> globals()['GLOBAL']
1234
"""
def exec_func_scope():
"""
>>> exec_func_scope()
{'a': 'b', 'G': 1234}
"""
d = {}
exec "d['a'] = 'b'; d['G'] = GLOBAL"
return d
def exec_pyclass_scope():
"""
>>> obj = exec_pyclass_scope()
>>> obj.a
'b'
>>> obj.G
1234
"""
class TestExec:
exec "a = 'b'; G = GLOBAL"
return TestExec
......@@ -29,6 +29,20 @@ cdef class FinalType(object):
self.cpdef_method()
def test_external_call():
"""
>>> test_external_call()
"""
f = FinalType()
return f.cpdef_method()
def test_external_call_in_temp():
"""
>>> test_external_call_in_temp()
"""
return FinalType().cpdef_method()
cdef class BaseTypeWithFinalMethods(object):
"""
>>> obj = BaseTypeWithFinalMethods()
......
# mode: run
# ticket: 734
def test_import_error():
"""
>>> test_import_error()
Traceback (most recent call last):
ImportError: cannot import name xxx
"""
from sys import xxx
# mode: run
cimport cython
@cython.final
cdef class TypedContextManager(object):
cdef double __enter__(self): # not callable from Python !
return 2.0
# FIXME: inline __exit__() as well
def __exit__(self, exc_type, exc_value, exc_tb):
return 0
def with_statement():
"""
>>> with_statement()
2.0
"""
with TypedContextManager() as x:
return x
......@@ -125,6 +125,29 @@ def index_pop_typed(list L, int i):
"""
return L.pop(i)
@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists('//SimpleCallNode/AttributeNode')
def index_pop_literal(list L):
"""
>>> L = list(range(10))
>>> index_pop_literal(L)
0
>>> L
[1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> while L:
... _ = index_pop_literal(L)
>>> L
[]
>>> index_pop_literal(L)
Traceback (most recent call last):
...
IndexError: pop from empty list
"""
return L.pop(0)
@cython.test_fail_if_path_exists('//PythonCapiCallNode')
def crazy_pop(L):
......
# mode: run
# ticket: 731
# tags: locals, vars, dir
LOCALS = locals()
GLOBALS = globals()
DIR_SAME = sorted(dir()) == sorted(globals().keys())
def test_module_locals_and_dir():
"""
>>> LOCALS is GLOBALS
True
>>> DIR_SAME
True
"""
def test_class_locals_and_dir():
"""
>>> klass = test_class_locals_and_dir()
>>> 'visible' in klass.locs and 'not_visible' not in klass.locs
True
>>> klass.names
['visible']
"""
not_visible = 1234
class Foo:
visible = 4321
names = dir()
locs = locals()
return Foo
......@@ -40,7 +40,7 @@ def call_non_dict_test():
return func(**NonDict())
def call_non_dict_test_kw():
return func(a=5, **NonDict())
return func(b=5, **NonDict())
class SubDict(dict):
......@@ -51,4 +51,4 @@ def call_sub_dict_test():
return func(**SubDict())
def call_sub_dict_test_kw():
return func(a=5, **SubDict())
return func(b=5, **SubDict())
......@@ -221,6 +221,24 @@ def c_functions():
assert typeof(f) == 'int (*)(int)', typeof(f)
assert 2 == f(1)
def builtin_functions():
"""
>>> _abs, _getattr = builtin_functions()
Python object
Python object
>>> _abs(-1)
1
>>> class o(object): pass
>>> o.x = 1
>>> _getattr(o, 'x')
1
"""
_abs = abs
print(typeof(_abs))
_getattr = getattr
print(typeof(_getattr))
return _abs, _getattr
def cascade():
"""
>>> cascade()
......@@ -513,6 +531,18 @@ def common_extension_type_base():
w = CC()
assert typeof(w) == "Python object", typeof(w)
cdef class AcceptsKeywords:
def __init__(self, *args, **kwds):
pass
@infer_types(None)
def constructor_call():
"""
>>> constructor_call()
"""
x = AcceptsKeywords(a=1, b=2)
assert typeof(x) == "AcceptsKeywords", typeof(x)
@infer_types(None)
def large_literals():
......@@ -529,6 +559,63 @@ def large_literals():
assert typeof(d) == "Python object", typeof(d)
class EmptyContextManager(object):
def __enter__(self):
return None
def __exit__(self, *args):
return 0
def with_statement():
"""
>>> with_statement()
Python object
Python object
"""
x = 1.0
with EmptyContextManager() as x:
print(typeof(x))
print(typeof(x))
return x
@cython.final
cdef class TypedContextManager(object):
cpdef double __enter__(self):
return 2.0
def __exit__(self, *args):
return 0
def with_statement_typed():
"""
>>> with_statement_typed()
double
double
2.0
"""
x = 1.0
with TypedContextManager() as x:
print(typeof(x))
print(typeof(x))
return x
def with_statement_untyped():
"""
>>> with_statement_untyped()
Python object
Python object
2.0
"""
x = 1.0
cdef object t = TypedContextManager()
with t as x:
print(typeof(x))
print(typeof(x))
return x
def self_lookup(a):
b = a
b = b.foo(keyword=None)
print typeof(b)
# Regression test for trac #638.
def bar(foo):
......
......@@ -181,6 +181,28 @@ def multimanager():
print('%s %s %s %s %s' % (a, b, c, d, e))
print(nested)
class GetManager(object):
def get(self, *args):
return ContextManager(*args)
def manager_from_expression():
"""
>>> manager_from_expression()
enter
1
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
enter
2
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
"""
with GetManager().get(1) as x:
print(x)
g = GetManager()
with g.get(2) as x:
print(x)
# Tests borrowed from pyregr test_with.py,
# modified to follow the constraints of Cython.
import unittest
......