Commit 8cd31587 authored by Mark Florisson

Merge branch 'master' into fusedmerge

Conflicts:
	Cython/Compiler/MemoryView.py
	Cython/Compiler/Naming.py
	Cython/Compiler/Nodes.py
	Cython/Compiler/Parsing.py
parents d16da160 5008e863
...@@ -97,7 +97,7 @@ globals_utility_code = UtilityCode( ...@@ -97,7 +97,7 @@ globals_utility_code = UtilityCode(
# of Python names. Supporting cdef names in the module and write # of Python names. Supporting cdef names in the module and write
# access requires a rewrite as a dedicated class. # access requires a rewrite as a dedicated class.
proto = """ proto = """
static PyObject* __Pyx_Globals(); /*proto*/ static PyObject* __Pyx_Globals(void); /*proto*/
""", """,
impl = ''' impl = '''
static PyObject* __Pyx_Globals() { static PyObject* __Pyx_Globals() {
......
...@@ -317,6 +317,8 @@ class ContentHashingUtilityCode(UtilityCode): ...@@ -317,6 +317,8 @@ class ContentHashingUtilityCode(UtilityCode):
return hash((self.proto, self.impl)) return hash((self.proto, self.impl))
def __eq__(self, other): def __eq__(self, other):
if self is other:
return True
if not isinstance(other, type(self)): if not isinstance(other, type(self)):
return False return False
......
...@@ -3,7 +3,7 @@ from Cython.Compiler.ModuleNode import ModuleNode ...@@ -3,7 +3,7 @@ from Cython.Compiler.ModuleNode import ModuleNode
from Cython.Compiler.Nodes import * from Cython.Compiler.Nodes import *
from Cython.Compiler.ExprNodes import * from Cython.Compiler.ExprNodes import *
class ExtractPxdCode(CythonTransform): class ExtractPxdCode(VisitorTransform):
""" """
Finds nodes in a pxd file that should generate code, and Finds nodes in a pxd file that should generate code, and
returns them in a StatListNode. returns them in a StatListNode.
...@@ -28,3 +28,7 @@ class ExtractPxdCode(CythonTransform): ...@@ -28,3 +28,7 @@ class ExtractPxdCode(CythonTransform):
# Do not visit children, nested funcdefnodes will # Do not visit children, nested funcdefnodes will
# also be moved by this action... # also be moved by this action...
return node return node
def visit_Node(self, node):
self.visitchildren(node)
return node
...@@ -925,11 +925,12 @@ class CreateControlFlowGraph(CythonTransform): ...@@ -925,11 +925,12 @@ class CreateControlFlowGraph(CythonTransform):
raise InternalError, "Generic loops are not supported" raise InternalError, "Generic loops are not supported"
def visit_WithTargetAssignmentStatNode(self, node): def visit_WithTargetAssignmentStatNode(self, node):
self.mark_assignment(node.lhs) self.mark_assignment(node.lhs, node.rhs)
return node return node
def visit_WithStatNode(self, node): def visit_WithStatNode(self, node):
self.visit(node.manager) self.visit(node.manager)
self.visit(node.enter_call)
self.visit(node.body) self.visit(node.body)
return node return node
......
...@@ -93,6 +93,7 @@ frame_cname = pyrex_prefix + "frame" ...@@ -93,6 +93,7 @@ frame_cname = pyrex_prefix + "frame"
frame_code_cname = pyrex_prefix + "frame_code" frame_code_cname = pyrex_prefix + "frame_code"
binding_cfunc = pyrex_prefix + "binding_PyCFunctionType" binding_cfunc = pyrex_prefix + "binding_PyCFunctionType"
fused_func_prefix = pyrex_prefix + 'fuse_' fused_func_prefix = pyrex_prefix + 'fuse_'
quick_temp_cname = pyrex_prefix + "temp" # temp variable for quick'n'dirty temping
genexpr_id_ref = 'genexpr' genexpr_id_ref = 'genexpr'
......
...@@ -1916,6 +1916,14 @@ class CFuncDefNode(FuncDefNode): ...@@ -1916,6 +1916,14 @@ class CFuncDefNode(FuncDefNode):
if type_arg.type.is_buffer and 'inline' in self.modifiers: if type_arg.type.is_buffer and 'inline' in self.modifiers:
warning(formal_arg.pos, "Buffer unpacking not optimized away.", 1) warning(formal_arg.pos, "Buffer unpacking not optimized away.", 1)
if type_arg.type.is_buffer:
if self.type.nogil:
error(formal_arg.pos,
"Buffer may not be acquired without the GIL. "
"Consider using memoryview slices instead.")
elif 'inline' in self.modifiers:
warning(formal_arg.pos, "Buffer unpacking not optimized away.", 1)
self._validate_type_visibility(type.return_type, self.pos, env) self._validate_type_visibility(type.return_type, self.pos, env)
name = name_declarator.name name = name_declarator.name
...@@ -3964,9 +3972,12 @@ class PyClassDefNode(ClassDefNode): ...@@ -3964,9 +3972,12 @@ class PyClassDefNode(ClassDefNode):
# find metaclass" dance at runtime # find metaclass" dance at runtime
self.metaclass = item.value self.metaclass = item.value
del keyword_args.key_value_pairs[i] del keyword_args.key_value_pairs[i]
if starstar_arg or (keyword_args and keyword_args.key_value_pairs): if starstar_arg:
self.mkw = ExprNodes.KeywordArgsNode( self.mkw = ExprNodes.KeywordArgsNode(
pos, keyword_args = keyword_args, starstar_arg = starstar_arg) pos, keyword_args = keyword_args and keyword_args.key_value_pairs or [],
starstar_arg = starstar_arg)
elif keyword_args and keyword_args.key_value_pairs:
self.mkw = keyword_args
else: else:
self.mkw = ExprNodes.NullNode(pos) self.mkw = ExprNodes.NullNode(pos)
if self.metaclass is None: if self.metaclass is None:
...@@ -5731,21 +5742,26 @@ class WithStatNode(StatNode): ...@@ -5731,21 +5742,26 @@ class WithStatNode(StatNode):
# manager The with statement manager object # manager The with statement manager object
# target ExprNode the target lhs of the __enter__() call # target ExprNode the target lhs of the __enter__() call
# body StatNode # body StatNode
# enter_call ExprNode the call to the __enter__() method
# exit_var String the cname of the __exit__() method reference
child_attrs = ["manager", "target", "body"] child_attrs = ["manager", "enter_call", "target", "body"]
has_target = False enter_call = None
def analyse_declarations(self, env): def analyse_declarations(self, env):
self.manager.analyse_declarations(env) self.manager.analyse_declarations(env)
self.enter_call.analyse_declarations(env)
self.body.analyse_declarations(env) self.body.analyse_declarations(env)
def analyse_expressions(self, env): def analyse_expressions(self, env):
self.manager.analyse_types(env) self.manager.analyse_types(env)
self.enter_call.analyse_types(env)
self.body.analyse_expressions(env) self.body.analyse_expressions(env)
def generate_function_definitions(self, env, code): def generate_function_definitions(self, env, code):
self.manager.generate_function_definitions(env, code) self.manager.generate_function_definitions(env, code)
self.enter_call.generate_function_definitions(env, code)
self.body.generate_function_definitions(env, code) self.body.generate_function_definitions(env, code)
def generate_execution_code(self, code): def generate_execution_code(self, code):
...@@ -5764,40 +5780,28 @@ class WithStatNode(StatNode): ...@@ -5764,40 +5780,28 @@ class WithStatNode(StatNode):
old_error_label = code.new_error_label() old_error_label = code.new_error_label()
intermediate_error_label = code.error_label intermediate_error_label = code.error_label
enter_func = code.funcstate.allocate_temp(py_object_type, manage_ref=True) self.enter_call.generate_evaluation_code(code)
code.putln("%s = PyObject_GetAttr(%s, %s); %s" % ( if not self.target:
enter_func, self.enter_call.generate_disposal_code(code)
self.manager.py_result(), self.enter_call.free_temps(code)
code.get_py_string_const(EncodedString('__enter__'), identifier=True), else:
code.error_goto_if_null(enter_func, self.pos), # Otherwise, the node will be cleaned up by the
)) # WithTargetAssignmentStatNode after assigning its result
code.put_gotref(enter_func) # to the target of the 'with' statement.
pass
self.manager.generate_disposal_code(code) self.manager.generate_disposal_code(code)
self.manager.free_temps(code) self.manager.free_temps(code)
self.target_temp.allocate(code)
code.putln('%s = PyObject_Call(%s, ((PyObject *)%s), NULL); %s' % (
self.target_temp.result(),
enter_func,
Naming.empty_tuple,
code.error_goto_if_null(self.target_temp.result(), self.pos),
))
code.put_gotref(self.target_temp.result())
code.put_decref_clear(enter_func, py_object_type)
code.funcstate.release_temp(enter_func)
if not self.has_target:
code.put_decref_clear(self.target_temp.result(), type=py_object_type)
self.target_temp.release(code)
# otherwise, WithTargetAssignmentStatNode will do it for us
code.error_label = old_error_label code.error_label = old_error_label
self.body.generate_execution_code(code) self.body.generate_execution_code(code)
step_over_label = code.new_label() if code.label_used(intermediate_error_label):
code.put_goto(step_over_label) step_over_label = code.new_label()
code.put_label(intermediate_error_label) code.put_goto(step_over_label)
code.put_decref_clear(self.exit_var, py_object_type) code.put_label(intermediate_error_label)
code.put_goto(old_error_label) code.put_decref_clear(self.exit_var, py_object_type)
code.put_label(step_over_label) code.put_goto(old_error_label)
code.put_label(step_over_label)
code.funcstate.release_temp(self.exit_var) code.funcstate.release_temp(self.exit_var)
code.putln('}') code.putln('}')
...@@ -5809,28 +5813,44 @@ class WithTargetAssignmentStatNode(AssignmentNode): ...@@ -5809,28 +5813,44 @@ class WithTargetAssignmentStatNode(AssignmentNode):
# This is a special cased assignment that steals the RHS reference # This is a special cased assignment that steals the RHS reference
# and frees its temp. # and frees its temp.
# #
# lhs ExprNode the assignment target # lhs ExprNode the assignment target
# rhs TempNode the return value of the __enter__() call # rhs CloneNode a (coerced) CloneNode for the orig_rhs (not owned by this node)
# orig_rhs ExprNode the original ExprNode of the rhs. this node will clean up the
# temps of the orig_rhs. basically, it takes ownership of the node
# when the WithStatNode is done with it.
child_attrs = ["lhs", "rhs"] child_attrs = ["lhs"]
def analyse_declarations(self, env): def analyse_declarations(self, env):
self.lhs.analyse_target_declaration(env) self.lhs.analyse_target_declaration(env)
def analyse_types(self, env): def analyse_expressions(self, env):
self.rhs.analyse_types(env) self.rhs.analyse_types(env)
self.lhs.analyse_target_types(env) self.lhs.analyse_target_types(env)
self.lhs.gil_assignment_check(env) self.lhs.gil_assignment_check(env)
self.orig_rhs = self.rhs
self.rhs = self.rhs.coerce_to(self.lhs.type, env) self.rhs = self.rhs.coerce_to(self.lhs.type, env)
def generate_execution_code(self, code): def generate_execution_code(self, code):
if self.orig_rhs.type.is_pyobject:
# make sure rhs gets freed on errors, see below
old_error_label = code.new_error_label()
intermediate_error_label = code.error_label
self.rhs.generate_evaluation_code(code) self.rhs.generate_evaluation_code(code)
self.lhs.generate_assignment_code(self.rhs, code) self.lhs.generate_assignment_code(self.rhs, code)
self.orig_rhs.release(code)
def generate_function_definitions(self, env, code): if self.orig_rhs.type.is_pyobject:
self.rhs.generate_function_definitions(env, code) self.orig_rhs.generate_disposal_code(code)
code.error_label = old_error_label
if code.label_used(intermediate_error_label):
step_over_label = code.new_label()
code.put_goto(step_over_label)
code.put_label(intermediate_error_label)
self.orig_rhs.generate_disposal_code(code)
code.put_goto(old_error_label)
code.put_label(step_over_label)
self.orig_rhs.free_temps(code)
def annotate(self, code): def annotate(self, code):
self.lhs.annotate(code) self.lhs.annotate(code)
...@@ -6574,6 +6594,8 @@ class FromImportStatNode(StatNode): ...@@ -6574,6 +6594,8 @@ class FromImportStatNode(StatNode):
else: else:
coerced_item = self.item.coerce_to(target.type, env) coerced_item = self.item.coerce_to(target.type, env)
self.interned_items.append((name, target, coerced_item)) self.interned_items.append((name, target, coerced_item))
if self.interned_items:
env.use_utility_code(raise_import_error_utility_code)
def generate_execution_code(self, code): def generate_execution_code(self, code):
self.module.generate_evaluation_code(code) self.module.generate_evaluation_code(code)
...@@ -6588,11 +6610,16 @@ class FromImportStatNode(StatNode): ...@@ -6588,11 +6610,16 @@ class FromImportStatNode(StatNode):
for name, target, coerced_item in self.interned_items: for name, target, coerced_item in self.interned_items:
cname = code.intern_identifier(name) cname = code.intern_identifier(name)
code.putln( code.putln(
'%s = PyObject_GetAttr(%s, %s); %s' % ( '%s = PyObject_GetAttr(%s, %s);' % (
item_temp, item_temp,
self.module.py_result(), self.module.py_result(),
cname, cname))
code.error_goto_if_null(item_temp, self.pos))) code.putln('if (%s == NULL) {' % item_temp)
code.putln(
'if (PyErr_ExceptionMatches(PyExc_AttributeError)) '
'__Pyx_RaiseImportError(%s);' % cname)
code.putln(code.error_goto_if_null(item_temp, self.pos))
code.putln('}')
code.put_gotref(item_temp) code.put_gotref(item_temp)
if coerced_item is None: if coerced_item is None:
target.generate_assignment_code(self.item, code) target.generate_assignment_code(self.item, code)
...@@ -8922,3 +8949,19 @@ init=""" ...@@ -8922,3 +8949,19 @@ init="""
memset(&%(PYX_NAN)s, 0xFF, sizeof(%(PYX_NAN)s)); memset(&%(PYX_NAN)s, 0xFF, sizeof(%(PYX_NAN)s));
""" % vars(Naming)) """ % vars(Naming))
#------------------------------------------------------------------------------------
raise_import_error_utility_code = UtilityCode(
proto = '''
static CYTHON_INLINE void __Pyx_RaiseImportError(PyObject *name);
''',
impl = '''
static CYTHON_INLINE void __Pyx_RaiseImportError(PyObject *name) {
#if PY_MAJOR_VERSION < 3
PyErr_Format(PyExc_ImportError, "cannot import name %.230s",
PyString_AsString(name));
#else
PyErr_Format(PyExc_ImportError, "cannot import name %S", name);
#endif
}
''')
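For context, a plain-Python sketch of the behaviour this utility gives "from module import name": an AttributeError raised during the attribute lookup is reported as an ImportError, which is what the generated code above does via __Pyx_RaiseImportError. The module and attribute names here are hypothetical.

import types

fake_module = types.ModuleType("fake_module")  # hypothetical stand-in for the imported module
try:
    try:
        item = getattr(fake_module, "missing_name")
    except AttributeError:
        # mirrors what __Pyx_RaiseImportError does at the C level
        raise ImportError("cannot import name missing_name")
except ImportError as exc:
    print(exc)  # cannot import name missing_name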
...@@ -1165,16 +1165,20 @@ class WithTransform(CythonTransform, SkipDeclarations): ...@@ -1165,16 +1165,20 @@ class WithTransform(CythonTransform, SkipDeclarations):
self.visitchildren(node, 'body') self.visitchildren(node, 'body')
pos = node.pos pos = node.pos
body, target, manager = node.body, node.target, node.manager body, target, manager = node.body, node.target, node.manager
node.target_temp = ExprNodes.TempNode(pos, type=PyrexTypes.py_object_type) node.enter_call = ExprNodes.SimpleCallNode(
pos, function = ExprNodes.AttributeNode(
pos, obj = ExprNodes.CloneNode(manager),
attribute = EncodedString('__enter__')),
args = [],
is_temp = True)
if target is not None: if target is not None:
node.has_target = True
body = Nodes.StatListNode( body = Nodes.StatListNode(
pos, stats = [ pos, stats = [
Nodes.WithTargetAssignmentStatNode( Nodes.WithTargetAssignmentStatNode(
pos, lhs = target, rhs = node.target_temp), pos, lhs = target,
body rhs = ResultRefNode(node.enter_call),
]) orig_rhs = node.enter_call),
node.target = None body])
excinfo_target = ResultRefNode( excinfo_target = ResultRefNode(
pos=pos, type=Builtin.tuple_type, may_hold_none=False) pos=pos, type=Builtin.tuple_type, may_hold_none=False)
...@@ -2334,6 +2338,17 @@ class TransformBuiltinMethods(EnvTransform): ...@@ -2334,6 +2338,17 @@ class TransformBuiltinMethods(EnvTransform):
error(node.pos, u"'%s' not a valid cython attribute or is being used incorrectly" % attribute) error(node.pos, u"'%s' not a valid cython attribute or is being used incorrectly" % attribute)
return node return node
def visit_ExecStatNode(self, node):
lenv = self.current_env()
self.visitchildren(node)
if len(node.args) == 1:
node.args.append(ExprNodes.GlobalsExprNode(node.pos))
if not lenv.is_module_scope:
node.args.append(
ExprNodes.LocalsExprNode(
node.pos, self.current_scope_node(), lenv))
return node
def _inject_locals(self, node, func_name): def _inject_locals(self, node, func_name):
# locals()/dir()/vars() builtins # locals()/dir()/vars() builtins
lenv = self.current_env() lenv = self.current_env()
...@@ -2342,7 +2357,6 @@ class TransformBuiltinMethods(EnvTransform): ...@@ -2342,7 +2357,6 @@ class TransformBuiltinMethods(EnvTransform):
# not the builtin # not the builtin
return node return node
pos = node.pos pos = node.pos
local_names = [ var.name for var in lenv.entries.values() if var.name ]
if func_name in ('locals', 'vars'): if func_name in ('locals', 'vars'):
if func_name == 'locals' and len(node.args) > 0: if func_name == 'locals' and len(node.args) > 0:
error(self.pos, "Builtin 'locals()' called with wrong number of args, expected 0, got %d" error(self.pos, "Builtin 'locals()' called with wrong number of args, expected 0, got %d"
...@@ -2354,11 +2368,7 @@ class TransformBuiltinMethods(EnvTransform): ...@@ -2354,11 +2368,7 @@ class TransformBuiltinMethods(EnvTransform):
% len(node.args)) % len(node.args))
if len(node.args) > 0: if len(node.args) > 0:
return node # nothing to do return node # nothing to do
items = [ ExprNodes.DictItemNode(pos, return ExprNodes.LocalsExprNode(pos, self.current_scope_node(), lenv)
key=ExprNodes.IdentifierStringNode(pos, value=var),
value=ExprNodes.NameNode(pos, name=var, allow_null=True))
for var in local_names ]
return ExprNodes.DictNode(pos, key_value_pairs=items, exclude_null_values=True)
else: # dir() else: # dir()
if len(node.args) > 1: if len(node.args) > 1:
error(self.pos, "Builtin 'dir()' called with wrong number of args, expected 0-1, got %d" error(self.pos, "Builtin 'dir()' called with wrong number of args, expected 0-1, got %d"
...@@ -2366,16 +2376,36 @@ class TransformBuiltinMethods(EnvTransform): ...@@ -2366,16 +2376,36 @@ class TransformBuiltinMethods(EnvTransform):
if len(node.args) > 0: if len(node.args) > 0:
# optimised in Builtin.py # optimised in Builtin.py
return node return node
if lenv.is_py_class_scope or lenv.is_module_scope:
if lenv.is_py_class_scope:
pyclass = self.current_scope_node()
locals_dict = ExprNodes.CloneNode(pyclass.dict)
else:
locals_dict = ExprNodes.GlobalsExprNode(pos)
return ExprNodes.SimpleCallNode(
pos,
function=ExprNodes.AttributeNode(
pos, obj=locals_dict, attribute="keys"),
args=[])
local_names = [ var.name for var in lenv.entries.values() if var.name ]
items = [ ExprNodes.IdentifierStringNode(pos, value=var) items = [ ExprNodes.IdentifierStringNode(pos, value=var)
for var in local_names ] for var in local_names ]
return ExprNodes.ListNode(pos, args=items) return ExprNodes.ListNode(pos, args=items)
def visit_SimpleCallNode(self, node): def _inject_eval(self, node, func_name):
if isinstance(node.function, ExprNodes.NameNode): lenv = self.current_env()
func_name = node.function.name entry = lenv.lookup_here(func_name)
if func_name in ('dir', 'locals', 'vars'): if entry or len(node.args) != 1:
return self._inject_locals(node, func_name) return node
# Inject globals and locals
node.args.append(ExprNodes.GlobalsExprNode(node.pos))
if not lenv.is_module_scope:
node.args.append(
ExprNodes.LocalsExprNode(
node.pos, self.current_scope_node(), lenv))
return node
def visit_SimpleCallNode(self, node):
# cython.foo # cython.foo
function = node.function.as_cython_attribute() function = node.function.as_cython_attribute()
if function: if function:
...@@ -2428,6 +2458,13 @@ class TransformBuiltinMethods(EnvTransform): ...@@ -2428,6 +2458,13 @@ class TransformBuiltinMethods(EnvTransform):
u"'%s' not a valid cython language construct" % function) u"'%s' not a valid cython language construct" % function)
self.visitchildren(node) self.visitchildren(node)
if isinstance(node, ExprNodes.SimpleCallNode) and node.function.is_name:
func_name = node.function.name
if func_name in ('dir', 'locals', 'vars'):
return self._inject_locals(node, func_name)
if func_name == 'eval':
return self._inject_eval(node, func_name)
return node return node
......
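As a rough illustration (plain Python, hypothetical function) of the injection performed by visit_ExecStatNode and _inject_eval above: a bare eval() or exec() with a single argument is handed the module globals and, outside module scope, the local variables, so the evaluated code keeps seeing local names.

def f():
    x = 41
    # roughly what the transform produces for a bare eval("x + 1"):
    return eval("x + 1", globals(), locals())

print(f())  # 42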
...@@ -46,7 +46,7 @@ cdef p_power(PyrexScanner s) ...@@ -46,7 +46,7 @@ cdef p_power(PyrexScanner s)
cdef p_new_expr(PyrexScanner s) cdef p_new_expr(PyrexScanner s)
cdef p_trailer(PyrexScanner s, node1) cdef p_trailer(PyrexScanner s, node1)
cpdef p_call_parse_args(PyrexScanner s, bint allow_genexp = *) cpdef p_call_parse_args(PyrexScanner s, bint allow_genexp = *)
cdef p_call_build_packed_args(pos, positional_args, keyword_args, star_arg) cdef p_call_build_packed_args(pos, positional_args, keyword_args, star_arg, starstar_arg)
cdef p_call(PyrexScanner s, function) cdef p_call(PyrexScanner s, function)
cdef p_index(PyrexScanner s, base) cdef p_index(PyrexScanner s, base)
cdef p_subscript_list(PyrexScanner s) cdef p_subscript_list(PyrexScanner s)
......
...@@ -440,7 +440,8 @@ def p_call_parse_args(s, allow_genexp = True): ...@@ -440,7 +440,8 @@ def p_call_parse_args(s, allow_genexp = True):
s.expect(')') s.expect(')')
return positional_args, keyword_args, star_arg, starstar_arg return positional_args, keyword_args, star_arg, starstar_arg
def p_call_build_packed_args(pos, positional_args, keyword_args, star_arg): def p_call_build_packed_args(pos, positional_args, keyword_args,
star_arg, starstar_arg):
arg_tuple = None arg_tuple = None
keyword_dict = None keyword_dict = None
if positional_args or not star_arg: if positional_args or not star_arg:
...@@ -454,11 +455,17 @@ def p_call_build_packed_args(pos, positional_args, keyword_args, star_arg): ...@@ -454,11 +455,17 @@ def p_call_build_packed_args(pos, positional_args, keyword_args, star_arg):
operand2 = star_arg_tuple) operand2 = star_arg_tuple)
else: else:
arg_tuple = star_arg_tuple arg_tuple = star_arg_tuple
if keyword_args: if keyword_args or starstar_arg:
keyword_args = [ExprNodes.DictItemNode(pos=key.pos, key=key, value=value) keyword_args = [ExprNodes.DictItemNode(pos=key.pos, key=key, value=value)
for key, value in keyword_args] for key, value in keyword_args]
keyword_dict = ExprNodes.DictNode(pos, if starstar_arg:
key_value_pairs = keyword_args) keyword_dict = ExprNodes.KeywordArgsNode(
pos,
starstar_arg = starstar_arg,
keyword_args = keyword_args)
else:
keyword_dict = ExprNodes.DictNode(
pos, key_value_pairs = keyword_args)
return arg_tuple, keyword_dict return arg_tuple, keyword_dict
def p_call(s, function): def p_call(s, function):
...@@ -474,12 +481,11 @@ def p_call(s, function): ...@@ -474,12 +481,11 @@ def p_call(s, function):
args = positional_args) args = positional_args)
else: else:
arg_tuple, keyword_dict = p_call_build_packed_args( arg_tuple, keyword_dict = p_call_build_packed_args(
pos, positional_args, keyword_args, star_arg) pos, positional_args, keyword_args, star_arg, starstar_arg)
return ExprNodes.GeneralCallNode(pos, return ExprNodes.GeneralCallNode(pos,
function = function, function = function,
positional_args = arg_tuple, positional_args = arg_tuple,
keyword_args = keyword_dict, keyword_args = keyword_dict)
starstar_arg = starstar_arg)
#lambdef: 'lambda' [varargslist] ':' test #lambdef: 'lambda' [varargslist] ':' test
...@@ -1130,8 +1136,6 @@ def p_exec_statement(s): ...@@ -1130,8 +1136,6 @@ def p_exec_statement(s):
if s.sy == ',': if s.sy == ',':
s.next() s.next()
args.append(p_test(s)) args.append(p_test(s))
else:
error(pos, "'exec' currently requires a target mapping (globals/locals)")
return Nodes.ExecStatNode(pos, args = args) return Nodes.ExecStatNode(pos, args = args)
def p_del_statement(s): def p_del_statement(s):
...@@ -2738,16 +2742,13 @@ def p_ctypedef_statement(s, ctx): ...@@ -2738,16 +2742,13 @@ def p_ctypedef_statement(s, ctx):
return p_fused_definition(s, pos, ctx) return p_fused_definition(s, pos, ctx)
else: else:
base_type = p_c_base_type(s, nonempty = 1) base_type = p_c_base_type(s, nonempty = 1)
if base_type.name is None: declarator = p_c_declarator(s, ctx, is_type = 1, nonempty = 1)
s.error("Syntax error in ctypedef statement") s.expect_newline("Syntax error in ctypedef statement")
return Nodes.CTypeDefNode(
declarator = p_c_declarator(s, ctx, is_type = 1, nonempty = 1) pos, base_type = base_type,
s.expect_newline("Syntax error in ctypedef statement") declarator = declarator,
return Nodes.CTypeDefNode( visibility = visibility, api = api,
pos, base_type = base_type, in_pxd = ctx.level == 'module_pxd')
declarator = declarator,
visibility = visibility, api = api,
in_pxd = ctx.level == 'module_pxd')
def p_decorators(s): def p_decorators(s):
decorators = [] decorators = []
...@@ -2829,7 +2830,7 @@ def p_class_statement(s, decorators): ...@@ -2829,7 +2830,7 @@ def p_class_statement(s, decorators):
positional_args, keyword_args, star_arg, starstar_arg = \ positional_args, keyword_args, star_arg, starstar_arg = \
p_call_parse_args(s, allow_genexp = False) p_call_parse_args(s, allow_genexp = False)
arg_tuple, keyword_dict = p_call_build_packed_args( arg_tuple, keyword_dict = p_call_build_packed_args(
pos, positional_args, keyword_args, star_arg) pos, positional_args, keyword_args, star_arg, None)
if arg_tuple is None: if arg_tuple is None:
# XXX: empty arg_tuple # XXX: empty arg_tuple
arg_tuple = ExprNodes.TupleNode(pos, args = []) arg_tuple = ExprNodes.TupleNode(pos, args = [])
......
...@@ -240,7 +240,7 @@ def create_pxd_pipeline(context, scope, module_name): ...@@ -240,7 +240,7 @@ def create_pxd_pipeline(context, scope, module_name):
return [ return [
parse_pxd_stage_factory(context, scope, module_name) parse_pxd_stage_factory(context, scope, module_name)
] + create_pipeline(context, 'pxd') + [ ] + create_pipeline(context, 'pxd') + [
ExtractPxdCode(context) ExtractPxdCode()
] ]
def create_py_pipeline(context, options, result): def create_py_pipeline(context, options, result):
......
...@@ -1843,7 +1843,7 @@ class CClassScope(ClassScope): ...@@ -1843,7 +1843,7 @@ class CClassScope(ClassScope):
if defining and entry.func_cname: if defining and entry.func_cname:
error(pos, "'%s' already defined" % name) error(pos, "'%s' already defined" % name)
#print "CClassScope.declare_cfunction: checking signature" ### #print "CClassScope.declare_cfunction: checking signature" ###
if entry.is_final_cmethod: if entry.is_final_cmethod and entry.is_inherited:
error(pos, "Overriding final methods is not allowed") error(pos, "Overriding final methods is not allowed")
elif type.same_c_signature_as(entry.type, as_cmethod = 1) and type.nogil == entry.type.nogil: elif type.same_c_signature_as(entry.type, as_cmethod = 1) and type.nogil == entry.type.nogil:
pass pass
......
...@@ -65,6 +65,11 @@ class MarkAssignments(CythonTransform): ...@@ -65,6 +65,11 @@ class MarkAssignments(CythonTransform):
# Could use this info to infer cdef class attributes... # Could use this info to infer cdef class attributes...
pass pass
def visit_WithTargetAssignmentStatNode(self, node):
self.mark_assignment(node.lhs, node.rhs)
self.visitchildren(node)
return node
def visit_SingleAssignmentNode(self, node): def visit_SingleAssignmentNode(self, node):
self.mark_assignment(node.lhs, node.rhs) self.mark_assignment(node.lhs, node.rhs)
self.visitchildren(node) self.visitchildren(node)
......
...@@ -138,15 +138,23 @@ class ResultRefNode(AtomicExprNode): ...@@ -138,15 +138,23 @@ class ResultRefNode(AtomicExprNode):
# nothing to do here # nothing to do here
return self return self
def type_dependencies(self, env):
if self.expression:
return self.expression.type_dependencies(env)
else:
return ()
def analyse_types(self, env): def analyse_types(self, env):
if self.expression is not None: if self.expression is not None:
self.type = self.expression.type self.type = self.expression.type
def infer_type(self, env): def infer_type(self, env):
if self.expression is not None:
return self.expression.infer_type(env)
if self.type is not None: if self.type is not None:
return self.type return self.type
if self.expression is not None:
if self.expression.type is not None:
return self.expression.type
return self.expression.infer_type(env)
assert False, "cannot infer type of ResultRefNode" assert False, "cannot infer type of ResultRefNode"
def may_be_none(self): def may_be_none(self):
......
...@@ -319,18 +319,36 @@ class EnvTransform(CythonTransform): ...@@ -319,18 +319,36 @@ class EnvTransform(CythonTransform):
This transformation keeps a stack of the environments. This transformation keeps a stack of the environments.
""" """
def __call__(self, root): def __call__(self, root):
self.env_stack = [root.scope] self.env_stack = [(root, root.scope)]
return super(EnvTransform, self).__call__(root) return super(EnvTransform, self).__call__(root)
def current_env(self): def current_env(self):
return self.env_stack[-1] return self.env_stack[-1][1]
def current_scope_node(self):
return self.env_stack[-1][0]
def visit_FuncDefNode(self, node): def visit_FuncDefNode(self, node):
self.env_stack.append(node.local_scope) self.env_stack.append((node, node.local_scope))
self.visitchildren(node)
self.env_stack.pop()
return node
def visit_ClassDefNode(self, node):
self.env_stack.append((node, node.scope))
self.visitchildren(node) self.visitchildren(node)
self.env_stack.pop() self.env_stack.pop()
return node return node
def visit_ScopedExprNode(self, node):
if node.expr_scope:
self.env_stack.append((node, node.expr_scope))
self.visitchildren(node)
self.env_stack.pop()
else:
self.visitchildren(node)
return node
class RecursiveNodeReplacer(VisitorTransform): class RecursiveNodeReplacer(VisitorTransform):
""" """
......
cdef extern from "omp.h": cdef extern from "omp.h":
ctypedef struct omp_lock_t ctypedef struct omp_lock_t:
ctypedef struct omp_nest_lock_t pass
ctypedef struct omp_nest_lock_t:
pass
ctypedef enum omp_sched_t: ctypedef enum omp_sched_t:
omp_sched_static = 1, omp_sched_static = 1,
......
cimport cython cimport cython
@cython.final
cdef class Packet: cdef class Packet:
cdef public object link cdef public object link
cdef public object ident cdef public object ident
...@@ -12,20 +13,24 @@ cdef class Packet: ...@@ -12,20 +13,24 @@ cdef class Packet:
cdef class TaskRec: cdef class TaskRec:
pass pass
@cython.final
cdef class DeviceTaskRec(TaskRec): cdef class DeviceTaskRec(TaskRec):
cdef public object pending cdef public object pending
@cython.final
cdef class IdleTaskRec(TaskRec): cdef class IdleTaskRec(TaskRec):
cdef public long control cdef public long control
cdef public Py_ssize_t count cdef public Py_ssize_t count
@cython.final
cdef class HandlerTaskRec(TaskRec): cdef class HandlerTaskRec(TaskRec):
cdef public object work_in # = None cdef public object work_in # = None
cdef public object device_in # = None cdef public object device_in # = None
cpdef workInAdd(self,p) cpdef workInAdd(self, Packet p)
cpdef deviceInAdd(self,p) cpdef deviceInAdd(self, Packet p)
@cython.final
cdef class WorkerTaskRec(TaskRec): cdef class WorkerTaskRec(TaskRec):
cdef public object destination # = I_HANDLERA cdef public object destination # = I_HANDLERA
cdef public Py_ssize_t count cdef public Py_ssize_t count
...@@ -60,7 +65,7 @@ cdef class Task(TaskState): ...@@ -60,7 +65,7 @@ cdef class Task(TaskState):
cdef public object input # = w cdef public object input # = w
cdef public object handle # = r cdef public object handle # = r
cpdef addPacket(self,Packet p,old) cpdef addPacket(self,Packet p,Task old)
cpdef runTask(self) cpdef runTask(self)
cpdef waitTask(self) cpdef waitTask(self)
cpdef hold(self) cpdef hold(self)
...@@ -70,19 +75,19 @@ cdef class Task(TaskState): ...@@ -70,19 +75,19 @@ cdef class Task(TaskState):
cdef class DeviceTask(Task): cdef class DeviceTask(Task):
@cython.locals(d=DeviceTaskRec) @cython.locals(d=DeviceTaskRec)
cpdef fn(self,Packet pkt,r) cpdef fn(self,Packet pkt,DeviceTaskRec r)
cdef class HandlerTask(Task): cdef class HandlerTask(Task):
@cython.locals(h=HandlerTaskRec) @cython.locals(h=HandlerTaskRec)
cpdef fn(self,Packet pkt,r) cpdef fn(self,Packet pkt,HandlerTaskRec r)
cdef class IdleTask(Task): cdef class IdleTask(Task):
@cython.locals(i=IdleTaskRec) @cython.locals(i=IdleTaskRec)
cpdef fn(self,Packet pkt,r) cpdef fn(self,Packet pkt,IdleTaskRec r)
cdef class WorkTask(Task): cdef class WorkTask(Task):
@cython.locals(w=WorkerTaskRec) @cython.locals(w=WorkerTaskRec)
cpdef fn(self,Packet pkt,r) cpdef fn(self,Packet pkt,WorkerTaskRec r)
@cython.locals(t=Task) @cython.locals(t=Task)
cpdef schedule() cpdef schedule()
......
...@@ -2,13 +2,14 @@ Cython's entire documentation suite is currently being overhauled. ...@@ -2,13 +2,14 @@ Cython's entire documentation suite is currently being overhauled.
For the time being, I'll use this page to post notes. For the time being, I'll use this page to post notes.
The previous Cython documentation files are hosted at http://hg.cython.org/cython-docs The previous Cython documentation files are hosted at
http://hg.cython.org/cython-docs
Notes Notes
======= =======
1) Some css work should definately be done. 1) Some css work should definitely be done.
2) Use local 'top-of-page' contents rather than the sidebar, imo. 2) Use local 'top-of-page' contents rather than the sidebar, imo.
3) Provide a link from each (sub)section to the TOC of the page. 3) Provide a link from each (sub)section to the TOC of the page.
4) Fix cython highlighter for cdef blocks 4) Fix cython highlighter for cdef blocks
Background
----------
[brain dump]
The "Old Cython Users Guide" is a derivative of the old Pyrex documentation.
It underwent substantial editing by Peter Alexandar
to become the Reference Guide, which is oriented around bullet points
and lists rather than prose. This transition was incomplete.
At nearly the same time, Robert, Dag, and Stefan wrote a tutorial as
part of the SciPy proceedings. It was felt that the content there was
cleaner and more up to date than anything else, and this became the
basis for the "Getting Started" and "Tutorials" sections. However,
it simply doesn't have as much content as the old documentation used to.
Eventually, it seems all of the old users manual could be whittled
down into independent tutorial topics. Much discussion of what we'd
like to see is at
http://www.mail-archive.com/cython-dev@codespeak.net/msg06945.html
There is currently a huge amount of redundancy, but no one section has
it all.
Also, we should go through the wiki enhancement proposal list
and make sure to transfer the (done) ones into the user manual.
...@@ -420,6 +420,8 @@ Cython provides facilities for releasing the Global Interpreter Lock (GIL) ...@@ -420,6 +420,8 @@ Cython provides facilities for releasing the Global Interpreter Lock (GIL)
before calling C code, and for acquiring the GIL in functions that are to be before calling C code, and for acquiring the GIL in functions that are to be
called back from C code that is executed without the GIL. called back from C code that is executed without the GIL.
.. _nogil:
Releasing the GIL Releasing the GIL
^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^
......
...@@ -7,8 +7,11 @@ Using Parallelism ...@@ -7,8 +7,11 @@ Using Parallelism
********************************** **********************************
Cython supports native parallelism through the :py:mod:`cython.parallel` Cython supports native parallelism through the :py:mod:`cython.parallel`
module. To use this kind of parallelism, the GIL must be released. It module. To use this kind of parallelism, the GIL must be released
currently supports OpenMP, but later on more backends might be supported. (see :ref:`Releasing the GIL <nogil>`).
It currently supports OpenMP, but later on more backends might be supported.
__ nogil_
.. function:: prange([start,] stop[, step], nogil=False, schedule=None) .. function:: prange([start,] stop[, step], nogil=False, schedule=None)
...@@ -59,11 +62,11 @@ currently supports OpenMP, but later on more backends might be supported. ...@@ -59,11 +62,11 @@ currently supports OpenMP, but later on more backends might be supported.
+-----------------+------------------------------------------------------+ +-----------------+------------------------------------------------------+
The default schedule is implementation defined. For more information consult The default schedule is implementation defined. For more information consult
the OpenMP specification: [#]_. the OpenMP specification [#]_.
Example with a reduction:: Example with a reduction::
from cython.parallel import prange, parallel, threadid from cython.parallel import prange
cdef int i cdef int i
cdef int sum = 0 cdef int sum = 0
...@@ -75,7 +78,7 @@ currently supports OpenMP, but later on more backends might be supported. ...@@ -75,7 +78,7 @@ currently supports OpenMP, but later on more backends might be supported.
Example with a shared numpy array:: Example with a shared numpy array::
from cython.parallel import * from cython.parallel import prange
def func(np.ndarray[double] x, double alpha): def func(np.ndarray[double] x, double alpha):
cdef Py_ssize_t i cdef Py_ssize_t i
...@@ -94,7 +97,7 @@ currently supports OpenMP, but later on more backends might be supported. ...@@ -94,7 +97,7 @@ currently supports OpenMP, but later on more backends might be supported.
Example with thread-local buffers:: Example with thread-local buffers::
from cython.parallel import * from cython.parallel import parallel, prange
from libc.stdlib cimport abort, malloc, free from libc.stdlib cimport abort, malloc, free
cdef Py_ssize_t idx, i, n = 100 cdef Py_ssize_t idx, i, n = 100
...@@ -175,12 +178,20 @@ particular order:: ...@@ -175,12 +178,20 @@ particular order::
In the example above it is undefined whether an exception shall be raised, In the example above it is undefined whether an exception shall be raised,
whether it will simply break or whether it will return 2. whether it will simply break or whether it will return 2.
Nested Parallelism Using OpenMP Functions
================== ======================
Nested parallelism is currently disabled due to a bug in gcc 4.5 [#]_. However, OpenMP functions can be used by cimporting ``openmp``::
you can freely call functions with parallel sections from a parallel section.
from cython.parallel cimport parallel
cimport openmp
cdef int num_threads
openmp.omp_set_dynamic(1)
with nogil, parallel():
num_threads = openmp.omp_get_num_threads()
...
.. rubric:: References .. rubric:: References
.. [#] http://www.openmp.org/mp-documents/spec30.pdf .. [#] http://www.openmp.org/mp-documents/spec30.pdf
.. [#] http://gcc.gnu.org/bugzilla/show_bug.cgi?id=49897
Background
----------
[brain dump]
The "Old Cython Users Guide" is a derivative of the old Pyrex documentation. It underwent substantial editing by Peter Alexandar
to become the Reference Guide, which is oriented around bullet points
and lists rather than prose. This transition was incomplete.
At nearly the same time, Robert, Dag, and Stefan wrote a tutorial as
part of the SciPy proceedings. It was felt that the content there was
cleaner and more up to date than anything else, and this became the
basis for the "Getting Started" and "Tutorials" sections. However,
it simply doesn't have as much content as the old documentation used to.
Eventually, it seems all of the old users manual could be whittled
down into independent tutorial topics. Much discussion of what we'd
like to see is at
http://www.mail-archive.com/cython-dev@codespeak.net/msg06945.html
There is currently a huge amount of redundancy, but no one section has
it all.
Also, we should go through the wiki enhancement proposal list and make sure to transfer the (done) ones into the user manual.
...@@ -183,10 +183,15 @@ def load_module(name, pyxfilename, pyxbuild_dir=None): ...@@ -183,10 +183,15 @@ def load_module(name, pyxfilename, pyxbuild_dir=None):
so_path = build_module(name, pyxfilename, pyxbuild_dir) so_path = build_module(name, pyxfilename, pyxbuild_dir)
mod = imp.load_dynamic(name, so_path) mod = imp.load_dynamic(name, so_path)
assert mod.__file__ == so_path, (mod.__file__, so_path) assert mod.__file__ == so_path, (mod.__file__, so_path)
except Exception, e: except Exception:
import traceback if pyxargs.load_py_module_on_import_failure and pyxfilename.endswith('.py'):
raise ImportError("Building module failed: %s" % # try to fall back to normal import
traceback.format_exception_only(*sys.exc_info()[:2])),None,sys.exc_info()[2] mod = imp.load_source(name, pyxfilename)
assert mod.__file__ in (pyxfilename, pyxfilename+'c', pyxfilename+'o'), (mod.__file__, pyxfilename)
else:
import traceback
raise ImportError("Building module failed: %s" %
traceback.format_exception_only(*sys.exc_info()[:2])),None,sys.exc_info()[2]
return mod return mod
...@@ -345,7 +350,8 @@ class PyxArgs(object): ...@@ -345,7 +350,8 @@ class PyxArgs(object):
##pyxargs=None ##pyxargs=None
def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True, def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
setup_args={}, reload_support=False ): setup_args={}, reload_support=False,
load_py_module_on_import_failure=False):
"""Main entry point. Call this to install the .pyx import hook in """Main entry point. Call this to install the .pyx import hook in
your meta-path for a single Python process. If you want it to be your meta-path for a single Python process. If you want it to be
installed whenever you use Python, add it to your sitecustomize installed whenever you use Python, add it to your sitecustomize
...@@ -374,6 +380,15 @@ def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True, ...@@ -374,6 +380,15 @@ def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
reload(<pyxmodulename>), e.g. after a change in the Cython code. reload(<pyxmodulename>), e.g. after a change in the Cython code.
Additional files <so_path>.reloadNN may arise on that account, when Additional files <so_path>.reloadNN may arise on that account, when
the previously loaded module file cannot be overwritten. the previously loaded module file cannot be overwritten.
``load_py_module_on_import_failure``: If the compilation of a .py
file succeeds, but the subsequent import fails for some reason,
retry the import with the normal .py module instead of the
compiled module. Note that this may lead to unpredictable results
for modules that change the system state during their import, as
the second import will rerun these modifications in whatever state
the system was left after the import of the compiled module
failed.
""" """
if not build_dir: if not build_dir:
build_dir = os.path.expanduser('~/.pyxbld') build_dir = os.path.expanduser('~/.pyxbld')
...@@ -384,6 +399,7 @@ def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True, ...@@ -384,6 +399,7 @@ def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
pyxargs.build_in_temp = build_in_temp pyxargs.build_in_temp = build_in_temp
pyxargs.setup_args = (setup_args or {}).copy() pyxargs.setup_args = (setup_args or {}).copy()
pyxargs.reload_support = reload_support pyxargs.reload_support = reload_support
pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure
has_py_importer = False has_py_importer = False
has_pyx_importer = False has_pyx_importer = False
......
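A minimal usage sketch of the new pyximport option added above; the build directory value is illustrative (it matches the default in install()), and runtests.py below enables the option the same way.

import os
from pyximport import pyximport

pyximport.install(
    pyimport=True,                          # also hook plain .py imports
    build_dir=os.path.expanduser('~/.pyxbld'),
    load_py_module_on_import_failure=True,  # fall back to the original .py module if the compiled import fails
)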
...@@ -285,8 +285,8 @@ class ErrorWriter(object): ...@@ -285,8 +285,8 @@ class ErrorWriter(object):
class TestBuilder(object): class TestBuilder(object):
def __init__(self, rootdir, workdir, selectors, exclude_selectors, annotate, def __init__(self, rootdir, workdir, selectors, exclude_selectors, annotate,
cleanup_workdir, cleanup_sharedlibs, with_pyregr, cython_only, cleanup_workdir, cleanup_sharedlibs, cleanup_failures,
languages, test_bugs, fork, language_level): with_pyregr, cython_only, languages, test_bugs, fork, language_level):
self.rootdir = rootdir self.rootdir = rootdir
self.workdir = workdir self.workdir = workdir
self.selectors = selectors self.selectors = selectors
...@@ -294,6 +294,7 @@ class TestBuilder(object): ...@@ -294,6 +294,7 @@ class TestBuilder(object):
self.annotate = annotate self.annotate = annotate
self.cleanup_workdir = cleanup_workdir self.cleanup_workdir = cleanup_workdir
self.cleanup_sharedlibs = cleanup_sharedlibs self.cleanup_sharedlibs = cleanup_sharedlibs
self.cleanup_failures = cleanup_failures
self.with_pyregr = with_pyregr self.with_pyregr = with_pyregr
self.cython_only = cython_only self.cython_only = cython_only
self.languages = languages self.languages = languages
...@@ -410,6 +411,7 @@ class TestBuilder(object): ...@@ -410,6 +411,7 @@ class TestBuilder(object):
annotate=self.annotate, annotate=self.annotate,
cleanup_workdir=self.cleanup_workdir, cleanup_workdir=self.cleanup_workdir,
cleanup_sharedlibs=self.cleanup_sharedlibs, cleanup_sharedlibs=self.cleanup_sharedlibs,
cleanup_failures=self.cleanup_failures,
cython_only=self.cython_only, cython_only=self.cython_only,
fork=self.fork, fork=self.fork,
language_level=self.language_level, language_level=self.language_level,
...@@ -418,8 +420,8 @@ class TestBuilder(object): ...@@ -418,8 +420,8 @@ class TestBuilder(object):
class CythonCompileTestCase(unittest.TestCase): class CythonCompileTestCase(unittest.TestCase):
def __init__(self, test_directory, workdir, module, language='c', def __init__(self, test_directory, workdir, module, language='c',
expect_errors=False, annotate=False, cleanup_workdir=True, expect_errors=False, annotate=False, cleanup_workdir=True,
cleanup_sharedlibs=True, cython_only=False, fork=True, cleanup_sharedlibs=True, cleanup_failures=True, cython_only=False,
language_level=2, warning_errors=False): fork=True, language_level=2, warning_errors=False):
self.test_directory = test_directory self.test_directory = test_directory
self.workdir = workdir self.workdir = workdir
self.module = module self.module = module
...@@ -428,6 +430,7 @@ class CythonCompileTestCase(unittest.TestCase): ...@@ -428,6 +430,7 @@ class CythonCompileTestCase(unittest.TestCase):
self.annotate = annotate self.annotate = annotate
self.cleanup_workdir = cleanup_workdir self.cleanup_workdir = cleanup_workdir
self.cleanup_sharedlibs = cleanup_sharedlibs self.cleanup_sharedlibs = cleanup_sharedlibs
self.cleanup_failures = cleanup_failures
self.cython_only = cython_only self.cython_only = cython_only
self.fork = fork self.fork = fork
self.language_level = language_level self.language_level = language_level
...@@ -461,16 +464,17 @@ class CythonCompileTestCase(unittest.TestCase): ...@@ -461,16 +464,17 @@ class CythonCompileTestCase(unittest.TestCase):
del sys.modules[self.module] del sys.modules[self.module]
except KeyError: except KeyError:
pass pass
cleanup_c_files = WITH_CYTHON and self.cleanup_workdir cleanup = self.cleanup_failures or self.success
cleanup_lib_files = self.cleanup_sharedlibs cleanup_c_files = WITH_CYTHON and self.cleanup_workdir and cleanup
cleanup_lib_files = self.cleanup_sharedlibs and cleanup
if os.path.exists(self.workdir): if os.path.exists(self.workdir):
for rmfile in os.listdir(self.workdir): for rmfile in os.listdir(self.workdir):
if not cleanup_c_files: if not cleanup_c_files:
if rmfile[-2:] in (".c", ".h") or rmfile[-4:] == ".cpp": if (rmfile[-2:] in (".c", ".h") or
rmfile[-4:] == ".cpp" or
rmfile.endswith(".html")):
continue continue
if not cleanup_lib_files and rmfile.endswith(".so") or rmfile.endswith(".dll"): if not cleanup_lib_files and (rmfile.endswith(".so") or rmfile.endswith(".dll")):
continue
if self.annotate and rmfile.endswith(".html"):
continue continue
try: try:
rmfile = os.path.join(self.workdir, rmfile) rmfile = os.path.join(self.workdir, rmfile)
...@@ -484,7 +488,9 @@ class CythonCompileTestCase(unittest.TestCase): ...@@ -484,7 +488,9 @@ class CythonCompileTestCase(unittest.TestCase):
os.makedirs(self.workdir) os.makedirs(self.workdir)
def runTest(self): def runTest(self):
self.success = False
self.runCompileTest() self.runCompileTest()
self.success = True
def runCompileTest(self): def runCompileTest(self):
self.compile(self.test_directory, self.module, self.workdir, self.compile(self.test_directory, self.module, self.workdir,
...@@ -676,8 +682,13 @@ class CythonRunTestCase(CythonCompileTestCase): ...@@ -676,8 +682,13 @@ class CythonRunTestCase(CythonCompileTestCase):
try: try:
self.setUp() self.setUp()
try: try:
self.success = False
self.runCompileTest() self.runCompileTest()
failures, errors = len(result.failures), len(result.errors)
self.run_tests(result) self.run_tests(result)
if failures == len(result.failures) and errors == len(result.errors):
# No new errors...
self.success = True
finally: finally:
check_thread_termination() check_thread_termination()
except Exception: except Exception:
...@@ -1032,6 +1043,7 @@ class EndToEndTest(unittest.TestCase): ...@@ -1032,6 +1043,7 @@ class EndToEndTest(unittest.TestCase):
os.chdir(self.old_dir) os.chdir(self.old_dir)
def runTest(self): def runTest(self):
self.success = False
commands = (self.commands commands = (self.commands
.replace("CYTHON", "PYTHON %s" % os.path.join(self.cython_root, 'cython.py')) .replace("CYTHON", "PYTHON %s" % os.path.join(self.cython_root, 'cython.py'))
.replace("PYTHON", sys.executable)) .replace("PYTHON", sys.executable))
...@@ -1055,6 +1067,7 @@ class EndToEndTest(unittest.TestCase): ...@@ -1055,6 +1067,7 @@ class EndToEndTest(unittest.TestCase):
os.environ['PYTHONPATH'] = old_path os.environ['PYTHONPATH'] = old_path
else: else:
del os.environ['PYTHONPATH'] del os.environ['PYTHONPATH']
self.success = True
# TODO: Support cython_freeze needed here as well. # TODO: Support cython_freeze needed here as well.
...@@ -1278,6 +1291,9 @@ def main(): ...@@ -1278,6 +1291,9 @@ def main():
parser.add_option("--no-cleanup-sharedlibs", dest="cleanup_sharedlibs", parser.add_option("--no-cleanup-sharedlibs", dest="cleanup_sharedlibs",
action="store_false", default=True, action="store_false", default=True,
help="do not delete the generated shared libary files (allows manual module experimentation)") help="do not delete the generated shared libary files (allows manual module experimentation)")
parser.add_option("--no-cleanup-failures", dest="cleanup_failures",
action="store_false", default=True,
help="enable --no-cleanup and --no-cleanup-sharedlibs for failed tests only")
parser.add_option("--no-cython", dest="with_cython", parser.add_option("--no-cython", dest="with_cython",
action="store_false", default=True, action="store_false", default=True,
help="do not run the Cython compiler, only the C compiler") help="do not run the Cython compiler, only the C compiler")
...@@ -1354,6 +1370,8 @@ def main(): ...@@ -1354,6 +1370,8 @@ def main():
help="working directory") help="working directory")
parser.add_option("--debug", dest="for_debugging", default=False, action="store_true", parser.add_option("--debug", dest="for_debugging", default=False, action="store_true",
help="configure for easier use with a debugger (e.g. gdb)") help="configure for easier use with a debugger (e.g. gdb)")
parser.add_option("--pyximport-py", dest="pyximport_py", default=False, action="store_true",
help="use pyximport to automatically compile imported .pyx and .py files")
options, cmd_args = parser.parse_args() options, cmd_args = parser.parse_args()
...@@ -1509,7 +1527,8 @@ def main(): ...@@ -1509,7 +1527,8 @@ def main():
if options.filetests and languages: if options.filetests and languages:
filetests = TestBuilder(ROOTDIR, WORKDIR, selectors, exclude_selectors, filetests = TestBuilder(ROOTDIR, WORKDIR, selectors, exclude_selectors,
options.annotate_source, options.cleanup_workdir, options.annotate_source, options.cleanup_workdir,
options.cleanup_sharedlibs, options.pyregr, options.cleanup_sharedlibs, options.cleanup_failures,
options.pyregr,
options.cython_only, languages, test_bugs, options.cython_only, languages, test_bugs,
options.fork, options.language_level) options.fork, options.language_level)
test_suite.addTest(filetests.build_suite()) test_suite.addTest(filetests.build_suite())
...@@ -1519,7 +1538,8 @@ def main(): ...@@ -1519,7 +1538,8 @@ def main():
if os.path.isdir(sys_pyregr_dir): if os.path.isdir(sys_pyregr_dir):
filetests = TestBuilder(ROOTDIR, WORKDIR, selectors, exclude_selectors, filetests = TestBuilder(ROOTDIR, WORKDIR, selectors, exclude_selectors,
options.annotate_source, options.cleanup_workdir, options.annotate_source, options.cleanup_workdir,
options.cleanup_sharedlibs, True, options.cleanup_sharedlibs, options.cleanup_failures,
True,
options.cython_only, languages, test_bugs, options.cython_only, languages, test_bugs,
options.fork, sys.version_info[0]) options.fork, sys.version_info[0])
sys.stderr.write("Including CPython regression tests in %s\n" % sys_pyregr_dir) sys.stderr.write("Including CPython regression tests in %s\n" % sys_pyregr_dir)
...@@ -1532,6 +1552,11 @@ def main(): ...@@ -1532,6 +1552,11 @@ def main():
else: else:
test_runner = unittest.TextTestRunner(verbosity=options.verbosity) test_runner = unittest.TextTestRunner(verbosity=options.verbosity)
if options.pyximport_py:
from pyximport import pyximport
pyximport.install(pyimport=True, build_dir=os.path.join(WORKDIR, '_pyximport'),
load_py_module_on_import_failure=True)
result = test_runner.run(test_suite) result = test_runner.run(test_suite)
if options.coverage or options.coverage_xml or options.coverage_html: if options.coverage or options.coverage_xml or options.coverage_html:
......
...@@ -28,6 +28,7 @@ pyregr.test_socket ...@@ -28,6 +28,7 @@ pyregr.test_socket
pyregr.test_threading pyregr.test_threading
pyregr.test_sys pyregr.test_sys
pyregr.test_pep3131 pyregr.test_pep3131
pyregr.test_multiprocessing
# CPython regression tests that don't make sense # CPython regression tests that don't make sense
pyregr.test_gdb pyregr.test_gdb
......
...@@ -31,3 +31,7 @@ cdef char f = d.getValue2() ...@@ -31,3 +31,7 @@ cdef char f = d.getValue2()
f = e.getValue2() f = e.getValue2()
del b, e del b, e
ctypedef TemplateTest1[int] TemplateTest1_int
cdef TemplateTest1_int aa
...@@ -4,5 +4,6 @@ ...@@ -4,5 +4,6 @@
ctypedef object[float] mybuffer ctypedef object[float] mybuffer
_ERRORS = u""" _ERRORS = u"""
4:23: Syntax error in ctypedef statement 1:0: Buffer vars not allowed in module scope
4:0: Buffer types only allowed as function local variables
""" """
...@@ -40,6 +40,9 @@ cdef int[:, ::view.contiguous, ::view.indirect_contiguous] a6 ...@@ -40,6 +40,9 @@ cdef int[:, ::view.contiguous, ::view.indirect_contiguous] a6
#cdef int[::view.generic_contiguous, ::view.contiguous] a7 #cdef int[::view.generic_contiguous, ::view.contiguous] a7
#cdef int[::view.contiguous, ::view.generic_contiguous] a8 #cdef int[::view.contiguous, ::view.generic_contiguous] a8
ctypedef int *intp
cdef intp[:, :] myarray
# These are VALID # These are VALID
cdef int[::view.indirect_contiguous, ::view.contiguous] a9 cdef int[::view.indirect_contiguous, ::view.contiguous] a9
...@@ -61,4 +64,5 @@ _ERRORS = u''' ...@@ -61,4 +64,5 @@ _ERRORS = u'''
31:9: Dimension may not be contiguous 31:9: Dimension may not be contiguous
37:9: Only one direct contiguous axis may be specified. 37:9: Only one direct contiguous axis may be specified.
38:9:Only dimensions 3 and 2 may be contiguous and direct 38:9:Only dimensions 3 and 2 may be contiguous and direct
44:10: Invalid base type for memoryview slice
''' '''
# mode: error
cimport numpy as np
cdef void func(np.ndarray[np.double_t, ndim=1] myarray) nogil:
pass
_ERRORS = u"""
5:15: Buffer may not be acquired without the GIL. Consider using memoryview slices instead.
"""
...@@ -12,6 +12,15 @@ max_long_long = 2 ** (sizeof(long long) * 8 - 1) - 1 ...@@ -12,6 +12,15 @@ max_long_long = 2 ** (sizeof(long long) * 8 - 1) - 1
cimport cython cimport cython
def abs_as_name():
"""
>>> _abs = abs_as_name()
>>> _abs(-5)
5
"""
x = abs
return x
def py_abs(a): def py_abs(a):
""" """
>>> py_abs(-5) >>> py_abs(-5)
......
@@ -8,7 +8,7 @@ DEF INT_VAL = 1
def _func(a,b,c):
    return a+b+c

-@cython.test_fail_if_path_exists("//BinopNode")
+@cython.test_fail_if_path_exists("//AddNode")
def add():
    """
    >>> add() == 1+2+3+4
@@ -16,7 +16,7 @@ def add():
    """
    return 1+2+3+4

-@cython.test_fail_if_path_exists("//BinopNode")
+#@cython.test_fail_if_path_exists("//AddNode")
def add_var(a):
    """
    >>> add_var(10) == 1+2+10+3+4
@@ -24,7 +24,7 @@ def add_var(a):
    """
    return 1+2 +a+ 3+4

-@cython.test_fail_if_path_exists("//BinopNode")
+@cython.test_fail_if_path_exists("//AddNode", "//SubNode")
def neg():
    """
    >>> neg() == -1 -2 - (-3+4)
@@ -32,7 +32,7 @@ def neg():
    """
    return -1 -2 - (-3+4)

-@cython.test_fail_if_path_exists("//BinopNode")
+@cython.test_fail_if_path_exists("//AddNode", "//MulNode", "//DivNode")
def long_int_mix():
    """
    >>> long_int_mix() == 1 + (2 * 3) // 2
@@ -43,7 +43,7 @@ def long_int_mix():
    """
    return 1L + (2 * 3L) // 2

-@cython.test_fail_if_path_exists("//BinopNode")
+@cython.test_fail_if_path_exists("//AddNode", "//MulNode", "//DivNode")
def char_int_mix():
    """
    >>> char_int_mix() == 1 + (ord(' ') * 3) // 2 + ord('A')
@@ -51,7 +51,7 @@ def char_int_mix():
    """
    return 1L + (c' ' * 3L) // 2 + c'A'

-@cython.test_fail_if_path_exists("//BinopNode")
+@cython.test_fail_if_path_exists("//AddNode", "//MulNode")
def int_cast():
    """
    >>> int_cast() == 1 + 2 * 6000
@@ -59,7 +59,7 @@ def int_cast():
    """
    return <int>(1 + 2 * 6000)

-@cython.test_fail_if_path_exists("//BinopNode")
+@cython.test_fail_if_path_exists("//MulNode")
def mul():
    """
    >>> mul() == 1*60*1000
@@ -67,7 +67,7 @@ def mul():
    """
    return 1*60*1000

-@cython.test_fail_if_path_exists("//BinopNode")
+@cython.test_fail_if_path_exists("//AddNode", "//MulNode")
def arithm():
    """
    >>> arithm() == 9*2+3*8//6-10
@@ -75,7 +75,7 @@ def arithm():
    """
    return 9*2+3*8//6-10

-@cython.test_fail_if_path_exists("//BinopNode")
+@cython.test_fail_if_path_exists("//AddNode", "//MulNode")
def parameters():
    """
    >>> parameters() == _func(-1 -2, - (-3+4), 1*2*3)
@@ -83,7 +83,7 @@ def parameters():
    """
    return _func(-1 -2, - (-3+4), 1*2*3)

-@cython.test_fail_if_path_exists("//BinopNode")
+#@cython.test_fail_if_path_exists("//AddNode")
def lists():
    """
    >>> lists() == [1,2,3] + [4,5,6]
@@ -91,6 +91,176 @@ def lists():
    """
    return [1,2,3] + [4,5,6]
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_right_len1():
"""
>>> multiplied_lists_right_len1() == [1] * 5
True
"""
return [1] * 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_right():
"""
>>> multiplied_lists_right() == [1,2,3] * 5
True
"""
return [1,2,3] * 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_left():
"""
>>> multiplied_lists_left() == [1,2,3] * 5
True
"""
return 5 * [1,2,3]
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_neg():
"""
>>> multiplied_lists_neg() == [1,2,3] * -5
True
"""
return [1,2,3] * -5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_nonconst(x):
"""
>>> multiplied_lists_nonconst(5) == [1,2,3] * 5
True
>>> multiplied_lists_nonconst(-5) == [1,2,3] * -5
True
>>> multiplied_lists_nonconst(0) == [1,2,3] * 0
True
>>> [1,2,3] * 'abc' # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
>>> multiplied_nonconst_tuple_arg('abc') # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
>>> [1,2,3] * 1.0 # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
>>> multiplied_nonconst_tuple_arg(1.0) # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
"""
return [1,2,3] * x
@cython.test_assert_path_exists("//MulNode")
def multiplied_lists_nonconst_left(x):
"""
>>> multiplied_lists_nonconst_left(5) == 5 * [1,2,3]
True
>>> multiplied_lists_nonconst_left(-5) == -5 * [1,2,3]
True
>>> multiplied_lists_nonconst_left(0) == 0 * [1,2,3]
True
"""
return x * [1,2,3]
@cython.test_fail_if_path_exists("//MulNode//ListNode")
@cython.test_assert_path_exists("//MulNode")
def multiplied_lists_nonconst_expression(x):
"""
>>> multiplied_lists_nonconst_expression(5) == [1,2,3] * (5 * 2)
True
>>> multiplied_lists_nonconst_expression(-5) == [1,2,3] * (-5 * 2)
True
>>> multiplied_lists_nonconst_expression(0) == [1,2,3] * (0 * 2)
True
"""
return [1,2,3] * (x*2)
cdef side_effect(int x):
print x
return x
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_with_side_effects():
"""
>>> multiplied_lists_with_side_effects() == [1,2,3] * 5
1
2
3
True
"""
return [side_effect(1), side_effect(2), side_effect(3)] * 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_nonconst_with_side_effects(x):
"""
>>> multiplied_lists_nonconst_with_side_effects(5) == [1,2,3] * 5
1
2
3
True
"""
return [side_effect(1), side_effect(2), side_effect(3)] * x
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_nonconst_tuple_arg(x):
"""
>>> multiplied_nonconst_tuple_arg(5) == (1,2) * 5
True
>>> multiplied_nonconst_tuple_arg(-5) == (1,2) * -5
True
>>> multiplied_nonconst_tuple_arg(0) == (1,2) * 0
True
>>> (1,2) * 'abc' # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
>>> multiplied_nonconst_tuple_arg('abc') # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
>>> (1,2) * 1.0 # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
>>> multiplied_nonconst_tuple_arg(1.0) # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: can't multiply sequence by non-int...
"""
return (1,2) * x
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_nonconst_tuple(x):
"""
>>> multiplied_nonconst_tuple(5) == (1,2) * (5+1)
True
"""
return (1,2) * (x + 1)
MULT = 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_global_nonconst_tuple():
"""
>>> multiplied_global_nonconst_tuple() == (1,2,3) * 5
1
2
3
True
"""
return (side_effect(1), side_effect(2), side_effect(3)) * MULT
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_const_tuple():
"""
>>> multiplied_const_tuple() == (1,2) * 5
True
"""
return (1,2) * 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_const_tuple_len1():
"""
>>> multiplied_const_tuple_len1() == (1,) * 5
True
"""
return (1,) * 5
@cython.test_fail_if_path_exists("//PrimaryCmpNode") @cython.test_fail_if_path_exists("//PrimaryCmpNode")
def compile_time_DEF(): def compile_time_DEF():
""" """
......
# mode: run
# tags: kwargs, call
# ticket: 717

def f(**kwargs):
    return sorted(kwargs.items())

def test_call(kwargs):
    """
    >>> kwargs = {'b' : 2}
    >>> f(a=1, **kwargs)
    [('a', 1), ('b', 2)]
    >>> test_call(kwargs)
    [('a', 1), ('b', 2)]
    >>> kwargs = {'a' : 2}
    >>> f(a=1, **kwargs)
    Traceback (most recent call last):
    TypeError: f() got multiple values for keyword argument 'a'
    FIXME: remove ellipsis, fix function name
    >>> test_call(kwargs) # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...() got multiple values for keyword argument 'a'
    """
    return f(a=1, **kwargs)
# mode: run
# tags: eval

GLOBAL = 123

def eval_simple(local):
    """
    >>> eval_simple(321)
    (123, 321)
    """
    return eval('GLOBAL, local')

def eval_class_scope():
    """
    >>> eval_class_scope().c
    3
    """
    class TestClassScope:
        a = 1
        b = 2
        c = eval('a + b')
    return TestClassScope

def eval_locals(a, b):
    """
    >>> eval_locals(1, 2)
    (1, 2)
    """
    return eval('a, b', {}, locals())
# mode: run
# tags: exec

exec "GLOBAL = 1234"

def exec_module_scope():
    """
    >>> globals()['GLOBAL']
    1234
    """

def exec_func_scope():
    """
    >>> exec_func_scope()
    {'a': 'b', 'G': 1234}
    """
    d = {}
    exec "d['a'] = 'b'; d['G'] = GLOBAL"
    return d

def exec_pyclass_scope():
    """
    >>> obj = exec_pyclass_scope()
    >>> obj.a
    'b'
    >>> obj.G
    1234
    """
    class TestExec:
        exec "a = 'b'; G = GLOBAL"
    return TestExec
@@ -29,6 +29,20 @@ cdef class FinalType(object):
        self.cpdef_method()

def test_external_call():
    """
    >>> test_external_call()
    """
    f = FinalType()
    return f.cpdef_method()

def test_external_call_in_temp():
    """
    >>> test_external_call_in_temp()
    """
    return FinalType().cpdef_method()

cdef class BaseTypeWithFinalMethods(object):
    """
    >>> obj = BaseTypeWithFinalMethods()
...
# mode: run
# ticket: 734

def test_import_error():
    """
    >>> test_import_error()
    Traceback (most recent call last):
    ImportError: cannot import name xxx
    """
    from sys import xxx
# mode: run

cimport cython

@cython.final
cdef class TypedContextManager(object):
    cdef double __enter__(self): # not callable from Python !
        return 2.0
    # FIXME: inline __exit__() as well
    def __exit__(self, exc_type, exc_value, exc_tb):
        return 0

def with_statement():
    """
    >>> with_statement()
    2.0
    """
    with TypedContextManager() as x:
        return x
@@ -125,6 +125,29 @@ def index_pop_typed(list L, int i):
    """
    return L.pop(i)

@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists('//SimpleCallNode/AttributeNode')
def index_pop_literal(list L):
    """
    >>> L = list(range(10))
    >>> index_pop_literal(L)
    0
    >>> L
    [1, 2, 3, 4, 5, 6, 7, 8, 9]
    >>> while L:
    ... _ = index_pop_literal(L)
    >>> L
    []
    >>> index_pop_literal(L)
    Traceback (most recent call last):
    ...
    IndexError: pop from empty list
    """
    return L.pop(0)

@cython.test_fail_if_path_exists('//PythonCapiCallNode')
def crazy_pop(L):
...
# mode: run
# ticket: 731
# tags: locals, vars, dir

LOCALS = locals()
GLOBALS = globals()
DIR_SAME = sorted(dir()) == sorted(globals().keys())

def test_module_locals_and_dir():
    """
    >>> LOCALS is GLOBALS
    True
    >>> DIR_SAME
    True
    """

def test_class_locals_and_dir():
    """
    >>> klass = test_class_locals_and_dir()
    >>> 'visible' in klass.locs and 'not_visible' not in klass.locs
    True
    >>> klass.names
    ['visible']
    """
    not_visible = 1234
    class Foo:
        visible = 4321
        names = dir()
        locs = locals()
    return Foo
@@ -40,7 +40,7 @@ def call_non_dict_test():
    return func(**NonDict())

def call_non_dict_test_kw():
-    return func(a=5, **NonDict())
+    return func(b=5, **NonDict())

class SubDict(dict):
@@ -51,4 +51,4 @@ def call_sub_dict_test():
    return func(**SubDict())

def call_sub_dict_test_kw():
-    return func(a=5, **SubDict())
+    return func(b=5, **SubDict())
@@ -221,6 +221,24 @@ def c_functions():
    assert typeof(f) == 'int (*)(int)', typeof(f)
    assert 2 == f(1)

def builtin_functions():
    """
    >>> _abs, _getattr = builtin_functions()
    Python object
    Python object
    >>> _abs(-1)
    1
    >>> class o(object): pass
    >>> o.x = 1
    >>> _getattr(o, 'x')
    1
    """
    _abs = abs
    print(typeof(_abs))
    _getattr = getattr
    print(typeof(_getattr))
    return _abs, _getattr

def cascade():
    """
    >>> cascade()
@@ -513,6 +531,18 @@ def common_extension_type_base():
    w = CC()
    assert typeof(w) == "Python object", typeof(w)

cdef class AcceptsKeywords:
    def __init__(self, *args, **kwds):
        pass

@infer_types(None)
def constructor_call():
    """
    >>> constructor_call()
    """
    x = AcceptsKeywords(a=1, b=2)
    assert typeof(x) == "AcceptsKeywords", typeof(x)

@infer_types(None)
def large_literals():
@@ -529,6 +559,63 @@ def large_literals():
    assert typeof(d) == "Python object", typeof(d)

class EmptyContextManager(object):
    def __enter__(self):
        return None
    def __exit__(self, *args):
        return 0

def with_statement():
    """
    >>> with_statement()
    Python object
    Python object
    """
    x = 1.0
    with EmptyContextManager() as x:
        print(typeof(x))
    print(typeof(x))
    return x

@cython.final
cdef class TypedContextManager(object):
    cpdef double __enter__(self):
        return 2.0
    def __exit__(self, *args):
        return 0

def with_statement_typed():
    """
    >>> with_statement_typed()
    double
    double
    2.0
    """
    x = 1.0
    with TypedContextManager() as x:
        print(typeof(x))
    print(typeof(x))
    return x

def with_statement_untyped():
    """
    >>> with_statement_untyped()
    Python object
    Python object
    2.0
    """
    x = 1.0
    cdef object t = TypedContextManager()
    with t as x:
        print(typeof(x))
    print(typeof(x))
    return x

def self_lookup(a):
    b = a
    b = b.foo(keyword=None)
    print typeof(b)

# Regression test for trac #638.
def bar(foo):
...
@@ -181,6 +181,28 @@ def multimanager():
        print('%s %s %s %s %s' % (a, b, c, d, e))
        print(nested)

class GetManager(object):
    def get(self, *args):
        return ContextManager(*args)

def manager_from_expression():
    """
    >>> manager_from_expression()
    enter
    1
    exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
    enter
    2
    exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
    """
    with GetManager().get(1) as x:
        print(x)
    g = GetManager()
    with g.get(2) as x:
        print(x)

# Tests borrowed from pyregr test_with.py,
# modified to follow the constraints of Cython.
import unittest
...