Commit 8cd31587 authored by Mark Florisson's avatar Mark Florisson

Merge branch 'master' into fusedmerge

Conflicts:
	Cython/Compiler/MemoryView.py
	Cython/Compiler/Naming.py
	Cython/Compiler/Nodes.py
	Cython/Compiler/Parsing.py
parents d16da160 5008e863
...@@ -97,7 +97,7 @@ globals_utility_code = UtilityCode( ...@@ -97,7 +97,7 @@ globals_utility_code = UtilityCode(
# of Python names. Supporting cdef names in the module and write # of Python names. Supporting cdef names in the module and write
# access requires a rewrite as a dedicated class. # access requires a rewrite as a dedicated class.
proto = """ proto = """
static PyObject* __Pyx_Globals(); /*proto*/ static PyObject* __Pyx_Globals(void); /*proto*/
""", """,
impl = ''' impl = '''
static PyObject* __Pyx_Globals() { static PyObject* __Pyx_Globals() {
......
...@@ -317,6 +317,8 @@ class ContentHashingUtilityCode(UtilityCode): ...@@ -317,6 +317,8 @@ class ContentHashingUtilityCode(UtilityCode):
return hash((self.proto, self.impl)) return hash((self.proto, self.impl))
def __eq__(self, other): def __eq__(self, other):
if self is other:
return True
if not isinstance(other, type(self)): if not isinstance(other, type(self)):
return False return False
......
...@@ -3,7 +3,7 @@ from Cython.Compiler.ModuleNode import ModuleNode ...@@ -3,7 +3,7 @@ from Cython.Compiler.ModuleNode import ModuleNode
from Cython.Compiler.Nodes import * from Cython.Compiler.Nodes import *
from Cython.Compiler.ExprNodes import * from Cython.Compiler.ExprNodes import *
class ExtractPxdCode(CythonTransform): class ExtractPxdCode(VisitorTransform):
""" """
Finds nodes in a pxd file that should generate code, and Finds nodes in a pxd file that should generate code, and
returns them in a StatListNode. returns them in a StatListNode.
...@@ -28,3 +28,7 @@ class ExtractPxdCode(CythonTransform): ...@@ -28,3 +28,7 @@ class ExtractPxdCode(CythonTransform):
# Do not visit children, nested funcdefnodes will # Do not visit children, nested funcdefnodes will
# also be moved by this action... # also be moved by this action...
return node return node
def visit_Node(self, node):
self.visitchildren(node)
return node
...@@ -1376,8 +1376,12 @@ class NameNode(AtomicExprNode): ...@@ -1376,8 +1376,12 @@ class NameNode(AtomicExprNode):
# is used for the pointer to the type they represent. # is used for the pointer to the type they represent.
return type_type return type_type
elif self.entry.type.is_cfunction: elif self.entry.type.is_cfunction:
# special case: referring to a C function must return its pointer if self.entry.scope.is_builtin_scope:
return PyrexTypes.CPtrType(self.entry.type) # special case: optimised builtin functions must be treated as Python objects
return py_object_type
else:
# special case: referring to a C function must return its pointer
return PyrexTypes.CPtrType(self.entry.type)
else: else:
return self.entry.type return self.entry.type
...@@ -3371,6 +3375,32 @@ class CallNode(ExprNode): ...@@ -3371,6 +3375,32 @@ class CallNode(ExprNode):
# allow overriding the default 'may_be_none' behaviour # allow overriding the default 'may_be_none' behaviour
may_return_none = None may_return_none = None
def infer_type(self, env):
function = self.function
func_type = function.infer_type(env)
if isinstance(self.function, NewExprNode):
return PyrexTypes.CPtrType(self.function.class_type)
if func_type.is_ptr:
func_type = func_type.base_type
if func_type.is_cfunction:
return func_type.return_type
elif func_type is type_type:
if function.is_name and function.entry and function.entry.type:
result_type = function.entry.type
if result_type.is_extension_type:
return result_type
elif result_type.is_builtin_type:
if function.entry.name == 'float':
return PyrexTypes.c_double_type
elif function.entry.name in Builtin.types_that_construct_their_instance:
return result_type
return py_object_type
def type_dependencies(self, env):
# TODO: Update when Danilo's C++ code merged in to handle the
# the case of function overloading.
return self.function.type_dependencies(env)
def may_be_none(self): def may_be_none(self):
if self.may_return_none is not None: if self.may_return_none is not None:
return self.may_return_none return self.may_return_none
...@@ -3443,32 +3473,6 @@ class SimpleCallNode(CallNode): ...@@ -3443,32 +3473,6 @@ class SimpleCallNode(CallNode):
except Exception, e: except Exception, e:
self.compile_time_value_error(e) self.compile_time_value_error(e)
def type_dependencies(self, env):
# TODO: Update when Danilo's C++ code merged in to handle the
# the case of function overloading.
return self.function.type_dependencies(env)
def infer_type(self, env):
function = self.function
func_type = function.infer_type(env)
if isinstance(self.function, NewExprNode):
return PyrexTypes.CPtrType(self.function.class_type)
if func_type.is_ptr:
func_type = func_type.base_type
if func_type.is_cfunction:
return func_type.return_type
elif func_type is type_type:
if function.is_name and function.entry and function.entry.type:
result_type = function.entry.type
if result_type.is_extension_type:
return result_type
elif result_type.is_builtin_type:
if function.entry.name == 'float':
return PyrexTypes.c_double_type
elif function.entry.name in Builtin.types_that_construct_their_instance:
return result_type
return py_object_type
def analyse_as_type(self, env): def analyse_as_type(self, env):
attr = self.function.as_cython_attribute() attr = self.function.as_cython_attribute()
if attr == 'pointer': if attr == 'pointer':
...@@ -3881,11 +3885,10 @@ class GeneralCallNode(CallNode): ...@@ -3881,11 +3885,10 @@ class GeneralCallNode(CallNode):
# function ExprNode # function ExprNode
# positional_args ExprNode Tuple of positional arguments # positional_args ExprNode Tuple of positional arguments
# keyword_args ExprNode or None Dict of keyword arguments # keyword_args ExprNode or None Dict of keyword arguments
# starstar_arg ExprNode or None Dict of extra keyword args
type = py_object_type type = py_object_type
subexprs = ['function', 'positional_args', 'keyword_args', 'starstar_arg'] subexprs = ['function', 'positional_args', 'keyword_args']
nogil_check = Node.gil_error nogil_check = Node.gil_error
...@@ -3893,15 +3896,14 @@ class GeneralCallNode(CallNode): ...@@ -3893,15 +3896,14 @@ class GeneralCallNode(CallNode):
function = self.function.compile_time_value(denv) function = self.function.compile_time_value(denv)
positional_args = self.positional_args.compile_time_value(denv) positional_args = self.positional_args.compile_time_value(denv)
keyword_args = self.keyword_args.compile_time_value(denv) keyword_args = self.keyword_args.compile_time_value(denv)
starstar_arg = self.starstar_arg.compile_time_value(denv)
try: try:
keyword_args.update(starstar_arg)
return function(*positional_args, **keyword_args) return function(*positional_args, **keyword_args)
except Exception, e: except Exception, e:
self.compile_time_value_error(e) self.compile_time_value_error(e)
def explicit_args_kwds(self): def explicit_args_kwds(self):
if self.starstar_arg or not isinstance(self.positional_args, TupleNode): if (self.keyword_args and not isinstance(self.keyword_args, DictNode) or
not isinstance(self.positional_args, TupleNode)):
raise CompileError(self.pos, raise CompileError(self.pos,
'Compile-time keyword arguments must be explicit.') 'Compile-time keyword arguments must be explicit.')
return self.positional_args.args, self.keyword_args return self.positional_args.args, self.keyword_args
...@@ -3913,8 +3915,6 @@ class GeneralCallNode(CallNode): ...@@ -3913,8 +3915,6 @@ class GeneralCallNode(CallNode):
self.positional_args.analyse_types(env) self.positional_args.analyse_types(env)
if self.keyword_args: if self.keyword_args:
self.keyword_args.analyse_types(env) self.keyword_args.analyse_types(env)
if self.starstar_arg:
self.starstar_arg.analyse_types(env)
if not self.function.type.is_pyobject: if not self.function.type.is_pyobject:
if self.function.type.is_error: if self.function.type.is_error:
self.type = error_type self.type = error_type
...@@ -3925,9 +3925,6 @@ class GeneralCallNode(CallNode): ...@@ -3925,9 +3925,6 @@ class GeneralCallNode(CallNode):
self.function = self.function.coerce_to_pyobject(env) self.function = self.function.coerce_to_pyobject(env)
self.positional_args = \ self.positional_args = \
self.positional_args.coerce_to_pyobject(env) self.positional_args.coerce_to_pyobject(env)
if self.starstar_arg:
self.starstar_arg = \
self.starstar_arg.coerce_to_pyobject(env)
function = self.function function = self.function
if function.is_name and function.type_entry: if function.is_name and function.type_entry:
# We are calling an extension type constructor. As long # We are calling an extension type constructor. As long
...@@ -3941,37 +3938,16 @@ class GeneralCallNode(CallNode): ...@@ -3941,37 +3938,16 @@ class GeneralCallNode(CallNode):
def generate_result_code(self, code): def generate_result_code(self, code):
if self.type.is_error: return if self.type.is_error: return
kwargs_call_function = "PyEval_CallObjectWithKeywords" if self.keyword_args:
if self.keyword_args and self.starstar_arg: kwargs = self.keyword_args.py_result()
code.put_error_if_neg(self.pos,
"PyDict_Update(%s, %s)" % (
self.keyword_args.py_result(),
self.starstar_arg.py_result()))
keyword_code = self.keyword_args.py_result()
elif self.keyword_args:
keyword_code = self.keyword_args.py_result()
elif self.starstar_arg:
keyword_code = self.starstar_arg.py_result()
if self.starstar_arg.type is not Builtin.dict_type:
# CPython supports calling functions with non-dicts, so do we
code.globalstate.use_utility_code(kwargs_call_utility_code)
kwargs_call_function = "__Pyx_PyEval_CallObjectWithKeywords"
else:
keyword_code = None
if not keyword_code:
call_code = "PyObject_Call(%s, %s, NULL)" % (
self.function.py_result(),
self.positional_args.py_result())
else: else:
call_code = "%s(%s, %s, %s)" % ( kwargs = 'NULL'
kwargs_call_function,
self.function.py_result(),
self.positional_args.py_result(),
keyword_code)
code.putln( code.putln(
"%s = %s; %s" % ( "%s = PyObject_Call(%s, %s, %s); %s" % (
self.result(), self.result(),
call_code, self.function.py_result(),
self.positional_args.py_result(),
kwargs,
code.error_goto_if_null(self.result(), self.pos))) code.error_goto_if_null(self.result(), self.pos)))
code.put_gotref(self.py_result()) code.put_gotref(self.py_result())
...@@ -4526,11 +4502,13 @@ class SequenceNode(ExprNode): ...@@ -4526,11 +4502,13 @@ class SequenceNode(ExprNode):
# args [ExprNode] # args [ExprNode]
# unpacked_items [ExprNode] or None # unpacked_items [ExprNode] or None
# coerced_unpacked_items [ExprNode] or None # coerced_unpacked_items [ExprNode] or None
# mult_factor ExprNode the integer number of content repetitions ([1,2]*3)
subexprs = ['args'] subexprs = ['args', 'mult_factor']
is_sequence_constructor = 1 is_sequence_constructor = 1
unpacked_items = None unpacked_items = None
mult_factor = None
def compile_time_value_list(self, denv): def compile_time_value_list(self, denv):
return [arg.compile_time_value(denv) for arg in self.args] return [arg.compile_time_value(denv) for arg in self.args]
...@@ -4559,6 +4537,10 @@ class SequenceNode(ExprNode): ...@@ -4559,6 +4537,10 @@ class SequenceNode(ExprNode):
arg = self.args[i] arg = self.args[i]
if not skip_children: arg.analyse_types(env) if not skip_children: arg.analyse_types(env)
self.args[i] = arg.coerce_to_pyobject(env) self.args[i] = arg.coerce_to_pyobject(env)
if self.mult_factor:
self.mult_factor.analyse_types(env)
if not self.mult_factor.type.is_int:
self.mult_factor = self.mult_factor.coerce_to_pyobject(env)
self.is_temp = 1 self.is_temp = 1
# not setting self.type here, subtypes do this # not setting self.type here, subtypes do this
...@@ -4566,6 +4548,8 @@ class SequenceNode(ExprNode): ...@@ -4566,6 +4548,8 @@ class SequenceNode(ExprNode):
return False return False
def analyse_target_types(self, env): def analyse_target_types(self, env):
if self.mult_factor:
error(arg.pos, "can't assign to multiplied sequence")
self.unpacked_items = [] self.unpacked_items = []
self.coerced_unpacked_items = [] self.coerced_unpacked_items = []
self.any_coerced_items = False self.any_coerced_items = False
...@@ -4588,6 +4572,92 @@ class SequenceNode(ExprNode): ...@@ -4588,6 +4572,92 @@ class SequenceNode(ExprNode):
def generate_result_code(self, code): def generate_result_code(self, code):
self.generate_operation_code(code) self.generate_operation_code(code)
def generate_sequence_packing_code(self, code, target=None, plain=False):
if target is None:
target = self.result()
py_multiply = self.mult_factor and not self.mult_factor.type.is_int
if plain or py_multiply:
mult_factor = None
else:
mult_factor = self.mult_factor
if mult_factor:
mult = mult_factor.result()
if isinstance(mult_factor.constant_result, (int,long)) \
and mult_factor.constant_result > 0:
size_factor = ' * %s' % mult_factor.constant_result
else:
size_factor = ' * ((%s<0) ? 0:%s)' % (mult, mult)
else:
size_factor = ''
mult = ''
if self.type is Builtin.list_type:
create_func, set_item_func = 'PyList_New', 'PyList_SET_ITEM'
elif self.type is Builtin.tuple_type:
create_func, set_item_func = 'PyTuple_New', 'PyTuple_SET_ITEM'
else:
raise InternalError("sequence unpacking for unexpected type %s" % self.type)
arg_count = len(self.args)
code.putln("%s = %s(%s%s); %s" % (
target, create_func, arg_count, size_factor,
code.error_goto_if_null(target, self.pos)))
code.put_gotref(target)
if mult:
# FIXME: can't use a temp variable here as the code may
# end up in the constant building function. Temps
# currently don't work there.
#counter = code.funcstate.allocate_temp(mult_factor.type, manage_ref=False)
counter = Naming.quick_temp_cname
code.putln('{ Py_ssize_t %s;' % counter)
if arg_count == 1:
offset = counter
else:
offset = '%s * %s' % (counter, arg_count)
code.putln('for (%s=0; %s < %s; %s++) {' % (
counter, counter, mult, counter
))
else:
offset = ''
for i in xrange(arg_count):
arg = self.args[i]
if mult or not arg.result_in_temp():
code.put_incref(arg.result(), arg.ctype())
code.putln("%s(%s, %s, %s);" % (
set_item_func,
target,
(offset and i) and ('%s + %s' % (offset, i)) or (offset or i),
arg.py_result()))
code.put_giveref(arg.py_result())
if mult:
code.putln('}')
#code.funcstate.release_temp(counter)
code.putln('}')
elif py_multiply and not plain:
code.putln('{ PyObject* %s = PyNumber_InPlaceMultiply(%s, %s); %s' % (
Naming.quick_temp_cname, target, self.mult_factor.py_result(),
code.error_goto_if_null(Naming.quick_temp_cname, self.pos)
))
code.put_gotref(Naming.quick_temp_cname)
code.put_decref(target, py_object_type)
code.putln('%s = %s;' % (target, Naming.quick_temp_cname))
code.putln('}')
def generate_subexpr_disposal_code(self, code):
if self.mult_factor and self.mult_factor.type.is_int:
super(SequenceNode, self).generate_subexpr_disposal_code(code)
else:
# We call generate_post_assignment_code here instead
# of generate_disposal_code, because values were stored
# in the tuple using a reference-stealing operation.
for arg in self.args:
arg.generate_post_assignment_code(code)
# Should NOT call free_temps -- this is invoked by the default
# generate_evaluation_code which will do that.
if self.mult_factor:
self.mult_factor.generate_disposal_code(code)
def generate_assignment_code(self, rhs, code): def generate_assignment_code(self, rhs, code):
if self.starred_assignment: if self.starred_assignment:
self.generate_starred_assignment_code(rhs, code) self.generate_starred_assignment_code(rhs, code)
...@@ -4795,21 +4865,27 @@ class TupleNode(SequenceNode): ...@@ -4795,21 +4865,27 @@ class TupleNode(SequenceNode):
# Tuple constructor. # Tuple constructor.
type = tuple_type type = tuple_type
is_partly_literal = False
gil_message = "Constructing Python tuple" gil_message = "Constructing Python tuple"
def analyse_types(self, env, skip_children=False): def analyse_types(self, env, skip_children=False):
if len(self.args) == 0: if len(self.args) == 0:
self.is_temp = 0 self.is_temp = False
self.is_literal = 1 self.is_literal = True
else: else:
SequenceNode.analyse_types(self, env, skip_children) SequenceNode.analyse_types(self, env, skip_children)
for child in self.args: for child in self.args:
if not child.is_literal: if not child.is_literal:
break break
else: else:
self.is_temp = 0 if not self.mult_factor or self.mult_factor.is_literal and \
self.is_literal = 1 isinstance(self.mult_factor.constant_result, (int, long)):
self.is_temp = False
self.is_literal = True
else:
self.is_temp = True
self.is_partly_literal = True
def is_simple(self): def is_simple(self):
# either temp or constant => always simple # either temp or constant => always simple
...@@ -4840,40 +4916,28 @@ class TupleNode(SequenceNode): ...@@ -4840,40 +4916,28 @@ class TupleNode(SequenceNode):
if len(self.args) == 0: if len(self.args) == 0:
# result_code is Naming.empty_tuple # result_code is Naming.empty_tuple
return return
if self.is_literal: if self.is_partly_literal:
# underlying tuple is const, but factor is not
tuple_target = code.get_py_const(py_object_type, 'tuple_', cleanup_level=2)
const_code = code.get_cached_constants_writer()
const_code.mark_pos(self.pos)
self.generate_sequence_packing_code(const_code, tuple_target, plain=True)
const_code.put_giveref(tuple_target)
code.putln('%s = PyNumber_Multiply(%s, %s); %s' % (
self.result(), tuple_target, self.mult_factor.py_result(),
code.error_goto_if_null(self.result(), self.pos)
))
code.put_gotref(self.py_result())
elif self.is_literal:
# non-empty cached tuple => result is global constant, # non-empty cached tuple => result is global constant,
# creation code goes into separate code writer # creation code goes into separate code writer
self.result_code = code.get_py_const(py_object_type, 'tuple_', cleanup_level=2) self.result_code = code.get_py_const(py_object_type, 'tuple_', cleanup_level=2)
code = code.get_cached_constants_writer() code = code.get_cached_constants_writer()
code.mark_pos(self.pos) code.mark_pos(self.pos)
self.generate_sequence_packing_code(code)
code.putln(
"%s = PyTuple_New(%s); %s" % (
self.result(),
len(self.args),
code.error_goto_if_null(self.result(), self.pos)))
code.put_gotref(self.py_result())
for i in range(len(self.args)):
arg = self.args[i]
if not arg.result_in_temp():
code.put_incref(arg.result(), arg.ctype())
code.putln(
"PyTuple_SET_ITEM(%s, %s, %s);" % (
self.result(),
i,
arg.py_result()))
code.put_giveref(arg.py_result())
if self.is_literal:
code.put_giveref(self.py_result()) code.put_giveref(self.py_result())
else:
def generate_subexpr_disposal_code(self, code): self.generate_sequence_packing_code(code)
# We call generate_post_assignment_code here instead
# of generate_disposal_code, because values were stored
# in the tuple using a reference-stealing operation.
for arg in self.args:
arg.generate_post_assignment_code(code)
# Should NOT call free_temps -- this is invoked by the default
# generate_evaluation_code which will do that.
class ListNode(SequenceNode): class ListNode(SequenceNode):
...@@ -4912,6 +4976,8 @@ class ListNode(SequenceNode): ...@@ -4912,6 +4976,8 @@ class ListNode(SequenceNode):
self.obj_conversion_errors = [] self.obj_conversion_errors = []
if not self.type.subtype_of(dst_type): if not self.type.subtype_of(dst_type):
error(self.pos, "Cannot coerce list to type '%s'" % dst_type) error(self.pos, "Cannot coerce list to type '%s'" % dst_type)
elif self.mult_factor:
error(self.pos, "Cannot coerce multiplied list to '%s'" % dst_type)
elif dst_type.is_ptr and dst_type.base_type is not PyrexTypes.c_void_type: elif dst_type.is_ptr and dst_type.base_type is not PyrexTypes.c_void_type:
base_type = dst_type.base_type base_type = dst_type.base_type
self.type = PyrexTypes.CArrayType(base_type, len(self.args)) self.type = PyrexTypes.CArrayType(base_type, len(self.args))
...@@ -4945,31 +5011,22 @@ class ListNode(SequenceNode): ...@@ -4945,31 +5011,22 @@ class ListNode(SequenceNode):
SequenceNode.release_temp(self, env) SequenceNode.release_temp(self, env)
def calculate_constant_result(self): def calculate_constant_result(self):
if self.mult_factor:
raise ValueError() # may exceed the compile time memory
self.constant_result = [ self.constant_result = [
arg.constant_result for arg in self.args] arg.constant_result for arg in self.args]
def compile_time_value(self, denv): def compile_time_value(self, denv):
return self.compile_time_value_list(denv) l = self.compile_time_value_list(denv)
if self.mult_factor:
l *= self.mult_factor.compile_time_value(denv)
return l
def generate_operation_code(self, code): def generate_operation_code(self, code):
if self.type.is_pyobject: if self.type.is_pyobject:
for err in self.obj_conversion_errors: for err in self.obj_conversion_errors:
report_error(err) report_error(err)
code.putln("%s = PyList_New(%s); %s" % self.generate_sequence_packing_code(code)
(self.result(),
len(self.args),
code.error_goto_if_null(self.result(), self.pos)))
code.put_gotref(self.py_result())
for i in range(len(self.args)):
arg = self.args[i]
#if not arg.is_temp:
if not arg.result_in_temp():
code.put_incref(arg.result(), arg.ctype())
code.putln("PyList_SET_ITEM(%s, %s, %s);" %
(self.result(),
i,
arg.py_result()))
code.put_giveref(arg.py_result())
elif self.type.is_array: elif self.type.is_array:
for i, arg in enumerate(self.args): for i, arg in enumerate(self.args):
code.putln("%s[%s] = %s;" % ( code.putln("%s[%s] = %s;" % (
...@@ -4985,15 +5042,6 @@ class ListNode(SequenceNode): ...@@ -4985,15 +5042,6 @@ class ListNode(SequenceNode):
else: else:
raise InternalError("List type never specified") raise InternalError("List type never specified")
def generate_subexpr_disposal_code(self, code):
# We call generate_post_assignment_code here instead
# of generate_disposal_code, because values were stored
# in the list using a reference-stealing operation.
for arg in self.args:
arg.generate_post_assignment_code(code)
# Should NOT call free_temps -- this is invoked by the default
# generate_evaluation_code which will do that.
class ScopedExprNode(ExprNode): class ScopedExprNode(ExprNode):
# Abstract base class for ExprNodes that have their own local # Abstract base class for ExprNodes that have their own local
...@@ -5447,6 +5495,7 @@ class DictItemNode(ExprNode): ...@@ -5447,6 +5495,7 @@ class DictItemNode(ExprNode):
def __iter__(self): def __iter__(self):
return iter([self.key, self.value]) return iter([self.key, self.value])
class ModuleNameMixin(object): class ModuleNameMixin(object):
def set_mod_name(self, env): def set_mod_name(self, env):
self.module_name = env.global_scope().qualified_name self.module_name = env.global_scope().qualified_name
...@@ -5539,36 +5588,66 @@ class Py3ClassNode(ExprNode): ...@@ -5539,36 +5588,66 @@ class Py3ClassNode(ExprNode):
code.put_gotref(self.py_result()) code.put_gotref(self.py_result())
class KeywordArgsNode(ExprNode): class KeywordArgsNode(ExprNode):
# Helper class for keyword arguments # Helper class for keyword arguments.
# #
# keyword_args ExprNode or None Keyword arguments # starstar_arg DictNode
# starstar_arg ExprNode or None Extra arguments # keyword_args [DictItemNode]
subexprs = ['starstar_arg', 'keyword_args']
is_temp = 1
type = dict_type
def calculate_constant_result(self):
result = dict(self.starstar_arg.constant_result)
for item in self.keyword_args:
key, value = item.constant_result
if key in result:
raise ValueError("duplicate keyword argument found: %s" % key)
result[key] = value
self.constant_result = result
def compile_time_value(self, denv):
result = self.starstar_arg.compile_time_value(denv)
pairs = [ (item.key.compile_time_value(denv), item.value.compile_time_value(denv))
for item in self.keyword_args ]
try:
result = dict(result)
for key, value in pairs:
if key in result:
raise ValueError("duplicate keyword argument found: %s" % key)
result[key] = value
except Exception, e:
self.compile_time_value_error(e)
return result
subexprs = ['keyword_args', 'starstar_arg'] def type_dependencies(self, env):
return ()
def infer_type(self, env):
return dict_type
def analyse_types(self, env): def analyse_types(self, env):
if self.keyword_args: self.starstar_arg.analyse_types(env)
self.keyword_args.analyse_types(env) self.starstar_arg = self.starstar_arg.coerce_to_pyobject(env).as_none_safe_node(
if self.starstar_arg: # FIXME: CPython's error message starts with the runtime function name
self.starstar_arg.analyse_types(env) 'argument after ** must be a mapping, not NoneType')
# make sure we have a Python object as **kwargs mapping for item in self.keyword_args:
self.starstar_arg = \ item.analyse_types(env)
self.starstar_arg.coerce_to_pyobject(env)
self.type = py_object_type
self.is_temp = 1
gil_message = "Constructing Keyword Args" def may_be_none(self):
return False
def generate_result_code(self, code): gil_message = "Constructing Python dict"
if self.keyword_args and self.starstar_arg:
code.put_error_if_neg(self.pos, def generate_evaluation_code(self, code):
"PyDict_Update(%s, %s)" % ( code.mark_pos(self.pos)
self.keyword_args.py_result(), self.allocate_temp_result(code)
self.starstar_arg.py_result())) self.starstar_arg.generate_evaluation_code(code)
if self.starstar_arg.type is not Builtin.dict_type:
# CPython supports calling functions with non-dicts, so do we
code.putln('if (likely(PyDict_Check(%s))) {' %
self.starstar_arg.py_result())
if self.keyword_args: if self.keyword_args:
code.putln("%s = %s;" % (self.result(), self.keyword_args.result()))
code.put_incref(self.keyword_args.result(), self.keyword_args.ctype())
elif self.starstar_arg:
code.putln( code.putln(
"%s = PyDict_Copy(%s); %s" % ( "%s = PyDict_Copy(%s); %s" % (
self.result(), self.result(),
...@@ -5576,11 +5655,49 @@ class KeywordArgsNode(ExprNode): ...@@ -5576,11 +5655,49 @@ class KeywordArgsNode(ExprNode):
code.error_goto_if_null(self.result(), self.pos))) code.error_goto_if_null(self.result(), self.pos)))
code.put_gotref(self.py_result()) code.put_gotref(self.py_result())
else: else:
code.putln("%s = %s;" % (
self.result(),
self.starstar_arg.py_result()))
code.put_incref(self.result(), py_object_type)
if self.starstar_arg.type is not Builtin.dict_type:
code.putln('} else {')
code.putln( code.putln(
"%s = PyDict_New(); %s" % ( "%s = PyObject_CallFunctionObjArgs("
"(PyObject*)&PyDict_Type, %s, NULL); %s" % (
self.result(), self.result(),
self.starstar_arg.py_result(),
code.error_goto_if_null(self.result(), self.pos))) code.error_goto_if_null(self.result(), self.pos)))
code.put_gotref(self.py_result()) code.put_gotref(self.py_result())
code.putln('}')
self.starstar_arg.generate_disposal_code(code)
self.starstar_arg.free_temps(code)
if not self.keyword_args:
return
code.globalstate.use_utility_code(Nodes.raise_double_keywords_utility_code)
for item in self.keyword_args:
item.generate_evaluation_code(code)
code.putln("if (unlikely(PyDict_GetItem(%s, %s))) {" % (
self.result(),
item.key.py_result()))
# FIXME: find out function name at runtime!
code.putln('__Pyx_RaiseDoubleKeywordsError("function", %s); %s' % (
item.key.py_result(),
code.error_goto(self.pos)))
code.putln("}")
code.put_error_if_neg(self.pos,
"PyDict_SetItem(%s, %s, %s)" % (
self.result(),
item.key.py_result(),
item.value.py_result()))
item.generate_disposal_code(code)
item.free_temps(code)
def annotate(self, code):
self.starstar_arg.annotate(code)
for item in self.keyword_args:
item.annotate(code)
class PyClassMetaclassNode(ExprNode): class PyClassMetaclassNode(ExprNode):
# Helper class holds Python3 metaclass object # Helper class holds Python3 metaclass object
...@@ -6035,6 +6152,57 @@ class YieldExprNode(ExprNode): ...@@ -6035,6 +6152,57 @@ class YieldExprNode(ExprNode):
else: else:
code.putln(code.error_goto_if_null(Naming.sent_value_cname, self.pos)) code.putln(code.error_goto_if_null(Naming.sent_value_cname, self.pos))
class GlobalsExprNode(AtomicExprNode):
type = dict_type
is_temp = 1
def analyse_types(self, env):
env.use_utility_code(Builtin.globals_utility_code)
gil_message = "Constructing globals dict"
def generate_result_code(self, code):
code.putln('%s = __Pyx_Globals(); %s' % (
self.result(),
code.error_goto_if_null(self.result(), self.pos)))
code.put_gotref(self.result())
class FuncLocalsExprNode(DictNode):
def __init__(self, pos, env):
local_vars = [var.name for var in env.entries.values() if var.name]
items = [DictItemNode(pos, key=IdentifierStringNode(pos, value=var),
value=NameNode(pos, name=var, allow_null=True))
for var in local_vars]
DictNode.__init__(self, pos, key_value_pairs=items,
exclude_null_values=True)
class PyClassLocalsExprNode(AtomicExprNode):
def __init__(self, pos, pyclass_dict):
AtomicExprNode.__init__(self, pos)
self.pyclass_dict = pyclass_dict
def analyse_types(self, env):
self.type = self.pyclass_dict.type
self.is_tmep = 0
def result(self):
return self.pyclass_dict.result()
def generate_result_code(self, code):
pass
def LocalsExprNode(pos, scope_node, env):
if env.is_module_scope:
return GlobalsExprNode(pos)
if env.is_py_class_scope:
return PyClassLocalsExprNode(pos, scope_node.dict)
return FuncLocalsExprNode(pos, env)
#------------------------------------------------------------------- #-------------------------------------------------------------------
# #
# Unary operator nodes # Unary operator nodes
...@@ -7907,8 +8075,11 @@ static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int ...@@ -7907,8 +8075,11 @@ static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int
Py_UCS4 ch1 = PyUnicode_READ_CHAR(s1, 0); Py_UCS4 ch1 = PyUnicode_READ_CHAR(s1, 0);
Py_UCS4 ch2 = PyUnicode_READ_CHAR(s2, 0); Py_UCS4 ch2 = PyUnicode_READ_CHAR(s2, 0);
return (equals == Py_EQ) ? (ch1 == ch2) : (ch1 != ch2); return (equals == Py_EQ) ? (ch1 == ch2) : (ch1 != ch2);
} else if (PyUnicode_MAX_CHAR_VALUE(s1) != PyUnicode_MAX_CHAR_VALUE(s2)) { """
return (equals == Py_NE); ## currently disabled: may not be safe depending on who created the string
# } else if (PyUnicode_MAX_CHAR_VALUE(s1) != PyUnicode_MAX_CHAR_VALUE(s2)) {
# return (equals == Py_NE);
"""\
#else #else
if (PyUnicode_GET_SIZE(s1) != PyUnicode_GET_SIZE(s2)) { if (PyUnicode_GET_SIZE(s1) != PyUnicode_GET_SIZE(s2)) {
return (equals == Py_NE); return (equals == Py_NE);
...@@ -9374,22 +9545,29 @@ static CYTHON_INLINE PyObject *__Pyx_GetItemInt_%(type)s_Fast(PyObject *o, Py_ss ...@@ -9374,22 +9545,29 @@ static CYTHON_INLINE PyObject *__Pyx_GetItemInt_%(type)s_Fast(PyObject *o, Py_ss
__Pyx_GetItemInt_Generic(o, to_py_func(i))) __Pyx_GetItemInt_Generic(o, to_py_func(i)))
static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i) { static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i) {
PyObject *r; if (PyList_CheckExact(o)) {
if (PyList_CheckExact(o) && ((0 <= i) & (i < PyList_GET_SIZE(o)))) { Py_ssize_t n = (likely(i >= 0)) ? i : i + PyList_GET_SIZE(o);
r = PyList_GET_ITEM(o, i); if (likely((n >= 0) & (n < PyList_GET_SIZE(o)))) {
Py_INCREF(r); PyObject *r = PyList_GET_ITEM(o, n);
} Py_INCREF(r);
else if (PyTuple_CheckExact(o) && ((0 <= i) & (i < PyTuple_GET_SIZE(o)))) { return r;
r = PyTuple_GET_ITEM(o, i); }
Py_INCREF(r);
} }
else if (Py_TYPE(o)->tp_as_sequence && Py_TYPE(o)->tp_as_sequence->sq_item && (likely(i >= 0))) { else if (PyTuple_CheckExact(o)) {
r = PySequence_GetItem(o, i); Py_ssize_t n = (likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o);
if (likely((n >= 0) & (n < PyTuple_GET_SIZE(o)))) {
PyObject *r = PyTuple_GET_ITEM(o, n);
Py_INCREF(r);
return r;
}
} }
else { else if (likely(i >= 0)) {
r = __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence;
if (likely(m && m->sq_item)) {
return m->sq_item(o, i);
}
} }
return r; return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));
} }
""", """,
impl = """ impl = """
...@@ -9414,18 +9592,23 @@ static CYTHON_INLINE int __Pyx_SetItemInt_Generic(PyObject *o, PyObject *j, PyOb ...@@ -9414,18 +9592,23 @@ static CYTHON_INLINE int __Pyx_SetItemInt_Generic(PyObject *o, PyObject *j, PyOb
} }
static CYTHON_INLINE int __Pyx_SetItemInt_Fast(PyObject *o, Py_ssize_t i, PyObject *v) { static CYTHON_INLINE int __Pyx_SetItemInt_Fast(PyObject *o, Py_ssize_t i, PyObject *v) {
if (PyList_CheckExact(o) && ((0 <= i) & (i < PyList_GET_SIZE(o)))) { if (PyList_CheckExact(o)) {
Py_INCREF(v); Py_ssize_t n = (likely(i >= 0)) ? i : i + PyList_GET_SIZE(o);
Py_DECREF(PyList_GET_ITEM(o, i)); if (likely((n >= 0) & (n < PyList_GET_SIZE(o)))) {
PyList_SET_ITEM(o, i, v); PyObject* old = PyList_GET_ITEM(o, n);
return 1; Py_INCREF(v);
PyList_SET_ITEM(o, n, v);
Py_DECREF(old);
return 1;
}
} }
else if (Py_TYPE(o)->tp_as_sequence && Py_TYPE(o)->tp_as_sequence->sq_ass_item && (likely(i >= 0))) else if (likely(i >= 0)) {
return PySequence_SetItem(o, i, v); PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence;
else { if (likely(m && m->sq_ass_item)) {
PyObject *j = PyInt_FromSsize_t(i); return m->sq_ass_item(o, i, v);
return __Pyx_SetItemInt_Generic(o, j, v); }
} }
return __Pyx_SetItemInt_Generic(o, PyInt_FromSsize_t(i), v);
} }
""", """,
impl = """ impl = """
...@@ -9448,12 +9631,13 @@ static CYTHON_INLINE int __Pyx_DelItem_Generic(PyObject *o, PyObject *j) { ...@@ -9448,12 +9631,13 @@ static CYTHON_INLINE int __Pyx_DelItem_Generic(PyObject *o, PyObject *j) {
} }
static CYTHON_INLINE int __Pyx_DelItemInt_Fast(PyObject *o, Py_ssize_t i) { static CYTHON_INLINE int __Pyx_DelItemInt_Fast(PyObject *o, Py_ssize_t i) {
if (Py_TYPE(o)->tp_as_sequence && Py_TYPE(o)->tp_as_sequence->sq_ass_item && likely(i >= 0)) if (likely(i >= 0)) {
return PySequence_DelItem(o, i); PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence;
else { if (likely(m && m->sq_ass_item)) {
PyObject *j = PyInt_FromSsize_t(i); return m->sq_ass_item(o, i, (PyObject *)NULL);
return __Pyx_DelItem_Generic(o, j); }
} }
return __Pyx_DelItem_Generic(o, PyInt_FromSsize_t(i));
} }
""", """,
impl = """ impl = """
......
...@@ -925,11 +925,12 @@ class CreateControlFlowGraph(CythonTransform): ...@@ -925,11 +925,12 @@ class CreateControlFlowGraph(CythonTransform):
raise InternalError, "Generic loops are not supported" raise InternalError, "Generic loops are not supported"
def visit_WithTargetAssignmentStatNode(self, node): def visit_WithTargetAssignmentStatNode(self, node):
self.mark_assignment(node.lhs) self.mark_assignment(node.lhs, node.rhs)
return node return node
def visit_WithStatNode(self, node): def visit_WithStatNode(self, node):
self.visit(node.manager) self.visit(node.manager)
self.visit(node.enter_call)
self.visit(node.body) self.visit(node.body)
return node return node
......
...@@ -93,6 +93,7 @@ frame_cname = pyrex_prefix + "frame" ...@@ -93,6 +93,7 @@ frame_cname = pyrex_prefix + "frame"
frame_code_cname = pyrex_prefix + "frame_code" frame_code_cname = pyrex_prefix + "frame_code"
binding_cfunc = pyrex_prefix + "binding_PyCFunctionType" binding_cfunc = pyrex_prefix + "binding_PyCFunctionType"
fused_func_prefix = pyrex_prefix + 'fuse_' fused_func_prefix = pyrex_prefix + 'fuse_'
quick_temp_cname = pyrex_prefix + "temp" # temp variable for quick'n'dirty temping
genexpr_id_ref = 'genexpr' genexpr_id_ref = 'genexpr'
......
...@@ -1916,6 +1916,14 @@ class CFuncDefNode(FuncDefNode): ...@@ -1916,6 +1916,14 @@ class CFuncDefNode(FuncDefNode):
if type_arg.type.is_buffer and 'inline' in self.modifiers: if type_arg.type.is_buffer and 'inline' in self.modifiers:
warning(formal_arg.pos, "Buffer unpacking not optimized away.", 1) warning(formal_arg.pos, "Buffer unpacking not optimized away.", 1)
if type_arg.type.is_buffer:
if self.type.nogil:
error(formal_arg.pos,
"Buffer may not be acquired without the GIL. "
"Consider using memoryview slices instead.")
elif 'inline' in self.modifiers:
warning(formal_arg.pos, "Buffer unpacking not optimized away.", 1)
self._validate_type_visibility(type.return_type, self.pos, env) self._validate_type_visibility(type.return_type, self.pos, env)
name = name_declarator.name name = name_declarator.name
...@@ -3964,9 +3972,12 @@ class PyClassDefNode(ClassDefNode): ...@@ -3964,9 +3972,12 @@ class PyClassDefNode(ClassDefNode):
# find metaclass" dance at runtime # find metaclass" dance at runtime
self.metaclass = item.value self.metaclass = item.value
del keyword_args.key_value_pairs[i] del keyword_args.key_value_pairs[i]
if starstar_arg or (keyword_args and keyword_args.key_value_pairs): if starstar_arg:
self.mkw = ExprNodes.KeywordArgsNode( self.mkw = ExprNodes.KeywordArgsNode(
pos, keyword_args = keyword_args, starstar_arg = starstar_arg) pos, keyword_args = keyword_args and keyword_args.key_value_pairs or [],
starstar_arg = starstar_arg)
elif keyword_args and keyword_args.key_value_pairs:
self.mkw = keyword_args
else: else:
self.mkw = ExprNodes.NullNode(pos) self.mkw = ExprNodes.NullNode(pos)
if self.metaclass is None: if self.metaclass is None:
...@@ -5731,21 +5742,26 @@ class WithStatNode(StatNode): ...@@ -5731,21 +5742,26 @@ class WithStatNode(StatNode):
# manager The with statement manager object # manager The with statement manager object
# target ExprNode the target lhs of the __enter__() call # target ExprNode the target lhs of the __enter__() call
# body StatNode # body StatNode
# enter_call ExprNode the call to the __enter__() method
# exit_var String the cname of the __exit__() method reference
child_attrs = ["manager", "target", "body"] child_attrs = ["manager", "enter_call", "target", "body"]
has_target = False enter_call = None
def analyse_declarations(self, env): def analyse_declarations(self, env):
self.manager.analyse_declarations(env) self.manager.analyse_declarations(env)
self.enter_call.analyse_declarations(env)
self.body.analyse_declarations(env) self.body.analyse_declarations(env)
def analyse_expressions(self, env): def analyse_expressions(self, env):
self.manager.analyse_types(env) self.manager.analyse_types(env)
self.enter_call.analyse_types(env)
self.body.analyse_expressions(env) self.body.analyse_expressions(env)
def generate_function_definitions(self, env, code): def generate_function_definitions(self, env, code):
self.manager.generate_function_definitions(env, code) self.manager.generate_function_definitions(env, code)
self.enter_call.generate_function_definitions(env, code)
self.body.generate_function_definitions(env, code) self.body.generate_function_definitions(env, code)
def generate_execution_code(self, code): def generate_execution_code(self, code):
...@@ -5764,40 +5780,28 @@ class WithStatNode(StatNode): ...@@ -5764,40 +5780,28 @@ class WithStatNode(StatNode):
old_error_label = code.new_error_label() old_error_label = code.new_error_label()
intermediate_error_label = code.error_label intermediate_error_label = code.error_label
enter_func = code.funcstate.allocate_temp(py_object_type, manage_ref=True) self.enter_call.generate_evaluation_code(code)
code.putln("%s = PyObject_GetAttr(%s, %s); %s" % ( if not self.target:
enter_func, self.enter_call.generate_disposal_code(code)
self.manager.py_result(), self.enter_call.free_temps(code)
code.get_py_string_const(EncodedString('__enter__'), identifier=True), else:
code.error_goto_if_null(enter_func, self.pos), # Otherwise, the node will be cleaned up by the
)) # WithTargetAssignmentStatNode after assigning its result
code.put_gotref(enter_func) # to the target of the 'with' statement.
pass
self.manager.generate_disposal_code(code) self.manager.generate_disposal_code(code)
self.manager.free_temps(code) self.manager.free_temps(code)
self.target_temp.allocate(code)
code.putln('%s = PyObject_Call(%s, ((PyObject *)%s), NULL); %s' % (
self.target_temp.result(),
enter_func,
Naming.empty_tuple,
code.error_goto_if_null(self.target_temp.result(), self.pos),
))
code.put_gotref(self.target_temp.result())
code.put_decref_clear(enter_func, py_object_type)
code.funcstate.release_temp(enter_func)
if not self.has_target:
code.put_decref_clear(self.target_temp.result(), type=py_object_type)
self.target_temp.release(code)
# otherwise, WithTargetAssignmentStatNode will do it for us
code.error_label = old_error_label code.error_label = old_error_label
self.body.generate_execution_code(code) self.body.generate_execution_code(code)
step_over_label = code.new_label() if code.label_used(intermediate_error_label):
code.put_goto(step_over_label) step_over_label = code.new_label()
code.put_label(intermediate_error_label) code.put_goto(step_over_label)
code.put_decref_clear(self.exit_var, py_object_type) code.put_label(intermediate_error_label)
code.put_goto(old_error_label) code.put_decref_clear(self.exit_var, py_object_type)
code.put_label(step_over_label) code.put_goto(old_error_label)
code.put_label(step_over_label)
code.funcstate.release_temp(self.exit_var) code.funcstate.release_temp(self.exit_var)
code.putln('}') code.putln('}')
...@@ -5809,28 +5813,44 @@ class WithTargetAssignmentStatNode(AssignmentNode): ...@@ -5809,28 +5813,44 @@ class WithTargetAssignmentStatNode(AssignmentNode):
# This is a special cased assignment that steals the RHS reference # This is a special cased assignment that steals the RHS reference
# and frees its temp. # and frees its temp.
# #
# lhs ExprNode the assignment target # lhs ExprNode the assignment target
# rhs TempNode the return value of the __enter__() call # rhs CloneNode a (coerced) CloneNode for the orig_rhs (not owned by this node)
# orig_rhs ExprNode the original ExprNode of the rhs. this node will clean up the
# temps of the orig_rhs. basically, it takes ownership of the node
# when the WithStatNode is done with it.
child_attrs = ["lhs", "rhs"] child_attrs = ["lhs"]
def analyse_declarations(self, env): def analyse_declarations(self, env):
self.lhs.analyse_target_declaration(env) self.lhs.analyse_target_declaration(env)
def analyse_types(self, env): def analyse_expressions(self, env):
self.rhs.analyse_types(env) self.rhs.analyse_types(env)
self.lhs.analyse_target_types(env) self.lhs.analyse_target_types(env)
self.lhs.gil_assignment_check(env) self.lhs.gil_assignment_check(env)
self.orig_rhs = self.rhs
self.rhs = self.rhs.coerce_to(self.lhs.type, env) self.rhs = self.rhs.coerce_to(self.lhs.type, env)
def generate_execution_code(self, code): def generate_execution_code(self, code):
if self.orig_rhs.type.is_pyobject:
# make sure rhs gets freed on errors, see below
old_error_label = code.new_error_label()
intermediate_error_label = code.error_label
self.rhs.generate_evaluation_code(code) self.rhs.generate_evaluation_code(code)
self.lhs.generate_assignment_code(self.rhs, code) self.lhs.generate_assignment_code(self.rhs, code)
self.orig_rhs.release(code)
def generate_function_definitions(self, env, code): if self.orig_rhs.type.is_pyobject:
self.rhs.generate_function_definitions(env, code) self.orig_rhs.generate_disposal_code(code)
code.error_label = old_error_label
if code.label_used(intermediate_error_label):
step_over_label = code.new_label()
code.put_goto(step_over_label)
code.put_label(intermediate_error_label)
self.orig_rhs.generate_disposal_code(code)
code.put_goto(old_error_label)
code.put_label(step_over_label)
self.orig_rhs.free_temps(code)
def annotate(self, code): def annotate(self, code):
self.lhs.annotate(code) self.lhs.annotate(code)
...@@ -6574,6 +6594,8 @@ class FromImportStatNode(StatNode): ...@@ -6574,6 +6594,8 @@ class FromImportStatNode(StatNode):
else: else:
coerced_item = self.item.coerce_to(target.type, env) coerced_item = self.item.coerce_to(target.type, env)
self.interned_items.append((name, target, coerced_item)) self.interned_items.append((name, target, coerced_item))
if self.interned_items:
env.use_utility_code(raise_import_error_utility_code)
def generate_execution_code(self, code): def generate_execution_code(self, code):
self.module.generate_evaluation_code(code) self.module.generate_evaluation_code(code)
...@@ -6588,11 +6610,16 @@ class FromImportStatNode(StatNode): ...@@ -6588,11 +6610,16 @@ class FromImportStatNode(StatNode):
for name, target, coerced_item in self.interned_items: for name, target, coerced_item in self.interned_items:
cname = code.intern_identifier(name) cname = code.intern_identifier(name)
code.putln( code.putln(
'%s = PyObject_GetAttr(%s, %s); %s' % ( '%s = PyObject_GetAttr(%s, %s);' % (
item_temp, item_temp,
self.module.py_result(), self.module.py_result(),
cname, cname))
code.error_goto_if_null(item_temp, self.pos))) code.putln('if (%s == NULL) {' % item_temp)
code.putln(
'if (PyErr_ExceptionMatches(PyExc_AttributeError)) '
'__Pyx_RaiseImportError(%s);' % cname)
code.putln(code.error_goto_if_null(item_temp, self.pos))
code.putln('}')
code.put_gotref(item_temp) code.put_gotref(item_temp)
if coerced_item is None: if coerced_item is None:
target.generate_assignment_code(self.item, code) target.generate_assignment_code(self.item, code)
...@@ -8922,3 +8949,19 @@ init=""" ...@@ -8922,3 +8949,19 @@ init="""
memset(&%(PYX_NAN)s, 0xFF, sizeof(%(PYX_NAN)s)); memset(&%(PYX_NAN)s, 0xFF, sizeof(%(PYX_NAN)s));
""" % vars(Naming)) """ % vars(Naming))
#------------------------------------------------------------------------------------
raise_import_error_utility_code = UtilityCode(
proto = '''
static CYTHON_INLINE void __Pyx_RaiseImportError(PyObject *name);
''',
impl = '''
static CYTHON_INLINE void __Pyx_RaiseImportError(PyObject *name) {
#if PY_MAJOR_VERSION < 3
PyErr_Format(PyExc_ImportError, "cannot import name %.230s",
PyString_AsString(name));
#else
PyErr_Format(PyExc_ImportError, "cannot import name %S", name);
#endif
}
''')
...@@ -15,7 +15,7 @@ import Symtab ...@@ -15,7 +15,7 @@ import Symtab
import Options import Options
import Naming import Naming
from Code import UtilityCode from Code import UtilityCode, ContentHashingUtilityCode
from StringEncoding import EncodedString, BytesLiteral from StringEncoding import EncodedString, BytesLiteral
from Errors import error from Errors import error
from ParseTreeTransforms import SkipDeclarations from ParseTreeTransforms import SkipDeclarations
...@@ -32,6 +32,12 @@ try: ...@@ -32,6 +32,12 @@ try:
except ImportError: except ImportError:
basestring = str # Python 3 basestring = str # Python 3
_utility_cache = {}
def load_c_utility(name):
if name not in _utility_cache:
_utility_cache[name] = ContentHashingUtilityCode.load(name, "Optimize.c")
return _utility_cache[name]
class FakePythonEnv(object): class FakePythonEnv(object):
"A fake environment for creating type test nodes etc." "A fake environment for creating type test nodes etc."
nogil = False nogil = False
...@@ -1638,9 +1644,6 @@ class EarlyReplaceBuiltinCalls(Visitor.EnvTransform): ...@@ -1638,9 +1644,6 @@ class EarlyReplaceBuiltinCalls(Visitor.EnvTransform):
return node return node
if not isinstance(kwargs, ExprNodes.DictNode): if not isinstance(kwargs, ExprNodes.DictNode):
return node return node
if node.starstar_arg:
# we could optimize this by updating the kw dict instead
return node
return kwargs return kwargs
...@@ -1663,11 +1666,13 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform): ...@@ -1663,11 +1666,13 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
arg_tuple = node.positional_args arg_tuple = node.positional_args
if not isinstance(arg_tuple, ExprNodes.TupleNode): if not isinstance(arg_tuple, ExprNodes.TupleNode):
return node return node
if node.starstar_arg: keyword_args = node.keyword_args
if keyword_args and not isinstance(keyword_args, ExprNodes.DictNode):
# can't handle **kwargs
return node return node
args = arg_tuple.args args = arg_tuple.args
return self._dispatch_to_handler( return self._dispatch_to_handler(
node, function, args, node.keyword_args) node, function, args, keyword_args)
def visit_SimpleCallNode(self, node): def visit_SimpleCallNode(self, node):
self.visitchildren(node) self.visitchildren(node)
...@@ -1787,7 +1792,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform): ...@@ -1787,7 +1792,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
bound_check_node, bound_check_node,
], ],
is_temp = True, is_temp = True,
utility_code=bytes_index_utility_code) utility_code=load_c_utility('bytes_index'))
if coerce_node.type is not PyrexTypes.c_char_type: if coerce_node.type is not PyrexTypes.c_char_type:
node = node.coerce_to(coerce_node.type, env) node = node.coerce_to(coerce_node.type, env)
return node return node
...@@ -2003,7 +2008,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform): ...@@ -2003,7 +2008,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
self.PyObject_AsDouble_func_type, self.PyObject_AsDouble_func_type,
args = pos_args, args = pos_args,
is_temp = node.is_temp, is_temp = node.is_temp,
utility_code = pyobject_as_double_utility_code, utility_code = load_c_utility('pyobject_as_double'),
py_name = "float") py_name = "float")
def _handle_simple_function_bool(self, node, pos_args): def _handle_simple_function_bool(self, node, pos_args):
...@@ -2254,7 +2259,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform): ...@@ -2254,7 +2259,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
node, "__Pyx_PyDict_Clear", self.PyDict_Clear_Retval_func_type, node, "__Pyx_PyDict_Clear", self.PyDict_Clear_Retval_func_type,
'clear', is_unbound_method, args, 'clear', is_unbound_method, args,
may_return_none=True, is_temp=True, may_return_none=True, is_temp=True,
utility_code=py_dict_clear_utility_code utility_code=load_c_utility('py_dict_clear')
).coerce_to(node.type, self.current_env) ).coerce_to(node.type, self.current_env)
else: else:
return self._substitute_method_call( return self._substitute_method_call(
...@@ -2279,7 +2284,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform): ...@@ -2279,7 +2284,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
args = args, args = args,
may_return_none = True, may_return_none = True,
is_temp = node.is_temp, is_temp = node.is_temp,
utility_code = append_utility_code utility_code = load_c_utility('append')
) )
PyObject_Pop_func_type = PyrexTypes.CFuncType( PyObject_Pop_func_type = PyrexTypes.CFuncType(
...@@ -2303,19 +2308,26 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform): ...@@ -2303,19 +2308,26 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
args = args, args = args,
may_return_none = True, may_return_none = True,
is_temp = node.is_temp, is_temp = node.is_temp,
utility_code = pop_utility_code utility_code = load_c_utility('pop')
) )
elif len(args) == 2: elif len(args) == 2:
if isinstance(args[1], ExprNodes.CoerceToPyTypeNode) and args[1].arg.type.is_int: index = args[1]
original_type = args[1].arg.type if isinstance(index, ExprNodes.CoerceToPyTypeNode):
if PyrexTypes.widest_numeric_type(original_type, PyrexTypes.c_py_ssize_t_type) == PyrexTypes.c_py_ssize_t_type: index = index.arg
args[1] = args[1].arg if isinstance(index, ExprNodes.IntNode):
index = index.coerce_to(PyrexTypes.c_py_ssize_t_type, None)
if index.type.is_int:
widest = PyrexTypes.widest_numeric_type(
index.type, PyrexTypes.c_py_ssize_t_type)
if widest == PyrexTypes.c_py_ssize_t_type:
args[1] = index
return ExprNodes.PythonCapiCallNode( return ExprNodes.PythonCapiCallNode(
node.pos, "__Pyx_PyObject_PopIndex", self.PyObject_PopIndex_func_type, node.pos, "__Pyx_PyObject_PopIndex",
self.PyObject_PopIndex_func_type,
args = args, args = args,
may_return_none = True, may_return_none = True,
is_temp = node.is_temp, is_temp = node.is_temp,
utility_code = pop_index_utility_code utility_code = load_c_utility("pop_index")
) )
return node return node
...@@ -2357,7 +2369,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform): ...@@ -2357,7 +2369,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
node, "__Pyx_PyDict_GetItemDefault", self.Pyx_PyDict_GetItem_func_type, node, "__Pyx_PyDict_GetItemDefault", self.Pyx_PyDict_GetItem_func_type,
'get', is_unbound_method, args, 'get', is_unbound_method, args,
may_return_none = True, may_return_none = True,
utility_code = dict_getitem_default_utility_code) utility_code = load_c_utility("dict_getitem_default"))
Pyx_PyDict_SetDefault_func_type = PyrexTypes.CFuncType( Pyx_PyDict_SetDefault_func_type = PyrexTypes.CFuncType(
PyrexTypes.py_object_type, [ PyrexTypes.py_object_type, [
...@@ -2379,7 +2391,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform): ...@@ -2379,7 +2391,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
node, "__Pyx_PyDict_SetDefault", self.Pyx_PyDict_SetDefault_func_type, node, "__Pyx_PyDict_SetDefault", self.Pyx_PyDict_SetDefault_func_type,
'setdefault', is_unbound_method, args, 'setdefault', is_unbound_method, args,
may_return_none = True, may_return_none = True,
utility_code = dict_setdefault_utility_code) utility_code = load_c_utility('dict_setdefault'))
### unicode type methods ### unicode type methods
...@@ -2400,7 +2412,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform): ...@@ -2400,7 +2412,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
method_name = node.function.attribute method_name = node.function.attribute
if method_name == 'istitle': if method_name == 'istitle':
# istitle() doesn't directly map to Py_UNICODE_ISTITLE() # istitle() doesn't directly map to Py_UNICODE_ISTITLE()
utility_code = py_unicode_istitle_utility_code utility_code = load_c_utility("py_unicode_istitle")
function_name = '__Pyx_Py_UNICODE_ISTITLE' function_name = '__Pyx_Py_UNICODE_ISTITLE'
else: else:
utility_code = None utility_code = None
...@@ -2927,146 +2939,9 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform): ...@@ -2927,146 +2939,9 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
args[arg_index] = args[arg_index].coerce_to_boolean(self.current_env()) args[arg_index] = args[arg_index].coerce_to_boolean(self.current_env())
py_unicode_istitle_utility_code = UtilityCode( unicode_tailmatch_utility_code = load_c_utility('unicode_tailmatch')
# Py_UNICODE_ISTITLE() doesn't match unicode.istitle() as the latter
# additionally allows character that comply with Py_UNICODE_ISUPPER()
proto = '''
#if PY_VERSION_HEX < 0x030200A2
static CYTHON_INLINE int __Pyx_Py_UNICODE_ISTITLE(Py_UNICODE uchar); /* proto */
#else
static CYTHON_INLINE int __Pyx_Py_UNICODE_ISTITLE(Py_UCS4 uchar); /* proto */
#endif
''',
impl = '''
#if PY_VERSION_HEX < 0x030200A2
static CYTHON_INLINE int __Pyx_Py_UNICODE_ISTITLE(Py_UNICODE uchar) {
#else
static CYTHON_INLINE int __Pyx_Py_UNICODE_ISTITLE(Py_UCS4 uchar) {
#endif
return Py_UNICODE_ISTITLE(uchar) || Py_UNICODE_ISUPPER(uchar);
}
''')
unicode_tailmatch_utility_code = UtilityCode(
# Python's unicode.startswith() and unicode.endswith() support a
# tuple of prefixes/suffixes, whereas it's much more common to
# test for a single unicode string.
proto = '''
static int __Pyx_PyUnicode_Tailmatch(PyObject* s, PyObject* substr, \
Py_ssize_t start, Py_ssize_t end, int direction);
''',
impl = '''
static int __Pyx_PyUnicode_Tailmatch(PyObject* s, PyObject* substr,
Py_ssize_t start, Py_ssize_t end, int direction) {
if (unlikely(PyTuple_Check(substr))) {
int result;
Py_ssize_t i;
for (i = 0; i < PyTuple_GET_SIZE(substr); i++) {
result = PyUnicode_Tailmatch(s, PyTuple_GET_ITEM(substr, i),
start, end, direction);
if (result) {
return result;
}
}
return 0;
}
return PyUnicode_Tailmatch(s, substr, start, end, direction);
}
''',
)
bytes_tailmatch_utility_code = UtilityCode( bytes_tailmatch_utility_code = load_c_utility('bytes_tailmatch')
proto="""
static int __Pyx_PyBytes_Tailmatch(PyObject* self, PyObject* arg, Py_ssize_t start,
Py_ssize_t end, int direction);
""",
impl = """
static int __Pyx_PyBytes_SingleTailmatch(PyObject* self, PyObject* arg, Py_ssize_t start,
Py_ssize_t end, int direction)
{
const char* self_ptr = PyBytes_AS_STRING(self);
Py_ssize_t self_len = PyBytes_GET_SIZE(self);
const char* sub_ptr;
Py_ssize_t sub_len;
int retval;
#if PY_VERSION_HEX >= 0x02060000
Py_buffer view;
view.obj = NULL;
#endif
if ( PyBytes_Check(arg) ) {
sub_ptr = PyBytes_AS_STRING(arg);
sub_len = PyBytes_GET_SIZE(arg);
}
#if PY_MAJOR_VERSION < 3
// Python 2.x allows mixing unicode and str
else if ( PyUnicode_Check(arg) ) {
return PyUnicode_Tailmatch(self, arg, start, end, direction);
}
#endif
else {
#if PY_VERSION_HEX < 0x02060000
if (unlikely(PyObject_AsCharBuffer(arg, &sub_ptr, &sub_len)))
return -1;
#else
if (unlikely(PyObject_GetBuffer(self, &view, PyBUF_SIMPLE) == -1))
return -1;
sub_ptr = (const char*) view.buf;
sub_len = view.len;
#endif
}
if (end > self_len)
end = self_len;
else if (end < 0)
end += self_len;
if (end < 0)
end = 0;
if (start < 0)
start += self_len;
if (start < 0)
start = 0;
if (direction > 0) {
/* endswith */
if (end-sub_len > start)
start = end - sub_len;
}
if (start + sub_len <= end)
retval = !memcmp(self_ptr+start, sub_ptr, sub_len);
else
retval = 0;
#if PY_VERSION_HEX >= 0x02060000
if (view.obj)
PyBuffer_Release(&view);
#endif
return retval;
}
static int __Pyx_PyBytes_Tailmatch(PyObject* self, PyObject* substr, Py_ssize_t start,
Py_ssize_t end, int direction)
{
if (unlikely(PyTuple_Check(substr))) {
int result;
Py_ssize_t i;
for (i = 0; i < PyTuple_GET_SIZE(substr); i++) {
result = __Pyx_PyBytes_SingleTailmatch(self, PyTuple_GET_ITEM(substr, i),
start, end, direction);
if (result) {
return result;
}
}
return 0;
}
return __Pyx_PyBytes_SingleTailmatch(self, substr, start, end, direction);
}
""")
str_tailmatch_utility_code = UtilityCode( str_tailmatch_utility_code = UtilityCode(
proto = ''' proto = '''
...@@ -3090,243 +2965,6 @@ static CYTHON_INLINE int __Pyx_PyStr_Tailmatch(PyObject* self, PyObject* arg, Py ...@@ -3090,243 +2965,6 @@ static CYTHON_INLINE int __Pyx_PyStr_Tailmatch(PyObject* self, PyObject* arg, Py
requires=[unicode_tailmatch_utility_code, bytes_tailmatch_utility_code] requires=[unicode_tailmatch_utility_code, bytes_tailmatch_utility_code]
) )
dict_getitem_default_utility_code = UtilityCode(
proto = '''
static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value) {
PyObject* value;
#if PY_MAJOR_VERSION >= 3
value = PyDict_GetItemWithError(d, key);
if (unlikely(!value)) {
if (unlikely(PyErr_Occurred()))
return NULL;
value = default_value;
}
Py_INCREF(value);
#else
if (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key)) {
/* these presumably have safe hash functions */
value = PyDict_GetItem(d, key);
if (unlikely(!value)) {
value = default_value;
}
Py_INCREF(value);
} else {
PyObject *m;
m = __Pyx_GetAttrString(d, "get");
if (!m) return NULL;
value = PyObject_CallFunctionObjArgs(m, key,
(default_value == Py_None) ? NULL : default_value, NULL);
Py_DECREF(m);
}
#endif
return value;
}
''',
impl = ""
)
dict_setdefault_utility_code = UtilityCode(
proto = """
static PyObject *__Pyx_PyDict_SetDefault(PyObject *, PyObject *, PyObject *); /*proto*/
""",
impl = '''
static PyObject *__Pyx_PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *default_value) {
PyObject* value;
#if PY_MAJOR_VERSION >= 3
value = PyDict_GetItemWithError(d, key);
if (unlikely(!value)) {
if (unlikely(PyErr_Occurred()))
return NULL;
if (unlikely(PyDict_SetItem(d, key, default_value) == -1))
return NULL;
value = default_value;
}
Py_INCREF(value);
#else
if (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key)) {
/* these presumably have safe hash functions */
value = PyDict_GetItem(d, key);
if (unlikely(!value)) {
if (unlikely(PyDict_SetItem(d, key, default_value) == -1))
return NULL;
value = default_value;
}
Py_INCREF(value);
} else {
PyObject *m;
m = __Pyx_GetAttrString(d, "setdefault");
if (!m) return NULL;
value = PyObject_CallFunctionObjArgs(m, key, default_value, NULL);
Py_DECREF(m);
}
#endif
return value;
}
''')
append_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE PyObject* __Pyx_PyObject_Append(PyObject* L, PyObject* x) {
if (likely(PyList_CheckExact(L))) {
if (PyList_Append(L, x) < 0) return NULL;
Py_INCREF(Py_None);
return Py_None; /* this is just to have an accurate signature */
}
else {
PyObject *r, *m;
m = __Pyx_GetAttrString(L, "append");
if (!m) return NULL;
r = PyObject_CallFunctionObjArgs(m, x, NULL);
Py_DECREF(m);
return r;
}
}
""",
impl = ""
)
pop_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE PyObject* __Pyx_PyObject_Pop(PyObject* L) {
#if PY_VERSION_HEX >= 0x02040000
if (likely(PyList_CheckExact(L))
/* Check that both the size is positive and no reallocation shrinking needs to be done. */
&& likely(PyList_GET_SIZE(L) > (((PyListObject*)L)->allocated >> 1))) {
Py_SIZE(L) -= 1;
return PyList_GET_ITEM(L, PyList_GET_SIZE(L));
}
#if PY_VERSION_HEX >= 0x02050000
else if (Py_TYPE(L) == (&PySet_Type)) {
return PySet_Pop(L);
}
#endif
#endif
return PyObject_CallMethod(L, (char*)"pop", NULL);
}
""",
impl = ""
)
pop_index_utility_code = UtilityCode(
proto = """
static PyObject* __Pyx_PyObject_PopIndex(PyObject* L, Py_ssize_t ix);
""",
impl = """
static PyObject* __Pyx_PyObject_PopIndex(PyObject* L, Py_ssize_t ix) {
PyObject *r, *m, *t, *py_ix;
#if PY_VERSION_HEX >= 0x02040000
if (likely(PyList_CheckExact(L))) {
Py_ssize_t size = PyList_GET_SIZE(L);
if (likely(size > (((PyListObject*)L)->allocated >> 1))) {
if (ix < 0) {
ix += size;
}
if (likely(0 <= ix && ix < size)) {
Py_ssize_t i;
PyObject* v = PyList_GET_ITEM(L, ix);
Py_SIZE(L) -= 1;
size -= 1;
for(i=ix; i<size; i++) {
PyList_SET_ITEM(L, i, PyList_GET_ITEM(L, i+1));
}
return v;
}
}
}
#endif
py_ix = t = NULL;
m = __Pyx_GetAttrString(L, "pop");
if (!m) goto bad;
py_ix = PyInt_FromSsize_t(ix);
if (!py_ix) goto bad;
t = PyTuple_New(1);
if (!t) goto bad;
PyTuple_SET_ITEM(t, 0, py_ix);
py_ix = NULL;
r = PyObject_CallObject(m, t);
Py_DECREF(m);
Py_DECREF(t);
return r;
bad:
Py_XDECREF(m);
Py_XDECREF(t);
Py_XDECREF(py_ix);
return NULL;
}
"""
)
py_dict_clear_utility_code = UtilityCode(
proto = '''
static CYTHON_INLINE PyObject* __Pyx_PyDict_Clear(PyObject* d) {
PyDict_Clear(d);
Py_INCREF(Py_None);
return Py_None;
}
''')
pyobject_as_double_utility_code = UtilityCode(
proto = '''
static double __Pyx__PyObject_AsDouble(PyObject* obj); /* proto */
#define __Pyx_PyObject_AsDouble(obj) \\
((likely(PyFloat_CheckExact(obj))) ? \\
PyFloat_AS_DOUBLE(obj) : __Pyx__PyObject_AsDouble(obj))
''',
impl='''
static double __Pyx__PyObject_AsDouble(PyObject* obj) {
PyObject* float_value;
if (Py_TYPE(obj)->tp_as_number && Py_TYPE(obj)->tp_as_number->nb_float) {
return PyFloat_AsDouble(obj);
} else if (PyUnicode_CheckExact(obj) || PyBytes_CheckExact(obj)) {
#if PY_MAJOR_VERSION >= 3
float_value = PyFloat_FromString(obj);
#else
float_value = PyFloat_FromString(obj, 0);
#endif
} else {
PyObject* args = PyTuple_New(1);
if (unlikely(!args)) goto bad;
PyTuple_SET_ITEM(args, 0, obj);
float_value = PyObject_Call((PyObject*)&PyFloat_Type, args, 0);
PyTuple_SET_ITEM(args, 0, 0);
Py_DECREF(args);
}
if (likely(float_value)) {
double value = PyFloat_AS_DOUBLE(float_value);
Py_DECREF(float_value);
return value;
}
bad:
return (double)-1;
}
'''
)
bytes_index_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE char __Pyx_PyBytes_GetItemInt(PyObject* unicode, Py_ssize_t index, int check_bounds); /* proto */
""",
impl = """
static CYTHON_INLINE char __Pyx_PyBytes_GetItemInt(PyObject* bytes, Py_ssize_t index, int check_bounds) {
if (check_bounds) {
if (unlikely(index >= PyBytes_GET_SIZE(bytes)) |
((index < 0) & unlikely(index < -PyBytes_GET_SIZE(bytes)))) {
PyErr_Format(PyExc_IndexError, "string index out of range");
return -1;
}
}
if (index < 0)
index += PyBytes_GET_SIZE(bytes);
return PyBytes_AS_STRING(bytes)[index];
}
"""
)
tpnew_utility_code = UtilityCode( tpnew_utility_code = UtilityCode(
proto = """ proto = """
...@@ -3512,6 +3150,26 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations): ...@@ -3512,6 +3150,26 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
constant_result = node.constant_result) constant_result = node.constant_result)
return new_node return new_node
def visit_MulNode(self, node):
if isinstance(node.operand1, (ExprNodes.ListNode, ExprNodes.TupleNode)):
sequence_node = node.operand1
factor = node.operand2
self._calculate_const(factor)
if factor.constant_result != 1:
sequence_node.mult_factor = factor
self.visitchildren(sequence_node)
return sequence_node
if isinstance(node.operand1, ExprNodes.IntNode) and \
isinstance(node.operand2, (ExprNodes.ListNode, ExprNodes.TupleNode)):
sequence_node = node.operand2
factor = node.operand1
self._calculate_const(factor)
if factor.constant_result != 1:
sequence_node.mult_factor = factor
self.visitchildren(sequence_node)
return sequence_node
return self.visit_BinopNode(node)
def visit_PrimaryCmpNode(self, node): def visit_PrimaryCmpNode(self, node):
self._calculate_const(node) self._calculate_const(node)
if node.constant_result is ExprNodes.not_a_constant: if node.constant_result is ExprNodes.not_a_constant:
......
...@@ -1165,16 +1165,20 @@ class WithTransform(CythonTransform, SkipDeclarations): ...@@ -1165,16 +1165,20 @@ class WithTransform(CythonTransform, SkipDeclarations):
self.visitchildren(node, 'body') self.visitchildren(node, 'body')
pos = node.pos pos = node.pos
body, target, manager = node.body, node.target, node.manager body, target, manager = node.body, node.target, node.manager
node.target_temp = ExprNodes.TempNode(pos, type=PyrexTypes.py_object_type) node.enter_call = ExprNodes.SimpleCallNode(
pos, function = ExprNodes.AttributeNode(
pos, obj = ExprNodes.CloneNode(manager),
attribute = EncodedString('__enter__')),
args = [],
is_temp = True)
if target is not None: if target is not None:
node.has_target = True
body = Nodes.StatListNode( body = Nodes.StatListNode(
pos, stats = [ pos, stats = [
Nodes.WithTargetAssignmentStatNode( Nodes.WithTargetAssignmentStatNode(
pos, lhs = target, rhs = node.target_temp), pos, lhs = target,
body rhs = ResultRefNode(node.enter_call),
]) orig_rhs = node.enter_call),
node.target = None body])
excinfo_target = ResultRefNode( excinfo_target = ResultRefNode(
pos=pos, type=Builtin.tuple_type, may_hold_none=False) pos=pos, type=Builtin.tuple_type, may_hold_none=False)
...@@ -2334,6 +2338,17 @@ class TransformBuiltinMethods(EnvTransform): ...@@ -2334,6 +2338,17 @@ class TransformBuiltinMethods(EnvTransform):
error(node.pos, u"'%s' not a valid cython attribute or is being used incorrectly" % attribute) error(node.pos, u"'%s' not a valid cython attribute or is being used incorrectly" % attribute)
return node return node
def visit_ExecStatNode(self, node):
lenv = self.current_env()
self.visitchildren(node)
if len(node.args) == 1:
node.args.append(ExprNodes.GlobalsExprNode(node.pos))
if not lenv.is_module_scope:
node.args.append(
ExprNodes.LocalsExprNode(
node.pos, self.current_scope_node(), lenv))
return node
def _inject_locals(self, node, func_name): def _inject_locals(self, node, func_name):
# locals()/dir()/vars() builtins # locals()/dir()/vars() builtins
lenv = self.current_env() lenv = self.current_env()
...@@ -2342,7 +2357,6 @@ class TransformBuiltinMethods(EnvTransform): ...@@ -2342,7 +2357,6 @@ class TransformBuiltinMethods(EnvTransform):
# not the builtin # not the builtin
return node return node
pos = node.pos pos = node.pos
local_names = [ var.name for var in lenv.entries.values() if var.name ]
if func_name in ('locals', 'vars'): if func_name in ('locals', 'vars'):
if func_name == 'locals' and len(node.args) > 0: if func_name == 'locals' and len(node.args) > 0:
error(self.pos, "Builtin 'locals()' called with wrong number of args, expected 0, got %d" error(self.pos, "Builtin 'locals()' called with wrong number of args, expected 0, got %d"
...@@ -2354,11 +2368,7 @@ class TransformBuiltinMethods(EnvTransform): ...@@ -2354,11 +2368,7 @@ class TransformBuiltinMethods(EnvTransform):
% len(node.args)) % len(node.args))
if len(node.args) > 0: if len(node.args) > 0:
return node # nothing to do return node # nothing to do
items = [ ExprNodes.DictItemNode(pos, return ExprNodes.LocalsExprNode(pos, self.current_scope_node(), lenv)
key=ExprNodes.IdentifierStringNode(pos, value=var),
value=ExprNodes.NameNode(pos, name=var, allow_null=True))
for var in local_names ]
return ExprNodes.DictNode(pos, key_value_pairs=items, exclude_null_values=True)
else: # dir() else: # dir()
if len(node.args) > 1: if len(node.args) > 1:
error(self.pos, "Builtin 'dir()' called with wrong number of args, expected 0-1, got %d" error(self.pos, "Builtin 'dir()' called with wrong number of args, expected 0-1, got %d"
...@@ -2366,16 +2376,36 @@ class TransformBuiltinMethods(EnvTransform): ...@@ -2366,16 +2376,36 @@ class TransformBuiltinMethods(EnvTransform):
if len(node.args) > 0: if len(node.args) > 0:
# optimised in Builtin.py # optimised in Builtin.py
return node return node
if lenv.is_py_class_scope or lenv.is_module_scope:
if lenv.is_py_class_scope:
pyclass = self.current_scope_node()
locals_dict = ExprNodes.CloneNode(pyclass.dict)
else:
locals_dict = ExprNodes.GlobalsExprNode(pos)
return ExprNodes.SimpleCallNode(
pos,
function=ExprNodes.AttributeNode(
pos, obj=locals_dict, attribute="keys"),
args=[])
local_names = [ var.name for var in lenv.entries.values() if var.name ]
items = [ ExprNodes.IdentifierStringNode(pos, value=var) items = [ ExprNodes.IdentifierStringNode(pos, value=var)
for var in local_names ] for var in local_names ]
return ExprNodes.ListNode(pos, args=items) return ExprNodes.ListNode(pos, args=items)
def visit_SimpleCallNode(self, node): def _inject_eval(self, node, func_name):
if isinstance(node.function, ExprNodes.NameNode): lenv = self.current_env()
func_name = node.function.name entry = lenv.lookup_here(func_name)
if func_name in ('dir', 'locals', 'vars'): if entry or len(node.args) != 1:
return self._inject_locals(node, func_name) return node
# Inject globals and locals
node.args.append(ExprNodes.GlobalsExprNode(node.pos))
if not lenv.is_module_scope:
node.args.append(
ExprNodes.LocalsExprNode(
node.pos, self.current_scope_node(), lenv))
return node
def visit_SimpleCallNode(self, node):
# cython.foo # cython.foo
function = node.function.as_cython_attribute() function = node.function.as_cython_attribute()
if function: if function:
...@@ -2428,6 +2458,13 @@ class TransformBuiltinMethods(EnvTransform): ...@@ -2428,6 +2458,13 @@ class TransformBuiltinMethods(EnvTransform):
u"'%s' not a valid cython language construct" % function) u"'%s' not a valid cython language construct" % function)
self.visitchildren(node) self.visitchildren(node)
if isinstance(node, ExprNodes.SimpleCallNode) and node.function.is_name:
func_name = node.function.name
if func_name in ('dir', 'locals', 'vars'):
return self._inject_locals(node, func_name)
if func_name == 'eval':
return self._inject_eval(node, func_name)
return node return node
......
...@@ -46,7 +46,7 @@ cdef p_power(PyrexScanner s) ...@@ -46,7 +46,7 @@ cdef p_power(PyrexScanner s)
cdef p_new_expr(PyrexScanner s) cdef p_new_expr(PyrexScanner s)
cdef p_trailer(PyrexScanner s, node1) cdef p_trailer(PyrexScanner s, node1)
cpdef p_call_parse_args(PyrexScanner s, bint allow_genexp = *) cpdef p_call_parse_args(PyrexScanner s, bint allow_genexp = *)
cdef p_call_build_packed_args(pos, positional_args, keyword_args, star_arg) cdef p_call_build_packed_args(pos, positional_args, keyword_args, star_arg, starstar_arg)
cdef p_call(PyrexScanner s, function) cdef p_call(PyrexScanner s, function)
cdef p_index(PyrexScanner s, base) cdef p_index(PyrexScanner s, base)
cdef p_subscript_list(PyrexScanner s) cdef p_subscript_list(PyrexScanner s)
......
...@@ -440,7 +440,8 @@ def p_call_parse_args(s, allow_genexp = True): ...@@ -440,7 +440,8 @@ def p_call_parse_args(s, allow_genexp = True):
s.expect(')') s.expect(')')
return positional_args, keyword_args, star_arg, starstar_arg return positional_args, keyword_args, star_arg, starstar_arg
def p_call_build_packed_args(pos, positional_args, keyword_args, star_arg): def p_call_build_packed_args(pos, positional_args, keyword_args,
star_arg, starstar_arg):
arg_tuple = None arg_tuple = None
keyword_dict = None keyword_dict = None
if positional_args or not star_arg: if positional_args or not star_arg:
...@@ -454,11 +455,17 @@ def p_call_build_packed_args(pos, positional_args, keyword_args, star_arg): ...@@ -454,11 +455,17 @@ def p_call_build_packed_args(pos, positional_args, keyword_args, star_arg):
operand2 = star_arg_tuple) operand2 = star_arg_tuple)
else: else:
arg_tuple = star_arg_tuple arg_tuple = star_arg_tuple
if keyword_args: if keyword_args or starstar_arg:
keyword_args = [ExprNodes.DictItemNode(pos=key.pos, key=key, value=value) keyword_args = [ExprNodes.DictItemNode(pos=key.pos, key=key, value=value)
for key, value in keyword_args] for key, value in keyword_args]
keyword_dict = ExprNodes.DictNode(pos, if starstar_arg:
key_value_pairs = keyword_args) keyword_dict = ExprNodes.KeywordArgsNode(
pos,
starstar_arg = starstar_arg,
keyword_args = keyword_args)
else:
keyword_dict = ExprNodes.DictNode(
pos, key_value_pairs = keyword_args)
return arg_tuple, keyword_dict return arg_tuple, keyword_dict
def p_call(s, function): def p_call(s, function):
...@@ -474,12 +481,11 @@ def p_call(s, function): ...@@ -474,12 +481,11 @@ def p_call(s, function):
args = positional_args) args = positional_args)
else: else:
arg_tuple, keyword_dict = p_call_build_packed_args( arg_tuple, keyword_dict = p_call_build_packed_args(
pos, positional_args, keyword_args, star_arg) pos, positional_args, keyword_args, star_arg, starstar_arg)
return ExprNodes.GeneralCallNode(pos, return ExprNodes.GeneralCallNode(pos,
function = function, function = function,
positional_args = arg_tuple, positional_args = arg_tuple,
keyword_args = keyword_dict, keyword_args = keyword_dict)
starstar_arg = starstar_arg)
#lambdef: 'lambda' [varargslist] ':' test #lambdef: 'lambda' [varargslist] ':' test
...@@ -1130,8 +1136,6 @@ def p_exec_statement(s): ...@@ -1130,8 +1136,6 @@ def p_exec_statement(s):
if s.sy == ',': if s.sy == ',':
s.next() s.next()
args.append(p_test(s)) args.append(p_test(s))
else:
error(pos, "'exec' currently requires a target mapping (globals/locals)")
return Nodes.ExecStatNode(pos, args = args) return Nodes.ExecStatNode(pos, args = args)
def p_del_statement(s): def p_del_statement(s):
...@@ -2738,16 +2742,13 @@ def p_ctypedef_statement(s, ctx): ...@@ -2738,16 +2742,13 @@ def p_ctypedef_statement(s, ctx):
return p_fused_definition(s, pos, ctx) return p_fused_definition(s, pos, ctx)
else: else:
base_type = p_c_base_type(s, nonempty = 1) base_type = p_c_base_type(s, nonempty = 1)
if base_type.name is None: declarator = p_c_declarator(s, ctx, is_type = 1, nonempty = 1)
s.error("Syntax error in ctypedef statement") s.expect_newline("Syntax error in ctypedef statement")
return Nodes.CTypeDefNode(
declarator = p_c_declarator(s, ctx, is_type = 1, nonempty = 1) pos, base_type = base_type,
s.expect_newline("Syntax error in ctypedef statement") declarator = declarator,
return Nodes.CTypeDefNode( visibility = visibility, api = api,
pos, base_type = base_type, in_pxd = ctx.level == 'module_pxd')
declarator = declarator,
visibility = visibility, api = api,
in_pxd = ctx.level == 'module_pxd')
def p_decorators(s): def p_decorators(s):
decorators = [] decorators = []
...@@ -2829,7 +2830,7 @@ def p_class_statement(s, decorators): ...@@ -2829,7 +2830,7 @@ def p_class_statement(s, decorators):
positional_args, keyword_args, star_arg, starstar_arg = \ positional_args, keyword_args, star_arg, starstar_arg = \
p_call_parse_args(s, allow_genexp = False) p_call_parse_args(s, allow_genexp = False)
arg_tuple, keyword_dict = p_call_build_packed_args( arg_tuple, keyword_dict = p_call_build_packed_args(
pos, positional_args, keyword_args, star_arg) pos, positional_args, keyword_args, star_arg, None)
if arg_tuple is None: if arg_tuple is None:
# XXX: empty arg_tuple # XXX: empty arg_tuple
arg_tuple = ExprNodes.TupleNode(pos, args = []) arg_tuple = ExprNodes.TupleNode(pos, args = [])
......
...@@ -240,7 +240,7 @@ def create_pxd_pipeline(context, scope, module_name): ...@@ -240,7 +240,7 @@ def create_pxd_pipeline(context, scope, module_name):
return [ return [
parse_pxd_stage_factory(context, scope, module_name) parse_pxd_stage_factory(context, scope, module_name)
] + create_pipeline(context, 'pxd') + [ ] + create_pipeline(context, 'pxd') + [
ExtractPxdCode(context) ExtractPxdCode()
] ]
def create_py_pipeline(context, options, result): def create_py_pipeline(context, options, result):
......
...@@ -1843,7 +1843,7 @@ class CClassScope(ClassScope): ...@@ -1843,7 +1843,7 @@ class CClassScope(ClassScope):
if defining and entry.func_cname: if defining and entry.func_cname:
error(pos, "'%s' already defined" % name) error(pos, "'%s' already defined" % name)
#print "CClassScope.declare_cfunction: checking signature" ### #print "CClassScope.declare_cfunction: checking signature" ###
if entry.is_final_cmethod: if entry.is_final_cmethod and entry.is_inherited:
error(pos, "Overriding final methods is not allowed") error(pos, "Overriding final methods is not allowed")
elif type.same_c_signature_as(entry.type, as_cmethod = 1) and type.nogil == entry.type.nogil: elif type.same_c_signature_as(entry.type, as_cmethod = 1) and type.nogil == entry.type.nogil:
pass pass
......
...@@ -65,6 +65,11 @@ class MarkAssignments(CythonTransform): ...@@ -65,6 +65,11 @@ class MarkAssignments(CythonTransform):
# Could use this info to infer cdef class attributes... # Could use this info to infer cdef class attributes...
pass pass
def visit_WithTargetAssignmentStatNode(self, node):
self.mark_assignment(node.lhs, node.rhs)
self.visitchildren(node)
return node
def visit_SingleAssignmentNode(self, node): def visit_SingleAssignmentNode(self, node):
self.mark_assignment(node.lhs, node.rhs) self.mark_assignment(node.lhs, node.rhs)
self.visitchildren(node) self.visitchildren(node)
......
...@@ -138,15 +138,23 @@ class ResultRefNode(AtomicExprNode): ...@@ -138,15 +138,23 @@ class ResultRefNode(AtomicExprNode):
# nothing to do here # nothing to do here
return self return self
def type_dependencies(self, env):
if self.expression:
return self.expression.type_dependencies(env)
else:
return ()
def analyse_types(self, env): def analyse_types(self, env):
if self.expression is not None: if self.expression is not None:
self.type = self.expression.type self.type = self.expression.type
def infer_type(self, env): def infer_type(self, env):
if self.expression is not None:
return self.expression.infer_type(env)
if self.type is not None: if self.type is not None:
return self.type return self.type
if self.expression is not None:
if self.expression.type is not None:
return self.expression.type
return self.expression.infer_type(env)
assert False, "cannot infer type of ResultRefNode" assert False, "cannot infer type of ResultRefNode"
def may_be_none(self): def may_be_none(self):
......
...@@ -319,18 +319,36 @@ class EnvTransform(CythonTransform): ...@@ -319,18 +319,36 @@ class EnvTransform(CythonTransform):
This transformation keeps a stack of the environments. This transformation keeps a stack of the environments.
""" """
def __call__(self, root): def __call__(self, root):
self.env_stack = [root.scope] self.env_stack = [(root, root.scope)]
return super(EnvTransform, self).__call__(root) return super(EnvTransform, self).__call__(root)
def current_env(self): def current_env(self):
return self.env_stack[-1] return self.env_stack[-1][1]
def current_scope_node(self):
return self.env_stack[-1][0]
def visit_FuncDefNode(self, node): def visit_FuncDefNode(self, node):
self.env_stack.append(node.local_scope) self.env_stack.append((node, node.local_scope))
self.visitchildren(node)
self.env_stack.pop()
return node
def visit_ClassDefNode(self, node):
self.env_stack.append((node, node.scope))
self.visitchildren(node) self.visitchildren(node)
self.env_stack.pop() self.env_stack.pop()
return node return node
def visit_ScopedExprNode(self, node):
if node.expr_scope:
self.env_stack.append((node, node.expr_scope))
self.visitchildren(node)
self.env_stack.pop()
else:
self.visitchildren(node)
return node
class RecursiveNodeReplacer(VisitorTransform): class RecursiveNodeReplacer(VisitorTransform):
""" """
......
cdef extern from "omp.h": cdef extern from "omp.h":
ctypedef struct omp_lock_t ctypedef struct omp_lock_t:
ctypedef struct omp_nest_lock_t pass
ctypedef struct omp_nest_lock_t:
pass
ctypedef enum omp_sched_t: ctypedef enum omp_sched_t:
omp_sched_static = 1, omp_sched_static = 1,
......
/////////////// append.proto ///////////////

// Implements obj.append(x) and returns None, mirroring the Python-level
// method call.  Exact lists take the PyList_Append() fast path; anything
// else goes through a normal attribute lookup and call.
static CYTHON_INLINE PyObject* __Pyx_PyObject_Append(PyObject* L, PyObject* x) {
    PyObject *method, *result;
    if (likely(PyList_CheckExact(L))) {
        if (unlikely(PyList_Append(L, x) < 0))
            return NULL;
        // the Python-level call would produce None, so hand back None here too
        Py_RETURN_NONE;
    }
    method = __Pyx_GetAttrString(L, "append");
    if (unlikely(!method))
        return NULL;
    result = PyObject_CallFunctionObjArgs(method, x, NULL);
    Py_DECREF(method);
    return result;
}
/////////////// pop.proto ///////////////

// Implements L.pop(): pops the tail item of an exact list in place (when
// popping cannot trigger a shrinking reallocation), pops from exact sets
// via PySet_Pop(), and otherwise calls the Python-level "pop" method.
// Returns a new reference, NULL on error.
static CYTHON_INLINE PyObject* __Pyx_PyObject_Pop(PyObject* L) {
#if PY_VERSION_HEX >= 0x02040000
    if (likely(PyList_CheckExact(L))
            /* Check that both the size is positive and no reallocation shrinking needs to be done. */
            && likely(PyList_GET_SIZE(L) > (((PyListObject*)L)->allocated >> 1))) {
        /* shrink in place; the caller takes over the item's reference */
        Py_SIZE(L) -= 1;
        return PyList_GET_ITEM(L, PyList_GET_SIZE(L));
    }
#if PY_VERSION_HEX >= 0x02050000
    /* PySet_Pop() is only available from Python 2.5 on */
    else if (Py_TYPE(L) == (&PySet_Type)) {
        return PySet_Pop(L);
    }
#endif
#endif
    return PyObject_CallMethod(L, (char*)"pop", NULL);
}
/////////////// pop_index.proto ///////////////

// Implements L.pop(ix).  For exact lists (when popping cannot trigger a
// shrinking reallocation) the item is removed in place with memmove();
// out-of-range indices fall through to the generic call so that Python
// raises the usual IndexError.  Returns a new reference, NULL on error.
static PyObject* __Pyx_PyObject_PopIndex(PyObject* L, Py_ssize_t ix) {
    PyObject *r, *m, *t, *py_ix;
#if PY_VERSION_HEX >= 0x02040000
    if (likely(PyList_CheckExact(L))) {
        Py_ssize_t size = PyList_GET_SIZE(L);
        /* only take the fast path while no reallocation shrinking is needed */
        if (likely(size > (((PyListObject*)L)->allocated >> 1))) {
            if (ix < 0) {
                ix += size;
            }
            if (likely(0 <= ix && ix < size)) {
                /* transfer ownership of the item to the caller, then close the gap */
                PyObject* v = PyList_GET_ITEM(L, ix);
                Py_SIZE(L) -= 1;
                size -= 1;
                memmove(&PyList_GET_ITEM(L, ix), &PyList_GET_ITEM(L, ix+1), (size-ix)*sizeof(PyObject*));
                return v;
            }
        }
    }
#endif
    /* slow path: r = L.pop(ix) via attribute lookup and call */
    py_ix = t = NULL;
    m = __Pyx_GetAttrString(L, "pop");
    if (!m) goto bad;
    py_ix = PyInt_FromSsize_t(ix);
    if (!py_ix) goto bad;
    t = PyTuple_New(1);
    if (!t) goto bad;
    /* PyTuple_SET_ITEM steals the reference; clear py_ix so 'bad' won't double-free */
    PyTuple_SET_ITEM(t, 0, py_ix);
    py_ix = NULL;
    r = PyObject_CallObject(m, t);
    Py_DECREF(m);
    Py_DECREF(t);
    return r;
bad:
    Py_XDECREF(m);
    Py_XDECREF(t);
    Py_XDECREF(py_ix);
    return NULL;
}
/////////////// py_unicode_istitle.proto ///////////////

// Py_UNICODE_ISTITLE() doesn't match unicode.istitle() as the latter
// additionally allows characters that comply with Py_UNICODE_ISUPPER().
#if PY_VERSION_HEX < 0x030200A2
static CYTHON_INLINE int __Pyx_Py_UNICODE_ISTITLE(Py_UNICODE uchar)
#else
/* CPython 3.2a2+ switched these character APIs from Py_UNICODE to Py_UCS4 */
static CYTHON_INLINE int __Pyx_Py_UNICODE_ISTITLE(Py_UCS4 uchar)
#endif
{
    return Py_UNICODE_ISTITLE(uchar) || Py_UNICODE_ISUPPER(uchar);
}
/////////////// unicode_tailmatch.proto ///////////////

// Implements unicode.startswith()/endswith() matching.  Like the Python
// methods, 'substr' may be a tuple of candidate strings, although testing
// a single unicode string is by far the more common case.
static int __Pyx_PyUnicode_Tailmatch(PyObject* s, PyObject* substr,
                                     Py_ssize_t start, Py_ssize_t end, int direction) {
    if (unlikely(PyTuple_Check(substr))) {
        Py_ssize_t pos, count = PyTuple_GET_SIZE(substr);
        for (pos = 0; pos < count; pos++) {
            int match = PyUnicode_Tailmatch(s, PyTuple_GET_ITEM(substr, pos),
                                            start, end, direction);
            // non-zero is either a hit (1) or an error (-1); both end the search
            if (match)
                return match;
        }
        return 0;
    }
    return PyUnicode_Tailmatch(s, substr, start, end, direction);
}
/////////////// bytes_tailmatch.proto ///////////////

// Implements bytes.startswith()/endswith() for a single candidate 'arg'.
// Returns 1 on match, 0 on mismatch, -1 on error.  direction > 0 tests the
// tail (endswith), otherwise the head (startswith).
static int __Pyx_PyBytes_SingleTailmatch(PyObject* self, PyObject* arg, Py_ssize_t start,
                                         Py_ssize_t end, int direction)
{
    const char* self_ptr = PyBytes_AS_STRING(self);
    Py_ssize_t self_len = PyBytes_GET_SIZE(self);
    const char* sub_ptr;
    Py_ssize_t sub_len;
    int retval;

#if PY_VERSION_HEX >= 0x02060000
    Py_buffer view;
    view.obj = NULL;
#endif

    if ( PyBytes_Check(arg) ) {
        sub_ptr = PyBytes_AS_STRING(arg);
        sub_len = PyBytes_GET_SIZE(arg);
    }
#if PY_MAJOR_VERSION < 3
    // Python 2.x allows mixing unicode and str
    else if ( PyUnicode_Check(arg) ) {
        return PyUnicode_Tailmatch(self, arg, start, end, direction);
    }
#endif
    else {
        // arbitrary buffer-supporting objects: read the candidate's buffer.
        // BUGFIX: this used to request the buffer of 'self' instead of 'arg',
        // so matching against e.g. a bytearray compared 'self' with itself.
#if PY_VERSION_HEX < 0x02060000
        if (unlikely(PyObject_AsCharBuffer(arg, &sub_ptr, &sub_len)))
            return -1;
#else
        if (unlikely(PyObject_GetBuffer(arg, &view, PyBUF_SIMPLE) == -1))
            return -1;
        sub_ptr = (const char*) view.buf;
        sub_len = view.len;
#endif
    }

    /* clip start/end to the usual Python slice semantics */
    if (end > self_len)
        end = self_len;
    else if (end < 0)
        end += self_len;
    if (end < 0)
        end = 0;
    if (start < 0)
        start += self_len;
    if (start < 0)
        start = 0;

    if (direction > 0) {
        /* endswith: anchor the comparison at the end of the slice */
        if (end-sub_len > start)
            start = end - sub_len;
    }

    if (start + sub_len <= end)
        retval = !memcmp(self_ptr+start, sub_ptr, sub_len);
    else
        retval = 0;

#if PY_VERSION_HEX >= 0x02060000
    if (view.obj)
        PyBuffer_Release(&view);
#endif

    return retval;
}
// Tuple-aware wrapper around __Pyx_PyBytes_SingleTailmatch(): when 'substr'
// is a tuple, each candidate is tried in turn until one matches or fails.
static int __Pyx_PyBytes_Tailmatch(PyObject* self, PyObject* substr, Py_ssize_t start,
                                   Py_ssize_t end, int direction)
{
    if (unlikely(PyTuple_Check(substr))) {
        Py_ssize_t pos, count = PyTuple_GET_SIZE(substr);
        for (pos = 0; pos < count; pos++) {
            int match = __Pyx_PyBytes_SingleTailmatch(self, PyTuple_GET_ITEM(substr, pos),
                                                      start, end, direction);
            // 1 means hit, -1 means error; either one terminates the scan
            if (match)
                return match;
        }
        return 0;
    }
    return __Pyx_PyBytes_SingleTailmatch(self, substr, start, end, direction);
}
/////////////// bytes_index.proto ///////////////

// Return the byte at 'index' of an exact bytes object, supporting negative
// indices.  With check_bounds set, out-of-range indices raise IndexError
// and -1 is returned (callers disambiguate via PyErr_Occurred()).
static CYTHON_INLINE char __Pyx_PyBytes_GetItemInt(PyObject* bytes, Py_ssize_t index, int check_bounds) {
    Py_ssize_t length = PyBytes_GET_SIZE(bytes);
    if (check_bounds) {
        // bitwise |/& keep the test branchless, as in the original
        if (unlikely((index >= length) | ((index < 0) & (index < -length)))) {
            PyErr_Format(PyExc_IndexError, "string index out of range");
            return -1;
        }
    }
    if (index < 0)
        index += length;
    return PyBytes_AS_STRING(bytes)[index];
}
/////////////// dict_getitem_default.proto ///////////////

// Implements d.get(key, default_value): a missing key yields default_value
// instead of raising KeyError.  Returns a new reference, NULL on error.
static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value) {
    PyObject* value;
#if PY_MAJOR_VERSION >= 3
    value = PyDict_GetItemWithError(d, key);
    if (unlikely(!value)) {
        /* distinguish "key not found" from a lookup error (e.g. unhashable key) */
        if (unlikely(PyErr_Occurred()))
            return NULL;
        value = default_value;
    }
    /* PyDict_GetItemWithError returns a borrowed reference */
    Py_INCREF(value);
#else
    if (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key)) {
        /* these presumably have safe hash functions */
        value = PyDict_GetItem(d, key);
        if (unlikely(!value)) {
            value = default_value;
        }
        Py_INCREF(value);
    } else {
        /* other key types: defer to the Python-level d.get() call;
           a Py_None default can be omitted since d.get(key) returns None anyway */
        PyObject *m;
        m = __Pyx_GetAttrString(d, "get");
        if (!m) return NULL;
        value = PyObject_CallFunctionObjArgs(m, key,
            (default_value == Py_None) ? NULL : default_value, NULL);
        Py_DECREF(m);
    }
#endif
    return value;
}
/////////////// dict_setdefault.proto ///////////////

// Implements d.setdefault(key, default_value): returns the existing value
// for key, inserting default_value first when the key is missing.
// Returns a new reference, NULL on error.
static PyObject *__Pyx_PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *default_value) {
    PyObject* value;
#if PY_MAJOR_VERSION >= 3
    value = PyDict_GetItemWithError(d, key);
    if (unlikely(!value)) {
        /* distinguish "key not found" from a lookup error (e.g. unhashable key) */
        if (unlikely(PyErr_Occurred()))
            return NULL;
        if (unlikely(PyDict_SetItem(d, key, default_value) == -1))
            return NULL;
        value = default_value;
    }
    /* the dict lookup returned a borrowed reference */
    Py_INCREF(value);
#else
    if (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key)) {
        /* these presumably have safe hash functions */
        value = PyDict_GetItem(d, key);
        if (unlikely(!value)) {
            if (unlikely(PyDict_SetItem(d, key, default_value) == -1))
                return NULL;
            value = default_value;
        }
        Py_INCREF(value);
    } else {
        /* other key types: defer to the Python-level d.setdefault() call */
        PyObject *m;
        m = __Pyx_GetAttrString(d, "setdefault");
        if (!m) return NULL;
        value = PyObject_CallFunctionObjArgs(m, key, default_value, NULL);
        Py_DECREF(m);
    }
#endif
    return value;
}
/////////////// py_dict_clear.proto ///////////////

// d.clear() for dicts: always succeeds and hands back None (new reference)
// so the result can be used like any other method-call result.
static CYTHON_INLINE PyObject* __Pyx_PyDict_Clear(PyObject* d) {
    PyDict_Clear(d);
    Py_RETURN_NONE;
}
/////////////// pyobject_as_double.proto ///////////////

static double __Pyx__PyObject_AsDouble(PyObject* obj); /* proto */

// Fast-path macro: read exact floats directly via PyFloat_AS_DOUBLE() and
// only call the helper function for everything else.
#define __Pyx_PyObject_AsDouble(obj) \
    ((likely(PyFloat_CheckExact(obj))) ? \
     PyFloat_AS_DOUBLE(obj) : __Pyx__PyObject_AsDouble(obj))
/////////////// pyobject_as_double ///////////////

// Slow path of __Pyx_PyObject_AsDouble(): coerce an arbitrary object to a
// C double.  Returns -1 with a Python error set on failure; since -1 is
// also a valid result, callers must check PyErr_Occurred().
static double __Pyx__PyObject_AsDouble(PyObject* obj) {
    PyObject* float_value;
    if (Py_TYPE(obj)->tp_as_number && Py_TYPE(obj)->tp_as_number->nb_float) {
        /* the type supports numeric conversion directly */
        return PyFloat_AsDouble(obj);
    } else if (PyUnicode_CheckExact(obj) || PyBytes_CheckExact(obj)) {
        /* parse string input; the Py2 API takes an extra (unused) pend argument */
#if PY_MAJOR_VERSION >= 3
        float_value = PyFloat_FromString(obj);
#else
        float_value = PyFloat_FromString(obj, 0);
#endif
    } else {
        /* fall back to calling float(obj) */
        PyObject* args = PyTuple_New(1);
        if (unlikely(!args)) goto bad;
        /* SET_ITEM steals a reference we do not own: put the borrowed obj in ... */
        PyTuple_SET_ITEM(args, 0, obj);
        float_value = PyObject_Call((PyObject*)&PyFloat_Type, args, 0);
        /* ... and take it out again before the DECREF so obj is not released */
        PyTuple_SET_ITEM(args, 0, 0);
        Py_DECREF(args);
    }
    if (likely(float_value)) {
        double value = PyFloat_AS_DOUBLE(float_value);
        Py_DECREF(float_value);
        return value;
    }
bad:
    return (double)-1;
}
cimport cython cimport cython
@cython.final
cdef class Packet: cdef class Packet:
cdef public object link cdef public object link
cdef public object ident cdef public object ident
...@@ -12,20 +13,24 @@ cdef class Packet: ...@@ -12,20 +13,24 @@ cdef class Packet:
cdef class TaskRec: cdef class TaskRec:
pass pass
@cython.final
cdef class DeviceTaskRec(TaskRec): cdef class DeviceTaskRec(TaskRec):
cdef public object pending cdef public object pending
@cython.final
cdef class IdleTaskRec(TaskRec): cdef class IdleTaskRec(TaskRec):
cdef public long control cdef public long control
cdef public Py_ssize_t count cdef public Py_ssize_t count
@cython.final
cdef class HandlerTaskRec(TaskRec): cdef class HandlerTaskRec(TaskRec):
cdef public object work_in # = None cdef public object work_in # = None
cdef public object device_in # = None cdef public object device_in # = None
cpdef workInAdd(self,p) cpdef workInAdd(self, Packet p)
cpdef deviceInAdd(self,p) cpdef deviceInAdd(self, Packet p)
@cython.final
cdef class WorkerTaskRec(TaskRec): cdef class WorkerTaskRec(TaskRec):
cdef public object destination # = I_HANDLERA cdef public object destination # = I_HANDLERA
cdef public Py_ssize_t count cdef public Py_ssize_t count
...@@ -60,7 +65,7 @@ cdef class Task(TaskState): ...@@ -60,7 +65,7 @@ cdef class Task(TaskState):
cdef public object input # = w cdef public object input # = w
cdef public object handle # = r cdef public object handle # = r
cpdef addPacket(self,Packet p,old) cpdef addPacket(self,Packet p,Task old)
cpdef runTask(self) cpdef runTask(self)
cpdef waitTask(self) cpdef waitTask(self)
cpdef hold(self) cpdef hold(self)
...@@ -70,19 +75,19 @@ cdef class Task(TaskState): ...@@ -70,19 +75,19 @@ cdef class Task(TaskState):
cdef class DeviceTask(Task): cdef class DeviceTask(Task):
@cython.locals(d=DeviceTaskRec) @cython.locals(d=DeviceTaskRec)
cpdef fn(self,Packet pkt,r) cpdef fn(self,Packet pkt,DeviceTaskRec r)
cdef class HandlerTask(Task): cdef class HandlerTask(Task):
@cython.locals(h=HandlerTaskRec) @cython.locals(h=HandlerTaskRec)
cpdef fn(self,Packet pkt,r) cpdef fn(self,Packet pkt,HandlerTaskRec r)
cdef class IdleTask(Task): cdef class IdleTask(Task):
@cython.locals(i=IdleTaskRec) @cython.locals(i=IdleTaskRec)
cpdef fn(self,Packet pkt,r) cpdef fn(self,Packet pkt,IdleTaskRec r)
cdef class WorkTask(Task): cdef class WorkTask(Task):
@cython.locals(w=WorkerTaskRec) @cython.locals(w=WorkerTaskRec)
cpdef fn(self,Packet pkt,r) cpdef fn(self,Packet pkt,WorkerTaskRec r)
@cython.locals(t=Task) @cython.locals(t=Task)
cpdef schedule() cpdef schedule()
......
...@@ -2,13 +2,14 @@ Cython's entire documentation suite is currently being overhauled. ...@@ -2,13 +2,14 @@ Cython's entire documentation suite is currently being overhauled.
For the time being, I'll use this page to post notes. For the time being, I'll use this page to post notes.
The previous Cython documentation files are hosted at http://hg.cython.org/cython-docs The previous Cython documentation files are hosted at
http://hg.cython.org/cython-docs
Notes Notes
======= =======
1) Some css work should definately be done. 1) Some css work should definitely be done.
2) Use local 'top-of-page' contents rather than the sidebar, imo. 2) Use local 'top-of-page' contents rather than the sidebar, imo.
3) Provide a link from each (sub)section to the TOC of the page. 3) Provide a link from each (sub)section to the TOC of the page.
4) Fix cython highlighter for cdef blocks 4) Fix cython highlighter for cdef blocks
Background
----------
[brain dump]
The "Old Cython Users Guide" is a derivative of the old Pyrex documentation.
It underwent substantial editing by Peter Alexandar
to become the Reference Guide, which is oriented around bullet points
and lists rather than prose. This transition was incomplete.
At nearly the same time, Robert, Dag, and Stefan wrote a tutorial as
part of the SciPy proceedings. It was felt that the content there was
cleaner and more up to date than anything else, and this became the
basis for the "Getting Started" and "Tutorials" sections. However,
it simply doesn't have as much content as the old documentation used to.
Eventually, it seems all of the old users' manual could be whittled
down into independent tutorial topics. Much discussion of what we'd
like to see is at
http://www.mail-archive.com/cython-dev@codespeak.net/msg06945.html
There is currently a huge amount of redundancy, but no one section has
it all.
Also, we should go through the wiki enhancement proposal list
and make sure to transfer the (done) ones into the user manual.
...@@ -420,6 +420,8 @@ Cython provides facilities for releasing the Global Interpreter Lock (GIL) ...@@ -420,6 +420,8 @@ Cython provides facilities for releasing the Global Interpreter Lock (GIL)
before calling C code, and for acquiring the GIL in functions that are to be before calling C code, and for acquiring the GIL in functions that are to be
called back from C code that is executed without the GIL. called back from C code that is executed without the GIL.
.. _nogil:
Releasing the GIL Releasing the GIL
^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^
......
...@@ -7,8 +7,11 @@ Using Parallelism ...@@ -7,8 +7,11 @@ Using Parallelism
********************************** **********************************
Cython supports native parallelism through the :py:mod:`cython.parallel` Cython supports native parallelism through the :py:mod:`cython.parallel`
module. To use this kind of parallelism, the GIL must be released. It module. To use this kind of parallelism, the GIL must be released
currently supports OpenMP, but later on more backends might be supported. (see :ref:`Releasing the GIL <nogil>`).
It currently supports OpenMP, but later on more backends might be supported.
__ nogil_
.. function:: prange([start,] stop[, step], nogil=False, schedule=None) .. function:: prange([start,] stop[, step], nogil=False, schedule=None)
...@@ -59,11 +62,11 @@ currently supports OpenMP, but later on more backends might be supported. ...@@ -59,11 +62,11 @@ currently supports OpenMP, but later on more backends might be supported.
+-----------------+------------------------------------------------------+ +-----------------+------------------------------------------------------+
The default schedule is implementation defined. For more information consult The default schedule is implementation defined. For more information consult
the OpenMP specification: [#]_. the OpenMP specification [#]_.
Example with a reduction:: Example with a reduction::
from cython.parallel import prange, parallel, threadid from cython.parallel import prange
cdef int i cdef int i
cdef int sum = 0 cdef int sum = 0
...@@ -75,7 +78,7 @@ currently supports OpenMP, but later on more backends might be supported. ...@@ -75,7 +78,7 @@ currently supports OpenMP, but later on more backends might be supported.
Example with a shared numpy array:: Example with a shared numpy array::
from cython.parallel import * from cython.parallel import prange
def func(np.ndarray[double] x, double alpha): def func(np.ndarray[double] x, double alpha):
cdef Py_ssize_t i cdef Py_ssize_t i
...@@ -94,7 +97,7 @@ currently supports OpenMP, but later on more backends might be supported. ...@@ -94,7 +97,7 @@ currently supports OpenMP, but later on more backends might be supported.
Example with thread-local buffers:: Example with thread-local buffers::
from cython.parallel import * from cython.parallel import parallel, prange
from libc.stdlib cimport abort, malloc, free from libc.stdlib cimport abort, malloc, free
cdef Py_ssize_t idx, i, n = 100 cdef Py_ssize_t idx, i, n = 100
...@@ -175,12 +178,20 @@ particular order:: ...@@ -175,12 +178,20 @@ particular order::
In the example above it is undefined whether an exception shall be raised, In the example above it is undefined whether an exception shall be raised,
whether it will simply break or whether it will return 2. whether it will simply break or whether it will return 2.
Nested Parallelism Using OpenMP Functions
================== ======================
Nested parallelism is currently disabled due to a bug in gcc 4.5 [#]_. However, OpenMP functions can be used by cimporting ``openmp``::
you can freely call functions with parallel sections from a parallel section.
from cython.parallel cimport parallel
cimport openmp
cdef int num_threads
openmp.omp_set_dynamic(1)
with nogil, parallel():
num_threads = openmp.omp_get_num_threads()
...
.. rubric:: References .. rubric:: References
.. [#] http://www.openmp.org/mp-documents/spec30.pdf .. [#] http://www.openmp.org/mp-documents/spec30.pdf
.. [#] http://gcc.gnu.org/bugzilla/show_bug.cgi?id=49897
Background
----------
[brain dump]
The "Old Cython Users Guide" is a derivative of the old Pyrex documentation. It underwent substantial editing by Peter Alexandar
to become the Reference Guide, which is oriented around bullet points
and lists rather than prose. This transition was incomplete.
At nearly the same time, Robert, Dag, and Stefan wrote a tutorial as
part of the SciPy proceedings. It was felt that the content there was
cleaner and more up to date than anything else, and this became the
basis for the "Getting Started" and "Tutorials" sections. However,
it simply doesn't have as much content as the old documentation used to.
Eventually, it seems all of the old users manual could be whittled
down into independent tutorial topics. Much discussion of what we'd
like to see is at
http://www.mail-archive.com/cython-dev@codespeak.net/msg06945.html
There is currently a huge amount of redundancy, but no one section has
it all.
Also, we should go through the wiki enhancement proposal list and make sure that the completed proposals are transferred into the user manual.
...@@ -183,10 +183,15 @@ def load_module(name, pyxfilename, pyxbuild_dir=None): ...@@ -183,10 +183,15 @@ def load_module(name, pyxfilename, pyxbuild_dir=None):
so_path = build_module(name, pyxfilename, pyxbuild_dir) so_path = build_module(name, pyxfilename, pyxbuild_dir)
mod = imp.load_dynamic(name, so_path) mod = imp.load_dynamic(name, so_path)
assert mod.__file__ == so_path, (mod.__file__, so_path) assert mod.__file__ == so_path, (mod.__file__, so_path)
except Exception, e: except Exception:
import traceback if pyxargs.load_py_module_on_import_failure and pyxfilename.endswith('.py'):
raise ImportError("Building module failed: %s" % # try to fall back to normal import
traceback.format_exception_only(*sys.exc_info()[:2])),None,sys.exc_info()[2] mod = imp.load_source(name, pyxfilename)
assert mod.__file__ in (pyxfilename, pyxfilename+'c', pyxfilename+'o'), (mod.__file__, pyxfilename)
else:
import traceback
raise ImportError("Building module failed: %s" %
traceback.format_exception_only(*sys.exc_info()[:2])),None,sys.exc_info()[2]
return mod return mod
...@@ -345,7 +350,8 @@ class PyxArgs(object): ...@@ -345,7 +350,8 @@ class PyxArgs(object):
##pyxargs=None ##pyxargs=None
def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True, def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
setup_args={}, reload_support=False ): setup_args={}, reload_support=False,
load_py_module_on_import_failure=False):
"""Main entry point. Call this to install the .pyx import hook in """Main entry point. Call this to install the .pyx import hook in
your meta-path for a single Python process. If you want it to be your meta-path for a single Python process. If you want it to be
installed whenever you use Python, add it to your sitecustomize installed whenever you use Python, add it to your sitecustomize
...@@ -374,6 +380,15 @@ def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True, ...@@ -374,6 +380,15 @@ def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
reload(<pyxmodulename>), e.g. after a change in the Cython code. reload(<pyxmodulename>), e.g. after a change in the Cython code.
Additional files <so_path>.reloadNN may arise on that account, when Additional files <so_path>.reloadNN may arise on that account, when
the previously loaded module file cannot be overwritten. the previously loaded module file cannot be overwritten.
``load_py_module_on_import_failure``: If the compilation of a .py
file succeeds, but the subsequent import fails for some reason,
retry the import with the normal .py module instead of the
compiled module. Note that this may lead to unpredictable results
for modules that change the system state during their import, as
the second import will rerun these modifications in whatever state
the system was left after the import of the compiled module
failed.
""" """
if not build_dir: if not build_dir:
build_dir = os.path.expanduser('~/.pyxbld') build_dir = os.path.expanduser('~/.pyxbld')
...@@ -384,6 +399,7 @@ def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True, ...@@ -384,6 +399,7 @@ def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
pyxargs.build_in_temp = build_in_temp pyxargs.build_in_temp = build_in_temp
pyxargs.setup_args = (setup_args or {}).copy() pyxargs.setup_args = (setup_args or {}).copy()
pyxargs.reload_support = reload_support pyxargs.reload_support = reload_support
pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure
has_py_importer = False has_py_importer = False
has_pyx_importer = False has_pyx_importer = False
......
...@@ -285,8 +285,8 @@ class ErrorWriter(object): ...@@ -285,8 +285,8 @@ class ErrorWriter(object):
class TestBuilder(object): class TestBuilder(object):
def __init__(self, rootdir, workdir, selectors, exclude_selectors, annotate, def __init__(self, rootdir, workdir, selectors, exclude_selectors, annotate,
cleanup_workdir, cleanup_sharedlibs, with_pyregr, cython_only, cleanup_workdir, cleanup_sharedlibs, cleanup_failures,
languages, test_bugs, fork, language_level): with_pyregr, cython_only, languages, test_bugs, fork, language_level):
self.rootdir = rootdir self.rootdir = rootdir
self.workdir = workdir self.workdir = workdir
self.selectors = selectors self.selectors = selectors
...@@ -294,6 +294,7 @@ class TestBuilder(object): ...@@ -294,6 +294,7 @@ class TestBuilder(object):
self.annotate = annotate self.annotate = annotate
self.cleanup_workdir = cleanup_workdir self.cleanup_workdir = cleanup_workdir
self.cleanup_sharedlibs = cleanup_sharedlibs self.cleanup_sharedlibs = cleanup_sharedlibs
self.cleanup_failures = cleanup_failures
self.with_pyregr = with_pyregr self.with_pyregr = with_pyregr
self.cython_only = cython_only self.cython_only = cython_only
self.languages = languages self.languages = languages
...@@ -410,6 +411,7 @@ class TestBuilder(object): ...@@ -410,6 +411,7 @@ class TestBuilder(object):
annotate=self.annotate, annotate=self.annotate,
cleanup_workdir=self.cleanup_workdir, cleanup_workdir=self.cleanup_workdir,
cleanup_sharedlibs=self.cleanup_sharedlibs, cleanup_sharedlibs=self.cleanup_sharedlibs,
cleanup_failures=self.cleanup_failures,
cython_only=self.cython_only, cython_only=self.cython_only,
fork=self.fork, fork=self.fork,
language_level=self.language_level, language_level=self.language_level,
...@@ -418,8 +420,8 @@ class TestBuilder(object): ...@@ -418,8 +420,8 @@ class TestBuilder(object):
class CythonCompileTestCase(unittest.TestCase): class CythonCompileTestCase(unittest.TestCase):
def __init__(self, test_directory, workdir, module, language='c', def __init__(self, test_directory, workdir, module, language='c',
expect_errors=False, annotate=False, cleanup_workdir=True, expect_errors=False, annotate=False, cleanup_workdir=True,
cleanup_sharedlibs=True, cython_only=False, fork=True, cleanup_sharedlibs=True, cleanup_failures=True, cython_only=False,
language_level=2, warning_errors=False): fork=True, language_level=2, warning_errors=False):
self.test_directory = test_directory self.test_directory = test_directory
self.workdir = workdir self.workdir = workdir
self.module = module self.module = module
...@@ -428,6 +430,7 @@ class CythonCompileTestCase(unittest.TestCase): ...@@ -428,6 +430,7 @@ class CythonCompileTestCase(unittest.TestCase):
self.annotate = annotate self.annotate = annotate
self.cleanup_workdir = cleanup_workdir self.cleanup_workdir = cleanup_workdir
self.cleanup_sharedlibs = cleanup_sharedlibs self.cleanup_sharedlibs = cleanup_sharedlibs
self.cleanup_failures = cleanup_failures
self.cython_only = cython_only self.cython_only = cython_only
self.fork = fork self.fork = fork
self.language_level = language_level self.language_level = language_level
...@@ -461,16 +464,17 @@ class CythonCompileTestCase(unittest.TestCase): ...@@ -461,16 +464,17 @@ class CythonCompileTestCase(unittest.TestCase):
del sys.modules[self.module] del sys.modules[self.module]
except KeyError: except KeyError:
pass pass
cleanup_c_files = WITH_CYTHON and self.cleanup_workdir cleanup = self.cleanup_failures or self.success
cleanup_lib_files = self.cleanup_sharedlibs cleanup_c_files = WITH_CYTHON and self.cleanup_workdir and cleanup
cleanup_lib_files = self.cleanup_sharedlibs and cleanup
if os.path.exists(self.workdir): if os.path.exists(self.workdir):
for rmfile in os.listdir(self.workdir): for rmfile in os.listdir(self.workdir):
if not cleanup_c_files: if not cleanup_c_files:
if rmfile[-2:] in (".c", ".h") or rmfile[-4:] == ".cpp": if (rmfile[-2:] in (".c", ".h") or
rmfile[-4:] == ".cpp" or
rmfile.endswith(".html")):
continue continue
if not cleanup_lib_files and rmfile.endswith(".so") or rmfile.endswith(".dll"): if not cleanup_lib_files and (rmfile.endswith(".so") or rmfile.endswith(".dll")):
continue
if self.annotate and rmfile.endswith(".html"):
continue continue
try: try:
rmfile = os.path.join(self.workdir, rmfile) rmfile = os.path.join(self.workdir, rmfile)
...@@ -484,7 +488,9 @@ class CythonCompileTestCase(unittest.TestCase): ...@@ -484,7 +488,9 @@ class CythonCompileTestCase(unittest.TestCase):
os.makedirs(self.workdir) os.makedirs(self.workdir)
def runTest(self): def runTest(self):
self.success = False
self.runCompileTest() self.runCompileTest()
self.success = True
def runCompileTest(self): def runCompileTest(self):
self.compile(self.test_directory, self.module, self.workdir, self.compile(self.test_directory, self.module, self.workdir,
...@@ -676,8 +682,13 @@ class CythonRunTestCase(CythonCompileTestCase): ...@@ -676,8 +682,13 @@ class CythonRunTestCase(CythonCompileTestCase):
try: try:
self.setUp() self.setUp()
try: try:
self.success = False
self.runCompileTest() self.runCompileTest()
failures, errors = len(result.failures), len(result.errors)
self.run_tests(result) self.run_tests(result)
if failures == len(result.failures) and errors == len(result.errors):
# No new errors...
self.success = True
finally: finally:
check_thread_termination() check_thread_termination()
except Exception: except Exception:
...@@ -1032,6 +1043,7 @@ class EndToEndTest(unittest.TestCase): ...@@ -1032,6 +1043,7 @@ class EndToEndTest(unittest.TestCase):
os.chdir(self.old_dir) os.chdir(self.old_dir)
def runTest(self): def runTest(self):
self.success = False
commands = (self.commands commands = (self.commands
.replace("CYTHON", "PYTHON %s" % os.path.join(self.cython_root, 'cython.py')) .replace("CYTHON", "PYTHON %s" % os.path.join(self.cython_root, 'cython.py'))
.replace("PYTHON", sys.executable)) .replace("PYTHON", sys.executable))
...@@ -1055,6 +1067,7 @@ class EndToEndTest(unittest.TestCase): ...@@ -1055,6 +1067,7 @@ class EndToEndTest(unittest.TestCase):
os.environ['PYTHONPATH'] = old_path os.environ['PYTHONPATH'] = old_path
else: else:
del os.environ['PYTHONPATH'] del os.environ['PYTHONPATH']
self.success = True
# TODO: Support cython_freeze needed here as well. # TODO: Support cython_freeze needed here as well.
...@@ -1278,6 +1291,9 @@ def main(): ...@@ -1278,6 +1291,9 @@ def main():
parser.add_option("--no-cleanup-sharedlibs", dest="cleanup_sharedlibs", parser.add_option("--no-cleanup-sharedlibs", dest="cleanup_sharedlibs",
action="store_false", default=True, action="store_false", default=True,
help="do not delete the generated shared libary files (allows manual module experimentation)") help="do not delete the generated shared libary files (allows manual module experimentation)")
parser.add_option("--no-cleanup-failures", dest="cleanup_failures",
action="store_false", default=True,
help="enable --no-cleanup and --no-cleanup-sharedlibs for failed tests only")
parser.add_option("--no-cython", dest="with_cython", parser.add_option("--no-cython", dest="with_cython",
action="store_false", default=True, action="store_false", default=True,
help="do not run the Cython compiler, only the C compiler") help="do not run the Cython compiler, only the C compiler")
...@@ -1354,6 +1370,8 @@ def main(): ...@@ -1354,6 +1370,8 @@ def main():
help="working directory") help="working directory")
parser.add_option("--debug", dest="for_debugging", default=False, action="store_true", parser.add_option("--debug", dest="for_debugging", default=False, action="store_true",
help="configure for easier use with a debugger (e.g. gdb)") help="configure for easier use with a debugger (e.g. gdb)")
parser.add_option("--pyximport-py", dest="pyximport_py", default=False, action="store_true",
help="use pyximport to automatically compile imported .pyx and .py files")
options, cmd_args = parser.parse_args() options, cmd_args = parser.parse_args()
...@@ -1509,7 +1527,8 @@ def main(): ...@@ -1509,7 +1527,8 @@ def main():
if options.filetests and languages: if options.filetests and languages:
filetests = TestBuilder(ROOTDIR, WORKDIR, selectors, exclude_selectors, filetests = TestBuilder(ROOTDIR, WORKDIR, selectors, exclude_selectors,
options.annotate_source, options.cleanup_workdir, options.annotate_source, options.cleanup_workdir,
options.cleanup_sharedlibs, options.pyregr, options.cleanup_sharedlibs, options.cleanup_failures,
options.pyregr,
options.cython_only, languages, test_bugs, options.cython_only, languages, test_bugs,
options.fork, options.language_level) options.fork, options.language_level)
test_suite.addTest(filetests.build_suite()) test_suite.addTest(filetests.build_suite())
...@@ -1519,7 +1538,8 @@ def main(): ...@@ -1519,7 +1538,8 @@ def main():
if os.path.isdir(sys_pyregr_dir): if os.path.isdir(sys_pyregr_dir):
filetests = TestBuilder(ROOTDIR, WORKDIR, selectors, exclude_selectors, filetests = TestBuilder(ROOTDIR, WORKDIR, selectors, exclude_selectors,
options.annotate_source, options.cleanup_workdir, options.annotate_source, options.cleanup_workdir,
options.cleanup_sharedlibs, True, options.cleanup_sharedlibs, options.cleanup_failures,
True,
options.cython_only, languages, test_bugs, options.cython_only, languages, test_bugs,
options.fork, sys.version_info[0]) options.fork, sys.version_info[0])
sys.stderr.write("Including CPython regression tests in %s\n" % sys_pyregr_dir) sys.stderr.write("Including CPython regression tests in %s\n" % sys_pyregr_dir)
...@@ -1532,6 +1552,11 @@ def main(): ...@@ -1532,6 +1552,11 @@ def main():
else: else:
test_runner = unittest.TextTestRunner(verbosity=options.verbosity) test_runner = unittest.TextTestRunner(verbosity=options.verbosity)
if options.pyximport_py:
from pyximport import pyximport
pyximport.install(pyimport=True, build_dir=os.path.join(WORKDIR, '_pyximport'),
load_py_module_on_import_failure=True)
result = test_runner.run(test_suite) result = test_runner.run(test_suite)
if options.coverage or options.coverage_xml or options.coverage_html: if options.coverage or options.coverage_xml or options.coverage_html:
......
...@@ -28,6 +28,7 @@ pyregr.test_socket ...@@ -28,6 +28,7 @@ pyregr.test_socket
pyregr.test_threading pyregr.test_threading
pyregr.test_sys pyregr.test_sys
pyregr.test_pep3131 pyregr.test_pep3131
pyregr.test_multiprocessing
# CPython regression tests that don't make sense # CPython regression tests that don't make sense
pyregr.test_gdb pyregr.test_gdb
......
...@@ -31,3 +31,7 @@ cdef char f = d.getValue2() ...@@ -31,3 +31,7 @@ cdef char f = d.getValue2()
f = e.getValue2() f = e.getValue2()
del b, e del b, e
ctypedef TemplateTest1[int] TemplateTest1_int
cdef TemplateTest1_int aa
...@@ -4,5 +4,6 @@ ...@@ -4,5 +4,6 @@
ctypedef object[float] mybuffer ctypedef object[float] mybuffer
_ERRORS = u""" _ERRORS = u"""
4:23: Syntax error in ctypedef statement 1:0: Buffer vars not allowed in module scope
4:0: Buffer types only allowed as function local variables
""" """
...@@ -40,6 +40,9 @@ cdef int[:, ::view.contiguous, ::view.indirect_contiguous] a6 ...@@ -40,6 +40,9 @@ cdef int[:, ::view.contiguous, ::view.indirect_contiguous] a6
#cdef int[::view.generic_contiguous, ::view.contiguous] a7 #cdef int[::view.generic_contiguous, ::view.contiguous] a7
#cdef int[::view.contiguous, ::view.generic_contiguous] a8 #cdef int[::view.contiguous, ::view.generic_contiguous] a8
ctypedef int *intp
cdef intp[:, :] myarray
# These are VALID # These are VALID
cdef int[::view.indirect_contiguous, ::view.contiguous] a9 cdef int[::view.indirect_contiguous, ::view.contiguous] a9
...@@ -61,4 +64,5 @@ _ERRORS = u''' ...@@ -61,4 +64,5 @@ _ERRORS = u'''
31:9: Dimension may not be contiguous 31:9: Dimension may not be contiguous
37:9: Only one direct contiguous axis may be specified. 37:9: Only one direct contiguous axis may be specified.
38:9:Only dimensions 3 and 2 may be contiguous and direct 38:9:Only dimensions 3 and 2 may be contiguous and direct
44:10: Invalid base type for memoryview slice
''' '''
# mode: error
cimport numpy as np
cdef void func(np.ndarray[np.double_t, ndim=1] myarray) nogil:
    pass
# Acquiring a buffer requires the GIL, so a buffer-typed argument on a
# nogil function must be rejected at compile time.  The expected error
# below is matched by line:column against this file, so no lines may be
# inserted above the function definition.
_ERRORS = u"""
5:15: Buffer may not be acquired without the GIL. Consider using memoryview slices instead.
"""
...@@ -12,6 +12,15 @@ max_long_long = 2 ** (sizeof(long long) * 8 - 1) - 1 ...@@ -12,6 +12,15 @@ max_long_long = 2 ** (sizeof(long long) * 8 - 1) - 1
cimport cython cimport cython
def abs_as_name():
    """
    >>> _abs = abs_as_name()
    >>> _abs(-5)
    5
    """
    # Reference the builtin as a bare name (no call) and hand it back;
    # this is exactly what the test exercises.
    builtin_abs = abs
    return builtin_abs
def py_abs(a): def py_abs(a):
""" """
>>> py_abs(-5) >>> py_abs(-5)
......
...@@ -8,7 +8,7 @@ DEF INT_VAL = 1 ...@@ -8,7 +8,7 @@ DEF INT_VAL = 1
def _func(a,b,c): def _func(a,b,c):
return a+b+c return a+b+c
@cython.test_fail_if_path_exists("//BinopNode") @cython.test_fail_if_path_exists("//AddNode")
def add(): def add():
""" """
>>> add() == 1+2+3+4 >>> add() == 1+2+3+4
...@@ -16,7 +16,7 @@ def add(): ...@@ -16,7 +16,7 @@ def add():
""" """
return 1+2+3+4 return 1+2+3+4
@cython.test_fail_if_path_exists("//BinopNode") #@cython.test_fail_if_path_exists("//AddNode")
def add_var(a): def add_var(a):
""" """
>>> add_var(10) == 1+2+10+3+4 >>> add_var(10) == 1+2+10+3+4
...@@ -24,7 +24,7 @@ def add_var(a): ...@@ -24,7 +24,7 @@ def add_var(a):
""" """
return 1+2 +a+ 3+4 return 1+2 +a+ 3+4
@cython.test_fail_if_path_exists("//BinopNode") @cython.test_fail_if_path_exists("//AddNode", "//SubNode")
def neg(): def neg():
""" """
>>> neg() == -1 -2 - (-3+4) >>> neg() == -1 -2 - (-3+4)
...@@ -32,7 +32,7 @@ def neg(): ...@@ -32,7 +32,7 @@ def neg():
""" """
return -1 -2 - (-3+4) return -1 -2 - (-3+4)
@cython.test_fail_if_path_exists("//BinopNode") @cython.test_fail_if_path_exists("//AddNode", "//MulNode", "//DivNode")
def long_int_mix(): def long_int_mix():
""" """
>>> long_int_mix() == 1 + (2 * 3) // 2 >>> long_int_mix() == 1 + (2 * 3) // 2
...@@ -43,7 +43,7 @@ def long_int_mix(): ...@@ -43,7 +43,7 @@ def long_int_mix():
""" """
return 1L + (2 * 3L) // 2 return 1L + (2 * 3L) // 2
@cython.test_fail_if_path_exists("//BinopNode") @cython.test_fail_if_path_exists("//AddNode", "//MulNode", "//DivNode")
def char_int_mix(): def char_int_mix():
""" """
>>> char_int_mix() == 1 + (ord(' ') * 3) // 2 + ord('A') >>> char_int_mix() == 1 + (ord(' ') * 3) // 2 + ord('A')
...@@ -51,7 +51,7 @@ def char_int_mix(): ...@@ -51,7 +51,7 @@ def char_int_mix():
""" """
return 1L + (c' ' * 3L) // 2 + c'A' return 1L + (c' ' * 3L) // 2 + c'A'
@cython.test_fail_if_path_exists("//BinopNode") @cython.test_fail_if_path_exists("//AddNode", "//MulNode")
def int_cast(): def int_cast():
""" """
>>> int_cast() == 1 + 2 * 6000 >>> int_cast() == 1 + 2 * 6000
...@@ -59,7 +59,7 @@ def int_cast(): ...@@ -59,7 +59,7 @@ def int_cast():
""" """
return <int>(1 + 2 * 6000) return <int>(1 + 2 * 6000)
@cython.test_fail_if_path_exists("//BinopNode") @cython.test_fail_if_path_exists("//MulNode")
def mul(): def mul():
""" """
>>> mul() == 1*60*1000 >>> mul() == 1*60*1000
...@@ -67,7 +67,7 @@ def mul(): ...@@ -67,7 +67,7 @@ def mul():
""" """
return 1*60*1000 return 1*60*1000
@cython.test_fail_if_path_exists("//BinopNode") @cython.test_fail_if_path_exists("//AddNode", "//MulNode")
def arithm(): def arithm():
""" """
>>> arithm() == 9*2+3*8//6-10 >>> arithm() == 9*2+3*8//6-10
...@@ -75,7 +75,7 @@ def arithm(): ...@@ -75,7 +75,7 @@ def arithm():
""" """
return 9*2+3*8//6-10 return 9*2+3*8//6-10
@cython.test_fail_if_path_exists("//BinopNode") @cython.test_fail_if_path_exists("//AddNode", "//MulNode")
def parameters(): def parameters():
""" """
>>> parameters() == _func(-1 -2, - (-3+4), 1*2*3) >>> parameters() == _func(-1 -2, - (-3+4), 1*2*3)
...@@ -83,7 +83,7 @@ def parameters(): ...@@ -83,7 +83,7 @@ def parameters():
""" """
return _func(-1 -2, - (-3+4), 1*2*3) return _func(-1 -2, - (-3+4), 1*2*3)
@cython.test_fail_if_path_exists("//BinopNode") #@cython.test_fail_if_path_exists("//AddNode")
def lists(): def lists():
""" """
>>> lists() == [1,2,3] + [4,5,6] >>> lists() == [1,2,3] + [4,5,6]
...@@ -91,6 +91,176 @@ def lists(): ...@@ -91,6 +91,176 @@ def lists():
""" """
return [1,2,3] + [4,5,6] return [1,2,3] + [4,5,6]
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_right_len1():
    """
    >>> multiplied_lists_right_len1() == [1] * 5
    True
    """
    # A single-element constant list times a constant int: the repeat
    # must be resolved at compile time, leaving no runtime MulNode.
    return [1] * 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_right():
    """
    >>> multiplied_lists_right() == [1,2,3] * 5
    True
    """
    # Constant list * constant int (multiplier on the right) must be
    # folded at compile time - no MulNode may remain in the tree.
    return [1,2,3] * 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_left():
    """
    >>> multiplied_lists_left() == [1,2,3] * 5
    True
    """
    # Same folding must apply with the constant multiplier on the left.
    return 5 * [1,2,3]
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_neg():
    """
    >>> multiplied_lists_neg() == [1,2,3] * -5
    True
    """
    # A negative constant multiplier yields an empty list; the folding
    # must still fire (no runtime MulNode).
    return [1,2,3] * -5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_nonconst(x):
    """
    >>> multiplied_lists_nonconst(5) == [1,2,3] * 5
    True
    >>> multiplied_lists_nonconst(-5) == [1,2,3] * -5
    True
    >>> multiplied_lists_nonconst(0) == [1,2,3] * 0
    True
    >>> [1,2,3] * 'abc' # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: can't multiply sequence by non-int...
    >>> multiplied_nonconst_tuple_arg('abc') # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: can't multiply sequence by non-int...
    >>> [1,2,3] * 1.0 # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: can't multiply sequence by non-int...
    >>> multiplied_nonconst_tuple_arg(1.0) # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: can't multiply sequence by non-int...
    """
    # Even with a runtime multiplier, a list literal on the left must be
    # compiled without a generic MulNode, and the TypeError behaviour
    # for non-integer x must match CPython's (see doctests above).
    return [1,2,3] * x
@cython.test_assert_path_exists("//MulNode")
def multiplied_lists_nonconst_left(x):
    """
    >>> multiplied_lists_nonconst_left(5) == 5 * [1,2,3]
    True
    >>> multiplied_lists_nonconst_left(-5) == -5 * [1,2,3]
    True
    >>> multiplied_lists_nonconst_left(0) == 0 * [1,2,3]
    True
    """
    # With the unknown operand on the left, the optimisation must NOT
    # fire: a generic MulNode has to remain in the tree.
    return x * [1,2,3]
@cython.test_fail_if_path_exists("//MulNode//ListNode")
@cython.test_assert_path_exists("//MulNode")
def multiplied_lists_nonconst_expression(x):
    """
    >>> multiplied_lists_nonconst_expression(5) == [1,2,3] * (5 * 2)
    True
    >>> multiplied_lists_nonconst_expression(-5) == [1,2,3] * (-5 * 2)
    True
    >>> multiplied_lists_nonconst_expression(0) == [1,2,3] * (0 * 2)
    True
    """
    # The list repeat itself must be optimised away (no ListNode below a
    # MulNode), while the inner arithmetic x*2 keeps its MulNode.
    return [1,2,3] * (x*2)
# Prints and returns its argument; used below to prove that element
# expressions of a repeated sequence literal are evaluated exactly once,
# in order.
cdef side_effect(int x):
    print x
    return x
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_with_side_effects():
    """
    >>> multiplied_lists_with_side_effects() == [1,2,3] * 5
    1
    2
    3
    True
    """
    # Each element expression must be evaluated exactly once (prints
    # 1, 2, 3 a single time) even though the list is repeated 5 times.
    return [side_effect(1), side_effect(2), side_effect(3)] * 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_lists_nonconst_with_side_effects(x):
    """
    >>> multiplied_lists_nonconst_with_side_effects(5) == [1,2,3] * 5
    1
    2
    3
    True
    """
    # Single evaluation of the elements must also hold when the
    # multiplier is only known at runtime.
    return [side_effect(1), side_effect(2), side_effect(3)] * x
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_nonconst_tuple_arg(x):
    """
    >>> multiplied_nonconst_tuple_arg(5) == (1,2) * 5
    True
    >>> multiplied_nonconst_tuple_arg(-5) == (1,2) * -5
    True
    >>> multiplied_nonconst_tuple_arg(0) == (1,2) * 0
    True
    >>> (1,2) * 'abc' # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: can't multiply sequence by non-int...
    >>> multiplied_nonconst_tuple_arg('abc') # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: can't multiply sequence by non-int...
    >>> (1,2) * 1.0 # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: can't multiply sequence by non-int...
    >>> multiplied_nonconst_tuple_arg(1.0) # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: can't multiply sequence by non-int...
    """
    # Tuple literal with a runtime multiplier: compiled without a
    # MulNode, and non-integer multipliers must raise the same
    # TypeError as CPython (see doctests above).
    return (1,2) * x
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_nonconst_tuple(x):
    """
    >>> multiplied_nonconst_tuple(5) == (1,2) * (5+1)
    True
    """
    # The multiplier may be an arbitrary runtime expression; the tuple
    # repeat itself must still avoid a generic MulNode.
    return (1,2) * (x + 1)
# Module-level multiplier: a runtime value, not a compile-time constant.
MULT = 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_global_nonconst_tuple():
    """
    >>> multiplied_global_nonconst_tuple() == (1,2,3) * 5
    1
    2
    3
    True
    """
    # Global (runtime) multiplier plus side-effecting elements: elements
    # must be evaluated exactly once and no MulNode may remain.
    return (side_effect(1), side_effect(2), side_effect(3)) * MULT
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_const_tuple():
    """
    >>> multiplied_const_tuple() == (1,2) * 5
    True
    """
    # Constant tuple * constant int is folded at compile time.
    return (1,2) * 5
@cython.test_fail_if_path_exists("//MulNode")
def multiplied_const_tuple_len1():
    """
    >>> multiplied_const_tuple_len1() == (1,) * 5
    True
    """
    # Single-element constant tuple is folded the same way.
    return (1,) * 5
@cython.test_fail_if_path_exists("//PrimaryCmpNode") @cython.test_fail_if_path_exists("//PrimaryCmpNode")
def compile_time_DEF(): def compile_time_DEF():
""" """
......
# mode: run
# tags: kwargs, call
# ticket: 717
def f(**kwargs):
    # Collect the received keyword arguments as (name, value) pairs and
    # return them in sorted order so the result is deterministic.
    pairs = list(kwargs.items())
    pairs.sort()
    return pairs
def test_call(kwargs):
    """
    >>> kwargs = {'b' : 2}
    >>> f(a=1, **kwargs)
    [('a', 1), ('b', 2)]
    >>> test_call(kwargs)
    [('a', 1), ('b', 2)]
    >>> kwargs = {'a' : 2}
    >>> f(a=1, **kwargs)
    Traceback (most recent call last):
    TypeError: f() got multiple values for keyword argument 'a'
    FIXME: remove ellipsis, fix function name
    >>> test_call(kwargs) # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...() got multiple values for keyword argument 'a'
    """
    # Mixing an explicit keyword with **kwargs must raise TypeError on a
    # duplicate name, matching CPython (the reported function name still
    # differs, hence the ELLIPSIS above - ticket 717).
    return f(a=1, **kwargs)
# mode: run
# tags: eval
# Module global looked up by the eval() tests below.
GLOBAL = 123
def eval_simple(local):
    """
    >>> eval_simple(321)
    (123, 321)
    """
    # eval() without explicit namespaces must see both the module
    # globals and the calling function's locals.
    return eval('GLOBAL, local')
def eval_class_scope():
    """
    >>> eval_class_scope().c
    3
    """
    # eval() inside a class body must see the names defined earlier in
    # that same class scope.
    class TestClassScope:
        a = 1
        b = 2
        c = eval('a + b')
    return TestClassScope
def eval_locals(a, b):
    """
    >>> eval_locals(1, 2)
    (1, 2)
    """
    # With explicit namespaces, eval() must resolve names from the
    # locals() mapping that is passed in (globals here is empty).
    return eval('a, b', {}, locals())
# mode: run
# tags: exec
# A module-level exec statement must assign into the module namespace.
exec "GLOBAL = 1234"
def exec_module_scope():
    """
    >>> globals()['GLOBAL']
    1234
    """
    # Nothing to do here: the doctest only verifies the result of the
    # module-level exec statement above.
def exec_func_scope():
    """
    >>> exec_func_scope()
    {'a': 'b', 'G': 1234}
    """
    d = {}
    # exec inside a function can read module globals (GLOBAL) and mutate
    # objects from the local scope (d).
    exec "d['a'] = 'b'; d['G'] = GLOBAL"
    return d
def exec_pyclass_scope():
    """
    >>> obj = exec_pyclass_scope()
    >>> obj.a
    'b'
    >>> obj.G
    1234
    """
    # exec in a Python class body must bind its assignments as class
    # attributes, with module globals (GLOBAL) visible to the code.
    class TestExec:
        exec "a = 'b'; G = GLOBAL"
    return TestExec
...@@ -29,6 +29,20 @@ cdef class FinalType(object): ...@@ -29,6 +29,20 @@ cdef class FinalType(object):
self.cpdef_method() self.cpdef_method()
def test_external_call():
    """
    >>> test_external_call()
    """
    # Calls a cpdef method of a @final type through a local variable;
    # presumably compiled as a direct call without virtual dispatch -
    # TODO confirm against the generated C code.
    f = FinalType()
    return f.cpdef_method()
def test_external_call_in_temp():
    """
    >>> test_external_call_in_temp()
    """
    # Same call, but the instance only lives in a temporary expression.
    return FinalType().cpdef_method()
cdef class BaseTypeWithFinalMethods(object): cdef class BaseTypeWithFinalMethods(object):
""" """
>>> obj = BaseTypeWithFinalMethods() >>> obj = BaseTypeWithFinalMethods()
......
# mode: run
# ticket: 734
def test_import_error():
    """
    >>> test_import_error()
    Traceback (most recent call last):
    ImportError: cannot import name xxx
    """
    # Importing a missing name must raise ImportError with a message
    # matching CPython's (ticket 734).
    from sys import xxx
# mode: run
cimport cython
@cython.final
cdef class TypedContextManager(object):
    # A context manager whose __enter__ returns a C double instead of a
    # Python object: Cython can still use it in a 'with' statement even
    # though the method is not callable from Python code.
    cdef double __enter__(self): # not callable from Python !
        return 2.0
    # FIXME: inline __exit__() as well
    def __exit__(self, exc_type, exc_value, exc_tb):
        return 0
def with_statement():
    """
    >>> with_statement()
    2.0
    """
    # The 'with' target receives the double returned by the typed
    # __enter__() directly.
    with TypedContextManager() as x:
        return x
...@@ -125,6 +125,29 @@ def index_pop_typed(list L, int i): ...@@ -125,6 +125,29 @@ def index_pop_typed(list L, int i):
""" """
return L.pop(i) return L.pop(i)
@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists('//SimpleCallNode/AttributeNode')
def index_pop_literal(list L):
    """
    >>> L = list(range(10))
    >>> index_pop_literal(L)
    0
    >>> L
    [1, 2, 3, 4, 5, 6, 7, 8, 9]
    >>> while L:
    ...    _ = index_pop_literal(L)
    >>> L
    []
    >>> index_pop_literal(L)
    Traceback (most recent call last):
    ...
    IndexError: pop from empty list
    """
    # list.pop() with a literal index must be compiled to a C-API helper
    # call (PythonCapiCallNode) rather than a Python attribute lookup
    # plus generic call; semantics must match CPython, including the
    # IndexError on an empty list.
    return L.pop(0)
@cython.test_fail_if_path_exists('//PythonCapiCallNode') @cython.test_fail_if_path_exists('//PythonCapiCallNode')
def crazy_pop(L): def crazy_pop(L):
......
# mode: run
# ticket: 731
# tags: locals, vars, dir
# At module scope locals() and globals() must be the very same dict,
# and dir() must list exactly the keys of the module namespace.
LOCALS = locals()
GLOBALS = globals()
DIR_SAME = sorted(dir()) == sorted(globals().keys())
def test_module_locals_and_dir():
    """
    >>> LOCALS is GLOBALS
    True
    >>> DIR_SAME
    True
    """
    # The checks themselves run at module load time (see the module
    # level assignments above); the doctest only inspects the results.
def test_class_locals_and_dir():
    """
    >>> klass = test_class_locals_and_dir()
    >>> 'visible' in klass.locs and 'not_visible' not in klass.locs
    True
    >>> klass.names
    ['visible']
    """
    not_visible = 1234
    # locals()/dir() inside a class body must report only the names
    # bound in the class scope, not the enclosing function's locals.
    class Foo:
        visible = 4321
        names = dir()
        locs = locals()
    return Foo
...@@ -40,7 +40,7 @@ def call_non_dict_test(): ...@@ -40,7 +40,7 @@ def call_non_dict_test():
return func(**NonDict()) return func(**NonDict())
def call_non_dict_test_kw(): def call_non_dict_test_kw():
return func(a=5, **NonDict()) return func(b=5, **NonDict())
class SubDict(dict): class SubDict(dict):
...@@ -51,4 +51,4 @@ def call_sub_dict_test(): ...@@ -51,4 +51,4 @@ def call_sub_dict_test():
return func(**SubDict()) return func(**SubDict())
def call_sub_dict_test_kw(): def call_sub_dict_test_kw():
return func(a=5, **SubDict()) return func(b=5, **SubDict())
# mode: run
# ticket: 561 # ticket: 561
# ticket: 3
# The patch in #561 changes code generation for most special methods # The patch in #561 changes code generation for most special methods
# to remove the Cython-generated wrapper and let PyType_Ready() # to remove the Cython-generated wrapper and let PyType_Ready()
# generate its own wrapper. (This wrapper would be used, for instance, # generate its own wrapper. (This wrapper would be used, for instance,
...@@ -12,13 +15,117 @@ ...@@ -12,13 +15,117 @@
# special_methods_T561_py3.pyx for tests of the differences between # special_methods_T561_py3.pyx for tests of the differences between
# Python 2 and 3. # Python 2 and 3.
# Regarding ticket 3, we should additionally test that unbound method
# calls to these special methods (e.g. ExtType.__init__()) do not use
# a runtime lookup indirection.
import sys import sys
__doc__ = u""" __doc__ = u"""
>>> # If you define either setitem or delitem, you get wrapper objects
>>> # for both methods. (This behavior is unchanged by #561.)
>>> si_setitem = SetItem().__setitem__
>>> si_setitem('foo', 'bar')
SetItem setitem 'foo' 'bar'
>>> si_delitem = SetItem().__delitem__
>>> si_delitem('foo')
Traceback (most recent call last):
...
NotImplementedError: Subscript deletion not supported by special_methods_T561.SetItem
>>> di_setitem = DelItem().__setitem__
>>> di_setitem('foo', 'bar')
Traceback (most recent call last):
...
NotImplementedError: Subscript assignment not supported by special_methods_T561.DelItem
>>> di_delitem = DelItem().__delitem__
>>> di_delitem('foo')
DelItem delitem 'foo'
>>> sdi_setitem = SetDelItem().__setitem__
>>> sdi_setitem('foo', 'bar')
SetDelItem setitem 'foo' 'bar'
>>> sdi_delitem = SetDelItem().__delitem__
>>> sdi_delitem('foo')
SetDelItem delitem 'foo'
>>> g01 = object.__getattribute__(GetAttr(), '__getattribute__')
>>> g01('attr')
GetAttr getattr 'attr'
>>> g10 = object.__getattribute__(GetAttribute(), '__getattr__')
Traceback (most recent call last):
...
AttributeError: 'special_methods_T561.GetAttribute' object has no attribute '__getattr__'
>>> g11 = object.__getattribute__(GetAttribute(), '__getattribute__')
>>> g11('attr')
GetAttribute getattribute 'attr'
>>> # If you define either setattr or delattr, you get wrapper objects
>>> # for both methods. (This behavior is unchanged by #561.)
>>> sa_setattr = SetAttr().__setattr__
>>> sa_setattr('foo', 'bar')
SetAttr setattr 'foo' 'bar'
>>> sa_delattr = SetAttr().__delattr__
>>> sa_delattr('foo')
Traceback (most recent call last):
...
AttributeError: 'special_methods_T561.SetAttr' object has no attribute 'foo'
>>> da_setattr = DelAttr().__setattr__
>>> da_setattr('foo', 'bar')
Traceback (most recent call last):
...
AttributeError: 'special_methods_T561.DelAttr' object has no attribute 'foo'
>>> da_delattr = DelAttr().__delattr__
>>> da_delattr('foo')
DelAttr delattr 'foo'
>>> sda_setattr = SetDelAttr().__setattr__
>>> sda_setattr('foo', 'bar')
SetDelAttr setattr 'foo' 'bar'
>>> sda_delattr = SetDelAttr().__delattr__
>>> sda_delattr('foo')
SetDelAttr delattr 'foo'
>>> # If you define either set or delete, you get wrapper objects
>>> # for both methods. (This behavior is unchanged by #561.)
>>> s_set = Set().__set__
>>> s_set('instance', 'val')
Set set 'instance' 'val'
>>> s_delete = Set().__delete__
>>> s_delete('instance')
Traceback (most recent call last):
...
NotImplementedError: __delete__
>>> d_set = Delete().__set__
>>> d_set('instance', 'val')
Traceback (most recent call last):
...
NotImplementedError: __set__
>>> d_delete = Delete().__delete__
>>> d_delete('instance')
Delete delete 'instance'
>>> sd_set = SetDelete().__set__
>>> sd_set('instance', 'val')
SetDelete set 'instance' 'val'
>>> sd_delete = SetDelete().__delete__
>>> sd_delete('instance')
SetDelete delete 'instance'
>>> # If you define __long__, you get a wrapper object for __int__.
>>> # (This behavior is unchanged by #561.)
>>> Li = Long().__int__
>>> Li()
Long __long__
"""
if sys.version_info >= (2,5):
__doc__ += u"""\
>>> vs0 = VerySpecial(0)
VS __init__ 0
>>> vs0_index = vs0.__index__
>>> vs0_index()
VS __index__ 0
"""
cdef class VerySpecial:
"""
>>> vs0 = VerySpecial(0) >>> vs0 = VerySpecial(0)
VS __init__ 0 VS __init__ 0
>>> vs1 = VerySpecial(1) >>> vs1 = VerySpecial(1)
VS __init__ 1 VS __init__ 1
>>> vs0_add = vs0.__add__ >>> vs0_add = vs0.__add__
>>> vs0_add(vs1) >>> vs0_add(vs1)
VS __add__ 0 1 VS __add__ 0 1
...@@ -114,8 +221,9 @@ __doc__ = u""" ...@@ -114,8 +221,9 @@ __doc__ = u"""
>>> vs0_itruediv = vs0.__itruediv__ >>> vs0_itruediv = vs0.__itruediv__
>>> vs0_itruediv(vs1) >>> vs0_itruediv(vs1)
VS __itruediv__ 0 /= 1 VS __itruediv__ 0 /= 1
>>> # If you define an arithmetic method, you get wrapper objects for
>>> # the reversed version as well. (This behavior is unchanged by #561.) # If you define an arithmetic method, you get wrapper objects for
# the reversed version as well. (This behavior is unchanged by #561.)
>>> vs0_radd = vs0.__radd__ >>> vs0_radd = vs0.__radd__
>>> vs0_radd(vs1) >>> vs0_radd(vs1)
VS __add__ 1 0 VS __add__ 1 0
...@@ -166,30 +274,6 @@ __doc__ = u""" ...@@ -166,30 +274,6 @@ __doc__ = u"""
>>> vs0_len() >>> vs0_len()
VS __len__ 0 VS __len__ 0
0 0
>>> # If you define either setitem or delitem, you get wrapper objects
>>> # for both methods. (This behavior is unchanged by #561.)
>>> si_setitem = SetItem().__setitem__
>>> si_setitem('foo', 'bar')
SetItem setitem 'foo' 'bar'
>>> si_delitem = SetItem().__delitem__
>>> si_delitem('foo')
Traceback (most recent call last):
...
NotImplementedError: Subscript deletion not supported by special_methods_T561.SetItem
>>> di_setitem = DelItem().__setitem__
>>> di_setitem('foo', 'bar')
Traceback (most recent call last):
...
NotImplementedError: Subscript assignment not supported by special_methods_T561.DelItem
>>> di_delitem = DelItem().__delitem__
>>> di_delitem('foo')
DelItem delitem 'foo'
>>> sdi_setitem = SetDelItem().__setitem__
>>> sdi_setitem('foo', 'bar')
SetDelItem setitem 'foo' 'bar'
>>> sdi_delitem = SetDelItem().__delitem__
>>> sdi_delitem('foo')
SetDelItem delitem 'foo'
>>> vs0_repr = vs0.__repr__ >>> vs0_repr = vs0.__repr__
>>> vs0_repr() >>> vs0_repr()
VS __repr__ 0 VS __repr__ 0
...@@ -203,44 +287,11 @@ __doc__ = u""" ...@@ -203,44 +287,11 @@ __doc__ = u"""
>>> vs0_str = vs0.__str__ >>> vs0_str = vs0.__str__
>>> vs0_str() >>> vs0_str()
VS __str__ 0 VS __str__ 0
>>> g01 = object.__getattribute__(GetAttr(), '__getattribute__')
>>> g01('attr') # If you define __richcmp__, you get all of __lt__, __le__,
GetAttr getattr 'attr' # __eq__, __ne__, __gt__, __ge__ (this behavior is unchanged by #561).
>>> g10 = object.__getattribute__(GetAttribute(), '__getattr__') # (you don't get a __richcmp__ method, because it doesn't have a
Traceback (most recent call last): # Python signature)
...
AttributeError: 'special_methods_T561.GetAttribute' object has no attribute '__getattr__'
>>> g11 = object.__getattribute__(GetAttribute(), '__getattribute__')
>>> g11('attr')
GetAttribute getattribute 'attr'
>>> # If you define either setattr or delattr, you get wrapper objects
>>> # for both methods. (This behavior is unchanged by #561.)
>>> sa_setattr = SetAttr().__setattr__
>>> sa_setattr('foo', 'bar')
SetAttr setattr 'foo' 'bar'
>>> sa_delattr = SetAttr().__delattr__
>>> sa_delattr('foo')
Traceback (most recent call last):
...
AttributeError: 'special_methods_T561.SetAttr' object has no attribute 'foo'
>>> da_setattr = DelAttr().__setattr__
>>> da_setattr('foo', 'bar')
Traceback (most recent call last):
...
AttributeError: 'special_methods_T561.DelAttr' object has no attribute 'foo'
>>> da_delattr = DelAttr().__delattr__
>>> da_delattr('foo')
DelAttr delattr 'foo'
>>> sda_setattr = SetDelAttr().__setattr__
>>> sda_setattr('foo', 'bar')
SetDelAttr setattr 'foo' 'bar'
>>> sda_delattr = SetDelAttr().__delattr__
>>> sda_delattr('foo')
SetDelAttr delattr 'foo'
>>> # If you define __richcmp__, you get all of __lt__, __le__,
>>> # __eq__, __ne__, __gt__, __ge__ (this behavior is unchanged by #561).
>>> # (you don't get a __richcmp__ method, because it doesn't have a
>>> # Python signature)
>>> vs0_lt = vs0.__lt__ >>> vs0_lt = vs0.__lt__
>>> vs0_lt(vs1) >>> vs0_lt(vs1)
VS richcmp 0 1 (kind=0) VS richcmp 0 1 (kind=0)
...@@ -265,50 +316,14 @@ __doc__ = u""" ...@@ -265,50 +316,14 @@ __doc__ = u"""
>>> vs0_next = vs0.__next__ >>> vs0_next = vs0.__next__
>>> vs0_next() >>> vs0_next()
VS next/__next__ 0 VS next/__next__ 0
>>> vs0_get = vs0.__get__ >>> vs0_get = vs0.__get__
>>> vs0_get('instance', 'owner') >>> vs0_get('instance', 'owner')
VS __get__ 0 'instance' 'owner' VS __get__ 0 'instance' 'owner'
>>> # If you define either set or delete, you get wrapper objects
>>> # for both methods. (This behavior is unchanged by #561.)
>>> s_set = Set().__set__
>>> s_set('instance', 'val')
Set set 'instance' 'val'
>>> s_delete = Set().__delete__
>>> s_delete('instance')
Traceback (most recent call last):
...
NotImplementedError: __delete__
>>> d_set = Delete().__set__
>>> d_set('instance', 'val')
Traceback (most recent call last):
...
NotImplementedError: __set__
>>> d_delete = Delete().__delete__
>>> d_delete('instance')
Delete delete 'instance'
>>> sd_set = SetDelete().__set__
>>> sd_set('instance', 'val')
SetDelete set 'instance' 'val'
>>> sd_delete = SetDelete().__delete__
>>> sd_delete('instance')
SetDelete delete 'instance'
>>> vs0_init = vs0.__init__ >>> vs0_init = vs0.__init__
>>> vs0_init(0) >>> vs0_init(0)
VS __init__ 0 VS __init__ 0
>>> # If you define __long__, you get a wrapper object for __int__. """
>>> # (This behavior is unchanged by #561.)
>>> Li = Long().__int__
>>> Li()
Long __long__
"""
if sys.version_info >= (2,5):
__doc__ += u"""\
>>> vs0_index = vs0.__index__
>>> vs0_index()
VS __index__ 0
"""
cdef class VerySpecial:
cdef readonly int value cdef readonly int value
def __init__(self, v): def __init__(self, v):
...@@ -552,3 +567,359 @@ cdef class GetAttrGetItemRedirect: ...@@ -552,3 +567,359 @@ cdef class GetAttrGetItemRedirect:
if key == 'attr': if key == 'attr':
return getattr(self, key) return getattr(self, key)
return ('item', self.obj) return ('item', self.obj)
# test unbound method usage in subtypes
cdef class VerySpecialSubType(VerySpecial):
    """
    >>> vs0 = VerySpecialSubType(0)
    VS __init__ 0
    >>> vs1 = VerySpecialSubType(1)
    VS __init__ 1
    >>> vs0_add = vs0.__add__
    >>> vs0_add(vs1)
    VS __add__ 0 1
    >>> vs0_sub = vs0.__sub__
    >>> vs0_sub(vs1)
    VS __sub__ 0 1
    >>> vs0_mul = vs0.__mul__
    >>> vs0_mul(vs1)
    VS __mul__ 0 1
    >>> vs0_mod = vs0.__mod__
    >>> vs0_mod(vs1)
    VS __mod__ 0 1
    >>> vs0_divmod = vs0.__divmod__
    >>> vs0_divmod(vs1)
    VS __divmod__ 0 1
    >>> vs0_pow = vs0.__pow__
    >>> vs0_pow(vs1)
    VS __pow__ pow(0, 1, None)
    >>> vs0_pow(vs1, 13)
    VS __pow__ pow(0, 1, 13)
    >>> vs0_neg = vs0.__neg__
    >>> vs0_neg()
    VS __neg__ 0
    >>> vs0_pos = vs0.__pos__
    >>> vs0_pos()
    VS __pos__ 0
    >>> vs0_abs = vs0.__abs__
    >>> vs0_abs()
    VS __abs__ 0
    >>> vs0_invert = vs0.__invert__
    >>> vs0_invert()
    VS __invert__ 0
    >>> vs0_lshift = vs0.__lshift__
    >>> vs0_lshift(vs1)
    VS __lshift__ 0 << 1
    >>> vs0_rshift = vs0.__rshift__
    >>> vs0_rshift(vs1)
    VS __rshift__ 0 >> 1
    >>> vs0_and = vs0.__and__
    >>> vs0_and(vs1)
    VS __and__ 0 & 1
    >>> vs0_xor = vs0.__xor__
    >>> vs0_xor(vs1)
    VS __xor__ 0 ^ 1
    >>> vs0_or = vs0.__or__
    >>> vs0_or(vs1)
    VS __or__ 0 | 1
    >>> vs0_int = vs0.__int__
    >>> vs0_int()
    VS __int__ 0
    >>> vs0_float = vs0.__float__
    >>> vs0_float()
    VS __float__ 0
    >>> vs0_iadd = vs0.__iadd__
    >>> vs0_iadd(vs1)
    VS __iadd__ 0 += 1
    >>> vs0_isub = vs0.__isub__
    >>> vs0_isub(vs1)
    VS __isub__ 0 -= 1
    >>> vs0_imul = vs0.__imul__
    >>> vs0_imul(vs1)
    VS __imul__ 0 *= 1
    >>> vs0_imod = vs0.__imod__
    >>> vs0_imod(vs1)
    VS __imod__ 0 %= 1
    >>> vs0_ipow = vs0.__ipow__
    >>> vs0_ipow(vs1)
    VS __ipow__ 0 1
    >>> vs0_ilshift = vs0.__ilshift__
    >>> vs0_ilshift(vs1)
    VS __ilshift__ 0 <<= 1
    >>> vs0_irshift = vs0.__irshift__
    >>> vs0_irshift(vs1)
    VS __irshift__ 0 >>= 1
    >>> vs0_iand = vs0.__iand__
    >>> vs0_iand(vs1)
    VS __iand__ 0 &= 1
    >>> vs0_ixor = vs0.__ixor__
    >>> vs0_ixor(vs1)
    VS __ixor__ 0 ^= 1
    >>> vs0_ior = vs0.__ior__
    >>> vs0_ior(vs1)
    VS __ior__ 0 |= 1
    >>> vs0_floordiv = vs0.__floordiv__
    >>> vs0_floordiv(vs1)
    VS __floordiv__ 0 / 1
    >>> vs0_truediv = vs0.__truediv__
    >>> vs0_truediv(vs1)
    VS __truediv__ 0 / 1
    >>> vs0_ifloordiv = vs0.__ifloordiv__
    >>> vs0_ifloordiv(vs1)
    VS __ifloordiv__ 0 /= 1
    >>> vs0_itruediv = vs0.__itruediv__
    >>> vs0_itruediv(vs1)
    VS __itruediv__ 0 /= 1

    # If you define an arithmetic method, you get wrapper objects for
    # the reversed version as well. (This behavior is unchanged by #561.)
    >>> vs0_radd = vs0.__radd__
    >>> vs0_radd(vs1)
    VS __add__ 1 0
    >>> vs0_rsub = vs0.__rsub__
    >>> vs0_rsub(vs1)
    VS __sub__ 1 0
    >>> vs0_rmul = vs0.__rmul__
    >>> vs0_rmul(vs1)
    VS __mul__ 1 0
    >>> vs0_rmod = vs0.__rmod__
    >>> vs0_rmod(vs1)
    VS __mod__ 1 0
    >>> vs0_rdivmod = vs0.__rdivmod__
    >>> vs0_rdivmod(vs1)
    VS __divmod__ 1 0
    >>> vs0_rpow = vs0.__rpow__
    >>> vs0_rpow(vs1)
    VS __pow__ pow(1, 0, None)
    >>> vs0_rlshift = vs0.__rlshift__
    >>> vs0_rlshift(vs1)
    VS __lshift__ 1 << 0
    >>> vs0_rrshift = vs0.__rrshift__
    >>> vs0_rrshift(vs1)
    VS __rshift__ 1 >> 0
    >>> vs0_rand = vs0.__rand__
    >>> vs0_rand(vs1)
    VS __and__ 1 & 0
    >>> vs0_rxor = vs0.__rxor__
    >>> vs0_rxor(vs1)
    VS __xor__ 1 ^ 0
    >>> vs0_ror = vs0.__ror__
    >>> vs0_ror(vs1)
    VS __or__ 1 | 0
    >>> vs0_rfloordiv = vs0.__rfloordiv__
    >>> vs0_rfloordiv(vs1)
    VS __floordiv__ 1 / 0
    >>> vs0_rtruediv = vs0.__rtruediv__
    >>> vs0_rtruediv(vs1)
    VS __truediv__ 1 / 0
    >>> vs0_getitem = vs0.__getitem__
    >>> vs0_getitem('foo')
    VS __getitem__ 0['foo']
    >>> vs0_contains = vs0.__contains__
    >>> vs0_contains(vs1)
    VS __contains__ 0 1
    False
    >>> vs0_len = vs0.__len__
    >>> vs0_len()
    VS __len__ 0
    0
    >>> vs0_repr = vs0.__repr__
    >>> vs0_repr()
    VS __repr__ 0
    >>> vs0_hash = vs0.__hash__
    >>> vs0_hash()
    VS __hash__ 0
    1000
    >>> vs0_call = vs0.__call__
    >>> vs0_call(vs1)
    VS __call__ 0(1)
    >>> vs0_str = vs0.__str__
    >>> vs0_str()
    VS __str__ 0
    >>> vs0_lt = vs0.__lt__
    >>> vs0_lt(vs1)
    VS richcmp 0 1 (kind=0)
    >>> vs0_le = vs0.__le__
    >>> vs0_le(vs1)
    VS richcmp 0 1 (kind=1)
    >>> vs0_eq = vs0.__eq__
    >>> vs0_eq(vs1)
    VS richcmp 0 1 (kind=2)
    >>> vs0_ne = vs0.__ne__
    >>> vs0_ne(vs1)
    VS richcmp 0 1 (kind=3)
    >>> vs0_gt = vs0.__gt__
    >>> vs0_gt(vs1)
    VS richcmp 0 1 (kind=4)
    >>> vs0_ge = vs0.__ge__
    >>> vs0_ge(vs1)
    VS richcmp 0 1 (kind=5)
    >>> vs0_iter = vs0.__iter__
    >>> vs0_iter()
    VS __iter__ 0
    >>> vs0_next = vs0.__next__
    >>> vs0_next()
    VS next/__next__ 0
    >>> vs0_get = vs0.__get__
    >>> vs0_get('instance', 'owner')
    VS __get__ 0 'instance' 'owner'
    >>> vs0_init = vs0.__init__
    >>> vs0_init(0)
    VS __init__ 0
    """
    # Every special method below delegates to the base class version via
    # an unbound method call (VerySpecial.<method>(self, ...)), so the
    # doctests above exercise unbound special-method calls on a subtype.
    def __init__(self, v):
        VerySpecial.__init__(self, v)
    def __add__(self, other):
        return VerySpecial.__add__(self, other)
    def __sub__(self, other):
        return VerySpecial.__sub__(self, other)
    def __mul__(self, other):
        return VerySpecial.__mul__(self, other)
    def __div__(self, other):
        return VerySpecial.__div__(self, other)
    def __mod__(self, other):
        return VerySpecial.__mod__(self, other)
    def __divmod__(self, other):
        return VerySpecial.__divmod__(self, other)
    def __pow__(self, other, mod):
        return VerySpecial.__pow__(self, other, mod)
    def __lshift__(self, other):
        return VerySpecial.__lshift__(self, other)
    def __rshift__(self, other):
        return VerySpecial.__rshift__(self, other)
    def __and__(self, other):
        return VerySpecial.__and__(self, other)
    def __xor__(self, other):
        return VerySpecial.__xor__(self, other)
    def __or__(self, other):
        return VerySpecial.__or__(self, other)
    def __floordiv__(self, other):
        return VerySpecial.__floordiv__(self, other)
    def __truediv__(self, other):
        return VerySpecial.__truediv__(self, other)
    def __neg__(self):
        return VerySpecial.__neg__(self)
    def __pos__(self):
        return VerySpecial.__pos__(self)
    def __abs__(self):
        return VerySpecial.__abs__(self)
    def __nonzero__(self):
        return VerySpecial.__nonzero__(self)
    def __invert__(self):
        return VerySpecial.__invert__(self)
    def __int__(self):
        return VerySpecial.__int__(self)
    def __long__(self):
        return VerySpecial.__long__(self)
    def __float__(self):
        return VerySpecial.__float__(self)
    def __oct__(self):
        return VerySpecial.__oct__(self)
    def __hex__(self):
        return VerySpecial.__hex__(self)
    def __iadd__(self, other):
        return VerySpecial.__iadd__(self, other)
    def __isub__(self, other):
        return VerySpecial.__isub__(self, other)
    def __imul__(self, other):
        return VerySpecial.__imul__(self, other)
    def __idiv__(self, other):
        return VerySpecial.__idiv__(self, other)
    def __imod__(self, other):
        return VerySpecial.__imod__(self, other)
    def __ipow__(self, other):
        return VerySpecial.__ipow__(self, other)
    def __ilshift__(self, other):
        return VerySpecial.__ilshift__(self, other)
    def __irshift__(self, other):
        return VerySpecial.__irshift__(self, other)
    def __iand__(self, other):
        return VerySpecial.__iand__(self, other)
    def __ixor__(self, other):
        return VerySpecial.__ixor__(self, other)
    def __ior__(self, other):
        return VerySpecial.__ior__(self, other)
    def __ifloordiv__(self, other):
        return VerySpecial.__ifloordiv__(self, other)
    def __itruediv__(self, other):
        return VerySpecial.__itruediv__(self, other)
    def __index__(self):
        return VerySpecial.__index__(self)
    def __getitem__(self, index):
        return VerySpecial.__getitem__(self, index)
    def __contains__(self, other):
        return VerySpecial.__contains__(self, other)
    def __len__(self):
        return VerySpecial.__len__(self)
    def __cmp__(self, other):
        return VerySpecial.__cmp__(self, other)
    def __repr__(self):
        return VerySpecial.__repr__(self)
    def __hash__(self):
        return VerySpecial.__hash__(self)
    def __call__(self, arg):
        return VerySpecial.__call__(self, arg)
    def __str__(self):
        return VerySpecial.__str__(self)
    # there is no __richcmp__ at the Python level
    # def __richcmp__(self, other, kind):
    #     return VerySpecial.__richcmp__(self, other, kind)
    def __iter__(self):
        return VerySpecial.__iter__(self)
    def __next__(self):
        return VerySpecial.__next__(self)
    def __get__(self, inst, own):
        return VerySpecial.__get__(self, inst, own)
...@@ -221,6 +221,24 @@ def c_functions(): ...@@ -221,6 +221,24 @@ def c_functions():
assert typeof(f) == 'int (*)(int)', typeof(f) assert typeof(f) == 'int (*)(int)', typeof(f)
assert 2 == f(1) assert 2 == f(1)
def builtin_functions():
    """
    Builtin functions bound to local variables are inferred as generic
    'Python object' (not as typed C functions), and the bound names
    still behave like the original builtins.

    >>> _abs, _getattr = builtin_functions()
    Python object
    Python object
    >>> _abs(-1)
    1
    >>> class o(object): pass
    >>> o.x = 1
    >>> _getattr(o, 'x')
    1
    """
    _abs = abs          # builtin assigned to a local variable
    print(typeof(_abs))
    _getattr = getattr
    print(typeof(_getattr))
    return _abs, _getattr
def cascade(): def cascade():
""" """
>>> cascade() >>> cascade()
...@@ -513,6 +531,18 @@ def common_extension_type_base(): ...@@ -513,6 +531,18 @@ def common_extension_type_base():
w = CC() w = CC()
assert typeof(w) == "Python object", typeof(w) assert typeof(w) == "Python object", typeof(w)
cdef class AcceptsKeywords:
    # Extension type whose constructor accepts arbitrary positional and
    # keyword arguments; used by constructor_call() below to test type
    # inference on constructor calls that pass keywords.
    def __init__(self, *args, **kwds):
        pass
@infer_types(None)
def constructor_call():
    """
    Calling an extension type with keyword arguments must still infer
    the exact extension type for the result.

    >>> constructor_call()
    """
    x = AcceptsKeywords(a=1, b=2)
    assert typeof(x) == "AcceptsKeywords", typeof(x)
@infer_types(None) @infer_types(None)
def large_literals(): def large_literals():
...@@ -529,6 +559,63 @@ def large_literals(): ...@@ -529,6 +559,63 @@ def large_literals():
assert typeof(d) == "Python object", typeof(d) assert typeof(d) == "Python object", typeof(d)
class EmptyContextManager(object):
    """Minimal no-op context manager.

    Entering yields None (so a ``with ... as x`` target is rebound to
    None) and exiting returns a falsy value, so exceptions raised in
    the managed block propagate unchanged.
    """

    def __enter__(self):
        # Nothing to acquire; the as-target receives None.
        pass

    def __exit__(self, *args):
        # Falsy result: never suppress an exception.
        return 0
def with_statement():
    """
    The with-statement target is rebound by __enter__ of an untyped
    (Python-level) context manager, so inference for x falls back to
    'Python object' both inside and after the with block.

    >>> with_statement()
    Python object
    Python object
    """
    x = 1.0  # initial float assignment; the with block rebinds x
    with EmptyContextManager() as x:
        print(typeof(x))
    print(typeof(x))
    return x
@cython.final
cdef class TypedContextManager(object):
    # __enter__ is declared cpdef with a C return type so that the
    # with-statement target in with_statement_typed() can be inferred
    # as 'double'. The class is final so the cpdef call can be bound
    # statically.
    cpdef double __enter__(self):
        return 2.0
    def __exit__(self, *args):
        return 0
def with_statement_typed():
    """
    With a final extension-type manager whose cpdef __enter__ returns a
    C double, the with target is inferred as 'double'.

    >>> with_statement_typed()
    double
    double
    2.0
    """
    x = 1.0
    with TypedContextManager() as x:
        print(typeof(x))
    print(typeof(x))
    return x
def with_statement_untyped():
    """
    The same manager accessed through a variable declared as generic
    'object' loses the typed __enter__ signature, so the target is
    inferred as 'Python object' (the runtime value is still 2.0).

    >>> with_statement_untyped()
    Python object
    Python object
    2.0
    """
    x = 1.0
    cdef object t = TypedContextManager()  # erase the static type
    with t as x:
        print(typeof(x))
    print(typeof(x))
    return x
def self_lookup(a):
    # Assigns b from a method call on itself; exercises type inference
    # on self-referential assignments (inference must terminate and
    # settle on a type for b).
    b = a
    b = b.foo(keyword=None)
    print typeof(b)
# Regression test for trac #638. # Regression test for trac #638.
def bar(foo): def bar(foo):
......
...@@ -181,6 +181,28 @@ def multimanager(): ...@@ -181,6 +181,28 @@ def multimanager():
print('%s %s %s %s %s' % (a, b, c, d, e)) print('%s %s %s %s %s' % (a, b, c, d, e))
print(nested) print(nested)
class GetManager(object):
    # Factory object: the context manager is produced by a method call
    # rather than named directly, so 'with GetManager().get(...)' tests
    # with-statements whose manager comes from an expression.
    def get(self, *args):
        return ContextManager(*args)
def manager_from_expression():
    """
    The context manager in a with-statement may come from an arbitrary
    expression (a chained call or a method call on a variable), not
    just a plain name.

    >>> manager_from_expression()
    enter
    1
    exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
    enter
    2
    exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
    """
    with GetManager().get(1) as x:
        print(x)
    g = GetManager()
    with g.get(2) as x:
        print(x)
# Tests borrowed from pyregr test_with.py, # Tests borrowed from pyregr test_with.py,
# modified to follow the constraints of Cython. # modified to follow the constraints of Cython.
import unittest import unittest
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment