Commit 0edf4568 authored by Robert Bradshaw's avatar Robert Bradshaw

Merge branch 'master' into 0.20.x

Conflicts:
	CHANGES.rst
parents 5e08f83c e40d032f
...@@ -6,7 +6,6 @@ Cython Changelog ...@@ -6,7 +6,6 @@ Cython Changelog
Latest Latest
======= =======
Features added Features added
-------------- --------------
...@@ -16,6 +15,12 @@ Bugs fixed ...@@ -16,6 +15,12 @@ Bugs fixed
* List/Tuple literals multiplied by more than one factor were only multiplied * List/Tuple literals multiplied by more than one factor were only multiplied
by the last factor instead of all. by the last factor instead of all.
* Lookups of special methods (specifically for context managers) could fail
in Python <= 2.6/3.1.
* Local variables were erroneously appended to the signature introspection
of Cython implemented functions with keyword-only arguments under Python 3.
* In-place assignments to variables with inferred Python builtin/extension * In-place assignments to variables with inferred Python builtin/extension
types could fail with type errors if the result value type was incompatible types could fail with type errors if the result value type was incompatible
with the type of the previous value. with the type of the previous value.
......
...@@ -91,7 +91,14 @@ def file_hash(filename): ...@@ -91,7 +91,14 @@ def file_hash(filename):
path = os.path.normpath(filename.encode("UTF-8")) path = os.path.normpath(filename.encode("UTF-8"))
m = hashlib.md5(str(len(path)) + ":") m = hashlib.md5(str(len(path)) + ":")
m.update(path) m.update(path)
m.update(open(filename).read()) f = open(filename, 'rb')
try:
data = f.read(65000)
while data:
m.update(data)
data = f.read(65000)
finally:
f.close()
return m.hexdigest() return m.hexdigest()
def parse_list(s): def parse_list(s):
...@@ -671,7 +678,7 @@ def cythonize(module_list, exclude=[], nthreads=0, aliases=None, quiet=False, fo ...@@ -671,7 +678,7 @@ def cythonize(module_list, exclude=[], nthreads=0, aliases=None, quiet=False, fo
options['include_path'] = ['.'] options['include_path'] = ['.']
if 'common_utility_include_dir' in options: if 'common_utility_include_dir' in options:
if options.get('cache'): if options.get('cache'):
raise NotImplementedError, "common_utility_include_dir does not yet work with caching" raise NotImplementedError("common_utility_include_dir does not yet work with caching")
if not os.path.exists(options['common_utility_include_dir']): if not os.path.exists(options['common_utility_include_dir']):
os.makedirs(options['common_utility_include_dir']) os.makedirs(options['common_utility_include_dir'])
c_options = CompilationOptions(**options) c_options = CompilationOptions(**options)
......
...@@ -267,12 +267,12 @@ except ImportError: ...@@ -267,12 +267,12 @@ except ImportError:
for name, value in kwd_values.items(): for name, value in kwd_values.items():
if name in args: if name in args:
if name in all: if name in all:
raise TypeError, "Duplicate argument %s" % name raise TypeError("Duplicate argument %s" % name)
all[name] = kwd_values.pop(name) all[name] = kwd_values.pop(name)
if kwds is not None: if kwds is not None:
all[kwds] = kwd_values all[kwds] = kwd_values
elif kwd_values: elif kwd_values:
raise TypeError, "Unexpected keyword arguments: %s" % kwd_values.keys() raise TypeError("Unexpected keyword arguments: %s" % kwd_values.keys())
if defaults is None: if defaults is None:
defaults = () defaults = ()
first_default = len(args) - len(defaults) first_default = len(args) - len(defaults)
...@@ -281,7 +281,7 @@ except ImportError: ...@@ -281,7 +281,7 @@ except ImportError:
if ix >= first_default: if ix >= first_default:
all[name] = defaults[ix - first_default] all[name] = defaults[ix - first_default]
else: else:
raise TypeError, "Missing argument: %s" % name raise TypeError("Missing argument: %s" % name)
return all return all
def get_body(source): def get_body(source):
......
...@@ -17,7 +17,6 @@ special_chars = [ ...@@ -17,7 +17,6 @@ special_chars = [
(u'>', u'\xF1', u'&gt;'), (u'>', u'\xF1', u'&gt;'),
] ]
line_pos_comment = re.compile(r'/\*.*?<<<<<<<<<<<<<<.*?\*/\n*', re.DOTALL)
class AnnotationCCodeWriter(CCodeWriter): class AnnotationCCodeWriter(CCodeWriter):
...@@ -141,6 +140,7 @@ function toggleDiv(id) { ...@@ -141,6 +140,7 @@ function toggleDiv(id) {
return ur"<span class='%s'>%s</span>" % ( return ur"<span class='%s'>%s</span>" % (
group_name, match.group(group_name)) group_name, match.group(group_name))
pos_comment_marker = u'/* \N{HORIZONTAL ELLIPSIS} */\n'
k = 0 k = 0
code_source_file = self.code.get(source_filename, {}) code_source_file = self.code.get(source_filename, {})
for line in lines: for line in lines:
...@@ -150,6 +150,9 @@ function toggleDiv(id) { ...@@ -150,6 +150,9 @@ function toggleDiv(id) {
except KeyError: except KeyError:
code = '' code = ''
else: else:
code = _replace_pos_comment(pos_comment_marker, code)
if code.startswith(pos_comment_marker):
code = code[len(pos_comment_marker):]
code = html_escape(code) code = html_escape(code)
calls = zero_calls.copy() calls = zero_calls.copy()
...@@ -165,7 +168,6 @@ function toggleDiv(id) { ...@@ -165,7 +168,6 @@ function toggleDiv(id) {
f.write(line.rstrip()) f.write(line.rstrip())
f.write(u'</pre>\n') f.write(u'</pre>\n')
code = re.sub(line_pos_comment, '', code) # inline annotations are redundant
f.write(u"<pre id='line%s' class='code' style='background-color: #%s'>%s</pre>" % (k, color, code)) f.write(u"<pre id='line%s' class='code' style='background-color: #%s'>%s</pre>" % (k, color, code))
f.write(u'</body></html>\n') f.write(u'</body></html>\n')
f.close() f.close()
...@@ -183,6 +185,13 @@ _parse_code = re.compile( ...@@ -183,6 +185,13 @@ _parse_code = re.compile(
).sub ).sub
_replace_pos_comment = re.compile(
# this matches what Cython generates as code line marker comment
ur'^\s*/\*(?:(?:[^*]|\*[^/])*\n)+\s*\*/\s*\n',
re.M
).sub
class AnnotationItem(object): class AnnotationItem(object):
def __init__(self, style, text, tag="", size=0): def __init__(self, style, text, tag="", size=0):
......
...@@ -249,7 +249,13 @@ class UtilityCodeBase(object): ...@@ -249,7 +249,13 @@ class UtilityCodeBase(object):
continue continue
# only pass lists when we have to: most argument expect one value or None # only pass lists when we have to: most argument expect one value or None
if name == 'requires': if name == 'requires':
values = [ cls.load(dep, from_file, **orig_kwargs) for dep in values ] if orig_kwargs:
values = [cls.load(dep, from_file, **orig_kwargs)
for dep in sorted(values)]
else:
# dependencies are rarely unique, so use load_cached() when we can
values = [cls.load_cached(dep, from_file)
for dep in sorted(values)]
elif not values: elif not values:
values = None values = None
elif len(values) == 1: elif len(values) == 1:
...@@ -269,16 +275,16 @@ class UtilityCodeBase(object): ...@@ -269,16 +275,16 @@ class UtilityCodeBase(object):
return cls(**kwargs) return cls(**kwargs)
@classmethod @classmethod
def load_cached(cls, utility_code_name, from_file=None, _cache={}): def load_cached(cls, utility_code_name, from_file=None, __cache={}):
""" """
Calls .load(), but using a per-type cache based on utility name and file name. Calls .load(), but using a per-type cache based on utility name and file name.
""" """
key = (cls, from_file, utility_code_name) key = (cls, from_file, utility_code_name)
try: try:
return _cache[key] return __cache[key]
except KeyError: except KeyError:
pass pass
code = _cache[key] = cls.load(utility_code_name, from_file) code = __cache[key] = cls.load(utility_code_name, from_file)
return code return code
@classmethod @classmethod
......
...@@ -159,7 +159,7 @@ def report_error(err): ...@@ -159,7 +159,7 @@ def report_error(err):
echo_file.write(line.encode('ASCII', 'replace')) echo_file.write(line.encode('ASCII', 'replace'))
num_errors = num_errors + 1 num_errors = num_errors + 1
if Options.fast_fail: if Options.fast_fail:
raise AbortError, "fatal errors" raise AbortError("fatal errors")
def error(position, message): def error(position, message):
#print "Errors.error:", repr(position), repr(message) ### #print "Errors.error:", repr(position), repr(message) ###
......
...@@ -1690,7 +1690,7 @@ class NameNode(AtomicExprNode): ...@@ -1690,7 +1690,7 @@ class NameNode(AtomicExprNode):
return self return self
def analyse_target_types(self, env): def analyse_target_types(self, env):
self.analyse_entry(env) self.analyse_entry(env, is_target=True)
if (not self.is_lvalue() and self.entry.is_cfunction and if (not self.is_lvalue() and self.entry.is_cfunction and
self.entry.fused_cfunction and self.entry.as_variable): self.entry.fused_cfunction and self.entry.as_variable):
...@@ -1750,12 +1750,12 @@ class NameNode(AtomicExprNode): ...@@ -1750,12 +1750,12 @@ class NameNode(AtomicExprNode):
gil_message = "Accessing Python global or builtin" gil_message = "Accessing Python global or builtin"
def analyse_entry(self, env): def analyse_entry(self, env, is_target=False):
#print "NameNode.analyse_entry:", self.name ### #print "NameNode.analyse_entry:", self.name ###
self.check_identifier_kind() self.check_identifier_kind()
entry = self.entry entry = self.entry
type = entry.type type = entry.type
if (type.is_pyobject and self.inferred_type and if (not is_target and type.is_pyobject and self.inferred_type and
self.inferred_type.is_builtin_type): self.inferred_type.is_builtin_type):
# assume that type inference is smarter than the static entry # assume that type inference is smarter than the static entry
type = self.inferred_type type = self.inferred_type
...@@ -2536,7 +2536,9 @@ class WithExitCallNode(ExprNode): ...@@ -2536,7 +2536,9 @@ class WithExitCallNode(ExprNode):
result_var = code.funcstate.allocate_temp(py_object_type, manage_ref=False) result_var = code.funcstate.allocate_temp(py_object_type, manage_ref=False)
code.mark_pos(self.pos) code.mark_pos(self.pos)
code.putln("%s = PyObject_Call(%s, %s, NULL);" % ( code.globalstate.use_utility_code(UtilityCode.load_cached(
"PyObjectCall", "ObjectHandling.c"))
code.putln("%s = __Pyx_PyObject_Call(%s, %s, NULL);" % (
result_var, result_var,
self.with_stat.exit_var, self.with_stat.exit_var,
self.args.result())) self.args.result()))
...@@ -4657,8 +4659,10 @@ class SimpleCallNode(CallNode): ...@@ -4657,8 +4659,10 @@ class SimpleCallNode(CallNode):
code.globalstate.use_utility_code(self.function.entry.utility_code) code.globalstate.use_utility_code(self.function.entry.utility_code)
if func_type.is_pyobject: if func_type.is_pyobject:
arg_code = self.arg_tuple.py_result() arg_code = self.arg_tuple.py_result()
code.globalstate.use_utility_code(UtilityCode.load_cached(
"PyObjectCall", "ObjectHandling.c"))
code.putln( code.putln(
"%s = PyObject_Call(%s, %s, NULL); %s" % ( "%s = __Pyx_PyObject_Call(%s, %s, NULL); %s" % (
self.result(), self.result(),
self.function.py_result(), self.function.py_result(),
arg_code, arg_code,
...@@ -5087,8 +5091,10 @@ class GeneralCallNode(CallNode): ...@@ -5087,8 +5091,10 @@ class GeneralCallNode(CallNode):
kwargs = self.keyword_args.py_result() kwargs = self.keyword_args.py_result()
else: else:
kwargs = 'NULL' kwargs = 'NULL'
code.globalstate.use_utility_code(UtilityCode.load_cached(
"PyObjectCall", "ObjectHandling.c"))
code.putln( code.putln(
"%s = PyObject_Call(%s, %s, %s); %s" % ( "%s = __Pyx_PyObject_Call(%s, %s, %s); %s" % (
self.result(), self.result(),
self.function.py_result(), self.function.py_result(),
self.positional_args.py_result(), self.positional_args.py_result(),
...@@ -7604,18 +7610,14 @@ class CodeObjectNode(ExprNode): ...@@ -7604,18 +7610,14 @@ class CodeObjectNode(ExprNode):
def __init__(self, def_node): def __init__(self, def_node):
ExprNode.__init__(self, def_node.pos, def_node=def_node) ExprNode.__init__(self, def_node.pos, def_node=def_node)
args = list(def_node.args) args = list(def_node.args)
if def_node.star_arg: # if we have args/kwargs, then the first two in var_entries are those
args.append(def_node.star_arg) local_vars = [arg for arg in def_node.local_scope.var_entries if arg.name]
if def_node.starstar_arg:
args.append(def_node.starstar_arg)
local_vars = [ arg for arg in def_node.local_scope.var_entries
if arg.name ]
self.varnames = TupleNode( self.varnames = TupleNode(
def_node.pos, def_node.pos,
args = [ IdentifierStringNode(arg.pos, value=arg.name) args=[IdentifierStringNode(arg.pos, value=arg.name)
for arg in args + local_vars ], for arg in args + local_vars],
is_temp = 0, is_temp=0,
is_literal = 1) is_literal=1)
def may_be_none(self): def may_be_none(self):
return False return False
...@@ -7635,11 +7637,18 @@ class CodeObjectNode(ExprNode): ...@@ -7635,11 +7637,18 @@ class CodeObjectNode(ExprNode):
file_path = StringEncoding.BytesLiteral(func.pos[0].get_filenametable_entry().encode('utf8')) file_path = StringEncoding.BytesLiteral(func.pos[0].get_filenametable_entry().encode('utf8'))
file_path_const = code.get_py_string_const(file_path, identifier=False, is_str=True) file_path_const = code.get_py_string_const(file_path, identifier=False, is_str=True)
code.putln("%s = (PyObject*)__Pyx_PyCode_New(%d, %d, %d, 0, 0, %s, %s, %s, %s, %s, %s, %s, %s, %d, %s); %s" % ( flags = []
if self.def_node.star_arg:
flags.append('CO_VARARGS')
if self.def_node.starstar_arg:
flags.append('CO_VARKEYWORDS')
code.putln("%s = (PyObject*)__Pyx_PyCode_New(%d, %d, %d, 0, %s, %s, %s, %s, %s, %s, %s, %s, %s, %d, %s); %s" % (
self.result_code, self.result_code,
len(func.args), # argcount len(func.args) - func.num_kwonly_args, # argcount
func.num_kwonly_args, # kwonlyargcount (Py3 only) func.num_kwonly_args, # kwonlyargcount (Py3 only)
len(self.varnames.args), # nlocals len(self.varnames.args), # nlocals
'|'.join(flags) or '0', # flags
Naming.empty_bytes, # code Naming.empty_bytes, # code
Naming.empty_tuple, # consts Naming.empty_tuple, # consts
Naming.empty_tuple, # names (FIXME) Naming.empty_tuple, # names (FIXME)
...@@ -7950,8 +7959,8 @@ class LocalsDictItemNode(DictItemNode): ...@@ -7950,8 +7959,8 @@ class LocalsDictItemNode(DictItemNode):
class FuncLocalsExprNode(DictNode): class FuncLocalsExprNode(DictNode):
def __init__(self, pos, env): def __init__(self, pos, env):
local_vars = [entry.name for entry in env.entries.values() local_vars = sorted([
if entry.name] entry.name for entry in env.entries.values() if entry.name])
items = [LocalsDictItemNode( items = [LocalsDictItemNode(
pos, key=IdentifierStringNode(pos, value=var), pos, key=IdentifierStringNode(pos, value=var),
value=NameNode(pos, name=var, allow_null=True)) value=NameNode(pos, name=var, allow_null=True))
...@@ -8373,6 +8382,9 @@ class TypecastNode(ExprNode): ...@@ -8373,6 +8382,9 @@ class TypecastNode(ExprNode):
"Cannot cast to a function type") "Cannot cast to a function type")
self.type = PyrexTypes.error_type self.type = PyrexTypes.error_type
self.operand = self.operand.analyse_types(env) self.operand = self.operand.analyse_types(env)
if self.type is PyrexTypes.c_bint_type:
# short circuit this to a coercion
return self.operand.coerce_to_boolean(env)
to_py = self.type.is_pyobject to_py = self.type.is_pyobject
from_py = self.operand.type.is_pyobject from_py = self.operand.type.is_pyobject
if from_py and not to_py and self.operand.is_ephemeral(): if from_py and not to_py and self.operand.is_ephemeral():
...@@ -8380,10 +8392,7 @@ class TypecastNode(ExprNode): ...@@ -8380,10 +8392,7 @@ class TypecastNode(ExprNode):
error(self.pos, "Casting temporary Python object to non-numeric non-Python type") error(self.pos, "Casting temporary Python object to non-numeric non-Python type")
if to_py and not from_py: if to_py and not from_py:
if self.type is bytes_type and self.operand.type.is_int: if self.type is bytes_type and self.operand.type.is_int:
# FIXME: the type cast node isn't needed in this case return CoerceIntToBytesNode(self.operand, env)
# and can be dropped once analyse_types() can return a
# different node
self.operand = CoerceIntToBytesNode(self.operand, env)
elif self.operand.type.can_coerce_to_pyobject(env): elif self.operand.type.can_coerce_to_pyobject(env):
self.result_ctype = py_object_type self.result_ctype = py_object_type
base_type = self.base_type.analyse(env) base_type = self.base_type.analyse(env)
...@@ -8405,7 +8414,7 @@ class TypecastNode(ExprNode): ...@@ -8405,7 +8414,7 @@ class TypecastNode(ExprNode):
else: else:
warning(self.pos, "No conversion from %s to %s, python object pointer used." % (self.type, self.operand.type)) warning(self.pos, "No conversion from %s to %s, python object pointer used." % (self.type, self.operand.type))
elif from_py and to_py: elif from_py and to_py:
if self.typecheck and self.type.is_pyobject: if self.typecheck:
self.operand = PyTypeTestNode(self.operand, self.type, env, notnone=True) self.operand = PyTypeTestNode(self.operand, self.type, env, notnone=True)
elif isinstance(self.operand, SliceIndexNode): elif isinstance(self.operand, SliceIndexNode):
# This cast can influence the created type of string slices. # This cast can influence the created type of string slices.
...@@ -9214,9 +9223,9 @@ class AddNode(NumBinopNode): ...@@ -9214,9 +9223,9 @@ class AddNode(NumBinopNode):
if type1 is unicode_type or type2 is unicode_type: if type1 is unicode_type or type2 is unicode_type:
if type1.is_builtin_type and type2.is_builtin_type: if type1.is_builtin_type and type2.is_builtin_type:
if self.operand1.may_be_none() or self.operand2.may_be_none(): if self.operand1.may_be_none() or self.operand2.may_be_none():
return '__Pyx_PyUnicode_Concat' return '__Pyx_PyUnicode_ConcatSafe'
else: else:
return 'PyUnicode_Concat' return '__Pyx_PyUnicode_Concat'
return super(AddNode, self).py_operation_function() return super(AddNode, self).py_operation_function()
......
...@@ -193,9 +193,11 @@ class ControlFlow(object): ...@@ -193,9 +193,11 @@ class ControlFlow(object):
def mark_reference(self, node, entry): def mark_reference(self, node, entry):
if self.block and self.is_tracked(entry): if self.block and self.is_tracked(entry):
self.block.stats.append(NameReference(node, entry)) self.block.stats.append(NameReference(node, entry))
# Local variable is definitely bound after this reference ## XXX: We don't track expression evaluation order so we can't use
if not node.allow_null: ## XXX: successful reference as initialization sign.
self.block.bounded.add(entry) ## # Local variable is definitely bound after this reference
## if not node.allow_null:
## self.block.bounded.add(entry)
self.entries.add(entry) self.entries.add(entry)
def normalize(self): def normalize(self):
...@@ -548,9 +550,9 @@ def check_definitions(flow, compiler_directives): ...@@ -548,9 +550,9 @@ def check_definitions(flow, compiler_directives):
references[stat.node] = stat.entry references[stat.node] = stat.entry
stat.entry.cf_references.append(stat) stat.entry.cf_references.append(stat)
stat.node.cf_state.update(state) stat.node.cf_state.update(state)
if not stat.node.allow_null: ## if not stat.node.allow_null:
i_state &= ~i_assmts.bit ## i_state &= ~i_assmts.bit
# after successful read, the state is known to be initialised ## # after successful read, the state is known to be initialised
state.discard(Uninitialized) state.discard(Uninitialized)
state.discard(Unknown) state.discard(Unknown)
for assmt in state: for assmt in state:
...@@ -798,7 +800,7 @@ class ControlFlowAnalysis(CythonTransform): ...@@ -798,7 +800,7 @@ class ControlFlowAnalysis(CythonTransform):
return node return node
def visit_AssignmentNode(self, node): def visit_AssignmentNode(self, node):
raise InternalError, "Unhandled assignment node" raise InternalError("Unhandled assignment node")
def visit_SingleAssignmentNode(self, node): def visit_SingleAssignmentNode(self, node):
self._visit(node.rhs) self._visit(node.rhs)
...@@ -1097,7 +1099,7 @@ class ControlFlowAnalysis(CythonTransform): ...@@ -1097,7 +1099,7 @@ class ControlFlowAnalysis(CythonTransform):
return node return node
def visit_LoopNode(self, node): def visit_LoopNode(self, node):
raise InternalError, "Generic loops are not supported" raise InternalError("Generic loops are not supported")
def visit_WithTargetAssignmentStatNode(self, node): def visit_WithTargetAssignmentStatNode(self, node):
self.mark_assignment(node.lhs, node.rhs) self.mark_assignment(node.lhs, node.rhs)
...@@ -1121,6 +1123,7 @@ class ControlFlowAnalysis(CythonTransform): ...@@ -1121,6 +1123,7 @@ class ControlFlowAnalysis(CythonTransform):
## XXX: links to exception handling point should be added by ## XXX: links to exception handling point should be added by
## XXX: children nodes ## XXX: children nodes
self.flow.block.add_child(entry_point) self.flow.block.add_child(entry_point)
self.flow.nextblock()
self._visit(node.body) self._visit(node.body)
self.flow.exceptions.pop() self.flow.exceptions.pop()
...@@ -1181,6 +1184,7 @@ class ControlFlowAnalysis(CythonTransform): ...@@ -1181,6 +1184,7 @@ class ControlFlowAnalysis(CythonTransform):
self.flow.block = body_block self.flow.block = body_block
## XXX: Is it still required ## XXX: Is it still required
body_block.add_child(entry_point) body_block.add_child(entry_point)
self.flow.nextblock()
self._visit(node.body) self._visit(node.body)
self.flow.exceptions.pop() self.flow.exceptions.pop()
if self.flow.loops: if self.flow.loops:
......
...@@ -309,7 +309,14 @@ class Context(object): ...@@ -309,7 +309,14 @@ class Context(object):
position = e.args[2] position = e.args[2]
encoding = e.args[0] encoding = e.args[0]
for idx, c in enumerate(open(source_filename, "rb").read()): f = open(source_filename, "rb")
try:
byte_data = f.read()
finally:
f.close()
# FIXME: make this at least a little less inefficient
for idx, c in enumerate(byte_data):
if c in (ord('\n'), '\n'): if c in (ord('\n'), '\n'):
line += 1 line += 1
column = 0 column = 0
......
...@@ -346,14 +346,43 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): ...@@ -346,14 +346,43 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
globalstate.finalize_main_c_code() globalstate.finalize_main_c_code()
f = open_new_file(result.c_file) f = open_new_file(result.c_file)
rootwriter.copyto(f) try:
rootwriter.copyto(f)
finally:
f.close()
result.c_file_generated = 1
if options.gdb_debug: if options.gdb_debug:
self._serialize_lineno_map(env, rootwriter) self._serialize_lineno_map(env, rootwriter)
f.close()
result.c_file_generated = 1
if Options.annotate or options.annotate: if Options.annotate or options.annotate:
self.annotate(rootwriter) self._generate_annotations(rootwriter, result)
rootwriter.save_annotation(result.main_source_file, result.c_file)
def _generate_annotations(self, rootwriter, result):
self.annotate(rootwriter)
rootwriter.save_annotation(result.main_source_file, result.c_file)
# if we included files, additionally generate one annotation file for each
if not self.scope.included_files:
return
search_include_file = self.scope.context.search_include_directories
target_dir = os.path.abspath(os.path.dirname(result.c_file))
for included_file in self.scope.included_files:
target_file = os.path.abspath(os.path.join(target_dir, included_file))
target_file_dir = os.path.dirname(target_file)
if not target_file_dir.startswith(target_dir):
# any other directories may not be writable => avoid trying
continue
source_file = search_include_file(included_file, "", self.pos, include=True)
if not source_file:
continue
if target_file_dir != target_dir and not os.path.exists(target_file_dir):
try:
os.makedirs(target_file_dir)
except OSError, e:
import errno
if e.errno != errno.EEXIST:
raise
rootwriter.save_annotation(source_file, target_file)
def _serialize_lineno_map(self, env, ccodewriter): def _serialize_lineno_map(self, env, ccodewriter):
tb = env.context.gdb_debug_outputwriter tb = env.context.gdb_debug_outputwriter
...@@ -382,13 +411,12 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): ...@@ -382,13 +411,12 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
self.find_referenced_modules(imported_module, module_list, modules_seen) self.find_referenced_modules(imported_module, module_list, modules_seen)
module_list.append(env) module_list.append(env)
def sort_types_by_inheritance(self, type_dict, getkey): def sort_types_by_inheritance(self, type_dict, type_order, getkey):
# copy the types into a list moving each parent type before # copy the types into a list moving each parent type before
# its first child # its first child
type_items = type_dict.items()
type_list = [] type_list = []
for i, item in enumerate(type_items): for i, key in enumerate(type_order):
key, new_entry = item new_entry = type_dict[key]
# collect all base classes to check for children # collect all base classes to check for children
hierarchy = set() hierarchy = set()
...@@ -413,43 +441,59 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): ...@@ -413,43 +441,59 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
return type_list return type_list
def sort_type_hierarchy(self, module_list, env): def sort_type_hierarchy(self, module_list, env):
vtab_dict = {} # poor developer's OrderedDict
vtabslot_dict = {} vtab_dict, vtab_dict_order = {}, []
vtabslot_dict, vtabslot_dict_order = {}, []
for module in module_list: for module in module_list:
for entry in module.c_class_entries: for entry in module.c_class_entries:
if not entry.in_cinclude: if entry.used and not entry.in_cinclude:
type = entry.type type = entry.type
if type.vtabstruct_cname: key = type.vtabstruct_cname
vtab_dict[type.vtabstruct_cname] = entry if not key:
continue
if key in vtab_dict:
# FIXME: this should *never* happen, but apparently it does
# for Cython generated utility code
from Cython.Compiler.UtilityCode import NonManglingModuleScope
assert isinstance(entry.scope, NonManglingModuleScope), str(entry.scope)
assert isinstance(vtab_dict[key].scope, NonManglingModuleScope), str(vtab_dict[key].scope)
else:
vtab_dict[key] = entry
vtab_dict_order.append(key)
all_defined_here = module is env all_defined_here = module is env
for entry in module.type_entries: for entry in module.type_entries:
if all_defined_here or entry.defined_in_pxd: if entry.used and (all_defined_here or entry.defined_in_pxd):
type = entry.type type = entry.type
if type.is_extension_type and not entry.in_cinclude: if type.is_extension_type and not entry.in_cinclude:
type = entry.type type = entry.type
vtabslot_dict[type.objstruct_cname] = entry key = type.objstruct_cname
assert key not in vtabslot_dict, key
vtabslot_dict[key] = entry
vtabslot_dict_order.append(key)
def vtabstruct_cname(entry_type): def vtabstruct_cname(entry_type):
return entry_type.vtabstruct_cname return entry_type.vtabstruct_cname
vtab_list = self.sort_types_by_inheritance( vtab_list = self.sort_types_by_inheritance(
vtab_dict, vtabstruct_cname) vtab_dict, vtab_dict_order, vtabstruct_cname)
def objstruct_cname(entry_type): def objstruct_cname(entry_type):
return entry_type.objstruct_cname return entry_type.objstruct_cname
vtabslot_list = self.sort_types_by_inheritance( vtabslot_list = self.sort_types_by_inheritance(
vtabslot_dict, objstruct_cname) vtabslot_dict, vtabslot_dict_order, objstruct_cname)
return (vtab_list, vtabslot_list) return (vtab_list, vtabslot_list)
def sort_cdef_classes(self, env): def sort_cdef_classes(self, env):
key_func = operator.attrgetter('objstruct_cname') key_func = operator.attrgetter('objstruct_cname')
entry_dict = {} entry_dict, entry_order = {}, []
for entry in env.c_class_entries: for entry in env.c_class_entries:
key = key_func(entry.type) key = key_func(entry.type)
assert key not in entry_dict assert key not in entry_dict, key
entry_dict[key] = entry entry_dict[key] = entry
entry_order.append(key)
env.c_class_entries[:] = self.sort_types_by_inheritance( env.c_class_entries[:] = self.sort_types_by_inheritance(
entry_dict, key_func) entry_dict, entry_order, key_func)
def generate_type_definitions(self, env, modules, vtab_list, vtabslot_list, code): def generate_type_definitions(self, env, modules, vtab_list, vtabslot_list, code):
# TODO: Why are these separated out? # TODO: Why are these separated out?
...@@ -1892,7 +1936,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): ...@@ -1892,7 +1936,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
env.use_utility_code(streq_utility_code) env.use_utility_code(streq_utility_code)
code.putln() code.putln()
code.putln("static char* %s_type_names[] = {" % Naming.import_star) code.putln("static char* %s_type_names[] = {" % Naming.import_star)
for name, entry in env.entries.items(): for name, entry in sorted(env.entries.items()):
if entry.is_type: if entry.is_type:
code.putln('"%s",' % name) code.putln('"%s",' % name)
code.putln("0") code.putln("0")
......
...@@ -1554,8 +1554,11 @@ class EarlyReplaceBuiltinCalls(Visitor.EnvTransform): ...@@ -1554,8 +1554,11 @@ class EarlyReplaceBuiltinCalls(Visitor.EnvTransform):
"""Replace min(a,b,...) and max(a,b,...) by explicit comparison code. """Replace min(a,b,...) and max(a,b,...) by explicit comparison code.
""" """
if len(args) <= 1: if len(args) <= 1:
# leave this to Python if len(args) == 1 and args[0].is_sequence_constructor:
return node args = args[0].args
else:
# leave this to Python
return node
cascaded_nodes = list(map(UtilNodes.ResultRefNode, args[1:])) cascaded_nodes = list(map(UtilNodes.ResultRefNode, args[1:]))
...@@ -2187,7 +2190,8 @@ class OptimizeBuiltinCalls(Visitor.MethodDispatcherTransform): ...@@ -2187,7 +2190,8 @@ class OptimizeBuiltinCalls(Visitor.MethodDispatcherTransform):
temp = None temp = None
if isinstance(types, ExprNodes.TupleNode): if isinstance(types, ExprNodes.TupleNode):
types = types.args types = types.args
arg = temp = UtilNodes.ResultRefNode(arg) if arg.is_attribute or not arg.is_simple():
arg = temp = UtilNodes.ResultRefNode(arg)
elif types.type is Builtin.type_type: elif types.type is Builtin.type_type:
types = [types] types = [types]
else: else:
......
...@@ -1970,7 +1970,7 @@ class ExpandInplaceOperators(EnvTransform): ...@@ -1970,7 +1970,7 @@ class ExpandInplaceOperators(EnvTransform):
return node, [node] return node, [node]
elif isinstance(node, ExprNodes.IndexNode): elif isinstance(node, ExprNodes.IndexNode):
if node.is_buffer_access: if node.is_buffer_access:
raise ValueError, "Buffer access" raise ValueError("Buffer access")
base, temps = side_effect_free_reference(node.base) base, temps = side_effect_free_reference(node.base)
index = LetRefNode(node.index) index = LetRefNode(node.index)
return ExprNodes.IndexNode(node.pos, base=base, index=index), temps + [index] return ExprNodes.IndexNode(node.pos, base=base, index=index), temps + [index]
...@@ -2304,7 +2304,7 @@ class CreateClosureClasses(CythonTransform): ...@@ -2304,7 +2304,7 @@ class CreateClosureClasses(CythonTransform):
if not from_closure and (self.path or inner_node): if not from_closure and (self.path or inner_node):
if not inner_node: if not inner_node:
if not node.py_cfunc_node: if not node.py_cfunc_node:
raise InternalError, "DefNode does not have assignment node" raise InternalError("DefNode does not have assignment node")
inner_node = node.py_cfunc_node inner_node = node.py_cfunc_node
inner_node.needs_self_code = False inner_node.needs_self_code = False
node.needs_outer_scope = False node.needs_outer_scope = False
......
...@@ -19,7 +19,7 @@ def dumptree(t): ...@@ -19,7 +19,7 @@ def dumptree(t):
def abort_on_errors(node): def abort_on_errors(node):
# Stop the pipeline if there are any errors. # Stop the pipeline if there are any errors.
if Errors.num_errors != 0: if Errors.num_errors != 0:
raise AbortError, "pipeline break" raise AbortError("pipeline break")
return node return node
def parse_stage_factory(context): def parse_stage_factory(context):
......
...@@ -1542,7 +1542,7 @@ class LocalScope(Scope): ...@@ -1542,7 +1542,7 @@ class LocalScope(Scope):
if entry is not None: if entry is not None:
if entry.scope is not self and entry.scope.is_closure_scope: if entry.scope is not self and entry.scope.is_closure_scope:
if hasattr(entry.scope, "scope_class"): if hasattr(entry.scope, "scope_class"):
raise InternalError, "lookup() after scope class created." raise InternalError("lookup() after scope class created.")
# The actual c fragment for the different scopes differs # The actual c fragment for the different scopes differs
# on the outside and inside, so we make a new entry # on the outside and inside, so we make a new entry
entry.in_closure = True entry.in_closure = True
......
...@@ -271,13 +271,15 @@ class TestDebugTransform(DebuggerTestCase): ...@@ -271,13 +271,15 @@ class TestDebugTransform(DebuggerTestCase):
assert 'puts' in spam_stepinto assert 'puts' in spam_stepinto
assert 'some_c_function' in spam_stepinto assert 'some_c_function' in spam_stepinto
except: except:
print open(self.debug_dest).read() f = open(self.debug_dest)
try:
print(f.read())
finally:
f.close()
raise raise
if __name__ == "__main__": if __name__ == "__main__":
import unittest import unittest
unittest.main() unittest.main()
...@@ -383,6 +383,7 @@ class SimpleAssignmentTypeInferer(object): ...@@ -383,6 +383,7 @@ class SimpleAssignmentTypeInferer(object):
if not types: if not types:
node_type = py_object_type node_type = py_object_type
else: else:
entry = node.entry
node_type = spanning_type( node_type = spanning_type(
types, entry.might_overflow, entry.pos) types, entry.might_overflow, entry.pos)
node.inferred_type = node_type node.inferred_type = node_type
...@@ -392,6 +393,7 @@ class SimpleAssignmentTypeInferer(object): ...@@ -392,6 +393,7 @@ class SimpleAssignmentTypeInferer(object):
if assmt.inferred_type is not None] if assmt.inferred_type is not None]
if not types: if not types:
return return
entry = node.entry
return spanning_type(types, entry.might_overflow, entry.pos) return spanning_type(types, entry.might_overflow, entry.pos)
def resolve_assignments(assignments): def resolve_assignments(assignments):
...@@ -404,10 +406,9 @@ class SimpleAssignmentTypeInferer(object): ...@@ -404,10 +406,9 @@ class SimpleAssignmentTypeInferer(object):
infer_name_node_type(node) infer_name_node_type(node)
# Resolve assmt # Resolve assmt
inferred_type = assmt.infer_type() inferred_type = assmt.infer_type()
done = False
assmts_resolved.add(assmt) assmts_resolved.add(assmt)
resolved.add(assmt) resolved.add(assmt)
assignments -= resolved assignments.difference_update(resolved)
return resolved return resolved
def partial_infer(assmt): def partial_infer(assmt):
...@@ -427,13 +428,11 @@ class SimpleAssignmentTypeInferer(object): ...@@ -427,13 +428,11 @@ class SimpleAssignmentTypeInferer(object):
# try to handle circular references # try to handle circular references
partials = set() partials = set()
for assmt in assignments: for assmt in assignments:
partial_types = []
if assmt in partial_assmts: if assmt in partial_assmts:
continue continue
for node in assmt_to_names[assmt]: if partial_infer(assmt):
if partial_infer(assmt): partials.add(assmt)
partials.add(assmt) assmts_resolved.add(assmt)
assmts_resolved.add(assmt)
partial_assmts.update(partials) partial_assmts.update(partials)
return partials return partials
...@@ -542,7 +541,7 @@ def safe_spanning_type(types, might_overflow, pos): ...@@ -542,7 +541,7 @@ def safe_spanning_type(types, might_overflow, pos):
return result_type return result_type
# TODO: double complex should be OK as well, but we need # TODO: double complex should be OK as well, but we need
# to make sure everything is supported. # to make sure everything is supported.
elif result_type.is_int and not might_overflow: elif (result_type.is_int or result_type.is_enum) and not might_overflow:
return result_type return result_type
return py_object_type return py_object_type
......
...@@ -35,35 +35,40 @@ def make_command_file(path_to_debug_info, prefix_code='', no_import=False): ...@@ -35,35 +35,40 @@ def make_command_file(path_to_debug_info, prefix_code='', no_import=False):
fd, tempfilename = tempfile.mkstemp() fd, tempfilename = tempfile.mkstemp()
f = os.fdopen(fd, 'w') f = os.fdopen(fd, 'w')
f.write(prefix_code) try:
f.write('set breakpoint pending on\n') f.write(prefix_code)
f.write("set print pretty on\n") f.write('set breakpoint pending on\n')
f.write('python from Cython.Debugger import libcython, libpython\n') f.write("set print pretty on\n")
f.write('python from Cython.Debugger import libcython, libpython\n')
if no_import:
# don't do this, this overrides file command in .gdbinit if no_import:
# f.write("file %s\n" % sys.executable) # don't do this, this overrides file command in .gdbinit
pass # f.write("file %s\n" % sys.executable)
else: pass
path = os.path.join(path_to_debug_info, "cython_debug", "interpreter") else:
interpreter = open(path).read() path = os.path.join(path_to_debug_info, "cython_debug", "interpreter")
f.write("file %s\n" % interpreter) interpreter_file = open(path)
f.write('\n'.join('cy import %s\n' % fn for fn in debug_files))
f.write(textwrap.dedent('''\
python
import sys
try: try:
gdb.lookup_type('PyModuleObject') interpreter = interpreter_file.read()
except RuntimeError: finally:
sys.stderr.write( interpreter_file.close()
'Python was not compiled with debug symbols (or it was ' f.write("file %s\n" % interpreter)
'stripped). Some functionality may not work (properly).\\n') f.write('\n'.join('cy import %s\n' % fn for fn in debug_files))
end f.write(textwrap.dedent('''\
python
source .cygdbinit import sys
''')) try:
gdb.lookup_type('PyModuleObject')
f.close() except RuntimeError:
sys.stderr.write(
'Python was not compiled with debug symbols (or it was '
'stripped). Some functionality may not work (properly).\\n')
end
source .cygdbinit
'''))
finally:
f.close()
return tempfilename return tempfilename
......
...@@ -35,44 +35,52 @@ build_ext = sys.modules['Cython.Distutils.build_ext'] ...@@ -35,44 +35,52 @@ build_ext = sys.modules['Cython.Distutils.build_ext']
have_gdb = None have_gdb = None
def test_gdb(): def test_gdb():
global have_gdb global have_gdb
if have_gdb is None: if have_gdb is not None:
try: return have_gdb
p = subprocess.Popen(['gdb', '-v'], stdout=subprocess.PIPE)
have_gdb = True try:
except OSError: p = subprocess.Popen(['gdb', '-v'], stdout=subprocess.PIPE)
# gdb was not installed have_gdb = True
have_gdb = False except OSError:
else: # gdb was not installed
gdb_version = p.stdout.read().decode('ascii') have_gdb = False
p.wait() else:
p.stdout.close() gdb_version = p.stdout.read().decode('ascii', 'ignore')
p.wait()
if have_gdb: p.stdout.close()
# Based on Lib/test/test_gdb.py
regex = "^GNU gdb [^\d]*(\d+)\.(\d+)" if have_gdb:
gdb_version_number = list(map(int, re.search(regex, gdb_version).groups())) # Based on Lib/test/test_gdb.py
regex = "^GNU gdb [^\d]*(\d+)\.(\d+)"
if gdb_version_number >= [7, 2]: gdb_version_number = list(map(int, re.search(regex, gdb_version).groups()))
python_version_script = tempfile.NamedTemporaryFile(mode='w+')
if gdb_version_number >= [7, 2]:
python_version_script = tempfile.NamedTemporaryFile(mode='w+')
try:
python_version_script.write( python_version_script.write(
'python import sys; print("%s %s" % sys.version_info[:2])') 'python import sys; print("%s %s" % sys.version_info[:2])')
python_version_script.flush() python_version_script.flush()
p = subprocess.Popen(['gdb', '-batch', '-x', python_version_script.name], p = subprocess.Popen(['gdb', '-batch', '-x', python_version_script.name],
stdout=subprocess.PIPE) stdout=subprocess.PIPE)
python_version = p.stdout.read().decode('ascii') try:
p.wait() python_version = p.stdout.read().decode('ascii')
p.wait()
finally:
p.stdout.close()
try: try:
python_version_number = list(map(int, python_version.split())) python_version_number = list(map(int, python_version.split()))
except ValueError: except ValueError:
have_gdb = False have_gdb = False
finally:
python_version_script.close()
# Be Python 3 compatible # Be Python 3 compatible
if (not have_gdb if (not have_gdb
or gdb_version_number < [7, 2] or gdb_version_number < [7, 2]
or python_version_number < [2, 6]): or python_version_number < [2, 6]):
warnings.warn( warnings.warn(
'Skipping gdb tests, need gdb >= 7.2 with Python >= 2.6') 'Skipping gdb tests, need gdb >= 7.2 with Python >= 2.6')
have_gdb = False have_gdb = False
return have_gdb return have_gdb
...@@ -146,6 +154,7 @@ class DebuggerTestCase(unittest.TestCase): ...@@ -146,6 +154,7 @@ class DebuggerTestCase(unittest.TestCase):
finally: finally:
optimization_disabler.restore_state() optimization_disabler.restore_state()
sys.stderr = stderr sys.stderr = stderr
new_stderr.close()
# ext = Cython.Distutils.extension.Extension( # ext = Cython.Distutils.extension.Extension(
# 'codefile', # 'codefile',
...@@ -227,60 +236,26 @@ class GdbDebuggerTestCase(DebuggerTestCase): ...@@ -227,60 +236,26 @@ class GdbDebuggerTestCase(DebuggerTestCase):
os.path.abspath(Cython.__file__)))) os.path.abspath(Cython.__file__))))
env = dict(os.environ, PYTHONPATH=os.pathsep.join(paths)) env = dict(os.environ, PYTHONPATH=os.pathsep.join(paths))
try: self.p = subprocess.Popen(
p = subprocess.Popen(['gdb', '-v'], stdout=subprocess.PIPE) args,
have_gdb = True stdout=open(os.devnull, 'w'),
except OSError: stderr=subprocess.PIPE,
# gdb was not installed env=env)
have_gdb = False
else:
gdb_version = p.stdout.read().decode('ascii')
p.wait()
p.stdout.close()
if have_gdb:
# Based on Lib/test/test_gdb.py
regex = "^GNU gdb [^\d]*(\d+)\.(\d+)"
gdb_version_number = list(map(int, re.search(regex, gdb_version).groups()))
if gdb_version_number >= [7, 2]:
python_version_script = tempfile.NamedTemporaryFile(mode='w+')
python_version_script.write(
'python import sys; print("%s %s" % sys.version_info[:2])')
python_version_script.flush()
p = subprocess.Popen(['gdb', '-batch', '-x', python_version_script.name],
stdout=subprocess.PIPE)
python_version = p.stdout.read().decode('ascii')
p.wait()
try:
python_version_number = list(map(int, python_version.split()))
except ValueError:
have_gdb = False
# Be Python 3 compatible
if (not have_gdb
or gdb_version_number < [7, 2]
or python_version_number < [2, 6]):
self.p = None
warnings.warn(
'Skipping gdb tests, need gdb >= 7.2 with Python >= 2.6')
else:
self.p = subprocess.Popen(
args,
stdout=open(os.devnull, 'w'),
stderr=subprocess.PIPE,
env=env)
def tearDown(self): def tearDown(self):
if not test_gdb(): if not test_gdb():
return return
super(GdbDebuggerTestCase, self).tearDown() try:
if self.p: super(GdbDebuggerTestCase, self).tearDown()
self.p.stderr.close() if self.p:
self.p.wait() try: self.p.stdout.close()
os.remove(self.gdb_command_file) except: pass
try: self.p.stderr.close()
except: pass
self.p.wait()
finally:
os.remove(self.gdb_command_file)
class TestAll(GdbDebuggerTestCase): class TestAll(GdbDebuggerTestCase):
...@@ -292,15 +267,15 @@ class TestAll(GdbDebuggerTestCase): ...@@ -292,15 +267,15 @@ class TestAll(GdbDebuggerTestCase):
out, err = self.p.communicate() out, err = self.p.communicate()
err = err.decode('UTF-8') err = err.decode('UTF-8')
exit_status = self.p.wait() exit_status = self.p.returncode
if exit_status == 1: if exit_status == 1:
sys.stderr.write(err) sys.stderr.write(err)
elif exit_status >= 2: elif exit_status >= 2:
border = '*' * 30 border = u'*' * 30
start = '%s v INSIDE GDB v %s' % (border, border) start = u'%s v INSIDE GDB v %s' % (border, border)
end = '%s ^ INSIDE GDB ^ %s' % (border, border) end = u'%s ^ INSIDE GDB ^ %s' % (border, border)
errmsg = '\n%s\n%s%s' % (start, err, end) errmsg = u'\n%s\n%s%s' % (start, err, end)
sys.stderr.write(errmsg) sys.stderr.write(errmsg)
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
# #
# Author: Lars Buitinck # Author: Lars Buitinck
cdef extern from "numpy/npy_math.h": cdef extern from "numpy/npy_math.h" nogil:
# Floating-point classification # Floating-point classification
long double NAN "NPY_NAN" long double NAN "NPY_NAN"
long double INFINITY "NPY_INFINITY" long double INFINITY "NPY_INFINITY"
...@@ -30,8 +30,6 @@ cdef extern from "numpy/npy_math.h": ...@@ -30,8 +30,6 @@ cdef extern from "numpy/npy_math.h":
bint isnan "npy_isnan"(long double) bint isnan "npy_isnan"(long double)
bint signbit "npy_signbit"(long double) bint signbit "npy_signbit"(long double)
double copysign "npy_copysign"(double, double)
# Math constants # Math constants
long double E "NPY_E" long double E "NPY_E"
long double LOG2E "NPY_LOG2E" # ln(e) / ln(2) long double LOG2E "NPY_LOG2E" # ln(e) / ln(2)
...@@ -46,5 +44,90 @@ cdef extern from "numpy/npy_math.h": ...@@ -46,5 +44,90 @@ cdef extern from "numpy/npy_math.h":
long double EULER "NPY_EULER" # Euler constant (gamma, 0.57721) long double EULER "NPY_EULER" # Euler constant (gamma, 0.57721)
# Low-level floating point manipulation (NumPy >=1.4) # Low-level floating point manipulation (NumPy >=1.4)
float copysignf "npy_copysignf"(float, float)
float nextafterf "npy_nextafterf"(float x, float y)
float spacingf "npy_spacingf"(float x)
double copysign "npy_copysign"(double, double)
double nextafter "npy_nextafter"(double x, double y) double nextafter "npy_nextafter"(double x, double y)
double spacing "npy_spacing"(double x) double spacing "npy_spacing"(double x)
long double copysignl "npy_copysignl"(long double, long double)
long double nextafterl "npy_nextafterl"(long double x, long double y)
long double spacingl "npy_spacingl"(long double x)
# Float C99 functions
float sinf "npy_sinf"(float x)
float cosf "npy_cosf"(float x)
float tanf "npy_tanf"(float x)
float sinhf "npy_sinhf"(float x)
float coshf "npy_coshf"(float x)
float tanhf "npy_tanhf"(float x)
float fabsf "npy_fabsf"(float x)
float floorf "npy_floorf"(float x)
float ceilf "npy_ceilf"(float x)
float rintf "npy_rintf"(float x)
float sqrtf "npy_sqrtf"(float x)
float log10f "npy_log10f"(float x)
float logf "npy_logf"(float x)
float expf "npy_expf"(float x)
float expm1f "npy_expm1f"(float x)
float asinf "npy_asinf"(float x)
float acosf "npy_acosf"(float x)
float atanf "npy_atanf"(float x)
float asinhf "npy_asinhf"(float x)
float acoshf "npy_acoshf"(float x)
float atanhf "npy_atanhf"(float x)
float log1pf "npy_log1pf"(float x)
float exp2f "npy_exp2f"(float x)
float log2f "npy_log2f"(float x)
float atan2f "npy_atan2f"(float x)
float hypotf "npy_hypotf"(float x)
float powf "npy_powf"(float x)
float fmodf "npy_fmodf"(float x)
float modff "npy_modff"(float x)
# Long double C99 functions
long double sinl "npy_sinl"(long double x)
long double cosl "npy_cosl"(long double x)
long double tanl "npy_tanl"(long double x)
long double sinhl "npy_sinhl"(long double x)
long double coshl "npy_coshl"(long double x)
long double tanhl "npy_tanhl"(long double x)
long double fabsl "npy_fabsl"(long double x)
long double floorl "npy_floorl"(long double x)
long double ceill "npy_ceill"(long double x)
long double rintl "npy_rintl"(long double x)
long double sqrtl "npy_sqrtl"(long double x)
long double log10l "npy_log10l"(long double x)
long double logl "npy_logl"(long double x)
long double expl "npy_expl"(long double x)
long double expm1l "npy_expm1l"(long double x)
long double asinl "npy_asinl"(long double x)
long double acosl "npy_acosl"(long double x)
long double atanl "npy_atanl"(long double x)
long double asinhl "npy_asinhl"(long double x)
long double acoshl "npy_acoshl"(long double x)
long double atanhl "npy_atanhl"(long double x)
long double log1pl "npy_log1pl"(long double x)
long double exp2l "npy_exp2l"(long double x)
long double log2l "npy_log2l"(long double x)
long double atan2l "npy_atan2l"(long double x)
long double hypotl "npy_hypotl"(long double x)
long double powl "npy_powl"(long double x)
long double fmodl "npy_fmodl"(long double x)
long double modfl "npy_modfl"(long double x)
# NumPy extensions
float deg2radf "npy_deg2radf"(float x)
float rad2degf "npy_rad2degf"(float x)
float logaddexpf "npy_logaddexpf"(float x)
float logaddexp2f "npy_logaddexp2f"(float x)
double deg2rad "npy_deg2rad"(double x)
double rad2deg "npy_rad2deg"(double x)
double logaddexp "npy_logaddexp"(double x)
double logaddexp2 "npy_logaddexp2"(double x)
long double deg2radl "npy_deg2radl"(long double x)
long double rad2degl "npy_rad2degl"(long double x)
long double logaddexpl "npy_logaddexpl"(long double x)
long double logaddexp2l "npy_logaddexp2l"(long double x)
# cython.* namespace for pure mode. # cython.* namespace for pure mode.
__version__ = "0.20" __version__ = "0.20.post0"
# BEGIN shameless copy from Cython/minivect/minitypes.py # BEGIN shameless copy from Cython/minivect/minitypes.py
......
...@@ -8,13 +8,15 @@ import unittest ...@@ -8,13 +8,15 @@ import unittest
import os, sys import os, sys
import tempfile import tempfile
class NodeTypeWriter(TreeVisitor): class NodeTypeWriter(TreeVisitor):
def __init__(self): def __init__(self):
super(NodeTypeWriter, self).__init__() super(NodeTypeWriter, self).__init__()
self._indents = 0 self._indents = 0
self.result = [] self.result = []
def visit_Node(self, node): def visit_Node(self, node):
if len(self.access_path) == 0: if not self.access_path:
name = u"(root)" name = u"(root)"
else: else:
tip = self.access_path[-1] tip = self.access_path[-1]
...@@ -29,6 +31,7 @@ class NodeTypeWriter(TreeVisitor): ...@@ -29,6 +31,7 @@ class NodeTypeWriter(TreeVisitor):
self.visitchildren(node) self.visitchildren(node)
self._indents -= 1 self._indents -= 1
def treetypes(root): def treetypes(root):
"""Returns a string representing the tree by class names. """Returns a string representing the tree by class names.
There's a leading and trailing whitespace so that it can be There's a leading and trailing whitespace so that it can be
...@@ -38,6 +41,7 @@ def treetypes(root): ...@@ -38,6 +41,7 @@ def treetypes(root):
w.visit(root) w.visit(root)
return u"\n".join([u""] + w.result + [u""]) return u"\n".join([u""] + w.result + [u""])
class CythonTest(unittest.TestCase): class CythonTest(unittest.TestCase):
def setUp(self): def setUp(self):
...@@ -110,6 +114,7 @@ class CythonTest(unittest.TestCase): ...@@ -110,6 +114,7 @@ class CythonTest(unittest.TestCase):
except: except:
self.fail(str(sys.exc_info()[1])) self.fail(str(sys.exc_info()[1]))
class TransformTest(CythonTest): class TransformTest(CythonTest):
""" """
Utility base class for transform unit tests. It is based around constructing Utility base class for transform unit tests. It is based around constructing
...@@ -134,7 +139,6 @@ class TransformTest(CythonTest): ...@@ -134,7 +139,6 @@ class TransformTest(CythonTest):
Plans: One could have a pxd dictionary parameter to run_pipeline. Plans: One could have a pxd dictionary parameter to run_pipeline.
""" """
def run_pipeline(self, pipeline, pyx, pxds={}): def run_pipeline(self, pipeline, pyx, pxds={}):
tree = self.fragment(pyx, pxds).root tree = self.fragment(pyx, pxds).root
# Run pipeline # Run pipeline
...@@ -166,6 +170,7 @@ class TreeAssertVisitor(VisitorTransform): ...@@ -166,6 +170,7 @@ class TreeAssertVisitor(VisitorTransform):
visit_Node = VisitorTransform.recurse_to_children visit_Node = VisitorTransform.recurse_to_children
def unpack_source_tree(tree_file, dir=None): def unpack_source_tree(tree_file, dir=None):
if dir is None: if dir is None:
dir = tempfile.mkdtemp() dir = tempfile.mkdtemp()
...@@ -176,21 +181,24 @@ def unpack_source_tree(tree_file, dir=None): ...@@ -176,21 +181,24 @@ def unpack_source_tree(tree_file, dir=None):
lines = f.readlines() lines = f.readlines()
finally: finally:
f.close() f.close()
f = None del f
for line in lines: try:
if line[:5] == '#####': for line in lines:
filename = line.strip().strip('#').strip().replace('/', os.path.sep) if line[:5] == '#####':
path = os.path.join(dir, filename) filename = line.strip().strip('#').strip().replace('/', os.path.sep)
if not os.path.exists(os.path.dirname(path)): path = os.path.join(dir, filename)
os.makedirs(os.path.dirname(path)) if not os.path.exists(os.path.dirname(path)):
if cur_file is not None: os.makedirs(os.path.dirname(path))
cur_file.close() if cur_file is not None:
cur_file = open(path, 'w') f, cur_file = cur_file, None
elif cur_file is not None: f.close()
cur_file.write(line) cur_file = open(path, 'w')
elif line.strip() and not line.lstrip().startswith('#'): elif cur_file is not None:
if line.strip() not in ('"""', "'''"): cur_file.write(line)
header.append(line) elif line.strip() and not line.lstrip().startswith('#'):
if cur_file is not None: if line.strip() not in ('"""', "'''"):
cur_file.close() header.append(line)
finally:
if cur_file is not None:
cur_file.close()
return dir, ''.join(header) return dir, ''.join(header)
...@@ -222,14 +222,14 @@ class _XMLTestResult(_TextTestResult): ...@@ -222,14 +222,14 @@ class _XMLTestResult(_TextTestResult):
testsuite.setAttribute('name', str(suite_name)) testsuite.setAttribute('name', str(suite_name))
testsuite.setAttribute('tests', str(len(tests))) testsuite.setAttribute('tests', str(len(tests)))
testsuite.setAttribute('time', '%.3f' % \ testsuite.setAttribute('time', '%.3f' %
sum(map(lambda e: e.get_elapsed_time(), tests))) sum([e.get_elapsed_time() for e in tests]))
failures = filter(lambda e: e.outcome==_TestInfo.FAILURE, tests) failures = len([1 for e in tests if e.outcome == _TestInfo.FAILURE])
testsuite.setAttribute('failures', str(len(failures))) testsuite.setAttribute('failures', str(failures))
errors = filter(lambda e: e.outcome==_TestInfo.ERROR, tests) errors = len([1 for e in tests if e.outcome == _TestInfo.ERROR])
testsuite.setAttribute('errors', str(len(errors))) testsuite.setAttribute('errors', str(errors))
return testsuite return testsuite
......
...@@ -593,8 +593,11 @@ __pyx_buffmt_parse_array(__Pyx_BufFmt_Context* ctx, const char** tsp) ...@@ -593,8 +593,11 @@ __pyx_buffmt_parse_array(__Pyx_BufFmt_Context* ctx, const char** tsp)
/* Parse all numbers in the format string */ /* Parse all numbers in the format string */
while (*ts && *ts != ')') { while (*ts && *ts != ')') {
if (isspace(*ts)) // ignore space characters (not using isspace() due to C/C++ problem on MacOS-X)
continue; switch (*ts) {
case ' ': case '\f': case '\r': case '\n': case '\t': case '\v': continue;
default: break; /* not a 'break' in the loop */
}
number = __Pyx_BufFmt_ExpectNumber(&ts); number = __Pyx_BufFmt_ExpectNumber(&ts);
if (number == -1) return NULL; if (number == -1) return NULL;
......
...@@ -21,7 +21,6 @@ ...@@ -21,7 +21,6 @@
typedef struct { typedef struct {
PyCFunctionObject func; PyCFunctionObject func;
int flags;
PyObject *func_dict; PyObject *func_dict;
PyObject *func_weakreflist; PyObject *func_weakreflist;
PyObject *func_name; PyObject *func_name;
...@@ -35,6 +34,7 @@ typedef struct { ...@@ -35,6 +34,7 @@ typedef struct {
/* Dynamic default args and annotations */ /* Dynamic default args and annotations */
void *defaults; void *defaults;
int defaults_pyobjects; int defaults_pyobjects;
int flags;
/* Defaults info */ /* Defaults info */
PyObject *defaults_tuple; /* Const defaults tuple */ PyObject *defaults_tuple; /* Const defaults tuple */
......
...@@ -108,7 +108,7 @@ ...@@ -108,7 +108,7 @@
#if PY_MAJOR_VERSION < 3 #if PY_MAJOR_VERSION < 3
#define __Pyx_BUILTIN_MODULE_NAME "__builtin__" #define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \
PyCode_New(a, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
#define __Pyx_DefaultClassType PyClass_Type #define __Pyx_DefaultClassType PyClass_Type
#else #else
#define __Pyx_BUILTIN_MODULE_NAME "builtins" #define __Pyx_BUILTIN_MODULE_NAME "builtins"
...@@ -161,10 +161,17 @@ ...@@ -161,10 +161,17 @@
#define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i]))
#endif #endif
#if CYTHON_COMPILING_IN_PYPY
#define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b)
#define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b)
#else
#define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b)
#define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ? \
PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b))
#endif
#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) #define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b))
#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) #define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b))
#define __Pyx_PyUnicode_Concat(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ? \
PyNumber_Add(a, b) : PyUnicode_Concat(a, b))
#if PY_MAJOR_VERSION >= 3 #if PY_MAJOR_VERSION >= 3
#define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b)
...@@ -240,7 +247,7 @@ ...@@ -240,7 +247,7 @@
#define PyBoolObject PyLongObject #define PyBoolObject PyLongObject
#endif #endif
#if PY_VERSION_HEX < 0x03020000 #if PY_VERSION_HEX < 0x030200A4
typedef long Py_hash_t; typedef long Py_hash_t;
#define __Pyx_PyInt_FromHash_t PyInt_FromLong #define __Pyx_PyInt_FromHash_t PyInt_FromLong
#define __Pyx_PyInt_AsHash_t PyInt_AsLong #define __Pyx_PyInt_AsHash_t PyInt_AsLong
......
...@@ -1029,7 +1029,8 @@ static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { ...@@ -1029,7 +1029,8 @@ static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) {
/////////////// PyObjectLookupSpecial.proto /////////////// /////////////// PyObjectLookupSpecial.proto ///////////////
//@requires: PyObjectGetAttrStr //@requires: PyObjectGetAttrStr
#if CYTHON_COMPILING_IN_CPYTHON #if CYTHON_COMPILING_IN_CPYTHON && (PY_VERSION_HEX >= 0x03020000 || PY_MAJOR_VERSION < 3 && PY_VERSION_HEX >= 0x02070000)
// looks like calling _PyType_Lookup() isn't safe in Py<=2.6/3.1
static CYTHON_INLINE PyObject* __Pyx_PyObject_LookupSpecial(PyObject* obj, PyObject* attr_name) { static CYTHON_INLINE PyObject* __Pyx_PyObject_LookupSpecial(PyObject* obj, PyObject* attr_name) {
PyObject *res; PyObject *res;
PyTypeObject *tp = Py_TYPE(obj); PyTypeObject *tp = Py_TYPE(obj);
...@@ -1052,7 +1053,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_LookupSpecial(PyObject* obj, PyObj ...@@ -1052,7 +1053,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_LookupSpecial(PyObject* obj, PyObj
return res; return res;
} }
#else #else
#define __Pyx_PyObject_LookupSpecial(o,n) PyObject_GetAttr(o,n) #define __Pyx_PyObject_LookupSpecial(o,n) __Pyx_PyObject_GetAttrStr(o,n)
#endif #endif
/////////////// PyObjectGetAttrStr.proto /////////////// /////////////// PyObjectGetAttrStr.proto ///////////////
...@@ -1093,6 +1094,7 @@ static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr ...@@ -1093,6 +1094,7 @@ static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr
/////////////// PyObjectCallMethod.proto /////////////// /////////////// PyObjectCallMethod.proto ///////////////
//@requires: PyObjectGetAttrStr //@requires: PyObjectGetAttrStr
//@requires: PyObjectCall
//@substitute: naming //@substitute: naming
static PyObject* __Pyx_PyObject_CallMethodTuple(PyObject* obj, PyObject* method_name, PyObject* args) { static PyObject* __Pyx_PyObject_CallMethodTuple(PyObject* obj, PyObject* method_name, PyObject* args) {
...@@ -1100,7 +1102,7 @@ static PyObject* __Pyx_PyObject_CallMethodTuple(PyObject* obj, PyObject* method_ ...@@ -1100,7 +1102,7 @@ static PyObject* __Pyx_PyObject_CallMethodTuple(PyObject* obj, PyObject* method_
if (unlikely(!args)) return NULL; if (unlikely(!args)) return NULL;
method = __Pyx_PyObject_GetAttrStr(obj, method_name); method = __Pyx_PyObject_GetAttrStr(obj, method_name);
if (unlikely(!method)) goto bad; if (unlikely(!method)) goto bad;
result = PyObject_Call(method, args, NULL); result = __Pyx_PyObject_Call(method, args, NULL);
Py_DECREF(method); Py_DECREF(method);
bad: bad:
Py_DECREF(args); Py_DECREF(args);
...@@ -1123,3 +1125,38 @@ bad: ...@@ -1123,3 +1125,38 @@ bad:
static CYTHON_INLINE PyObject* __Pyx_tp_new_kwargs(PyObject* type_obj, PyObject* args, PyObject* kwargs) { static CYTHON_INLINE PyObject* __Pyx_tp_new_kwargs(PyObject* type_obj, PyObject* args, PyObject* kwargs) {
return (PyObject*) (((PyTypeObject*)type_obj)->tp_new((PyTypeObject*)type_obj, args, kwargs)); return (PyObject*) (((PyTypeObject*)type_obj)->tp_new((PyTypeObject*)type_obj, args, kwargs));
} }
/////////////// PyObjectCall.proto ///////////////
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); /*proto*/
#else
#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw)
#endif
/////////////// PyObjectCall ///////////////
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) {
PyObject *result;
ternaryfunc call = func->ob_type->tp_call;
if (unlikely(!call))
return PyObject_Call(func, arg, kw);
#if PY_VERSION_HEX >= 0x02060000
if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
return NULL;
#endif
result = (*call)(func, arg, kw);
#if PY_VERSION_HEX >= 0x02060000
Py_LeaveRecursiveCall();
#endif
if (unlikely(!result) && unlikely(!PyErr_Occurred())) {
PyErr_SetString(
PyExc_SystemError,
"NULL result without error in PyObject_Call");
}
return result;
}
#endif
...@@ -47,10 +47,16 @@ static int __Pyx_Print(PyObject* f, PyObject *arg_tuple, int newline) { ...@@ -47,10 +47,16 @@ static int __Pyx_Print(PyObject* f, PyObject *arg_tuple, int newline) {
if (PyString_Check(v)) { if (PyString_Check(v)) {
char *s = PyString_AsString(v); char *s = PyString_AsString(v);
Py_ssize_t len = PyString_Size(v); Py_ssize_t len = PyString_Size(v);
if (len > 0 && if (len > 0) {
isspace(Py_CHARMASK(s[len-1])) && // append soft-space if necessary (not using isspace() due to C/C++ problem on MacOS-X)
s[len-1] != ' ') switch (s[len-1]) {
PyFile_SoftSpace(f, 0); case ' ': break;
case '\f': case '\r': case '\n': case '\t': case '\v':
PyFile_SoftSpace(f, 0);
break;
default: break;
}
}
} }
} }
if (newline) { if (newline) {
......
...@@ -206,11 +206,13 @@ static CYTHON_INLINE char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_ ...@@ -206,11 +206,13 @@ static CYTHON_INLINE char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_
} else } else
#endif /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT */ #endif /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT */
#if !CYTHON_COMPILING_IN_PYPY
#if PY_VERSION_HEX >= 0x02060000 #if PY_VERSION_HEX >= 0x02060000
if (PyByteArray_Check(o)) { if (PyByteArray_Check(o)) {
*length = PyByteArray_GET_SIZE(o); *length = PyByteArray_GET_SIZE(o);
return PyByteArray_AS_STRING(o); return PyByteArray_AS_STRING(o);
} else } else
#endif
#endif #endif
{ {
char* result; char* result;
......
...@@ -398,7 +398,7 @@ Globally ...@@ -398,7 +398,7 @@ Globally
One can set compiler directives through a special header comment at the top of the file, like this:: One can set compiler directives through a special header comment at the top of the file, like this::
#!python #!python
#cython: boundscheck=False #cython: language_level=3, boundscheck=False
The comment must appear before any code (but can appear after other The comment must appear before any code (but can appear after other
comments or whitespace). comments or whitespace).
...@@ -426,7 +426,8 @@ statement, like this:: ...@@ -426,7 +426,8 @@ statement, like this::
@cython.boundscheck(False) # turn off boundscheck for this function @cython.boundscheck(False) # turn off boundscheck for this function
def f(): def f():
... ...
with cython.boundscheck(True): # turn it temporarily on again for this block # turn it temporarily on again for this block
with cython.boundscheck(True):
... ...
.. Warning:: These two methods of setting directives are **not** .. Warning:: These two methods of setting directives are **not**
......
This diff is collapsed.
This diff is collapsed.
...@@ -61,11 +61,10 @@ import a, b, c ...@@ -61,11 +61,10 @@ import a, b, c
######## fake_grep.py ######## ######## fake_grep.py ########
import platform
import re import re
import sys import sys
if platform == 'Windows': if sys.platform == 'win32':
opt, pattern, file = sys.argv[1:] opt, pattern, file = sys.argv[1:]
assert opt == '-c' assert opt == '-c'
count = 0 count = 0
......
...@@ -175,6 +175,7 @@ def test_cdef_attribute(): ...@@ -175,6 +175,7 @@ def test_cdef_attribute():
>>> test_cdef_attribute() >>> test_cdef_attribute()
Memoryview is not initialized Memoryview is not initialized
local variable 'myview' referenced before assignment local variable 'myview' referenced before assignment
local variable 'myview' referenced before assignment
get_ext_obj called get_ext_obj called
Memoryview is not initialized Memoryview is not initialized
<MemoryView of 'array' object> <MemoryView of 'array' object>
...@@ -195,8 +196,11 @@ def test_cdef_attribute(): ...@@ -195,8 +196,11 @@ def test_cdef_attribute():
else: else:
print "No UnboundLocalError was raised" print "No UnboundLocalError was raised"
# uninitialized assignment is valid cdef int[:] otherview
cdef int[:] otherview = myview try:
otherview = myview
except UnboundLocalError, e:
print e.args[0]
try: try:
print get_ext_obj().mview print get_ext_obj().mview
......
...@@ -406,7 +406,7 @@ cdef class DeallocateMe(object): ...@@ -406,7 +406,7 @@ cdef class DeallocateMe(object):
# Disabled! References cycles don't seem to be supported by NumPy # Disabled! References cycles don't seem to be supported by NumPy
# @testcase # @testcase
def acquire_release_cycle(obj): def acquire_release_cycle(obj):
""" DISABLED_DOCSTRING = """
>>> a = np.arange(20, dtype=np.object) >>> a = np.arange(20, dtype=np.object)
>>> a[10] = DeallocateMe() >>> a[10] = DeallocateMe()
>>> acquire_release_cycle(a) >>> acquire_release_cycle(a)
......
...@@ -20,6 +20,40 @@ def min3(a,b,c): ...@@ -20,6 +20,40 @@ def min3(a,b,c):
""" """
return min(a,b,c) return min(a,b,c)
# The decorators assert the compiled tree: min() over a list literal must be
# lowered to inline conditional expressions (a CondExprNode must exist and no
# SimpleCallNode may remain), so a restructured body would defeat this test.
@cython.test_assert_path_exists("//CondExprNode")
@cython.test_fail_if_path_exists("//SimpleCallNode")
def min3_list(a,b,c):
    """
    >>> min3_list(1,2,3)
    1
    >>> min3_list(2,3,1)
    1
    >>> min3_list(2,1,3)
    1
    >>> min3_list(3,1,2)
    1
    >>> min3_list(3,2,1)
    1
    """
    return min([a,b,c])
# Same assertion as the list variant: min() over a tuple literal must compile
# to conditional expressions (CondExprNode present, no SimpleCallNode left).
@cython.test_assert_path_exists("//CondExprNode")
@cython.test_fail_if_path_exists("//SimpleCallNode")
def min3_tuple(a,b,c):
    """
    >>> min3_tuple(1,2,3)
    1
    >>> min3_tuple(2,3,1)
    1
    >>> min3_tuple(2,1,3)
    1
    >>> min3_tuple(3,1,2)
    1
    >>> min3_tuple(3,2,1)
    1
    """
    return min((a,b,c))
@cython.test_assert_path_exists("//CondExprNode") @cython.test_assert_path_exists("//CondExprNode")
@cython.test_fail_if_path_exists("//SimpleCallNode") @cython.test_fail_if_path_exists("//SimpleCallNode")
def min3_typed(int a, int b, int c): def min3_typed(int a, int b, int c):
......
...@@ -199,7 +199,7 @@ def bytearray_decode_unbound_method(bytearray s, start=None, stop=None): ...@@ -199,7 +199,7 @@ def bytearray_decode_unbound_method(bytearray s, start=None, stop=None):
return bytearray.decode(s[start:stop], 'utf8') return bytearray.decode(s[start:stop], 'utf8')
def bytearray_append(bytearray b, char c, int i, object o): def bytearray_append(bytearray b, signed char c, int i, object o):
""" """
>>> b = bytearray(b'abc') >>> b = bytearray(b'abc')
>>> b = bytearray_append(b, ord('x'), ord('y'), ord('z')) >>> b = bytearray_append(b, ord('x'), ord('y'), ord('z'))
......
...@@ -4,6 +4,56 @@ ...@@ -4,6 +4,56 @@
import sys import sys
IS_PY3 = sys.version_info[0] >= 3 IS_PY3 = sys.version_info[0] >= 3
IS_PY34 = sys.version_info > (3, 4, 0, 'beta', 3)
def inspect_isroutine():
"""
>>> inspect_isroutine()
True
"""
import inspect
return inspect.isroutine(inspect_isroutine)
def inspect_isfunction():
"""
>>> inspect_isfunction()
False
False
"""
import inspect, types
print isinstance(inspect_isfunction, types.FunctionType)
return inspect.isfunction(inspect_isfunction)
def inspect_isbuiltin():
"""
>>> inspect_isbuiltin()
False
False
"""
import inspect, types
print isinstance(inspect_isfunction, types.BuiltinFunctionType)
return inspect.isbuiltin(inspect_isbuiltin)
def inspect_signature(a, b, c=123, *, d=234):
"""
>>> sig = inspect_signature(1, 2)
>>> if IS_PY34: list(sig.parameters)
... else: ['a', 'b', 'c', 'd']
['a', 'b', 'c', 'd']
>>> if IS_PY34: sig.parameters['c'].default == 123
... else: True
True
>>> if IS_PY34: sig.parameters['d'].default == 234
... else: True
True
"""
import inspect
return inspect.signature(inspect_signature) if IS_PY34 else None
def test_dict(): def test_dict():
""" """
...@@ -42,6 +92,19 @@ def test_doc(): ...@@ -42,6 +92,19 @@ def test_doc():
'docstring' 'docstring'
""" """
def test_hash():
"""
>>> d = {test_hash: 123}
>>> test_hash in d
True
>>> d[test_hash]
123
>>> hash(test_hash) == hash(test_hash)
True
"""
def test_closure(): def test_closure():
""" """
>>> test_closure.func_closure is None >>> test_closure.func_closure is None
......
...@@ -4,6 +4,8 @@ import sys ...@@ -4,6 +4,8 @@ import sys
if sys.version_info >= (3, 4): if sys.version_info >= (3, 4):
def funcdoc(f): def funcdoc(f):
if not f.__text_signature__:
return f.__doc__
doc = '%s%s' % (f.__name__, f.__text_signature__) doc = '%s%s' % (f.__name__, f.__text_signature__)
if f.__doc__: if f.__doc__:
if '\n' in f.__doc__: if '\n' in f.__doc__:
......
__doc__ = u""" __doc__ = u"""
>>> D >>> D
2 2
>>> XYZ
5
""" """
D = 1 D = 1
include "testinclude.pxi" include "testinclude.pxi"
include "includes/includefile.pxi"
# this file will be included
XYZ = 5
...@@ -286,6 +286,11 @@ def conditional_none(int a): ...@@ -286,6 +286,11 @@ def conditional_none(int a):
""" """
return None if a in {1,2,3,4} else 1 return None if a in {1,2,3,4} else 1
@cython.test_assert_path_exists(
"//BoolBinopNode",
"//BoolBinopNode//PrimaryCmpNode"
)
@cython.test_fail_if_path_exists("//ListNode")
def n(a): def n(a):
""" """
>>> n('d *') >>> n('d *')
......
# cython: binding=True, language_level=3
# mode: run
# tag: cyfunction
import inspect
sig = inspect.Signature.from_function
def signatures_match(f1, f2):
    """Compare the introspected signatures of two functions.

    Returns None when they are equal (so a doctest shows no output),
    otherwise returns both signatures for display.
    """
    s1 = sig(f1)
    s2 = sig(f2)
    if s1 == s2:
        return None  # nothing to show in doctest
    return s1, s2
def b(a, b, c):
    """
    >>> def py_b(a, b, c): pass
    >>> signatures_match(b, py_b)
    """
    # Empty body on purpose: only the introspected signature (three plain
    # positional parameters) is under test here.
def c(a, b, c=1):
"""
>>> def py_c(a, b, c=1): pass
>>> signatures_match(c, py_c)
"""
def d(a, b, *, c = 88):
"""
>>> def py_d(a, b, *, c = 88): pass
>>> signatures_match(d, py_d)
"""
def e(a, b, c = 88, **kwds):
"""
>>> def py_e(a, b, c = 88, **kwds): pass
>>> signatures_match(e, py_e)
"""
def f(a, b, *, c, d = 42):
"""
>>> def py_f(a, b, *, c, d = 42): pass
>>> signatures_match(f, py_f)
"""
def g(a, b, *, c, d = 42, e = 17, f, **kwds):
"""
>>> def py_g(a, b, *, c, d = 42, e = 17, f, **kwds): pass
>>> signatures_match(g, py_g)
"""
def h(a, b, *args, c, d = 42, e = 17, f, **kwds):
"""
>>> def py_h(a, b, *args, c, d = 42, e = 17, f, **kwds): pass
>>> signatures_match(h, py_h)
"""
def k(a, b, c=1, *args, d = 42, e = 17, f, **kwds):
"""
>>> def py_k(a, b, c=1, *args, d = 42, e = 17, f, **kwds): pass
>>> signatures_match(k, py_k)
"""
def l(*, a, b, c = 88):
"""
>>> def py_l(*, a, b, c = 88): pass
>>> signatures_match(l, py_l)
"""
def m(a, *, b, c = 88):
"""
>>> def py_m(a, *, b, c = 88): pass
>>> signatures_match(m, py_m)
"""
a, b, c = b, c, a
def n(a, *, b, c = 88):
"""
>>> def py_n(a, *, b, c = 88): pass
>>> signatures_match(n, py_n)
"""
...@@ -647,3 +647,27 @@ def self_lookup(a): ...@@ -647,3 +647,27 @@ def self_lookup(a):
def bar(foo): def bar(foo):
qux = foo qux = foo
quux = foo[qux.baz] quux = foo[qux.baz]
cdef enum MyEnum:
enum_x = 1
enum_y = 2
cdef class InferInProperties:
    """
    >>> InferInProperties().x
    ('double', 'unicode object', 'MyEnum', 'MyEnum')
    """
    # attribute typed with the module-level cdef enum MyEnum
    cdef MyEnum attr
    def __cinit__(self):
        self.attr = enum_x
    property x:
        def __get__(self):
            # type inference inside a property getter:
            a = 1.0          # inferred as C double
            b = u'abc'       # inferred as unicode object
            c = self.attr    # inferred from the typed attribute -> MyEnum
            d = enum_y       # inferred from the enum value -> MyEnum
            c = d            # re-assignment between same inferred enum type
            return typeof(a), typeof(b), typeof(c), typeof(d)
...@@ -26,6 +26,37 @@ def test_object_assmt(): ...@@ -26,6 +26,37 @@ def test_object_assmt():
assert typeof(a) == "Python object", typeof(a) assert typeof(a) == "Python object", typeof(a)
assert typeof(b) == "long", typeof(b) assert typeof(b) == "long", typeof(b)
class RAdd(object):
    """Helper that captures the operand of a reflected add for display."""

    other = None

    def __repr__(self):
        return '%s(%s)' % (type(self).__name__, self._other)

    def __radd__(self, operand):
        # remember what we were added to, then return ourselves so the
        # result of ``x + RAdd()`` is this object
        self._other = operand
        return self
def test_inplace_assignment():
    """
    >>> test_inplace_assignment()
    RAdd([1, 2, 3])
    """
    # Regression for in-place assignments to variables with inferred
    # builtin types (see changelog): ``l`` is inferred as list, but
    # ``+=`` dispatches to RAdd.__radd__ and the result is a plain object.
    l = [1, 2, 3]
    # inferred type of l is list, but assignment result is object
    l += RAdd()
    return l
def test_reassignment():
    """
    >>> test_reassignment()
    (1, 2, 3)
    """
    # ``l`` is first inferred from a list literal, then rebound to a tuple;
    # the reassignment with an incompatible Python type must still compile
    # and return the tuple unchanged.
    l = [1, 2, 3]
    l = (1, 2, 3)
    return l
def test_long_vs_double(cond): def test_long_vs_double(cond):
""" """
>>> test_long_vs_double(0) >>> test_long_vs_double(0)
......
...@@ -129,3 +129,51 @@ def test_class(cond): ...@@ -129,3 +129,51 @@ def test_class(cond):
class A: class A:
x = 1 x = 1
return A.x return A.x
def test_try_except_regression(c):
    """
    >>> test_try_except_regression(True)
    (123,)
    >>> test_try_except_regression(False)
    Traceback (most recent call last):
    ...
    UnboundLocalError: local variable 'a' referenced before assignment
    """
    # Control-flow regression: ``a`` is only conditionally bound, so both
    # the try body and the except handler must raise UnboundLocalError
    # (not crash or return garbage) when ``c`` is false.
    if c:
        a = (123,)
    try:
        return a
    except:
        return a
def test_try_finally_regression(c):
    """
    >>> test_try_finally_regression(True)
    (123,)
    >>> test_try_finally_regression(False)
    Traceback (most recent call last):
    ...
    UnboundLocalError: local variable 'a' referenced before assignment
    """
    # Same conditional-binding pattern as the try/except case, but the
    # second read of the possibly-unbound ``a`` happens in a finally
    # clause that itself returns.
    if c:
        a = (123,)
    try:
        return a
    finally:
        return a
def test_expression_calculation_order_bug(a):
    """
    >>> test_expression_calculation_order_bug(False)
    []
    >>> test_expression_calculation_order_bug(True)
    Traceback (most recent call last):
    ...
    UnboundLocalError: local variable 'b' referenced before assignment
    """
    # ``b`` is bound only when ``a`` is falsy.  Short-circuit evaluation of
    # ``(a or b) and (b or a)`` must therefore read ``b`` (and raise
    # UnboundLocalError) exactly when ``a`` is true — the evaluation order
    # of the boolean sub-expressions is what this regression pins down.
    if not a:
        b = []
    return (a or b) and (b or a)
import sys import sys
def typename(t): def typename(t):
name = type(t).__name__ name = type(t).__name__
if sys.version_info < (2,5): if sys.version_info < (2,5):
...@@ -9,9 +10,11 @@ def typename(t): ...@@ -9,9 +10,11 @@ def typename(t):
name = 'MyException' name = 'MyException'
return "<type '%s'>" % name return "<type '%s'>" % name
class MyException(Exception): class MyException(Exception):
pass pass
class ContextManager(object): class ContextManager(object):
def __init__(self, value, exit_ret = None): def __init__(self, value, exit_ret = None):
self.value = value self.value = value
...@@ -25,6 +28,7 @@ class ContextManager(object): ...@@ -25,6 +28,7 @@ class ContextManager(object):
print("enter") print("enter")
return self.value return self.value
def no_as(): def no_as():
""" """
>>> no_as() >>> no_as()
...@@ -35,6 +39,7 @@ def no_as(): ...@@ -35,6 +39,7 @@ def no_as():
with ContextManager("value"): with ContextManager("value"):
print("hello") print("hello")
def basic(): def basic():
""" """
>>> basic() >>> basic()
...@@ -45,6 +50,7 @@ def basic(): ...@@ -45,6 +50,7 @@ def basic():
with ContextManager("value") as x: with ContextManager("value") as x:
print(x) print(x)
def with_pass(): def with_pass():
""" """
>>> with_pass() >>> with_pass()
...@@ -54,6 +60,7 @@ def with_pass(): ...@@ -54,6 +60,7 @@ def with_pass():
with ContextManager("value") as x: with ContextManager("value") as x:
pass pass
def with_return(): def with_return():
""" """
>>> print(with_return()) >>> print(with_return())
...@@ -64,6 +71,7 @@ def with_return(): ...@@ -64,6 +71,7 @@ def with_return():
with ContextManager("value") as x: with ContextManager("value") as x:
return x return x
def with_break(): def with_break():
""" """
>>> print(with_break()) >>> print(with_break())
...@@ -77,6 +85,7 @@ def with_break(): ...@@ -77,6 +85,7 @@ def with_break():
print("FAILED") print("FAILED")
return c return c
def with_continue(): def with_continue():
""" """
>>> print(with_continue()) >>> print(with_continue())
...@@ -94,6 +103,7 @@ def with_continue(): ...@@ -94,6 +103,7 @@ def with_continue():
print("FAILED") print("FAILED")
return c return c
def with_exception(exit_ret): def with_exception(exit_ret):
""" """
>>> with_exception(None) >>> with_exception(None)
...@@ -113,6 +123,25 @@ def with_exception(exit_ret): ...@@ -113,6 +123,25 @@ def with_exception(exit_ret):
except: except:
print("outer except") print("outer except")
def with_real_lock():
"""
>>> with_real_lock()
about to acquire lock
holding lock
lock no longer held
"""
from threading import Lock
lock = Lock()
print("about to acquire lock")
with lock:
print("holding lock")
print("lock no longer held")
def functions_in_with(): def functions_in_with():
""" """
>>> f = functions_in_with() >>> f = functions_in_with()
...@@ -133,6 +162,7 @@ def functions_in_with(): ...@@ -133,6 +162,7 @@ def functions_in_with():
print("outer except") print("outer except")
return f return f
def multitarget(): def multitarget():
""" """
>>> multitarget() >>> multitarget()
...@@ -143,6 +173,7 @@ def multitarget(): ...@@ -143,6 +173,7 @@ def multitarget():
with ContextManager((1, 2, (3, (4, 5)))) as (a, b, (c, (d, e))): with ContextManager((1, 2, (3, (4, 5)))) as (a, b, (c, (d, e))):
print('%s %s %s %s %s' % (a, b, c, d, e)) print('%s %s %s %s %s' % (a, b, c, d, e))
def tupletarget(): def tupletarget():
""" """
>>> tupletarget() >>> tupletarget()
...@@ -153,39 +184,12 @@ def tupletarget(): ...@@ -153,39 +184,12 @@ def tupletarget():
with ContextManager((1, 2, (3, (4, 5)))) as t: with ContextManager((1, 2, (3, (4, 5)))) as t:
print(t) print(t)
def multimanager():
"""
>>> multimanager()
enter
enter
enter
enter
enter
enter
2
value
1 2 3 4 5
nested
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
"""
with ContextManager(1), ContextManager(2) as x, ContextManager('value') as y,\
ContextManager(3), ContextManager((1, 2, (3, (4, 5)))) as (a, b, (c, (d, e))):
with ContextManager('nested') as nested:
print(x)
print(y)
print('%s %s %s %s %s' % (a, b, c, d, e))
print(nested)
class GetManager(object): class GetManager(object):
def get(self, *args): def get(self, *args):
return ContextManager(*args) return ContextManager(*args)
def manager_from_expression(): def manager_from_expression():
""" """
>>> manager_from_expression() >>> manager_from_expression()
...@@ -201,94 +205,3 @@ def manager_from_expression(): ...@@ -201,94 +205,3 @@ def manager_from_expression():
g = GetManager() g = GetManager()
with g.get(2) as x: with g.get(2) as x:
print(x) print(x)
# Tests borrowed from pyregr test_with.py,
# modified to follow the constraints of Cython.
import unittest
class Dummy(object):
    """Context manager that records whether __enter__/__exit__ were called.

    ``value`` is what __enter__ yields (defaults to the instance itself);
    with ``gobble=True``, __exit__ returns True and so swallows exceptions.
    """

    def __init__(self, value=None, gobble=False):
        self.value = self if value is None else value
        self.gobble = gobble
        self.enter_called = False
        self.exit_called = False

    def __enter__(self):
        self.enter_called = True
        return self.value

    def __exit__(self, *exc_info):
        self.exit_called = True
        self.exc_info = exc_info  # keep the (type, value, tb) triple for inspection
        if self.gobble:
            return True
class InitRaises(object):
    """Fails during construction, before __enter__ could ever run."""

    def __init__(self):
        raise RuntimeError()
class EnterRaises(object):
    """Context manager whose __enter__ raises; __exit__ is a no-op."""

    def __enter__(self):
        raise RuntimeError()

    def __exit__(self, *exc_info):
        pass
class ExitRaises(object):
    """Context manager whose __exit__ raises; __enter__ yields None."""

    def __enter__(self):
        pass

    def __exit__(self, *exc_info):
        raise RuntimeError()
class NestedWith(unittest.TestCase):
    """
    >>> NestedWith().runTest()
    """
    # Borrowed from pyregr test_with.py: exercises the comma-separated
    # multi-manager form ``with A() as a, B() as b:`` in various failure modes.
    def runTest(self):
        # run all sub-tests explicitly so a single doctest call covers them
        self.testNoExceptions()
        self.testExceptionInExprList()
        self.testExceptionInEnter()
        self.testExceptionInExit()
        self.testEnterReturnsTuple()
    def testNoExceptions(self):
        with Dummy() as a, Dummy() as b:
            self.assertTrue(a.enter_called)
            self.assertTrue(b.enter_called)
        # exit flags are only set once the block has been left
        self.assertTrue(a.exit_called)
        self.assertTrue(b.exit_called)
    def testExceptionInExprList(self):
        try:
            with Dummy() as a, InitRaises():
                pass
        except:
            pass
        # the first manager was fully entered and must still be exited
        # even though constructing the second one raised
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)
    def testExceptionInEnter(self):
        try:
            with Dummy() as a, EnterRaises():
                self.fail('body of bad with executed')
        except RuntimeError:
            pass
        else:
            self.fail('RuntimeError not reraised')
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)
    def testExceptionInExit(self):
        body_executed = False
        with Dummy(gobble=True) as a, ExitRaises():
            body_executed = True
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)
        self.assertTrue(body_executed)
        # the gobbling outer manager swallowed ExitRaises' RuntimeError,
        # but must have received it in exc_info
        self.assertNotEqual(a.exc_info[0], None)
    def testEnterReturnsTuple(self):
        # tuple targets must unpack the values returned by __enter__
        with Dummy(value=(1,2)) as (a1, a2), \
                Dummy(value=(10, 20)) as (b1, b2):
            self.assertEqual(1, a1)
            self.assertEqual(2, a2)
            self.assertEqual(10, b1)
            self.assertEqual(20, b2)
import sys
def typename(t):
    """Return ``"<type 'X'>"`` for *t*'s type name.

    On Python < 2.5, old-style class names are normalised so that
    MyException and its instances print like new-style ones.
    """
    name = type(t).__name__
    if sys.version_info >= (2, 5):
        return "<type '%s'>" % name
    # pre-2.5 compatibility shims for old-style classes
    if name == 'classobj' and issubclass(t, MyException):
        name = 'type'
    elif name == 'instance' and isinstance(t, MyException):
        name = 'MyException'
    return "<type '%s'>" % name
class MyException(Exception):
    # raised by the context-manager tests; typename() special-cases it
    # when normalising old-style class names on Python < 2.5
    pass
class ContextManager(object):
    """Reporting context manager used by the ``with`` doctests.

    __enter__ prints "enter" and yields the stored value; __exit__ prints
    the type names of the exception triple and returns ``exit_ret`` (so a
    truthy ``exit_ret`` suppresses exceptions).
    """

    def __init__(self, value, exit_ret=None):
        self.value = value
        self.exit_ret = exit_ret

    def __enter__(self):
        print("enter")
        return self.value

    def __exit__(self, a, b, tb):
        print("exit %s %s %s" % (typename(a), typename(b), typename(tb)))
        return self.exit_ret
def multimanager():
    """
    >>> multimanager()
    enter
    enter
    enter
    enter
    enter
    enter
    2
    value
    1 2 3 4 5
    nested
    exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
    exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
    exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
    exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
    exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
    exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
    """
    # Five comma-separated managers on one line (with mixed bare, ``as x``
    # and tuple-unpacking targets) plus a sixth nested one: the doctest
    # checks left-to-right enters and reverse-order exits.
    with ContextManager(1), ContextManager(2) as x, ContextManager('value') as y,\
            ContextManager(3), ContextManager((1, 2, (3, (4, 5)))) as (a, b, (c, (d, e))):
        with ContextManager('nested') as nested:
            print(x)
            print(y)
            print('%s %s %s %s %s' % (a, b, c, d, e))
            print(nested)
class GetManager(object):
    """Factory used to obtain a context manager from a call expression."""

    def get(self, *args):
        # forward all arguments straight into a fresh ContextManager
        return ContextManager(*args)
def manager_from_expression():
    """
    >>> manager_from_expression()
    enter
    1
    exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
    enter
    2
    exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
    """
    # manager produced directly by a call expression ...
    with GetManager().get(1) as x:
        print(x)
    # ... and by a method call on a pre-bound object
    g = GetManager()
    with g.get(2) as x:
        print(x)
# Tests borrowed from pyregr test_with.py,
# modified to follow the constraints of Cython.
import unittest
class Dummy(object):
def __init__(self, value=None, gobble=False):
if value is None:
value = self
self.value = value
self.gobble = gobble
self.enter_called = False
self.exit_called = False
def __enter__(self):
self.enter_called = True
return self.value
def __exit__(self, *exc_info):
self.exit_called = True
self.exc_info = exc_info
if self.gobble:
return True
class InitRaises(object):
def __init__(self): raise RuntimeError()
class EnterRaises(object):
def __enter__(self): raise RuntimeError()
def __exit__(self, *exc_info): pass
class ExitRaises(object):
def __enter__(self): pass
def __exit__(self, *exc_info): raise RuntimeError()
class NestedWith(unittest.TestCase):
"""
>>> NestedWith().runTest()
"""
def runTest(self):
self.testNoExceptions()
self.testExceptionInExprList()
self.testExceptionInEnter()
self.testExceptionInExit()
self.testEnterReturnsTuple()
def testNoExceptions(self):
with Dummy() as a, Dummy() as b:
self.assertTrue(a.enter_called)
self.assertTrue(b.enter_called)
self.assertTrue(a.exit_called)
self.assertTrue(b.exit_called)
def testExceptionInExprList(self):
try:
with Dummy() as a, InitRaises():
pass
except:
pass
self.assertTrue(a.enter_called)
self.assertTrue(a.exit_called)
def testExceptionInEnter(self):
try:
with Dummy() as a, EnterRaises():
self.fail('body of bad with executed')
except RuntimeError:
pass
else:
self.fail('RuntimeError not reraised')
self.assertTrue(a.enter_called)
self.assertTrue(a.exit_called)
def testExceptionInExit(self):
body_executed = False
with Dummy(gobble=True) as a, ExitRaises():
body_executed = True
self.assertTrue(a.enter_called)
self.assertTrue(a.exit_called)
self.assertTrue(body_executed)
self.assertNotEqual(a.exc_info[0], None)
def testEnterReturnsTuple(self):
with Dummy(value=(1,2)) as (a1, a2), \
Dummy(value=(10, 20)) as (b1, b2):
self.assertEqual(1, a1)
self.assertEqual(2, a2)
self.assertEqual(10, b1)
self.assertEqual(20, b2)
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment