Commit f82d41da authored by Jeroen Demeyer

Merge remote-tracking branch 'origin/master' into optimize_dependencies

parents a845f9e8 da1d5bb6
......@@ -7,6 +7,7 @@ python:
- 3.3
- 3.4
- 3.5
- 3.5-dev
- pypy
- pypy3
......@@ -36,6 +37,7 @@ matrix:
allow_failures:
- python: pypy
- python: pypy3
- python: 3.5-dev
exclude:
- python: pypy
env: BACKEND=cpp
......
......@@ -68,6 +68,7 @@ else:
return filename
basestring = str
def extended_iglob(pattern):
if '{' in pattern:
m = re.match('(.*){([^}]+)}(.*)', pattern)
......@@ -122,8 +123,13 @@ def file_hash(filename):
f.close()
return m.hexdigest()
def parse_list(s):
"""
>>> parse_list("")
[]
>>> parse_list("a")
['a']
>>> parse_list("a b c")
['a', 'b', 'c']
>>> parse_list("[a, b, c]")
......@@ -133,7 +139,7 @@ def parse_list(s):
>>> parse_list('[a, ",a", "a,", ",", ]')
['a', ',a', 'a,', ',']
"""
if s[0] == '[' and s[-1] == ']':
if len(s) >= 2 and s[0] == '[' and s[-1] == ']':
s = s[1:-1]
delimiter = ','
else:
......@@ -147,6 +153,7 @@ def parse_list(s):
return literal
return [unquote(item) for item in s.split(delimiter) if item.strip()]
transitive_str = object()
transitive_list = object()
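The extra ``len(s) >= 2`` guard in ``parse_list()`` above covers the empty-string case exercised by the new doctest; a minimal sketch of the behaviour::

    parse_list("")            # previously raised IndexError on s[0]; now returns []
    parse_list("a b c")       # whitespace-delimited form  -> ['a', 'b', 'c']
    parse_list("[a, b, c]")   # bracketed, comma-delimited -> ['a', 'b', 'c']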
......@@ -167,6 +174,7 @@ distutils_settings = {
'language': transitive_str,
}
@cython.locals(start=cython.Py_ssize_t, end=cython.Py_ssize_t)
def line_iter(source):
if isinstance(source, basestring):
......@@ -182,6 +190,7 @@ def line_iter(source):
for line in source:
yield line
class DistutilsInfo(object):
def __init__(self, source=None, exn=None):
......@@ -258,6 +267,7 @@ class DistutilsInfo(object):
value = getattr(extension, key) + list(value)
setattr(extension, key, value)
@cython.locals(start=cython.Py_ssize_t, q=cython.Py_ssize_t,
single_q=cython.Py_ssize_t, double_q=cython.Py_ssize_t,
hash_mark=cython.Py_ssize_t, end=cython.Py_ssize_t,
......@@ -353,9 +363,11 @@ dependency_regex = re.compile(r"(?:^from +([0-9a-zA-Z_.]+) +cimport)|"
r"(?:^cdef +extern +from +['\"]([^'\"]+)['\"])|"
r"(?:^include +['\"]([^'\"]+)['\"])", re.M)
def normalize_existing(base_path, rel_paths):
return normalize_existing0(os.path.dirname(base_path), tuple(set(rel_paths)))
@cached_function
def normalize_existing0(base_dir, rel_paths):
normalized = []
......@@ -367,6 +379,7 @@ def normalize_existing0(base_dir, rel_paths):
normalized.append(rel)
return normalized
def resolve_depends(depends, include_dirs):
include_dirs = tuple(include_dirs)
resolved = []
......@@ -376,6 +389,7 @@ def resolve_depends(depends, include_dirs):
resolved.append(path)
return resolved
@cached_function
def resolve_depend(depend, include_dirs):
if depend[0] == '<' and depend[-1] == '>':
......@@ -386,6 +400,7 @@ def resolve_depend(depend, include_dirs):
return os.path.normpath(path)
return None
@cached_function
def package(filename):
dir = os.path.dirname(os.path.abspath(str(filename)))
......@@ -394,6 +409,7 @@ def package(filename):
else:
return ()
@cached_function
def fully_qualified_name(filename):
module = os.path.splitext(os.path.basename(filename))[0]
......@@ -604,7 +620,9 @@ class DependencyTree(object):
finally:
del stack[node]
_dep_tree = None
def create_dependency_tree(ctx=None, quiet=False):
global _dep_tree
if _dep_tree is None:
......@@ -615,8 +633,10 @@ def create_dependency_tree(ctx=None, quiet=False):
# This may be useful for advanced users?
def create_extension_list(patterns, exclude=[], ctx=None, aliases=None, quiet=False, language=None,
def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=False, language=None,
exclude_failures=False):
if exclude is None:
exclude = []
if not isinstance(patterns, (list, tuple)):
patterns = [patterns]
explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)])
......@@ -715,7 +735,7 @@ def create_extension_list(patterns, exclude=[], ctx=None, aliases=None, quiet=Fa
# This is the user-exposed entry point.
def cythonize(module_list, exclude=[], nthreads=0, aliases=None, quiet=False, force=False, language=None,
def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=False, language=None,
exclude_failures=False, **options):
"""
Compile a set of source modules into C/C++ files and return a list of distutils
......@@ -743,6 +763,8 @@ def cythonize(module_list, exclude=[], nthreads=0, aliases=None, quiet=False, fo
Additional compilation options can be passed as keyword arguments.
"""
if exclude is None:
exclude = []
if 'include_path' not in options:
options['include_path'] = ['.']
if 'common_utility_include_dir' in options:
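Several of the signature changes in this commit (``create_extension_list()``, ``cythonize()``, and later ``TreeFragment`` and pyximport) replace mutable default arguments such as ``exclude=[]`` with ``exclude=None`` plus a fallback in the function body. A minimal sketch of the pitfall being avoided, using hypothetical names::

    def broken(item, seen=[]):         # one list, created once at definition time
        seen.append(item)
        return seen

    def fixed(item, seen=None):        # a fresh list for every call
        if seen is None:
            seen = []
        seen.append(item)
        return seen

    broken('a'); broken('b')           # -> ['a', 'b']  (state leaks between calls)
    fixed('a');  fixed('b')            # -> ['b']       (each call starts clean)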
......@@ -937,7 +959,9 @@ if os.environ.get('XML_RESULTS'):
output.close()
return with_record
else:
record_results = lambda x: x
def record_results(func):
return func
# TODO: Share context? Issue: pyx processing leaks into pxd module
@record_results
......
......@@ -48,6 +48,7 @@ class UnboundSymbols(EnvTransform, SkipDeclarations):
super(UnboundSymbols, self).__call__(node)
return self.unbound
@cached_function
def unbound_symbols(code, context=None):
code = to_unicode(code)
......@@ -67,6 +68,7 @@ def unbound_symbols(code, context=None):
import __builtin__ as builtins
return UnboundSymbols()(tree) - set(dir(builtins))
def unsafe_type(arg, context=None):
py_type = type(arg)
if py_type is int:
......@@ -74,6 +76,7 @@ def unsafe_type(arg, context=None):
else:
return safe_type(arg, context)
def safe_type(arg, context=None):
py_type = type(arg)
if py_type in [list, tuple, dict, str]:
......@@ -97,6 +100,7 @@ def safe_type(arg, context=None):
return '%s.%s' % (base_type.__module__, base_type.__name__)
return 'object'
def _get_build_extension():
dist = Distribution()
# Ensure the build respects distutils configuration by parsing
......@@ -107,19 +111,16 @@ def _get_build_extension():
build_extension.finalize_options()
return build_extension
@cached_function
def _create_context(cython_include_dirs):
return Context(list(cython_include_dirs), default_options)
def cython_inline(code,
get_type=unsafe_type,
lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
cython_include_dirs=['.'],
force=False,
quiet=False,
locals=None,
globals=None,
**kwds):
def cython_inline(code, get_type=unsafe_type, lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
cython_include_dirs=None, force=False, quiet=False, locals=None, globals=None, **kwds):
if cython_include_dirs is None:
cython_include_dirs = ['.']
if get_type is None:
get_type = lambda x: 'object'
code = to_unicode(code)
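The ``cython_inline()`` signature is only reflowed here, with the ``cython_include_dirs=['.']`` default moved into the body. A short usage sketch (the import path matches this module, Cython.Build.Inline; treat the exact invocation as an illustration)::

    from Cython.Build.Inline import cython_inline

    # Compiles the snippet on first use and caches it under get_cython_cache_dir().
    assert cython_inline("return a + b", a=1, b=2) == 3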
......@@ -263,7 +264,6 @@ def extract_func_code(code):
return '\n'.join(module), ' ' + '\n '.join(function)
try:
from inspect import getcallargs
except ImportError:
......@@ -294,6 +294,7 @@ except ImportError:
raise TypeError("Missing argument: %s" % name)
return all
def get_body(source):
ix = source.index(':')
if source[:5] == 'lambda':
......@@ -301,6 +302,7 @@ def get_body(source):
else:
return source[ix+1:]
# Lots to be done here... It would be especially cool if compiled functions
# could invoke each other quickly.
class RuntimeCompiledFunction(object):
......
......@@ -574,7 +574,7 @@ class GetAndReleaseBufferUtilityCode(object):
def __hash__(self):
return 24342342
def get_tree(self): pass
def get_tree(self, **kwargs): pass
def put_code(self, output):
code = output['utility_code_def']
......
......@@ -318,7 +318,7 @@ class UtilityCodeBase(object):
def __str__(self):
return "<%s(%s)>" % (type(self).__name__, self.name)
def get_tree(self):
def get_tree(self, **kwargs):
pass
......@@ -2145,8 +2145,9 @@ class CCodeWriter(object):
def error_goto(self, pos):
lbl = self.funcstate.error_label
self.funcstate.use_label(lbl)
return "{%s goto %s;}" % (
self.set_error_info(pos),
return "__PYX_ERR(%s, %s, %s)" % (
self.lookup_filename(pos[0]),
pos[1],
lbl)
def error_goto_if(self, cond, pos):
......
......@@ -11150,6 +11150,12 @@ class CondExprNode(ExprNode):
self.type_error()
return self
def coerce_to_integer(self, env):
self.true_val = self.true_val.coerce_to_integer(env)
self.false_val = self.false_val.coerce_to_integer(env)
self.result_ctype = None
return self.analyse_result_type(env)
def coerce_to(self, dst_type, env):
self.true_val = self.true_val.coerce_to(dst_type, env)
self.false_val = self.false_val.coerce_to(dst_type, env)
......
......@@ -341,6 +341,14 @@ class NameAssignment(object):
return self.entry.type
return self.inferred_type
def __getstate__(self):
return (self.lhs, self.rhs, self.entry, self.pos,
self.refs, self.is_arg, self.is_deletion, self.inferred_type)
def __setstate__(self, state):
(self.lhs, self.rhs, self.entry, self.pos,
self.refs, self.is_arg, self.is_deletion, self.inferred_type) = state
class StaticAssignment(NameAssignment):
"""Initialised at declaration time, e.g. stack allocation."""
......
......@@ -40,6 +40,17 @@ def check_c_declarations(module_node):
module_node.scope.check_c_functions()
return module_node
def generate_c_code_config(env, options):
if Options.annotate or options.annotate:
emit_linenums = False
else:
emit_linenums = options.emit_linenums
rootwriter = Code.CCodeWriter()
return Code.CCodeConfig(emit_linenums=emit_linenums,
emit_code_comments=env.directives['emit_code_comments'],
c_line_in_traceback=options.c_line_in_traceback)
class ModuleNode(Nodes.Node, Nodes.BlockNode):
# doc string or None
# body StatListNode
......@@ -117,7 +128,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
self.sort_cdef_classes(env)
self.generate_c_code(env, options, result)
self.generate_h_code(env, options, result)
self.generate_api_code(env, result)
self.generate_api_code(env, options, result)
def has_imported_c_functions(self):
for module in self.referenced_modules:
......@@ -139,7 +150,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if (h_types or h_vars or h_funcs or h_extension_types):
result.h_file = replace_suffix(result.c_file, ".h")
h_code = Code.CCodeWriter()
Code.GlobalState(h_code, self, Code.CCodeConfig()) # FIXME: config?
c_code_config = generate_c_code_config(env, options)
Code.GlobalState(h_code, self, c_code_config)
if options.generate_pxi:
result.i_file = replace_suffix(result.c_file, ".pxi")
i_code = Code.PyrexCodeWriter(result.i_file)
......@@ -203,7 +215,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
def api_name(self, env):
return env.qualified_name.replace(".", "__")
def generate_api_code(self, env, result):
def generate_api_code(self, env, options, result):
def api_entries(entries, pxd=0):
return [entry for entry in entries
if entry.api or (pxd and entry.defined_in_pxd)]
......@@ -213,7 +225,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if api_vars or api_funcs or api_extension_types:
result.api_file = replace_suffix(result.c_file, "_api.h")
h_code = Code.CCodeWriter()
Code.GlobalState(h_code, self, Code.CCodeConfig()) # FIXME: config?
c_code_config = generate_c_code_config(env, options)
Code.GlobalState(h_code, self, c_code_config)
h_code.put_generated_by()
api_guard = Naming.api_guard_prefix + self.api_name(env)
h_code.put_h_guard(api_guard)
......@@ -308,17 +321,12 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
modules = self.referenced_modules
if Options.annotate or options.annotate:
emit_linenums = False
rootwriter = Annotate.AnnotationCCodeWriter()
else:
emit_linenums = options.emit_linenums
rootwriter = Code.CCodeWriter()
c_code_config = Code.CCodeConfig(
emit_linenums=emit_linenums,
emit_code_comments=env.directives['emit_code_comments'],
c_line_in_traceback=options.c_line_in_traceback,
)
c_code_config = generate_c_code_config(env, options)
globalstate = Code.GlobalState(
rootwriter, self,
code_config=c_code_config,
......@@ -327,7 +335,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
globalstate.initialize_main_c_code()
h_code = globalstate['h_code']
self.generate_module_preamble(env, modules, result.embedded_metadata, h_code)
self.generate_module_preamble(env, options, modules, result.embedded_metadata, h_code)
globalstate.module_pos = self.pos
globalstate.directives = self.directives
......@@ -582,7 +590,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
def _put_setup_code(self, code, name):
code.put(UtilityCode.load_as_string(name, "ModuleSetupCode.c")[1])
def generate_module_preamble(self, env, cimported_modules, metadata, code):
def generate_module_preamble(self, env, options, cimported_modules, metadata, code):
code.put_generated_by()
if metadata:
code.putln("/* BEGIN: Cython Metadata")
......@@ -610,6 +618,18 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
self._put_setup_code(code, "CInitCode")
self._put_setup_code(code, "MathInitCode")
if options.c_line_in_traceback:
cinfo = "%s = %s; " % (Naming.clineno_cname, Naming.line_c_macro)
else:
cinfo = ""
code.put("""
#define __PYX_ERR(f_index, lineno, Ln_error) \\
{ \\
%s = %s[f_index]; %s = lineno; %sgoto Ln_error; \\
}
""" % (Naming.filename_cname, Naming.filetable_cname, Naming.lineno_cname,
cinfo))
code.put("""
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
......
......@@ -649,7 +649,9 @@ class CFuncDeclaratorNode(CDeclaratorNode):
else:
return None
def analyse(self, return_type, env, nonempty = 0, directive_locals = {}):
def analyse(self, return_type, env, nonempty=0, directive_locals=None):
if directive_locals is None:
directive_locals = {}
if nonempty:
nonempty -= 1
func_type_args = []
......
......@@ -2021,6 +2021,9 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
if node.type.assignable_from(arg.arg.type):
# completely redundant C->Py->C coercion
return arg.arg.coerce_to(node.type, self.current_env())
elif arg.type is Builtin.unicode_type:
if arg.arg.type.is_unicode_char and node.type.is_unicode_char:
return arg.arg.coerce_to(node.type, self.current_env())
elif isinstance(arg, ExprNodes.SimpleCallNode):
if node.type.is_int or node.type.is_float:
return self._optimise_numeric_cast_call(node, arg)
......
......@@ -1271,37 +1271,118 @@ class WithTransform(CythonTransform, SkipDeclarations):
class DecoratorTransform(ScopeTrackingTransform, SkipDeclarations):
"""Originally, this was the only place where decorators were
transformed into the corresponding calling code. Now, this is
done directly in DefNode and PyClassDefNode to avoid reassignments
to the function/class name - except for cdef class methods. For
those, the reassignment is required as methods are originally
defined in the PyMethodDef struct.
The IndirectionNode allows DefNode to override the decorator
"""
Transforms method decorators in cdef classes into nested calls or properties.
def visit_DefNode(self, func_node):
Python-style decorator properties are transformed into a PropertyNode
with up to the three getter, setter and deleter DefNodes.
The functional style isn't supported yet.
"""
_properties = None
_map_property_attribute = {
'getter': '__get__',
'setter': '__set__',
'deleter': '__del__',
}.get
def visit_CClassDefNode(self, node):
if self._properties is None:
self._properties = []
self._properties.append({})
super(DecoratorTransform, self).visit_CClassDefNode(node)
self._properties.pop()
return node
def visit_PropertyNode(self, node):
# Suppress warning for our code until we can convert all our uses over.
if isinstance(node.pos[0], str) or True:
warning(node.pos, "'property %s:' syntax is deprecated, use '@property'" % node.name, 2)
return node
def visit_DefNode(self, node):
scope_type = self.scope_type
func_node = self.visit_FuncDefNode(func_node)
if scope_type != 'cclass' or not func_node.decorators:
return func_node
return self.handle_decorators(func_node, func_node.decorators,
func_node.name)
def handle_decorators(self, node, decorators, name):
decorator_result = ExprNodes.NameNode(node.pos, name = name)
node = self.visit_FuncDefNode(node)
if scope_type != 'cclass' or not node.decorators:
return node
# transform @property decorators
properties = self._properties[-1]
for decorator_node in node.decorators[::-1]:
decorator = decorator_node.decorator
if decorator.is_name and decorator.name == 'property':
if len(node.decorators) > 1:
return self._reject_decorated_property(node, decorator_node)
name = node.name
node.name = '__get__'
node.decorators.remove(decorator_node)
stat_list = [node]
if name in properties:
prop = properties[name]
prop.pos = node.pos
prop.doc = node.doc
prop.body.stats = stat_list
return []
prop = Nodes.PropertyNode(node.pos, name=name)
prop.doc = node.doc
prop.body = Nodes.StatListNode(node.pos, stats=stat_list)
properties[name] = prop
return [prop]
elif decorator.is_attribute and decorator.obj.name in properties:
handler_name = self._map_property_attribute(decorator.attribute)
if handler_name:
assert decorator.obj.name == node.name
if len(node.decorators) > 1:
return self._reject_decorated_property(node, decorator_node)
return self._add_to_property(properties, node, handler_name, decorator_node)
# transform normal decorators
return self.chain_decorators(node, node.decorators, node.name)
@staticmethod
def _reject_decorated_property(node, decorator_node):
# restrict transformation to outermost decorator as wrapped properties will probably not work
for deco in node.decorators:
if deco != decorator_node:
error(deco.pos, "Property methods with additional decorators are not supported")
return node
@staticmethod
def _add_to_property(properties, node, name, decorator):
prop = properties[node.name]
node.name = name
node.decorators.remove(decorator)
stats = prop.body.stats
for i, stat in enumerate(stats):
if stat.name == name:
stats[i] = node
break
else:
stats.append(node)
return []
@staticmethod
def chain_decorators(node, decorators, name):
"""
Decorators are applied directly in DefNode and PyClassDefNode to avoid
reassignments to the function/class name - except for cdef class methods.
For those, the reassignment is required as methods are originally
defined in the PyMethodDef struct.
The IndirectionNode allows DefNode to override the decorator.
"""
decorator_result = ExprNodes.NameNode(node.pos, name=name)
for decorator in decorators[::-1]:
decorator_result = ExprNodes.SimpleCallNode(
decorator.pos,
function = decorator.decorator,
args = [decorator_result])
function=decorator.decorator,
args=[decorator_result])
name_node = ExprNodes.NameNode(node.pos, name = name)
name_node = ExprNodes.NameNode(node.pos, name=name)
reassignment = Nodes.SingleAssignmentNode(
node.pos,
lhs = name_node,
rhs = decorator_result)
lhs=name_node,
rhs=decorator_result)
reassignment = Nodes.IndirectionNode([reassignment])
node.decorator_indirection = reassignment
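The rewritten ``DecoratorTransform`` is what turns ``@property``-style decorators on ``cdef class`` methods into ``PropertyNode`` trees (the new tests further down in this commit exercise exactly this). A minimal sketch of the pattern it now supports::

    cdef class Rectangle:
        cdef double _width

        @property
        def width(self):
            return self._width

        @width.setter
        def width(self, value):
            self._width = value

        @width.deleter
        def width(self):
            self._width = 0.0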
......@@ -1500,7 +1581,7 @@ if VALUE is not None:
if decorators:
transform = DecoratorTransform(self.context)
def_node = node.node
_, reassignments = transform.handle_decorators(
_, reassignments = transform.chain_decorators(
def_node, decorators, def_node.name)
reassignments.analyse_declarations(env)
node = [node, reassignments]
......@@ -2110,6 +2191,8 @@ class AlignFunctionDefinitions(CythonTransform):
if pxd_def is None:
pxd_def = self.scope.lookup(node.class_name)
if pxd_def:
if not pxd_def.defined_in_pxd:
return node
outer_scope = self.scope
self.scope = pxd_def.type.scope
self.visitchildren(node)
......@@ -2773,6 +2856,8 @@ class TransformBuiltinMethods(EnvTransform):
node.function.pos, type=type, operand=args[1], typecheck=typecheck)
else:
error(args[0].pos, "Not a type")
self.visitchildren(node)
return node
......
......@@ -127,7 +127,7 @@ def inject_utility_code_stage_factory(context):
for dep in utilcode.requires:
if dep not in added and dep not in module_node.scope.utility_code_list:
module_node.scope.utility_code_list.append(dep)
tree = utilcode.get_tree()
tree = utilcode.get_tree(cython_scope=context.cython_scope)
if tree:
module_node.merge_in(tree.body, tree.scope, merge_scope=True)
return module_node
......
......@@ -2061,7 +2061,8 @@ proto="""
#define __Pyx_CIMAG(z) ((z).imag)
#endif
#if (defined(_WIN32) || defined(__clang__)) && defined(__cplusplus) && CYTHON_CCOMPLEX
#if defined(__cplusplus) && CYTHON_CCOMPLEX \
&& (defined(_WIN32) || defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5 || __GNUC__ == 4 && __GNUC_MINOR__ >= 4 )) || __cplusplus >= 201103)
#define __Pyx_SET_CREAL(z,x) ((z).real(x))
#define __Pyx_SET_CIMAG(z,y) ((z).imag(y))
#else
......@@ -3202,7 +3203,7 @@ class ToPyStructUtilityCode(object):
def __hash__(self):
return hash(self.header)
def get_tree(self):
def get_tree(self, **kwargs):
pass
def put_code(self, output):
......@@ -3257,8 +3258,10 @@ class CStructOrUnionType(CType):
self.scope = scope
self.typedef_flag = typedef_flag
self.is_struct = kind == 'struct'
self.to_py_function = "%s_to_py_%s" % (Naming.convert_func_prefix, self.cname)
self.from_py_function = "%s_from_py_%s" % (Naming.convert_func_prefix, self.cname)
self.to_py_function = "%s_to_py_%s" % (
Naming.convert_func_prefix, self.specialization_name())
self.from_py_function = "%s_from_py_%s" % (
Naming.convert_func_prefix, self.specialization_name())
self.exception_check = True
self._convert_to_py_code = None
self._convert_from_py_code = None
......@@ -4250,6 +4253,10 @@ def merge_template_deductions(a, b):
def widest_numeric_type(type1, type2):
"""Given two numeric types, return the narrowest type encompassing both of them.
"""
if type1.is_reference:
type1 = type1.ref_base_type
if type2.is_reference:
type2 = type2.ref_base_type
if type1 == type2:
widest_type = type1
elif type1.is_complex or type2.is_complex:
......
......@@ -158,8 +158,11 @@ class SourceDescriptor(object):
def get_escaped_description(self):
if self._escaped_description is None:
self._escaped_description = \
esc_desc = \
self.get_description().encode('ASCII', 'replace').decode("ASCII")
# Use foreward slashes on Windows since these paths
# will be used in the #line directives in the C/C++ files.
self._escaped_description = esc_desc.replace('\\', '/')
return self._escaped_description
def __gt__(self, other):
......
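The forward-slash rewrite above only affects the escaped path text that ends up in ``#line`` directives; a one-line illustration::

    'C:\\work\\pkg\\mod.pyx'.replace('\\', '/')   # -> 'C:/work/pkg/mod.pyx'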
from __future__ import absolute_import
from copy import deepcopy
from unittest import TestCase
from Cython.Compiler.FlowControl import (
NameAssignment, StaticAssignment, Argument, NameDeletion)
class FakeType(object):
is_pyobject = True
class FakeNode(object):
pos = ('filename.pyx', 1, 2)
cf_state = None
type = FakeType()
def infer_type(self, scope):
return self.type
class FakeEntry(object):
type = FakeType()
class TestGraph(TestCase):
def test_deepcopy(self):
lhs, rhs = FakeNode(), FakeNode()
entry = FakeEntry()
entry.pos = lhs.pos
name_ass = NameAssignment(lhs, rhs, entry)
ass = deepcopy(name_ass)
self.assertTrue(ass.lhs)
self.assertTrue(ass.rhs)
self.assertTrue(ass.entry)
self.assertEqual(ass.pos, name_ass.pos)
self.assertFalse(ass.is_arg)
self.assertFalse(ass.is_deletion)
static_ass = StaticAssignment(entry)
ass = deepcopy(static_ass)
self.assertTrue(ass.lhs)
self.assertTrue(ass.rhs)
self.assertTrue(ass.entry)
self.assertEqual(ass.pos, static_ass.pos)
self.assertFalse(ass.is_arg)
self.assertFalse(ass.is_deletion)
arg_ass = Argument(lhs, rhs, entry)
ass = deepcopy(arg_ass)
self.assertTrue(ass.lhs)
self.assertTrue(ass.rhs)
self.assertTrue(ass.entry)
self.assertEqual(ass.pos, arg_ass.pos)
self.assertTrue(ass.is_arg)
self.assertFalse(ass.is_deletion)
name_del = NameDeletion(lhs, entry)
ass = deepcopy(name_del)
self.assertTrue(ass.lhs)
self.assertTrue(ass.rhs)
self.assertTrue(ass.entry)
self.assertEqual(ass.pos, name_del.pos)
self.assertFalse(ass.is_arg)
self.assertTrue(ass.is_deletion)
......@@ -39,7 +39,7 @@ class StringParseContext(Main.Context):
return ModuleScope(module_name, parent_module=None, context=self)
def parse_from_strings(name, code, pxds={}, level=None, initial_pos=None,
def parse_from_strings(name, code, pxds=None, level=None, initial_pos=None,
context=None, allow_struct_enum_decorator=False):
"""
Utility method to parse a (unicode) string of code. This is mostly
......@@ -86,6 +86,7 @@ def parse_from_strings(name, code, pxds={}, level=None, initial_pos=None,
tree.scope = scope
return tree
class TreeCopier(VisitorTransform):
def visit_Node(self, node):
if node is None:
......@@ -95,6 +96,7 @@ class TreeCopier(VisitorTransform):
self.visitchildren(c)
return c
class ApplyPositionAndCopy(TreeCopier):
def __init__(self, pos):
super(ApplyPositionAndCopy, self).__init__()
......@@ -105,6 +107,7 @@ class ApplyPositionAndCopy(TreeCopier):
copy.pos = self.pos
return copy
class TemplateTransform(VisitorTransform):
"""
Makes a copy of a template tree while doing substitutions.
......@@ -212,9 +215,16 @@ def strip_common_indent(lines):
class TreeFragment(object):
def __init__(self, code, name=None, pxds={}, temps=[], pipeline=[], level=None, initial_pos=None):
def __init__(self, code, name=None, pxds=None, temps=None, pipeline=None, level=None, initial_pos=None):
if pxds is None:
pxds = {}
if temps is None:
temps = []
if pipeline is None:
pipeline = []
if not name:
name = "(tree fragment)"
if isinstance(code, _unicode):
def fmt(x): return u"\n".join(strip_common_indent(x.split(u"\n")))
......@@ -233,7 +243,8 @@ class TreeFragment(object):
t = transform(t)
self.root = t
elif isinstance(code, Node):
if pxds != {}: raise NotImplementedError()
if pxds:
raise NotImplementedError()
self.root = code
else:
raise ValueError("Unrecognized code format (accepts unicode and Node)")
......@@ -242,11 +253,16 @@ class TreeFragment(object):
def copy(self):
return copy_code_tree(self.root)
def substitute(self, nodes={}, temps=[], pos = None):
def substitute(self, nodes=None, temps=None, pos = None):
if nodes is None:
nodes = {}
if temps is None:
temps = []
return TemplateTransform()(self.root,
substitutions = nodes,
temps = self.temps + temps, pos = pos)
class SetPosTransform(VisitorTransform):
def __init__(self, pos):
super(SetPosTransform, self).__init__()
......
......@@ -8,6 +8,7 @@ from __future__ import absolute_import
import re
import os.path
import sys
from collections import defaultdict
from coverage.plugin import CoveragePlugin, FileTracer, FileReporter # requires coverage.py 4.0+
......@@ -35,6 +36,12 @@ def _find_dep_file_path(main_file, file_path):
pxi_file_path = os.path.join(os.path.dirname(main_file), file_path)
if os.path.exists(pxi_file_path):
abs_path = os.path.abspath(pxi_file_path)
# search sys.path for external locations if a valid file hasn't been found
if not os.path.exists(abs_path):
for sys_path in sys.path:
test_path = os.path.realpath(os.path.join(sys_path, file_path))
if os.path.exists(test_path):
return test_path
return abs_path
......@@ -132,7 +139,7 @@ class Plugin(CoveragePlugin):
try:
with open(c_file, 'rb') as f:
if b'/* Generated by Cython ' not in f.read(30):
return None # not a Cython file
return None, None # not a Cython file
except (IOError, OSError):
c_file = None
......@@ -238,7 +245,7 @@ class CythonModuleTracer(FileTracer):
return self._file_path_map[source_file]
except KeyError:
pass
abs_path = os.path.abspath(source_file)
abs_path = _find_dep_file_path(filename, source_file)
if self.py_file and source_file[-3:].lower() == '.py':
# always let coverage.py handle this case itself
......
......@@ -52,7 +52,7 @@ from libc.string cimport strcat, strncat, \
from cpython.object cimport Py_SIZE
from cpython.ref cimport PyTypeObject, Py_TYPE
from cpython.exc cimport PyErr_BadArgument
from cpython.mem cimport PyMem_Malloc, PyMem_Free
from cpython.mem cimport PyObject_Malloc, PyObject_Free
cdef extern from *: # Hard-coded utility code hack.
ctypedef class array.array [object arrayobject]
......@@ -102,7 +102,7 @@ cdef extern from *: # Hard-coded utility code hack.
info.itemsize = self.ob_descr.itemsize # e.g. sizeof(float)
info.len = info.itemsize * item_count
info.shape = <Py_ssize_t*> PyMem_Malloc(sizeof(Py_ssize_t) + 2)
info.shape = <Py_ssize_t*> PyObject_Malloc(sizeof(Py_ssize_t) + 2)
if not info.shape:
raise MemoryError()
info.shape[0] = item_count # constant regardless of resizing
......@@ -114,7 +114,7 @@ cdef extern from *: # Hard-coded utility code hack.
info.obj = self
def __releasebuffer__(self, Py_buffer* info):
PyMem_Free(info.shape)
PyObject_Free(info.shape)
array newarrayobject(PyTypeObject* type, Py_ssize_t size, arraydescr *descr)
......
......@@ -73,3 +73,36 @@ cdef extern from "Python.h":
# PyMem_MALLOC(), PyMem_REALLOC(), PyMem_FREE().
# PyMem_NEW(), PyMem_RESIZE(), PyMem_DEL().
#####################################################################
# Raw object memory interface
#####################################################################
# Functions to call the same malloc/realloc/free as used by Python's
# object allocator. If WITH_PYMALLOC is enabled, these may differ from
# the platform malloc/realloc/free. The Python object allocator is
# designed for fast, cache-conscious allocation of many "small" objects,
# and with low hidden memory overhead.
#
# PyObject_Malloc(0) returns a unique non-NULL pointer if possible.
#
# PyObject_Realloc(NULL, n) acts like PyObject_Malloc(n).
# PyObject_Realloc(p != NULL, 0) does not return NULL, or free the memory
# at p.
#
# Returned pointers must be checked for NULL explicitly; no action is
# performed on failure other than to return NULL (no warning it printed, no
# exception is set, etc).
#
# For allocating objects, use PyObject_{New, NewVar} instead whenever
# possible. The PyObject_{Malloc, Realloc, Free} family is exposed
# so that you can exploit Python's small-block allocator for non-object
# uses. If you must use these routines to allocate object memory, make sure
# the object gets initialized via PyObject_{Init, InitVar} after obtaining
# the raw memory.
void* PyObject_Malloc(size_t size)
void* PyObject_Calloc(size_t nelem, size_t elsize)
void* PyObject_Realloc(void *ptr, size_t new_size)
void PyObject_Free(void *ptr)
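With these declarations in place, the object allocator can be cimported from ``cpython.mem``; a minimal, hypothetical sketch of a non-object use::

    from cpython.mem cimport PyObject_Malloc, PyObject_Free

    def use_scratch_buffer():
        cdef double* scratch = <double*> PyObject_Malloc(16 * sizeof(double))
        if not scratch:
            raise MemoryError()
        try:
            scratch[0] = 1.0      # small-block allocator used for raw, non-object data
        finally:
            PyObject_Free(scratch)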
#Basic reference: http://www.cplusplus.com/reference/iterator/
#Most of these classes are in fact empty structs
cdef extern from "<iterator>" namespace "std" nogil:
cdef cppclass iterator[Category,T,Distance,Pointer,Reference]:
pass
cdef cppclass output_iterator_tag:
pass
cdef cppclass input_iterator_tag:
pass
cdef cppclass forward_iterator_tag(input_iterator_tag):
pass
cdef cppclass bidirectional_iterator_tag(forward_iterator_tag):
pass
cdef cppclass random_access_iterator_tag(bidirectional_iterator_tag):
pass
cdef cppclass back_insert_iterator[T](iterator[output_iterator_tag,void,void,void,void]):
pass
cdef cppclass front_insert_iterator[T](iterator[output_iterator_tag,void,void,void,void]):
pass
cdef cppclass insert_iterator[T](iterator[output_iterator_tag,void,void,void,void]):
pass
back_insert_iterator[CONTAINER] back_inserter[CONTAINER](CONTAINER &)
front_insert_iterator[CONTAINER] front_inserter[CONTAINER](CONTAINER &)
##Note: this is the C++98 version of inserter.
##The C++11 versions's prototype relies on typedef members of classes, which Cython doesn't currently support:
##template <class Container>
##insert_iterator<Container> inserter (Container& x, typename Container::iterator it)
insert_iterator[CONTAINER] inserter[CONTAINER,ITERATOR](CONTAINER &, ITERATOR)
cdef extern from "limits" namespace "std" nogil:
enum float_round_style:
round_indeterminate = -1
round_toward_zero = 0
round_to_nearest = 1
round_toward_infinity = 2
round_toward_neg_infinity = 3
enum float_denorm_style:
denorm_indeterminate = -1
denorm_absent = 0
denorm_present = 1
#The static methods can be called as, e.g. numeric_limits[int].round_error(), etc.
#The const data members should be declared as static. Cython currently doesn't allow that
#and/or I can't figure it out, so you must instantiate an object to access, e.g.
#cdef numeric_limits[double] lm
#print lm.round_style
cdef cppclass numeric_limits[T]:
const bint is_specialized
@staticmethod
T min()
@staticmethod
T max()
const int digits
const int digits10
const bint is_signed
const bint is_integer
const bint is_exact
const int radix
@staticmethod
T epsilon()
@staticmethod
T round_error()
const int min_exponent
const int min_exponent10
const int max_exponent
const int max_exponent10
const bint has_infinity
const bint has_quiet_NaN
const bint has_signaling_NaN
const float_denorm_style has_denorm
const bint has_denorm_loss
@staticmethod
T infinity()
@staticmethod
T quiet_NaN()
@staticmethod
T signaling_NaN()
@staticmethod
T denorm_min()
const bint is_iec559
const bint is_bounded
const bint is_modulo
const bint traps
const bint tinyness_before
const float_round_style round_style
......@@ -84,10 +84,15 @@ class CythonTest(unittest.TestCase):
self.assertNotEqual(TreePath.find_first(result_tree, path), None,
"Path '%s' not found in result tree" % path)
def fragment(self, code, pxds={}, pipeline=[]):
def fragment(self, code, pxds=None, pipeline=None):
"Simply create a tree fragment using the name of the test-case in parse errors."
if pxds is None:
pxds = {}
if pipeline is None:
pipeline = []
name = self.id()
if name.startswith("__main__."): name = name[len("__main__."):]
if name.startswith("__main__."):
name = name[len("__main__."):]
name = name.replace(".", "_")
return TreeFragment(code, name, pxds, pipeline=pipeline)
......@@ -139,7 +144,9 @@ class TransformTest(CythonTest):
Plans: One could have a pxd dictionary parameter to run_pipeline.
"""
def run_pipeline(self, pipeline, pyx, pxds={}):
def run_pipeline(self, pipeline, pyx, pxds=None):
if pxds is None:
pxds = {}
tree = self.fragment(pyx, pxds).root
# Run pipeline
for T in pipeline:
......
......@@ -498,7 +498,7 @@ __Pyx_CyFunction_clear(__pyx_CyFunctionObject *m)
for (i = 0; i < m->defaults_pyobjects; i++)
Py_XDECREF(pydefaults[i]);
PyMem_Free(m->defaults);
PyObject_Free(m->defaults);
m->defaults = NULL;
}
......@@ -708,7 +708,7 @@ static int __pyx_CyFunction_init(void) {
static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) {
__pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func;
m->defaults = PyMem_Malloc(size);
m->defaults = PyObject_Malloc(size);
if (!m->defaults)
return PyErr_NoMemory();
memset(m->defaults, 0, size);
......
......@@ -394,14 +394,14 @@ static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class
#endif
if (!strict && (size_t)basicsize > size) {
PyOS_snprintf(warning, sizeof(warning),
"%s.%s size changed, may indicate binary incompatibility",
module_name, class_name);
"%s.%s size changed, may indicate binary incompatibility. Expected %zd, got %zd",
module_name, class_name, basicsize, size);
if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad;
}
else if ((size_t)basicsize != size) {
PyErr_Format(PyExc_ValueError,
"%.200s.%.200s has the wrong size, try recompiling",
module_name, class_name);
"%.200s.%.200s has the wrong size, try recompiling. Expected %zd, got %zd",
module_name, class_name, basicsize, size);
goto bad;
}
return (PyTypeObject *)result;
......
......@@ -33,6 +33,8 @@ cdef extern from *:
void* PyMem_Malloc(size_t n)
void PyMem_Free(void *p)
void* PyObject_Malloc(size_t n)
void PyObject_Free(void *p)
cdef struct __pyx_memoryview "__pyx_memoryview_obj":
Py_buffer view
......@@ -137,7 +139,7 @@ cdef class array:
self.format = self._format
# use single malloc() for both shape and strides
self._shape = <Py_ssize_t *> PyMem_Malloc(sizeof(Py_ssize_t)*self.ndim*2)
self._shape = <Py_ssize_t *> PyObject_Malloc(sizeof(Py_ssize_t)*self.ndim*2)
self._strides = self._shape + self.ndim
if not self._shape:
......@@ -212,7 +214,7 @@ cdef class array:
refcount_objects_in_slice(self.data, self._shape,
self._strides, self.ndim, False)
free(self.data)
PyMem_Free(self._shape)
PyObject_Free(self._shape)
property memview:
@cname('get_memview')
......
......@@ -415,7 +415,7 @@ Configurable optimisations
``optimize.unpack_method_calls`` (True / False)
Cython can generate code that optimistically checks for Python method objects
at call time and unpacks the underlying function to call it directly. This
can substantially speed up method calls, especially for builtins, but may also
can substantially speed up method calls, especially for builtins, but may also
have a slight negative performance impact in some cases where the guess goes
completely wrong.
Disabling this option can also reduce the code size. Default is True.
......
......@@ -36,7 +36,7 @@ Attributes
* Are fixed at compile time.
* You can't add attributes to an extension type instance at run time like in normal Python.
* You can sub-class the extenstion type in Python to add attributes at run-time.
* You can sub-class the extension type in Python to add attributes at run-time.
* There are two ways to access extension type attributes:
......@@ -314,11 +314,11 @@ Subclassing
* If the base type is a built-in type, it must have been previously declared as an ``extern`` extension type.
* ``cimport`` can be used to import the base type, if the extern declared base type is in a ``.pxd`` definition file.
* In Cython, multiple inheritance is not permitted.. singlular inheritance only
* In Cython, multiple inheritance is not permitted.. singular inheritance only
* Cython extenstion types can also be sub-classed in Python.
* Cython extension types can also be sub-classed in Python.
* Here multiple inhertance is permissible as is normal for Python.
* Here multiple inheritance is permissible as is normal for Python.
* Even multiple extension types may be inherited, but C-layout of all the base classes must be compatible.
......@@ -448,7 +448,7 @@ External
print "Imag:", c.cval.imag
.. note:: Some important things in the example:
#. ``ctypedef`` has been used because because Python's header file has the struct decalared with::
#. ``ctypedef`` has been used because Python's header file has the struct declared with::
ctypedef struct {
...
......
......@@ -66,7 +66,7 @@ cimport
* Use the **cimport** statement, as you would Python's import statement, to access these files
from other definition or implementation files.
* **cimport** does not need to be called in ``.pyx`` file for for ``.pxd`` file that has the
* **cimport** does not need to be called in ``.pyx`` file for ``.pxd`` file that has the
same name, as they are already in the same namespace.
* For cimport to find the stated definition file, the path to the file must be appended to the
``-I`` option of the **Cython compile command**.
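As the second bullet above says, a ``.pxd`` that shares its name with a ``.pyx`` is applied automatically; a minimal sketch::

    # foo.pxd -- declarations only
    cdef int double_it(int x)

    # foo.pyx -- no "cimport foo" needed; the same-named .pxd is picked up automatically
    cdef int double_it(int x):
        return 2 * x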
......@@ -705,7 +705,7 @@ Error and Exception Handling
.. note:: Python Objects
* Declared exception values are **not** need.
* Remember that Cython assumes that a function function without a declared return value, returns a Python object.
* Remember that Cython assumes that a function without a declared return value, returns a Python object.
* Exceptions on such functions are implicitly propagated by returning ``NULL``
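A minimal sketch of the two cases the note above distinguishes::

    cdef object first_item(seq):
        # object return type: no exception value needed, errors propagate
        # automatically (internally signalled by returning NULL)
        if not seq:
            raise IndexError("empty sequence")
        return seq[0]

    cdef int count_items(seq) except -1:
        # non-object return types need an explicitly declared exception value
        return len(seq)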
.. note:: C++
......
......@@ -73,7 +73,7 @@ run a Python session to test both the Python version (imported from
[2, 2, 2],
[1, 1, 1]])
In [4]: import convolve1
In [4]: convolve1.naive_convolve(np.array([[1, 1, 1]], dtype=np.int),
In [4]: convolve1.naive_convolve(np.array([[1, 1, 1]], dtype=np.int),
... np.array([[1],[2],[1]], dtype=np.int))
Out [4]:
array([[1, 1, 1],
......@@ -126,7 +126,7 @@ compatibility. Consider this code (*read the comments!*) ::
raise ValueError("Only odd dimensions on filter supported")
assert f.dtype == DTYPE and g.dtype == DTYPE
# The "cdef" keyword is also used within functions to type variables. It
# can only be used at the top indendation level (there are non-trivial
# can only be used at the top indentation level (there are non-trivial
# problems with allowing them in other places, though we'd love to see
# good and thought out proposals for it).
#
......@@ -196,7 +196,7 @@ These are the needed changes::
def naive_convolve(np.ndarray[DTYPE_t, ndim=2] f, np.ndarray[DTYPE_t, ndim=2] g):
...
cdef np.ndarray[DTYPE_t, ndim=2] h = ...
Usage:
.. sourcecode:: ipython
......@@ -227,42 +227,20 @@ The array lookups are still slowed down by two factors:
...
cimport cython
@cython.boundscheck(False) # turn of bounds-checking for entire function
@cython.boundscheck(False) # turn off bounds-checking for entire function
@cython.wraparound(False) # turn off negative index wrapping for entire function
def naive_convolve(np.ndarray[DTYPE_t, ndim=2] f, np.ndarray[DTYPE_t, ndim=2] g):
...
Now bounds checking is not performed (and, as a side-effect, if you ''do''
happen to access out of bounds you will in the best case crash your program
and in the worst case corrupt data). It is possible to switch bounds-checking
mode in many ways, see :ref:`compiler-directives` for more
information.
Negative indices are dealt with by ensuring Cython that the indices will be
positive, by casting the variables to unsigned integer types (if you do have
negative values, then this casting will create a very large positive value
instead and you will attempt to access out-of-bounds values). Casting is done
with a special ``<>``-syntax. The code below is changed to use either
unsigned ints or casting as appropriate::
...
cdef int s, t # changed
cdef unsigned int x, y, v, w # changed
cdef int s_from, s_to, t_from, t_to
cdef DTYPE_t value
for x in range(xmax):
for y in range(ymax):
s_from = max(smid - x, -smid)
s_to = min((xmax - x) - smid, smid + 1)
t_from = max(tmid - y, -tmid)
t_to = min((ymax - y) - tmid, tmid + 1)
value = 0
for s in range(s_from, s_to):
for t in range(t_from, t_to):
v = <unsigned int>(x - smid + s) # changed
w = <unsigned int>(y - tmid + t) # changed
value += g[<unsigned int>(smid - s), <unsigned int>(tmid - t)] * f[v, w] # changed
h[x, y] = value
...
Also, we've disabled the check to wrap negative indices (e.g. g[-1] giving
the last value). As with disabling bounds checking, bad things will happen
if we try to actually use negative indices with this disabled.
The function call overhead now starts to play a role, so we compare the latter
two examples with larger N:
......@@ -310,4 +288,3 @@ There is some speed penalty to this though (as one makes more assumptions
compile-time if the type is set to :obj:`np.ndarray`, specifically it is
assumed that the data is stored in pure strided mode and not in indirect
mode).
......@@ -164,7 +164,7 @@ write a short script to profile our code::
Running this on my box gives the following output::
TODO: how to display this not as code but verbatimly?
TODO: how to display this not as code but verbatim?
Sat Nov 7 17:40:54 2009 Profile.prof
......
......@@ -132,7 +132,7 @@ you can use a cast to write::
This may be dangerous if :meth:`quest()` is not actually a :class:`Shrubbery`, as it
will try to access width as a C struct member which may not exist. At the C level,
rather than raising an :class:`AttributeError`, either an nonsensical result will be
returned (interpreting whatever data is at at that address as an int) or a segfault
returned (interpreting whatever data is at that address as an int) or a segfault
may result from trying to access invalid memory. Instead, one can write::
print (<Shrubbery?>quest()).width
......@@ -649,7 +649,7 @@ When you declare::
the name Spam serves both these roles. There may be other names by which you
can refer to the constructor, but only Spam can be used as a type name. For
example, if you were to explicity import MyModule, you could use
example, if you were to explicitly import MyModule, you could use
``MyModule.Spam()`` to create a Spam instance, but you wouldn't be able to use
:class:`MyModule.Spam` as a type name.
......
......@@ -22,6 +22,7 @@ Contents:
buffer
parallelism
debugging
numpy_tutorial
Indices and tables
------------------
......
......@@ -649,7 +649,7 @@ None Slices
===========
Although memoryview slices are not objects they can be set to None and they can
be be checked for being None as well::
be checked for being None as well::
def func(double[:] myarray = None):
print(myarray is None)
......
......@@ -263,7 +263,7 @@ compatibility. Here's :file:`convolve2.pyx`. *Read the comments!* ::
raise ValueError("Only odd dimensions on filter supported")
assert f.dtype == DTYPE and g.dtype == DTYPE
# The "cdef" keyword is also used within functions to type variables. It
# can only be used at the top indendation level (there are non-trivial
# can only be used at the top indentation level (there are non-trivial
# problems with allowing them in other places, though we'd love to see
# good and thought out proposals for it).
#
......@@ -463,7 +463,7 @@ if someone is interested also under Python 2.x.
There is some speed penalty to this though (as one makes more assumptions
compile-time if the type is set to :obj:`np.ndarray`, specifically it is
assumed that the data is stored in pure strided more and not in indirect
assumed that the data is stored in pure strided mode and not in indirect
mode).
[:enhancements/buffer:More information]
......
......@@ -94,7 +94,7 @@ It currently supports OpenMP, but later on more backends might be supported.
The ``chunksize`` argument indicates the chunksize to be used for dividing the iterations among threads.
This is only valid for ``static``, ``dynamic`` and ``guided`` scheduling, and is optional. Different chunksizes
may give substatially different performance results, depending on the schedule, the load balance it provides,
may give substantially different performance results, depending on the schedule, the load balance it provides,
the scheduling overhead and the amount of false sharing (if any).
Example with a reduction::
......
I think this is a result of a recent change to Pyrex that
has been merged into Cython.
If a directory contains an :file:`__init__.py` or :file:`__init__.pyx` file,
it's now assumed to be a package directory. So, for example,
if you have a directory structure::
foo/
__init__.py
shrubbing.pxd
shrubbing.pyx
then the shrubbing module is assumed to belong to a package
called 'foo', and its fully qualified module name is
'foo.shrubbing'.
So when Pyrex wants to find out whether there is a `.pxd` file for shrubbing,
it looks for one corresponding to a module called `foo.shrubbing`. It
does this by searching the include path for a top-level package directory
called 'foo' containing a file called 'shrubbing.pxd'.
However, if foo is the current directory you're running
the compiler from, and you haven't added foo to the
include path using a -I option, then it won't be on
the include path, and the `.pxd` won't be found.
What to do about this depends on whether you really
intend the module to reside in a package.
If you intend shrubbing to be a top-level module, you
will have to move it somewhere else where there is
no :file:`__init__.*` file.
If you do intend it to reside in a package, then there
are two alternatives:
1. cd to the directory containing foo and compile
from there::
cd ..; cython foo/shrubbing.pyx
2. arrange for the directory containing foo to be
passed as a -I option, e.g.::
cython -I .. shrubbing.pyx
Arguably this behaviour is not very desirable, and I'll
see if I can do something about it.
......@@ -3,7 +3,7 @@
.. _wrapping-cplusplus:
********************************
Using C++ in Cython
********************************
Overview
......@@ -145,7 +145,7 @@ is then handled by ``cythonize()`` as follows::
from Cython.Build import cythonize
setup(ext_modules = cythonize(Extension(
"rect", # the extesion name
"rect", # the extension name
sources=["rect.pyx", "Rectangle.cpp"], # the Cython source and
# additional C++ source files
language="c++", # generate and compile C++ code
......@@ -166,7 +166,7 @@ version 0.17, Cython also allows to pass external source files into the
And in the .pyx source file, write this into the first comment block, before
any source code, to compile it in C++ mode and link it statically against the
:file:`Rectange.cpp` code file::
:file:`Rectangle.cpp` code file::
# distutils: language = c++
# distutils: sources = Rectangle.cpp
......@@ -363,13 +363,18 @@ a special module ``cython.operator``. The functions provided are:
* ``cython.operator.dereference`` for dereferencing. ``dereference(foo)``
will produce the C++ code ``*(foo)``
* ``cython.operator.preincrement`` for pre-incrementation. ``preincrement(foo)``
will produce the C++ code ``++(foo)``
* ...
will produce the C++ code ``++(foo)``.
Similarly for ``predecrement``, ``postincrement`` and ``postdecrement``.
* ``cython.operator.comma`` for the comma operator. ``comma(a, b)``
will produce the C++ code ``((a), (b))``.
These functions need to be cimported. Of course, one can use a
``from ... cimport ... as`` to have shorter and more readable functions.
For example: ``from cython.operator cimport dereference as deref``.
For completeness, it's also worth mentioning ``cython.operator.address``
which can also be written ``&foo``.
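A short sketch combining the operators mentioned above (the same ``dereference``/``preincrement`` pattern appears in the libcpp test changes later in this commit; requires compiling with C++)::

    # distutils: language = c++
    from cython.operator cimport dereference as deref, preincrement as inc
    from libcpp.vector cimport vector

    cdef int sum_vector(vector[int]& v):
        cdef int total = 0
        cdef vector[int].iterator it = v.begin()
        while it != v.end():
            total += deref(it)    # generates *(it)
            inc(it)               # generates ++(it)
        return total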
Templates
----------
......
.. highlight:: cython
.. _overview:
********
Welcome!
********
===============
What is Cython?
===============
Cython is a programming language based on Python
with extra syntax to provide static type declarations.
================
What Does It Do?
================
It takes advantage of the benefits of Python while allowing one to achieve the speed of C.
============================
How Exactly Does It Do That?
============================
The source code gets translated into optimized C/C++
code and compiled as Python extension modules.
This allows for both very fast program execution and tight
integration with external C libraries, while keeping
up the high *programmer productivity* for which the
Python language is well known.
=============
Tell Me More!
=============
The Python language is well known.
The primary Python execution environment is commonly referred to as CPython, as it is written in
C. Other major implementations use:
:Java: Jython [#Jython]_
:C#: IronPython [#IronPython]_)
:Python itself: PyPy [#PyPy]_
Written in C, CPython has been
conducive to wrapping many external libraries that interface through the C language. It has, however, remained non trivial to write the necessary glue code in
C, especially for programmers who are more fluent in a
high-level language like Python than in a do-it-yourself
language like C.
Originally based on the well-known Pyrex [#Pyrex]_, the
Cython project has approached this problem by means
of a source code compiler that translates Python code
to equivalent C code. This code is executed within the
CPython runtime environment, but at the speed of
compiled C and with the ability to call directly into C
libraries.
At the same time, it keeps the original interface of the Python source code, which makes it directly
usable from Python code. These two-fold characteristics enable Cython’s two major use cases:
#. Extending the CPython interpreter with fast binary modules, and
#. Interfacing Python code with external C libraries.
While Cython can compile (most) regular Python
code, the generated C code usually gains major (and
sometime impressive) speed improvements from optional static type declarations for both Python and
C types. These allow Cython to assign C semantics to
parts of the code, and to translate them into very efficient C code.
Type declarations can therefore be used
for two purposes:
#. For moving code sections from dynamic Python semantics into static-and-fast C semantics, but also for..
#. Directly manipulating types defined in external libraries. Cython thus merges the two worlds into a very broadly applicable programming language.
==================
Where Do I Get It?
==================
Well.. at `cython.org <http://cython.org>`_.. of course!
======================
How Do I Report a Bug?
======================
=================================
I Want To Make A Feature Request!
=================================
============================================
Is There a Mail List? How Do I Contact You?
============================================
.. rubric:: Footnotes
.. [#Jython] **Jython:** \J. Huginin, B. Warsaw, F. Bock, et al., Jython: Python for the Java platform, http://www.jython.org
.. [#IronPython] **IronPython:** Jim Hugunin et al., http://www.codeplex.com/IronPython.
.. [#PyPy] **PyPy:** The PyPy Group, PyPy: a Python implementation written in Python, http://pypy.org
.. [#Pyrex] **Pyrex:** G. Ewing, Pyrex: C-Extensions for Python, http://www.cosc.canterbury.ac.nz/greg.ewing/python/Pyrex/
......@@ -19,9 +19,9 @@ DEBUG = 0
_reloads={}
def pyx_to_dll(filename, ext = None, force_rebuild = 0,
build_in_temp=False, pyxbuild_dir=None, setup_args={},
reload_support=False, inplace=False):
def pyx_to_dll(filename, ext=None, force_rebuild=0, build_in_temp=False, pyxbuild_dir=None,
setup_args=None, reload_support=False, inplace=False):
"""Compile a PYX file to a DLL and return the name of the generated .so
or .dll ."""
assert os.path.exists(filename), "Could not find %s" % os.path.abspath(filename)
......@@ -35,6 +35,8 @@ def pyx_to_dll(filename, ext = None, force_rebuild = 0,
filename = filename[:-len(extension)] + '.c'
ext = Extension(name=modname, sources=[filename])
if setup_args is None:
setup_args = {}
if not pyxbuild_dir:
pyxbuild_dir = os.path.join(path, "_pyxbld")
......@@ -151,6 +153,7 @@ def pyx_to_dll(filename, ext = None, force_rebuild = 0,
sys.stderr.write(error + "\n")
raise
if __name__=="__main__":
pyx_to_dll("dummy.pyx")
from . import test
......
......@@ -62,19 +62,23 @@ PYXBLD_EXT = ".pyxbld"
DEBUG_IMPORT = False
def _print(message, args):
if args:
message = message % args
print(message)
def _debug(message, *args):
if DEBUG_IMPORT:
_print(message, args)
def _info(message, *args):
_print(message, args)
# Performance problem: for every PYX file that is imported, we will
# Performance problem: for every PYX file that is imported, we will
# invoke the whole distutils infrastructure even if the module is
# already built. It might be more efficient to only do it when the
# mod time of the .pyx is newer than the mod time of the .so but
......@@ -84,6 +88,7 @@ def _info(message, *args):
def _load_pyrex(name, filename):
"Load a pyrex file given a name and filename."
def get_distutils_extension(modname, pyxfilename, language_level=None):
# try:
# import hashlib
......@@ -103,6 +108,7 @@ def get_distutils_extension(modname, pyxfilename, language_level=None):
extension_mod.cython_directives = {'language_level': language_level}
return extension_mod,setup_args
def handle_special_build(modname, pyxfilename):
special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT
ext = None
......@@ -116,9 +122,8 @@ def handle_special_build(modname, pyxfilename):
make_ext = getattr(mod,'make_ext',None)
if make_ext:
ext = make_ext(modname, pyxfilename)
assert ext and ext.sources, ("make_ext in %s did not return Extension"
% special_build)
make_setup_args = getattr(mod,'make_setup_args',None)
assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build
make_setup_args = getattr(mod, 'make_setup_args',None)
if make_setup_args:
setup_args = make_setup_args()
assert isinstance(setup_args,dict), ("make_setup_args in %s did not return a dict"
......@@ -129,6 +134,7 @@ def handle_special_build(modname, pyxfilename):
for source in ext.sources]
return ext, setup_args
def handle_dependencies(pyxfilename):
testing = '_test_files' in globals()
dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT
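``handle_special_build()`` above looks for an optional ``<modname>.pyxbld`` file next to the ``.pyx`` and calls its ``make_ext()`` / ``make_setup_args()`` hooks; a hypothetical example of such a file::

    # mymod.pyxbld (hypothetical)
    def make_ext(modname, pyxfilename):
        from distutils.extension import Extension
        return Extension(name=modname,
                         sources=[pyxfilename, 'helper.c'],
                         include_dirs=['.'])

    def make_setup_args():
        return dict(script_args=['--verbose'])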
......@@ -166,16 +172,16 @@ def handle_dependencies(pyxfilename):
if testing:
_test_files.append(file)
def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None):
assert os.path.exists(pyxfilename), (
"Path does not exist: %s" % pyxfilename)
assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename
handle_dependencies(pyxfilename)
extension_mod,setup_args = get_distutils_extension(name, pyxfilename, language_level)
build_in_temp=pyxargs.build_in_temp
sargs=pyxargs.setup_args.copy()
extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level)
build_in_temp = pyxargs.build_in_temp
sargs = pyxargs.setup_args.copy()
sargs.update(setup_args)
build_in_temp=sargs.pop('build_in_temp',build_in_temp)
build_in_temp = sargs.pop('build_in_temp',build_in_temp)
from . import pyxbuild
so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod,
......@@ -189,7 +195,7 @@ def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_l
junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;)
junkstuff = glob.glob(junkpath)
for path in junkstuff:
if path!=so_path:
if path != so_path:
try:
os.remove(path)
except IOError:
......@@ -197,6 +203,7 @@ def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_l
return so_path
def load_module(name, pyxfilename, pyxbuild_dir=None, is_package=False,
build_inplace=False, language_level=None, so_path=None):
try:
......@@ -314,6 +321,7 @@ class PyxImporter(object):
_debug("%s not found" % fullname)
return None
class PyImporter(PyxImporter):
"""A meta-path importer for normal .py files.
"""
......@@ -384,6 +392,7 @@ class PyImporter(PyxImporter):
self.blocked_modules.pop()
return importer
class LibLoader(object):
def __init__(self):
self._libs = {}
......@@ -404,6 +413,7 @@ class LibLoader(object):
_lib_loader = LibLoader()
class PyxLoader(object):
def __init__(self, fullname, path, init_path=None, pyxbuild_dir=None,
inplace=False, language_level=None):
......@@ -442,7 +452,8 @@ class PyxArgs(object):
build_in_temp=True
setup_args={} #None
##pyxargs=None
##pyxargs=None
def _have_importers():
has_py_importer = False
......@@ -456,8 +467,9 @@ def _have_importers():
return has_py_importer, has_pyx_importer
def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
setup_args={}, reload_support=False,
setup_args=None, reload_support=False,
load_py_module_on_import_failure=False, inplace=False,
language_level=None):
"""Main entry point. Call this to install the .pyx import hook in
......@@ -504,6 +516,8 @@ def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
The default is to use the language level of the current Python
runtime for .py files and Py2 for .pyx files.
"""
if setup_args is None:
setup_args = {}
if not build_dir:
build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld')
......@@ -532,6 +546,7 @@ def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
return py_importer, pyx_importer
def uninstall(py_importer, pyx_importer):
"""
Uninstall an import hook.
......@@ -546,6 +561,7 @@ def uninstall(py_importer, pyx_importer):
except ValueError:
pass
# MAIN
def show_docs():
......@@ -559,5 +575,6 @@ def show_docs():
pass
help(__main__)
if __name__ == '__main__':
show_docs()
......@@ -2019,6 +2019,8 @@ def runtests(options, cmd_args, coverage=None):
if options.system_pyregr and languages:
sys_pyregr_dir = os.path.join(sys.prefix, 'lib', 'python'+sys.version[:3], 'test')
if not os.path.isdir(sys_pyregr_dir):
sys_pyregr_dir = os.path.join(os.path.dirname(sys.executable), 'Lib', 'test') # source build
if os.path.isdir(sys_pyregr_dir):
filetests = TestBuilder(ROOTDIR, WORKDIR, selectors, exclude_selectors,
options.annotate_source, options.cleanup_workdir,
......
# mode: error
# ticket: 264
# tag: property, decorator
from functools import wraps
def wrap_func(f):
@wraps(f)
def wrap(*args, **kwargs):
print("WRAPPED")
return f(*args, **kwargs)
return wrap
cdef class Prop:
@property
@wrap_func
def prop1(self):
return 1
@property
def prop2(self):
return 2
@wrap_func
@prop2.setter
def prop2(self, value):
pass
@prop2.setter
@wrap_func
def prop2(self, value):
pass
_ERRORS = """
19:4: Property methods with additional decorators are not supported
27:4: Property methods with additional decorators are not supported
33:4: Property methods with additional decorators are not supported
"""
# mode: error
cdef class Test
# mode: error
class Test(object):
pass
_ERRORS = u"""
3:5: C class 'Test' is declared but not defined
"""
# mode: run
# ticket: 264
# tag: property, decorator
cdef class Prop:
"""
>>> p = Prop()
>>> p.prop
GETTING 'None'
>>> p.prop = 1
SETTING '1' (previously: 'None')
>>> p.prop
GETTING '1'
1
>>> p.prop = 2
SETTING '2' (previously: '1')
>>> p.prop
GETTING '2'
2
>>> del p.prop
DELETING '2'
>>> p.prop
GETTING 'None'
"""
cdef _value
def __init__(self):
self._value = None
@property
def prop(self):
print("FAIL")
return 0
@prop.getter
def prop(self):
print("FAIL")
@property
def prop(self):
print("GETTING '%s'" % self._value)
return self._value
@prop.setter
def prop(self, value):
print("SETTING '%s' (previously: '%s')" % (value, self._value))
self._value = value
@prop.deleter
def prop(self):
print("DELETING '%s'" % self._value)
self._value = None
......@@ -3,6 +3,9 @@
cdef extern from "cpp_namespaces_helper.h" namespace "A":
ctypedef int A_t
cdef struct S:
double x
A_t k
A_t A_func(A_t first, A_t)
cdef void f(A_t)
......@@ -36,3 +39,11 @@ def test_typedef(A_t a):
3
"""
return a
def test_convert_struct(S s):
"""
>>> py_value = {'x': 3.5, 'k': 10}
>>> test_convert_struct(py_value) == py_value
True
"""
return s
......@@ -18,6 +18,11 @@ namespace A {
typedef int A_t;
struct S {
A_t k;
double x;
};
A_t A_func(A_t first, A_t second) {
return first + second;
}
......
......@@ -13,6 +13,7 @@ cimport libcpp.set
cimport libcpp.stack
cimport libcpp.vector
cimport libcpp.complex
cimport libcpp.limits
from libcpp.deque cimport *
from libcpp.list cimport *
......@@ -23,6 +24,7 @@ from libcpp.set cimport *
from libcpp.stack cimport *
from libcpp.vector cimport *
from libcpp.complex cimport *
from libcpp.limits cimport *
cdef libcpp.deque.deque[int] d1 = deque[int]()
cdef libcpp.list.list[int] l1 = list[int]()
......@@ -91,3 +93,17 @@ cdef const_vector_to_list(const vector[double]& cv):
lst.append(cython.operator.dereference(iter))
cython.operator.preincrement(iter)
return lst
cdef double dmax = numeric_limits[double].max()
cdef double dmin = numeric_limits[double].min()
cdef double deps = numeric_limits[double].epsilon()
cdef double dqnan = numeric_limits[double].quiet_NaN()
cdef double dsnan = numeric_limits[double].signaling_NaN()
cdef double dinf = numeric_limits[double].infinity()
cdef int imax = numeric_limits[int].max()
cdef int imin = numeric_limits[int].min()
cdef int ieps = numeric_limits[int].epsilon()
cdef int iqnan = numeric_limits[int].quiet_NaN()
cdef int isnan = numeric_limits[int].signaling_NaN()
cdef int iinf = numeric_limits[int].infinity()
......@@ -88,3 +88,18 @@ def locals_ctype_inferred():
cdef int *p = NULL
b = p
return 'b' in locals()
def pass_on_locals(f):
"""
>>> def print_locals(l, **kwargs):
... print(sorted(l))
>>> pass_on_locals(print_locals)
['f']
['f']
['f']
"""
f(locals())
f(l=locals())
f(l=locals(), a=1)
# mode: run
# ticket: 264
# tag: property, decorator
cdef class Prop:
cdef _value
# mode: run
# ticket: 264
# tag: property, decorator
class Prop(object):
"""
>>> p = Prop()
>>> p.prop
GETTING 'None'
>>> p.prop = 1
SETTING '1' (previously: 'None')
>>> p.prop
GETTING '1'
1
>>> p.prop = 2
SETTING '2' (previously: '1')
>>> p.prop
GETTING '2'
2
>>> del p.prop
DELETING '2'
>>> p.prop
GETTING 'None'
"""
def __init__(self):
self._value = None
@property
def prop(self):
print("FAIL")
return 0
@prop.getter
def prop(self):
print("FAIL")
@property
def prop(self):
print("GETTING '%s'" % self._value)
return self._value
@prop.setter
def prop(self, value):
print("SETTING '%s' (previously: '%s')" % (value, self._value))
self._value = value
@prop.deleter
def prop(self):
print("DELETING '%s'" % self._value)
self._value = None
......@@ -145,6 +145,24 @@ def unicode_methods(Py_UCS4 uchar):
uchar.title(),
]
@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists(
'//SimpleCallNode',
'//CoerceFromPyTypeNode',
)
def unicode_method_return_type(Py_UCS4 uchar):
"""
>>> unicode_method_return_type(ord('A'))
[True, False]
>>> unicode_method_return_type(ord('a'))
[False, True]
"""
cdef Py_UCS4 uc, ul
uc, ul = uchar.upper(), uchar.lower()
return [uc == uchar, ul == uchar]
@cython.test_assert_path_exists('//IntNode')
@cython.test_fail_if_path_exists('//SimpleCallNode',
'//PythonCapiCallNode')
......