Commit 1345bdd8 authored by gabrieldemarmiesse's avatar gabrieldemarmiesse

Merge branch 'master' into update_cpp_examples

parents 3b6d1aa3 5fadf79e
...@@ -8,19 +8,31 @@ Cython Changelog ...@@ -8,19 +8,31 @@ Cython Changelog
Features added Features added
-------------- --------------
* Raising exceptions from nogil code will automatically acquire the GIL, instead
of requiring an explicit ``with gil`` block.
* In CPython 3.6 and later, looking up globals in the module dict is almost * In CPython 3.6 and later, looking up globals in the module dict is almost
as fast as looking up C globals. as fast as looking up C globals.
(Github issue #2313)
* For a Python subclass of an extension type, repeated method calls to non-overridden
cpdef methods can avoid the attribute lookup in Py3.6+, which makes them 4x faster.
(Github issue #2313)
* (In-)equality comparisons of objects to integer literals are faster. * (In-)equality comparisons of objects to integer literals are faster.
(Github issue #2188) (Github issue #2188)
* Some internal and 1-argument method calls are faster. * Some internal and 1-argument method calls are faster.
* Modules that cimport many external extension types from other Cython modules
execute fewer import requests during module initialisation.
* The coverage plugin considers more C file extensions such as ``.cc`` and ``.cxx``. * The coverage plugin considers more C file extensions such as ``.cc`` and ``.cxx``.
(Github issue #2266) (Github issue #2266)
* Modules that cimport many external extension types from other Cython modules * The `cythonize` command accepts compile time variable values (as set by `DEF`)
execute less import requests during module initialisation. through the new `-E` option.
Patch by Jerome Kieffer. (Github issue #2315)
Bugs fixed Bugs fixed
---------- ----------
...@@ -28,20 +40,35 @@ Bugs fixed ...@@ -28,20 +40,35 @@ Bugs fixed
* The directive ``language_level=3`` did not apply to the first token in the * The directive ``language_level=3`` did not apply to the first token in the
source file. (Github issue #2230) source file. (Github issue #2230)
* Overriding cpdef methods did not work in Python subclasses with slots.
Note that this can have a performance impact on calls from Cython code.
(Github issue #1771)
* Fix declarations of builtin or C types using strings in pure python mode.
(Github issue #2046)
Other changes Other changes
------------- -------------
0.28.3 (2018-??-??) 0.28.4 (2018-??-??)
=================== ===================
Bugs fixed Bugs fixed
---------- ----------
* Set iteration was broken in non-CPython since 0.28. * Reallowing ``tp_clear()`` in a subtype of an ``@no_gc_clear`` extension type
generated an invalid C function call to the (non-existent) base type implementation.
(Github issue #2309)
* Overriding cpdef methods did not work in Python subclasses with slots.
(Github issue #1771) 0.28.3 (2018-05-27)
===================
Bugs fixed
----------
* Set iteration was broken in non-CPython since 0.28.
* ``UnicodeEncodeError`` in Py2 when ``%s`` formatting is optimised for * ``UnicodeEncodeError`` in Py2 when ``%s`` formatting is optimised for
unicode strings. (Github issue #2276) unicode strings. (Github issue #2276)
......
...@@ -25,9 +25,14 @@ class _FakePool(object): ...@@ -25,9 +25,14 @@ class _FakePool(object):
for _ in imap(func, args): for _ in imap(func, args):
pass pass
def close(self): pass def close(self):
def terminate(self): pass pass
def join(self): pass
def terminate(self):
pass
def join(self):
pass
def parse_directives(option, name, value, parser): def parse_directives(option, name, value, parser):
...@@ -52,6 +57,13 @@ def parse_options(option, name, value, parser): ...@@ -52,6 +57,13 @@ def parse_options(option, name, value, parser):
setattr(parser.values, dest, options) setattr(parser.values, dest, options)
def parse_compile_time_env(option, name, value, parser):
    """optparse callback for ``-E``: collect compile time NAME=VALUE settings.

    Starts from a copy of any settings gathered by earlier ``-E`` options so
    repeated options accumulate instead of overwriting each other, then stores
    the merged mapping back on the parser values.
    """
    dest = option.dest
    merged = dict(getattr(parser.values, dest, {}))
    merged = Options.parse_compile_time_env(value, current_settings=merged)
    setattr(parser.values, dest, merged)
def find_package_base(path): def find_package_base(path):
base_dir, package_path = os.path.split(path) base_dir, package_path = os.path.split(path)
while os.path.isfile(os.path.join(base_dir, '__init__.py')): while os.path.isfile(os.path.join(base_dir, '__init__.py')):
...@@ -85,6 +97,7 @@ def cython_compile(path_pattern, options): ...@@ -85,6 +97,7 @@ def cython_compile(path_pattern, options):
exclude_failures=options.keep_going, exclude_failures=options.keep_going,
exclude=options.excludes, exclude=options.excludes,
compiler_directives=options.directives, compiler_directives=options.directives,
compile_time_env=options.compile_time_env,
force=options.force, force=options.force,
quiet=options.quiet, quiet=options.quiet,
**options.options) **options.options)
...@@ -136,11 +149,17 @@ def parse_args(args): ...@@ -136,11 +149,17 @@ def parse_args(args):
from optparse import OptionParser from optparse import OptionParser
parser = OptionParser(usage='%prog [options] [sources and packages]+') parser = OptionParser(usage='%prog [options] [sources and packages]+')
parser.add_option('-X', '--directive', metavar='NAME=VALUE,...', dest='directives', parser.add_option('-X', '--directive', metavar='NAME=VALUE,...',
type=str, action='callback', callback=parse_directives, default={}, dest='directives', default={}, type="str",
action='callback', callback=parse_directives,
help='set a compiler directive') help='set a compiler directive')
parser.add_option('-s', '--option', metavar='NAME=VALUE', dest='options', parser.add_option('-E', '--compile-time-env', metavar='NAME=VALUE,...',
type=str, action='callback', callback=parse_options, default={}, dest='compile_time_env', default={}, type="str",
action='callback', callback=parse_compile_time_env,
help='set a compile time environment variable')
parser.add_option('-s', '--option', metavar='NAME=VALUE',
dest='options', default={}, type="str",
action='callback', callback=parse_options,
help='set a cythonize option') help='set a cythonize option')
parser.add_option('-3', dest='python3_mode', action='store_true', parser.add_option('-3', dest='python3_mode', action='store_true',
help='use Python 3 syntax mode by default') help='use Python 3 syntax mode by default')
......
...@@ -127,6 +127,27 @@ def file_hash(filename): ...@@ -127,6 +127,27 @@ def file_hash(filename):
return m.hexdigest() return m.hexdigest()
def update_pythran_extension(ext):
    """Reconfigure a distutils extension *ext* in place for a Pythran build.

    Copies the build settings that Pythran's own extension template provides
    (include dirs, compile/link flags, macros, libraries) onto *ext* and
    switches it to C++ compilation.

    Raises RuntimeError when the optional ``pythran`` package is missing.
    """
    if not PythranAvailable:
        raise RuntimeError("You first need to install Pythran to use the np_pythran directive.")
    pythran_ext = pythran.config.make_extension()
    # Merge every list-valued build setting from the Pythran template.
    for setting in ('include_dirs', 'extra_compile_args', 'extra_link_args',
                    'define_macros', 'undef_macros', 'library_dirs', 'libraries'):
        getattr(ext, setting).extend(pythran_ext[setting])
    ext.language = 'c++'
    # These options are not compatible with the way normal Cython extensions work
    for bad_option in ("-fwhole-program", "-fvisibility=hidden"):
        try:
            ext.extra_compile_args.remove(bad_option)
        except ValueError:
            pass
def parse_list(s): def parse_list(s):
""" """
>>> parse_list("") >>> parse_list("")
...@@ -180,27 +201,6 @@ distutils_settings = { ...@@ -180,27 +201,6 @@ distutils_settings = {
} }
def update_pythran_extension(ext):
    """Adjust a distutils extension *ext* in place so it builds with Pythran.

    Extends the extension's build settings with those produced by
    ``pythran.config.make_extension()`` and forces C++ compilation.
    """
    # Pythran is an optional dependency; only required for np_pythran code.
    if not PythranAvailable:
        raise RuntimeError("You first need to install Pythran to use the np_pythran directive.")
    pythran_ext = pythran.config.make_extension()
    ext.include_dirs.extend(pythran_ext['include_dirs'])
    ext.extra_compile_args.extend(pythran_ext['extra_compile_args'])
    ext.extra_link_args.extend(pythran_ext['extra_link_args'])
    ext.define_macros.extend(pythran_ext['define_macros'])
    ext.undef_macros.extend(pythran_ext['undef_macros'])
    ext.library_dirs.extend(pythran_ext['library_dirs'])
    ext.libraries.extend(pythran_ext['libraries'])
    # Pythran generates C++ sources, so the whole extension must build as C++.
    ext.language = 'c++'
    # These options are not compatible with the way normal Cython extensions work
    for bad_option in ["-fwhole-program", "-fvisibility=hidden"]:
        try:
            ext.extra_compile_args.remove(bad_option)
        except ValueError:
            pass
@cython.locals(start=cython.Py_ssize_t, end=cython.Py_ssize_t) @cython.locals(start=cython.Py_ssize_t, end=cython.Py_ssize_t)
def line_iter(source): def line_iter(source):
if isinstance(source, basestring): if isinstance(source, basestring):
...@@ -964,6 +964,8 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, ...@@ -964,6 +964,8 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
# setup for out of place build directory if enabled # setup for out of place build directory if enabled
if build_dir: if build_dir:
if os.path.isabs(c_file):
warnings.warn("build_dir has no effect for absolute source paths")
c_file = os.path.join(build_dir, c_file) c_file = os.path.join(build_dir, c_file)
dir = os.path.dirname(c_file) dir = os.path.dirname(c_file)
safe_makedirs_once(dir) safe_makedirs_once(dir)
......
...@@ -47,10 +47,11 @@ Options: ...@@ -47,10 +47,11 @@ Options:
--warning-errors, -Werror Make all warnings into errors --warning-errors, -Werror Make all warnings into errors
--warning-extra, -Wextra Enable extra warnings --warning-extra, -Wextra Enable extra warnings
-X, --directive <name>=<value>[,<name=value,...] Overrides a compiler directive -X, --directive <name>=<value>[,<name=value,...] Overrides a compiler directive
-E, --compile-time-env name=value[,<name=value,...] Provides compile time env like DEF would do.
""" """
#The following experimental options are supported only on MacOSX: # The following experimental options are supported only on MacOSX:
# -C, --compile Compile generated .c file to .o file # -C, --compile Compile generated .c file to .o file
# --link Link .o file to produce extension module (implies -C) # --link Link .o file to produce extension module (implies -C)
# -+, --cplus Use C++ compiler for compiling and linking # -+, --cplus Use C++ compiler for compiling and linking
...@@ -174,6 +175,17 @@ def parse_command_line(args): ...@@ -174,6 +175,17 @@ def parse_command_line(args):
except ValueError as e: except ValueError as e:
sys.stderr.write("Error in compiler directive: %s\n" % e.args[0]) sys.stderr.write("Error in compiler directive: %s\n" % e.args[0])
sys.exit(1) sys.exit(1)
elif option == "--compile-time-env" or option.startswith('-E'):
if option.startswith('-E') and option[2:].strip():
x_args = option[2:]
else:
x_args = pop_value()
try:
options.compile_time_env = Options.parse_compile_time_env(
x_args, current_settings=options.compile_time_env)
except ValueError as e:
sys.stderr.write("Error in compile-time-env: %s\n" % e.args[0])
sys.exit(1)
elif option.startswith('--debug'): elif option.startswith('--debug'):
option = option[2:].replace('-', '_') option = option[2:].replace('-', '_')
from . import DebugFlags from . import DebugFlags
......
...@@ -1376,8 +1376,12 @@ def _analyse_name_as_type(name, pos, env): ...@@ -1376,8 +1376,12 @@ def _analyse_name_as_type(name, pos, env):
if type is not None: if type is not None:
return type return type
global_entry = env.global_scope().lookup_here(name) global_entry = env.global_scope().lookup(name)
if global_entry and global_entry.type and global_entry.type.is_extension_type: if global_entry and global_entry.type and (
global_entry.type.is_extension_type
or global_entry.type.is_struct_or_union
or global_entry.type.is_builtin_type
or global_entry.type.is_cpp_class):
return global_entry.type return global_entry.type
from .TreeFragment import TreeFragment from .TreeFragment import TreeFragment
......
...@@ -1606,7 +1606,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): ...@@ -1606,7 +1606,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln("}") code.putln("}")
def generate_clear_function(self, scope, code, cclass_entry): def generate_clear_function(self, scope, code, cclass_entry):
tp_slot = TypeSlots.GCDependentSlot("tp_clear") tp_slot = TypeSlots.get_slot_by_name("tp_clear")
slot_func = scope.mangle_internal("tp_clear") slot_func = scope.mangle_internal("tp_clear")
base_type = scope.parent_type.base_type base_type = scope.parent_type.base_type
if tp_slot.slot_code(scope) != slot_func: if tp_slot.slot_code(scope) != slot_func:
......
...@@ -4341,6 +4341,25 @@ class OverrideCheckNode(StatNode): ...@@ -4341,6 +4341,25 @@ class OverrideCheckNode(StatNode):
code.putln("else if (unlikely((Py_TYPE(%s)->tp_dictoffset != 0)" code.putln("else if (unlikely((Py_TYPE(%s)->tp_dictoffset != 0)"
" || (Py_TYPE(%s)->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) {" % ( " || (Py_TYPE(%s)->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) {" % (
self_arg, self_arg)) self_arg, self_arg))
code.putln("#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP")
# TODO: remove the object dict version check by 'inlining' the getattr implementation for methods.
# This would allow checking the dict versions around _PyType_Lookup() if it returns a descriptor,
# and would (tada!) make this check a pure type based thing instead of supporting only a single
# instance at a time.
code.putln("static PY_UINT64_T tp_dict_version = 0, obj_dict_version = 0;")
code.putln("if (likely("
"Py_TYPE(%s)->tp_dict && "
"tp_dict_version == __PYX_GET_DICT_VERSION(Py_TYPE(%s)->tp_dict) && "
"(!Py_TYPE(%s)->tp_dictoffset || "
"obj_dict_version == __PYX_GET_DICT_VERSION(_PyObject_GetDictPtr(%s)))"
"));" % (
self_arg, self_arg, self_arg, self_arg))
code.putln("else {")
code.putln("PY_UINT64_T type_dict_guard = (likely(Py_TYPE(%s)->tp_dict)) ? __PYX_GET_DICT_VERSION(Py_TYPE(%s)->tp_dict) : 0;" % (
self_arg, self_arg))
code.putln("#endif")
func_node_temp = code.funcstate.allocate_temp(py_object_type, manage_ref=True) func_node_temp = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
self.func_node.set_cname(func_node_temp) self.func_node.set_cname(func_node_temp)
# need to get attribute manually--scope would return cdef method # need to get attribute manually--scope would return cdef method
...@@ -4350,14 +4369,41 @@ class OverrideCheckNode(StatNode): ...@@ -4350,14 +4369,41 @@ class OverrideCheckNode(StatNode):
code.putln("%s = __Pyx_PyObject_GetAttrStr(%s, %s); %s" % ( code.putln("%s = __Pyx_PyObject_GetAttrStr(%s, %s); %s" % (
func_node_temp, self_arg, interned_attr_cname, err)) func_node_temp, self_arg, interned_attr_cname, err))
code.put_gotref(func_node_temp) code.put_gotref(func_node_temp)
is_builtin_function_or_method = "PyCFunction_Check(%s)" % func_node_temp is_builtin_function_or_method = "PyCFunction_Check(%s)" % func_node_temp
is_overridden = "(PyCFunction_GET_FUNCTION(%s) != (PyCFunction)%s)" % ( is_overridden = "(PyCFunction_GET_FUNCTION(%s) != (PyCFunction)%s)" % (
func_node_temp, self.py_func.entry.func_cname) func_node_temp, self.py_func.entry.func_cname)
code.putln("if (!%s || %s) {" % (is_builtin_function_or_method, is_overridden)) code.putln("if (!%s || %s) {" % (is_builtin_function_or_method, is_overridden))
self.body.generate_execution_code(code) self.body.generate_execution_code(code)
code.putln("}") code.putln("}")
# NOTE: it's not 100% sure that we catch the exact versions here that were used for the lookup,
# but it is very unlikely that the versions change during lookup, and the type dict safe guard
# should increase the chance of detecting such a case.
code.putln("#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP")
code.putln("tp_dict_version = likely(Py_TYPE(%s)->tp_dict) ?"
" __PYX_GET_DICT_VERSION(Py_TYPE(%s)->tp_dict) : 0;" % (
self_arg, self_arg))
code.putln("obj_dict_version = likely(Py_TYPE(%s)->tp_dictoffset) ?"
" __PYX_GET_DICT_VERSION(_PyObject_GetDictPtr(%s)) : 0;" % (
self_arg, self_arg))
# Safety check that the type dict didn't change during the lookup. Since CPython looks up the
# attribute (descriptor) first in the type dict and then in the instance dict or through the
# descriptor, the only really far-away lookup when we get here is one in the type dict. So we
# double check the type dict version before and afterwards to guard against later changes of
# the type dict during the lookup process.
code.putln("if (unlikely(type_dict_guard != tp_dict_version)) {")
code.putln("tp_dict_version = obj_dict_version = 0;")
code.putln("}")
code.putln("#endif")
code.put_decref_clear(func_node_temp, PyrexTypes.py_object_type) code.put_decref_clear(func_node_temp, PyrexTypes.py_object_type)
code.funcstate.release_temp(func_node_temp) code.funcstate.release_temp(func_node_temp)
code.putln("#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP")
code.putln("}")
code.putln("#endif")
code.putln("}") code.putln("}")
......
...@@ -4,6 +4,7 @@ ...@@ -4,6 +4,7 @@
from __future__ import absolute_import from __future__ import absolute_import
class ShouldBeFromDirective(object): class ShouldBeFromDirective(object):
known_directives = [] known_directives = []
...@@ -120,21 +121,21 @@ closure_freelist_size = 8 ...@@ -120,21 +121,21 @@ closure_freelist_size = 8
def get_directive_defaults(): def get_directive_defaults():
# To add an item to this list, all accesses should be changed to use the new # To add an item to this list, all accesses should be changed to use the new
# directive, and the global option itself should be set to an instance of # directive, and the global option itself should be set to an instance of
# ShouldBeFromDirective. # ShouldBeFromDirective.
for old_option in ShouldBeFromDirective.known_directives: for old_option in ShouldBeFromDirective.known_directives:
value = globals().get(old_option.options_name) value = globals().get(old_option.options_name)
assert old_option.directive_name in _directive_defaults assert old_option.directive_name in _directive_defaults
if not isinstance(value, ShouldBeFromDirective): if not isinstance(value, ShouldBeFromDirective):
if old_option.disallow: if old_option.disallow:
raise RuntimeError( raise RuntimeError(
"Option '%s' must be set from directive '%s'" % ( "Option '%s' must be set from directive '%s'" % (
old_option.option_name, old_option.directive_name)) old_option.option_name, old_option.directive_name))
else: else:
# Warn? # Warn?
_directive_defaults[old_option.directive_name] = value _directive_defaults[old_option.directive_name] = value
return _directive_defaults return _directive_defaults
# Declare compiler directives # Declare compiler directives
_directive_defaults = { _directive_defaults = {
...@@ -146,14 +147,14 @@ _directive_defaults = { ...@@ -146,14 +147,14 @@ _directive_defaults = {
'exceptval' : None, # (except value=None, check=True) 'exceptval' : None, # (except value=None, check=True)
'auto_cpdef': False, 'auto_cpdef': False,
'auto_pickle': None, 'auto_pickle': None,
'cdivision': False, # was True before 0.12 'cdivision': False, # was True before 0.12
'cdivision_warnings': False, 'cdivision_warnings': False,
'overflowcheck': False, 'overflowcheck': False,
'overflowcheck.fold': True, 'overflowcheck.fold': True,
'always_allow_keywords': False, 'always_allow_keywords': False,
'allow_none_for_extension_args': True, 'allow_none_for_extension_args': True,
'wraparound' : True, 'wraparound' : True,
'ccomplex' : False, # use C99/C++ for complex types and arith 'ccomplex' : False, # use C99/C++ for complex types and arith
'callspec' : "", 'callspec' : "",
'final' : False, 'final' : False,
'internal' : False, 'internal' : False,
...@@ -162,20 +163,20 @@ _directive_defaults = { ...@@ -162,20 +163,20 @@ _directive_defaults = {
'no_gc': False, 'no_gc': False,
'linetrace': False, 'linetrace': False,
'emit_code_comments': True, # copy original source code into C code comments 'emit_code_comments': True, # copy original source code into C code comments
'annotation_typing': True, # read type declarations from Python function annotations 'annotation_typing': True, # read type declarations from Python function annotations
'infer_types': None, 'infer_types': None,
'infer_types.verbose': False, 'infer_types.verbose': False,
'autotestdict': True, 'autotestdict': True,
'autotestdict.cdef': False, 'autotestdict.cdef': False,
'autotestdict.all': False, 'autotestdict.all': False,
'language_level': 2, 'language_level': 2,
'fast_getattr': False, # Undocumented until we come up with a better way to handle this everywhere. 'fast_getattr': False, # Undocumented until we come up with a better way to handle this everywhere.
'py2_import': False, # For backward compatibility of Cython's source code in Py3 source mode 'py2_import': False, # For backward compatibility of Cython's source code in Py3 source mode
'preliminary_late_includes_cy28': False, # Temporary directive in 0.28, to be removed in a later version (see GH#2079). 'preliminary_late_includes_cy28': False, # Temporary directive in 0.28, to be removed in a later version (see GH#2079).
'iterable_coroutine': False, # Make async coroutines backwards compatible with the old asyncio yield-from syntax. 'iterable_coroutine': False, # Make async coroutines backwards compatible with the old asyncio yield-from syntax.
'c_string_type': 'bytes', 'c_string_type': 'bytes',
'c_string_encoding': '', 'c_string_encoding': '',
'type_version_tag': True, # enables Py_TPFLAGS_HAVE_VERSION_TAG on extension types 'type_version_tag': True, # enables Py_TPFLAGS_HAVE_VERSION_TAG on extension types
'unraisable_tracebacks': True, 'unraisable_tracebacks': True,
'old_style_globals': False, 'old_style_globals': False,
'np_pythran': False, 'np_pythran': False,
...@@ -195,16 +196,16 @@ _directive_defaults = { ...@@ -195,16 +196,16 @@ _directive_defaults = {
# optimizations # optimizations
'optimize.inline_defnode_calls': True, 'optimize.inline_defnode_calls': True,
'optimize.unpack_method_calls': True, # increases code size when True 'optimize.unpack_method_calls': True, # increases code size when True
'optimize.unpack_method_calls_in_pyinit': False, # uselessly increases code size when True 'optimize.unpack_method_calls_in_pyinit': False, # uselessly increases code size when True
'optimize.use_switch': True, 'optimize.use_switch': True,
# remove unreachable code # remove unreachable code
'remove_unreachable': True, 'remove_unreachable': True,
# control flow debug directives # control flow debug directives
'control_flow.dot_output': "", # Graphviz output filename 'control_flow.dot_output': "", # Graphviz output filename
'control_flow.dot_annotate_defs': False, # Annotate definitions 'control_flow.dot_annotate_defs': False, # Annotate definitions
# test support # test support
'test_assert_path_exists' : [], 'test_assert_path_exists' : [],
...@@ -273,9 +274,9 @@ directive_types = { ...@@ -273,9 +274,9 @@ directive_types = {
'auto_pickle': bool, 'auto_pickle': bool,
'final' : bool, # final cdef classes and methods 'final' : bool, # final cdef classes and methods
'internal' : bool, # cdef class visibility in the module dict 'internal' : bool, # cdef class visibility in the module dict
'infer_types' : bool, # values can be True/None/False 'infer_types' : bool, # values can be True/None/False
'binding' : bool, 'binding' : bool,
'cfunc' : None, # decorators do not take directive value 'cfunc' : None, # decorators do not take directive value
'ccall' : None, 'ccall' : None,
'inline' : None, 'inline' : None,
'staticmethod' : None, 'staticmethod' : None,
...@@ -291,7 +292,7 @@ for key, val in _directive_defaults.items(): ...@@ -291,7 +292,7 @@ for key, val in _directive_defaults.items():
if key not in directive_types: if key not in directive_types:
directive_types[key] = type(val) directive_types[key] = type(val)
directive_scopes = { # defaults to available everywhere directive_scopes = { # defaults to available everywhere
# 'module', 'function', 'class', 'with statement' # 'module', 'function', 'class', 'with statement'
'auto_pickle': ('module', 'cclass'), 'auto_pickle': ('module', 'cclass'),
'final' : ('cclass', 'function'), 'final' : ('cclass', 'function'),
...@@ -423,7 +424,7 @@ def parse_directive_list(s, relaxed_bool=False, ignore_unknown=False, ...@@ -423,7 +424,7 @@ def parse_directive_list(s, relaxed_bool=False, ignore_unknown=False,
item = item.strip() item = item.strip()
if not item: if not item:
continue continue
if not '=' in item: if '=' not in item:
raise ValueError('Expected "=" in option "%s"' % item) raise ValueError('Expected "=" in option "%s"' % item)
name, value = [s.strip() for s in item.strip().split('=', 1)] name, value = [s.strip() for s in item.strip().split('=', 1)]
if name not in _directive_defaults: if name not in _directive_defaults:
...@@ -441,3 +442,73 @@ def parse_directive_list(s, relaxed_bool=False, ignore_unknown=False, ...@@ -441,3 +442,73 @@ def parse_directive_list(s, relaxed_bool=False, ignore_unknown=False,
parsed_value = parse_directive_value(name, value, relaxed_bool=relaxed_bool) parsed_value = parse_directive_value(name, value, relaxed_bool=relaxed_bool)
result[name] = parsed_value result[name] = parsed_value
return result return result
def parse_variable_value(value):
    """Interpret a compile time variable value given as a string.

    Recognises the exact literals ``True``/``False``/``None``, unsigned
    integer literals and float literals; any other input is returned
    unchanged as a string.

    >>> parse_variable_value('True')
    True
    >>> parse_variable_value('true')
    'true'
    >>> parse_variable_value('us-ascii')
    'us-ascii'
    >>> parse_variable_value('str')
    'str'
    >>> parse_variable_value('123')
    123
    >>> parse_variable_value('1.23')
    1.23
    """
    literals = {"True": True, "False": False, "None": None}
    if value in literals:
        return literals[value]
    if value.isdigit():
        # Unsigned integer literal; signed numbers fall through to the
        # float branch below, matching the established behaviour.
        return int(value)
    try:
        return float(value)
    except Exception:
        # Not a number: keep the raw string.
        return value
def parse_compile_time_env(s, current_settings=None):
    """
    Parses a comma-separated list of pragma options. Whitespace
    is not considered.

    When *current_settings* is given, it is updated in place and returned;
    otherwise a new dict is created.

    >>> parse_compile_time_env(' ')
    {}
    >>> (parse_compile_time_env('HAVE_OPENMP=True') ==
    ... {'HAVE_OPENMP': True})
    True
    >>> parse_compile_time_env(' asdf')
    Traceback (most recent call last):
    ...
    ValueError: Expected "=" in option "asdf"
    >>> parse_compile_time_env('NUM_THREADS=4') == {'NUM_THREADS': 4}
    True
    >>> parse_compile_time_env('unknown=anything') == {'unknown': 'anything'}
    True
    """
    result = {} if current_settings is None else current_settings
    for item in (chunk.strip() for chunk in s.split(',')):
        if not item:
            continue
        if '=' not in item:
            raise ValueError('Expected "=" in option "%s"' % item)
        name, _, value = item.partition('=')
        result[name.strip()] = parse_variable_value(value.strip())
    return result
...@@ -70,6 +70,9 @@ cdef class CreateClosureClasses(CythonTransform): ...@@ -70,6 +70,9 @@ cdef class CreateClosureClasses(CythonTransform):
cdef create_class_from_scope(self, node, target_module_scope, inner_node=*) cdef create_class_from_scope(self, node, target_module_scope, inner_node=*)
cdef find_entries_used_in_closures(self, node) cdef find_entries_used_in_closures(self, node)
#cdef class InjectGilHandling(VisitorTransform, SkipDeclarations):
# cdef bint nogil
cdef class GilCheck(VisitorTransform): cdef class GilCheck(VisitorTransform):
cdef list env_stack cdef list env_stack
cdef bint nogil cdef bint nogil
......
...@@ -2776,6 +2776,60 @@ class CreateClosureClasses(CythonTransform): ...@@ -2776,6 +2776,60 @@ class CreateClosureClasses(CythonTransform):
return node return node
class InjectGilHandling(VisitorTransform, SkipDeclarations):
    """
    Allow certain Python operations inside of nogil blocks by implicitly acquiring the GIL.

    Must run before the AnalyseDeclarationsTransform to make sure the GILStatNodes get
    set up, parallel sections know that the GIL is acquired inside of them, etc.
    """
    def __call__(self, root):
        # Tracks whether the node currently being visited sits in a nogil context.
        self.nogil = False
        return super(InjectGilHandling, self).__call__(root)

    # special node handling

    def visit_RaiseStatNode(self, node):
        """Allow raising exceptions in nogil sections by wrapping them in a 'with gil' block."""
        if self.nogil:
            # Wrap the raise in an implicit "with gil" block; later pipeline
            # stages then generate the actual GIL acquisition around it.
            node = Nodes.GILStatNode(node.pos, state='gil', body=node)
        return node

    # further candidates:
    # def visit_AssertStatNode(self, node):
    # def visit_ReraiseStatNode(self, node):

    # nogil tracking

    def visit_GILStatNode(self, node):
        # Explicit "with gil"/"with nogil" block: set the state for its body,
        # restore the surrounding state afterwards.
        was_nogil = self.nogil
        self.nogil = (node.state == 'nogil')
        self.visitchildren(node)
        self.nogil = was_nogil
        return node

    def visit_CFuncDefNode(self, node):
        # A C function body is nogil only when declared "nogil" without "with gil".
        was_nogil = self.nogil
        if isinstance(node.declarator, Nodes.CFuncDeclaratorNode):
            self.nogil = node.declarator.nogil and not node.declarator.with_gil
        self.visitchildren(node)
        self.nogil = was_nogil
        return node

    def visit_ParallelRangeNode(self, node):
        # prange() sections carry their own nogil flag.
        was_nogil = self.nogil
        self.nogil = node.nogil
        self.visitchildren(node)
        self.nogil = was_nogil
        return node

    def visit_ExprNode(self, node):
        # No special GIL handling inside of expressions for now.
        return node

    visit_Node = VisitorTransform.recurse_to_children
class GilCheck(VisitorTransform): class GilCheck(VisitorTransform):
""" """
Call `node.gil_check(env)` on each node to make sure we hold the Call `node.gil_check(env)` on each node to make sure we hold the
......
...@@ -141,7 +141,7 @@ def create_pipeline(context, mode, exclude_classes=()): ...@@ -141,7 +141,7 @@ def create_pipeline(context, mode, exclude_classes=()):
assert mode in ('pyx', 'py', 'pxd') assert mode in ('pyx', 'py', 'pxd')
from .Visitor import PrintTree from .Visitor import PrintTree
from .ParseTreeTransforms import WithTransform, NormalizeTree, PostParse, PxdPostParse from .ParseTreeTransforms import WithTransform, NormalizeTree, PostParse, PxdPostParse
from .ParseTreeTransforms import ForwardDeclareTypes, AnalyseDeclarationsTransform from .ParseTreeTransforms import ForwardDeclareTypes, InjectGilHandling, AnalyseDeclarationsTransform
from .ParseTreeTransforms import AnalyseExpressionsTransform, FindInvalidUseOfFusedTypes from .ParseTreeTransforms import AnalyseExpressionsTransform, FindInvalidUseOfFusedTypes
from .ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform from .ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
from .ParseTreeTransforms import TrackNumpyAttributes, InterpretCompilerDirectives, TransformBuiltinMethods from .ParseTreeTransforms import TrackNumpyAttributes, InterpretCompilerDirectives, TransformBuiltinMethods
...@@ -194,6 +194,7 @@ def create_pipeline(context, mode, exclude_classes=()): ...@@ -194,6 +194,7 @@ def create_pipeline(context, mode, exclude_classes=()):
FlattenInListTransform(), FlattenInListTransform(),
DecoratorTransform(context), DecoratorTransform(context),
ForwardDeclareTypes(context), ForwardDeclareTypes(context),
InjectGilHandling(),
AnalyseDeclarationsTransform(context), AnalyseDeclarationsTransform(context),
AutoTestDictTransform(context), AutoTestDictTransform(context),
EmbedSignature(context), EmbedSignature(context),
......
# Declarations of the CPython C-API helpers used to set up threading / the GIL.
cdef extern from "Python.h":
    void PyEval_InitThreads()
    # Initialize and acquire the global interpreter lock.
    int PyEval_ThreadsInitialized(void)
    # Returns a non-zero value if PyEval_InitThreads() has been called.
# cython.* namespace for pure mode. # cython.* namespace for pure mode.
from __future__ import absolute_import from __future__ import absolute_import
__version__ = "0.28.2" __version__ = "0.29a0"
try: try:
from __builtin__ import basestring from __builtin__ import basestring
......
...@@ -17,12 +17,12 @@ environment: ...@@ -17,12 +17,12 @@ environment:
PYTHON_VERSION: "2.7" PYTHON_VERSION: "2.7"
PYTHON_ARCH: "64" PYTHON_ARCH: "64"
- PYTHON: "C:\\Python34" - PYTHON: "C:\\Python36"
PYTHON_VERSION: "3.4" PYTHON_VERSION: "3.6"
PYTHON_ARCH: "32" PYTHON_ARCH: "32"
- PYTHON: "C:\\Python34-x64" - PYTHON: "C:\\Python36-x64"
PYTHON_VERSION: "3.4" PYTHON_VERSION: "3.6"
PYTHON_ARCH: "64" PYTHON_ARCH: "64"
- PYTHON: "C:\\Python35" - PYTHON: "C:\\Python35"
...@@ -33,12 +33,12 @@ environment: ...@@ -33,12 +33,12 @@ environment:
PYTHON_VERSION: "3.5" PYTHON_VERSION: "3.5"
PYTHON_ARCH: "64" PYTHON_ARCH: "64"
- PYTHON: "C:\\Python36" - PYTHON: "C:\\Python34"
PYTHON_VERSION: "3.6" PYTHON_VERSION: "3.4"
PYTHON_ARCH: "32" PYTHON_ARCH: "32"
- PYTHON: "C:\\Python36-x64" - PYTHON: "C:\\Python34-x64"
PYTHON_VERSION: "3.6" PYTHON_VERSION: "3.4"
PYTHON_ARCH: "64" PYTHON_ARCH: "64"
clone_depth: 5 clone_depth: 5
......
...@@ -90,14 +90,6 @@ complaining about the signature mismatch. ...@@ -90,14 +90,6 @@ complaining about the signature mismatch.
See :ref:`existing-pointers-instantiation` for an example. See :ref:`existing-pointers-instantiation` for an example.
.. Note::
Older Cython files may use :meth:`__new__` rather than :meth:`__cinit__`. The two are synonyms.
The name change from :meth:`__new__` to :meth:`__cinit__` was to avoid
confusion with Python :meth:`__new__` (which is an entirely different
concept) and eventually the use of :meth:`__new__` in Cython will be
disallowed to pave the way for supporting Python-style :meth:`__new__`
.. [#] http://docs.python.org/reference/datamodel.html#object.__new__ .. [#] http://docs.python.org/reference/datamodel.html#object.__new__
.. _finalization_method: .. _finalization_method:
......
...@@ -354,11 +354,13 @@ For example:: ...@@ -354,11 +354,13 @@ For example::
from libcpp.vector cimport vector from libcpp.vector cimport vector
cdef vector[int] vect cdef vector[int] vect
cdef int i cdef int i, x
for i in range(10): for i in range(10):
vect.push_back(i) vect.push_back(i)
for i in range(10): for i in range(10):
print(vect[i]) print(vect[i])
for x in vect:
print(x)
The pxd files in ``/Cython/Includes/libcpp`` also work as good examples on The pxd files in ``/Cython/Includes/libcpp`` also work as good examples on
how to declare C++ classes. how to declare C++ classes.
......
...@@ -102,13 +102,17 @@ def get_distutils_distro(_cache=[]): ...@@ -102,13 +102,17 @@ def get_distutils_distro(_cache=[]):
if sys.platform == 'win32': if sys.platform == 'win32':
# TODO: Figure out why this hackery (see http://thread.gmane.org/gmane.comp.python.cython.devel/8280/). # TODO: Figure out why this hackery (see http://thread.gmane.org/gmane.comp.python.cython.devel/8280/).
config_files = distutils_distro.find_config_files() config_files = distutils_distro.find_config_files()
try: config_files.remove('setup.cfg') try:
except ValueError: pass config_files.remove('setup.cfg')
except ValueError:
pass
distutils_distro.parse_config_files(config_files) distutils_distro.parse_config_files(config_files)
cfgfiles = distutils_distro.find_config_files() cfgfiles = distutils_distro.find_config_files()
try: cfgfiles.remove('setup.cfg') try:
except ValueError: pass cfgfiles.remove('setup.cfg')
except ValueError:
pass
distutils_distro.parse_config_files(cfgfiles) distutils_distro.parse_config_files(cfgfiles)
_cache.append(distutils_distro) _cache.append(distutils_distro)
return distutils_distro return distutils_distro
...@@ -888,8 +892,7 @@ class CythonCompileTestCase(unittest.TestCase): ...@@ -888,8 +892,7 @@ class CythonCompileTestCase(unittest.TestCase):
include_dirs = INCLUDE_DIRS + [os.path.join(test_directory, '..', TEST_SUPPORT_DIR)] include_dirs = INCLUDE_DIRS + [os.path.join(test_directory, '..', TEST_SUPPORT_DIR)]
if incdir: if incdir:
include_dirs.append(incdir) include_dirs.append(incdir)
source = self.find_module_source_file(
os.path.join(test_directory, module + '.pyx'))
if self.preparse == 'id': if self.preparse == 'id':
source = self.find_module_source_file( source = self.find_module_source_file(
os.path.join(test_directory, module + '.pyx')) os.path.join(test_directory, module + '.pyx'))
...@@ -1233,8 +1236,10 @@ def run_forked_test(result, run_func, test_name, fork=True): ...@@ -1233,8 +1236,10 @@ def run_forked_test(result, run_func, test_name, fork=True):
raise Exception("Tests in module '%s' exited with status %d" % raise Exception("Tests in module '%s' exited with status %d" %
(module_name, result_code)) (module_name, result_code))
finally: finally:
try: os.unlink(result_file) try:
except: pass os.unlink(result_file)
except:
pass
class PureDoctestTestCase(unittest.TestCase): class PureDoctestTestCase(unittest.TestCase):
def __init__(self, module_name, module_path): def __init__(self, module_name, module_path):
...@@ -1600,7 +1605,7 @@ class EmbedTest(unittest.TestCase): ...@@ -1600,7 +1605,7 @@ class EmbedTest(unittest.TestCase):
if sys.version_info[0] >=3 and CY3_DIR: if sys.version_info[0] >=3 and CY3_DIR:
cython = os.path.join(CY3_DIR, cython) cython = os.path.join(CY3_DIR, cython)
cython = os.path.abspath(os.path.join('..', '..', cython)) cython = os.path.abspath(os.path.join('..', '..', cython))
self.assert_(os.system( self.assertTrue(os.system(
"make PYTHON='%s' CYTHON='%s' LIBDIR1='%s' test > make.output" % (sys.executable, cython, libdir)) == 0) "make PYTHON='%s' CYTHON='%s' LIBDIR1='%s' test > make.output" % (sys.executable, cython, libdir)) == 0)
try: try:
os.remove('make.output') os.remove('make.output')
...@@ -1905,8 +1910,6 @@ def main(): ...@@ -1905,8 +1910,6 @@ def main():
options, cmd_args = parser.parse_args(args) options, cmd_args = parser.parse_args(args)
WORKDIR = os.path.abspath(options.work_dir)
if options.with_cython and sys.version_info[0] >= 3: if options.with_cython and sys.version_info[0] >= 3:
sys.path.insert(0, options.cython_dir) sys.path.insert(0, options.cython_dir)
...@@ -2178,8 +2181,10 @@ def runtests(options, cmd_args, coverage=None): ...@@ -2178,8 +2181,10 @@ def runtests(options, cmd_args, coverage=None):
if xml_output_dir: if xml_output_dir:
from Cython.Tests.xmlrunner import XMLTestRunner from Cython.Tests.xmlrunner import XMLTestRunner
if not os.path.exists(xml_output_dir): if not os.path.exists(xml_output_dir):
try: os.makedirs(xml_output_dir) try:
except OSError: pass # concurrency issue? os.makedirs(xml_output_dir)
except OSError:
pass # concurrency issue?
test_runner = XMLTestRunner(output=xml_output_dir, test_runner = XMLTestRunner(output=xml_output_dir,
verbose=options.verbosity > 0) verbose=options.verbosity > 0)
if options.failfast: if options.failfast:
......
# mode: compile
import cython

# Pure-Python-mode extension type: @cython.cclass turns this class into a
# cdef class when compiled with Cython.
@cython.cclass
class BuiltinRef:
    # Compile-only check: cython.declare() must accept a builtin C struct
    # type name ('Py_buffer') for a C-level attribute on the extension type.
    cython.declare(pybuf = 'Py_buffer')
...@@ -55,7 +55,7 @@ cdef object m(): ...@@ -55,7 +55,7 @@ cdef object m():
print obj print obj
del fred del fred
return obj return obj
raise obj raise obj # allowed!
if obj: if obj:
pass pass
while obj: while obj:
...@@ -155,7 +155,6 @@ _ERRORS = u""" ...@@ -155,7 +155,6 @@ _ERRORS = u"""
55:8: Python print statement not allowed without gil 55:8: Python print statement not allowed without gil
56:8: Deleting Python object not allowed without gil 56:8: Deleting Python object not allowed without gil
57:8: Returning Python object not allowed without gil 57:8: Returning Python object not allowed without gil
58:8: Raising exception not allowed without gil
59:11: Truth-testing Python object not allowed without gil 59:11: Truth-testing Python object not allowed without gil
61:14: Truth-testing Python object not allowed without gil 61:14: Truth-testing Python object not allowed without gil
63:8: For-loop using object bounds or target not allowed without gil 63:8: For-loop using object bounds or target not allowed without gil
......
# mode: run
# tag: cpdef

# This also makes a nice benchmark for the cpdef method call dispatching code.

# Base extension type: rec() is a cpdef method, callable both through the
# Python method wrapper and directly as a C call.  The subclasses below
# probe both dispatch paths (overridden vs. non-overridden).
cdef class Ext:
    """
    >>> x = Ext()
    >>> x.rec(10)
    0
    """
    # Recurses i+1 times through cpdef dispatch, then returns 0.
    cpdef rec(self, int i):
        return 0 if i < 0 else self.rec(i-1)
# Plain Python subclass (has an instance __dict__) that does NOT override
# rec(): calls should still resolve to the fast cpdef implementation.
class Py(Ext):
    """
    >>> p = Py()
    >>> p.rec(10)
    0
    """
    pass
# Python subclass without an instance __dict__ (empty __slots__), again not
# overriding rec(): exercises the no-attribute-lookup dispatch fast path.
class Slots(Ext):
    """
    >>> s = Slots()
    >>> s.rec(10)
    0
    """
    __slots__ = ()
# Python subclass that DOES override rec(): cpdef dispatch must detect the
# override and call the Python method, which prints each i and only keeps
# recursing through Ext.rec while i > 10, otherwise returns 5.
class PyOverride(Ext):
    """
    >>> p = PyOverride()
    >>> p.rec(10)
    10
    5
    >>> p.rec(12)
    12
    11
    10
    5
    """
    def rec(self, i):
        print(i)
        return Ext.rec(self, i) if i > 10 else 5
# Same override scenario as PyOverride, but on a __slots__ class without an
# instance __dict__ — the override must still be found via the type.
class SlotsOverride(Ext):
    """
    >>> s = SlotsOverride()
    >>> s.rec(10)
    10
    6
    >>> s.rec(12)
    12
    11
    10
    6
    """
    __slots__ = ()

    def rec(self, i):
        print(i)
        return Ext.rec(self, i) if i > 10 else 6
# mode: run
# tag: nogil, withgil, exceptions

# Raising a Python exception from inside a nogil function: Cython is expected
# to acquire the GIL implicitly for the raise, without an explicit
# "with gil" block around it.
cdef void foo_nogil(int i) nogil except *:
    if i != 0: raise ValueError("huhu !")
cdef void foo(int i) except * with gil: cdef void foo(int i) except * with gil:
if i != 0: raise ValueError if i != 0: raise ValueError
cdef int bar(int i) except? -1 with gil: cdef int bar(int i) except? -1 with gil:
if i != 0: raise ValueError if i != 0: raise ValueError
return 0 return 0
cdef int spam(int i) except? -1 with gil: cdef int spam(int i) except? -1 with gil:
if i != 0: raise TypeError if i != 0: raise TypeError
return -1 return -1
def test_foo_nogil():
    """
    >>> test_foo_nogil()
    """
    # Each section below exercises foo_nogil() — a nogil function that raises
    # when its argument is non-zero — in a different control-flow context, to
    # verify that exception propagation out of nogil code works everywhere.

    # plain calls, no exception raised
    foo_nogil(0)
    foo_nogil(0)
    with nogil:
        foo_nogil(0)
        foo_nogil(0)
    # non-raising call inside try/finally
    try:
        with nogil:
            foo_nogil(0)
    finally:
        pass
    # two separate nogil sections inside one try/finally
    try:
        with nogil:
            foo_nogil(0)
        with nogil:
            foo_nogil(0)
    finally:
        pass
    # second nogil section raises: except and finally handlers themselves
    # re-enter nogil code
    try:
        with nogil:
            foo_nogil(0)
        with nogil:
            foo_nogil(1)
    except:
        with nogil:
            foo_nogil(0)
    finally:
        with nogil:
            foo_nogil(0)
        pass
    # two non-raising calls inside one nogil block under try/finally
    try:
        with nogil:
            foo_nogil(0)
            foo_nogil(0)
    finally:
        pass
    # second call in the same nogil block raises
    try:
        with nogil:
            foo_nogil(0)
            foo_nogil(1)
    except:
        with nogil:
            foo_nogil(0)
    finally:
        with nogil:
            foo_nogil(0)
        pass
    # nested try blocks: the inner except handler raises again from nogil
    # code, which the outer except must catch
    try:
        with nogil:
            foo_nogil(0)
        try:
            with nogil:
                foo_nogil(1)
        except:
            with nogil:
                foo_nogil(1)
        finally:
            with nogil:
                foo_nogil(0)
            pass
    except:
        with nogil:
            foo_nogil(0)
    finally:
        with nogil:
            foo_nogil(0)
        pass
    # as above, but the inner finally also raises from nogil code
    try:
        with nogil:
            foo_nogil(0)
        try:
            with nogil:
                foo_nogil(1)
        except:
            with nogil:
                foo_nogil(1)
        finally:
            with nogil:
                foo_nogil(1)
            pass
    except:
        with nogil:
            foo_nogil(0)
    finally:
        with nogil:
            foo_nogil(0)
        pass
    #
def test_foo(): def test_foo():
""" """
>>> test_foo() >>> test_foo()
...@@ -109,6 +220,7 @@ def test_foo(): ...@@ -109,6 +220,7 @@ def test_foo():
pass pass
# #
def test_bar(): def test_bar():
""" """
>>> test_bar() >>> test_bar()
......
...@@ -56,6 +56,21 @@ cdef class DisableTpClear: ...@@ -56,6 +56,21 @@ cdef class DisableTpClear:
pto.tp_clear(self) pto.tp_clear(self)
# Subclassing DisableTpClear must re-enable the tp_clear slot that the base
# type nulled out; the public object attribute lets the doctest build a hard
# reference cycle for the garbage collector to break.
cdef class ReallowTpClear(DisableTpClear):
    """
    >>> import gc
    >>> obj = ReallowTpClear()
    >>> is_tp_clear_null(obj)
    False
    >>> obj.attr = obj # create hard reference cycle
    >>> del obj; _ignore = gc.collect()

    # Problem: cannot really validate that the cycle was cleaned up without using weakrefs etc...
    """
    # Python-visible attribute used to form the self-referencing cycle above.
    cdef public object attr
def test_closure_without_clear(str x): def test_closure_without_clear(str x):
""" """
>>> c = test_closure_without_clear('abc') >>> c = test_closure_without_clear('abc')
......
...@@ -106,7 +106,7 @@ def test_boundscheck(x): ...@@ -106,7 +106,7 @@ def test_boundscheck(x):
## return y ## return y
def test_with_nogil(nogil): def test_with_nogil(nogil, should_raise=False):
""" """
>>> raised = [] >>> raised = []
>>> class nogil(object): >>> class nogil(object):
...@@ -121,14 +121,25 @@ def test_with_nogil(nogil): ...@@ -121,14 +121,25 @@ def test_with_nogil(nogil):
True True
>>> raised >>> raised
[None] [None]
>>> test_with_nogil(nogil(), should_raise=True)
Traceback (most recent call last):
ValueError: RAISED!
>>> raised[1] is None
False
""" """
result = False result = False
should_raise_bool = True if should_raise else False # help the type inference ...
with nogil: with nogil:
print("WORKS") print("WORKS")
with cython.nogil: with cython.nogil:
result = True result = True
if should_raise_bool:
raise ValueError("RAISED!")
return result return result
MyUnion = cython.union(n=cython.int, x=cython.double) MyUnion = cython.union(n=cython.int, x=cython.double)
MyStruct = cython.struct(is_integral=cython.bint, data=MyUnion) MyStruct = cython.struct(is_integral=cython.bint, data=MyUnion)
MyStruct2 = cython.typedef(MyStruct[2]) MyStruct2 = cython.typedef(MyStruct[2])
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment