Commit e5642d0e authored by Robert Bradshaw

Merge branch 'master' into 0.20.x

parents 3c9244bf 26c706f2
@@ -1755,6 +1755,12 @@ class NameNode(AtomicExprNode):
         self.check_identifier_kind()
         entry = self.entry
         type = entry.type
+        if (type.is_pyobject and self.inferred_type and
+                self.inferred_type.is_builtin_type):
+            # assume that type inference is smarter than the static entry
+            type = self.inferred_type
+            if entry.type != self.inferred_type:
+                print self.pos, entry.type, self.inferred_type
         self.type = type

     def check_identifier_kind(self):
...
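For orientation (an illustration, not part of the commit): the NameNode change above lets the code generator trust an inferred builtin type over a static entry that is only known as a generic Python object. A made-up Cython function of the kind that benefits:

    def append_to_inferred_list():
        # 'items' is declared nowhere; type inference marks it as the builtin
        # 'list', so the append below can use list-specific C-API calls
        # instead of a generic Python attribute lookup.
        items = []
        items.append(1)
        return items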
@@ -1075,6 +1075,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
         have_entries, (py_attrs, py_buffers, memoryview_slices) = \
             scope.get_refcounted_entries()
+        is_final_type = scope.parent_type.is_final_type
         if scope.is_internal:
             # internal classes (should) never need None inits, normal zeroing will do
             py_attrs = []
@@ -1124,9 +1125,13 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
         if freelist_size:
             code.globalstate.use_utility_code(
                 UtilityCode.load_cached("IncludeStringH", "StringTools.c"))
+            if is_final_type:
+                abstract_check = ''
+            else:
+                abstract_check = ' & ((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)'
             obj_struct = type.declaration_code("", deref=True)
-            code.putln("if (likely((%s > 0) & (t->tp_basicsize == sizeof(%s)) & ((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0))) {" % (
-                freecount_name, obj_struct))
+            code.putln("if (likely((%s > 0) & (t->tp_basicsize == sizeof(%s))%s)) {" % (
+                freecount_name, obj_struct, abstract_check))
             code.putln("o = (PyObject*)%s[--%s];" % (
                 freelist_name, freecount_name))
             code.putln("memset(o, 0, sizeof(%s));" % obj_struct)
@@ -1134,7 +1139,13 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
             if scope.needs_gc():
                 code.putln("PyObject_GC_Track(o);")
             code.putln("} else {")
-        code.putln("o = (PyObject *) PyBaseObject_Type.tp_new(t, %s, 0);" % Naming.empty_tuple)
+        if not is_final_type:
+            code.putln("if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) {")
+        code.putln("o = (*t->tp_alloc)(t, 0);")
+        if not is_final_type:
+            code.putln("} else {")
+            code.putln("o = (PyObject *) PyBaseObject_Type.tp_new(t, %s, 0);" % Naming.empty_tuple)
+            code.putln("}")
         code.putln("if (unlikely(!o)) return 0;")
         if freelist_size and not base_type:
             code.putln('}')
...
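The ModuleNode hunks above generate the freelist-backed tp_new and, for final types, drop the Py_TPFLAGS_IS_ABSTRACT check (a final type cannot have abstract subclasses); ordinary allocation now goes through t->tp_alloc. A minimal sketch of an extension type that takes the optimised path, using the existing cython.final and cython.freelist decorators (the Point class itself is invented for illustration):

    cimport cython

    @cython.final        # cannot be subclassed, so the abstract check is unnecessary
    @cython.freelist(8)  # keep up to 8 freed instances for reuse in tp_new
    cdef class Point:
        cdef double x, y

        def __cinit__(self, double x, double y):
            self.x = x
            self.y = y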
@@ -182,5 +182,6 @@ cdef p_c_class_definition(PyrexScanner s, pos, ctx)
 cdef p_c_class_options(PyrexScanner s)
 cdef p_property_decl(PyrexScanner s)
 cdef p_doc_string(PyrexScanner s)
+cdef p_ignorable_statement(PyrexScanner s)
 cdef p_compiler_directive_comments(PyrexScanner s)
 cdef p_cpp_class_definition(PyrexScanner s, pos, ctx)
@@ -1894,6 +1894,9 @@ def p_statement(s, ctx, first_statement = 0):
             return p_pass_statement(s, with_newline=True)
     else:
         if ctx.level in ('c_class_pxd', 'property'):
+            node = p_ignorable_statement(s)
+            if node is not None:
+                return node
             s.error("Executable statement not allowed here")
         if s.sy == 'if':
             return p_if_statement(s)
@@ -3079,6 +3082,19 @@ def p_property_decl(s):
     return Nodes.PropertyNode(pos, name=name, doc=doc, body=body)

+def p_ignorable_statement(s):
+    """
+    Parses any kind of ignorable statement that is allowed in .pxd files.
+    """
+    if s.sy == 'BEGIN_STRING':
+        pos = s.position()
+        string_node = p_atom(s)
+        if s.sy != 'EOF':
+            s.expect_newline("Syntax error in string")
+        return Nodes.ExprStatNode(pos, expr=string_node)
+    return None
+
 def p_doc_string(s):
     if s.sy == 'BEGIN_STRING':
         pos = s.position()
...
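The Parsing changes above add p_ignorable_statement() and call it where the parser previously rejected any statement at 'c_class_pxd' or 'property' level, so a bare string literal such as a docstring is now accepted in a .pxd file. Illustrative snippet only (the test added later in this commit exercises the same feature):

    # example.pxd (hypothetical file name)
    cdef class Example:
        "Docstring declared in the .pxd; parsed as an ignorable statement instead of an error."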
@@ -473,18 +473,18 @@ static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value) {
     const {{TYPE}} neg_one = ({{TYPE}}) -1, const_zero = 0;
     const int is_unsigned = neg_one > const_zero;
     if (is_unsigned) {
-        if (sizeof({{TYPE}}) < sizeof(unsigned long)) {
-            return PyInt_FromLong(value);
+        if (sizeof({{TYPE}}) < sizeof(long)) {
+            return PyInt_FromLong((long) value);
         } else if (sizeof({{TYPE}}) <= sizeof(unsigned long)) {
-            return PyLong_FromUnsignedLong(value);
+            return PyLong_FromUnsignedLong((unsigned long) value);
         } else if (sizeof({{TYPE}}) <= sizeof(unsigned long long)) {
-            return PyLong_FromUnsignedLongLong(value);
+            return PyLong_FromUnsignedLongLong((unsigned long long) value);
         }
     } else {
         if (sizeof({{TYPE}}) <= sizeof(long)) {
-            return PyInt_FromLong(value);
+            return PyInt_FromLong((long) value);
         } else if (sizeof({{TYPE}}) <= sizeof(long long)) {
-            return PyLong_FromLongLong(value);
+            return PyLong_FromLongLong((long long) value);
         }
     }
     {
...
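The TypeConversion.c hunk adds explicit casts on every return path of the templated integer-to-Python helper, so C compilers do not warn about implicit sign or width conversions, and writes the PyInt_FromLong fast-path check in terms of sizeof(long). As a rough illustration only (the typedef and function are invented), code like this is what leads Cython to generate such a helper for a typedef'd integer type:

    ctypedef unsigned long long record_id_t   # hypothetical typedef

    def id_to_py(record_id_t value):
        # Returning the C value coerces it to a Python int through a generated
        # conversion helper of the shape shown above; values that do not fit
        # in a signed long take one of the PyLong_From* branches.
        return value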
@@ -193,6 +193,28 @@ of the extension module sources::
         ext_modules = extensions
     )

+If you have many extensions and want to avoid the additional complexity in the
+declarations, you can declare them with their normal Cython sources and then
+call the following function instead of ``cythonize()`` to adapt the sources
+list in the Extensions when not using Cython::
+
+    import os.path
+
+    def no_cythonize(extensions, **_ignore):
+        for extension in extensions:
+            sources = []
+            for sfile in extension.sources:
+                path, ext = os.path.splitext(sfile)
+                if ext in ('.pyx', '.py'):
+                    if extension.language == 'c++':
+                        ext = '.cpp'
+                    else:
+                        ext = '.c'
+                    sfile = path + ext
+                sources.append(sfile)
+            extension.sources[:] = sources
+        return extensions
+
 Compiling with ``pyximport``
 =============================
...
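As a usage sketch for the documentation hunk above, assuming setup.py already builds an ``extensions`` list of ``Extension`` objects and defines ``no_cythonize()`` as shown (the try/except fallback here is illustrative, not quoted from the docs):

    try:
        from Cython.Build import cythonize
    except ImportError:
        cythonize = None

    if cythonize is not None:
        extensions = cythonize(extensions)      # generate .c/.cpp from .pyx
    else:
        extensions = no_cythonize(extensions)   # use pre-generated .c/.cpp files

    # setup(..., ext_modules=extensions) then works with or without Cython installed.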
PYTHON -c "import os; os.symlink('subdir', 'fake')" PYTHON symlink_or_copy.py subdir fake
PYTHON setup.py build_ext --inplace PYTHON setup.py build_ext --inplace
PYTHON -c "import a" PYTHON -c "import a"
PYTHON -c "import pkg.b" PYTHON -c "import pkg.b"
PYTHON check_paths.py PYTHON check_paths.py
######## symlink_or_copy.py ########
import platform
import sys
if platform.system() == "Windows":
import shutil
shutil.copytree(sys.argv[1], sys.argv[2])
else:
import os
os.symlink(sys.argv[1], sys.argv[2])
######## setup.py ######## ######## setup.py ########
...@@ -21,7 +32,7 @@ setup( ...@@ -21,7 +32,7 @@ setup(
cdef extern from "helper.h": cdef extern from "helper.h":
int value1 int value1
cdef extern from "subdir/helper.h": cdef extern from "subdir/helper.h":
int value2 int value2
......
@@ -2,12 +2,14 @@ PYTHON setup.py build_ext --inplace
 PYTHON -c "import runner"

 # Verify some files were created.
-ls common/AddTraceback_impl*.h common/RaiseException_impl_*.h
+# ls common/AddTraceback_impl*.h common/RaiseException_impl_*.h
+PYTHON -c "import glob; assert glob.glob('common/AddTraceback_impl*.h')"
+PYTHON -c "import glob; assert glob.glob('common/RaiseException_impl_*.h')"

 # Verify that they're used.
-grep -c '#include "common/AddTraceback_impl_.*h"' a.c
-grep -c '#include "common/AddTraceback_impl_.*h"' b.c
-grep -c '#include "common/AddTraceback_impl_.*h"' c.c
+PYTHON fake_grep.py -c '#include "common/AddTraceback_impl_.*h"' a.c
+PYTHON fake_grep.py -c '#include "common/AddTraceback_impl_.*h"' b.c
+PYTHON fake_grep.py -c '#include "common/AddTraceback_impl_.*h"' c.c

 ######## setup.py ########
@@ -56,3 +58,23 @@ if __name__ == "__main__":

 ######## runner.py ########
 import a, b, c
+
+######## fake_grep.py ########
+
+import platform
+import re
+import sys
+
+if platform.system() == 'Windows':
+    opt, pattern, file = sys.argv[1:]
+    assert opt == '-c'
+    count = 0
+    regex = re.compile(pattern)
+    for line in open(file):
+        if regex.search(line):
+            count += 1
+    print count
+    sys.exit(count == 0)
+else:
+    import subprocess
+    sys.exit(subprocess.call(['grep'] + sys.argv[1:]))
 # mode: compile

-def f(obj, int i, float f, char *s1, char s2[]):
+def f(obj, int i, double f, char *s1, char s2[]):
     pass

-cdef g(obj, int i, float f, char *s1, char s2[]):
+cdef g(obj, int i, double f, char *s1, char s2[]):
     pass

-cdef do_g(object (*func)(object, int, float, char*, char*)):
+cdef do_g(object (*func)(object, int, double, char*, char*)):
     return func(1, 2, 3.14159, "a", "b")

 do_g(&g)
PYTHON setup.py build_ext --inplace
PYTHON -c "import a; a.test()"
######## setup.py ########
from Cython.Build.Dependencies import cythonize
from distutils.core import setup
setup(
ext_modules = cythonize("a.pyx"),
)
######## a.pyx ########
cdef class ExtTypeDocstringPass:
pass
cdef class ExtTypeDocstring:
"huhu!" # this should override the .pxd docstring
cdef class ExtTypePass:
pass
cdef class ExtTypeDocstringPassString:
pass
def test():
assert not ExtTypePass().__doc__, ExtTypePass().__doc__
assert ExtTypeDocstring().__doc__ == "huhu!", ExtTypeDocstring().__doc__
assert ExtTypeDocstringPass().__doc__ == "hoho!", ExtTypeDocstringPass().__doc__
assert ExtTypeDocstringPassString().__doc__ == "hoho!", ExtTypeDocstringPassString().__doc__
######## a.pxd ########
cdef class ExtTypePass:
pass
cdef class ExtTypeDocstring:
"""
hoho
"""
cdef class ExtTypeDocstringPass:
"hoho!"
pass
cdef class ExtTypeDocstringPassString:
"hoho!"
pass
"more hoho"
@@ -252,6 +252,23 @@ def iter_and_in():
         if c in u'abCDefGh':
             print c

+
+@cython.test_fail_if_path_exists('//ForInStatNode')
+def iter_inferred():
+    """
+    >>> iter_inferred()
+    a
+    b
+    c
+    d
+    e
+    """
+    uchars = list(u"abcde")
+    uchars = u''.join(uchars)
+    for c in uchars:
+        print c
+
+
 @cython.test_assert_path_exists('//SwitchStatNode',
                                 '//ForFromStatNode')
 @cython.test_fail_if_path_exists('//ForInStatNode')
...