Commit 0ca51142 authored by Stefan Behnel's avatar Stefan Behnel

merged in latest cython-devel

parents be8d3798 9dfe33b2
...@@ -446,9 +446,9 @@ def buf_lookup_full_code(proto, defin, name, nd): ...@@ -446,9 +446,9 @@ def buf_lookup_full_code(proto, defin, name, nd):
proto.putln("#define %s(type, buf, %s) (type)(%s_imp(buf, %s))" % (name, macroargs, name, macroargs)) proto.putln("#define %s(type, buf, %s) (type)(%s_imp(buf, %s))" % (name, macroargs, name, macroargs))
funcargs = ", ".join(["Py_ssize_t i%d, Py_ssize_t s%d, Py_ssize_t o%d" % (i, i, i) for i in range(nd)]) funcargs = ", ".join(["Py_ssize_t i%d, Py_ssize_t s%d, Py_ssize_t o%d" % (i, i, i) for i in range(nd)])
proto.putln("static INLINE void* %s_imp(void* buf, %s);" % (name, funcargs)) proto.putln("static CYTHON_INLINE void* %s_imp(void* buf, %s);" % (name, funcargs))
defin.putln(dedent(""" defin.putln(dedent("""
static INLINE void* %s_imp(void* buf, %s) { static CYTHON_INLINE void* %s_imp(void* buf, %s) {
char* ptr = (char*)buf; char* ptr = (char*)buf;
""") % (name, funcargs) + "".join([dedent("""\ """) % (name, funcargs) + "".join([dedent("""\
ptr += s%d * i%d; ptr += s%d * i%d;
...@@ -723,10 +723,10 @@ typedef struct { ...@@ -723,10 +723,10 @@ typedef struct {
} __Pyx_BufFmt_StackElem; } __Pyx_BufFmt_StackElem;
static INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info); static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info);
static int __Pyx_GetBufferAndValidate(Py_buffer* buf, PyObject* obj, __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack); static int __Pyx_GetBufferAndValidate(Py_buffer* buf, PyObject* obj, __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack);
""", impl=""" """, impl="""
static INLINE int __Pyx_IsLittleEndian(void) { static CYTHON_INLINE int __Pyx_IsLittleEndian(void) {
unsigned int n = 1; unsigned int n = 1;
return *(unsigned char*)(&n) != 0; return *(unsigned char*)(&n) != 0;
} }
...@@ -1123,7 +1123,7 @@ static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const cha ...@@ -1123,7 +1123,7 @@ static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const cha
} }
} }
static INLINE void __Pyx_ZeroBuffer(Py_buffer* buf) { static CYTHON_INLINE void __Pyx_ZeroBuffer(Py_buffer* buf) {
buf->buf = NULL; buf->buf = NULL;
buf->obj = NULL; buf->obj = NULL;
buf->strides = __Pyx_zeros; buf->strides = __Pyx_zeros;
...@@ -1164,7 +1164,7 @@ fail:; ...@@ -1164,7 +1164,7 @@ fail:;
return -1; return -1;
} }
static INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info) { static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info) {
if (info->buf == NULL) return; if (info->buf == NULL) return;
if (info->suboffsets == __Pyx_minusones) info->suboffsets = NULL; if (info->suboffsets == __Pyx_minusones) info->suboffsets = NULL;
__Pyx_ReleaseBuffer(info); __Pyx_ReleaseBuffer(info);
......
...@@ -50,7 +50,7 @@ builtin_function_table = [ ...@@ -50,7 +50,7 @@ builtin_function_table = [
#('round', "", "", ""), #('round', "", "", ""),
('setattr', "OOO", "r", "PyObject_SetAttr"), ('setattr', "OOO", "r", "PyObject_SetAttr"),
#('sum', "", "", ""), #('sum', "", "", ""),
('type', "O", "O", "PyObject_Type"), #('type', "O", "O", "PyObject_Type"),
#('unichr', "", "", ""), #('unichr', "", "", ""),
#('unicode', "", "", ""), #('unicode', "", "", ""),
#('vars', "", "", ""), #('vars', "", "", ""),
...@@ -120,6 +120,13 @@ builtin_types_table = [ ...@@ -120,6 +120,13 @@ builtin_types_table = [
("frozenset", "PyFrozenSet_Type", []), ("frozenset", "PyFrozenSet_Type", []),
] ]
types_that_construct_their_instance = (
# some builtin types do not always return an instance of
# themselves - these do:
'type', 'bool', 'long', 'float', 'bytes', 'unicode', 'tuple', 'list',
'dict', 'file', 'set', 'frozenset'
# 'str', # only in Py3.x
)
builtin_structs_table = [ builtin_structs_table = [
...@@ -287,19 +294,19 @@ proto = """ ...@@ -287,19 +294,19 @@ proto = """
#define PySet_Pop(set) \\ #define PySet_Pop(set) \\
PyObject_CallMethod(set, (char *)"pop", NULL) PyObject_CallMethod(set, (char *)"pop", NULL)
static INLINE int PySet_Clear(PyObject *set) { static CYTHON_INLINE int PySet_Clear(PyObject *set) {
PyObject *ret = PyObject_CallMethod(set, (char *)"clear", NULL); PyObject *ret = PyObject_CallMethod(set, (char *)"clear", NULL);
if (!ret) return -1; if (!ret) return -1;
Py_DECREF(ret); return 0; Py_DECREF(ret); return 0;
} }
static INLINE int PySet_Discard(PyObject *set, PyObject *key) { static CYTHON_INLINE int PySet_Discard(PyObject *set, PyObject *key) {
PyObject *ret = PyObject_CallMethod(set, (char *)"discard", (char *)"O", key); PyObject *ret = PyObject_CallMethod(set, (char *)"discard", (char *)"O", key);
if (!ret) return -1; if (!ret) return -1;
Py_DECREF(ret); return 0; Py_DECREF(ret); return 0;
} }
static INLINE int PySet_Add(PyObject *set, PyObject *key) { static CYTHON_INLINE int PySet_Add(PyObject *set, PyObject *key) {
PyObject *ret = PyObject_CallMethod(set, (char *)"add", (char *)"O", key); PyObject *ret = PyObject_CallMethod(set, (char *)"add", (char *)"O", key);
if (!ret) return -1; if (!ret) return -1;
Py_DECREF(ret); return 0; Py_DECREF(ret); return 0;
...@@ -407,7 +414,7 @@ def init_builtins(): ...@@ -407,7 +414,7 @@ def init_builtins():
init_builtin_types() init_builtin_types()
init_builtin_structs() init_builtin_structs()
global list_type, tuple_type, dict_type, set_type, type_type global list_type, tuple_type, dict_type, set_type, type_type
global bytes_type, str_type, unicode_type global bytes_type, str_type, unicode_type, float_type
type_type = builtin_scope.lookup('type').type type_type = builtin_scope.lookup('type').type
list_type = builtin_scope.lookup('list').type list_type = builtin_scope.lookup('list').type
tuple_type = builtin_scope.lookup('tuple').type tuple_type = builtin_scope.lookup('tuple').type
...@@ -416,5 +423,6 @@ def init_builtins(): ...@@ -416,5 +423,6 @@ def init_builtins():
bytes_type = builtin_scope.lookup('bytes').type bytes_type = builtin_scope.lookup('bytes').type
str_type = builtin_scope.lookup('str').type str_type = builtin_scope.lookup('str').type
unicode_type = builtin_scope.lookup('unicode').type unicode_type = builtin_scope.lookup('unicode').type
float_type = builtin_scope.lookup('float').type
init_builtins() init_builtins()
...@@ -80,11 +80,6 @@ def parse_command_line(args): ...@@ -80,11 +80,6 @@ def parse_command_line(args):
options.show_version = 1 options.show_version = 1
elif option in ("-l", "--create-listing"): elif option in ("-l", "--create-listing"):
options.use_listing_file = 1 options.use_listing_file = 1
elif option in ("-C", "--compile"):
options.c_only = 0
elif option in ("--link"):
options.c_only = 0
options.obj_only = 0
elif option in ("-+", "--cplus"): elif option in ("-+", "--cplus"):
options.cplus = 1 options.cplus = 1
elif option == "--embed": elif option == "--embed":
...@@ -121,7 +116,7 @@ def parse_command_line(args): ...@@ -121,7 +116,7 @@ def parse_command_line(args):
options.emit_linenums = True options.emit_linenums = True
elif option in ("-X", "--directive"): elif option in ("-X", "--directive"):
try: try:
options.compiler_directives = Options.parse_directive_list(pop_arg()) options.compiler_directives = Options.parse_directive_list(pop_arg(), relaxed_bool=True)
except ValueError, e: except ValueError, e:
sys.stderr.write("Error in compiler directive: %s\n" % e.message) sys.stderr.write("Error in compiler directive: %s\n" % e.message)
sys.exit(1) sys.exit(1)
...@@ -135,14 +130,9 @@ def parse_command_line(args): ...@@ -135,14 +130,9 @@ def parse_command_line(args):
elif arg.endswith(".py"): elif arg.endswith(".py"):
# maybe do some other stuff, but this should work for now # maybe do some other stuff, but this should work for now
sources.append(arg) sources.append(arg)
elif arg.endswith(".o"):
options.objects.append(arg)
else: else:
sys.stderr.write( sys.stderr.write(
"cython: %s: Unknown filename suffix\n" % arg) "cython: %s: Unknown filename suffix\n" % arg)
if options.objects and len(sources) > 1:
sys.stderr.write(
"cython: Only one source file allowed together with .o files\n")
if options.use_listing_file and len(sources) > 1: if options.use_listing_file and len(sources) > 1:
sys.stderr.write( sys.stderr.write(
"cython: Only one source file allowed when using -o\n") "cython: Only one source file allowed when using -o\n")
......
...@@ -619,15 +619,30 @@ class GlobalState(object): ...@@ -619,15 +619,30 @@ class GlobalState(object):
def add_cached_builtin_decl(self, entry): def add_cached_builtin_decl(self, entry):
if Options.cache_builtins: if Options.cache_builtins:
if self.should_declare(entry.cname, entry): if self.should_declare(entry.cname, entry):
interned_cname = self.get_interned_identifier(entry.name).cname
self.put_pyobject_decl(entry) self.put_pyobject_decl(entry)
w = self.parts['cached_builtins'] w = self.parts['cached_builtins']
w.putln('%s = __Pyx_GetName(%s, %s); if (!%s) %s' % ( if entry.name == 'xrange':
entry.cname, # replaced by range() in Py3
Naming.builtins_cname, w.putln('#if PY_MAJOR_VERSION >= 3')
interned_cname, self.put_cached_builtin_init(
entry.cname, entry.pos, StringEncoding.EncodedString('range'),
w.error_goto(entry.pos))) entry.cname)
w.putln('#else')
self.put_cached_builtin_init(
entry.pos, StringEncoding.EncodedString(entry.name),
entry.cname)
if entry.name == 'xrange':
w.putln('#endif')
def put_cached_builtin_init(self, pos, name, cname):
w = self.parts['cached_builtins']
interned_cname = self.get_interned_identifier(name).cname
w.putln('%s = __Pyx_GetName(%s, %s); if (!%s) %s' % (
cname,
Naming.builtins_cname,
interned_cname,
cname,
w.error_goto(pos)))
def generate_const_declarations(self): def generate_const_declarations(self):
self.generate_string_constants() self.generate_string_constants()
...@@ -1281,6 +1296,9 @@ class CCodeWriter(object): ...@@ -1281,6 +1296,9 @@ class CCodeWriter(object):
def put_finish_refcount_context(self): def put_finish_refcount_context(self):
self.putln("__Pyx_RefNannyFinishContext();") self.putln("__Pyx_RefNannyFinishContext();")
def put_trace_declarations(self):
self.putln('__Pyx_TraceDeclarations');
def put_trace_call(self, name, pos): def put_trace_call(self, name, pos):
self.putln('__Pyx_TraceCall("%s", %s[%s], %s);' % (name, Naming.filetable_cname, self.lookup_filename(pos[0]), pos[1])); self.putln('__Pyx_TraceCall("%s", %s[%s], %s);' % (name, Naming.filetable_cname, self.lookup_filename(pos[0]), pos[1]));
......
This diff is collapsed.
...@@ -8,7 +8,6 @@ compile-time values. ...@@ -8,7 +8,6 @@ compile-time values.
from Nodes import * from Nodes import *
from ExprNodes import * from ExprNodes import *
from Visitor import BasicVisitor
from Errors import CompileError from Errors import CompileError
......
# #
# Pyrex Scanner - Lexical Definitions # Cython Scanner - Lexical Definitions
#
# Changing anything in this file will cause Lexicon.pickle
# to be rebuilt next time pyrexc is run.
# #
raw_prefixes = "rR" raw_prefixes = "rR"
......
...@@ -92,7 +92,8 @@ class Context(object): ...@@ -92,7 +92,8 @@ class Context(object):
from AnalysedTreeTransforms import AutoTestDictTransform from AnalysedTreeTransforms import AutoTestDictTransform
from AutoDocTransforms import EmbedSignature from AutoDocTransforms import EmbedSignature
from Optimize import FlattenInListTransform, SwitchTransform, IterationTransform from Optimize import FlattenInListTransform, SwitchTransform, IterationTransform
from Optimize import OptimizeBuiltinCalls, ConstantFolding, FinalOptimizePhase from Optimize import EarlyReplaceBuiltinCalls, OptimizeBuiltinCalls
from Optimize import ConstantFolding, FinalOptimizePhase
from Optimize import DropRefcountingTransform from Optimize import DropRefcountingTransform
from Buffer import IntroduceBufferAuxiliaryVars from Buffer import IntroduceBufferAuxiliaryVars
from ModuleNode import check_c_declarations, check_c_declarations_pxd from ModuleNode import check_c_declarations, check_c_declarations_pxd
...@@ -133,6 +134,7 @@ class Context(object): ...@@ -133,6 +134,7 @@ class Context(object):
CreateClosureClasses(self), CreateClosureClasses(self),
AutoTestDictTransform(self), AutoTestDictTransform(self),
EmbedSignature(self), EmbedSignature(self),
EarlyReplaceBuiltinCalls(self),
MarkAssignments(self), MarkAssignments(self),
TransformBuiltinMethods(self), TransformBuiltinMethods(self),
IntroduceBufferAuxiliaryVars(self), IntroduceBufferAuxiliaryVars(self),
...@@ -509,15 +511,6 @@ class Context(object): ...@@ -509,15 +511,6 @@ class Context(object):
except EnvironmentError: except EnvironmentError:
pass pass
result.c_file = None result.c_file = None
if result.c_file and not options.c_only and c_compile:
result.object_file = c_compile(result.c_file,
verbose_flag = options.show_version,
cplus = options.cplus)
if not options.obj_only and c_link:
result.extension_file = c_link(result.object_file,
extra_objects = options.objects,
verbose_flag = options.show_version,
cplus = options.cplus)
def create_parse(context): def create_parse(context):
def parse(compsrc): def parse(compsrc):
...@@ -605,17 +598,11 @@ class CompilationOptions(object): ...@@ -605,17 +598,11 @@ class CompilationOptions(object):
compiler_directives dict Overrides for pragma options (see Options.py) compiler_directives dict Overrides for pragma options (see Options.py)
evaluate_tree_assertions boolean Test support: evaluate parse tree assertions evaluate_tree_assertions boolean Test support: evaluate parse tree assertions
Following options are experimental and only used on MacOSX:
c_only boolean Stop after generating C file (default)
obj_only boolean Stop after compiling to .o file
objects [string] Extra .o files to link with
cplus boolean Compile as c++ code cplus boolean Compile as c++ code
""" """
def __init__(self, defaults = None, c_compile = 0, c_link = 0, **kw): def __init__(self, defaults = None, **kw):
self.include_path = [] self.include_path = []
self.objects = []
if defaults: if defaults:
if isinstance(defaults, CompilationOptions): if isinstance(defaults, CompilationOptions):
defaults = defaults.__dict__ defaults = defaults.__dict__
...@@ -623,10 +610,6 @@ class CompilationOptions(object): ...@@ -623,10 +610,6 @@ class CompilationOptions(object):
defaults = default_options defaults = default_options
self.__dict__.update(defaults) self.__dict__.update(defaults)
self.__dict__.update(kw) self.__dict__.update(kw)
if c_compile:
self.c_only = 0
if c_link:
self.obj_only = 0
class CompilationResult(object): class CompilationResult(object):
...@@ -719,8 +702,7 @@ def compile_multiple(sources, options): ...@@ -719,8 +702,7 @@ def compile_multiple(sources, options):
"Cannot find .pyx file for cimported module '%s'\n" % module_name) "Cannot find .pyx file for cimported module '%s'\n" % module_name)
return results return results
def compile(source, options = None, c_compile = 0, c_link = 0, def compile(source, options = None, full_module_name = None, **kwds):
full_module_name = None, **kwds):
""" """
compile(source [, options], [, <option> = <value>]...) compile(source [, options], [, <option> = <value>]...)
...@@ -730,8 +712,7 @@ def compile(source, options = None, c_compile = 0, c_link = 0, ...@@ -730,8 +712,7 @@ def compile(source, options = None, c_compile = 0, c_link = 0,
checking is requested, a CompilationResult is returned, otherwise a checking is requested, a CompilationResult is returned, otherwise a
CompilationResultSet is returned. CompilationResultSet is returned.
""" """
options = CompilationOptions(defaults = options, c_compile = c_compile, options = CompilationOptions(defaults = options, **kwds)
c_link = c_link, **kwds)
if isinstance(source, basestring) and not options.timestamps \ if isinstance(source, basestring) and not options.timestamps \
and not options.recursive: and not options.recursive:
return compile_single(source, options, full_module_name) return compile_single(source, options, full_module_name)
...@@ -782,8 +763,6 @@ default_options = dict( ...@@ -782,8 +763,6 @@ default_options = dict(
show_version = 0, show_version = 0,
use_listing_file = 0, use_listing_file = 0,
errors_to_stderr = 1, errors_to_stderr = 1,
c_only = 1,
obj_only = 1,
cplus = 0, cplus = 0,
output_file = None, output_file = None,
annotate = False, annotate = False,
...@@ -797,13 +776,3 @@ default_options = dict( ...@@ -797,13 +776,3 @@ default_options = dict(
evaluate_tree_assertions = False, evaluate_tree_assertions = False,
emit_linenums = False, emit_linenums = False,
) )
if sys.platform == "mac":
from Cython.Mac.MacSystem import c_compile, c_link, CCompilerError
default_options['use_listing_file'] = 1
elif sys.platform == "darwin":
from Cython.Mac.DarwinSystem import c_compile, c_link, CCompilerError
else:
c_compile = None
c_link = None
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
...@@ -63,7 +63,11 @@ directive_defaults = { ...@@ -63,7 +63,11 @@ directive_defaults = {
'callspec' : "", 'callspec' : "",
'profile': False, 'profile': False,
'infer_types': False, 'infer_types': False,
'infer_types.verbose': False,
'autotestdict': True, 'autotestdict': True,
'warn': None,
'warn.undeclared': False,
# test support # test support
'test_assert_path_exists' : [], 'test_assert_path_exists' : [],
...@@ -71,7 +75,9 @@ directive_defaults = { ...@@ -71,7 +75,9 @@ directive_defaults = {
} }
# Override types possibilities above, if needed # Override types possibilities above, if needed
directive_types = {} directive_types = {
'infer_types' : bool, # values can be True/None/False
}
for key, val in directive_defaults.items(): for key, val in directive_defaults.items():
if key not in directive_types: if key not in directive_types:
...@@ -84,10 +90,10 @@ directive_scopes = { # defaults to available everywhere ...@@ -84,10 +90,10 @@ directive_scopes = { # defaults to available everywhere
'test_fail_if_path_exists' : ('function',), 'test_fail_if_path_exists' : ('function',),
} }
def parse_directive_value(name, value): def parse_directive_value(name, value, relaxed_bool=False):
""" """
Parses value as an option value for the given name and returns Parses value as an option value for the given name and returns
the interpreted value. None is returned if the option does not exist. the interpreted value. None is returned if the option does not exist.
>>> print parse_directive_value('nonexisting', 'asdf asdfd') >>> print parse_directive_value('nonexisting', 'asdf asdfd')
None None
...@@ -102,18 +108,25 @@ def parse_directive_value(name, value): ...@@ -102,18 +108,25 @@ def parse_directive_value(name, value):
type = directive_types.get(name) type = directive_types.get(name)
if not type: return None if not type: return None
if type is bool: if type is bool:
if value == "True": return True value = str(value)
elif value == "False": return False if value == 'True': return True
else: raise ValueError("%s directive must be set to True or False" % name) if value == 'False': return False
if relaxed_bool:
value = value.lower()
if value in ("true", "yes"): return True
elif value in ("false", "no"): return False
raise ValueError("%s directive must be set to True or False" % name)
elif type is int: elif type is int:
try: try:
return int(value) return int(value)
except ValueError: except ValueError:
raise ValueError("%s directive must be set to an integer" % name) raise ValueError("%s directive must be set to an integer" % name)
elif type is str:
return str(value)
else: else:
assert False assert False
def parse_directive_list(s): def parse_directive_list(s, relaxed_bool=False, ignore_unknown=False):
""" """
Parses a comma-seperated list of pragma options. Whitespace Parses a comma-seperated list of pragma options. Whitespace
is not considered. is not considered.
...@@ -130,7 +143,7 @@ def parse_directive_list(s): ...@@ -130,7 +143,7 @@ def parse_directive_list(s):
>>> parse_directive_list('boundscheck=hey') >>> parse_directive_list('boundscheck=hey')
Traceback (most recent call last): Traceback (most recent call last):
... ...
ValueError: Must pass a boolean value for option "boundscheck" ValueError: boundscheck directive must be set to True or False
>>> parse_directive_list('unknown=True') >>> parse_directive_list('unknown=True')
Traceback (most recent call last): Traceback (most recent call last):
... ...
...@@ -141,19 +154,11 @@ def parse_directive_list(s): ...@@ -141,19 +154,11 @@ def parse_directive_list(s):
item = item.strip() item = item.strip()
if not item: continue if not item: continue
if not '=' in item: raise ValueError('Expected "=" in option "%s"' % item) if not '=' in item: raise ValueError('Expected "=" in option "%s"' % item)
name, value = item.strip().split('=') name, value = [ s.strip() for s in item.strip().split('=', 1) ]
try: parsed_value = parse_directive_value(name, value, relaxed_bool=relaxed_bool)
type = directive_types[name] if parsed_value is None:
except KeyError: if not ignore_unknown:
raise ValueError('Unknown option: "%s"' % name) raise ValueError('Unknown option: "%s"' % name)
if type is bool:
value = value.lower()
if value in ('true', 'yes'):
value = True
elif value in ('false', 'no'):
value = False
else: raise ValueError('Must pass a boolean value for option "%s"' % name)
result[name] = value
else: else:
assert False result[name] = parsed_value
return result return result
This diff is collapsed.
# cython: auto_cpdef=True # cython: auto_cpdef=True, infer_types=True
# #
# Pyrex Parser # Pyrex Parser
# #
...@@ -2647,18 +2647,17 @@ def p_code(s, level=None): ...@@ -2647,18 +2647,17 @@ def p_code(s, level=None):
repr(s.sy), repr(s.systring))) repr(s.sy), repr(s.systring)))
return body return body
COMPILER_DIRECTIVE_COMMENT_RE = re.compile(r"^#\s*cython:\s*(\w+)\s*=(.*)$") COMPILER_DIRECTIVE_COMMENT_RE = re.compile(r"^#\s*cython:\s*((\w|[.])+\s*=.*)$")
def p_compiler_directive_comments(s): def p_compiler_directive_comments(s):
result = {} result = {}
while s.sy == 'commentline': while s.sy == 'commentline':
m = COMPILER_DIRECTIVE_COMMENT_RE.match(s.systring) m = COMPILER_DIRECTIVE_COMMENT_RE.match(s.systring)
if m: if m:
name = m.group(1) directives = m.group(1).strip()
try: try:
value = Options.parse_directive_value(str(name), str(m.group(2).strip())) result.update( Options.parse_directive_list(
if value is not None: # can be False! directives, ignore_unknown=True) )
result[name] = value
except ValueError, e: except ValueError, e:
s.error(e.args[0], fatal=False) s.error(e.args[0], fatal=False)
s.next() s.next()
......
This diff is collapsed.
# #
# Pyrex Scanner # Cython Scanner
# #
#import pickle
import cPickle as pickle
import os import os
import platform import platform
import stat import stat
...@@ -23,132 +20,17 @@ from Lexicon import string_prefixes, raw_prefixes, make_lexicon, IDENT ...@@ -23,132 +20,17 @@ from Lexicon import string_prefixes, raw_prefixes, make_lexicon, IDENT
from StringEncoding import EncodedString from StringEncoding import EncodedString
try:
plex_version = Plex._version
except AttributeError:
plex_version = None
#print "Plex version:", plex_version ###
debug_scanner = 0 debug_scanner = 0
trace_scanner = 0 trace_scanner = 0
scanner_debug_flags = 0 scanner_debug_flags = 0
scanner_dump_file = None scanner_dump_file = None
binary_lexicon_pickle = 1
notify_lexicon_unpickling = 0
notify_lexicon_pickling = 1
lexicon = None lexicon = None
#-----------------------------------------------------------------
def hash_source_file(path):
# Try to calculate a hash code for the given source file.
# Returns an empty string if the file cannot be accessed.
#print "Hashing", path ###
try:
from hashlib import md5 as new_md5
except ImportError:
from md5 import new as new_md5
f = None
try:
try:
f = open(path, "rU")
text = f.read()
except IOError, e:
print("Unable to hash scanner source file (%s)" % e)
return ""
finally:
if f:
f.close()
# Normalise spaces/tabs. We don't know what sort of
# space-tab substitution the file may have been
# through, so we replace all spans of spaces and
# tabs by a single space.
import re
text = re.sub("[ \t]+", " ", text)
hash = new_md5(text.encode("ASCII")).hexdigest()
return hash
def open_pickled_lexicon(expected_hash):
# Try to open pickled lexicon file and verify that
# it matches the source file. Returns the opened
# file if successful, otherwise None. ???
global lexicon_pickle
f = None
result = None
if os.path.exists(lexicon_pickle):
try:
f = open(lexicon_pickle, "rb")
actual_hash = pickle.load(f)
if actual_hash == expected_hash:
result = f
f = None
else:
print("Lexicon hash mismatch:") ###
print(" expected " + expected_hash) ###
print(" got " + actual_hash) ###
except (IOError, pickle.UnpicklingError), e:
print("Warning: Unable to read pickled lexicon " + lexicon_pickle)
print(e)
if f:
f.close()
return result
def try_to_unpickle_lexicon():
global lexicon, lexicon_pickle, lexicon_hash
dir = os.path.dirname(__file__)
source_file = os.path.join(dir, "Lexicon.py")
lexicon_hash = hash_source_file(source_file)
lexicon_pickle = os.path.join(dir, "Lexicon.pickle")
f = open_pickled_lexicon(lexicon_hash)
if f:
if notify_lexicon_unpickling:
t0 = time()
print("Unpickling lexicon...")
try:
lexicon = pickle.load(f)
except Exception, e:
print "WARNING: Exception while loading lexicon pickle, regenerating"
print e
lexicon = None
f.close()
if notify_lexicon_unpickling:
t1 = time()
print("Done (%.2f seconds)" % (t1 - t0))
def create_new_lexicon():
global lexicon
t0 = time()
print("Creating lexicon...")
lexicon = make_lexicon()
t1 = time()
print("Done (%.2f seconds)" % (t1 - t0))
def pickle_lexicon():
f = None
try:
f = open(lexicon_pickle, "wb")
except IOError:
print("Warning: Unable to save pickled lexicon in " + lexicon_pickle)
if f:
if notify_lexicon_pickling:
t0 = time()
print("Pickling lexicon...")
pickle.dump(lexicon_hash, f, binary_lexicon_pickle)
pickle.dump(lexicon, f, binary_lexicon_pickle)
f.close()
if notify_lexicon_pickling:
t1 = time()
print("Done (%.2f seconds)" % (t1 - t0))
def get_lexicon(): def get_lexicon():
global lexicon global lexicon
if not lexicon and plex_version is None:
try_to_unpickle_lexicon()
if not lexicon: if not lexicon:
create_new_lexicon() lexicon = make_lexicon()
if plex_version is None:
pickle_lexicon()
return lexicon return lexicon
#------------------------------------------------------------------ #------------------------------------------------------------------
......
...@@ -637,6 +637,7 @@ class BuiltinScope(Scope): ...@@ -637,6 +637,7 @@ class BuiltinScope(Scope):
var_entry.is_variable = 1 var_entry.is_variable = 1
var_entry.is_cglobal = 1 var_entry.is_cglobal = 1
var_entry.is_readonly = 1 var_entry.is_readonly = 1
var_entry.is_builtin = 1
var_entry.utility_code = utility_code var_entry.utility_code = utility_code
entry.as_variable = var_entry entry.as_variable = var_entry
...@@ -739,7 +740,8 @@ class ModuleScope(Scope): ...@@ -739,7 +740,8 @@ class ModuleScope(Scope):
return self return self
def declare_builtin(self, name, pos): def declare_builtin(self, name, pos):
if not hasattr(builtins, name): if not hasattr(builtins, name) and name != 'xrange':
# 'xrange' is special cased in Code.py
if self.has_import_star: if self.has_import_star:
entry = self.declare_var(name, py_object_type, pos) entry = self.declare_var(name, py_object_type, pos)
return entry return entry
...@@ -899,7 +901,7 @@ class ModuleScope(Scope): ...@@ -899,7 +901,7 @@ class ModuleScope(Scope):
# Make a new entry if needed # Make a new entry if needed
# #
if not entry: if not entry:
type = PyrexTypes.PyExtensionType(name, typedef_flag, base_type) type = PyrexTypes.PyExtensionType(name, typedef_flag, base_type, visibility == 'extern')
type.pos = pos type.pos = pos
type.buffer_defaults = buffer_defaults type.buffer_defaults = buffer_defaults
if objtypedef_cname is not None: if objtypedef_cname is not None:
...@@ -1230,6 +1232,9 @@ class ClassScope(Scope): ...@@ -1230,6 +1232,9 @@ class ClassScope(Scope):
return self.outer_scope.add_string_const(value, identifier) return self.outer_scope.add_string_const(value, identifier)
def lookup(self, name): def lookup(self, name):
entry = Scope.lookup(self, name)
if entry:
return entry
if name == "classmethod": if name == "classmethod":
# We don't want to use the builtin classmethod here 'cause it won't do the # We don't want to use the builtin classmethod here 'cause it won't do the
# right thing in this scope (as the class memebers aren't still functions). # right thing in this scope (as the class memebers aren't still functions).
...@@ -1243,9 +1248,7 @@ class ClassScope(Scope): ...@@ -1243,9 +1248,7 @@ class ClassScope(Scope):
py_object_type, py_object_type,
[PyrexTypes.CFuncTypeArg("", py_object_type, None)], 0, 0)) [PyrexTypes.CFuncTypeArg("", py_object_type, None)], 0, 0))
entry.is_cfunction = 1 entry.is_cfunction = 1
return entry return entry
else:
return Scope.lookup(self, name)
class PyClassScope(ClassScope): class PyClassScope(ClassScope):
......
...@@ -36,6 +36,11 @@ class TestTreePath(TransformTest): ...@@ -36,6 +36,11 @@ class TestTreePath(TransformTest):
self.assertEquals(2, len(find_all(t, "//NameNode/@name"))) self.assertEquals(2, len(find_all(t, "//NameNode/@name")))
self.assertEquals(['fun', 'decorator'], find_all(t, "//NameNode/@name")) self.assertEquals(['fun', 'decorator'], find_all(t, "//NameNode/@name"))
def test_node_path_attribute_dotted(self):
t = self._build_tree()
self.assertEquals(1, len(find_all(t, "//ReturnStatNode/@value.name")))
self.assertEquals(['fun'], find_all(t, "//ReturnStatNode/@value.name"))
def test_node_path_child(self): def test_node_path_child(self):
t = self._build_tree() t = self._build_tree()
self.assertEquals(1, len(find_all(t, "//DefNode/ReturnStatNode/NameNode"))) self.assertEquals(1, len(find_all(t, "//DefNode/ReturnStatNode/NameNode")))
......
...@@ -7,6 +7,7 @@ specific descendant or a node that holds an attribute. ...@@ -7,6 +7,7 @@ specific descendant or a node that holds an attribute.
""" """
import re import re
import sys
path_tokenizer = re.compile( path_tokenizer = re.compile(
"(" "("
...@@ -144,11 +145,21 @@ def handle_attribute(next, token): ...@@ -144,11 +145,21 @@ def handle_attribute(next, token):
else: else:
if token[0] == '=': if token[0] == '=':
value = parse_path_value(next) value = parse_path_value(next)
if sys.version_info >= (2,6) or (sys.version_info >= (2,4) and '.' not in name):
import operator
readattr = operator.attrgetter(name)
else:
name_path = name.split('.')
def readattr(node):
attr_value = node
for attr in name_path:
attr_value = getattr(attr_value, attr)
return attr_value
if value is None: if value is None:
def select(result): def select(result):
for node in result: for node in result:
try: try:
attr_value = getattr(node, name) attr_value = readattr(node)
except AttributeError: except AttributeError:
continue continue
if attr_value is not None: if attr_value is not None:
...@@ -157,11 +168,11 @@ def handle_attribute(next, token): ...@@ -157,11 +168,11 @@ def handle_attribute(next, token):
def select(result): def select(result):
for node in result: for node in result:
try: try:
attr_value = getattr(node, name) attr_value = readattr(node)
except AttributeError: except AttributeError:
continue continue
if attr_value == value: if attr_value == value:
yield value yield attr_value
return select return select
def parse_path_value(next): def parse_path_value(next):
......
from Errors import error, warning, warn_once, InternalError
import ExprNodes import ExprNodes
from PyrexTypes import py_object_type, unspecified_type, spanning_type import Nodes
import Builtin
import PyrexTypes
from PyrexTypes import py_object_type, unspecified_type
from Visitor import CythonTransform from Visitor import CythonTransform
try: try:
...@@ -19,10 +23,9 @@ object_expr = TypedExprNode(py_object_type) ...@@ -19,10 +23,9 @@ object_expr = TypedExprNode(py_object_type)
class MarkAssignments(CythonTransform): class MarkAssignments(CythonTransform):
def mark_assignment(self, lhs, rhs): def mark_assignment(self, lhs, rhs):
if isinstance(lhs, ExprNodes.NameNode): if isinstance(lhs, (ExprNodes.NameNode, Nodes.PyArgDeclNode)):
if lhs.entry is None: if lhs.entry is None:
# TODO: This shouldn't happen... # TODO: This shouldn't happen...
# It looks like comprehension loop targets are not declared soon enough.
return return
lhs.entry.assignments.append(rhs) lhs.entry.assignments.append(rhs)
elif isinstance(lhs, ExprNodes.SequenceNode): elif isinstance(lhs, ExprNodes.SequenceNode):
...@@ -50,24 +53,23 @@ class MarkAssignments(CythonTransform): ...@@ -50,24 +53,23 @@ class MarkAssignments(CythonTransform):
def visit_ForInStatNode(self, node): def visit_ForInStatNode(self, node):
# TODO: Remove redundancy with range optimization... # TODO: Remove redundancy with range optimization...
is_range = False is_special = False
sequence = node.iterator.sequence sequence = node.iterator.sequence
if isinstance(sequence, ExprNodes.SimpleCallNode): if isinstance(sequence, ExprNodes.SimpleCallNode):
function = sequence.function function = sequence.function
if sequence.self is None and \ if sequence.self is None and function.is_name:
isinstance(function, ExprNodes.NameNode) and \ if function.name in ('range', 'xrange'):
function.name in ('range', 'xrange'): is_special = True
is_range = True for arg in sequence.args[:2]:
self.mark_assignment(node.target, sequence.args[0]) self.mark_assignment(node.target, arg)
if len(sequence.args) > 1:
self.mark_assignment(node.target, sequence.args[1])
if len(sequence.args) > 2: if len(sequence.args) > 2:
self.mark_assignment(node.target, self.mark_assignment(
ExprNodes.binop_node(node.pos, node.target,
'+', ExprNodes.binop_node(node.pos,
sequence.args[0], '+',
sequence.args[2])) sequence.args[0],
if not is_range: sequence.args[2]))
if not is_special:
self.mark_assignment(node.target, object_expr) self.mark_assignment(node.target, object_expr)
self.visitchildren(node) self.visitchildren(node)
return node return node
...@@ -99,6 +101,17 @@ class MarkAssignments(CythonTransform): ...@@ -99,6 +101,17 @@ class MarkAssignments(CythonTransform):
self.visitchildren(node) self.visitchildren(node)
return node return node
def visit_DefNode(self, node):
# use fake expressions with the right result type
if node.star_arg:
self.mark_assignment(
node.star_arg, TypedExprNode(Builtin.tuple_type))
if node.starstar_arg:
self.mark_assignment(
node.starstar_arg, TypedExprNode(Builtin.dict_type))
self.visitchildren(node)
return node
class PyObjectTypeInferer: class PyObjectTypeInferer:
""" """
...@@ -119,6 +132,18 @@ class SimpleAssignmentTypeInferer: ...@@ -119,6 +132,18 @@ class SimpleAssignmentTypeInferer:
# TODO: Implement a real type inference algorithm. # TODO: Implement a real type inference algorithm.
# (Something more powerful than just extending this one...) # (Something more powerful than just extending this one...)
def infer_types(self, scope): def infer_types(self, scope):
enabled = scope.directives['infer_types']
verbose = scope.directives['infer_types.verbose']
if enabled == True:
spanning_type = aggressive_spanning_type
elif enabled is None: # safe mode
spanning_type = safe_spanning_type
else:
for entry in scope.entries.values():
if entry.type is unspecified_type:
entry.type = py_object_type
return
dependancies_by_entry = {} # entry -> dependancies dependancies_by_entry = {} # entry -> dependancies
entries_by_dependancy = {} # dependancy -> entries entries_by_dependancy = {} # dependancy -> entries
ready_to_infer = [] ready_to_infer = []
...@@ -150,20 +175,22 @@ class SimpleAssignmentTypeInferer: ...@@ -150,20 +175,22 @@ class SimpleAssignmentTypeInferer:
entry = ready_to_infer.pop() entry = ready_to_infer.pop()
types = [expr.infer_type(scope) for expr in entry.assignments] types = [expr.infer_type(scope) for expr in entry.assignments]
if types: if types:
entry.type = reduce(spanning_type, types) entry.type = spanning_type(types)
else: else:
# List comprehension? # FIXME: raise a warning?
# print "No assignments", entry.pos, entry # print "No assignments", entry.pos, entry
entry.type = py_object_type entry.type = py_object_type
if verbose:
warning(entry.pos, "inferred '%s' to be of type '%s'" % (entry.name, entry.type), 1)
resolve_dependancy(entry) resolve_dependancy(entry)
# Deal with simple circular dependancies... # Deal with simple circular dependancies...
for entry, deps in dependancies_by_entry.items(): for entry, deps in dependancies_by_entry.items():
if len(deps) == 1 and deps == set([entry]): if len(deps) == 1 and deps == set([entry]):
types = [expr.infer_type(scope) for expr in entry.assignments if expr.type_dependencies(scope) == ()] types = [expr.infer_type(scope) for expr in entry.assignments if expr.type_dependencies(scope) == ()]
if types: if types:
entry.type = reduce(spanning_type, types) entry.type = spanning_type(types)
types = [expr.infer_type(scope) for expr in entry.assignments] types = [expr.infer_type(scope) for expr in entry.assignments]
entry.type = reduce(spanning_type, types) # might be wider... entry.type = spanning_type(types) # might be wider...
resolve_dependancy(entry) resolve_dependancy(entry)
del dependancies_by_entry[entry] del dependancies_by_entry[entry]
if ready_to_infer: if ready_to_infer:
...@@ -174,6 +201,42 @@ class SimpleAssignmentTypeInferer: ...@@ -174,6 +201,42 @@ class SimpleAssignmentTypeInferer:
# We can't figure out the rest with this algorithm, let them be objects. # We can't figure out the rest with this algorithm, let them be objects.
for entry in dependancies_by_entry: for entry in dependancies_by_entry:
entry.type = py_object_type entry.type = py_object_type
if verbose:
warning(entry.pos, "inferred '%s' to be of type '%s' (default)" % (entry.name, entry.type), 1)
def find_spanning_type(type1, type2):
if type1 is type2:
return type1
elif type1 is PyrexTypes.c_bint_type or type2 is PyrexTypes.c_bint_type:
# type inference can break the coercion back to a Python bool
# if it returns an arbitrary int type here
return py_object_type
result_type = PyrexTypes.spanning_type(type1, type2)
if result_type in (PyrexTypes.c_double_type, PyrexTypes.c_float_type, Builtin.float_type):
# Python's float type is just a C double, so it's safe to
# use the C type instead
return PyrexTypes.c_double_type
return result_type
def aggressive_spanning_type(types):
result_type = reduce(find_spanning_type, types)
return result_type
def safe_spanning_type(types):
result_type = reduce(find_spanning_type, types)
if result_type.is_pyobject:
# any specific Python type is always safe to infer
return result_type
elif result_type is PyrexTypes.c_double_type:
# Python's float type is just a C double, so it's safe to use
# the C type instead
return result_type
elif result_type is PyrexTypes.c_bint_type:
# find_spanning_type() only returns 'bint' for clean boolean
# operations without other int types, so this is safe, too
return result_type
return py_object_type
def get_type_inferer(): def get_type_inferer():
return SimpleAssignmentTypeInferer() return SimpleAssignmentTypeInferer()
...@@ -83,6 +83,7 @@ class Signature(object): ...@@ -83,6 +83,7 @@ class Signature(object):
return len(self.fixed_arg_format) return len(self.fixed_arg_format)
def is_self_arg(self, i): def is_self_arg(self, i):
# argument is 'self' for methods or 'class' for classmethods
return self.fixed_arg_format[i] == 'T' return self.fixed_arg_format[i] == 'T'
def fixed_arg_type(self, i): def fixed_arg_type(self, i):
......
...@@ -127,6 +127,9 @@ class ResultRefNode(AtomicExprNode): ...@@ -127,6 +127,9 @@ class ResultRefNode(AtomicExprNode):
def analyse_types(self, env): def analyse_types(self, env):
self.type = self.expression.type self.type = self.expression.type
def infer_type(self, env):
return self.expression.infer_type(env)
def result(self): def result(self):
return self.result_code return self.result_code
...@@ -164,9 +167,9 @@ class LetNodeMixin: ...@@ -164,9 +167,9 @@ class LetNodeMixin:
def setup_temp_expr(self, code): def setup_temp_expr(self, code):
self.temp_expression.generate_evaluation_code(code) self.temp_expression.generate_evaluation_code(code)
self.result_in_temp = self.temp_expression.result_in_temp() self._result_in_temp = self.temp_expression.result_in_temp()
self.temp_type = self.temp_expression.type self.temp_type = self.temp_expression.type
if self.result_in_temp: if self._result_in_temp:
self.temp = self.temp_expression.result() self.temp = self.temp_expression.result()
else: else:
self.temp_expression.make_owned_reference(code) self.temp_expression.make_owned_reference(code)
...@@ -176,7 +179,7 @@ class LetNodeMixin: ...@@ -176,7 +179,7 @@ class LetNodeMixin:
self.lazy_temp.result_code = self.temp self.lazy_temp.result_code = self.temp
def teardown_temp_expr(self, code): def teardown_temp_expr(self, code):
if not self.result_in_temp: if not self._result_in_temp:
if self.temp_type.is_pyobject: if self.temp_type.is_pyobject:
code.put_decref_clear(self.temp, self.temp_type) code.put_decref_clear(self.temp, self.temp_type)
code.funcstate.release_temp(self.temp) code.funcstate.release_temp(self.temp)
...@@ -191,6 +194,11 @@ class EvalWithTempExprNode(ExprNodes.ExprNode, LetNodeMixin): ...@@ -191,6 +194,11 @@ class EvalWithTempExprNode(ExprNodes.ExprNode, LetNodeMixin):
self.set_temp_expr(lazy_temp) self.set_temp_expr(lazy_temp)
self.pos = subexpression.pos self.pos = subexpression.pos
self.subexpression = subexpression self.subexpression = subexpression
# if called after type analysis, we already know the type here
self.type = self.subexpression.type
def infer_type(self, env):
return self.subexpression.infer_type(env)
def result(self): def result(self):
return self.subexpression.result() return self.subexpression.result()
......
version = '0.12' version = '0.12.1.beta0'
cimport cython
cdef class BasicVisitor: cdef class BasicVisitor:
cdef dict dispatch_table cdef dict dispatch_table
cpdef visit(self, obj) cpdef visit(self, obj)
cpdef find_handler(self, obj)
cdef class TreeVisitor(BasicVisitor): cdef class TreeVisitor(BasicVisitor):
cdef public list access_path cdef public list access_path
cpdef visitchild(self, child, parent, attrname, idx) cpdef visitchild(self, child, parent, attrname, idx)
@cython.locals(idx=int)
cpdef dict _visitchildren(self, parent, attrs)
# cpdef visitchildren(self, parent, attrs=*) # cpdef visitchildren(self, parent, attrs=*)
cdef class VisitorTransform(TreeVisitor): cdef class VisitorTransform(TreeVisitor):
......
# cython: infer_types=True
# #
# Tree visitor and transform framework # Tree visitor and transform framework
# #
...@@ -8,7 +10,6 @@ import ExprNodes ...@@ -8,7 +10,6 @@ import ExprNodes
import Naming import Naming
import Errors import Errors
import DebugFlags import DebugFlags
from StringEncoding import EncodedString
class BasicVisitor(object): class BasicVisitor(object):
"""A generic visitor base class which can be used for visiting any kind of object.""" """A generic visitor base class which can be used for visiting any kind of object."""
...@@ -19,32 +20,36 @@ class BasicVisitor(object): ...@@ -19,32 +20,36 @@ class BasicVisitor(object):
self.dispatch_table = {} self.dispatch_table = {}
def visit(self, obj): def visit(self, obj):
cls = type(obj)
try: try:
handler_method = self.dispatch_table[cls] handler_method = self.dispatch_table[type(obj)]
except KeyError: except KeyError:
#print "Cache miss for class %s in visitor %s" % ( handler_method = self.find_handler(obj)
# cls.__name__, type(self).__name__) self.dispatch_table[type(obj)] = handler_method
# Must resolve, try entire hierarchy
pattern = "visit_%s"
mro = inspect.getmro(cls)
handler_method = None
for mro_cls in mro:
if hasattr(self, pattern % mro_cls.__name__):
handler_method = getattr(self, pattern % mro_cls.__name__)
break
if handler_method is None:
print type(self), type(obj)
if hasattr(self, 'access_path') and self.access_path:
print self.access_path
if self.access_path:
print self.access_path[-1][0].pos
print self.access_path[-1][0].__dict__
raise RuntimeError("Visitor does not accept object: %s" % obj)
#print "Caching " + cls.__name__
self.dispatch_table[cls] = handler_method
return handler_method(obj) return handler_method(obj)
def find_handler(self, obj):
cls = type(obj)
#print "Cache miss for class %s in visitor %s" % (
# cls.__name__, type(self).__name__)
# Must resolve, try entire hierarchy
pattern = "visit_%s"
mro = inspect.getmro(cls)
handler_method = None
for mro_cls in mro:
if hasattr(self, pattern % mro_cls.__name__):
handler_method = getattr(self, pattern % mro_cls.__name__)
break
if handler_method is None:
print type(self), cls
if hasattr(self, 'access_path') and self.access_path:
print self.access_path
if self.access_path:
print self.access_path[-1][0].pos
print self.access_path[-1][0].__dict__
raise RuntimeError("Visitor does not accept object: %s" % obj)
#print "Caching " + cls.__name__
return handler_method
class TreeVisitor(BasicVisitor): class TreeVisitor(BasicVisitor):
""" """
Base class for writing visitors for a Cython tree, contains utilities for Base class for writing visitors for a Cython tree, contains utilities for
...@@ -144,6 +149,29 @@ class TreeVisitor(BasicVisitor): ...@@ -144,6 +149,29 @@ class TreeVisitor(BasicVisitor):
stacktrace = stacktrace.tb_next stacktrace = stacktrace.tb_next
return (last_traceback, nodes) return (last_traceback, nodes)
def _raise_compiler_error(self, child, e):
import sys
trace = ['']
for parent, attribute, index in self.access_path:
node = getattr(parent, attribute)
if index is None:
index = ''
else:
node = node[index]
index = u'[%d]' % index
trace.append(u'%s.%s%s = %s' % (
parent.__class__.__name__, attribute, index,
self.dump_node(node)))
stacktrace, called_nodes = self._find_node_path(sys.exc_info()[2])
last_node = child
for node, method_name, pos in called_nodes:
last_node = node
trace.append(u"File '%s', line %d, in %s: %s" % (
pos[0], pos[1], method_name, self.dump_node(node)))
raise Errors.CompilerCrash(
last_node.pos, self.__class__.__name__,
u'\n'.join(trace), e, stacktrace)
def visitchild(self, child, parent, attrname, idx): def visitchild(self, child, parent, attrname, idx):
self.access_path.append((parent, attrname, idx)) self.access_path.append((parent, attrname, idx))
try: try:
...@@ -151,33 +179,16 @@ class TreeVisitor(BasicVisitor): ...@@ -151,33 +179,16 @@ class TreeVisitor(BasicVisitor):
except Errors.CompileError: except Errors.CompileError:
raise raise
except Exception, e: except Exception, e:
import sys
if DebugFlags.debug_no_exception_intercept: if DebugFlags.debug_no_exception_intercept:
raise raise
trace = [''] self._raise_compiler_error(child, e)
for parent, attribute, index in self.access_path:
node = getattr(parent, attribute)
if index is None:
index = ''
else:
node = node[index]
index = u'[%d]' % index
trace.append(u'%s.%s%s = %s' % (
parent.__class__.__name__, attribute, index,
self.dump_node(node)))
stacktrace, called_nodes = self._find_node_path(sys.exc_info()[2])
last_node = child
for node, method_name, pos in called_nodes:
last_node = node
trace.append(u"File '%s', line %d, in %s: %s" % (
pos[0], pos[1], method_name, self.dump_node(node)))
raise Errors.CompilerCrash(
last_node.pos, self.__class__.__name__,
u'\n'.join(trace), e, stacktrace)
self.access_path.pop() self.access_path.pop()
return result return result
def visitchildren(self, parent, attrs=None): def visitchildren(self, parent, attrs=None):
return self._visitchildren(parent, attrs)
def _visitchildren(self, parent, attrs):
""" """
Visits the children of the given parent. If parent is None, returns Visits the children of the given parent. If parent is None, returns
immediately (returning None). immediately (returning None).
...@@ -223,8 +234,7 @@ class VisitorTransform(TreeVisitor): ...@@ -223,8 +234,7 @@ class VisitorTransform(TreeVisitor):
are within a StatListNode or similar before doing this.) are within a StatListNode or similar before doing this.)
""" """
def visitchildren(self, parent, attrs=None): def visitchildren(self, parent, attrs=None):
result = cython.declare(dict) result = self._visitchildren(parent, attrs)
result = TreeVisitor.visitchildren(self, parent, attrs)
for attr, newnode in result.iteritems(): for attr, newnode in result.iteritems():
if not type(newnode) is list: if not type(newnode) is list:
setattr(parent, attr, newnode) setattr(parent, attr, newnode)
......
...@@ -146,6 +146,7 @@ from python_getargs cimport * ...@@ -146,6 +146,7 @@ from python_getargs cimport *
# Python <= 2.x # Python <= 2.x
from python_cobject cimport * from python_cobject cimport *
from python_oldbuffer cimport *
# Python >= 2.4 # Python >= 2.4
from python_set cimport * from python_set cimport *
......
# Legacy Python 2 buffer interface.
#
# These functions are no longer available in Python 3, use the new
# buffer interface instead.
cdef extern from "Python.h":
    cdef enum _:
        # Anonymous enum used only to expose the sentinel constant below.
        Py_END_OF_BUFFER
    # This constant may be passed as the size parameter to
    # PyBuffer_FromObject() or PyBuffer_FromReadWriteObject(). It
    # indicates that the new PyBufferObject should refer to base object
    # from the specified offset to the end of its exported
    # buffer. Using this enables the caller to avoid querying the base
    # object for its length.

    bint PyBuffer_Check(object p)
    # Return true if the argument has type PyBuffer_Type.

    object PyBuffer_FromObject(object base, Py_ssize_t offset, Py_ssize_t size)
    # Return value: New reference.
    #
    # Return a new read-only buffer object. This raises TypeError if
    # base doesn't support the read-only buffer protocol or doesn't
    # provide exactly one buffer segment, or it raises ValueError if
    # offset is less than zero. The buffer will hold a reference to the
    # base object, and the buffer's contents will refer to the base
    # object's buffer interface, starting as position offset and
    # extending for size bytes. If size is Py_END_OF_BUFFER, then the
    # new buffer's contents extend to the length of the base object's
    # exported buffer data.

    object PyBuffer_FromReadWriteObject(object base, Py_ssize_t offset, Py_ssize_t size)
    # Return value: New reference.
    #
    # Return a new writable buffer object. Parameters and exceptions
    # are similar to those for PyBuffer_FromObject(). If the base
    # object does not export the writeable buffer protocol, then
    # TypeError is raised.

    object PyBuffer_FromMemory(void *ptr, Py_ssize_t size)
    # Return value: New reference.
    #
    # Return a new read-only buffer object that reads from a specified
    # location in memory, with a specified size. The caller is
    # responsible for ensuring that the memory buffer, passed in as
    # ptr, is not deallocated while the returned buffer object
    # exists. Raises ValueError if size is less than zero. Note that
    # Py_END_OF_BUFFER may not be passed for the size parameter;
    # ValueError will be raised in that case.

    object PyBuffer_FromReadWriteMemory(void *ptr, Py_ssize_t size)
    # Return value: New reference.
    #
    # Similar to PyBuffer_FromMemory(), but the returned buffer is
    # writable.

    object PyBuffer_New(Py_ssize_t size)
    # Return value: New reference.
    #
    # Return a new writable buffer object that maintains its own memory
    # buffer of size bytes. ValueError is returned if size is not zero
    # or positive. Note that the memory buffer (as returned by
    # PyObject_AsWriteBuffer()) is not specifically aligned.
#
# Pyrex - Darwin system interface
#
verbose = 0

# Switches controlling how strictly gcc is invoked below.
gcc_pendantic = True          # (sic) adds -pedantic -Wno-long-long
gcc_warnings_are_errors = True
gcc_all_warnings = True
gcc_optimize = False

import os, sys
from Cython.Utils import replace_suffix
from Cython.Compiler.Errors import PyrexError

# Header directory of the framework Python matching the running interpreter.
version_string = "%s.%s" % sys.version_info[:2]
py_include_dirs = [
    "/Library/Frameworks/Python.framework/Versions/%s/Headers" % version_string
]
# Determine the OS X product version string, e.g. "10.6.8".
osx_version = os.popen('sw_vers | grep ProductVersion').read().split()[1]

# MACOSX_DEPLOYMENT_TARGET can be set to 10.3 in most cases.
# But for the built-in Python 2.5.1 on Leopard, it needs to be set for 10.5.
# This looks like a bug that will be fixed in 2.5.2. If Apple updates their
# Python to 2.5.2, this fix should be OK.
import distutils.sysconfig as sc
python_prefix = sc.get_config_var('prefix')
leopard_python_prefix = '/System/Library/Frameworks/Python.framework/Versions/2.5'
full_version = "%s.%s.%s" % sys.version_info[:3]

def _version_tuple(version_string):
    # Compare versions numerically: "10.10" is newer than "10.6", but the
    # plain string comparison used previously would say otherwise.
    return tuple(int(part) for part in version_string.split('.') if part.isdigit())

if python_prefix == leopard_python_prefix and full_version == '2.5.1':
    os.environ["MACOSX_DEPLOYMENT_TARGET"] = "10.5"
elif _version_tuple(osx_version) >= (10, 6):
    os.environ["MACOSX_DEPLOYMENT_TARGET"] = "10.4"
else:
    os.environ["MACOSX_DEPLOYMENT_TARGET"] = "10.3"
compilers = ["gcc", "g++"]
compiler_options = \
"-g -c -fno-strict-aliasing -no-cpp-precomp " \
"-mno-fused-madd -fno-common -dynamic " \
.split()
if gcc_pendantic:
compiler_options.extend(["-pedantic", "-Wno-long-long"])
if gcc_warnings_are_errors:
compiler_options.append("-Werror")
if gcc_all_warnings:
compiler_options.append("-Wall")
compiler_options.append("-Wno-unused-function")
if gcc_optimize:
compiler_options.append("-O")
linkers = ["gcc", "g++"]
linker_options = \
"-Wl,-F.,-w -bundle -undefined dynamic_lookup" \
.split()
#linker_options = \
# "-Wl,-F.,-w -bundle -framework Python" \
# .split()
class CCompilerError(PyrexError):
    """Raised when the external C compiler or linker exits with a
    non-zero status."""
    pass
def c_compile(c_file, verbose_flag = 0, cplus = 0, obj_suffix = ".o"):
    """Compile the given C source file into an object file and return
    the pathname of the resulting file.  Raises CCompilerError when the
    compiler exits with a non-zero status."""
    source = os.path.join(os.getcwd(), c_file)
    object_file = replace_suffix(source, obj_suffix)
    includes = ["-I%s" % d for d in py_include_dirs]
    cc = compilers[bool(cplus)]
    command = [cc] + compiler_options + includes + [source, "-o", object_file]
    if verbose_flag or verbose:
        print(" ".join(command))
    status = os.spawnvp(os.P_WAIT, cc, command)
    if status != 0:
        raise CCompilerError("C compiler returned status %s" % status)
    return object_file
def c_link(obj_file, verbose_flag = 0, extra_objects = None, cplus = 0):
    """Link a single object file (plus any extra_objects) into a
    dynamically loadable extension and return its pathname.

    extra_objects defaults to None rather than a shared mutable list
    literal, avoiding the classic mutable-default-argument pitfall.
    """
    return c_link_list([obj_file] + (extra_objects or []), verbose_flag, cplus)
def c_link_list(obj_files, verbose_flag = 0, cplus = 0):
    """Link the given object files into a dynamically loadable extension
    file and return the pathname of the resulting file.  Raises
    CCompilerError when the linker exits with a non-zero status."""
    result = replace_suffix(obj_files[0], ".so")
    ld = linkers[bool(cplus)]
    command = [ld] + linker_options + obj_files + ["-o", result]
    if verbose_flag or verbose:
        print(" ".join(command))
    status = os.spawnvp(os.P_WAIT, ld, command)
    if status != 0:
        raise CCompilerError("Linker returned status %s" % status)
    return result
#
# Pyrex -- Mac system interface
#
import os, sys
import aetools
from aetools import TalkTo
from StdSuites.Standard_Suite import Standard_Suite_Events as Standard_Suite
from Cython.Utils import replace_suffix
from Cython.Compiler.Errors import PyrexError
# CodeWarrior / MPW toolchain configuration.
c_compiler = "MWCPPC"
c_optimizations = "off"
#c_linker = "PPCLink"
c_linker = "MWLinkPPC"
shared_lib_suffix = ".slb"

# Mac-style (colon-separated) paths into the Python installation.
#py_home = "Python2.2:Home:"
py_home = sys.exec_prefix
py_include_dirs = (
    py_home + "Include:",
    py_home + "Mac:Include:"
)
pythoncore = py_home + "PythonCore"

# CodeWarrior runtime libraries to link against.
mwlibdir = "MPW:Interfaces&Libraries:Libraries:MWPPCLibraries:"
libraries = (
    #mwlibdir + "'MSL C.PPC.Lib'",
    #mwlibdir + "'MSL RuntimePPC.Lib'",
    mwlibdir + "'MSL ShLibRuntime.Lib'",
    mwlibdir + "InterfaceLib",
    #mwlibdir + "MathLib",
)
class CCompilerError(PyrexError):
    """Raised when the external C compiler or linker reports failure."""
    pass
#---------------- ToolServer ---------------------------
from TS_Misc_Suite import TS_Misc_Suite
class ToolServer(Standard_Suite, TS_Misc_Suite, TalkTo):
    # AppleEvent proxy for the MPW ToolServer application
    # (creator code 'MPSX', see send_toolserver_command below).
    pass
def send_toolserver_command(cmd):
    """Launch (or connect to) ToolServer and execute *cmd*, returning
    the raw DoScript result tuple."""
    server = ToolServer('MPSX', start = 1)
    return server.DoScript(cmd)
def do_toolserver_command(command):
try:
result = send_toolserver_command(command)
except aetools.Error, e:
raise CCompilerError("Apple Event error: %s" % e)
errn, stat, stdout, stderr = result
if errn:
raise CCompilerError("ToolServer error: %s" % errn)
stdout = stdout.replace("\r", "\n")
stderr = stderr.replace("\r", "\n")
if stdout:
#print "<<< Begin ToolServer StdOut >>>"
sys.stderr.write(stdout)
#print "<<< End ToolServer StdOut >>>"
if stderr:
#print "<<< Begin ToolServer StdErr >>>"
sys.stderr.write(stderr)
#print "<<< End ToolServer StdErr >>>"
return stat
#-------------------------------------------------------
def c_compile(c_file):
    """Compile the given C source file via ToolServer into an object
    file and return the pathname of the resulting file."""
    source = os.path.join(os.getcwd(), c_file)
    object_file = replace_suffix(source, ".o")
    # Search the source's own directory first, then the Python headers.
    search_dirs = [os.path.dirname(source)] + list(py_include_dirs)
    includes = ' '.join(["-i %s" % d for d in search_dirs])
    command = "%s -opt %s -nomapcr -w off -r %s %s -o %s" % (
        c_compiler,
        c_optimizations,
        includes,
        source,
        object_file,
    )
    stat = do_toolserver_command(command)
    if stat:
        raise CCompilerError("C compiler returned status %s" % stat)
    return object_file
def c_link(obj_file):
    """Link a single object file into a shared library (see c_link_list)."""
    return c_link_list([obj_file])
def c_link_list(obj_files):
    """Link the given object files into a dynamically loadable extension
    file via ToolServer and return the pathname of the resulting file."""
    out_file = replace_suffix(obj_files[0], shared_lib_suffix)
    link_inputs = ' '.join(obj_files)
    link_libs = ' '.join(libraries)
    command = "%s -xm s -export all %s %s %s -o %s" % (
        c_linker, link_inputs, pythoncore, link_libs, out_file)
    stat = do_toolserver_command(command)
    if stat:
        raise CCompilerError("Linker returned status %s" % stat)
    return out_file
def test_c_compile(link = 0):
objs = []
for arg in sys.argv[1:]:
if arg.endswith(".c"):
try:
obj = c_compile(arg)
except PyrexError, e:
#print "Caught a PyrexError:" ###
#print repr(e) ###
print("%s.%s: %s" % (e.__class__.__module__,
e.__class__.__name__, e))
sys.exit(1)
else:
obj = arg
objs.append(obj)
if link:
c_link_list(objs)
#
# Pyrex -- Misc Mac-specific things
#
import os, MacOS, macfs
def open_new_file(path):
    # On the Mac, try to preserve Finder position
    # of previously existing file.
    # NOTE(review): indentation reconstructed from a mangled dump; confirm
    # that the Creator/Type fix-up below runs unconditionally.
    fsspec = macfs.FSSpec(path)
    try:
        old_finfo = fsspec.GetFInfo()
    except MacOS.Error, e:
        #print "MacUtils.open_new_file:", e ###
        # No pre-existing file (or no Finder info available).
        old_finfo = None
    try:
        os.unlink(path)
    except OSError:
        pass
    file = open(path, "w")
    new_finfo = fsspec.GetFInfo()
    if old_finfo:
        #print "MacUtils.open_new_file:", path ###
        #print "...old file info =", old_finfo.Creator, old_finfo.Type, old_finfo.Location ###
        #print "...new file info =", new_finfo.Creator, new_finfo.Type, new_finfo.Location ###
        # Carry the Finder window position and flags over to the new file.
        new_finfo.Location = old_finfo.Location
        new_finfo.Flags = old_finfo.Flags
    # Make darn sure the type and creator are right. There seems
    # to be a bug in MacPython 2.2 that screws them up sometimes.
    new_finfo.Creator = "R*ch"
    new_finfo.Type = "TEXT"
    fsspec.SetFInfo(new_finfo)
    return file
# Makefile for Darwin
# Change this to your Python source location
PYTHON := /Local/Build/Pythonic/python/2.3
INCLUDE := -I$(PYTHON) -I$(PYTHON)/Include -I$(PYTHON)/Mac/Include
CCOPTS := -fno-strict-aliasing -no-cpp-precomp \
-mno-fused-madd -fno-common -dynamic
LDOPTS := -Wl,-F.,-w -bundle -framework Python -framework Carbon
all: _File.so
# Use the CCOPTS defined above; the previous rule referenced an
# undefined $(OPTS) variable, silently dropping all C compiler flags.
_File.o: _Filemodule_patched.c
	gcc -c $(INCLUDE) $(CCOPTS) $< -o $@
_File.so: _File.o
gcc $(LDOPTS) $< -o $@
"""Suite Misc Suite: Suite that adds additional features to the Application.
Level 1, version 1
Generated from Macintosh HD:Desktop Folder:ToolServer 3.4.1:ToolServer
AETE/AEUT resource version 1/0, language 0, script 0
"""
import aetools
import MacOS
_code = 'misc'
class TS_Misc_Suite(object):
    """Misc Suite wrapper: extra AppleEvents understood by ToolServer."""

    def DoScript(self, _object, _attributes={}, **_arguments):
        """DoScript: Execute an MPW command, any command that could be executed from the command line can be sent as a script.
        Required argument: The script to execute
        Keyword argument _attributes: AppleEvent attribute dictionary
        """
        if _arguments:
            raise TypeError('No optional args expected')
        _arguments['----'] = _object
        _reply, _arguments, _attributes = self.send(
            'misc', 'dosc', _arguments, _attributes)
        # Pick the interesting pieces out of the reply, defaulting each
        # one when ToolServer did not supply it.
        errn = _arguments.get('errn', 0)
        if errn:
            errn = aetools.decodeerror(_arguments)
        stat = _arguments.get('stat', 0)
        stdout = _arguments.get('----', "")
        stderr = _arguments.get('diag', "")
        return (errn, stat, stdout, stderr)
#
# Indices of types declared in this module
#
# (All empty: this generated suite declares no classes, properties,
# comparison operators or enumerations of its own.)

_classdeclarations = {
}

_propdeclarations = {
}

_compdeclarations = {
}

_enumdeclarations = {
}
This diff is collapsed.
#
# Pyrex - Linux system interface
#
verbose = 0

# Switches controlling how strictly gcc is invoked below.
gcc_pendantic = True          # (sic) adds -pedantic -Wno-long-long
gcc_warnings_are_errors = True
gcc_all_warnings = True

import os, sys
from Cython.Utils import replace_suffix
from Cython.Compiler.Errors import PyrexError

# Include directory of the running Python installation.
version = "%s.%s" % sys.version_info[:2]
py_include_dirs = [
    "%s/include/python%s" % (sys.prefix, version)
]

compilers = ["gcc", "g++"]    # indexed by bool(cplus)
# NOTE(review): -Wno-long-double, -no-cpp-precomp and -mno-fused-madd look
# like Darwin-specific gcc flags copied from the Mac backend -- confirm
# the target Linux toolchain accepts them.
compiler_options = \
    "-g -c -fno-strict-aliasing -Wno-long-double -no-cpp-precomp " \
    "-mno-fused-madd -fno-common -dynamic " \
    .split()
if gcc_pendantic:
    compiler_options.extend(["-pedantic", "-Wno-long-long"])
if gcc_warnings_are_errors:
    compiler_options.append("-Werror")
if gcc_all_warnings:
    compiler_options.append("-Wall")
    compiler_options.append("-Wno-unused-function")
linkers = ["gcc", "g++"]      # indexed by bool(cplus)
linker_options = \
    "-shared" \
    .split()
class CCompilerError(PyrexError):
    """Raised when the external C compiler or linker exits with a
    non-zero status."""
    pass
def c_compile(c_file, verbose_flag = 0, cplus = 0, obj_suffix = ".o"):
    """Compile the given C source file into an object file and return
    the pathname of the resulting file.  Raises CCompilerError when the
    compiler exits with a non-zero status."""
    source = os.path.join(os.getcwd(), c_file)
    object_file = replace_suffix(source, obj_suffix)
    include_flags = ["-I%s" % d for d in py_include_dirs]
    cc = compilers[bool(cplus)]
    argv = [cc] + compiler_options + include_flags + [source, "-o", object_file]
    if verbose_flag or verbose:
        print(" ".join(argv))
    status = os.spawnvp(os.P_WAIT, cc, argv)
    if status != 0:
        raise CCompilerError("C compiler returned status %s" % status)
    return object_file
def c_link(obj_file, verbose_flag = 0, extra_objects = None, cplus = 0):
    """Link a single object file (plus any extra_objects) into a
    dynamically loadable extension and return its pathname.

    extra_objects defaults to None rather than a shared mutable list
    literal, avoiding the classic mutable-default-argument pitfall.
    """
    return c_link_list([obj_file] + (extra_objects or []), verbose_flag, cplus)
def c_link_list(obj_files, verbose_flag = 0, cplus = 0):
    """Link the given object files into a dynamically loadable extension
    file and return the pathname of the resulting file.  Raises
    CCompilerError when the linker exits with a non-zero status."""
    result = replace_suffix(obj_files[0], ".so")
    ld = linkers[bool(cplus)]
    argv = [ld] + linker_options + obj_files + ["-o", result]
    if verbose_flag or verbose:
        print(" ".join(argv))
    status = os.spawnvp(os.P_WAIT, ld, argv)
    if status != 0:
        raise CCompilerError("Linker returned status %s" % status)
    return result
# Makefile for creating our standalone Cython program # Makefile for creating our standalone Cython program
PYVERSION=$(shell python -c "import sys; print sys.version[:3]") PYVERSION=$(shell python -c "import sys; print(sys.version[:3])")
PYPREFIX=$(shell python -c "import sys; print sys.prefix") PYPREFIX=$(shell python -c "import sys; print(sys.prefix)")
LINKFORSHARED=$(shell python -c "import distutils.sysconfig; print(distutils.sysconfig.get_config_var('LINKFORSHARED'))")
INCLUDES=-I$(PYPREFIX)/include/python$(PYVERSION) INCLUDES=-I$(PYPREFIX)/include/python$(PYVERSION)
embedded: embedded.o embedded: embedded.o
gcc -o $@ $^ -lpython$(PYVERSION) gcc -o $@ $^ $(LINKFORSHARED) -lpython$(PYVERSION) -lm -lpthread -ldl -lutil -L$(PYPREFIX)/lib
embedded.o: embedded.c embedded.o: embedded.c
gcc -c $^ $(INCLUDES) gcc -c $^ $(INCLUDES)
......
# Declaration of the external sinc() implemented in libmymath (mymath.c).
cdef extern from "mymath.h":
    double sinc(double)

def call_sinc(x):
    """Python-callable wrapper around the C sinc() function."""
    return sinc(x)
#include "math.h"
double sinc(double x) {
return x == 0 ? 1 : sin(x)/x;
}
\ No newline at end of file
double sinc(double);
import os
import sys

from distutils.core import setup
from distutils.extension import Extension

from Cython.Distutils import build_ext
# For demo purposes, we build our own tiny library.
try:
print "building libmymath.a"
assert os.system("gcc -c mymath.c -o mymath.o") == 0
assert os.system("ar rcs libmymath.a mymath.o") == 0
except:
if not os.path.exists("libmymath.a"):
print "Error building external library, please create libmymath.a manually."
sys.exit(1)
# Here is how to use the library built above.
ext_modules=[
    Extension("call_mymath",
              sources = ["call_mymath.pyx"],
              include_dirs = [os.getcwd()], # path to .h file(s)
              library_dirs = [os.getcwd()], # path to .a or .so file(s)
              libraries = ['mymath'])       # links against libmymath.a built above
]

setup(
    name = 'Demos',
    # build_ext from Cython.Distutils compiles the .pyx source to C
    # before the regular distutils compilation step runs.
    cmdclass = {'build_ext': build_ext},
    ext_modules = ext_modules,
)
include MANIFEST.in README.txt INSTALL.txt ToDo.txt USAGE.txt include MANIFEST.in README.txt INSTALL.txt ToDo.txt USAGE.txt
include COPYING.txt LICENSE.txt Makefile include COPYING.txt LICENSE.txt Makefile
recursive-include .hg * include .hgrev
include .hgignore .hgtags
include setup.py include setup.py
include bin/* include bin/*
include cython.py include cython.py
......
PYTHON?=python PYTHON?=python
REPO = http://hg.cython.org/cython-devel
all: local all: local
local: local:
${PYTHON} setup.py build_ext --inplace ${PYTHON} setup.py build_ext --inplace
.hg: REV := $(shell cat .hgrev)
.hg: TMPDIR := $(shell mktemp -d tmprepo.XXXXXX)
.hg:
hg clone --rev $(REV) $(REPO) $(TMPDIR)
hg -R $(TMPDIR) update
mv $(TMPDIR)/.hg .
mv $(TMPDIR)/.hgignore .
mv $(TMPDIR)/.hgtags .
rm -rf $(TMPDIR)
repo: .hg
clean: clean:
@echo Cleaning Source @echo Cleaning Source
@rm -fr build @rm -fr build
......
...@@ -34,7 +34,8 @@ TEST_RUN_DIRS = ['run', 'pyregr'] ...@@ -34,7 +34,8 @@ TEST_RUN_DIRS = ['run', 'pyregr']
# Lists external modules, and a matcher matching tests # Lists external modules, and a matcher matching tests
# which should be excluded if the module is not present. # which should be excluded if the module is not present.
EXT_DEP_MODULES = { EXT_DEP_MODULES = {
'numpy' : re.compile('.*\.numpy_.*').match 'numpy' : re.compile('.*\.numpy_.*').match,
'pstats' : re.compile('.*\.pstats_.*').match
} }
def get_numpy_include_dirs(): def get_numpy_include_dirs():
...@@ -403,30 +404,32 @@ class CythonRunTestCase(CythonCompileTestCase): ...@@ -403,30 +404,32 @@ class CythonRunTestCase(CythonCompileTestCase):
# fork to make sure we do not keep the tested module loaded # fork to make sure we do not keep the tested module loaded
result_handle, result_file = tempfile.mkstemp() result_handle, result_file = tempfile.mkstemp()
os.close(result_handle)
child_id = os.fork() child_id = os.fork()
if not child_id: if not child_id:
result_code = 0 result_code = 0
try: try:
output = os.fdopen(result_handle, 'wb')
tests = None
try: try:
partial_result = PartialTestResult(result) tests = None
tests = doctest.DocTestSuite(module_name) try:
tests.run(partial_result) partial_result = PartialTestResult(result)
gc.collect() tests = doctest.DocTestSuite(module_name)
except Exception: tests.run(partial_result)
if tests is None: gc.collect()
# importing failed, try to fake a test class except Exception:
tests = _FakeClass( if tests is None:
failureException=None, # importing failed, try to fake a test class
shortDescription = self.shortDescription, tests = _FakeClass(
**{module_name: None}) failureException=None,
partial_result.addError(tests, sys.exc_info()) shortDescription = self.shortDescription,
result_code = 1 **{module_name: None})
pickle.dump(partial_result.data(), output) partial_result.addError(tests, sys.exc_info())
except: result_code = 1
import traceback output = open(result_file, 'wb')
traceback.print_exc() pickle.dump(partial_result.data(), output)
except:
import traceback
traceback.print_exc()
finally: finally:
try: output.close() try: output.close()
except: pass except: pass
...@@ -740,8 +743,13 @@ if __name__ == '__main__': ...@@ -740,8 +743,13 @@ if __name__ == '__main__':
''') ''')
sys.path.insert(0, cy3_dir) sys.path.insert(0, cy3_dir)
elif sys.version_info[0] >= 3: elif sys.version_info[0] >= 3:
# make sure we do not import (or run) Cython itself # make sure we do not import (or run) Cython itself (unless
options.with_cython = False # 2to3 was already run)
cy3_dir = os.path.join(WORKDIR, 'Cy3')
if os.path.isdir(cy3_dir):
sys.path.insert(0, cy3_dir)
else:
options.with_cython = False
options.doctests = False options.doctests = False
options.unittests = False options.unittests = False
options.pyregr = False options.pyregr = False
...@@ -856,7 +864,7 @@ if __name__ == '__main__': ...@@ -856,7 +864,7 @@ if __name__ == '__main__':
os.path.join(sys.prefix, 'lib', 'python'+sys.version[:3], 'test'), os.path.join(sys.prefix, 'lib', 'python'+sys.version[:3], 'test'),
'pyregr')) 'pyregr'))
unittest.TextTestRunner(verbosity=options.verbosity).run(test_suite) result = unittest.TextTestRunner(verbosity=options.verbosity).run(test_suite)
if options.coverage: if options.coverage:
coverage.stop() coverage.stop()
...@@ -875,3 +883,5 @@ if __name__ == '__main__': ...@@ -875,3 +883,5 @@ if __name__ == '__main__':
if options.with_refnanny: if options.with_refnanny:
import refnanny import refnanny
sys.stderr.write("\n".join([repr(x) for x in refnanny.reflog])) sys.stderr.write("\n".join([repr(x) for x in refnanny.reflog]))
sys.exit(not result.wasSuccessful())
...@@ -3,6 +3,17 @@ from distutils.sysconfig import get_python_lib ...@@ -3,6 +3,17 @@ from distutils.sysconfig import get_python_lib
import os, os.path import os, os.path
import sys import sys
if 'sdist' in sys.argv:
# Record the current revision in .hgrev
import subprocess # os.popen is cleaner but depricated
changset = subprocess.Popen("hg log --rev tip | grep changeset",
shell=True,
stdout=subprocess.PIPE).stdout.read()
rev = changset.split(':')[-1].strip()
hgrev = open('.hgrev', 'w')
hgrev.write(rev)
hgrev.close()
compiler_dir = os.path.join(get_python_lib(prefix=''), 'Cython/Compiler') compiler_dir = os.path.join(get_python_lib(prefix=''), 'Cython/Compiler')
if sys.platform == "win32": if sys.platform == "win32":
compiler_dir = compiler_dir[len(sys.prefix)+1:] compiler_dir = compiler_dir[len(sys.prefix)+1:]
...@@ -31,7 +42,6 @@ if sys.version_info < (2,4): ...@@ -31,7 +42,6 @@ if sys.version_info < (2,4):
cython_dir = os.path.join(get_python_lib(prefix=''), 'Cython') cython_dir = os.path.join(get_python_lib(prefix=''), 'Cython')
compiler_dir = os.path.join(cython_dir, 'Compiler') compiler_dir = os.path.join(cython_dir, 'Compiler')
setup_args['data_files'] = [ setup_args['data_files'] = [
(compiler_dir, ['Cython/Compiler/Lexicon.pickle']),
(cython_dir, [ f for pattern in (cython_dir, [ f for pattern in
['Cython/Includes/*.pxd', ['Cython/Includes/*.pxd',
'Cython/Plex/*.pxd', 'Cython/Plex/*.pxd',
...@@ -39,8 +49,7 @@ if sys.version_info < (2,4): ...@@ -39,8 +49,7 @@ if sys.version_info < (2,4):
'Cython/Runtime/*.pyx'] 'Cython/Runtime/*.pyx']
for f in glob.glob(pattern) ])] for f in glob.glob(pattern) ])]
else: else:
setup_args['package_data'] = {'Cython.Compiler' : ['Lexicon.pickle'], setup_args['package_data'] = {'Cython' : ['Includes/*.pxd',
'Cython' : ['Includes/*.pxd',
'Plex/*.pxd', 'Plex/*.pxd',
'Compiler/*.pxd', 'Compiler/*.pxd',
'Runtime/*.pyx']} 'Runtime/*.pyx']}
...@@ -161,8 +170,6 @@ setup( ...@@ -161,8 +170,6 @@ setup(
'Cython.Compiler', 'Cython.Compiler',
'Cython.Runtime', 'Cython.Runtime',
'Cython.Distutils', 'Cython.Distutils',
'Cython.Mac',
'Cython.Unix',
'Cython.Plex', 'Cython.Plex',
'Cython.Tests', 'Cython.Tests',
......
...@@ -6,4 +6,5 @@ class_attribute_init_values_T18 ...@@ -6,4 +6,5 @@ class_attribute_init_values_T18
numpy_ValueError_T172 numpy_ValueError_T172
unsignedbehaviour_T184 unsignedbehaviour_T184
missing_baseclass_in_predecl_T262 missing_baseclass_in_predecl_T262
tp_new_T454 cfunc_call_tuple_args_T408
cascaded_list_unpacking_T467
# cython: boundscheck = False # cython: boundscheck = False
# cython: ignoreme = OK # cython: ignoreme = OK
# cython: warn.undeclared = False
# This testcase is most useful if you inspect the generated C file # This testcase is most useful if you inspect the generated C file
...@@ -32,3 +33,8 @@ def i(object[int] buf): ...@@ -32,3 +33,8 @@ def i(object[int] buf):
with bc(True): with bc(True):
print buf[3] # bs print buf[3] # bs
from cython cimport warn as my_warn

# Applies the cimported 'warn' directive as a decorator; presumably this
# enables the 'undeclared' warning for just this function — confirm
# against the directive handling in the compiler.
@my_warn(undeclared=True)
def j():
    pass
...@@ -9,6 +9,7 @@ cdef class Swallow: ...@@ -9,6 +9,7 @@ cdef class Swallow:
def f(Grail g): def f(Grail g):
cdef int i = 0 cdef int i = 0
cdef Swallow s cdef Swallow s
cdef object x
g = x g = x
x = g x = g
g = i g = i
......
cdef class vector: cdef class vector:
def __div__(vector self, double factor): def __div__(vector self, double factor):
result = vector() cdef object result = vector()
return result return result
# Redeclares the builtin 'list' as an extern extension type and then
# assigns an empty list to a 'cdef list' variable.
cdef extern from *:
    ctypedef class __builtin__.list [object PyListObject]:
        pass

cdef list foo = []

# This is too invasive for Python 0.11.x, re-enable in 0.12
# NEW_ERRORS holds the diagnostics expected once the stricter check is
# re-enabled (see note above); _ERRORS is the currently expected output,
# presumably matched by the error-test harness.
NEW_ERRORS = u"""
:2:4: list already a builtin Cython type
"""

_ERRORS = u"""
5:16: Cannot coerce list to type 'list'
"""
...@@ -15,17 +15,17 @@ cdef void eggs(Spam s): ...@@ -15,17 +15,17 @@ cdef void eggs(Spam s):
j = s.k # error - undef attribute j = s.k # error - undef attribute
j = s.p # type error j = s.p # type error
s.p = j # type error s.p = j # type error
j = j.i # error - no attributes j = j.i # no error - coercion to Python object
j.i = j # error - no attributes j.i = j # no error - coercion to Python object
j = gp.x # error - incomplete type j = gp.x # error - incomplete type
gp.x = j # error - incomplete type gp.x = j # error - incomplete type
_ERRORS = u""" _ERRORS = u"""
5:36: C struct/union member cannot be a Python object 5:36: C struct/union member cannot be a Python object
15:6: Object of type 'Spam' has no attribute 'k' 15:6: Object of type 'Spam' has no attribute 'k'
16:6: Cannot assign type 'float *[42]' to 'int' 16:6: Cannot assign type 'float *[42]' to 'int'
17:21: Cannot assign type 'int' to 'float *[42]' 17:21: Cannot assign type 'int' to 'float *[42]'
18:6: Object of type 'int' has no attribute 'i'
19:2: Object of type 'int' has no attribute 'i'
20:7: Cannot select attribute of incomplete type 'Grail' 20:7: Cannot select attribute of incomplete type 'Grail'
21:3: Cannot select attribute of incomplete type 'Grail' 21:3: Cannot select attribute of incomplete type 'Grail'
""" """
...@@ -8,6 +8,7 @@ def test(): ...@@ -8,6 +8,7 @@ def test():
neg neg
pos pos
""" """
cdef object D
cdef long neg = -1 cdef long neg = -1
cdef unsigned long pos = -2 # will be a large positive number cdef unsigned long pos = -2 # will be a large positive number
......
...@@ -177,7 +177,7 @@ def char3int(fmt): ...@@ -177,7 +177,7 @@ def char3int(fmt):
... ...
ValueError: Buffer dtype mismatch, expected 'int' but got end in 'Char3Int.d' ValueError: Buffer dtype mismatch, expected 'int' but got end in 'Char3Int.d'
""" """
obj = MockBuffer(fmt, sizeof(Char3Int)) cdef object obj = MockBuffer(fmt, sizeof(Char3Int))
cdef object[Char3Int, ndim=1] buf = obj cdef object[Char3Int, ndim=1] buf = obj
@testcase @testcase
...@@ -195,7 +195,7 @@ def unpacked_struct(fmt): ...@@ -195,7 +195,7 @@ def unpacked_struct(fmt):
assert (sizeof(UnpackedStruct1) == sizeof(UnpackedStruct2) assert (sizeof(UnpackedStruct1) == sizeof(UnpackedStruct2)
== sizeof(UnpackedStruct3) == sizeof(UnpackedStruct4)) == sizeof(UnpackedStruct3) == sizeof(UnpackedStruct4))
obj = MockBuffer(fmt, sizeof(UnpackedStruct1)) cdef object obj = MockBuffer(fmt, sizeof(UnpackedStruct1))
cdef object[UnpackedStruct1, ndim=1] buf1 = obj cdef object[UnpackedStruct1, ndim=1] buf1 = obj
cdef object[UnpackedStruct2, ndim=1] buf2 = obj cdef object[UnpackedStruct2, ndim=1] buf2 = obj
cdef object[UnpackedStruct3, ndim=1] buf3 = obj cdef object[UnpackedStruct3, ndim=1] buf3 = obj
...@@ -218,7 +218,7 @@ def complex_test(fmt): ...@@ -218,7 +218,7 @@ def complex_test(fmt):
ValueError: Buffer dtype mismatch, expected 'float' but got 'complex float' in 'ComplexFloat.imag' ValueError: Buffer dtype mismatch, expected 'float' but got 'complex float' in 'ComplexFloat.imag'
""" """
obj = MockBuffer(fmt, sizeof(ComplexTest)) cdef object obj = MockBuffer(fmt, sizeof(ComplexTest))
cdef object[ComplexTest] buf1 = obj cdef object[ComplexTest] buf1 = obj
......
__doc__ = u"""
>>> test_xrange()
0
1
2
>>> test_range()
0
1
2
>>> test_long() == 12
True
>>> test_int() == 12
True
"""
# the builtins 'xrange' and 'long' are not available in Py3, but they
# can safely be replaced by 'range' and 'int' on that platform
import sys
IS_PY3 = sys.version_info[0] >= 3
def test_xrange():
    # Exercises the builtin 'xrange'; per the module note above, it can
    # be replaced by 'range' under Python 3.
    r = xrange(3)
    assert type(r) is xrange
    for i in r:
        print i
def test_range():
    # range() returns a list on Py2 and a range object on Py3; the
    # assertion accepts either depending on IS_PY3.
    r = range(3)
    assert (type(r) is range) if IS_PY3 else (type(r) is list)
    for i in r:
        print i
def test_long():
    # 'long' does not exist on Py3; per the module note above it can be
    # replaced by 'int' there.
    long_val = long(12)
    assert type(long_val) is long
    return long_val
def test_int():
    # Build an int from a literal and check the result really is an
    # exact 'int' instance before handing it back.
    value = int(12)
    assert type(value) is int
    return value
cimport cython
@cython.test_assert_path_exists(
    '//PythonCapiCallNode/PythonCapiFunctionNode[@cname="Py_TYPE"]')
def get_type_of(a):
    """
    >>> get_type_of(object()) is object
    True
    """
    # The decorator asserts that Cython compiles this type() call down to
    # the Py_TYPE() C-API call; do not restructure without checking that.
    return type(a)
@cython.test_assert_path_exists(
    '//PythonCapiCallNode/PythonCapiFunctionNode[@cname="Py_TYPE"]')
def get_type_through_local(a):
    """
    >>> get_type_through_local(object()) is object
    True
    """
    # Fix: the doctest previously called get_type_of(), so this function
    # was never exercised by its own doctest.
    # The decorator asserts the type() call still maps to Py_TYPE() even
    # when the result passes through a local variable first.
    t = type(a)
    return t
@cython.test_assert_path_exists(
    '//PythonCapiCallNode/PythonCapiFunctionNode[@cname="Py_TYPE"]')
@cython.test_fail_if_path_exists(
    '//PythonCapiCallNode/PythonCapiFunctionNode[@cname="__Pyx_Type"]',
    '//NameNode[@name="type"]')
def test_type(a, t):
    """
    >>> test_type(object(), object)
    True
    """
    # All three uses of type() here (truth test, 'is', '==') must compile
    # to Py_TYPE(), never to a __Pyx_Type call or a lookup of the name
    # 'type' (per the decorators above).
    return type(a) and type(a) is t and type(a) == t
@cython.test_assert_path_exists('//NameNode[@name="type"]')
def type_type():
    """
    >>> type_type()(object()) is object
    True
    """
    # Returning the builtin itself must keep the name lookup (the
    # decorator asserts a NameNode for 'type' remains in the tree).
    return type
cimport cython
def test_file_py(file): def test_file_py(file):
assert isinstance(file, (str, unicode)), \ assert isinstance(file, (str, unicode)), \
u"not a string, found '%s' instead" % file.__class__.__name__ u"not a string, found '%s' instead" % file.__class__.__name__
...@@ -12,19 +14,45 @@ cdef test_file_c(file): ...@@ -12,19 +14,45 @@ cdef test_file_c(file):
def range(arg): def range(arg):
return u'range' + arg return u'range' + arg
def len(arg):
    # Module-level redefinition that shadows the builtin len() for this test.
    return u'len' + arg
cdef type(arg): cdef type(arg):
return u'type' + arg return u'type' + arg
@cython.test_fail_if_path_exists(
'//SimpleCallNode/NameNode[@name="type" and @entry.is_cfunction=False]',
'//SimpleCallNode/NameNode[@name="len" and @entry.is_cfunction=True]',
)
@cython.test_assert_path_exists(
'//SimpleCallNode/NameNode[@name="type"]',
'//SimpleCallNode/NameNode[@name="type" and @entry.is_cfunction=True]',
'//SimpleCallNode/NameNode[@name="len"]',
)
def test_c(arg): def test_c(arg):
""" """
>>> test_c('abc') >>> test_c('abc')
fileabc fileabc
lenabc
typeabc typeabc
>>> print(test_file_py('abc')) >>> print(test_file_py('abc'))
abc abc
>>> print(range('abc')) >>> print(range('abc'))
rangeabc rangeabc
>>> print(len('abc'))
lenabc
""" """
print test_file_c(arg) print test_file_c(arg)
print len(arg)
print type(arg) print type(arg)
def test_for_in_range(arg):
    """
    >>> print(str(test_for_in_range('abc')).replace("u'", "'"))
    ['r', 'a', 'n', 'g', 'e', 'a', 'b', 'c']
    """
    # 'range' here is the module-level function defined above (returning
    # u'range' + arg), not the builtin, so the loop iterates over the
    # characters of that concatenated string.
    l = []
    for c in range(arg):
        l.append(c)
    return l
__doc__ = ''

import sys

# Only add the doctests the running interpreter can actually satisfy:
# the first group needs 2.6+, the second needs 3.1+ (matching the
# version checks below).
if sys.version_info >= (2,6):
    __doc__ += '''
>>> float_is_integer(1.0)
True
>>> float_is_integer(1.1)
False
'''
if sys.version_info >= (3,1):
    __doc__ += '''
>>> int_bit_length(1) == (1).bit_length()
True
>>> int_bit_length(1234) == (1234).bit_length()
True
'''
def float_is_integer(float f):
    # requires Python 2.6+ (guarded by the version check in __doc__ above)
    return f.is_integer()
def int_bit_length(int i):
    # requires Python 3.x (guarded by the 3.1 version check in __doc__ above)
    return i.bit_length()
def float__add__(float f):
    """
    >>> float__add__(5.0)
    7.0
    """
    # Calls the special method explicitly on a C-typed double parameter.
    return f.__add__(2)
def int__add__(int i):
    """
    >>> int__add__(5)
    7
    """
    # Calls the special method explicitly on a C-typed int parameter.
    return i.__add__(2)
...@@ -13,39 +13,6 @@ def slice_charptr_end(): ...@@ -13,39 +13,6 @@ def slice_charptr_end():
""" """
return cstring[:1], cstring[:3], cstring[:9] return cstring[:1], cstring[:3], cstring[:9]
@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def slice_charptr_decode():
"""
>>> print(str(slice_charptr_decode()).replace("u'", "'"))
('a', 'abc', 'abcABCqtp')
"""
return (cstring[:1].decode('UTF-8'),
cstring[:3].decode('UTF-8'),
cstring[:9].decode('UTF-8'))
@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def slice_charptr_decode_unbound():
"""
>>> print(str(slice_charptr_decode_unbound()).replace("u'", "'"))
('a', 'abc', 'abcABCqtp')
"""
return (bytes.decode(cstring[:1], 'UTF-8'),
bytes.decode(cstring[:3], 'UTF-8', 'replace'),
bytes.decode(cstring[:9], 'UTF-8'))
@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def slice_charptr_decode_errormode():
"""
>>> print(str(slice_charptr_decode_errormode()).replace("u'", "'"))
('a', 'abc', 'abcABCqtp')
"""
return (cstring[:1].decode('UTF-8', 'strict'),
cstring[:3].decode('UTF-8', 'replace'),
cstring[:9].decode('UTF-8', 'unicode_escape'))
@cython.test_assert_path_exists("//ForFromStatNode", @cython.test_assert_path_exists("//ForFromStatNode",
"//ForFromStatNode//SliceIndexNode") "//ForFromStatNode//SliceIndexNode")
@cython.test_fail_if_path_exists("//ForInStatNode") @cython.test_fail_if_path_exists("//ForInStatNode")
...@@ -95,7 +62,7 @@ def slice_charptr_for_loop_c(): ...@@ -95,7 +62,7 @@ def slice_charptr_for_loop_c():
@cython.test_fail_if_path_exists("//ForInStatNode") @cython.test_fail_if_path_exists("//ForInStatNode")
def slice_charptr_for_loop_c_dynamic_bounds(): def slice_charptr_for_loop_c_dynamic_bounds():
""" """
>>> slice_charptr_for_loop_c() >>> slice_charptr_for_loop_c_dynamic_bounds()
['a', 'b', 'c'] ['a', 'b', 'c']
['b', 'c', 'A', 'B'] ['b', 'c', 'A', 'B']
['B', 'C', 'q', 't', 'p'] ['B', 'C', 'q', 't', 'p']
......
def simple_parallel_assignment_from_call():
    """
    >>> simple_parallel_assignment_from_call()
    (2, 1, 2, 1, 2, 1, 2, [1, 2], [1, 2])
    """
    cdef int ai, bi
    cdef long al, bl
    cdef object ao, bo
    # Snapshot the global call counter so the doctest can prove each
    # intval() call is evaluated exactly once (count of 2) despite the
    # chained parallel-assignment targets.
    cdef int side_effect_count = call_count
    ai, bi = al, bl = ao, bo = c = d = [intval(1), intval(2)]
    side_effect_count = call_count - side_effect_count
    return side_effect_count, ao, bo, ai, bi, al, bl, c, d
def recursive_parallel_assignment_from_call():
    """
    >>> recursive_parallel_assignment_from_call()
    (3, 1, 2, 3, 1, 2, 3, (1, 2), 3, [(1, 2), 3])
    """
    cdef int ai, bi, ci
    cdef object ao, bo, co
    # Snapshot the global call counter so the doctest can prove each of
    # the three intval() calls is evaluated exactly once even with nested
    # unpacking targets like (ai, bi), ci.
    cdef int side_effect_count = call_count
    (ai, bi), ci = (ao, bo), co = t,o = d = [(intval(1), intval(2)), intval(3)]
    side_effect_count = call_count - side_effect_count
    return side_effect_count, ao, bo, co, ai, bi, ci, t, o, d
# Global counter incremented by every intval() call.
cdef int call_count = 0

cdef int intval(int x):
    # Identity function that counts how many times it is evaluated, so
    # the tests above can assert on evaluation counts of RHS list items.
    global call_count
    call_count += 1
    return x
# extension to T409
def simple_parallel_typed():
    """
    >>> simple_parallel_typed()
    (1, 2, [1, 2], [1, 2])
    """
    cdef int a,c
    # Chained assignment: 'd' and 'e' keep the list object itself while
    # 'a, c' unpack its items into C ints.
    a, c = d = e = [1,2]
    return a, c, d, e
def simple_parallel_int_mix():
    """
    >>> simple_parallel_int_mix()
    (1, 2, 1, 2, 1, 2, [1, 2], [1, 2])
    """
    cdef int ai,bi
    cdef long al,bl
    cdef object ao, bo
    # One RHS list is unpacked into C ints, C longs and Python objects in
    # a single chained assignment; 'c' and 'd' keep the list itself.
    ai, bi = al, bl = ao, bo = c = d = [1,2]
    return ao, bo, ai, bi, al, bl, c, d
This diff is collapsed.
cimport cython
cdef class cclass:
    # Checks cython.typeof() results for 'self' under various method
    # signatures: a declared 'self' reports the extension type name
    # ('cclass'), while arguments received via *args report as plain
    # 'Python object'.

    def test_self(self):
        """
        >>> cclass().test_self()
        'cclass'
        """
        return cython.typeof(self)

    def test_self_1(self, arg):
        """
        >>> cclass().test_self_1(1)
        ('cclass', 1)
        """
        return cython.typeof(self), arg

    def test_self_args(self, *args):
        """
        >>> cclass().test_self_args(1,2,3)
        ('cclass', (1, 2, 3))
        """
        return cython.typeof(self), args

    def test_args(*args):
        """
        >>> cclass().test_args(1,2,3)
        ('Python object', (1, 2, 3))
        """
        # No explicit 'self': the instance arrives as args[0] and is
        # typed as a plain Python object.
        return cython.typeof(args[0]), args[1:]

    def test_args_kwargs(*args, **kwargs):
        """
        >>> cclass().test_args_kwargs(1,2,3, a=4)
        ('Python object', (1, 2, 3), {'a': 4})
        """
        return cython.typeof(args[0]), args[1:], kwargs
This diff is collapsed.
...@@ -2,10 +2,10 @@ def no_cdef(): ...@@ -2,10 +2,10 @@ def no_cdef():
""" """
>>> no_cdef() >>> no_cdef()
""" """
lst = list(range(11)) cdef object lst = list(range(11))
ob = 10L ob = 10L
lst[ob] = -10 lst[ob] = -10
dd = {} cdef object dd = {}
dd[ob] = -10 dd[ob] = -10
def with_cdef(): def with_cdef():
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment