Commit f968f684 authored by Stefan Behnel's avatar Stefan Behnel

merge of 0.9.8.1 beta2

parents 857e7852 e751a61a
......@@ -17,8 +17,8 @@ special_chars = [(u'<', u'\xF0', u'&lt;'),
class AnnotationCCodeWriter(CCodeWriter):
def __init__(self, create_from=None, buffer=None):
CCodeWriter.__init__(self, create_from, buffer)
def __init__(self, create_from=None, buffer=None, copy_formatting=True):
CCodeWriter.__init__(self, create_from, buffer, copy_formatting=True)
self.annotation_buffer = StringIO()
if create_from is None:
self.annotations = []
......@@ -30,8 +30,8 @@ class AnnotationCCodeWriter(CCodeWriter):
self.annotations = create_from.annotations
self.code = create_from.code
def create_new(self, create_from, buffer):
return AnnotationCCodeWriter(create_from, buffer)
def create_new(self, create_from, buffer, copy_formatting):
return AnnotationCCodeWriter(create_from, buffer, copy_formatting)
def write(self, s):
CCodeWriter.write(self, s)
......
This diff is collapsed.
......@@ -180,9 +180,10 @@ def init_builtins():
init_builtin_funcs()
init_builtin_types()
init_builtin_structs()
global list_type, tuple_type, dict_type
global list_type, tuple_type, dict_type, unicode_type
list_type = builtin_scope.lookup('list').type
tuple_type = builtin_scope.lookup('tuple').type
dict_type = builtin_scope.lookup('dict').type
unicode_type = builtin_scope.lookup('unicode').type
init_builtins()
......@@ -38,6 +38,7 @@ Options:
-a, --annotate Produce a colorized HTML version of the source.
--convert-range Convert for loops using range() function to for...from loops.
--cplus Output a c++ rather than c file.
-O, --option <name>=<value>[,<name>=<value>,...]  Override an optimization/code generation option.
"""
#The following experimental options are supported only on MacOSX:
# -C, --compile Compile generated .c file to .o file
......@@ -45,37 +46,12 @@ Options:
# -+, --cplus Use C++ compiler for compiling and linking
# Additional .o files to link may be supplied when using -X."""
#The following options are very experimental and are used for plugging in code
#into different transform stages.
# -T phase:factory At the phase given, hand off the tree to the transform returned
# when calling factory without arguments. Factory should be fully
# specified (ie Module.SubModule.factory) and the containing module
# will be imported. This option can be repeated to add more transforms,
# transforms for the same phase will be used in the order they are given.
def bad_usage():
sys.stderr.write(usage)
sys.exit(1)
def parse_command_line(args):
def parse_add_transform(transforms, param):
from Main import PHASES
def import_symbol(fqn):
modsplitpt = fqn.rfind(".")
if modsplitpt == -1: bad_usage()
modulename = fqn[:modsplitpt]
symbolname = fqn[modsplitpt+1:]
module = __import__(modulename, globals(), locals(), [symbolname])
return getattr(module, symbolname)
stagename, factoryname = param.split(":")
if not stagename in PHASES:
bad_usage()
factory = import_symbol(factoryname)
transform = factory()
transforms[stagename].append(transform)
from Cython.Compiler.Main import \
CompilationOptions, default_options
......@@ -138,9 +114,12 @@ def parse_command_line(args):
Options.annotate = True
elif option == "--convert-range":
Options.convert_range = True
elif option.startswith("-T"):
parse_add_transform(options.transforms, get_param(option))
# Note: this can occur multiple times, each time appends
elif option in ("-O", "--option"):
try:
options.pragma_overrides = Options.parse_option_list(pop_arg())
except ValueError, e:
sys.stderr.write("Error in option string: %s\n" % e.message)
sys.exit(1)
else:
bad_usage()
else:
......
This diff is collapsed.
from Cython.Compiler.Visitor import VisitorTransform, temp_name_handle, CythonTransform
from Cython.Compiler.ModuleNode import ModuleNode
from Cython.Compiler.Nodes import *
from Cython.Compiler.ExprNodes import *
class ExtractPxdCode(CythonTransform):
"""
Finds nodes in a pxd file that should generate code, and
returns them in a StatListNode.
The result is a tuple (StatListNode, ModuleScope), i.e.
everything that is needed from the pxd after it is processed.
A purer approach would be to separately compile the pxd code,
but the result would have to be slightly more sophisticated
than pure strings (functions + wanted interned strings +
wanted utility code + wanted cached objects) so for now this
approach is taken.
"""
def __call__(self, root):
self.funcs = []
self.visitchildren(root)
return (StatListNode(root.pos, stats=self.funcs), root.scope)
def visit_FuncDefNode(self, node):
self.funcs.append(node)
# Do not visit children, nested funcdefnodes will
# also be moved by this action...
return node
from Symtab import ModuleScope
from PyrexTypes import *
shape_func_type = CFuncType(
c_ptr_type(c_py_ssize_t_type),
[CFuncTypeArg("buffer", py_object_type, None)])
class CythonScope(ModuleScope):
def __init__(self, context):
ModuleScope.__init__(self, u'cython', None, context)
self.pxd_file_loaded = True
self.shape_entry = self.declare_cfunction('shape',
shape_func_type,
pos=None,
visibility='public',
cname='<error>')
def create_cython_scope(context):
return CythonScope(context)
......@@ -104,7 +104,7 @@ def report_error(err):
def error(position, message):
#print "Errors.error:", repr(position), repr(message) ###
err = CompileError(position, message)
# if position is not None: raise Exception(err) # debug
#if position is not None: raise Exception(err) # debug
report_error(err)
return err
......
......@@ -10,7 +10,7 @@ import Naming
from Nodes import Node
import PyrexTypes
from PyrexTypes import py_object_type, c_long_type, typecast, error_type
from Builtin import list_type, tuple_type, dict_type
from Builtin import list_type, tuple_type, dict_type, unicode_type
import Symtab
import Options
from Annotate import AnnotationItem
......@@ -708,8 +708,7 @@ class StringNode(ConstNode):
def coerce_to(self, dst_type, env):
if dst_type.is_int:
if not self.type.is_pyobject and len(self.entry.init) == 1:
# we use the *encoded* value here
return CharNode(self.pos, value=self.entry.init)
return CharNode(self.pos, value=self.value)
else:
error(self.pos, "Only coerce single-character ascii strings can be used as ints.")
return self
......@@ -741,7 +740,7 @@ class StringNode(ConstNode):
class UnicodeNode(PyConstNode):
# entry Symtab.Entry
type = PyrexTypes.c_unicode_type
type = unicode_type
def analyse_types(self, env):
self.entry = env.add_string_const(self.value)
......@@ -759,6 +758,9 @@ class UnicodeNode(PyConstNode):
# We still need to perform normal coerce_to processing on the
# result, because we might be coercing to an extension type,
# in which case a type test node will be needed.
def compile_time_value(self, env):
return self.value
class IdentifierStringNode(ConstNode):
......@@ -913,9 +915,6 @@ class NameNode(AtomicExprNode):
self.type = PyrexTypes.error_type
self.entry.used = 1
if self.entry.type.is_buffer:
# Have an rhs temp just in case. All rhs I could
# think of had a single symbol result_code but better
# safe than sorry. Feel free to change this.
import Buffer
Buffer.used_buffer_aux_vars(self.entry)
......@@ -992,6 +991,9 @@ class NameNode(AtomicExprNode):
entry = self.entry
if entry:
entry.used = 1
if entry.type.is_buffer:
import Buffer
Buffer.used_buffer_aux_vars(entry)
if entry.utility_code:
env.use_utility_code(entry.utility_code)
......@@ -1093,7 +1095,7 @@ class NameNode(AtomicExprNode):
rhs.generate_post_assignment_code(code)
def generate_acquire_buffer(self, rhs, code):
rhstmp = code.func.allocate_temp(self.entry.type)
rhstmp = code.funcstate.allocate_temp(self.entry.type)
buffer_aux = self.entry.buffer_aux
bufstruct = buffer_aux.buffer_info_var.cname
code.putln('%s = %s;' % (rhstmp, rhs.result_as(self.ctype())))
......@@ -1103,7 +1105,7 @@ class NameNode(AtomicExprNode):
is_initialized=not self.skip_assignment_decref,
pos=self.pos, code=code)
code.putln("%s = 0;" % rhstmp)
code.func.release_temp(rhstmp)
code.funcstate.release_temp(rhstmp)
def generate_deletion_code(self, code):
if self.entry is None:
......@@ -1370,6 +1372,9 @@ class IndexNode(ExprNode):
self.is_buffer_access = False
self.base.analyse_types(env)
# Handle the case where base is a literal char* (and we expect a string, not an int)
if isinstance(self.base, StringNode):
self.base = self.base.coerce_to_pyobject(env)
skip_child_analysis = False
buffer_access = False
......@@ -1392,6 +1397,7 @@ class IndexNode(ExprNode):
self.index = None
self.type = self.base.type.dtype
self.is_buffer_access = True
self.buffer_type = self.base.entry.type
if getting:
# we only need a temp because result_code isn't refactored to
......@@ -1479,8 +1485,13 @@ class IndexNode(ExprNode):
def generate_result_code(self, code):
if self.is_buffer_access:
valuecode = self.buffer_access_code(code)
code.putln("%s = %s;" % (self.result_code, valuecode))
ptrcode = self.buffer_lookup_code(code)
code.putln("%s = *%s;" % (
self.result_code,
self.buffer_type.buffer_ptr_type.cast_code(ptrcode)))
# Must incref the value we pulled out.
if self.buffer_type.dtype.is_pyobject:
code.putln("Py_INCREF((PyObject*)%s);" % self.result_code)
elif self.type.is_pyobject:
if self.index.type.is_int:
function = "__Pyx_GetItemInt"
......@@ -1518,8 +1529,26 @@ class IndexNode(ExprNode):
def generate_assignment_code(self, rhs, code):
self.generate_subexpr_evaluation_code(code)
if self.is_buffer_access:
valuecode = self.buffer_access_code(code)
code.putln("%s = %s;" % (valuecode, rhs.result_code))
ptrexpr = self.buffer_lookup_code(code)
if self.buffer_type.dtype.is_pyobject:
# Must manage refcounts. Decref what is already there
# and incref what we put in.
ptr = code.funcstate.allocate_temp(self.buffer_type.buffer_ptr_type)
if rhs.is_temp:
rhs_code = code.funcstate.allocate_temp(rhs.type)
else:
rhs_code = rhs.result_code
code.putln("%s = %s;" % (ptr, ptrexpr))
code.putln("Py_DECREF(*%s); Py_INCREF(%s);" % (
ptr, rhs_code
))
code.putln("*%s = %s;" % (ptr, rhs_code))
if rhs.is_temp:
code.funcstate.release_temp(rhs_code)
code.funcstate.release_temp(ptr)
else:
# Simple case
code.putln("*%s = %s;" % (ptrexpr, rhs.result_code))
elif self.type.is_pyobject:
self.generate_setitem_code(rhs.py_result(), code)
else:
......@@ -1546,19 +1575,18 @@ class IndexNode(ExprNode):
code.error_goto(self.pos)))
self.generate_subexpr_disposal_code(code)
def buffer_access_code(self, code):
def buffer_lookup_code(self, code):
# Assign indices to temps
index_temps = [code.func.allocate_temp(i.type) for i in self.indices]
index_temps = [code.funcstate.allocate_temp(i.type) for i in self.indices]
for temp, index in zip(index_temps, self.indices):
code.putln("%s = %s;" % (temp, index.result_code))
# Generate buffer access code using these temps
import Buffer
valuecode = Buffer.put_access(entry=self.base.entry,
index_signeds=[i.type.signed for i in self.indices],
index_cnames=index_temps,
pos=self.pos, code=code)
return valuecode
return Buffer.put_buffer_lookup_code(entry=self.base.entry,
index_signeds=[i.type.signed for i in self.indices],
index_cnames=index_temps,
options=self.options,
pos=self.pos, code=code)
class SliceIndexNode(ExprNode):
# 2-element slice indexing
......
"""
This module deals with interpreting the parse tree as Python
would have done, in the compiler.
For now this only covers parse tree to value conversion of
compile-time values.
"""
from Nodes import *
from ExprNodes import *
from Visitor import BasicVisitor
from Errors import CompileError
class EmptyScope:
def lookup(self, name):
return None
empty_scope = EmptyScope()
def interpret_compiletime_options(optlist, optdict, type_env=None):
"""
Tries to interpret a list of compile time option nodes.
The result will be a tuple (optlist, optdict) but where
all expression nodes have been interpreted. The result is
in the form of tuples (value, pos).
optlist is a list of nodes, while optdict is a DictNode (the
result optdict is a dict)
If type_env is set, all type nodes will be analysed and the resulting
type set. Otherwise only interpretable ExprNodes
are allowed; other nodes raise errors.
A CompileError will be raised if there are problems.
"""
def interpret(node):
if isinstance(node, CBaseTypeNode):
if type_env:
return (node.analyse(type_env), node.pos)
else:
raise CompileError(node.pos, "Type not allowed here.")
else:
return (node.compile_time_value(empty_scope), node.pos)
if optlist:
optlist = [interpret(x) for x in optlist]
if optdict:
assert isinstance(optdict, DictNode)
new_optdict = {}
for item in optdict.key_value_pairs:
new_optdict[item.key.value] = interpret(item.value)
optdict = new_optdict
return (optlist, optdict)
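Since the (value, pos) contract above is only described in prose, here is a small standalone sketch (not part of this commit) that mimics it with a hypothetical stand-in node class, so it does not depend on the real ExprNodes classes:

class FakeExprNode(object):
    # Hypothetical stand-in for an expression node that knows its
    # compile-time value and source position.
    def __init__(self, value, pos):
        self.value = value
        self.pos = pos
    def compile_time_value(self, env):
        return self.value

def interpret_node(node, env=None):
    # Mirrors the non-type branch of interpret() above: evaluate the
    # node at compile time and pair the result with its position.
    return (node.compile_time_value(env), node.pos)

optlist = [FakeExprNode(3, ('mod.pyx', 1, 8)),
           FakeExprNode(u'full', ('mod.pyx', 1, 11))]
print [interpret_node(n) for n in optlist]
# -> [(3, ('mod.pyx', 1, 8)), (u'full', ('mod.pyx', 1, 11))]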
......@@ -63,10 +63,11 @@ def make_lexicon():
three_oct = octdigit + octdigit + octdigit
two_hex = hexdigit + hexdigit
four_hex = two_hex + two_hex
escapeseq = Str("\\") + (two_oct | three_oct | two_hex |
escapeseq = Str("\\") + (two_oct | three_oct |
Str('u') + four_hex | Str('x') + two_hex |
Str('U') + four_hex + four_hex | AnyChar)
deco = Str("@")
bra = Any("([{")
ket = Any(")]}")
......@@ -75,9 +76,12 @@ def make_lexicon():
"+=", "-=", "*=", "/=", "%=", "|=", "^=", "&=",
"<<=", ">>=", "**=", "//=")
spaces = Rep1(Any(" \t\f"))
comment = Str("#") + Rep(AnyBut("\n"))
escaped_newline = Str("\\\n")
lineterm = Eol + Opt(Str("\n"))
comment_start = Str("#")
comment = comment_start + Rep(AnyBut("\n"))
option_comment = comment_start + Str("cython:") + Rep(AnyBut("\n"))
return Lexicon([
(name, 'IDENT'),
......@@ -94,11 +98,13 @@ def make_lexicon():
#(stringlit, 'STRING'),
(beginstring, Method('begin_string_action')),
(option_comment, Method('option_comment')),
(comment, IGNORE),
(spaces, IGNORE),
(escaped_newline, IGNORE),
State('INDENT', [
(option_comment + lineterm, Method('option_comment')),
(Opt(spaces) + Opt(comment) + lineterm, IGNORE),
(indentation, Method('indentation_action')),
(Eof, Method('eof_action'))
......
This diff is collapsed.
......@@ -23,7 +23,7 @@ import Version
from Errors import error, warning
from PyrexTypes import py_object_type
from Cython.Utils import open_new_file, replace_suffix, escape_byte_string
from Cython.Utils import open_new_file, replace_suffix, escape_byte_string, EncodedString
def check_c_classes(module_node):
......@@ -45,9 +45,10 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
def analyse_declarations(self, env):
if Options.embed_pos_in_docstring:
env.doc = 'File: %s (starting at line %s)'%Nodes.relative_position(self.pos)
env.doc = EncodedString(u'File: %s (starting at line %s)' % Nodes.relative_position(self.pos))
if not self.doc is None:
env.doc = env.doc + '\\n' + self.doc
env.doc = EncodedString(env.doc + u'\n' + self.doc)
env.doc.encoding = self.doc.encoding
else:
env.doc = self.doc
self.body.analyse_declarations(env)
......@@ -242,16 +243,20 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
h_code = code.insertion_point()
self.generate_module_preamble(env, modules, h_code)
code.globalstate.module_pos = self.pos
code.putln("")
code.putln("/* Implementation of %s */" % env.qualified_name)
self.generate_const_definitions(env, code)
self.generate_interned_num_decls(env, code)
self.generate_interned_string_decls(env, code)
self.generate_py_string_decls(env, code)
code.globalstate.insert_global_var_declarations_into(code)
self.generate_cached_builtins_decls(env, code)
self.body.generate_function_definitions(env, code, options.transforms)
self.body.generate_function_definitions(env, code)
code.mark_pos(None)
self.generate_py_string_table(env, code)
self.generate_typeobj_definitions(env, code)
self.generate_method_table(env, code)
self.generate_filename_init_prototype(code)
......@@ -267,6 +272,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
self.generate_declarations_for_modules(env, modules, h_code)
h_code.write('\n')
code.globalstate.close_global_decls()
f = open_new_file(result.c_file)
code.copyto(f)
f.close()
......@@ -531,8 +538,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
def generate_filename_table(self, code):
code.putln("")
code.putln("static const char *%s[] = {" % Naming.filenames_cname)
if code.filename_list:
for source_desc in code.filename_list:
if code.globalstate.filename_list:
for source_desc in code.globalstate.filename_list:
filename = os.path.basename(source_desc.get_filenametable_entry())
escaped_filename = filename.replace("\\", "\\\\").replace('"', r'\"')
code.putln('"%s",' %
......@@ -1451,28 +1458,6 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln(
"};")
def generate_py_string_table(self, env, code):
entries = env.all_pystring_entries
if entries:
code.putln("")
code.putln(
"static __Pyx_StringTabEntry %s[] = {" %
Naming.stringtab_cname)
for entry in entries:
code.putln(
"{&%s, %s, sizeof(%s), %d, %d, %d}," % (
entry.pystring_cname,
entry.cname,
entry.cname,
entry.type.is_unicode,
entry.is_interned,
entry.is_identifier
))
code.putln(
"{0, 0, 0, 0, 0, 0}")
code.putln(
"};")
def generate_filename_init_prototype(self, code):
code.putln("");
code.putln("static void %s(void); /*proto*/" % Naming.fileinit_cname)
......@@ -1540,6 +1525,9 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.exit_cfunc_scope() # done with labels
def generate_module_init_func(self, imported_modules, env, code):
# Insert code stream of __Pyx_InitGlobals
code.globalstate.insert_initcode_into(code)
code.enter_cfunc_scope()
code.putln("")
header2 = "PyMODINIT_FUNC init%s(void)" % env.module_name
......@@ -1559,19 +1547,17 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
env.generate_library_function_declarations(code)
self.generate_filename_init_call(code)
code.putln("/*--- Initialize various global constants etc. ---*/")
code.putln(code.error_goto_if_neg("__Pyx_InitGlobals()", self.pos))
code.putln("/*--- Module creation code ---*/")
self.generate_module_creation_code(env, code)
code.putln("/*--- Intern code ---*/")
self.generate_intern_code(env, code)
code.putln("/*--- String init code ---*/")
self.generate_string_init_code(env, code)
if Options.cache_builtins:
code.putln("/*--- Builtin init code ---*/")
self.generate_builtin_init_code(env, code)
code.putln(code.error_goto_if_neg("__Pyx_InitCachedBuiltins()",
self.pos))
code.putln("%s = 0;" % Naming.skip_dispatch_cname);
code.putln("/*--- Global init code ---*/")
......@@ -1615,7 +1601,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln('}')
tempdecl_code.put_var_declarations(env.temp_entries)
tempdecl_code.put_temp_declarations(code.func)
tempdecl_code.put_temp_declarations(code.funcstate)
code.exit_cfunc_scope()
......@@ -1727,41 +1713,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
"if (!%s) %s;" % (
Naming.preimport_cname,
code.error_goto(self.pos)));
def generate_intern_code(self, env, code):
for entry in env.pynum_entries:
if entry.init[-1] == "L":
code.putln('%s = PyLong_FromString("%s", 0, 0); %s;' % (
entry.cname,
entry.init,
code.error_goto_if_null(entry.cname, self.pos)))
else:
code.putln("%s = PyInt_FromLong(%s); %s;" % (
entry.cname,
entry.init,
code.error_goto_if_null(entry.cname, self.pos)))
def generate_string_init_code(self, env, code):
if env.all_pystring_entries:
env.use_utility_code(Nodes.init_string_tab_utility_code)
code.putln(
"if (__Pyx_InitStrings(%s) < 0) %s;" % (
Naming.stringtab_cname,
code.error_goto(self.pos)))
def generate_builtin_init_code(self, env, code):
# Lookup and cache builtin objects.
if Options.cache_builtins:
for entry in env.cached_builtins:
#assert entry.interned_cname is not None
code.putln(
'%s = __Pyx_GetName(%s, %s); if (!%s) %s' % (
entry.cname,
Naming.builtins_cname,
entry.interned_cname,
entry.cname,
code.error_goto(entry.pos)))
def generate_global_init_code(self, env, code):
# Generate code to initialise global PyObject *
# variables to None.
......@@ -1961,6 +1913,10 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
type.typeptr_cname, type.typeobj_cname))
def generate_utility_functions(self, env, code, h_code):
for codetup, name in env.utility_code_list:
code.globalstate.use_utility_code(codetup, name)
code.globalstate.put_utility_code_protos(h_code)
code.putln("")
code.putln("/* Runtime support code */")
code.putln("")
......@@ -1968,9 +1924,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln("%s = %s;" %
(Naming.filetable_cname, Naming.filenames_cname))
code.putln("}")
for utility_code in env.utility_code_used:
h_code.put(utility_code[0])
code.put(utility_code[1])
code.globalstate.put_utility_code_defs(code)
code.put(PyrexTypes.type_conversion_functions)
code.putln("")
......
This diff is collapsed.
#
# Pyrex - Compilation-wide options
# Cython - Compilation-wide options and pragma declarations
#
cache_builtins = 1 # Perform lookups on builtin names only once
......@@ -52,3 +52,58 @@ optimize_simple_methods = 1
# Append the c file and line number to the traceback for exceptions.
c_line_in_traceback = 1
# Declare pragmas
option_types = {
'boundscheck' : bool
}
option_defaults = {
'boundscheck' : True
}
def parse_option_list(s):
"""
Parses a comma-separated list of pragma options. Whitespace
is ignored.
>>> parse_option_list(' ')
{}
>>> (parse_option_list('boundscheck=True') ==
... {'boundscheck': True})
True
>>> parse_option_list(' asdf')
Traceback (most recent call last):
...
ValueError: Expected "=" in option "asdf"
>>> parse_option_list('boundscheck=hey')
Traceback (most recent call last):
...
ValueError: Must pass a boolean value for option "boundscheck"
>>> parse_option_list('unknown=True')
Traceback (most recent call last):
...
ValueError: Unknown option: "unknown"
"""
result = {}
for item in s.split(','):
item = item.strip()
if not item: continue
if not '=' in item: raise ValueError('Expected "=" in option "%s"' % item)
name, value = item.strip().split('=')
try:
type = option_types[name]
except KeyError:
raise ValueError('Unknown option: "%s"' % name)
if type is bool:
value = value.lower()
if value in ('true', 'yes'):
value = True
elif value in ('false', 'no'):
value = False
else: raise ValueError('Must pass a boolean value for option "%s"' % name)
result[name] = value
else:
assert False
return result
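As a usage note (not part of the commit): the overrides returned by parse_option_list are meant to be layered over option_defaults, roughly as in this sketch; the import assumes the Cython.Compiler.Options module being edited here.

from Cython.Compiler import Options

overrides = Options.parse_option_list('boundscheck=False')
effective = dict(Options.option_defaults)   # starts as {'boundscheck': True}
effective.update(overrides)                 # now {'boundscheck': False}
print effective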
This diff is collapsed.
......@@ -13,6 +13,7 @@ from ModuleNode import ModuleNode
from Errors import error, warning, InternalError
from Cython import Utils
import Future
import Options
class Ctx(object):
# Parsing context
......@@ -602,7 +603,7 @@ def p_string_literal(s):
else:
c = systr[1]
if c in "01234567":
chars.append(chr(int(systr[1:])))
chars.append(chr(int(systr[1:], 8)))
elif c in "'\"\\":
chars.append(c)
elif c in "abfnrtv":
......@@ -621,7 +622,7 @@ def p_string_literal(s):
strval = systr
chars.append(strval)
else:
chars.append(r'\\' + systr[1:])
chars.append('\\' + systr[1:])
elif sy == 'NEWLINE':
chars.append('\n')
elif sy == 'END_STRING':
......@@ -1412,7 +1413,7 @@ def p_statement(s, ctx, first_statement = 0):
if ctx.api:
error(s.pos, "'api' not allowed with this statement")
elif s.sy == 'def':
if ctx.level not in ('module', 'class', 'c_class', 'property'):
if ctx.level not in ('module', 'class', 'c_class', 'c_class_pxd', 'property'):
s.error('def statement not allowed here')
s.level = ctx.level
return p_def_statement(s)
......@@ -1626,8 +1627,13 @@ def p_c_simple_base_type(s, self_flag, nonempty):
longness = longness, is_self_arg = self_flag)
# Treat trailing [] on type as buffer access
if s.sy == '[':
# Treat trailing [] on type as buffer access if it appears in a context
# where declarator names are required (so that it cannot mean int[] or
# sizeof(int[SIZE]))...
#
# (This means that buffers cannot occur where there can be empty declarators,
# which is an ok restriction to make.)
if nonempty and s.sy == '[':
return p_buffer_access(s, type_node)
else:
return type_node
......@@ -1636,10 +1642,6 @@ def p_buffer_access(s, base_type_node):
# s.sy == '['
pos = s.position()
s.next()
if s.sy == ']' or s.sy == 'INT':
# not buffer, could be [] on C type nameless array arguments
s.put_back('[', '[')
return base_type_node
positional_args, keyword_args = (
p_positional_and_keyword_args(s, (']',), (0,), ('dtype',))
)
......@@ -1887,7 +1889,7 @@ def p_c_arg_decl(s, ctx, in_pyfunc, cmethod_flag = 0, nonempty = 0, kw_only = 0)
if 'pxd' in s.level:
if s.sy not in ['*', '?']:
error(pos, "default values cannot be specified in pxd files, use ? or *")
default = 1
default = ExprNodes.BoolNode(1)
s.next()
else:
default = p_simple_expr(s)
......@@ -2324,6 +2326,17 @@ def p_code(s, level=None):
repr(s.sy), repr(s.systring)))
return body
def p_option_comments(s):
result = {}
while s.sy == 'option_comment':
opts = s.systring[len("#cython:"):]
try:
result.update(Options.parse_option_list(opts))
except ValueError, e:
s.error(e.message, fatal=False)
s.next()
return result
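To illustrate the slicing above (sketch only, not from the commit): the "#cython:" prefix is stripped from the comment token and the remainder is handed to Options.parse_option_list, so a leading option comment maps to a plain dict.

from Cython.Compiler import Options

comment_text = "#cython: boundscheck=False"
opts = Options.parse_option_list(comment_text[len("#cython:"):])
assert opts == {'boundscheck': False}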
def p_module(s, pxd, full_module_name):
s.add_type_name("object")
s.add_type_name("Py_buffer")
......@@ -2333,11 +2346,16 @@ def p_module(s, pxd, full_module_name):
level = 'module_pxd'
else:
level = 'module'
option_comments = p_option_comments(s)
s.parse_option_comments = False
body = p_statement_list(s, Ctx(level = level), first_statement = 1)
if s.sy != 'EOF':
s.error("Syntax error in statement [%s,%s]" % (
repr(s.sy), repr(s.systring)))
return ModuleNode(pos, doc = doc, body = body, full_module_name = full_module_name)
return ModuleNode(pos, doc = doc, body = body,
full_module_name = full_module_name,
option_comments = option_comments)
#----------------------------------------------
#
......
......@@ -149,6 +149,7 @@ class CTypedefType(BaseType):
# typedef_base_type PyrexType
is_typedef = 1
typestring = None # Because typedefs are not known exactly
def __init__(self, cname, base_type):
self.typedef_cname = cname
......@@ -223,11 +224,14 @@ class PyObjectType(PyrexType):
#
# Base class for all Python object types (reference-counted).
#
# buffer_defaults dict or None Default options for buffers
is_pyobject = 1
default_value = "0"
parsetuple_format = "O"
pymemberdef_typecode = "T_OBJECT"
buffer_defaults = None
typestring = "O"
def __str__(self):
return "Python object"
......@@ -270,6 +274,7 @@ class BuiltinObjectType(PyObjectType):
return "<%s>"% self.cname
def assignable_from(self, src_type):
if isinstance(src_type, BuiltinObjectType):
return src_type.name == self.name
else:
......@@ -998,6 +1003,19 @@ class CStringType:
return '"%s"' % Utils.escape_byte_string(value)
class CUTF8CharArrayType(CStringType, CArrayType):
# C 'char []' type holding UTF-8 encoded text.
parsetuple_format = "s"
pymemberdef_typecode = "T_STRING_INPLACE"
is_unicode = 1
to_py_function = "PyUnicode_DecodeUTF8"
exception_value = "NULL"
def __init__(self, size):
CArrayType.__init__(self, c_char_type, size)
class CCharArrayType(CStringType, CArrayType):
# C 'char []' type.
......@@ -1018,29 +1036,6 @@ class CCharPtrType(CStringType, CPtrType):
CPtrType.__init__(self, c_char_type)
class UnicodeType(BuiltinObjectType):
# The Python unicode type.
is_string = 1
is_unicode = 1
parsetuple_format = "U"
def __init__(self):
BuiltinObjectType.__init__(self, "unicode", "PyUnicodeObject")
def literal_code(self, value):
assert isinstance(value, str)
return '"%s"' % Utils.escape_byte_string(value)
def declaration_code(self, entity_code,
for_display = 0, dll_linkage = None, pyrex = 0):
if pyrex or for_display:
return self.base_declaration_code(self.name, entity_code)
else:
return "%s %s[]" % (public_decl("char", dll_linkage), entity_code)
class ErrorType(PyrexType):
# Used to prevent propagation of error messages.
......@@ -1049,6 +1044,7 @@ class ErrorType(PyrexType):
exception_check = 0
to_py_function = "dummy"
from_py_function = "dummy"
typestring = None
def declaration_code(self, entity_code,
for_display = 0, dll_linkage = None, pyrex = 0):
......@@ -1105,8 +1101,8 @@ c_longdouble_type = CFloatType(8, typestring="g")
c_null_ptr_type = CNullPtrType(c_void_type)
c_char_array_type = CCharArrayType(None)
c_unicode_type = UnicodeType()
c_char_ptr_type = CCharPtrType()
c_utf8_char_array_type = CUTF8CharArrayType(None)
c_char_ptr_ptr_type = CPtrType(c_char_ptr_type)
c_py_ssize_t_ptr_type = CPtrType(c_py_ssize_t_type)
c_int_ptr_type = CPtrType(c_int_type)
......
......@@ -306,6 +306,7 @@ class PyrexScanner(Scanner):
self.compile_time_env = initial_compile_time_env()
self.compile_time_eval = 1
self.compile_time_expr = 0
self.parse_option_comments = True
self.source_encoding = source_encoding
self.trace = trace_scanner
self.indentation_stack = [0]
......@@ -314,6 +315,13 @@ class PyrexScanner(Scanner):
self.begin('INDENT')
self.sy = ''
self.next()
def option_comment(self, text):
# '#cython:' comments are produced as option_comment tokens until
# parse_option_comments is set to False, at which point
# they are simply ignored.
if self.parse_option_comments:
self.produce('option_comment', text)
def current_level(self):
return self.indentation_stack[-1]
......@@ -432,12 +440,13 @@ class PyrexScanner(Scanner):
def looking_at_type_name(self):
return self.sy == 'IDENT' and self.systring in self.type_names
def error(self, message, pos = None):
def error(self, message, pos = None, fatal = True):
if pos is None:
pos = self.position()
if self.sy == 'INDENT':
error(pos, "Possible inconsistent indentation")
raise error(pos, message)
err = error(pos, "Possible inconsistent indentation")
err = error(pos, message)
if fatal: raise err
def expect(self, what, message = None):
if self.sy == what:
......
......@@ -26,11 +26,12 @@ nice_identifier = re.compile('^[a-zA-Z0-0_]+$').match
class BufferAux:
writable_needed = False
def __init__(self, buffer_info_var, stridevars, shapevars, tschecker):
def __init__(self, buffer_info_var, stridevars, shapevars,
suboffsetvars):
self.buffer_info_var = buffer_info_var
self.stridevars = stridevars
self.shapevars = shapevars
self.tschecker = tschecker
self.suboffsetvars = suboffsetvars
def __repr__(self):
return "<BufferAux %r>" % self.__dict__
......@@ -504,7 +505,7 @@ class Scope:
else:
cname = self.new_const_cname()
if value.is_unicode:
c_type = PyrexTypes.c_unicode_type
c_type = PyrexTypes.c_utf8_char_array_type
value = value.utf8encode()
else:
c_type = PyrexTypes.c_char_array_type
......@@ -629,9 +630,6 @@ class Scope:
def use_utility_code(self, new_code, name=None):
self.global_scope().use_utility_code(new_code, name)
def has_utility_code(self, name):
return self.global_scope().has_utility_code(name)
def generate_library_function_declarations(self, code):
# Generate extern decls for C library funcs used.
......@@ -743,6 +741,8 @@ class BuiltinScope(Scope):
"True": ["Py_True", py_object_type],
}
const_counter = 1 # As a temporary solution for compiling code in pxds
class ModuleScope(Scope):
# module_name string Python name of the module
# module_cname string C name of Python module object
......@@ -750,9 +750,8 @@ class ModuleScope(Scope):
# method_table_cname string C name of method table
# doc string Module doc string
# doc_cname string C name of module doc string
# const_counter integer Counter for naming constants
# utility_code_used [string] Utility code to be included
# utility_code_names set(string) (Optional) names for named (often generated) utility code
# const_counter integer Counter for naming constants (PS: MOVED TO GLOBAL)
# utility_code_list [((string, string), string)] Queuing utility codes for forwarding to Code.py
# default_entries [Entry] Function argument default entries
# python_include_files [string] Standard Python headers to be included
# include_files [string] Other C headers to be included
......@@ -785,9 +784,7 @@ class ModuleScope(Scope):
self.method_table_cname = Naming.methtable_cname
self.doc = ""
self.doc_cname = Naming.moddoc_cname
self.const_counter = 1
self.utility_code_used = []
self.utility_code_names = set()
self.utility_code_list = []
self.default_entries = []
self.module_entries = {}
self.python_include_files = ["Python.h", "structmember.h"]
......@@ -940,35 +937,20 @@ class ModuleScope(Scope):
return entry
def new_const_cname(self):
global const_counter
# Create a new globally-unique name for a constant.
prefix=''
n = self.const_counter
self.const_counter = n + 1
n = const_counter
const_counter = n + 1
return "%s%s%d" % (Naming.const_prefix, prefix, n)
def use_utility_code(self, new_code, name=None):
# Add string to list of utility code to be included,
# if not already there (tested using the provided name,
# or 'is' if name=None -- if the utility code is dynamically
# generated, use the name, otherwise it is not needed).
if name is not None:
if name in self.utility_code_names:
return
for old_code in self.utility_code_used:
if old_code is new_code:
return
self.utility_code_used.append(new_code)
self.utility_code_names.add(name)
def has_utility_code(self, name):
# Checks if utility code (that is registered by name) has
# previously been registered. This is useful if the utility code
# is dynamically generated to avoid re-generation.
return name in self.utility_code_names
self.utility_code_list.append((new_code, name))
def declare_c_class(self, name, pos, defining = 0, implementing = 0,
module_name = None, base_type = None, objstruct_cname = None,
typeobj_cname = None, visibility = 'private', typedef_flag = 0, api = 0):
typeobj_cname = None, visibility = 'private', typedef_flag = 0, api = 0,
buffer_defaults = None):
#
# Look for previous declaration as a type
#
......@@ -992,6 +974,7 @@ class ModuleScope(Scope):
if not entry:
type = PyrexTypes.PyExtensionType(name, typedef_flag, base_type)
type.pos = pos
type.buffer_defaults = buffer_defaults
if visibility == 'extern':
type.module_name = module_name
else:
......
......@@ -2,6 +2,7 @@ from Cython.TestUtils import CythonTest
import Cython.Compiler.Errors as Errors
from Cython.Compiler.Nodes import *
from Cython.Compiler.ParseTreeTransforms import *
from Cython.Compiler.Buffer import *
class TestBufferParsing(CythonTest):
......@@ -45,6 +46,8 @@ class TestBufferParsing(CythonTest):
# See also tests/error/e_bufaccess.pyx and tests/run/bufaccess.pyx
# THESE TESTS ARE NOW DISABLED, the code they test was pretty much
# refactored away
class TestBufferOptions(CythonTest):
# Tests the full parsing of the options within the brackets
......@@ -74,24 +77,24 @@ class TestBufferOptions(CythonTest):
# e = self.should_fail(lambda: self.parse_opts(opts))
self.assertEqual(expected_err, self.error.message_only)
def test_basic(self):
def __test_basic(self):
buf = self.parse_opts(u"unsigned short int, 3")
self.assert_(isinstance(buf.dtype_node, CSimpleBaseTypeNode))
self.assert_(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1)
self.assertEqual(3, buf.ndim)
def test_dict(self):
def __test_dict(self):
buf = self.parse_opts(u"ndim=3, dtype=unsigned short int")
self.assert_(isinstance(buf.dtype_node, CSimpleBaseTypeNode))
self.assert_(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1)
self.assertEqual(3, buf.ndim)
def test_ndim(self):
def __test_ndim(self):
self.parse_opts(u"int, 2")
self.non_parse(ERR_BUF_INT % 'ndim', u"int, 'a'")
self.non_parse(ERR_BUF_NONNEG % 'ndim', u"int, -34")
self.non_parse(ERR_BUF_NDIM, u"int, 'a'")
self.non_parse(ERR_BUF_NDIM, u"int, -34")
def test_use_DEF(self):
def __test_use_DEF(self):
t = self.fragment(u"""
DEF ndim = 3
def f():
......
......@@ -20,7 +20,7 @@ Support for parsing strings into code trees.
class StringParseContext(Main.Context):
def __init__(self, include_directories, name):
Main.Context.__init__(self, include_directories)
Main.Context.__init__(self, include_directories, {})
self.module_name = name
def find_module(self, module_name, relative_to = None, pos = None, need_pxd = 1):
......
......@@ -28,6 +28,10 @@ class BasicVisitor(object):
if m is not None:
break
else:
print type(self), type(obj)
print self.access_path
print self.access_path[-1][0].pos
print self.access_path[-1][0].__dict__
raise RuntimeError("Visitor does not accept object: %s" % obj)
self.dispatch_table[mname] = m
return m(obj)
......
......@@ -2,16 +2,80 @@ cdef extern from "Python.h":
ctypedef int Py_intptr_t
cdef extern from "numpy/arrayobject.h":
ctypedef Py_intptr_t npy_intp
ctypedef struct PyArray_Descr:
int elsize
ctypedef class numpy.ndarray [object PyArrayObject]:
cdef char *data
cdef int nd
cdef Py_intptr_t *dimensions
cdef Py_intptr_t *strides
cdef object base
# descr not implemented yet here...
cdef int flags
cdef int itemsize
cdef object weakreflist
cdef:
char *data
int nd
npy_intp *dimensions
npy_intp *strides
object base
# descr not implemented yet here...
int flags
int itemsize
object weakreflist
PyArray_Descr* descr
def __getbuffer__(ndarray self, Py_buffer* info, int flags):
if sizeof(npy_intp) != sizeof(Py_ssize_t):
raise RuntimeError("Py_intptr_t and Py_ssize_t differs in size, numpy.pxd does not support this")
cdef int typenum = PyArray_TYPE(self)
info.buf = <void*>self.data
info.ndim = 2
info.strides = <Py_ssize_t*>self.strides
info.shape = <Py_ssize_t*>self.dimensions
info.suboffsets = NULL
info.format = "i"
info.itemsize = self.descr.elsize
info.readonly = not PyArray_ISWRITEABLE(self)
# PS TODO TODO!: Py_ssize_t vs Py_intptr_t
## PyArrayObject *arr = (PyArrayObject*)obj;
## PyArray_Descr *type = (PyArray_Descr*)arr->descr;
## int typenum = PyArray_TYPE(obj);
## if (!PyTypeNum_ISNUMBER(typenum)) {
## PyErr_Format(PyExc_TypeError, "Only numeric NumPy types currently supported.");
## return -1;
## }
## /*
## NumPy format codes doesn't completely match buffer codes;
## seems safest to retranslate.
## 01234567890123456789012345*/
## const char* base_codes = "?bBhHiIlLqQfdgfdgO";
## char* format = (char*)malloc(4);
## char* fp = format;
## *fp++ = type->byteorder;
## if (PyTypeNum_ISCOMPLEX(typenum)) *fp++ = 'Z';
## *fp++ = base_codes[typenum];
## *fp = 0;
## view->buf = arr->data;
## view->readonly = !PyArray_ISWRITEABLE(obj);
## view->ndim = PyArray_NDIM(arr);
## view->strides = PyArray_STRIDES(arr);
## view->shape = PyArray_DIMS(arr);
## view->suboffsets = NULL;
## view->format = format;
## view->itemsize = type->elsize;
## view->internal = 0;
## return 0;
## print "hello" + str(43) + "asdf" + "three"
## pass
cdef int PyArray_TYPE(ndarray arr)
cdef int PyArray_ISWRITEABLE(ndarray arr)
ctypedef unsigned int npy_uint8
ctypedef unsigned int npy_uint16
......@@ -27,4 +91,5 @@ cdef extern from "numpy/arrayobject.h":
ctypedef float npy_float96
ctypedef float npy_float128
ctypedef npy_int64 Tint64
ctypedef npy_int64 int64
......@@ -118,32 +118,28 @@
# just to be sure you understand what is going on.
#
#################################################################
cdef extern from "Python.h":
ctypedef void PyObject
ctypedef void PyTypeObject
ctypedef struct FILE
include 'python_ref.pxi'
include 'python_exc.pxi'
include 'python_module.pxi'
include 'python_mem.pxi'
include 'python_tuple.pxi'
include 'python_list.pxi'
include 'python_object.pxi'
include 'python_sequence.pxi'
include 'python_mapping.pxi'
include 'python_iterator.pxi'
include 'python_type.pxi'
include 'python_number.pxi'
include 'python_int.pxi'
include 'python_bool.pxi'
include 'python_long.pxi'
include 'python_float.pxi'
include 'python_complex.pxi'
include 'python_string.pxi'
include 'python_dict.pxi'
include 'python_instance.pxi'
include 'python_function.pxi'
include 'python_method.pxi'
include 'python_set.pxi'
from python_ref cimport *
from python_exc cimport *
from python_module cimport *
from python_mem cimport *
from python_tuple cimport *
from python_list cimport *
from python_object cimport *
from python_sequence cimport *
from python_mapping cimport *
from python_iterator cimport *
from python_type cimport *
from python_number cimport *
from python_int cimport *
from python_bool cimport *
from python_long cimport *
from python_float cimport *
from python_complex cimport *
from python_string cimport *
from python_dict cimport *
from python_instance cimport *
from python_function cimport *
from python_method cimport *
from python_set cimport *
cdef extern from "Python.h":
ctypedef void PyObject
ctypedef void PyTypeObject
ctypedef struct PyObject:
Py_ssize_t ob_refcnt
PyTypeObject *ob_type
ctypedef struct FILE
......
......@@ -7,8 +7,7 @@ class StringIOTree(object):
def __init__(self, stream=None):
self.prepended_children = []
if stream is None: stream = StringIO()
self.stream = stream
self.stream = stream # if set to None, it will be constructed on first write
def getvalue(self):
return ("".join([x.getvalue() for x in self.prepended_children]) +
......@@ -19,20 +18,44 @@ class StringIOTree(object):
needs to happen."""
for child in self.prepended_children:
child.copyto(target)
target.write(self.stream.getvalue())
if self.stream:
target.write(self.stream.getvalue())
def write(self, what):
if not self.stream:
self.stream = StringIO()
self.stream.write(what)
def commit(self):
# Save what we have written until now so that the buffer
# itself is empty -- this makes it ready for insertion
if self.stream:
self.prepended_children.append(StringIOTree(self.stream))
self.stream = None
def insert(self, iotree):
"""
Insert a StringIOTree (and all of its contents) at this location.
Further writing to self appears after what is inserted.
"""
self.commit()
self.prepended_children.append(iotree)
def insertion_point(self):
"""
Returns a new StringIOTree, which is left behind at the current position
(i.e. what is written to the result will appear right before whatever is
next written to self).
Calling getvalue() or copyto() on the result will only return the
contents written to it.
"""
# Save what we have written until now
# (would it be more efficient to check with len(self.stream.getvalue())?
# leaving it out for now)
self.prepended_children.append(StringIOTree(self.stream))
# This is so that getvalue on the result doesn't include it.
self.commit()
# Construct the new forked object to return
other = StringIOTree()
self.prepended_children.append(other)
self.stream = StringIO()
return other
__doc__ = r"""
......@@ -57,13 +80,11 @@ EXAMPLE:
>>> c.write('beta\n')
>>> b.getvalue().split()
['second', 'alpha', 'beta', 'gamma']
>>> i = StringIOTree()
>>> d.insert(i)
>>> i.write('inserted\n')
>>> out = StringIO()
>>> a.copyto(out)
>>> out.getvalue().split()
['first', 'second', 'alpha', 'beta', 'gamma', 'third']
"""
if __name__ == "__main__":
import doctest
doctest.testmod()
['first', 'second', 'alpha', 'inserted', 'beta', 'gamma', 'third']
"""
\ No newline at end of file
......@@ -46,7 +46,7 @@ class ErrorWriter(object):
class TestBuilder(object):
def __init__(self, rootdir, workdir, selectors, annotate,
cleanup_workdir, cleanup_sharedlibs, with_pyregr):
cleanup_workdir, cleanup_sharedlibs, with_pyregr, cythononly):
self.rootdir = rootdir
self.workdir = workdir
self.selectors = selectors
......@@ -54,6 +54,7 @@ class TestBuilder(object):
self.cleanup_workdir = cleanup_workdir
self.cleanup_sharedlibs = cleanup_sharedlibs
self.with_pyregr = with_pyregr
self.cythononly = cythononly
def build_suite(self):
suite = unittest.TestSuite()
......@@ -102,21 +103,23 @@ class TestBuilder(object):
path, workdir, module,
annotate=self.annotate,
cleanup_workdir=self.cleanup_workdir,
cleanup_sharedlibs=self.cleanup_sharedlibs)
cleanup_sharedlibs=self.cleanup_sharedlibs,
cythononly=self.cythononly)
else:
test = CythonCompileTestCase(
path, workdir, module,
expect_errors=expect_errors,
annotate=self.annotate,
cleanup_workdir=self.cleanup_workdir,
cleanup_sharedlibs=self.cleanup_sharedlibs)
cleanup_sharedlibs=self.cleanup_sharedlibs,
cythononly=self.cythononly)
suite.addTest(test)
return suite
class CythonCompileTestCase(unittest.TestCase):
def __init__(self, directory, workdir, module,
expect_errors=False, annotate=False, cleanup_workdir=True,
cleanup_sharedlibs=True):
cleanup_sharedlibs=True, cythononly=False):
self.directory = directory
self.workdir = workdir
self.module = module
......@@ -124,6 +127,7 @@ class CythonCompileTestCase(unittest.TestCase):
self.annotate = annotate
self.cleanup_workdir = cleanup_workdir
self.cleanup_sharedlibs = cleanup_sharedlibs
self.cythononly = cythononly
unittest.TestCase.__init__(self)
def shortDescription(self):
......@@ -247,7 +251,8 @@ class CythonCompileTestCase(unittest.TestCase):
unexpected_error = errors[len(expected_errors)]
self.assertEquals(None, unexpected_error)
else:
self.run_distutils(module, workdir, incdir)
if not self.cythononly:
self.run_distutils(module, workdir, incdir)
class CythonRunTestCase(CythonCompileTestCase):
def shortDescription(self):
......@@ -259,8 +264,9 @@ class CythonRunTestCase(CythonCompileTestCase):
result.startTest(self)
try:
self.runCompileTest()
sys.stderr.write('running doctests in %s ...\n' % self.module)
doctest.DocTestSuite(self.module).run(result)
if not self.cythononly:
sys.stderr.write('running doctests in %s ...\n' % self.module)
doctest.DocTestSuite(self.module).run(result)
except Exception:
result.addError(self, sys.exc_info())
result.stopTest(self)
......@@ -372,7 +378,10 @@ if __name__ == '__main__':
help="do not run the file based tests")
parser.add_option("--no-pyregr", dest="pyregr",
action="store_false", default=True,
help="do not run the regression tests of CPython in tests/pyregr/")
help="do not run the regression tests of CPython in tests/pyregr/")
parser.add_option("--cython-only", dest="cythononly",
action="store_true", default=False,
help="only compile pyx to c, do not run C compiler or run the tests")
parser.add_option("--sys-pyregr", dest="system_pyregr",
action="store_true", default=False,
help="run the regression tests of the CPython installation")
......@@ -445,7 +454,7 @@ if __name__ == '__main__':
if options.filetests:
filetests = TestBuilder(ROOTDIR, WORKDIR, selectors,
options.annotate_source, options.cleanup_workdir,
options.cleanup_sharedlibs, options.pyregr)
options.cleanup_sharedlibs, options.pyregr, options.cythononly)
test_suite.addTest(filetests.build_suite())
if options.system_pyregr:
......
cdef extern from *:
cdef void foo(int[])
......@@ -17,3 +18,8 @@ cdef struct OtherStruct:
a = sizeof(int[23][34])
b = sizeof(OtherStruct[43])
DEF COUNT = 4
c = sizeof(int[COUNT])
d = sizeof(OtherStruct[COUNT])
#cython: boundscheck=False
print 3
cimport python_dict as asadf, python_exc, cython as cy
@cy.boundscheck(False)
def f(object[int, 2] buf):
print buf[3, 2]
@cy.boundscheck(True)
def g(object[int, 2] buf):
# Please leave this comment,
#cython: this should have no special meaning
# even if the above line doesn't follow indentation.
print buf[3, 2]
def h(object[int, 2] buf):
print buf[3, 2]
with cy.boundscheck(True):
print buf[3,2]
from cython cimport boundscheck as bc
def i(object[int] buf):
with bc(True):
print buf[3]
......@@ -15,11 +15,13 @@ _ERRORS = u"""
1:11: Buffer types only allowed as function local variables
3:15: Buffer types only allowed as function local variables
6:27: "fakeoption" is not a buffer option
7:22: "ndim" must be non-negative
8:15: "dtype" missing
9:21: "ndim" must be an integer
10:15: Too many buffer options
11:24: Only allowed buffer modes are "full" or "strided" (as a compile-time string)
12:28: Only allowed buffer modes are "full" or "strided" (as a compile-time string)
"""
#TODO:
#7:22: "ndim" must be non-negative
#8:15: "dtype" missing
#9:21: "ndim" must be an integer
#10:15: Too many buffer options
#11:24: Only allowed buffer modes are "full" or "strided" (as a compile-time string)
#12:28: Only allowed buffer modes are "full" or "strided" (as a compile-time string)
#"""
cimport e_bufaccess_pxd # was needed to provoke a bug involving ErrorType
def f():
cdef object[e_bufaccess_pxd.T] buf
_ERRORS = u"""
3:17: Syntax error in ctypedef statement
4:31: 'T' is not a type identifier
4:31: 'T' is not declared
"""
# See e_bufaccess2.pyx
ctypedef nothing T
#cython: nonexistant
#cython: some=9
# The one below should NOT raise an error
#cython: boundscheck=True
# However this one should
#cython: boundscheck=sadf
print 3
#cython: boundscheck=True
_ERRORS = u"""
2:0: Expected "=" in option "nonexistant"
3:0: Unknown option: "some"
10:0: Must pass a boolean value for option "boundscheck"
"""
cimport e_pxdimpl_imported
_ERRORS = """
6:4: function definition not allowed here
18:4: function definition not allowed here
23:8: function definition not allowed here
"""
cdef class A:
cdef int test(self)
# Should give error:
def somefunc(self):
pass
# While this should *not* be an error...:
def __getbuffer__(self, Py_buffer* info, int flags):
pass
# This neither:
def __releasebuffer__(self, Py_buffer* info):
pass
# Terminate with an error to be sure the compiler is
# not terminating prior to previous errors
def terminate(self):
pass
cdef extern from "foo.h":
cdef class pxdimpl.B [object MyB]:
def otherfunc(self):
pass
/* See bufaccess.pyx */
typedef short htypedef_short;
This diff is collapsed.
cdef class A:
cpdef foo(self, bint a=*, b=*)
__doc__ = """
>>> a = A()
>>> a.foo()
(True, 'yo')
>>> a.foo(False)
(False, 'yo')
>>> a.foo(10, 'yes')
(True, 'yes')
"""
cdef class A:
cpdef foo(self, bint a=True, b="yo"):
return a, b
......@@ -4,8 +4,8 @@ __doc__ = u"""
>>> test_unicode_ascii(2)
u'c'
>>> test_unicode(2)
u'\u00e4'
>>> test_unicode(2) == u'\u00e4'
True
>>> test_int_list(2)
3
......
# cannot be named "numpy" in order to no clash with the numpy module!
cimport numpy
try:
import numpy
__doc__ = """
>>> basic()
[[0 1 2 3 4]
[5 6 7 8 9]]
2 0 9 5
"""
except:
__doc__ = ""
def basic():
cdef object[int, 2] buf = numpy.arange(10).reshape((2, 5))
print buf
print buf[0, 2], buf[0, 0], buf[1, 4], buf[1, 0]