Commit 63c07e29 authored by Robert Bradshaw

Merge branch 'master' into ctuple

Conflicts:
	CHANGES.rst
parents b7d80418 99e68228
@@ -22,12 +22,14 @@ before_install:
   - sudo apt-get install gdb python$( python -c 'import sys; print("%d.%d" % sys.version_info[:2])' )-dbg || true
   - dpkg -l | grep gdb || true
-install: CFLAGS="-O2 -ggdb" pip install .
+install:
+  - CFLAGS="-O2 -ggdb -Wall -Wextra $(python -c 'import sys; print("-fno-strict-aliasing" if sys.version_info[0] == 2 else "")')" python setup.py build
 script:
   - PYTHON_DBG="python$( python -c 'import sys; print("%d.%d" % sys.version_info[:2])' )-dbg"
   - if $PYTHON_DBG -V >&2; then CFLAGS="-O0 -ggdb" $PYTHON_DBG runtests.py -vv Debugger --backends=$BACKEND; fi
-  - CFLAGS="-O0 -ggdb" python runtests.py -vv -x Debugger --backends=$BACKEND
+  - CFLAGS="-O2 -ggdb -Wall -Wextra" python setup.py build_ext -i
+  - CFLAGS="-O0 -ggdb -Wall -Wextra" python runtests.py -vv -x Debugger --backends=$BACKEND
 matrix:
   allow_failures:
@@ -38,4 +40,3 @@ matrix:
       env: BACKEND=cpp
     - python: pypy3
       env: BACKEND=cpp
-  fast_finish: true
@@ -8,6 +8,21 @@ Latest
 Features added
 --------------

+* C functions can coerce to Python functions, which allows passing them
+  around as callable objects.
+
+* New ``cythonize`` option ``-a`` to generate the annotated HTML source view.
+
+* Extern C functions can now be declared as cpdef to export them to
+  the module's Python namespace.  Extern C functions in pxd files export
+  their values to their own module, iff it exists.
+
+* Missing C-API declarations in ``cpython.unicode`` were added.
+
+* Passing ``language='c++'`` into cythonize() globally enables C++ mode for
+  all modules that were not passed as Extension objects (i.e. only source
+  files and file patterns).
+
 * ``Py_hash_t`` is a known type (used in CPython for hash values).

 * ``PySlice_*()`` C-API functions are available from the ``cpython.slice``
@@ -15,14 +30,20 @@ Features added
 * Anonymous C tuple types can be declared as (ctype1, ctype2, ...).

+* Allow arrays of C++ classes.
+
 Bugs fixed
 ----------

+* Mismatching 'except' declarations on signatures in .pxd and .pyx files failed
+  to produce a compile error.
+
 * Reference leak for non-simple Python expressions in boolean and/or expressions.

-* ``getitimer()``, ``setitimer()``, ``gettimeofday()`` and related type/constant
-  definitions were moved from ``posix/time.pxd`` to ``posix/sys_time.pxd`` to
-  fix a naming collision.
+* To fix a name collision and to reflect availability on host platforms,
+  standard C declarations [ clock(), time(), struct tm and tm* functions ]
+  were moved from posix/time.pxd to a new libc/time.pxd.  Patch by Charles
+  Blake.

 * Rerunning unmodified modules in IPython's cython support failed.
   Patch by Matthias Bussonier.
@@ -34,7 +55,12 @@ Bugs fixed
   if the already created module was used later on (e.g. through a
   stale reference in sys.modules or elsewhere).

-* Allow arrays of C++ classes.
+Other changes
+-------------
+
+* Compilation no longer fails hard when unknown compilation options are
+  passed.  Instead, it raises a warning and ignores them (as it did silently
+  before 0.21).  This will be changed back to an error in a future release.

 0.21 (2014-09-10)
......
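The changelog entries above are terse; as a rough, non-authoritative sketch of two of the new features (anonymous C tuple types and ``cpdef`` declarations on extern C functions), with all names invented for illustration::

    # illustrative .pyx snippet, not part of this commit
    cdef extern from "math.h":
        cpdef double sin(double x)    # re-exported into the module's Python namespace

    def min_max(values):
        # anonymous C tuple type declared as (ctype1, ctype2, ...)
        cdef (double, double) bounds = (min(values), max(values))
        return bounds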
@@ -145,6 +145,8 @@ def parse_args(args):
                       help='set a cythonize option')
     parser.add_option('-3', dest='python3_mode', action='store_true',
                       help='use Python 3 syntax mode by default')
+    parser.add_option('-a', '--annotate', dest='annotate', action='store_true',
+                      help='generate annotated HTML page for source files')
     parser.add_option('-x', '--exclude', metavar='PATTERN', dest='excludes',
                       action='append', default=[],
@@ -188,6 +190,9 @@ def main(args=None):
         Options.error_on_unknown_names = False
         Options.error_on_uninitialized = False

+    if options.annotate:
+        Options.annotate = True
+
     for path in paths:
         cython_compile(path, options)
......
@@ -251,6 +251,7 @@ def strip_string_literals(code, prefix='__Pyx_L'):
     in_quote = False
     hash_mark = single_q = double_q = -1
     code_len = len(code)
+    quote_type = quote_len = None
     while True:
         if hash_mark < q:
@@ -260,7 +261,8 @@ def strip_string_literals(code, prefix='__Pyx_L'):
             if double_q < q:
                 double_q = code.find('"', q)
             q = min(single_q, double_q)
-            if q == -1: q = max(single_q, double_q)
+            if q == -1:
+                q = max(single_q, double_q)
         # We're done.
         if q == -1 and hash_mark == -1:
@@ -276,7 +278,8 @@ def strip_string_literals(code, prefix='__Pyx_L'):
                 if k % 2 == 0:
                     q += 1
                     continue
-            if code[q] == quote_type and (quote_len == 1 or (code_len > q + 2 and quote_type == code[q+1] == code[q+2])):
+            if code[q] == quote_type and (
+                    quote_len == 1 or (code_len > q + 2 and quote_type == code[q+1] == code[q+2])):
                 counter += 1
                 label = "%s%s_" % (prefix, counter)
                 literals[label] = code[start+quote_len:q]
@@ -586,7 +589,8 @@ def create_dependency_tree(ctx=None, quiet=False):
 # This may be useful for advanced users?
-def create_extension_list(patterns, exclude=[], ctx=None, aliases=None, quiet=False, exclude_failures=False):
+def create_extension_list(patterns, exclude=[], ctx=None, aliases=None, quiet=False, language=None,
+                          exclude_failures=False):
     if not isinstance(patterns, (list, tuple)):
         patterns = [patterns]
     explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)])
@@ -606,6 +610,7 @@ def create_extension_list(patterns, exclude=[], ctx=None, aliases=None, quiet=Fa
             name = '*'
             base = None
             exn_type = Extension
+            ext_language = language
         elif isinstance(pattern, Extension):
             for filepattern in pattern.sources:
                 if os.path.splitext(filepattern)[1] in ('.py', '.pyx'):
@@ -618,6 +623,7 @@ def create_extension_list(patterns, exclude=[], ctx=None, aliases=None, quiet=Fa
             name = template.name
             base = DistutilsInfo(exn=template)
             exn_type = template.__class__
+            ext_language = None  # do not override whatever the Extension says
         else:
             raise TypeError(pattern)
@@ -661,6 +667,9 @@ def create_extension_list(patterns, exclude=[], ctx=None, aliases=None, quiet=Fa
                     depends = list(set(template.depends).union(set(depends)))
                     kwds['depends'] = depends
+                if ext_language and 'language' not in kwds:
+                    kwds['language'] = ext_language
+
                 module_list.append(exn_type(
                     name=module_name,
                     sources=sources,
@@ -671,7 +680,7 @@ def create_extension_list(patterns, exclude=[], ctx=None, aliases=None, quiet=Fa
 # This is the user-exposed entry point.
-def cythonize(module_list, exclude=[], nthreads=0, aliases=None, quiet=False, force=False,
+def cythonize(module_list, exclude=[], nthreads=0, aliases=None, quiet=False, force=False, language=None,
               exclude_failures=False, **options):
     """
     Compile a set of source modules into C/C++ files and return a list of distutils
@@ -684,6 +693,11 @@ def cythonize(module_list, exclude=[], nthreads=0, aliases=None, quiet=False, fo
     When using glob patterns, you can exclude certain module names explicitly
     by passing them into the 'exclude' option.

+    To globally enable C++ mode, you can pass language='c++'.  Otherwise, this
+    will be determined at a per-file level based on compiler directives.  This
+    affects only modules found based on file names.  Extension instances passed
+    into cythonize() will not be changed.
+
     For parallel compilation, set the 'nthreads' option to the number of
     concurrent builds.
@@ -711,6 +725,7 @@ def cythonize(module_list, exclude=[], nthreads=0, aliases=None, quiet=False, fo
         ctx=ctx,
         quiet=quiet,
         exclude_failures=exclude_failures,
+        language=language,
         aliases=aliases)
     deps = create_dependency_tree(ctx, quiet=quiet)
     build_dir = getattr(options, 'build_dir', None)
......
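A minimal, hypothetical ``setup.py`` showing the ``language='c++'`` switch that the updated ``cythonize()`` docstring describes (the file pattern is made up)::

    # illustrative setup.py, not part of this commit
    from distutils.core import setup
    from Cython.Build import cythonize

    setup(
        # all modules matched by the glob are compiled in C++ mode;
        # Extension instances passed in explicitly keep their own 'language'
        ext_modules=cythonize("src/*.pyx", language='c++'),
    )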
@@ -4282,19 +4282,6 @@ class SliceNode(ExprNode):
         if self.is_literal:
             code.put_giveref(self.py_result())

-    def __deepcopy__(self, memo):
-        """
-        There is a copy bug in python 2.4 for slice objects.
-        """
-        return SliceNode(
-            self.pos,
-            start=copy.deepcopy(self.start, memo),
-            stop=copy.deepcopy(self.stop, memo),
-            step=copy.deepcopy(self.step, memo),
-            is_temp=self.is_temp,
-            is_literal=self.is_literal,
-            constant_result=self.constant_result)

 class CallNode(ExprNode):
@@ -6064,8 +6051,10 @@ class SequenceNode(ExprNode):
             if isinstance(mult_factor.constant_result, (int,long)) \
                     and mult_factor.constant_result > 0:
                 size_factor = ' * %s' % mult_factor.constant_result
-            else:
+            elif mult_factor.type.signed:
                 size_factor = ' * ((%s<0) ? 0:%s)' % (c_mult, c_mult)
+            else:
+                size_factor = ' * (%s)' % (c_mult,)

         if self.type is Builtin.tuple_type and (self.is_literal or self.slow) and not c_mult:
             # use PyTuple_Pack() to avoid generating huge amounts of one-time code
@@ -7597,7 +7586,7 @@ class BoundMethodNode(ExprNode):
     def generate_result_code(self, code):
         code.putln(
-            "%s = PyMethod_New(%s, %s, (PyObject*)%s->ob_type); %s" % (
+            "%s = __Pyx_PyMethod_New(%s, %s, (PyObject*)%s->ob_type); %s" % (
                 self.result(),
                 self.function.py_result(),
                 self.self_object.py_result(),
@@ -7629,7 +7618,7 @@ class UnboundMethodNode(ExprNode):
     def generate_result_code(self, code):
         class_cname = code.pyclass_stack[-1].classobj.result()
         code.putln(
-            "%s = PyMethod_New(%s, 0, %s); %s" % (
+            "%s = __Pyx_PyMethod_New(%s, 0, %s); %s" % (
                 self.result(),
                 self.function.py_result(),
                 class_cname,
......
@@ -634,7 +634,7 @@ def check_definitions(flow, compiler_directives):
     for entry in flow.entries:
         if (not entry.cf_references
                 and not entry.is_pyclass_attr):
-            if entry.name != '_':
+            if entry.name != '_' and not entry.name.startswith('unused'):
                 # '_' is often used for unused variables, e.g. in loops
                 if entry.is_arg:
                     if warn_unused_arg:
......
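A small, hedged illustration of the hunk above (variable and function names are invented): the unused-variable warning is now suppressed for names starting with ``unused`` as well as for ``_``::

    # illustrative snippet, not part of this commit
    def process(items):
        unused_handle = open_resource(items)   # no "unused" warning with this change
        _ = also_ignored(items)                # previously the only exempt name
        return len(items)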
@@ -361,8 +361,7 @@ class Context(object):
     def setup_errors(self, options, result):
         Errors.reset()  # clear any remaining error state
         if options.use_listing_file:
-            result.listing_file = Utils.replace_suffix(source, ".lis")
-            path = result.listing_file
+            path = result.listing_file = Utils.replace_suffix(result.main_source_file, ".lis")
         else:
             path = None
         Errors.open_listing_file(path=path,
@@ -499,9 +498,12 @@ class CompilationOptions(object):
         # ignore valid options that are not in the defaults
         unknown_options.difference_update(['include_path'])
         if unknown_options:
-            raise ValueError("got unexpected compilation option%s: %s" % (
+            # TODO: make this a hard error in 0.22
+            message = "got unknown compilation option%s, please remove: %s" % (
                 's' if len(unknown_options) > 1 else '',
-                ', '.join(unknown_options)))
+                ', '.join(unknown_options))
+            import warnings
+            warnings.warn(message)
         directives = dict(options['compiler_directives'])  # copy mutable field
         options['compiler_directives'] = directives
......
@@ -439,13 +439,16 @@ def get_is_contig_utility(c_contig, ndim):
     return utility

 def copy_src_to_dst_cname():
     return "__pyx_memoryview_copy_contents"

 def verify_direct_dimensions(node):
     for access, packing in node.type.axes:
         if access != 'direct':
-            error(self.pos, "All dimensions must be direct")
+            error(node.pos, "All dimensions must be direct")

 def copy_broadcast_memview_src_to_dst(src, dst, code):
     """
@@ -662,7 +665,7 @@ def get_axes_specs(env, axes):
             if entry.name in view_constant_to_access_packing:
                 axes_specs.append(view_constant_to_access_packing[entry.name])
             else:
-                raise CompilerError(axis.step.pos, INVALID_ERR)
+                raise CompileError(axis.step.pos, INVALID_ERR)
         else:
             raise CompileError(axis.step.pos, INVALID_ERR)
......
@@ -1289,6 +1289,11 @@ class CVarDefNode(StatNode):
                     "Non-trivial type declarators in shared declaration (e.g. mix of pointers and values). " +
                     "Each pointer declaration should be on its own line.", 1)

+            create_extern_wrapper = (self.overridable
+                                     and self.visibility == 'extern'
+                                     and env.is_module_scope)
+            if create_extern_wrapper:
+                declarator.overridable = False
             if isinstance(declarator, CFuncDeclaratorNode):
                 name_declarator, type = declarator.analyse(base_type, env, directive_locals=self.directive_locals)
             else:
@@ -1314,6 +1319,9 @@ class CVarDefNode(StatNode):
                 self.entry.directive_locals = copy.copy(self.directive_locals)
                 if 'staticmethod' in env.directives:
                     type.is_static_method = True
+                if create_extern_wrapper:
+                    self.entry.type.create_to_py_utility_code(env)
+                    self.entry.create_wrapper = True
             else:
                 if self.directive_locals:
                     error(self.pos, "Decorators can only be followed by functions")
@@ -1601,7 +1609,7 @@ class FuncDefNode(StatNode, BlockNode):
             if arg.name in directive_locals:
                 type_node = directive_locals[arg.name]
                 other_type = type_node.analyse_as_type(env)
-            elif isinstance(arg, CArgDeclNode) and arg.annotation:
+            elif isinstance(arg, CArgDeclNode) and arg.annotation and env.directives['annotation_typing']:
                 type_node = arg.annotation
                 other_type = arg.inject_type_from_annotations(env)
                 if other_type is None:
......
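The last hunk makes annotation-based typing conditional on the ``annotation_typing`` directive; a hedged pure-Python-mode sketch (function name invented)::

    # cython: annotation_typing=True
    # illustrative snippet, not part of this commit
    import cython

    def scale(x: cython.double, factor: cython.double) -> cython.double:
        # with "# cython: annotation_typing=False" the annotations would be
        # ignored and x/factor would remain untyped Python objects
        return x * factor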
@@ -1244,6 +1244,9 @@ class CConstType(BaseType):
     def declaration_code(self, entity_code,
             for_display = 0, dll_linkage = None, pyrex = 0):
+        if for_display or pyrex:
+            return "const " + self.const_base_type.declaration_code(entity_code, for_display, dll_linkage, pyrex)
+        else:
             return self.const_base_type.declaration_code("const %s" % entity_code, for_display, dll_linkage, pyrex)

     def specialize(self, values):
@@ -1539,8 +1542,10 @@ class CBIntType(CIntType):
     def declaration_code(self, entity_code,
             for_display = 0, dll_linkage = None, pyrex = 0):
-        if pyrex or for_display:
+        if for_display:
             base_code = 'bool'
+        elif pyrex:
+            base_code = 'bint'
         else:
             base_code = public_decl('int', dll_linkage)
         return self.base_declaration_code(base_code, entity_code)
@@ -2410,6 +2415,10 @@ class CFuncType(CType):
             return 0
         if not self.same_calling_convention_as(other_type):
             return 0
+        if self.exception_value != other_type.exception_value:
+            return 0
+        if self.exception_check != other_type.exception_check:
+            return 0
         return 1

     def compatible_signature_with(self, other_type, as_cmethod = 0):
@@ -2444,10 +2453,14 @@ class CFuncType(CType):
             return 0
         if self.nogil != other_type.nogil:
             return 0
+        if self.exception_value != other_type.exception_value:
+            return 0
+        if not self.exception_check and other_type.exception_check:
+            # a redundant exception check doesn't make functions incompatible, but a missing one does
+            return 0
         self.original_sig = other_type.original_sig or other_type
         return 1

     def narrower_c_signature_than(self, other_type, as_cmethod = 0):
         return self.narrower_c_signature_than_resolved_type(other_type.resolve(), as_cmethod)
@@ -2471,6 +2484,11 @@ class CFuncType(CType):
             return 0
         if not self.return_type.subtype_of_resolved_type(other_type.return_type):
             return 0
+        if self.exception_value != other_type.exception_value:
+            return 0
+        if not self.exception_check and other_type.exception_check:
+            # a redundant exception check doesn't make functions incompatible, but a missing one does
+            return 0
         return 1

     def same_calling_convention_as(self, other):
@@ -2487,22 +2505,12 @@ class CFuncType(CType):
         sc2 = other.calling_convention == '__stdcall'
         return sc1 == sc2

-    def same_exception_signature_as(self, other_type):
-        return self.same_exception_signature_as_resolved_type(
-            other_type.resolve())
-
-    def same_exception_signature_as_resolved_type(self, other_type):
-        return self.exception_value == other_type.exception_value \
-            and self.exception_check == other_type.exception_check
-
     def same_as_resolved_type(self, other_type, as_cmethod = 0):
         return self.same_c_signature_as_resolved_type(other_type, as_cmethod) \
-            and self.same_exception_signature_as_resolved_type(other_type) \
             and self.nogil == other_type.nogil

     def pointer_assignable_from_resolved_type(self, other_type):
         return self.same_c_signature_as_resolved_type(other_type) \
-            and self.same_exception_signature_as_resolved_type(other_type) \
             and not (self.nogil and not other_type.nogil)

     def declaration_code(self, entity_code,
@@ -2649,6 +2657,74 @@ class CFuncType(CType):
         assert not self.is_fused
         specialize_entry(entry, cname)
def create_to_py_utility_code(self, env):
# FIXME: it seems we're trying to coerce in more cases than we should
if self.has_varargs or self.optional_arg_count:
return False
if self.to_py_function is not None:
return self.to_py_function
from .UtilityCode import CythonUtilityCode
import re
safe_typename = re.sub('[^a-zA-Z0-9]', '__', self.declaration_code("", pyrex=1))
to_py_function = "__Pyx_CFunc_%s_to_py" % safe_typename
for arg in self.args:
if not arg.type.is_pyobject and not arg.type.create_from_py_utility_code(env):
return False
if not (self.return_type.is_pyobject or self.return_type.is_void or
self.return_type.create_to_py_utility_code(env)):
return False
def declared_type(ctype):
type_displayname = str(ctype.declaration_code("", for_display=True))
if ctype.is_pyobject:
arg_ctype = type_name = type_displayname
if ctype.is_builtin_type:
arg_ctype = ctype.name
elif not ctype.is_extension_type:
type_name = 'object'
type_displayname = None
else:
type_displayname = repr(type_displayname)
elif ctype is c_bint_type:
type_name = arg_ctype = 'bint'
else:
type_name = arg_ctype = type_displayname
if ctype is c_double_type:
type_displayname = 'float'
else:
type_displayname = repr(type_displayname)
return type_name, arg_ctype, type_displayname
class Arg(object):
def __init__(self, arg_name, arg_type):
self.name = arg_name
self.type = arg_type
self.type_cname, self.ctype, self.type_displayname = declared_type(arg_type)
if self.return_type.is_void:
except_clause = 'except *'
elif self.return_type.is_pyobject:
except_clause = ''
elif self.exception_value:
except_clause = ('except? %s' if self.exception_check else 'except %s') % self.exception_value
else:
except_clause = 'except *'
context = {
'cname': to_py_function,
'args': [Arg(arg.name or 'arg%s' % ix, arg.type) for ix, arg in enumerate(self.args)],
'return_type': Arg('return', self.return_type),
'except_clause': except_clause,
}
# FIXME: directives come from first defining environment and do not adapt for reuse
env.use_utility_code(CythonUtilityCode.load(
"cfunc.to_py", "CFuncConvert.pyx",
outer_module_scope=env.global_scope(), # need access to types declared in module
context=context, compiler_directives=dict(env.directives)))
self.to_py_function = to_py_function
return True
 def specialize_entry(entry, cname):
     """
@@ -3161,7 +3237,7 @@ class CppClassType(CType):
         if self == actual:
             return {}
         # TODO(robertwb): Actual type equality.
-        elif self.empty_declaration_code() == actual.template_type.declaration_code(""):
+        elif self.empty_declaration_code() == actual.template_type.empty_declaration_code():
             return reduce(
                 merge_template_deductions,
                 [formal_param.deduce_template_params(actual_param) for (formal_param, actual_param) in zip(self.templates, actual.templates)],
......
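The new ``create_to_py_utility_code()`` above is what lets plain C functions coerce to Python callables (the changelog's first new feature); a minimal, hypothetical usage sketch::

    # illustrative .pyx snippet, not part of this commit
    cdef double square(double x):
        return x * x

    def get_callback():
        # the cdef function is wrapped on the fly into a Python callable object
        return square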
@@ -4,10 +4,14 @@ import cython
 from ..Plex.Scanners cimport Scanner

+cdef get_lexicon()
+cdef initial_compile_time_env()
+
 cdef class Method:
     cdef object name
     cdef object __name__

+@cython.final
 cdef class CompileTimeScope:
     cdef public dict entries
     cdef public CompileTimeScope outer
@@ -15,6 +19,7 @@ cdef class CompileTimeScope:
     cdef lookup_here(self, name)
     cpdef lookup(self, name)

+@cython.final
 cdef class PyrexScanner(Scanner):
     cdef public context
     cdef public list included_files
......
@@ -5,13 +5,15 @@
 from __future__ import absolute_import

+import cython
+cython.declare(EncodedString=object, make_lexicon=object, lexicon=object,
+               any_string_prefix=unicode, IDENT=unicode,
+               print_function=object, error=object, warning=object,
+               os=object, platform=object)
+
 import os
 import platform

-import cython
-cython.declare(EncodedString=object, any_string_prefix=unicode, IDENT=unicode,
-               print_function=object, error=object, warning=object)
-
 from .. import Utils
 from ..Plex.Scanners import Scanner
 from ..Plex.Errors import UnrecognizedInput
@@ -28,12 +30,14 @@ scanner_dump_file = None

 lexicon = None

 def get_lexicon():
     global lexicon
     if not lexicon:
         lexicon = make_lexicon()
     return lexicon

 #------------------------------------------------------------------

 py_reserved_words = [
@@ -49,6 +53,7 @@ pyx_reserved_words = py_reserved_words + [
     "cimport", "DEF", "IF", "ELIF", "ELSE"
 ]

 class Method(object):
     def __init__(self, name):
@@ -58,6 +63,7 @@ class Method(object):
     def __call__(self, stream, text):
         return getattr(stream, self.name)(text)

 #------------------------------------------------------------------

 class CompileTimeScope(object):
@@ -88,6 +94,7 @@ class CompileTimeScope(object):
         else:
             raise

 def initial_compile_time_env():
     benv = CompileTimeScope()
     names = ('UNAME_SYSNAME', 'UNAME_NODENAME', 'UNAME_RELEASE',
@@ -116,6 +123,7 @@ def initial_compile_time_env():
     denv = CompileTimeScope(benv)
     return denv

 #------------------------------------------------------------------

 class SourceDescriptor(object):
@@ -166,6 +174,7 @@ class SourceDescriptor(object):
         except AttributeError:
             return False

 class FileSourceDescriptor(SourceDescriptor):
     """
     Represents a code source. A code source is a more generic abstraction
@@ -235,6 +244,7 @@ class FileSourceDescriptor(SourceDescriptor):
     def __repr__(self):
         return "<FileSourceDescriptor:%s>" % self.filename

 class StringSourceDescriptor(SourceDescriptor):
     """
     Instances of this class can be used instead of a filenames if the
@@ -275,6 +285,7 @@ class StringSourceDescriptor(SourceDescriptor):
     def __repr__(self):
         return "<StringSourceDescriptor:%s>" % self.name

 #------------------------------------------------------------------

 class PyrexScanner(Scanner):
@@ -284,8 +295,8 @@ class PyrexScanner(Scanner):
     #  compile_time_eval   boolean   In a true conditional compilation context
     #  compile_time_expr   boolean   In a compile-time expression context

-    def __init__(self, file, filename, parent_scanner = None,
-                 scope = None, context = None, source_encoding=None, parse_comments=True, initial_pos=None):
+    def __init__(self, file, filename, parent_scanner=None,
+                 scope=None, context=None, source_encoding=None, parse_comments=True, initial_pos=None):
         Scanner.__init__(self, get_lexicon(), file, filename, initial_pos)
         if parent_scanner:
             self.context = parent_scanner.context
@@ -299,8 +310,7 @@ class PyrexScanner(Scanner):
             self.compile_time_env = initial_compile_time_env()
             self.compile_time_eval = 1
             self.compile_time_expr = 0
-            if hasattr(context.options, 'compile_time_env') and \
-                    context.options.compile_time_env is not None:
+            if getattr(context.options, 'compile_time_env', None):
                 self.compile_time_env.update(context.options.compile_time_env)
         self.parse_comments = parse_comments
         self.source_encoding = source_encoding
@@ -326,11 +336,11 @@ class PyrexScanner(Scanner):
         return self.indentation_stack[-1]

     def open_bracket_action(self, text):
-        self.bracket_nesting_level = self.bracket_nesting_level + 1
+        self.bracket_nesting_level += 1
         return text

     def close_bracket_action(self, text):
-        self.bracket_nesting_level = self.bracket_nesting_level - 1
+        self.bracket_nesting_level -= 1
         return text

     def newline_action(self, text):
@@ -406,6 +416,7 @@ class PyrexScanner(Scanner):
             sy, systring = self.read()
         except UnrecognizedInput:
             self.error("Unrecognized character")
+            return  # just a marker, error() always raises
         if sy == IDENT:
             if systring in self.keywords:
                 if systring == u'print' and print_function in self.context.future_directives:
@@ -445,21 +456,21 @@ class PyrexScanner(Scanner):
         # This method should be added to Plex
         self.queue.insert(0, (token, value))

-    def error(self, message, pos = None, fatal = True):
+    def error(self, message, pos=None, fatal=True):
         if pos is None:
             pos = self.position()
         if self.sy == 'INDENT':
-            err = error(pos, "Possible inconsistent indentation")
+            error(pos, "Possible inconsistent indentation")
         err = error(pos, message)
         if fatal: raise err

-    def expect(self, what, message = None):
+    def expect(self, what, message=None):
         if self.sy == what:
             self.next()
         else:
             self.expected(what, message)

-    def expect_keyword(self, what, message = None):
+    def expect_keyword(self, what, message=None):
         if self.sy == IDENT and self.systring == what:
             self.next()
         else:
@@ -476,12 +487,10 @@ class PyrexScanner(Scanner):
             self.error("Expected '%s', found '%s'" % (what, found))

     def expect_indent(self):
-        self.expect('INDENT',
-                    "Expected an increase in indentation level")
+        self.expect('INDENT', "Expected an increase in indentation level")

     def expect_dedent(self):
-        self.expect('DEDENT',
-                    "Expected a decrease in indentation level")
+        self.expect('DEDENT', "Expected a decrease in indentation level")

     def expect_newline(self, message="Expected a newline", ignore_semicolon=False):
         # Expect either a newline or end of file
......
@@ -303,7 +303,7 @@ class Scope(object):
         self.name = name
         self.outer_scope = outer_scope
         self.parent_scope = parent_scope
-        mangled_name = "%d%s_" % (len(name), name)
+        mangled_name = "%d%s_" % (len(name), name.replace('.', '_dot_'))
         qual_scope = self.qualifying_scope()
         if qual_scope:
             self.qualified_name = qual_scope.qualify_name(name)
@@ -1044,15 +1044,13 @@ class ModuleScope(Scope):
     def global_scope(self):
         return self

-    def lookup(self, name):
+    def lookup(self, name, language_level=None):
         entry = self.lookup_here(name)
         if entry is not None:
             return entry
-        if self.context is not None:
-            language_level = self.context.language_level
-        else:
-            language_level = 3
+        if language_level is None:
+            language_level = self.context.language_level if self.context is not None else 3
+
         return self.outer_scope.lookup(name, language_level=language_level)
......
@@ -23,16 +23,19 @@ from . import UtilNodes
 class StringParseContext(Main.Context):
-    def __init__(self, name, include_directories=None):
-        if include_directories is None: include_directories = []
-        Main.Context.__init__(self, include_directories, {},
+    def __init__(self, name, include_directories=None, compiler_directives=None):
+        if include_directories is None:
+            include_directories = []
+        if compiler_directives is None:
+            compiler_directives = {}
+        Main.Context.__init__(self, include_directories, compiler_directives,
                               create_testscope=False)
         self.module_name = name

-    def find_module(self, module_name, relative_to = None, pos = None, need_pxd = 1):
+    def find_module(self, module_name, relative_to=None, pos=None, need_pxd=1):
         if module_name not in (self.module_name, 'cython'):
             raise AssertionError("Not yet supporting any cimports/includes from string code snippets")
-        return ModuleScope(module_name, parent_module = None, context = self)
+        return ModuleScope(module_name, parent_module=None, context=self)

 def parse_from_strings(name, code, pxds={}, level=None, initial_pos=None,
@@ -64,7 +67,7 @@ def parse_from_strings(name, code, pxds={}, level=None, initial_pos=None,
         initial_pos = (name, 1, 0)
     code_source = StringSourceDescriptor(name, code)

-    scope = context.find_module(module_name, pos = initial_pos, need_pxd = 0)
+    scope = context.find_module(module_name, pos=initial_pos, need_pxd=False)

     buf = StringIO(code)
@@ -190,20 +193,27 @@ class TemplateTransform(VisitorTransform):
         else:
             return self.visit_Node(node)

 def copy_code_tree(node):
     return TreeCopier()(node)

-INDENT_RE = re.compile(ur"^ *")
+
+_match_indent = re.compile(ur"^ *").match
+
 def strip_common_indent(lines):
-    "Strips empty lines and common indentation from the list of strings given in lines"
+    """Strips empty lines and common indentation from the list of strings given in lines"""
     # TODO: Facilitate textwrap.indent instead
     lines = [x for x in lines if x.strip() != u""]
-    minindent = min([len(INDENT_RE.match(x).group(0)) for x in lines])
+    minindent = min([len(_match_indent(x).group(0)) for x in lines])
     lines = [x[minindent:] for x in lines]
     return lines

 class TreeFragment(object):
-    def __init__(self, code, name="(tree fragment)", pxds={}, temps=[], pipeline=[], level=None, initial_pos=None):
+    def __init__(self, code, name=None, pxds={}, temps=[], pipeline=[], level=None, initial_pos=None):
+        if not name:
+            name = "(tree fragment)"
         if isinstance(code, unicode):
             def fmt(x): return u"\n".join(strip_common_indent(x.split(u"\n")))
......
@@ -494,24 +494,22 @@ def find_spanning_type(type1, type2):
         return PyrexTypes.c_double_type
     return result_type

-def aggressive_spanning_type(types, might_overflow, pos):
-    result_type = reduce(find_spanning_type, types)
+def simply_type(result_type, pos):
     if result_type.is_reference:
         result_type = result_type.ref_base_type
     if result_type.is_const:
         result_type = result_type.const_base_type
     if result_type.is_cpp_class:
         result_type.check_nullary_constructor(pos)
+    if result_type.is_array:
+        result_type = PyrexTypes.c_ptr_type(result_type.base_type)
     return result_type

+def aggressive_spanning_type(types, might_overflow, pos):
+    return simply_type(reduce(find_spanning_type, types), pos)
+
 def safe_spanning_type(types, might_overflow, pos):
-    result_type = reduce(find_spanning_type, types)
-    if result_type.is_const:
-        result_type = result_type.const_base_type
-    if result_type.is_reference:
-        result_type = result_type.ref_base_type
-    if result_type.is_cpp_class:
-        result_type.check_nullary_constructor(pos)
+    result_type = simply_type(reduce(find_spanning_type, types), pos)
     if result_type.is_pyobject:
         # In theory, any specific Python type is always safe to
         # infer. However, inferring str can cause some existing code
......
@@ -8,6 +8,8 @@ from . import Code
 class NonManglingModuleScope(Symtab.ModuleScope):
+    cpp = False
+
     def __init__(self, prefix, *args, **kw):
         self.prefix = prefix
         self.cython_scope = None
@@ -28,12 +30,11 @@ class NonManglingModuleScope(Symtab.ModuleScope):
         else:
             return Symtab.ModuleScope.mangle(self, prefix)

 class CythonUtilityCodeContext(StringParseContext):
     scope = None

-    def find_module(self, module_name, relative_to = None, pos = None,
-                    need_pxd = 1):
+    def find_module(self, module_name, relative_to=None, pos=None, need_pxd=True):
         if module_name != self.module_name:
             if module_name not in self.modules:
                 raise AssertionError("Only the cython cimport is supported.")
@@ -41,10 +42,8 @@ class CythonUtilityCodeContext(StringParseContext):
             return self.modules[module_name]

         if self.scope is None:
-            self.scope = NonManglingModuleScope(self.prefix,
-                                                module_name,
-                                                parent_module=None,
-                                                context=self)
+            self.scope = NonManglingModuleScope(
+                self.prefix, module_name, parent_module=None, context=self)

         return self.scope
@@ -69,7 +68,8 @@ class CythonUtilityCode(Code.UtilityCodeBase):
     is_cython_utility = True

     def __init__(self, impl, name="__pyxutil", prefix="", requires=None,
-                 file=None, from_scope=None, context=None):
+                 file=None, from_scope=None, context=None, compiler_directives=None,
+                 outer_module_scope=None):
         # 1) We need to delay the parsing/processing, so that all modules can be
         #    imported without import loops
         # 2) The same utility code object can be used for multiple source files;
@@ -84,6 +84,20 @@ class CythonUtilityCode(Code.UtilityCodeBase):
         self.prefix = prefix
         self.requires = requires or []
         self.from_scope = from_scope
+        self.outer_module_scope = outer_module_scope
+        self.compiler_directives = compiler_directives
+
+    def __eq__(self, other):
+        if isinstance(other, CythonUtilityCode):
+            return self._equality_params() == other._equality_params()
+        else:
+            return False
+
+    def _equality_params(self):
+        return self.impl, self.outer_module_scope, self.compiler_directives
+
+    def __hash__(self):
+        return hash(self.impl)

     def get_tree(self, entries_only=False, cython_scope=None):
         from .AnalysedTreeTransforms import AutoTestDictTransform
@@ -93,12 +107,13 @@ class CythonUtilityCode(Code.UtilityCodeBase):
         excludes = [AutoTestDictTransform]

         from . import Pipeline, ParseTreeTransforms
-        context = CythonUtilityCodeContext(self.name)
+        context = CythonUtilityCodeContext(
+            self.name, compiler_directives=self.compiler_directives)
         context.prefix = self.prefix
         context.cython_scope = cython_scope
         #context = StringParseContext(self.name)
-        tree = parse_from_strings(self.name, self.impl, context=context,
-                                  allow_struct_enum_decorator=True)
+        tree = parse_from_strings(
+            self.name, self.impl, context=context, allow_struct_enum_decorator=True)
         pipeline = Pipeline.create_pipeline(context, 'pyx', exclude_classes=excludes)

         if entries_only:
@@ -126,6 +141,16 @@ class CythonUtilityCode(Code.UtilityCodeBase):
             pipeline = Pipeline.insert_into_pipeline(pipeline, scope_transform,
                                                      before=transform)

+        if self.outer_module_scope:
+            # inject outer module between utility code module and builtin module
+            def scope_transform(module_node):
+                module_node.scope.outer_scope = self.outer_module_scope
+                return module_node
+
+            transform = ParseTreeTransforms.AnalyseDeclarationsTransform
+            pipeline = Pipeline.insert_into_pipeline(pipeline, scope_transform,
+                                                     before=transform)
+
         (err, tree) = Pipeline.run_pipeline(pipeline, tree, printtree=False)
         assert not err, err
         return tree
......
# http://en.wikipedia.org/wiki/C_date_and_time_functions
from libc.stddef cimport wchar_t
cdef extern from "time.h" nogil:
ctypedef long clock_t
ctypedef long time_t
enum: CLOCKS_PER_SEC
clock_t clock() # CPU time
time_t time(time_t *) # wall clock time since Unix epoch
cdef struct tm:
int tm_sec
int tm_min
int tm_hour
int tm_mday
int tm_mon
int tm_year
int tm_wday
int tm_yday
int tm_isdst
char *tm_zone
long tm_gmtoff
int daylight # global state
long timezone
char *tzname[2]
void tzset()
char *asctime(const tm *)
char *asctime_r(const tm *, char *)
char *ctime(const time_t *)
char *ctime_r(const time_t *, char *)
double difftime(time_t, time_t)
tm *getdate(const char *)
tm *gmtime(const time_t *)
tm *gmtime_r(const time_t *, tm *)
tm *localtime(const time_t *)
tm *localtime_r(const time_t *, tm *)
time_t mktime(tm *)
size_t strftime(char *, size_t, const char *, const tm *)
size_t wcsftime(wchar_t *str, size_t cnt, const wchar_t *fmt, tm *time)
# POSIX not stdC
char *strptime(const char *, const char *, tm *)
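Assuming these declarations land in ``libc/time.pxd`` as the changelog entry says, a minimal usage sketch (purely illustrative)::

    # illustrative .pyx snippet, not part of this commit
    from libc.time cimport time, time_t, localtime, strftime, tm

    def local_timestamp():
        cdef time_t now = time(NULL)
        cdef tm *parts = localtime(&now)
        cdef char buf[64]
        strftime(buf, sizeof(buf), b"%Y-%m-%d %H:%M:%S", parts)
        return (<bytes>buf).decode('ascii')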
 # http://pubs.opengroup.org/onlinepubs/009695399/basedefs/sys/resource.h.html

-from posix.sys_time cimport timeval
+from posix.time cimport timeval
 from posix.types cimport id_t

 cdef extern from "sys/resource.h" nogil:
......
# http://pubs.opengroup.org/onlinepubs/009695399/basedefs/sys/time.h.html
from posix.types cimport suseconds_t, time_t
cdef extern from "sys/time.h" nogil:
enum: ITIMER_REAL
enum: ITIMER_VIRTUAL
enum: ITIMER_PROF
cdef struct timezone:
int tz_minuteswest
int dsttime
cdef struct timeval:
time_t tv_sec
suseconds_t tv_usec
cdef struct itimerval:
timeval it_interval
timeval it_value
int getitimer(int, itimerval *)
int gettimeofday(timeval *tp, timezone *tzp)
int setitimer(int, const itimerval *, itimerval *)
 # http://pubs.opengroup.org/onlinepubs/009695399/basedefs/sys/time.h.html

-from posix.types cimport suseconds_t, time_t, clockid_t, timer_t
 from posix.signal cimport sigevent
+from posix.types cimport clock_t, clockid_t, suseconds_t, time_t, timer_t

-cdef extern from "time.h" nogil:
-    enum: CLOCKS_PER_SEC
+cdef extern from "sys/time.h" nogil:
     enum: CLOCK_PROCESS_CPUTIME_ID
     enum: CLOCK_THREAD_CPUTIME_ID
@@ -33,55 +31,44 @@ cdef extern from "time.h" nogil:
     enum: CLOCK_REALTIME_ALARM
     enum: CLOCK_BOOTTIME_ALARM

+    enum: ITIMER_REAL
+    enum: ITIMER_VIRTUAL
+    enum: ITIMER_PROF
+
+    cdef struct timezone:
+        int tz_minuteswest
+        int dsttime
+
+    cdef struct timeval:
+        time_t tv_sec
+        suseconds_t tv_usec
+
     cdef struct timespec:
         time_t tv_sec
         long tv_nsec

+    cdef struct itimerval:
+        timeval it_interval
+        timeval it_value
+
     cdef struct itimerspec:
         timespec it_interval
         timespec it_value

-    cdef struct tm:
-        int tm_sec
-        int tm_min
-        int tm_hour
-        int tm_mday
-        int tm_mon
-        int tm_year
-        int tm_wday
-        int tm_yday
-        int tm_isdst
-        char *tm_zone
-        long tm_gmtoff
-
-    char *asctime(const tm *)
-    char *asctime_r(const tm *, char *)
-    clock_t clock()
+    int nanosleep(const timespec *, timespec *)
+
+    int getitimer(int, itimerval *)
+    int gettimeofday(timeval *tp, timezone *tzp)
+    int setitimer(int, const itimerval *, itimerval *)
+
     int clock_getcpuclockid(pid_t, clockid_t *)
     int clock_getres(clockid_t, timespec *)
     int clock_gettime(clockid_t, timespec *)
     int clock_nanosleep(clockid_t, int, const timespec *, timespec *)
     int clock_settime(clockid_t, const timespec *)
-    char *ctime(const time_t *)
-    char *ctime_r(const time_t *, char *)
-    double difftime(time_t, time_t)
-    tm *getdate(const char *)
-    tm *gmtime(const time_t *)
-    tm *gmtime_r(const time_t *, tm *)
-    tm *localtime(const time_t *)
-    tm *localtime_r(const time_t *, tm *)
-    time_t mktime(tm *)
-    int nanosleep(const timespec *, timespec *)
-    size_t strftime(char *, size_t, const char *, const tm *)
-    char *strptime(const char *, const char *, tm *)
-    time_t time(time_t *)
+
     int timer_create(clockid_t, sigevent *, timer_t *)
     int timer_delete(timer_t)
     int timer_gettime(timer_t, itimerspec *)
     int timer_getoverrun(timer_t)
     int timer_settime(timer_t, int, const itimerspec *, itimerspec *)
-    void tzset()
-
-    int daylight
-    long timezone
-    char *tzname[2]

 cdef extern from "sys/types.h":
     ctypedef long blkcnt_t
     ctypedef long blksize_t
-    ctypedef long clock_t
     ctypedef long clockid_t
     ctypedef long dev_t
     ctypedef long gid_t
......
@@ -7,7 +7,6 @@
 #=======================================================================

 class Action(object):
     def perform(self, token_stream, text):
         pass  # abstract
@@ -78,15 +77,18 @@ class Ignore(Action):
     to be ignored. See the docstring of Plex.Lexicon for more
     information.
     """

     def perform(self, token_stream, text):
         return None

     def __repr__(self):
         return "IGNORE"

 IGNORE = Ignore()
 #IGNORE.__doc__ = Ignore.__doc__

 class Text(Action):
     """
     TEXT is a Plex action which causes the text of a token to
@@ -100,6 +102,7 @@ class Text(Action):
     def __repr__(self):
         return "TEXT"

 TEXT = Text()
 #TEXT.__doc__ = Text.__doc__
......
@@ -13,7 +13,7 @@ from .Machines import LOWEST_PRIORITY
 from .Transitions import TransitionMap

-def nfa_to_dfa(old_machine, debug = None):
+def nfa_to_dfa(old_machine, debug=None):
     """
     Given a nondeterministic Machine, return a new equivalent
     Machine which is deterministic.
@@ -50,6 +50,7 @@ def nfa_to_dfa(old_machine, debug=None):
         state_map.dump(debug)
     return new_machine

 def set_epsilon_closure(state_set):
     """
     Given a set of states, return the union of the epsilon
@@ -61,6 +62,7 @@ def set_epsilon_closure(state_set):
             result[state2] = 1
     return result

 def epsilon_closure(state):
     """
     Return the set of states reachable from the given state
@@ -74,6 +76,7 @@ def epsilon_closure(state):
         add_to_epsilon_closure(result, state)
     return result

 def add_to_epsilon_closure(state_set, state):
     """
     Recursively add to |state_set| states reachable from the given state
@@ -86,6 +89,7 @@ def add_to_epsilon_closure(state_set, state):
         for state2 in state_set_2:
             add_to_epsilon_closure(state_set, state2)

 class StateMap(object):
     """
     Helper class used by nfa_to_dfa() to map back and forth between
@@ -98,7 +102,7 @@ class StateMap(object):
     def __init__(self, new_machine):
         self.new_machine = new_machine
         self.old_to_new_dict = {}
-        self.new_to_old_dict= {}
+        self.new_to_old_dict = {}

     def old_to_new(self, old_state_set):
         """
@@ -129,12 +133,12 @@ class StateMap(object):
                     best_priority = priority
         return best_action

     # def old_to_new_set(self, old_state_set):
     #     """
     #     Return the new state corresponding to a set of old states as
     #     a singleton set.
     #     """
     #     return {self.old_to_new(old_state_set):1}

     def new_to_old(self, new_state):
         """Given a new state, return a set of corresponding old states."""
@@ -151,6 +155,7 @@ class StateMap(object):
     def dump(self, file):
         from .Transitions import state_set_str

         for new_state in self.new_machine.states:
             old_state_set = self.new_to_old_dict[id(new_state)]
             file.write("   State %s <-- %s\n" % (
......
...@@ -6,32 +6,39 @@ ...@@ -6,32 +6,39 @@
# #
#======================================================================= #=======================================================================
class PlexError(Exception): class PlexError(Exception):
message = "" message = ""
class PlexTypeError(PlexError, TypeError): class PlexTypeError(PlexError, TypeError):
pass pass
class PlexValueError(PlexError, ValueError): class PlexValueError(PlexError, ValueError):
pass pass
class InvalidRegex(PlexError): class InvalidRegex(PlexError):
pass pass
class InvalidToken(PlexError):
class InvalidToken(PlexError):
def __init__(self, token_number, message): def __init__(self, token_number, message):
PlexError.__init__(self, "Token number %d: %s" % (token_number, message)) PlexError.__init__(self, "Token number %d: %s" % (token_number, message))
class InvalidScanner(PlexError): class InvalidScanner(PlexError):
pass pass
class AmbiguousAction(PlexError): class AmbiguousAction(PlexError):
message = "Two tokens with different actions can match the same string" message = "Two tokens with different actions can match the same string"
def __init__(self): def __init__(self):
pass pass
class UnrecognizedInput(PlexError): class UnrecognizedInput(PlexError):
scanner = None scanner = None
position = None position = None
...@@ -43,8 +50,5 @@ class UnrecognizedInput(PlexError): ...@@ -43,8 +50,5 @@ class UnrecognizedInput(PlexError):
self.state_name = state_name self.state_name = state_name
def __str__(self): def __str__(self):
return ("'%s', line %d, char %d: Token not recognised in state %s" return ("'%s', line %d, char %d: Token not recognised in state %r" % (
% (self.position + (repr(self.state_name),))) self.position + (self.state_name,)))
...@@ -38,6 +38,7 @@ class State(object): ...@@ -38,6 +38,7 @@ class State(object):
self.name = name self.name = name
self.tokens = tokens self.tokens = tokens
class Lexicon(object): class Lexicon(object):
""" """
Lexicon(specification) builds a lexical analyser from the given Lexicon(specification) builds a lexical analyser from the given
...@@ -113,11 +114,12 @@ class Lexicon(object): ...@@ -113,11 +114,12 @@ class Lexicon(object):
machine = None # Machine machine = None # Machine
tables = None # StateTableMachine tables = None # StateTableMachine
def __init__(self, specifications, debug = None, debug_flags = 7, timings = None): def __init__(self, specifications, debug=None, debug_flags=7, timings=None):
if type(specifications) != types.ListType: if type(specifications) != types.ListType:
raise Errors.InvalidScanner("Scanner definition is not a list") raise Errors.InvalidScanner("Scanner definition is not a list")
if timings: if timings:
from .Timing import time from .Timing import time
total_time = 0.0 total_time = 0.0
time1 = time() time1 = time()
nfa = Machines.Machine() nfa = Machines.Machine()
...@@ -129,11 +131,11 @@ class Lexicon(object): ...@@ -129,11 +131,11 @@ class Lexicon(object):
for token in spec.tokens: for token in spec.tokens:
self.add_token_to_machine( self.add_token_to_machine(
nfa, user_initial_state, token, token_number) nfa, user_initial_state, token, token_number)
token_number = token_number + 1 token_number += 1
elif type(spec) == types.TupleType: elif type(spec) == types.TupleType:
self.add_token_to_machine( self.add_token_to_machine(
nfa, default_initial_state, spec, token_number) nfa, default_initial_state, spec, token_number)
token_number = token_number + 1 token_number += 1
else: else:
raise Errors.InvalidToken( raise Errors.InvalidToken(
token_number, token_number,
...@@ -145,7 +147,7 @@ class Lexicon(object): ...@@ -145,7 +147,7 @@ class Lexicon(object):
if debug and (debug_flags & 1): if debug and (debug_flags & 1):
debug.write("\n============= NFA ===========\n") debug.write("\n============= NFA ===========\n")
nfa.dump(debug) nfa.dump(debug)
dfa = DFA.nfa_to_dfa(nfa, debug = (debug_flags & 3) == 3 and debug) dfa = DFA.nfa_to_dfa(nfa, debug=(debug_flags & 3) == 3 and debug)
if timings: if timings:
time4 = time() time4 = time()
total_time = total_time + (time4 - time3) total_time = total_time + (time4 - time3)
...@@ -176,8 +178,8 @@ class Lexicon(object): ...@@ -176,8 +178,8 @@ class Lexicon(object):
action = Actions.Call(action_spec) action = Actions.Call(action_spec)
final_state = machine.new_state() final_state = machine.new_state()
re.build_machine(machine, initial_state, final_state, re.build_machine(machine, initial_state, final_state,
match_bol = 1, nocase = 0) match_bol=1, nocase=0)
final_state.set_action(action, priority = -token_number) final_state.set_action(action, priority=-token_number)
except Errors.PlexError, e: except Errors.PlexError, e:
raise e.__class__("Token number %d: %s" % (token_number, e)) raise e.__class__("Token number %d: %s" % (token_number, e))
......
...@@ -59,6 +59,7 @@ class Machine(object): ...@@ -59,6 +59,7 @@ class Machine(object):
for s in self.states: for s in self.states:
s.dump(file) s.dump(file)
class Node(object): class Node(object):
"""A state of an NFA or DFA.""" """A state of an NFA or DFA."""
transitions = None # TransitionMap transitions = None # TransitionMap
...@@ -111,7 +112,7 @@ class Node(object): ...@@ -111,7 +112,7 @@ class Node(object):
# Header # Header
file.write(" State %d:\n" % self.number) file.write(" State %d:\n" % self.number)
# Transitions # Transitions
# self.dump_transitions(file) # self.dump_transitions(file)
self.transitions.dump(file) self.transitions.dump(file)
# Action # Action
action = self.action action = self.action
...@@ -122,21 +123,21 @@ class Node(object): ...@@ -122,21 +123,21 @@ class Node(object):
def __lt__(self, other): def __lt__(self, other):
return self.number < other.number return self.number < other.number
class FastMachine(object): class FastMachine(object):
""" """
FastMachine is a deterministic machine represented in a way that FastMachine is a deterministic machine represented in a way that
allows fast scanning. allows fast scanning.
""" """
initial_states = None # {state_name:state} initial_states = None # {state_name:state}
states = None # [state] states = None # [state] where state = {event:state, 'else':state, 'action':Action}
# where state = {event:state, 'else':state, 'action':Action}
next_number = 1 # for debugging next_number = 1 # for debugging
new_state_template = { new_state_template = {
'':None, 'bol':None, 'eol':None, 'eof':None, 'else':None '': None, 'bol': None, 'eol': None, 'eof': None, 'else': None
} }
def __init__(self, old_machine = None): def __init__(self, old_machine=None):
self.initial_states = initial_states = {} self.initial_states = initial_states = {}
self.states = [] self.states = []
if old_machine: if old_machine:
...@@ -159,7 +160,7 @@ class FastMachine(object): ...@@ -159,7 +160,7 @@ class FastMachine(object):
for state in self.states: for state in self.states:
state.clear() state.clear()
def new_state(self, action = None): def new_state(self, action=None):
number = self.next_number number = self.next_number
self.next_number = number + 1 self.next_number = number + 1
result = self.new_state_template.copy() result = self.new_state_template.copy()
...@@ -178,8 +179,8 @@ class FastMachine(object): ...@@ -178,8 +179,8 @@ class FastMachine(object):
state['else'] = new_state state['else'] = new_state
elif code1 != maxint: elif code1 != maxint:
while code0 < code1: while code0 < code1:
state[chr(code0)] = new_state state[unichr(code0)] = new_state
code0 = code0 + 1 code0 += 1
else: else:
state[event] = new_state state[event] = new_state
...@@ -241,10 +242,10 @@ class FastMachine(object): ...@@ -241,10 +242,10 @@ class FastMachine(object):
while i < n: while i < n:
c1 = ord(char_list[i]) c1 = ord(char_list[i])
c2 = c1 c2 = c1
i = i + 1 i += 1
while i < n and ord(char_list[i]) == c2 + 1: while i < n and ord(char_list[i]) == c2 + 1:
i = i + 1 i += 1
c2 = c2 + 1 c2 += 1
result.append((chr(c1), chr(c2))) result.append((chr(c1), chr(c2)))
return tuple(result) return tuple(result)
......
...@@ -42,14 +42,15 @@ def chars_to_ranges(s): ...@@ -42,14 +42,15 @@ def chars_to_ranges(s):
while i < n: while i < n:
code1 = ord(char_list[i]) code1 = ord(char_list[i])
code2 = code1 + 1 code2 = code1 + 1
i = i + 1 i += 1
while i < n and code2 >= ord(char_list[i]): while i < n and code2 >= ord(char_list[i]):
code2 = code2 + 1 code2 += 1
i = i + 1 i += 1
result.append(code1) result.append(code1)
result.append(code2) result.append(code2)
return result return result
def uppercase_range(code1, code2): def uppercase_range(code1, code2):
""" """
If the range of characters from code1 to code2-1 includes any If the range of characters from code1 to code2-1 includes any
...@@ -63,6 +64,7 @@ def uppercase_range(code1, code2): ...@@ -63,6 +64,7 @@ def uppercase_range(code1, code2):
else: else:
return None return None
def lowercase_range(code1, code2): def lowercase_range(code1, code2):
""" """
If the range of characters from code1 to code2-1 includes any If the range of characters from code1 to code2-1 includes any
...@@ -76,6 +78,7 @@ def lowercase_range(code1, code2): ...@@ -76,6 +78,7 @@ def lowercase_range(code1, code2):
else: else:
return None return None
def CodeRanges(code_list): def CodeRanges(code_list):
""" """
Given a list of codes as returned by chars_to_ranges, return Given a list of codes as returned by chars_to_ranges, return
...@@ -86,6 +89,7 @@ def CodeRanges(code_list): ...@@ -86,6 +89,7 @@ def CodeRanges(code_list):
re_list.append(CodeRange(code_list[i], code_list[i + 1])) re_list.append(CodeRange(code_list[i], code_list[i + 1]))
return Alt(*re_list) return Alt(*re_list)
def CodeRange(code1, code2): def CodeRange(code1, code2):
""" """
CodeRange(code1, code2) is an RE which matches any character CodeRange(code1, code2) is an RE which matches any character
...@@ -98,6 +102,7 @@ def CodeRange(code1, code2): ...@@ -98,6 +102,7 @@ def CodeRange(code1, code2):
else: else:
return RawCodeRange(code1, code2) return RawCodeRange(code1, code2)
# #
# Abstract classes # Abstract classes
# #
...@@ -211,6 +216,7 @@ class RE(object): ...@@ -211,6 +216,7 @@ class RE(object):
## def calc_str(self): ## def calc_str(self):
## return "Char(%s)" % repr(self.char) ## return "Char(%s)" % repr(self.char)
def Char(c): def Char(c):
""" """
Char(c) is an RE which matches the character |c|. Char(c) is an RE which matches the character |c|.
...@@ -222,6 +228,7 @@ def Char(c): ...@@ -222,6 +228,7 @@ def Char(c):
result.str = "Char(%s)" % repr(c) result.str = "Char(%s)" % repr(c)
return result return result
class RawCodeRange(RE): class RawCodeRange(RE):
""" """
RawCodeRange(code1, code2) is a low-level RE which matches any character RawCodeRange(code1, code2) is a low-level RE which matches any character
...@@ -252,6 +259,7 @@ class RawCodeRange(RE): ...@@ -252,6 +259,7 @@ class RawCodeRange(RE):
def calc_str(self): def calc_str(self):
return "CodeRange(%d,%d)" % (self.code1, self.code2) return "CodeRange(%d,%d)" % (self.code1, self.code2)
class _RawNewline(RE): class _RawNewline(RE):
""" """
RawNewline is a low-level RE which matches a newline character. RawNewline is a low-level RE which matches a newline character.
...@@ -266,6 +274,7 @@ class _RawNewline(RE): ...@@ -266,6 +274,7 @@ class _RawNewline(RE):
s = self.build_opt(m, initial_state, EOL) s = self.build_opt(m, initial_state, EOL)
s.add_transition((nl_code, nl_code + 1), final_state) s.add_transition((nl_code, nl_code + 1), final_state)
RawNewline = _RawNewline() RawNewline = _RawNewline()
...@@ -304,7 +313,7 @@ class Seq(RE): ...@@ -304,7 +313,7 @@ class Seq(RE):
i = len(re_list) i = len(re_list)
match_nl = 0 match_nl = 0
while i: while i:
i = i - 1 i -= 1
re = re_list[i] re = re_list[i]
if re.match_nl: if re.match_nl:
match_nl = 1 match_nl = 1
...@@ -354,7 +363,7 @@ class Alt(RE): ...@@ -354,7 +363,7 @@ class Alt(RE):
non_nullable_res.append(re) non_nullable_res.append(re)
if re.match_nl: if re.match_nl:
match_nl = 1 match_nl = 1
i = i + 1 i += 1
self.nullable_res = nullable_res self.nullable_res = nullable_res
self.non_nullable_res = non_nullable_res self.non_nullable_res = non_nullable_res
self.nullable = nullable self.nullable = nullable
...@@ -434,6 +443,7 @@ Empty.__doc__ = \ ...@@ -434,6 +443,7 @@ Empty.__doc__ = \
""" """
Empty.str = "Empty" Empty.str = "Empty"
def Str1(s): def Str1(s):
""" """
Str1(s) is an RE which matches the literal string |s|. Str1(s) is an RE which matches the literal string |s|.
...@@ -442,6 +452,7 @@ def Str1(s): ...@@ -442,6 +452,7 @@ def Str1(s):
result.str = "Str(%s)" % repr(s) result.str = "Str(%s)" % repr(s)
return result return result
def Str(*strs): def Str(*strs):
""" """
Str(s) is an RE which matches the literal string |s|. Str(s) is an RE which matches the literal string |s|.
...@@ -454,6 +465,7 @@ def Str(*strs): ...@@ -454,6 +465,7 @@ def Str(*strs):
result.str = "Str(%s)" % ','.join(map(repr, strs)) result.str = "Str(%s)" % ','.join(map(repr, strs))
return result return result
def Any(s): def Any(s):
""" """
Any(s) is an RE which matches any character in the string |s|. Any(s) is an RE which matches any character in the string |s|.
...@@ -463,6 +475,7 @@ def Any(s): ...@@ -463,6 +475,7 @@ def Any(s):
result.str = "Any(%s)" % repr(s) result.str = "Any(%s)" % repr(s)
return result return result
def AnyBut(s): def AnyBut(s):
""" """
AnyBut(s) is an RE which matches any character (including AnyBut(s) is an RE which matches any character (including
...@@ -475,6 +488,7 @@ def AnyBut(s): ...@@ -475,6 +488,7 @@ def AnyBut(s):
result.str = "AnyBut(%s)" % repr(s) result.str = "AnyBut(%s)" % repr(s)
return result return result
AnyChar = AnyBut("") AnyChar = AnyBut("")
AnyChar.__doc__ = \ AnyChar.__doc__ = \
""" """
...@@ -482,7 +496,8 @@ AnyChar.__doc__ = \ ...@@ -482,7 +496,8 @@ AnyChar.__doc__ = \
""" """
AnyChar.str = "AnyChar" AnyChar.str = "AnyChar"
def Range(s1, s2 = None):
def Range(s1, s2=None):
""" """
Range(c1, c2) is an RE which matches any single character in the range Range(c1, c2) is an RE which matches any single character in the range
|c1| to |c2| inclusive. |c1| to |c2| inclusive.
...@@ -495,11 +510,12 @@ def Range(s1, s2 = None): ...@@ -495,11 +510,12 @@ def Range(s1, s2 = None):
else: else:
ranges = [] ranges = []
for i in range(0, len(s1), 2): for i in range(0, len(s1), 2):
ranges.append(CodeRange(ord(s1[i]), ord(s1[i+1]) + 1)) ranges.append(CodeRange(ord(s1[i]), ord(s1[i + 1]) + 1))
result = Alt(*ranges) result = Alt(*ranges)
result.str = "Range(%s)" % repr(s1) result.str = "Range(%s)" % repr(s1)
return result return result
def Opt(re): def Opt(re):
""" """
Opt(re) is an RE which matches either |re| or the empty string. Opt(re) is an RE which matches either |re| or the empty string.
...@@ -508,6 +524,7 @@ def Opt(re): ...@@ -508,6 +524,7 @@ def Opt(re):
result.str = "Opt(%s)" % re result.str = "Opt(%s)" % re
return result return result
def Rep(re): def Rep(re):
""" """
Rep(re) is an RE which matches zero or more repetitions of |re|. Rep(re) is an RE which matches zero or more repetitions of |re|.
...@@ -516,12 +533,14 @@ def Rep(re): ...@@ -516,12 +533,14 @@ def Rep(re):
result.str = "Rep(%s)" % re result.str = "Rep(%s)" % re
return result return result
def NoCase(re): def NoCase(re):
""" """
NoCase(re) is an RE which matches the same strings as RE, but treating NoCase(re) is an RE which matches the same strings as RE, but treating
upper and lower case letters as equivalent. upper and lower case letters as equivalent.
""" """
return SwitchCase(re, nocase = 1) return SwitchCase(re, nocase=1)
def Case(re): def Case(re):
""" """
...@@ -529,7 +548,7 @@ def Case(re): ...@@ -529,7 +548,7 @@ def Case(re):
upper and lower case letters as distinct, i.e. it cancels the effect upper and lower case letters as distinct, i.e. it cancels the effect
of any enclosing NoCase(). of any enclosing NoCase().
""" """
return SwitchCase(re, nocase = 0) return SwitchCase(re, nocase=0)
# #
# RE Constants # RE Constants
......
...@@ -31,7 +31,7 @@ cdef class Scanner: ...@@ -31,7 +31,7 @@ cdef class Scanner:
@cython.locals(input_state=long) @cython.locals(input_state=long)
cdef next_char(self) cdef next_char(self)
@cython.locals(action=Action) @cython.locals(action=Action)
cdef tuple read(self) cpdef tuple read(self)
cdef tuple scan_a_token(self) cdef tuple scan_a_token(self)
cdef tuple position(self) cdef tuple position(self)
......
...@@ -10,6 +10,7 @@ ...@@ -10,6 +10,7 @@
from __future__ import absolute_import from __future__ import absolute_import
import cython import cython
cython.declare(BOL=object, EOL=object, EOF=object, NOT_FOUND=object) cython.declare(BOL=object, EOL=object, EOF=object, NOT_FOUND=object)
from . import Errors from . import Errors
...@@ -50,25 +51,25 @@ class Scanner(object): ...@@ -50,25 +51,25 @@ class Scanner(object):
""" """
# lexicon = None # Lexicon # lexicon = None # Lexicon
# stream = None # file-like object # stream = None # file-like object
# name = '' # name = ''
# buffer = '' # buffer = ''
# buf_start_pos = 0 # position in input of start of buffer # buf_start_pos = 0 # position in input of start of buffer
# next_pos = 0 # position in input of next char to read # next_pos = 0 # position in input of next char to read
# cur_pos = 0 # position in input of current char # cur_pos = 0 # position in input of current char
# cur_line = 1 # line number of current char # cur_line = 1 # line number of current char
# cur_line_start = 0 # position in input of start of current line # cur_line_start = 0 # position in input of start of current line
# start_pos = 0 # position in input of start of token # start_pos = 0 # position in input of start of token
# start_line = 0 # line number of start of token # start_line = 0 # line number of start of token
# start_col = 0 # position in line of start of token # start_col = 0 # position in line of start of token
# text = None # text of last token read # text = None # text of last token read
# initial_state = None # Node # initial_state = None # Node
# state_name = '' # Name of initial state # state_name = '' # Name of initial state
# queue = None # list of tokens to be returned # queue = None # list of tokens to be returned
# trace = 0 # trace = 0
def __init__(self, lexicon, stream, name = '', initial_pos = None): def __init__(self, lexicon, stream, name='', initial_pos=None):
""" """
Scanner(lexicon, stream, name = '') Scanner(lexicon, stream, name = '')
...@@ -143,7 +144,8 @@ class Scanner(object): ...@@ -143,7 +144,8 @@ class Scanner(object):
if self.trace: if self.trace:
print("Scanner: read: Performing %s %d:%d" % ( print("Scanner: read: Performing %s %d:%d" % (
action, self.start_pos, self.cur_pos)) action, self.start_pos, self.cur_pos))
text = self.buffer[self.start_pos - self.buf_start_pos : text = self.buffer[
self.start_pos - self.buf_start_pos:
self.cur_pos - self.buf_start_pos] self.cur_pos - self.buf_start_pos]
return (text, action) return (text, action)
else: else:
...@@ -198,19 +200,19 @@ class Scanner(object): ...@@ -198,19 +200,19 @@ class Scanner(object):
buf_index = next_pos - buf_start_pos buf_index = next_pos - buf_start_pos
if buf_index < buf_len: if buf_index < buf_len:
c = buffer[buf_index] c = buffer[buf_index]
next_pos = next_pos + 1 next_pos += 1
else: else:
discard = self.start_pos - buf_start_pos discard = self.start_pos - buf_start_pos
data = self.stream.read(0x1000) data = self.stream.read(0x1000)
buffer = self.buffer[discard:] + data buffer = self.buffer[discard:] + data
self.buffer = buffer self.buffer = buffer
buf_start_pos = buf_start_pos + discard buf_start_pos += discard
self.buf_start_pos = buf_start_pos self.buf_start_pos = buf_start_pos
buf_len = len(buffer) buf_len = len(buffer)
buf_index = buf_index - discard buf_index -= discard
if data: if data:
c = buffer[buf_index] c = buffer[buf_index]
next_pos = next_pos + 1 next_pos += 1
else: else:
c = u'' c = u''
# End inlined: c = self.read_char() # End inlined: c = self.read_char()
...@@ -226,7 +228,7 @@ class Scanner(object): ...@@ -226,7 +228,7 @@ class Scanner(object):
cur_char = u'\n' cur_char = u'\n'
input_state = 3 input_state = 3
elif input_state == 3: elif input_state == 3:
cur_line = cur_line + 1 cur_line += 1
cur_line_start = cur_pos = next_pos cur_line_start = cur_pos = next_pos
cur_char = BOL cur_char = BOL
input_state = 1 input_state = 1
...@@ -263,7 +265,7 @@ class Scanner(object): ...@@ -263,7 +265,7 @@ class Scanner(object):
def next_char(self): def next_char(self):
input_state = self.input_state input_state = self.input_state
if self.trace: if self.trace:
print("Scanner: next: %s [%d] %d" % (" "*20, input_state, self.cur_pos)) print("Scanner: next: %s [%d] %d" % (" " * 20, input_state, self.cur_pos))
if input_state == 1: if input_state == 1:
self.cur_pos = self.next_pos self.cur_pos = self.next_pos
c = self.read_char() c = self.read_char()
...@@ -279,7 +281,7 @@ class Scanner(object): ...@@ -279,7 +281,7 @@ class Scanner(object):
self.cur_char = u'\n' self.cur_char = u'\n'
self.input_state = 3 self.input_state = 3
elif input_state == 3: elif input_state == 3:
self.cur_line = self.cur_line + 1 self.cur_line += 1
self.cur_line_start = self.cur_pos = self.next_pos self.cur_line_start = self.cur_pos = self.next_pos
self.cur_char = BOL self.cur_char = BOL
self.input_state = 1 self.input_state = 1
...@@ -313,7 +315,7 @@ class Scanner(object): ...@@ -313,7 +315,7 @@ class Scanner(object):
self.lexicon.get_initial_state(state_name)) self.lexicon.get_initial_state(state_name))
self.state_name = state_name self.state_name = state_name
def produce(self, value, text = None): def produce(self, value, text=None):
""" """
Called from an action procedure, causes |value| to be returned Called from an action procedure, causes |value| to be returned
as the token value from read(). If |text| is supplied, it is as the token value from read(). If |text| is supplied, it is
......
...@@ -25,7 +25,6 @@ def re(s): ...@@ -25,7 +25,6 @@ def re(s):
class REParser(object): class REParser(object):
def __init__(self, s): def __init__(self, s):
self.s = s self.s = s
self.i = -1 self.i = -1
......
...@@ -40,7 +40,7 @@ class TransitionMap(object): ...@@ -40,7 +40,7 @@ class TransitionMap(object):
map = None # The list of codes and states map = None # The list of codes and states
special = None # Mapping for special events special = None # Mapping for special events
def __init__(self, map = None, special = None): def __init__(self, map=None, special=None):
if not map: if not map:
map = [-maxint, {}, maxint] map = [-maxint, {}, maxint]
if not special: if not special:
...@@ -50,7 +50,7 @@ class TransitionMap(object): ...@@ -50,7 +50,7 @@ class TransitionMap(object):
#self.check() ### #self.check() ###
def add(self, event, new_state, def add(self, event, new_state,
TupleType = tuple): TupleType=tuple):
""" """
Add transition to |new_state| on |event|. Add transition to |new_state| on |event|.
""" """
...@@ -61,12 +61,12 @@ class TransitionMap(object): ...@@ -61,12 +61,12 @@ class TransitionMap(object):
map = self.map map = self.map
while i < j: while i < j:
map[i + 1][new_state] = 1 map[i + 1][new_state] = 1
i = i + 2 i += 2
else: else:
self.get_special(event)[new_state] = 1 self.get_special(event)[new_state] = 1
def add_set(self, event, new_set, def add_set(self, event, new_set,
TupleType = tuple): TupleType=tuple):
""" """
Add transitions to the states in |new_set| on |event|. Add transitions to the states in |new_set| on |event|.
""" """
...@@ -77,19 +77,19 @@ class TransitionMap(object): ...@@ -77,19 +77,19 @@ class TransitionMap(object):
map = self.map map = self.map
while i < j: while i < j:
map[i + 1].update(new_set) map[i + 1].update(new_set)
i = i + 2 i += 2
else: else:
self.get_special(event).update(new_set) self.get_special(event).update(new_set)
def get_epsilon(self, def get_epsilon(self,
none = None): none=None):
""" """
Return the mapping for epsilon, or None. Return the mapping for epsilon, or None.
""" """
return self.special.get('', none) return self.special.get('', none)
def iteritems(self, def iteritems(self,
len = len): len=len):
""" """
Return the mapping as an iterable of ((code1, code2), state_set) and Return the mapping as an iterable of ((code1, code2), state_set) and
(special_event, state_set) pairs. (special_event, state_set) pairs.
...@@ -106,17 +106,18 @@ class TransitionMap(object): ...@@ -106,17 +106,18 @@ class TransitionMap(object):
if set or else_set: if set or else_set:
result.append(((code0, code1), set)) result.append(((code0, code1), set))
code0 = code1 code0 = code1
i = i + 2 i += 2
for event, set in self.special.iteritems(): for event, set in self.special.iteritems():
if set: if set:
result.append((event, set)) result.append((event, set))
return iter(result) return iter(result)
items = iteritems items = iteritems
# ------------------- Private methods -------------------- # ------------------- Private methods --------------------
def split(self, code, def split(self, code,
len = len, maxint = maxint): len=len, maxint=maxint):
""" """
Search the list for the position of the split point for |code|, Search the list for the position of the split point for |code|,
inserting a new split point if necessary. Returns index |i| such inserting a new split point if necessary. Returns index |i| such
...@@ -173,10 +174,10 @@ class TransitionMap(object): ...@@ -173,10 +174,10 @@ class TransitionMap(object):
else: else:
code_str = str(code) code_str = str(code)
map_strs.append(code_str) map_strs.append(code_str)
i = i + 1 i += 1
if i < n: if i < n:
map_strs.append(state_set_str(map[i])) map_strs.append(state_set_str(map[i]))
i = i + 1 i += 1
special_strs = {} special_strs = {}
for event, set in self.special.iteritems(): for event, set in self.special.iteritems():
special_strs[event] = state_set_str(set) special_strs[event] = state_set_str(set)
...@@ -199,7 +200,7 @@ class TransitionMap(object): ...@@ -199,7 +200,7 @@ class TransitionMap(object):
n = len(map) - 1 n = len(map) - 1
while i < n: while i < n:
self.dump_range(map[i], map[i + 2], map[i + 1], file) self.dump_range(map[i], map[i + 2], map[i + 1], file)
i = i + 2 i += 2
for event, set in self.special.iteritems(): for event, set in self.special.iteritems():
if set: if set:
if not event: if not event:
...@@ -234,6 +235,7 @@ class TransitionMap(object): ...@@ -234,6 +235,7 @@ class TransitionMap(object):
def dump_set(self, set): def dump_set(self, set):
return state_set_str(set) return state_set_str(set)
# #
# State set manipulation functions # State set manipulation functions
# #
......
# cython.* namespace for pure mode. # cython.* namespace for pure mode.
__version__ = "0.21" __version__ = "0.21.1pre"
# BEGIN shameless copy from Cython/minivect/minitypes.py # BEGIN shameless copy from Cython/minivect/minitypes.py
......
#################### cfunc.to_py ####################
@cname("{{cname}}")
cdef object {{cname}}({{return_type.ctype}} (*f)({{ ', '.join(arg.type_cname for arg in args) }}) {{except_clause}}):
def wrap({{ ', '.join('{arg.ctype} {arg.name}'.format(arg=arg) for arg in args) }}):
"""wrap({{', '.join(('{arg.name}: {arg.type_displayname}'.format(arg=arg) if arg.type_displayname else arg.name) for arg in args)}}){{if return_type.type_displayname}} -> {{return_type.type_displayname}}{{endif}}"""
{{'' if return_type.type.is_void else 'return '}}f({{ ', '.join(arg.name for arg in args) }})
return wrap
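The cfunc.to_py utility template above generates a Python ``wrap()`` function around a C function pointer, which is what lets ``cdef`` functions be passed around as ordinary Python callables. A minimal sketch of the user-level effect, using hypothetical function names (the exhaustive coverage lives in the cfunc_convert test further down in this commit):

    # Sketch only: add_one is a hypothetical cdef function, not part of this commit.
    cdef double add_one(double x):
        return x + 1.0

    def get_callable():
        # Assigning the cdef function to a Python object triggers the
        # wrapper generated by the cfunc.to_py template above.
        cdef object py_func = add_one
        return py_func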
...@@ -545,13 +545,12 @@ static PyObject *__Pyx_CyFunction_descr_get(PyObject *func, PyObject *obj, PyObj ...@@ -545,13 +545,12 @@ static PyObject *__Pyx_CyFunction_descr_get(PyObject *func, PyObject *obj, PyObj
if (m->flags & __Pyx_CYFUNCTION_CLASSMETHOD) { if (m->flags & __Pyx_CYFUNCTION_CLASSMETHOD) {
if (type == NULL) if (type == NULL)
type = (PyObject *)(Py_TYPE(obj)); type = (PyObject *)(Py_TYPE(obj));
return PyMethod_New(func, return __Pyx_PyMethod_New(func, type, (PyObject *)(Py_TYPE(type)));
type, (PyObject *)(Py_TYPE(type)));
} }
if (obj == Py_None) if (obj == Py_None)
obj = NULL; obj = NULL;
return PyMethod_New(func, obj, type); return __Pyx_PyMethod_New(func, obj, type);
} }
static PyObject* static PyObject*
......
...@@ -213,6 +213,13 @@ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject ...@@ -213,6 +213,13 @@ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject
PyErr_SetObject(type, value); PyErr_SetObject(type, value);
if (tb) { if (tb) {
#if CYTHON_COMPILING_IN_PYPY
PyObject *tmp_type, *tmp_value, *tmp_tb;
PyErr_Fetch(tmp_type, tmp_value, tmp_tb);
Py_INCREF(tb);
PyErr_Restore(tmp_type, tmp_value, tb);
Py_XDECREF(tmp_tb);
#else
PyThreadState *tstate = PyThreadState_GET(); PyThreadState *tstate = PyThreadState_GET();
PyObject* tmp_tb = tstate->curexc_traceback; PyObject* tmp_tb = tstate->curexc_traceback;
if (tb != tmp_tb) { if (tb != tmp_tb) {
...@@ -220,6 +227,7 @@ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject ...@@ -220,6 +227,7 @@ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject
tstate->curexc_traceback = tb; tstate->curexc_traceback = tb;
Py_XDECREF(tmp_tb); Py_XDECREF(tmp_tb);
} }
#endif
} }
bad: bad:
......
...@@ -62,9 +62,6 @@ ...@@ -62,9 +62,6 @@
#if PY_MAJOR_VERSION >= 3 #if PY_MAJOR_VERSION >= 3
#define Py_TPFLAGS_CHECKTYPES 0 #define Py_TPFLAGS_CHECKTYPES 0
#define Py_TPFLAGS_HAVE_INDEX 0 #define Py_TPFLAGS_HAVE_INDEX 0
#endif
#if PY_MAJOR_VERSION >= 3
#define Py_TPFLAGS_HAVE_NEWBUFFER 0 #define Py_TPFLAGS_HAVE_NEWBUFFER 0
#endif #endif
...@@ -158,6 +155,12 @@ ...@@ -158,6 +155,12 @@
#define PyBoolObject PyLongObject #define PyBoolObject PyLongObject
#endif #endif
#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY
#ifndef PyUnicode_InternFromString
#define PyUnicode_InternFromString(s) PyUnicode_FromString(s)
#endif
#endif
#if PY_VERSION_HEX < 0x030200A4 #if PY_VERSION_HEX < 0x030200A4
typedef long Py_hash_t; typedef long Py_hash_t;
#define __Pyx_PyInt_FromHash_t PyInt_FromLong #define __Pyx_PyInt_FromHash_t PyInt_FromLong
...@@ -168,7 +171,9 @@ ...@@ -168,7 +171,9 @@
#endif #endif
#if PY_MAJOR_VERSION >= 3 #if PY_MAJOR_VERSION >= 3
#define PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : PyInstanceMethod_New(func)) #define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : PyInstanceMethod_New(func))
#else
#define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
#endif #endif
/* inline attribute */ /* inline attribute */
......
import os import os
import sys
from distutils.core import setup from distutils.core import setup
from distutils.extension import Extension from distutils.extension import Extension
from Cython.Distutils import build_ext from Cython.Build import cythonize
# For demo purposes, we build our own tiny library. # For demo purposes, we build our own tiny library.
...@@ -12,20 +13,19 @@ try: ...@@ -12,20 +13,19 @@ try:
assert os.system("ar rcs libmymath.a mymath.o") == 0 assert os.system("ar rcs libmymath.a mymath.o") == 0
except: except:
if not os.path.exists("libmymath.a"): if not os.path.exists("libmymath.a"):
print "Error building external library, please create libmymath.a manually." print("Error building external library, please create libmymath.a manually.")
sys.exit(1) sys.exit(1)
# Here is how to use the library built above. # Here is how to use the library built above.
ext_modules=[ ext_modules = cythonize([
Extension("call_mymath", Extension("call_mymath",
sources = ["call_mymath.pyx"], sources=["call_mymath.pyx"],
include_dirs = [os.getcwd()], # path to .h file(s) include_dirs=[os.getcwd()], # path to .h file(s)
library_dirs = [os.getcwd()], # path to .a or .so file(s) library_dirs=[os.getcwd()], # path to .a or .so file(s)
libraries = ['mymath']) libraries=['mymath'])
] ])
setup( setup(
name = 'Demos', name='Demos',
cmdclass = {'build_ext': build_ext}, ext_modules=ext_modules,
ext_modules = ext_modules,
) )
...@@ -5,8 +5,7 @@ include pylintrc ...@@ -5,8 +5,7 @@ include pylintrc
include setup.py include setup.py
include setupegg.py include setupegg.py
include bin/* include bin/*
include cython.py include cython.py cythonize.py cygdb.py
include cygdb.py
recursive-include Cython *.pyx *.pxd recursive-include Cython *.pyx *.pxd
include Doc/* include Doc/*
......
def primes(kmax): def primes(kmax):
result = [] result = []
if kmax > 1000: if kmax > 1000:
kmax = 1000 kmax = 1000
p = [0] * 1000
k = 0
n = 2
while k < kmax: while k < kmax:
i = 0 i = 0
while i < k and n % p[i] != 0: while i < k and n % p[i] != 0:
i = i + 1 i += 1
if i == k: if i == k:
p[k] = n p[k] = n
k = k + 1 k += 1
result.append(n) result.append(n)
n = n + 1 n += 1
return result return result
...@@ -78,6 +78,8 @@ You can show Cython's code analysis by passing the ``--annotate`` option:: ...@@ -78,6 +78,8 @@ You can show Cython's code analysis by passing the ``--annotate`` option::
%%cython --annotate %%cython --annotate
... ...
.. figure:: ipython.png
Using the Sage notebook Using the Sage notebook
----------------------- -----------------------
......
...@@ -137,10 +137,14 @@ together into :file:`rect.so`, which you can then import in Python using ...@@ -137,10 +137,14 @@ together into :file:`rect.so`, which you can then import in Python using
``import rect`` (if you forget to link the :file:`Rectangle.o`, you will ``import rect`` (if you forget to link the :file:`Rectangle.o`, you will
get missing symbols while importing the library in Python). get missing symbols while importing the library in Python).
Note that the ``language`` option has no effect on user provided Extension
objects that are passed into ``cythonize()``. It is only used for modules
found by file name (as in the example above).
The options can also be passed directly from the source file, which is The options can also be passed directly from the source file, which is
often preferable. Starting with version 0.17, Cython also allows you to often preferable (and overrides any global option). Starting with
pass external source files into the ``cythonize()`` command this way. version 0.17, Cython also allows you to pass external source files into the
Here is a simplified setup.py file:: ``cythonize()`` command this way. Here is a simplified setup.py file::
from distutils.core import setup from distutils.core import setup
from Cython.Build import cythonize from Cython.Build import cythonize
......
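The documentation change above notes that a global ``language='c++'`` passed to ``cythonize()`` applies only to modules found by file name, not to Extension objects supplied by the user. A minimal setup.py sketch combining both cases is shown below; the module name "wrapper" and the "*.pyx" pattern are illustrative assumptions, not part of this commit:

    # Sketch only: "wrapper.pyx" and "*.pyx" are hypothetical sources.
    from distutils.core import setup
    from distutils.extension import Extension
    from Cython.Build import cythonize

    extensions = [
        # An explicit Extension object keeps its own settings; the global
        # language option passed to cythonize() below does not affect it.
        Extension("wrapper", sources=["wrapper.pyx"], language="c++"),
    ]

    setup(
        # Sources matched by the "*.pyx" pattern pick up language='c++'
        # from the cythonize() call itself.
        ext_modules=cythonize(extensions + ["*.pyx"], language="c++"),
    )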
...@@ -1951,6 +1951,8 @@ def runtests(options, cmd_args, coverage=None): ...@@ -1951,6 +1951,8 @@ def runtests(options, cmd_args, coverage=None):
try: try:
import jedi import jedi
if list(map(int, re.findall('[0-9]+', jedi.__version__))) < [0, 8, 1]:
raise ImportError
except ImportError: except ImportError:
exclude_selectors.append(RegExSelector('Jedi')) exclude_selectors.append(RegExSelector('Jedi'))
......
...@@ -190,13 +190,13 @@ def acquire_nonbuffer1(first, second=None): ...@@ -190,13 +190,13 @@ def acquire_nonbuffer1(first, second=None):
""" """
>>> acquire_nonbuffer1(3) # doctest: +ELLIPSIS >>> acquire_nonbuffer1(3) # doctest: +ELLIPSIS
Traceback (most recent call last): Traceback (most recent call last):
TypeError: 'int' does not ... the buffer interface TypeError:... 'int'...
>>> acquire_nonbuffer1(type) # doctest: +ELLIPSIS >>> acquire_nonbuffer1(type) # doctest: +ELLIPSIS
Traceback (most recent call last): Traceback (most recent call last):
TypeError: 'type' does not ... the buffer interface TypeError:... 'type'...
>>> acquire_nonbuffer1(None, 2) # doctest: +ELLIPSIS >>> acquire_nonbuffer1(None, 2) # doctest: +ELLIPSIS
Traceback (most recent call last): Traceback (most recent call last):
TypeError: 'int' does not ... the buffer interface TypeError:... 'int'...
""" """
cdef object[int] buf cdef object[int] buf
buf = first buf = first
......
...@@ -10,13 +10,11 @@ from Cython.Build.Dependencies import cythonize ...@@ -10,13 +10,11 @@ from Cython.Build.Dependencies import cythonize
from distutils.core import setup from distutils.core import setup
setup( setup(
ext_modules = cythonize("*.pyx"), ext_modules = cythonize("*.pyx", language='c++'),
) )
######## a.pyx ######## ######## a.pyx ########
# distutils: language = c++
from libcpp.vector cimport vector from libcpp.vector cimport vector
def use_vector(L): def use_vector(L):
......
cdef int wrong_args(int x, long y)
cdef long wrong_return_type(int x, int y)
cdef int wrong_exception_check(int x, int y) except 0
cdef int wrong_exception_value(int x, int y) except 0
cdef int wrong_exception_value_check(int x, int y) except 0
cdef int inherit_exception_value(int x, int y) except 0
cdef int inherit_exception_check(int x, int y) except *
# mode: error
# tag: pxd
cdef int wrong_args(int x, int y):
return 2
cdef int wrong_return_type(int x, int y):
return 2
cdef int wrong_exception_check(int x, int y) except? 0:
return 2
cdef int wrong_exception_value(int x, int y) except 1:
return 2
cdef int wrong_exception_value_check(int x, int y) except? 1:
return 2
cdef int inherit_exception_value(int x, int y):
return 2
cdef int inherit_exception_check(int x, int y):
return 2
_ERRORS = """
4:5: Function signature does not match previous declaration
7:5: Function signature does not match previous declaration
10:5: Function signature does not match previous declaration
13:5: Function signature does not match previous declaration
16:5: Function signature does not match previous declaration
19:5: Function signature does not match previous declaration
22:5: Function signature does not match previous declaration
"""
...@@ -18,7 +18,7 @@ def unused_result(): ...@@ -18,7 +18,7 @@ def unused_result():
return r return r
def unused_nested(): def unused_nested():
def unused_one(): def _unused_one():
pass pass
def unused_class(): def unused_class():
...@@ -53,7 +53,7 @@ _ERRORS = """ ...@@ -53,7 +53,7 @@ _ERRORS = """
9:9: Unused entry 'b' 9:9: Unused entry 'b'
12:15: Unused argument 'arg' 12:15: Unused argument 'arg'
16:6: Unused result in 'r' 16:6: Unused result in 'r'
21:4: Unused entry 'unused_one' 21:4: Unused entry '_unused_one'
25:4: Unused entry 'Unused' 25:4: Unused entry 'Unused'
35:16: Unused entry 'foo' 35:16: Unused entry 'foo'
36:13: Unused entry 'i' 36:13: Unused entry 'i'
......
...@@ -14,7 +14,6 @@ from cython.parallel cimport prange, parallel ...@@ -14,7 +14,6 @@ from cython.parallel cimport prange, parallel
import gc import gc
import sys import sys
import re
if sys.version_info[0] < 3: if sys.version_info[0] < 3:
import __builtin__ as builtins import __builtin__ as builtins
...@@ -26,9 +25,6 @@ __test__ = {} ...@@ -26,9 +25,6 @@ __test__ = {}
def testcase(func): def testcase(func):
doctest = func.__doc__ doctest = func.__doc__
if sys.version_info >= (3,1,1):
doctest = doctest.replace('does not have the buffer interface',
'does not support the buffer interface')
if sys.version_info >= (3, 0): if sys.version_info >= (3, 0):
_u = str _u = str
else: else:
...@@ -162,22 +158,22 @@ def acquire_failure3(): ...@@ -162,22 +158,22 @@ def acquire_failure3():
@testcase @testcase
def acquire_nonbuffer1(first, second=None): def acquire_nonbuffer1(first, second=None):
""" """
>>> acquire_nonbuffer1(3) >>> acquire_nonbuffer1(3) # doctest: +ELLIPSIS
Traceback (most recent call last): Traceback (most recent call last):
... ...
TypeError: 'int' does not have the buffer interface TypeError:... 'int'...
>>> acquire_nonbuffer1(type) >>> acquire_nonbuffer1(type) # doctest: +ELLIPSIS
Traceback (most recent call last): Traceback (most recent call last):
... ...
TypeError: 'type' does not have the buffer interface TypeError:... 'type'...
>>> acquire_nonbuffer1(None, 2) >>> acquire_nonbuffer1(None, 2) # doctest: +ELLIPSIS
Traceback (most recent call last): Traceback (most recent call last):
... ...
TypeError: 'int' does not have the buffer interface TypeError:... 'int'...
>>> acquire_nonbuffer1(4, object()) >>> acquire_nonbuffer1(4, object()) # doctest: +ELLIPSIS
Traceback (most recent call last): Traceback (most recent call last):
... ...
TypeError: 'int' does not have the buffer interface TypeError:... 'int'...
""" """
cdef int[:] buf cdef int[:] buf
buf = first buf = first
......
# mode: run
# cython: always_allow_keywords=True
cimport cython
from libc.math cimport sqrt
cdef void empty_cfunc():
print "here"
# same signature
cdef void another_empty_cfunc():
print "there"
def call_empty_cfunc():
"""
>>> call_empty_cfunc()
here
there
"""
cdef object py_func = empty_cfunc
py_func()
cdef object another_py_func = another_empty_cfunc
another_py_func()
cdef double square_c(double x):
return x * x
def call_square_c(x):
"""
>>> call_square_c(2)
4.0
>>> call_square_c(-7)
49.0
"""
cdef object py_func = square_c
return py_func(x)
def return_square_c():
"""
>>> square_c = return_square_c()
>>> square_c(5)
25.0
>>> square_c(x=4)
16.0
>>> square_c.__doc__ # FIXME: try to make original C function name available
'wrap(x: float) -> float'
"""
return square_c
def return_libc_sqrt():
"""
>>> sqrt = return_libc_sqrt()
>>> sqrt(9)
3.0
>>> sqrt(x=9)
3.0
>>> sqrt.__doc__
'wrap(x: float) -> float'
"""
return sqrt
global_csqrt = sqrt
def test_global():
"""
>>> global_csqrt(9)
3.0
>>> global_csqrt.__doc__
'wrap(x: float) -> float'
>>> test_global()
double (double) nogil
Python object
"""
print cython.typeof(sqrt)
print cython.typeof(global_csqrt)
cdef long long rad(long long x):
cdef long long rad = 1
for p in range(2, <long long>sqrt(x) + 1):
if x % p == 0:
rad *= p
while x % p == 0:
x //= p
if x == 1:
break
return rad
cdef bint abc(long long a, long long b, long long c) except -1:
if a + b != c:
raise ValueError("Not a valid abc candidate: (%s, %s, %s)" % (a, b, c))
return rad(a*b*c) < c
def call_abc(a, b, c):
"""
>>> call_abc(2, 3, 5)
False
>>> call_abc(1, 63, 64)
True
>>> call_abc(2, 3**10 * 109, 23**5)
True
>>> call_abc(a=2, b=3**10 * 109, c=23**5)
True
>>> call_abc(1, 1, 1)
Traceback (most recent call last):
...
ValueError: Not a valid abc candidate: (1, 1, 1)
"""
cdef object py_func = abc
return py_func(a, b, c)
def return_abc():
"""
>>> abc = return_abc()
>>> abc(2, 3, 5)
False
>>> abc.__doc__
"wrap(a: 'long long', b: 'long long', c: 'long long') -> bool"
"""
return abc
ctypedef double foo
cdef foo test_typedef_cfunc(foo x):
return x
def test_typedef(x):
"""
>>> test_typedef(100)
100.0
"""
return (<object>test_typedef_cfunc)(x)
cdef union my_union:
int a
double b
cdef struct my_struct:
int which
my_union y
cdef my_struct c_struct_builder(int which, int a, double b):
cdef my_struct value
value.which = which
if which:
value.y.a = a
else:
value.y.b = b
return value
def return_struct_builder():
"""
>>> make = return_struct_builder()
>>> d = make(0, 1, 2)
>>> d['which']
0
>>> d['y']['b']
2.0
>>> d = make(1, 1, 2)
>>> d['which']
1
>>> d['y']['a']
1
>>> make.__doc__
"wrap(which: 'int', a: 'int', b: float) -> 'my_struct'"
"""
return c_struct_builder
cdef object test_object_params_cfunc(a, b):
return a, b
def test_object_params(a, b):
"""
>>> test_object_params(1, 'a')
(1, 'a')
"""
return (<object>test_object_params_cfunc)(a, b)
cdef tuple test_builtin_params_cfunc(list a, dict b):
return a, b
def test_builtin_params(a, b):
"""
>>> test_builtin_params([], {})
([], {})
>>> test_builtin_params(1, 2)
Traceback (most recent call last):
...
TypeError: Argument 'a' has incorrect type (expected list, got int)
"""
return (<object>test_builtin_params_cfunc)(a, b)
def return_builtin_params_cfunc():
"""
>>> cfunc = return_builtin_params_cfunc()
>>> cfunc([1, 2], {'a': 3})
([1, 2], {'a': 3})
>>> cfunc.__doc__
'wrap(a: list, b: dict) -> tuple'
"""
return test_builtin_params_cfunc
cdef class A:
def __repr__(self):
return self.__class__.__name__
cdef class B(A):
pass
cdef A test_cdef_class_params_cfunc(A a, B b):
return b
def test_cdef_class_params(a, b):
"""
>>> test_cdef_class_params(A(), B())
B
>>> test_cdef_class_params(B(), A())
Traceback (most recent call last):
...
TypeError: Argument 'b' has incorrect type (expected cfunc_convert.B, got cfunc_convert.A)
"""
return (<object>test_cdef_class_params_cfunc)(a, b)
# cython: c_string_type=str
# cython: c_string_encoding=ascii
cdef extern from "math.h":
cpdef double pxd_sqrt "sqrt"(double)
# cython: c_string_type=str
# cython: c_string_encoding=ascii
__doc__ = """
>>> sqrt(1)
1.0
>>> pyx_sqrt(4)
2.0
>>> pxd_sqrt(9)
3.0
>>> log(10)
Traceback (most recent call last):
...
NameError: name 'log' is not defined
>>> strchr('abcabc', ord('c'))
'cabc'
"""
cdef extern from "math.h":
cpdef double sqrt(double)
cpdef double pyx_sqrt "sqrt"(double)
cdef double log(double) # not wrapped
cdef extern from "string.h":
# signature must be exact in C++, disagrees with C
cpdef const char* strchr(const char *haystack, int needle);
# tag: posix
from libc.stdlib cimport getenv
from posix.stdlib cimport setenv, unsetenv
from libc.time cimport *
def test_time():
"""
>>> test_time()
"""
cdef time_t t1, t2
t1 = time(NULL)
assert t1 != 0
t1 = time(&t2)
assert t1 == t2
def test_mktime():
"""
>>> test_mktime() # doctest:+ELLIPSIS
(986138177, ...'Sun Apr 1 15:16:17 2001\\n')
"""
cdef tm t, gmt
cdef time_t tt
cdef char *ct
cdef char *tz
tz = getenv("TZ")
setenv("TZ", "UTC", 1)
tzset()
t.tm_sec = 17
t.tm_min = 16
t.tm_hour = 15
t.tm_year = 101
t.tm_mon = 3
t.tm_mday = 1
t.tm_isdst = 0
tt = mktime(&t)
assert tt != -1
ct = ctime(&tt)
assert ct != NULL
if tz:
setenv("TZ", tz, 1)
else:
unsetenv("TZ")
tzset()
return tt, ct
# tag: posix
from posix.sys_time cimport *
def test_itimer(sec, usec):
"""
>>> test_itimer(10, 2)
(10, 2)
"""
cdef itimerval t, gtime
t.it_interval.tv_sec = sec
t.it_interval.tv_usec = usec
t.it_value.tv_sec = sec
t.it_value.tv_usec = usec
ret = setitimer(ITIMER_REAL, &t, NULL)
assert ret == 0
ret = getitimer(ITIMER_REAL, &gtime)
assert ret == 0
t.it_interval.tv_sec = 0
t.it_interval.tv_usec = 0
t.it_value.tv_sec = 0
t.it_value.tv_usec = 0
ret = setitimer(ITIMER_REAL, &t, NULL)
return gtime.it_interval.tv_sec, gtime.it_interval.tv_usec
def test_gettimeofday():
"""
>>> test_gettimeofday()
"""
cdef timeval t
ret = gettimeofday(&t, NULL)
assert ret == 0
# tag: posix # tag: posix
from libc.stdlib cimport getenv
from posix.stdlib cimport setenv, unsetenv
from posix.time cimport *
from posix.time cimport *
def test_time(): def test_itimer(sec, usec):
""" """
>>> test_time() >>> test_itimer(10, 2)
(10, 2)
""" """
cdef time_t t1, t2 cdef itimerval t, gtime
t1 = time(NULL)
assert t1 != 0
t1 = time(&t2)
assert t1 == t2
t.it_interval.tv_sec = sec
t.it_interval.tv_usec = usec
t.it_value.tv_sec = sec
t.it_value.tv_usec = usec
ret = setitimer(ITIMER_REAL, &t, NULL)
assert ret == 0
ret = getitimer(ITIMER_REAL, &gtime)
assert ret == 0
t.it_interval.tv_sec = 0
t.it_interval.tv_usec = 0
t.it_value.tv_sec = 0
t.it_value.tv_usec = 0
ret = setitimer(ITIMER_REAL, &t, NULL)
return gtime.it_interval.tv_sec, gtime.it_interval.tv_usec
def test_mktime(): def test_gettimeofday():
""" """
>>> test_mktime() # doctest:+ELLIPSIS >>> test_gettimeofday()
(986138177, ...'Sun Apr 1 15:16:17 2001\\n')
""" """
cdef tm t, gmt cdef timeval t
cdef time_t tt ret = gettimeofday(&t, NULL)
cdef char *ct assert ret == 0
cdef char *tz
tz = getenv("TZ")
setenv("TZ", "UTC", 1)
tzset()
t.tm_sec = 17
t.tm_min = 16
t.tm_hour = 15
t.tm_year = 101
t.tm_mon = 3
t.tm_mday = 1
t.tm_isdst = 0
tt = mktime(&t)
assert tt != -1
ct = ctime(&tt)
assert ct != NULL
if tz:
setenv("TZ", tz, 1)
else:
unsetenv("TZ")
tzset()
return tt, ct
...@@ -496,6 +496,32 @@ def safe_c_functions(): ...@@ -496,6 +496,32 @@ def safe_c_functions():
assert typeof(f) == 'int (*)(int)', typeof(f) assert typeof(f) == 'int (*)(int)', typeof(f)
assert 2 == f(1) assert 2 == f(1)
@infer_types(None)
def ptr_types():
"""
>>> ptr_types()
"""
cdef int a
a_ptr = &a
assert typeof(a_ptr) == "int *", typeof(a_ptr)
a_ptr_ptr = &a_ptr
assert typeof(a_ptr_ptr) == "int **", typeof(a_ptr_ptr)
cdef int[1] b
b_ref = b
assert typeof(b_ref) == "int *", typeof(b_ref)
ptr = &a
ptr = b
assert typeof(ptr) == "int *", typeof(ptr)
def const_types(const double x, double y, double& z):
"""
>>> const_types(1, 1, 1)
"""
a = x
a = y
a = z
assert typeof(a) == "double", typeof(a)
@infer_types(None) @infer_types(None)
def args_tuple_keywords(*args, **kwargs): def args_tuple_keywords(*args, **kwargs):
""" """
......