Commit 8193f74a authored by Mark Florisson
parents 76567d05 43a3dbea
......@@ -38,3 +38,4 @@ ef9d2c680684d0df7d81f529cda29e9e1741f575 cython-0.10.1
7fa84cb6d3d75eb3d015aeeb60bf8b642171fe93 0.14.beta2
8412b39fbc3eb709a543e2f1e95c0c8881ea9ed4 0.14.beta2
a6b9f0a6d02d23fc3d3a9d0587867faa3afb2fcd 0.14.rc0
15bf34c9387444e262acb1de594405444dd571a4 0.14
......@@ -5,9 +5,9 @@ redistribute, modify and distribute modified versions."
------------------
Cython, which derives from Pyrex, is licensed under the Python
Software Foundation License. More precisely, all modifications
Software Foundation License. More precisely, all modifications
made to go from Pyrex to Cython are so licensed.
See LICENSE.txt for more details.
......@@ -36,7 +36,7 @@ def parse_list(s):
return literals[literal[1:-1]]
else:
return literal
return [unquote(item) for item in s.split(delimiter)]
transitive_str = object()
......@@ -70,7 +70,7 @@ def line_iter(source):
start = end+1
class DistutilsInfo(object):
def __init__(self, source=None, exn=None):
self.values = {}
if source is not None:
......@@ -97,7 +97,7 @@ class DistutilsInfo(object):
value = getattr(exn, key, None)
if value:
self.values[key] = value
def merge(self, other):
if other is None:
return self
......@@ -114,7 +114,7 @@ class DistutilsInfo(object):
else:
self.values[key] = value
return self
def subs(self, aliases):
if aliases is None:
return self
......@@ -140,9 +140,9 @@ class DistutilsInfo(object):
def strip_string_literals(code, prefix='__Pyx_L'):
"""
Normalizes every string literal to be of the form '__Pyx_Lxxx',
Normalizes every string literal to be of the form '__Pyx_Lxxx',
returning the normalized code and a mapping of labels to
string literals.
string literals.
"""
new_code = []
literals = {}
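Illustrative only: based on the docstring above, a call might behave roughly as follows (the label numbering and exact quoting are assumptions, not taken from this commit).

    # hypothetical behaviour of strip_string_literals as described above
    code, literals = strip_string_literals("a = 'spam'")
    # code     -> "a = '__Pyx_L1'"         (literal replaced by a label, quotes kept)
    # literals -> {'__Pyx_L1': 'spam'}     (label mapped back to the original contents)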
......@@ -156,7 +156,7 @@ def strip_string_literals(code, prefix='__Pyx_L'):
double_q = code.find('"', q)
q = min(single_q, double_q)
if q == -1: q = max(single_q, double_q)
# We're done.
if q == -1 and hash_mark == -1:
new_code.append(code[start:])
......@@ -181,7 +181,7 @@ def strip_string_literals(code, prefix='__Pyx_L'):
start = q
else:
q += 1
# Process comment.
elif -1 != hash_mark and (hash_mark < q or q == -1):
end = code.find('\n', hash_mark)
......@@ -212,7 +212,7 @@ def strip_string_literals(code, prefix='__Pyx_L'):
new_code.append(code[start:end])
start = q
q += len(in_quote)
return "".join(new_code), literals
......@@ -220,7 +220,11 @@ def parse_dependencies(source_filename):
# Actual parsing is way too slow, so we use regular expressions.
# The only catch is that we must strip comments and string
# literals ahead of time.
source = Utils.open_source_file(source_filename, "rU").read()
fh = Utils.open_source_file(source_filename, "rU")
try:
source = fh.read()
finally:
fh.close()
distutils_info = DistutilsInfo(source)
source, literals = strip_string_literals(source)
source = source.replace('\\\n', ' ')
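The try/finally added above guarantees the file handle is closed even when read() raises. Illustrative only: on Pythons that provide the with statement, an equivalent shape would be the following sketch (using the plain built-in open rather than Utils.open_source_file, so encoding handling is ignored here).

    # sketch of the same resource handling with a context manager
    with open(source_filename, "rU") as fh:
        source = fh.read()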
......@@ -245,16 +249,16 @@ def parse_dependencies(source_filename):
class DependencyTree(object):
def __init__(self, context):
self.context = context
self._transitive_cache = {}
#@cached_method
def parse_dependencies(self, source_filename):
return parse_dependencies(source_filename)
parse_dependencies = cached_method(parse_dependencies)
#@cached_method
def cimports_and_externs(self, filename):
cimports, includes, externs = self.parse_dependencies(filename)[:3]
......@@ -272,10 +276,10 @@ class DependencyTree(object):
print("Unable to locate '%s' referenced from '%s'" % (filename, include))
return tuple(cimports), tuple(externs)
cimports_and_externs = cached_method(cimports_and_externs)
def cimports(self, filename):
return self.cimports_and_externs(filename)[0]
#@cached_method
def package(self, filename):
dir = os.path.dirname(filename)
......@@ -284,13 +288,13 @@ class DependencyTree(object):
else:
return ()
package = cached_method(package)
#@cached_method
def fully_qualifeid_name(self, filename):
module = os.path.splitext(os.path.basename(filename))[0]
return '.'.join(self.package(filename) + (module,))
fully_qualifeid_name = cached_method(fully_qualifeid_name)
def find_pxd(self, module, filename=None):
if module[0] == '.':
raise NotImplementedError("New relative imports.")
......@@ -301,7 +305,7 @@ class DependencyTree(object):
return pxd
return self.context.find_pxd_file(module, None)
find_pxd = cached_method(find_pxd)
#@cached_method
def cimported_files(self, filename):
if filename[-4:] == '.pyx' and os.path.exists(filename[:-4] + '.pxd'):
......@@ -316,33 +320,33 @@ class DependencyTree(object):
print("\n\t".join(b))
return tuple(self_pxd + filter(None, [self.find_pxd(m, filename) for m in self.cimports(filename)]))
cimported_files = cached_method(cimported_files)
def immediate_dependencies(self, filename):
all = list(self.cimported_files(filename))
for extern in sum(self.cimports_and_externs(filename), ()):
all.append(os.path.normpath(os.path.join(os.path.dirname(filename), extern)))
return tuple(all)
#@cached_method
def timestamp(self, filename):
return os.path.getmtime(filename)
timestamp = cached_method(timestamp)
def extract_timestamp(self, filename):
# TODO: .h files from extern blocks
return self.timestamp(filename), filename
def newest_dependency(self, filename):
return self.transitive_merge(filename, self.extract_timestamp, max)
def distutils_info0(self, filename):
return self.parse_dependencies(filename)[3]
def distutils_info(self, filename, aliases=None, base=None):
return (self.transitive_merge(filename, self.distutils_info0, DistutilsInfo.merge)
.subs(aliases)
.merge(base))
def transitive_merge(self, node, extract, merge):
try:
seen = self._transitive_cache[extract, merge]
......@@ -350,7 +354,7 @@ class DependencyTree(object):
seen = self._transitive_cache[extract, merge] = {}
return self.transitive_merge_helper(
node, extract, merge, seen, {}, self.cimported_files)[0]
def transitive_merge_helper(self, node, extract, merge, seen, stack, outgoing):
if node in seen:
return seen[node], None
......
......@@ -52,9 +52,12 @@ def unbound_symbols(code, context=None):
symbol_collector = AllSymbols()
symbol_collector(tree)
unbound = []
import __builtin__
try:
import builtins
except ImportError:
import __builtin__ as builtins
for name in symbol_collector.names:
if not tree.scope.lookup(name) and not hasattr(__builtin__, name):
if not tree.scope.lookup(name) and not hasattr(builtins, name):
unbound.append(name)
return unbound
......@@ -79,7 +82,7 @@ def safe_type(arg, context=None):
return 'numpy.ndarray[numpy.%s_t, ndim=%s]' % (arg.dtype.name, arg.ndim)
else:
for base_type in py_type.mro():
if base_type.__module__ == '__builtin__':
if base_type.__module__ in ('__builtin__', 'builtins'):
return 'object'
module = context.find_module(base_type.__module__, need_pxd=False)
if module:
......@@ -88,7 +91,7 @@ def safe_type(arg, context=None):
return '%s.%s' % (base_type.__module__, base_type.__name__)
return 'object'
def cython_inline(code,
def cython_inline(code,
get_type=unsafe_type,
lib_dir=os.path.expanduser('~/.cython/inline'),
cython_include_dirs=['.'],
......@@ -125,7 +128,7 @@ def cython_inline(code,
arg_names.sort()
arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
key = code, arg_sigs, sys.version_info, sys.executable, Cython.__version__
module_name = "_cython_inline_" + hashlib.md5(str(key)).hexdigest()
module_name = "_cython_inline_" + hashlib.md5(str(key).encode('utf-8')).hexdigest()
try:
if not os.path.exists(lib_dir):
os.makedirs(lib_dir)
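The .encode('utf-8') added above is what keeps the digest working on Python 3, where hashlib.md5() accepts only bytes. A minimal, self-contained illustration (the key tuple is made up):

    import hashlib
    key = ("print(a)", (("int", "a"),), (2, 7, 0))            # illustrative key tuple
    digest = hashlib.md5(str(key).encode('utf-8')).hexdigest()
    module_name = "_cython_inline_" + digest                  # mirrors the line above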
......@@ -160,7 +163,11 @@ def __invoke(%(params)s):
for key, value in literals.items():
module_code = module_code.replace(key, value)
pyx_file = os.path.join(lib_dir, module_name + '.pyx')
open(pyx_file, 'w').write(module_code)
fh = open(pyx_file, 'w')
try:
fh.write(module_code)
finally:
fh.close()
extension = Extension(
name = module_name,
sources = [pyx_file],
......@@ -252,14 +259,14 @@ def get_body(source):
else:
return source[ix+1:]
# Lots to be done here... It would be especially cool if compiled functions
# Lots to be done here... It would be especially cool if compiled functions
# could invoke each other quickly.
class RuntimeCompiledFunction(object):
def __init__(self, f):
self._f = f
self._body = get_body(inspect.getsource(f))
def __call__(self, *args, **kwds):
all = getcallargs(self._f, *args, **kwds)
return cython_inline(self._body, locals=self._f.func_globals, globals=self._f.func_globals, **all)
......@@ -32,7 +32,7 @@ class TestInline(CythonTest):
self.assertEquals(inline("return global_value + 1", **test_kwds), global_value + 1)
if has_numpy:
def test_numpy(self):
import numpy
a = numpy.ndarray((10, 20))
......
......@@ -14,14 +14,14 @@ class LinesResult(object):
def __init__(self):
self.lines = []
self.s = u""
def put(self, s):
self.s += s
def newline(self):
self.lines.append(self.s)
self.s = u""
def putline(self, s):
self.put(s)
self.newline()
......@@ -29,7 +29,7 @@ class LinesResult(object):
class CodeWriter(TreeVisitor):
indent_string = u" "
def __init__(self, result = None):
super(CodeWriter, self).__init__()
if result is None:
......@@ -38,22 +38,22 @@ class CodeWriter(TreeVisitor):
self.numindents = 0
self.tempnames = {}
self.tempblockindex = 0
def write(self, tree):
self.visit(tree)
def indent(self):
self.numindents += 1
def dedent(self):
self.numindents -= 1
def startline(self, s = u""):
self.result.put(self.indent_string * self.numindents + s)
def put(self, s):
self.result.put(s)
def endline(self, s = u""):
self.result.putline(s)
......@@ -70,13 +70,13 @@ class CodeWriter(TreeVisitor):
self.visit(item.default)
self.put(u", ")
self.visit(items[-1])
def visit_Node(self, node):
raise AssertionError("Node not handled by serializer: %r" % node)
def visit_ModuleNode(self, node):
self.visitchildren(node)
def visit_StatListNode(self, node):
self.visitchildren(node)
......@@ -87,7 +87,7 @@ class CodeWriter(TreeVisitor):
self.indent()
self.visit(node.body)
self.dedent()
def visit_CArgDeclNode(self, node):
if node.base_type.name is not None:
self.visit(node.base_type)
......@@ -96,10 +96,10 @@ class CodeWriter(TreeVisitor):
if node.default is not None:
self.put(u" = ")
self.visit(node.default)
def visit_CNameDeclaratorNode(self, node):
self.put(node.name)
def visit_CSimpleBaseTypeNode(self, node):
# See Parsing.p_sign_and_longness
if node.is_basic_c_type:
......@@ -108,16 +108,16 @@ class CodeWriter(TreeVisitor):
self.put("short " * -node.longness)
elif node.longness > 0:
self.put("long " * node.longness)
self.put(node.name)
def visit_SingleAssignmentNode(self, node):
self.startline()
self.visit(node.lhs)
self.put(u" = ")
self.visit(node.rhs)
self.endline()
def visit_CascadedAssignmentNode(self, node):
self.startline()
for lhs in node.lhs_list:
......@@ -125,10 +125,10 @@ class CodeWriter(TreeVisitor):
self.put(u" = ")
self.visit(node.rhs)
self.endline()
def visit_NameNode(self, node):
self.put(node.name)
def visit_IntNode(self, node):
self.put(node.value)
......@@ -164,7 +164,7 @@ class CodeWriter(TreeVisitor):
def visit_PassStatNode(self, node):
self.startline(u"pass")
self.endline()
def visit_PrintStatNode(self, node):
self.startline(u"print ")
self.comma_separated_list(node.arg_tuple.args)
......@@ -176,7 +176,7 @@ class CodeWriter(TreeVisitor):
self.visit(node.operand1)
self.put(u" %s " % node.operator)
self.visit(node.operand2)
def visit_CVarDefNode(self, node):
self.startline(u"cdef ")
self.visit(node.base_type)
......@@ -201,7 +201,7 @@ class CodeWriter(TreeVisitor):
def visit_SequenceNode(self, node):
self.comma_separated_list(node.args) # Might need to discover whether we need () around tuples...hmm...
def visit_SimpleCallNode(self, node):
self.visit(node.function)
self.put(u"(")
......@@ -224,14 +224,14 @@ class CodeWriter(TreeVisitor):
self.startline()
self.visit(node.expr)
self.endline()
def visit_InPlaceAssignmentNode(self, node):
self.startline()
self.visit(node.lhs)
self.put(u" %s= " % node.operator)
self.visit(node.rhs)
self.endline()
def visit_WithStatNode(self, node):
self.startline()
self.put(u"with ")
......@@ -243,7 +243,7 @@ class CodeWriter(TreeVisitor):
self.indent()
self.visit(node.body)
self.dedent()
def visit_AttributeNode(self, node):
self.visit(node.obj)
self.put(u".%s" % node.attribute)
......
......@@ -11,7 +11,7 @@ import Symtab
class AutoTestDictTransform(ScopeTrackingTransform):
# Handles autotestdict directive
blacklist = ['__cinit__', '__dealloc__', '__richcmp__',
blacklist = ['__cinit__', '__dealloc__', '__richcmp__',
'__nonzero__', '__bool__',
'__len__', '__contains__']
......
......@@ -12,9 +12,9 @@ from Cython import Utils
# need one-character substitutions (for now) so offsets aren't off
special_chars = [(u'<', u'\xF0', u'&lt;'),
(u'>', u'\xF1', u'&gt;'),
(u'>', u'\xF1', u'&gt;'),
(u'&', u'\xF2', u'&amp;')]
line_pos_comment = re.compile(r'/\*.*?<<<<<<<<<<<<<<.*?\*/\n*', re.DOTALL)
class AnnotationCCodeWriter(CCodeWriter):
......@@ -32,19 +32,19 @@ class AnnotationCCodeWriter(CCodeWriter):
self.annotations = create_from.annotations
self.code = create_from.code
self.last_pos = create_from.last_pos
def create_new(self, create_from, buffer, copy_formatting):
return AnnotationCCodeWriter(create_from, buffer, copy_formatting)
def write(self, s):
CCodeWriter.write(self, s)
self.annotation_buffer.write(s)
def mark_pos(self, pos):
if pos is not None:
CCodeWriter.mark_pos(self, pos)
if self.last_pos:
pos_code = self.code.setdefault(self.last_pos[0].get_description(),{})
pos_code = self.code.setdefault(self.last_pos[0].filename,{})
code = pos_code.get(self.last_pos[1], "")
pos_code[self.last_pos[1]] = code + self.annotation_buffer.getvalue()
self.annotation_buffer = StringIO()
......@@ -52,7 +52,7 @@ class AnnotationCCodeWriter(CCodeWriter):
def annotate(self, pos, item):
self.annotations.append((pos, item))
def save_annotation(self, source_filename, target_filename):
self.mark_pos(None)
f = Utils.open_source_file(source_filename)
......@@ -74,7 +74,7 @@ class AnnotationCCodeWriter(CCodeWriter):
all.append(((source_filename, pos[1], pos[2]+size), end))
else:
all.append((pos, start+end))
all.sort()
all.reverse()
for pos, item in all:
......@@ -83,7 +83,7 @@ class AnnotationCCodeWriter(CCodeWriter):
col += 1
line = lines[line_no]
lines[line_no] = line[:col] + item + line[col:]
html_filename = os.path.splitext(target_filename)[0] + ".html"
f = codecs.open(html_filename, "w", encoding="UTF-8")
f.write(u'<html>\n')
......@@ -130,14 +130,14 @@ function toggleDiv(id) {
c_file = Utils.decode_filename(os.path.basename(target_filename))
f.write(u'<p>Raw output: <a href="%s">%s</a>\n' % (c_file, c_file))
k = 0
py_c_api = re.compile(u'(Py[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]+)\(')
py_marco_api = re.compile(u'(Py[A-Z][a-z]+_[A-Z][A-Z_]+)\(')
pyx_c_api = re.compile(u'(__Pyx_[A-Z][a-z_][A-Za-z_]+)\(')
pyx_macro_api = re.compile(u'(__Pyx_[A-Z][A-Z_]+)\(')
error_goto = re.compile(ur'((; *if .*)? \{__pyx_filename = .*goto __pyx_L\w+;\})')
refnanny = re.compile(u'(__Pyx_X?(GOT|GIVE)REF|__Pyx_RefNanny[A-Za-z]+)')
code_source_file = self.code[source_filename]
for line in lines:
......@@ -146,18 +146,18 @@ function toggleDiv(id) {
code = code_source_file[k]
except KeyError:
code = ''
code = code.replace('<', '<code><</code>')
code, py_c_api_calls = py_c_api.subn(ur"<span class='py_c_api'>\1</span>(", code)
code, pyx_c_api_calls = pyx_c_api.subn(ur"<span class='pyx_c_api'>\1</span>(", code)
code, py_macro_api_calls = py_marco_api.subn(ur"<span class='py_macro_api'>\1</span>(", code)
code, pyx_macro_api_calls = pyx_macro_api.subn(ur"<span class='pyx_macro_api'>\1</span>(", code)
code, refnanny_calls = refnanny.subn(ur"<span class='refnanny'>\1</span>", code)
code, error_goto_calls = error_goto.subn(ur"<span class='error_goto'>\1</span>", code)
code = code.replace(u"<span class='error_goto'>;", u";<span class='error_goto'>")
score = 5*py_c_api_calls + 2*pyx_c_api_calls + py_macro_api_calls + pyx_macro_api_calls - refnanny_calls
color = u"FFFF%02x" % int(255/(1+score/10.0))
f.write(u"<pre class='line' style='background-color: #%s' onclick='toggleDiv(\"line%s\")'>" % (color, k))
......@@ -166,13 +166,13 @@ function toggleDiv(id) {
for c, cc, html in special_chars:
line = line.replace(cc, html)
f.write(line.rstrip())
f.write(u'</pre>\n')
code = re.sub(line_pos_comment, '', code) # inline annotations are redundant
f.write(u"<pre id='line%s' class='code' style='background-color: #%s'>%s</pre>" % (k, color, code))
f.write(u'</body></html>\n')
f.close()
# TODO: make this cleaner
def escape(raw_string):
......@@ -184,15 +184,15 @@ def escape(raw_string):
class AnnotationItem(object):
def __init__(self, style, text, tag="", size=0):
self.style = style
self.text = text
self.tag = tag
self.size = size
def start(self):
return u"<span class='tag %s' title='%s'>%s" % (self.style, self.text, self.tag)
def end(self):
return self.size, u"</span>"
......@@ -101,7 +101,7 @@ class EmbedSignature(CythonTransform):
return node
else:
return super(EmbedSignature, self).__call__(node)
def visit_ClassDefNode(self, node):
oldname = self.class_name
oldclass = self.class_node
......@@ -120,7 +120,7 @@ class EmbedSignature(CythonTransform):
def visit_DefNode(self, node):
if not self.current_directives['embedsignature']:
return node
is_constructor = False
hide_self = False
if node.entry.is_special:
......
......@@ -57,12 +57,12 @@ class IntroduceBufferAuxiliaryVars(CythonTransform):
if isinstance(node, ModuleNode) and len(bufvars) > 0:
# for now...note that pos is wrong
# for now...note that pos is wrong
raise CompileError(node.pos, "Buffer vars not allowed in module scope")
for entry in bufvars:
if entry.type.dtype.is_ptr:
raise CompileError(node.pos, "Buffers with pointer types not yet supported.")
name = entry.name
buftype = entry.type
if buftype.ndim > self.max_ndim:
......@@ -84,10 +84,10 @@ class IntroduceBufferAuxiliaryVars(CythonTransform):
if entry.is_arg:
result.used = True
return result
stridevars = [var(Naming.bufstride_prefix, i, "0") for i in range(entry.type.ndim)]
shapevars = [var(Naming.bufshape_prefix, i, "0") for i in range(entry.type.ndim)]
shapevars = [var(Naming.bufshape_prefix, i, "0") for i in range(entry.type.ndim)]
mode = entry.type.mode
if mode == 'full':
suboffsetvars = [var(Naming.bufsuboffset_prefix, i, "-1") for i in range(entry.type.ndim)]
......@@ -95,7 +95,7 @@ class IntroduceBufferAuxiliaryVars(CythonTransform):
suboffsetvars = None
entry.buffer_aux = Symtab.BufferAux(bufinfo, stridevars, shapevars, suboffsetvars)
scope.buffer_entries = bufvars
self.scope = scope
......@@ -138,9 +138,9 @@ def analyse_buffer_options(globalpos, env, posargs, dictargs, defaults=None, nee
"""
if defaults is None:
defaults = buffer_defaults
posargs, dictargs = Interpreter.interpret_compiletime_options(posargs, dictargs, type_env=env, type_args = (0,'dtype'))
if len(posargs) > buffer_positional_options_count:
raise CompileError(posargs[-1][1], ERR_BUF_TOO_MANY)
......@@ -187,7 +187,7 @@ def analyse_buffer_options(globalpos, env, posargs, dictargs, defaults=None, nee
assert_bool('cast')
return options
#
# Code generation
......@@ -209,7 +209,7 @@ def get_flags(buffer_aux, buffer_type):
assert False
if buffer_aux.writable_needed: flags += "| PyBUF_WRITABLE"
return flags
def used_buffer_aux_vars(entry):
buffer_aux = entry.buffer_aux
buffer_aux.buffer_info_var.used = True
......@@ -258,10 +258,10 @@ def get_getbuffer_call(code, obj_cname, buffer_aux, buffer_type):
bufstruct = buffer_aux.buffer_info_var.cname
dtype_typeinfo = get_type_information_cname(code, buffer_type.dtype)
return ("__Pyx_GetBufferAndValidate(&%(bufstruct)s, "
"(PyObject*)%(obj_cname)s, &%(dtype_typeinfo)s, %(flags)s, %(ndim)d, "
"%(cast)d, __pyx_stack)" % locals())
"%(cast)d, __pyx_stack)" % locals())
def put_assign_to_buffer(lhs_cname, rhs_cname, buffer_aux, buffer_type,
is_initialized, pos, code):
......@@ -272,7 +272,7 @@ def put_assign_to_buffer(lhs_cname, rhs_cname, buffer_aux, buffer_type,
However, the assignment operation may throw an exception so that the reassignment
never happens.
Depending on the circumstances there are two possible outcomes:
- Old buffer released, new acquired, rhs assigned to lhs
- Old buffer released, new acquired which fails, reacquire old lhs buffer
......@@ -285,7 +285,7 @@ def put_assign_to_buffer(lhs_cname, rhs_cname, buffer_aux, buffer_type,
code.putln("{") # Set up necesarry stack for getbuffer
code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % buffer_type.dtype.struct_nesting_depth())
getbuffer = get_getbuffer_call(code, "%s", buffer_aux, buffer_type) # fill in object below
if is_initialized:
......@@ -370,7 +370,7 @@ def put_buffer_lookup_code(entry, index_signeds, index_cnames, directives, pos,
code.putln("%s = %d;" % (tmp_cname, dim))
code.put("} else ")
# check bounds in positive direction
if signed != 0:
if signed != 0:
cast = ""
else:
cast = "(size_t)"
......@@ -389,7 +389,7 @@ def put_buffer_lookup_code(entry, index_signeds, index_cnames, directives, pos,
bufaux.shapevars):
if signed != 0:
code.putln("if (%s < 0) %s += %s;" % (cname, cname, shape.cname))
# Create buffer lookup and return it
# This is done via utility macros/inline functions, which vary
# according to the access mode used.
......@@ -418,7 +418,7 @@ def put_buffer_lookup_code(entry, index_signeds, index_cnames, directives, pos,
for i, s in zip(index_cnames, bufaux.stridevars):
params.append(i)
params.append(s.cname)
# Make sure the utility code is available
if funcname not in code.globalstate.utility_codes:
code.globalstate.utility_codes.add(funcname)
......@@ -458,7 +458,7 @@ def buf_lookup_full_code(proto, defin, name, nd):
char* ptr = (char*)buf;
""") % (name, funcargs) + "".join([dedent("""\
ptr += s%d * i%d;
if (o%d >= 0) ptr = *((char**)ptr) + o%d;
if (o%d >= 0) ptr = *((char**)ptr) + o%d;
""") % (i, i, i, i) for i in range(nd)]
) + "\nreturn ptr;\n}")
......@@ -563,7 +563,7 @@ def use_py2_buffer_functions(env):
#endif
""")
env.use_utility_code(UtilityCode(
proto = dedent("""\
#if PY_MAJOR_VERSION < 3
......@@ -613,9 +613,9 @@ def get_type_information_cname(code, dtype, maxdepth=None):
if name not in code.globalstate.utility_codes:
code.globalstate.utility_codes.add(name)
typecode = code.globalstate['typeinfo']
complex_possible = dtype.is_struct_or_union and dtype.can_be_complex()
declcode = dtype.declaration_code("")
if dtype.is_simple_buffer_dtype():
structinfo_name = "NULL"
......@@ -634,7 +634,7 @@ def get_type_information_cname(code, dtype, maxdepth=None):
typecode.putln("};", safe=True)
else:
assert False
rep = str(dtype)
if dtype.is_int:
if dtype.signed == 0:
......@@ -851,7 +851,7 @@ static size_t __Pyx_BufFmt_TypeCharToNativeSize(char ch, int is_complex) {
default: {
__Pyx_BufFmt_RaiseUnexpectedChar(ch);
return 0;
}
}
}
}
......@@ -895,7 +895,7 @@ static size_t __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) {
default: {
__Pyx_BufFmt_RaiseUnexpectedChar(ch);
return 0;
}
}
}
}
......@@ -932,7 +932,7 @@ static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) {
do {
__Pyx_StructField* field = ctx->head->field;
__Pyx_TypeInfo* type = field->type;
if (ctx->packmode == '@' || ctx->packmode == '^') {
size = __Pyx_BufFmt_TypeCharToNativeSize(ctx->enc_type, ctx->is_complex);
} else {
......@@ -955,7 +955,7 @@ static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) {
ctx->head->parent_offset = parent_offset;
continue;
}
__Pyx_BufFmt_RaiseExpected(ctx);
return -1;
}
......@@ -969,7 +969,7 @@ static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) {
}
ctx->fmt_offset += size;
--ctx->enc_count; /* Consume from buffer string */
/* Done checking, move to next field, pushing or popping struct stack if needed */
......@@ -1002,7 +1002,7 @@ static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) {
} while (ctx->enc_count);
ctx->enc_type = 0;
ctx->is_complex = 0;
return 0;
return 0;
}
static int __Pyx_BufFmt_FirstPack(__Pyx_BufFmt_Context* ctx) {
......@@ -1124,7 +1124,7 @@ static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const cha
return NULL;
}
}
}
}
}
......
......@@ -438,14 +438,14 @@ builtin_types_table = [
("type", "PyType_Type", []),
# This conflicts with the C++ bool type, and unfortunately
# C++ is too liberal about PyObject* <-> bool conversions,
# C++ is too liberal about PyObject* <-> bool conversions,
# resulting in unintuitive runtime behavior and segfaults.
# ("bool", "PyBool_Type", []),
("int", "PyInt_Type", []),
("long", "PyLong_Type", []),
("float", "PyFloat_Type", []),
("complex", "PyComplex_Type", [BuiltinAttribute('cval', field_type_name = 'Py_complex'),
BuiltinAttribute('real', 'cval.real', field_type = PyrexTypes.c_double_type),
BuiltinAttribute('imag', 'cval.imag', field_type = PyrexTypes.c_double_type),
......@@ -474,7 +474,7 @@ builtin_types_table = [
]),
# ("file", "PyFile_Type", []), # not in Py3
("set", "PySet_Type", [BuiltinMethod("clear", "T", "i", "PySet_Clear"),
("set", "PySet_Type", [BuiltinMethod("clear", "T", "i", "PySet_Clear"),
BuiltinMethod("discard", "TO", "i", "PySet_Discard"),
BuiltinMethod("add", "TO", "i", "PySet_Add"),
BuiltinMethod("pop", "T", "O", "PySet_Pop")]),
......@@ -490,7 +490,7 @@ types_that_construct_their_instance = (
# 'file', # only in Py2.x
)
builtin_structs_table = [
('Py_buffer', 'Py_buffer',
[("buf", PyrexTypes.c_void_ptr_type),
......
......@@ -10,31 +10,31 @@ usage = """\
Cython (http://cython.org) is a compiler for code written in the
Cython language. Cython is based on Pyrex by Greg Ewing.
Usage: cython [options] sourcefile.pyx ...
Usage: cython [options] sourcefile.{pyx,py} ...
Options:
-V, --version Display version number of cython compiler
-l, --create-listing Write error messages to a listing file
-I, --include-dir <directory> Search for include files in named directory
(multiply include directories are allowed).
(multiple include directories are allowed).
-o, --output-file <filename> Specify name of generated C file
-t, --timestamps Only compile newer source files (implied with -r)
-t, --timestamps Only compile newer source files
-f, --force Compile all source files (overrides implied -t)
-q, --quiet Don't print module names in recursive mode
-v, --verbose Be verbose, print file names on multiple compilation
-p, --embed-positions If specified, the positions in Cython files of each
function definition is embedded in its docstring.
--cleanup <level> Release interned objects on python exit, for memory debugging.
Level indicates aggressiveness, default 0 releases nothing.
-w, --working <directory> Sets the working directory for Cython (the directory modules
--cleanup <level> Release interned objects on python exit, for memory debugging.
Level indicates aggressiveness, default 0 releases nothing.
-w, --working <directory> Sets the working directory for Cython (the directory modules
are searched from)
--gdb Output debug information for cygdb
-D, --no-docstrings Remove docstrings.
-D, --no-docstrings Strip docstrings from the compiled module.
-a, --annotate Produce a colorized HTML version of the source.
--line-directives Produce #line directives pointing to the .pyx source
--cplus Output a c++ rather than c file.
--embed Embed the Python interpreter in a main() method.
--cplus Output a C++ rather than C file.
--embed Generate a main() function that embeds the Python interpreter.
-2 Compile based on Python-2 syntax and code semantics.
-3 Compile based on Python-3 syntax and code semantics.
--fast-fail Abort the compilation on the first error
......@@ -42,7 +42,7 @@ Options:
"""
# The following is broken http://trac.cython.org/cython_trac/ticket/379
# -r, --recursive Recursively find and compile dependencies
# -r, --recursive Recursively find and compile dependencies (implies -t)
#The following experimental options are supported only on MacOSX:
......@@ -65,7 +65,7 @@ def parse_command_line(args):
return args.pop(0)
else:
bad_usage()
def get_param(option):
tail = option[2:]
if tail:
......@@ -125,6 +125,8 @@ def parse_command_line(args):
options.language_level = 3
elif option == "--fast-fail":
Options.fast_fail = True
elif option == "--disable-function-redefinition":
Options.disable_function_redefinition = True
elif option in ("-X", "--directive"):
try:
options.compiler_directives = Options.parse_directive_list(
......@@ -141,6 +143,9 @@ def parse_command_line(args):
else:
sys.stderr.write("Unknown debug flag: %s\n" % option)
bad_usage()
elif option in ('-h', '--help'):
sys.stdout.write(usage)
sys.exit(0)
else:
sys.stderr.write("Unknown compiler flag: %s\n" % option)
sys.exit(1)
......
This diff is collapsed.
import bisect, sys
# This module keeps track of arbitrary "states" at any point of the code.
# This module keeps track of arbitrary "states" at any point of the code.
# A state is considered known if every path to the given point agrees on
# its state, otherwise it is None (i.e. unknown).
# its state, otherwise it is None (i.e. unknown).
# It might be useful to be able to "freeze" the set of states by pushing
# It might be useful to be able to "freeze" the set of states by pushing
# all state changes to the tips of the trees for fast reading. Perhaps this
# could be done on get_state, clearing the cache on set_state (assuming
# incoming is immutable).
# could be done on get_state, clearing the cache on set_state (assuming
# incoming is immutable).
# This module still needs a lot of work, and probably should totally be
# redesigned. It doesn't take return, raise, continue, or break into
# account.
# This module still needs a lot of work, and probably should totally be
# redesigned. It doesn't take return, raise, continue, or break into
# account.
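Illustrative only: the "every path agrees, otherwise None" rule described above amounts to a merge like this (a plain sketch, not the module's actual API):

    # sketch of the state-merging rule described in the comments above
    def merge_states(per_branch_states):
        first = per_branch_states[0]
        if all(state == first for state in per_branch_states):
            return first          # every path agrees: the state is known
        return None               # paths disagree: the state is unknown (None)

    merge_states(['assigned', 'assigned'])   # -> 'assigned'
    merge_states(['assigned', 'deleted'])    # -> None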
from Cython.Compiler.Scanning import StringSourceDescriptor
try:
......@@ -31,26 +31,26 @@ class ControlFlow(object):
self.parent = parent
self.tip = {}
self.end_pos = _END_POS
def start_branch(self, pos):
self.end_pos = pos
branch_point = BranchingControlFlow(pos, self)
if self.parent is not None:
self.parent.branches[-1] = branch_point
return branch_point.branches[0]
def next_branch(self, pos):
self.end_pos = pos
return self.parent.new_branch(pos)
def finish_branch(self, pos):
self.end_pos = pos
self.parent.end_pos = pos
return LinearControlFlow(pos, self.parent)
def get_state(self, item, pos=_END_POS):
return self.get_pos_state(item, pos)[1]
def get_pos_state(self, item, pos=_END_POS):
# do some caching
if pos > self.end_pos:
......@@ -86,14 +86,14 @@ class ControlFlow(object):
if item in current.tip:
del current.tip[item]
current._set_state_local(pos, item, state)
class LinearControlFlow(ControlFlow):
def __init__(self, start_pos=(), incoming=None, parent=None):
ControlFlow.__init__(self, start_pos, incoming, parent)
self.events = {}
def _set_state_local(self, pos, item, state):
if item in self.events:
event_list = self.events[item]
......@@ -111,10 +111,10 @@ class LinearControlFlow(ControlFlow):
return None
def to_string(self, indent='', limit=None):
if len(self.events) == 0:
s = indent + "[no state changes]"
else:
all = []
for item, event_list in self.events.items():
......@@ -126,21 +126,21 @@ class LinearControlFlow(ControlFlow):
if self.incoming is not limit and self.incoming is not None:
s = "%s\n%s" % (self.incoming.to_string(indent, limit=limit), s)
return s
class BranchingControlFlow(ControlFlow):
def __init__(self, start_pos, incoming, parent=None):
ControlFlow.__init__(self, start_pos, incoming, parent)
self.branches = [LinearControlFlow(start_pos, incoming, parent=self)]
self.branch_starts = [start_pos]
def _set_state_local(self, pos, item, state):
for branch_pos, branch in zip(self.branch_starts[::-1], self.branches[::-1]):
if pos >= branch_pos:
branch._set_state_local(pos, item, state)
return
def _get_pos_state_local(self, item, pos, stop_at=None):
if pos < self.end_pos:
for branch_pos, branch in zip(self.branch_starts[::-1], self.branches[::-1]):
......
......@@ -15,9 +15,9 @@ class CythonScope(ModuleScope):
pos=None,
defining = 1,
cname='<error>')
def lookup_type(self, name):
# This function should go away when types are all first-level objects.
# This function should go away when types are all first-level objects.
type = parse_basic_type(name)
if type:
return type
......@@ -32,12 +32,12 @@ def create_utility_scope(context):
utility_scope = ModuleScope(u'utility', None, context)
# These are used to optimize isinstance in FinalOptimizePhase
type_object = utility_scope.declare_typedef('PyTypeObject',
base_type = c_void_type,
type_object = utility_scope.declare_typedef('PyTypeObject',
base_type = c_void_type,
pos = None,
cname = 'PyTypeObject')
type_object.is_void = True
utility_scope.declare_cfunction(
'PyObject_TypeCheck',
CFuncType(c_bint_type, [CFuncTypeArg("o", py_object_type, None),
......@@ -45,5 +45,5 @@ def create_utility_scope(context):
pos = None,
defining = 1,
cname = 'PyObject_TypeCheck')
return utility_scope
......@@ -44,7 +44,7 @@ def format_error(message, position):
return message
class CompileError(PyrexError):
def __init__(self, position = None, message = u""):
self.position = position
self.message_only = message
......@@ -54,7 +54,7 @@ class CompileError(PyrexError):
Exception.__init__(self, format_error(message, position))
class CompileWarning(PyrexWarning):
def __init__(self, position = None, message = ""):
self.position = position
# Deprecated and withdrawn in 2.6:
......@@ -63,7 +63,7 @@ class CompileWarning(PyrexWarning):
class InternalError(Exception):
# If this is ever raised, there is a bug in the compiler.
def __init__(self, message):
self.message_only = message
Exception.__init__(self, u"Internal compiler error: %s"
......@@ -71,7 +71,7 @@ class InternalError(Exception):
class AbortError(Exception):
# Throw this to stop the compilation immediately.
def __init__(self, message):
self.message_only = message
Exception.__init__(self, u"Abort error: %s" % message)
......@@ -98,7 +98,7 @@ class CompilerCrash(CompileError):
CompileError.__init__(self, pos, message)
class NoElementTreeInstalledException(PyrexError):
"""raised when the user enabled options.gdb_debug but no ElementTree
"""raised when the user enabled options.gdb_debug but no ElementTree
implementation was found
"""
......@@ -155,7 +155,7 @@ def error(position, message):
#print "Errors.error:", repr(position), repr(message) ###
if position is None:
raise InternalError(message)
err = CompileError(position, message)
err = CompileError(position, message)
if debug_exception_on_error: raise Exception(err) # debug
report_error(err)
return err
......@@ -198,7 +198,7 @@ def warn_once(position, message, level=0):
return warn
# These functions can be used to momentarily suppress errors.
# These functions can be used to momentarily suppress errors.
error_stack = []
......
This source diff could not be displayed because it is too large.
......@@ -14,7 +14,7 @@ from Errors import CompileError
class EmptyScope(object):
def lookup(self, name):
return None
empty_scope = EmptyScope()
def interpret_compiletime_options(optlist, optdict, type_env=None, type_args=()):
......@@ -45,7 +45,7 @@ def interpret_compiletime_options(optlist, optdict, type_env=None, type_args=())
raise CompileError(node.pos, "Type not allowed here.")
else:
return (node.compile_time_value(empty_scope), node.pos)
if optlist:
optlist = [interpret(x, ix) for ix, x in enumerate(optlist)]
if optdict:
......
......@@ -19,12 +19,12 @@ def make_lexicon():
octdigit = Any("01234567")
hexdigit = Any("0123456789ABCDEFabcdef")
indentation = Bol + Rep(Any(" \t"))
decimal = Rep1(digit)
dot = Str(".")
exponent = Any("Ee") + Opt(Any("+-")) + decimal
decimal_fract = (decimal + dot + Opt(decimal)) | (dot + decimal)
name = letter + Rep(letter | digit)
intconst = decimal | (Str("0") + ((Any("Xx") + Rep1(hexdigit)) |
(Any("Oo") + Rep1(octdigit)) |
......@@ -33,33 +33,33 @@ def make_lexicon():
intliteral = intconst + intsuffix
fltconst = (decimal_fract + Opt(exponent)) | (decimal + exponent)
imagconst = (intconst | fltconst) + Any("jJ")
sq_string = (
Str("'") +
Rep(AnyBut("\\\n'") | (Str("\\") + AnyChar)) +
Str("'") +
Rep(AnyBut("\\\n'") | (Str("\\") + AnyChar)) +
Str("'")
)
dq_string = (
Str('"') +
Rep(AnyBut('\\\n"') | (Str("\\") + AnyChar)) +
Str('"') +
Rep(AnyBut('\\\n"') | (Str("\\") + AnyChar)) +
Str('"')
)
non_sq = AnyBut("'") | (Str('\\') + AnyChar)
tsq_string = (
Str("'''")
+ Rep(non_sq | (Str("'") + non_sq) | (Str("''") + non_sq))
+ Rep(non_sq | (Str("'") + non_sq) | (Str("''") + non_sq))
+ Str("'''")
)
non_dq = AnyBut('"') | (Str('\\') + AnyChar)
tdq_string = (
Str('"""')
+ Rep(non_dq | (Str('"') + non_dq) | (Str('""') + non_dq))
+ Rep(non_dq | (Str('"') + non_dq) | (Str('""') + non_dq))
+ Str('"""')
)
beginstring = Opt(Any(string_prefixes)) + Opt(Any(raw_prefixes)) + (Str("'") | Str('"') | Str("'''") | Str('"""'))
two_oct = octdigit + octdigit
three_oct = octdigit + octdigit + octdigit
......@@ -68,21 +68,21 @@ def make_lexicon():
escapeseq = Str("\\") + (two_oct | three_oct |
Str('u') + four_hex | Str('x') + two_hex |
Str('U') + four_hex + four_hex | AnyChar)
deco = Str("@")
bra = Any("([{")
ket = Any(")]}")
punct = Any(":,;+-*/|&<>=.%`~^?")
diphthong = Str("==", "<>", "!=", "<=", ">=", "<<", ">>", "**", "//",
"+=", "-=", "*=", "/=", "%=", "|=", "^=", "&=",
"+=", "-=", "*=", "/=", "%=", "|=", "^=", "&=",
"<<=", ">>=", "**=", "//=", "->")
spaces = Rep1(Any(" \t\f"))
escaped_newline = Str("\\\n")
lineterm = Eol + Opt(Str("\n"))
comment = Str("#") + Rep(AnyBut("\n"))
comment = Str("#") + Rep(AnyBut("\n"))
return Lexicon([
(name, IDENT),
(intliteral, 'INT'),
......@@ -90,25 +90,25 @@ def make_lexicon():
(imagconst, 'IMAG'),
(deco, 'DECORATOR'),
(punct | diphthong, TEXT),
(bra, Method('open_bracket_action')),
(ket, Method('close_bracket_action')),
(lineterm, Method('newline_action')),
#(stringlit, 'STRING'),
(beginstring, Method('begin_string_action')),
(comment, IGNORE),
(spaces, IGNORE),
(escaped_newline, IGNORE),
State('INDENT', [
(comment + lineterm, Method('commentline')),
(Opt(spaces) + Opt(comment) + lineterm, IGNORE),
(indentation, Method('indentation_action')),
(Eof, Method('eof_action'))
]),
State('SQ_STRING', [
(escapeseq, 'ESCAPE'),
(Rep1(AnyBut("'\"\n\\")), 'CHARS'),
......@@ -117,7 +117,7 @@ def make_lexicon():
(Str("'"), Method('end_string_action')),
(Eof, 'EOF')
]),
State('DQ_STRING', [
(escapeseq, 'ESCAPE'),
(Rep1(AnyBut('"\n\\')), 'CHARS'),
......@@ -126,7 +126,7 @@ def make_lexicon():
(Str('"'), Method('end_string_action')),
(Eof, 'EOF')
]),
State('TSQ_STRING', [
(escapeseq, 'ESCAPE'),
(Rep1(AnyBut("'\"\n\\")), 'CHARS'),
......@@ -135,7 +135,7 @@ def make_lexicon():
(Str("'''"), Method('end_string_action')),
(Eof, 'EOF')
]),
State('TDQ_STRING', [
(escapeseq, 'ESCAPE'),
(Rep1(AnyBut('"\'\n\\')), 'CHARS'),
......@@ -144,10 +144,10 @@ def make_lexicon():
(Str('"""'), Method('end_string_action')),
(Eof, 'EOF')
]),
(Eof, Method('eof_action'))
],
# FIXME: Plex 1.9 needs different args here from Plex 1.1.4
#debug_flags = scanner_debug_flags,
#debug_file = scanner_dump_file
......
......@@ -74,7 +74,6 @@ class Context(object):
# language_level int currently 2 or 3 for Python 2/3
def __init__(self, include_directories, compiler_directives, cpp=False, language_level=2):
#self.modules = {"__builtin__" : BuiltinScope()}
import Builtin, CythonScope
self.modules = {"__builtin__" : Builtin.builtin_scope}
self.modules["cython"] = CythonScope.create_cython_scope(self)
......@@ -99,6 +98,7 @@ class Context(object):
from Future import print_function, unicode_literals
self.future_directives.add(print_function)
self.future_directives.add(unicode_literals)
self.modules['builtins'] = self.modules['__builtin__']
def create_pipeline(self, pxd, py=False):
from Visitor import PrintTree
......@@ -315,7 +315,10 @@ class Context(object):
try:
if debug_find_module:
print("Context.find_module: Parsing %s" % pxd_pathname)
source_desc = FileSourceDescriptor(pxd_pathname)
rel_path = module_name.replace('.', os.sep) + os.path.splitext(pxd_pathname)[1]
if not pxd_pathname.endswith(rel_path):
rel_path = pxd_pathname # safety measure to prevent printing incorrect paths
source_desc = FileSourceDescriptor(pxd_pathname, rel_path)
err, result = self.process_pxd(source_desc, scope, module_name)
if err:
raise err
......@@ -589,15 +592,23 @@ def run_pipeline(source, options, full_module_name = None):
# Set up source object
cwd = os.getcwd()
source_desc = FileSourceDescriptor(os.path.join(cwd, source))
abs_path = os.path.abspath(source)
source_ext = os.path.splitext(source)[1]
full_module_name = full_module_name or context.extract_module_name(source, options)
if options.relative_path_in_code_position_comments:
rel_path = full_module_name.replace('.', os.sep) + source_ext
if not abs_path.endswith(rel_path):
rel_path = source # safety measure to prevent printing incorrect paths
else:
rel_path = abs_path
source_desc = FileSourceDescriptor(abs_path, rel_path)
source = CompilationSource(source_desc, full_module_name, cwd)
# Set up result object
result = create_default_resultobj(source, options)
# Get pipeline
if source_desc.filename.endswith(".py"):
if source_ext.lower() == '.py':
pipeline = context.create_py_pipeline(options, result)
else:
pipeline = context.create_pyx_pipeline(options, result)
......@@ -825,6 +836,7 @@ default_options = dict(
compiler_directives = {},
evaluate_tree_assertions = False,
emit_linenums = False,
relative_path_in_code_position_comments = True,
language_level = 2,
gdb_debug = False,
)
This diff is collapsed.
......@@ -90,17 +90,17 @@ class IterationTransform(Visitor.VisitorTransform):
self.visitchildren(node)
self.current_scope = oldscope
return node
def visit_PrimaryCmpNode(self, node):
if node.is_ptr_contains():
# for t in operand2:
# if operand1 == t:
# res = True
# break
# else:
# res = False
pos = node.pos
res_handle = UtilNodes.TempHandle(PyrexTypes.c_bint_type)
res = res_handle.ref(pos)
......@@ -114,7 +114,7 @@ class IterationTransform(Visitor.VisitorTransform):
cmp_node = ExprNodes.PrimaryCmpNode(
pos, operator=u'==', operand1=node.operand1, operand2=target)
if_body = Nodes.StatListNode(
pos,
pos,
stats = [Nodes.SingleAssignmentNode(pos, lhs=result_ref, rhs=ExprNodes.BoolNode(pos, value=1)),
Nodes.BreakStatNode(pos)])
if_node = Nodes.IfStatNode(
......@@ -133,7 +133,7 @@ class IterationTransform(Visitor.VisitorTransform):
for_loop.analyse_expressions(self.current_scope)
for_loop = self(for_loop)
new_node = UtilNodes.TempResultFromStatNode(result_ref, for_loop)
if node.operator == 'not_in':
new_node = ExprNodes.NotNode(pos, operand=new_node)
return new_node
......@@ -145,7 +145,7 @@ class IterationTransform(Visitor.VisitorTransform):
def visit_ForInStatNode(self, node):
self.visitchildren(node)
return self._optimise_for_loop(node)
def _optimise_for_loop(self, node):
iterator = node.iterator.sequence
if iterator.type is Builtin.dict_type:
......@@ -690,9 +690,9 @@ class IterationTransform(Visitor.VisitorTransform):
class SwitchTransform(Visitor.VisitorTransform):
"""
This transformation tries to turn long if statements into C switch statements.
This transformation tries to turn long if statements into C switch statements.
The requirement is that every clause be an (or of) var == value, where the var
is common among all clauses and both var and value are ints.
is common among all clauses and both var and value are ints.
"""
NO_MATCH = (None, None, None)
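Illustrative only: an if chain of the shape the docstring above requires, i.e. every clause compares the same variable against integer constants (values are made up):

    x = 2
    if x == 1:
        result = "one"
    elif x == 2 or x == 3:
        result = "two or three"
    else:
        result = "other"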
......@@ -892,14 +892,14 @@ class SwitchTransform(Visitor.VisitorTransform):
return UtilNodes.TempResultFromStatNode(result_ref, switch_node)
visit_Node = Visitor.VisitorTransform.recurse_to_children
class FlattenInListTransform(Visitor.VisitorTransform, SkipDeclarations):
"""
This transformation flattens "x in [val1, ..., valn]" into a sequential list
of comparisons.
of comparisons.
"""
def visit_PrimaryCmpNode(self, node):
self.visitchildren(node)
if node.cascade is not None:
......@@ -938,12 +938,12 @@ class FlattenInListTransform(Visitor.VisitorTransform, SkipDeclarations):
operand2 = arg,
cascade = None)
conds.append(ExprNodes.TypecastNode(
pos = node.pos,
pos = node.pos,
operand = cond,
type = PyrexTypes.c_bint_type))
def concat(left, right):
return ExprNodes.BoolBinopNode(
pos = node.pos,
pos = node.pos,
operator = conjunction,
operand1 = left,
operand2 = right)
......@@ -1008,7 +1008,7 @@ class DropRefcountingTransform(Visitor.VisitorTransform):
if not index_id:
return node
rindices.append(index_id)
if set(lindices) != set(rindices):
return node
if len(set(lindices)) != len(right_indices):
......@@ -1110,8 +1110,9 @@ class EarlyReplaceBuiltinCalls(Visitor.EnvTransform):
def _function_is_builtin_name(self, function):
if not function.is_name:
return False
entry = self.current_env().lookup(function.name)
if entry and getattr(entry, 'scope', None) is not Builtin.builtin_scope:
env = self.current_env()
entry = env.lookup(function.name)
if entry is not env.builtin_scope().lookup_here(function.name):
return False
# if entry is None, it's at least an undeclared name, so likely builtin
return True
......@@ -1724,8 +1725,8 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
# into a C function call (defined in the builtin scope)
if not function.entry:
return node
is_builtin = function.entry.is_builtin \
or getattr(function.entry, 'scope', None) is Builtin.builtin_scope
is_builtin = function.entry.is_builtin or \
function.entry is self.current_env().builtin_scope().lookup_here(function.name)
if not is_builtin:
return node
function_handler = self._find_handler(
......@@ -1985,20 +1986,26 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
test_nodes = []
env = self.current_env()
for test_type_node in types:
if not test_type_node.entry:
return node
entry = env.lookup(test_type_node.entry.name)
if not entry or not entry.type or not entry.type.is_builtin_type:
return node
type_check_function = entry.type.type_check_function(exact=False)
if not type_check_function:
builtin_type = None
if isinstance(test_type_node, ExprNodes.NameNode):
if test_type_node.entry:
entry = env.lookup(test_type_node.entry.name)
if entry and entry.type and entry.type.is_builtin_type:
builtin_type = entry.type
if builtin_type and builtin_type is not Builtin.type_type:
type_check_function = entry.type.type_check_function(exact=False)
type_check_args = [arg]
elif test_type_node.type is Builtin.type_type:
type_check_function = '__Pyx_TypeCheck'
type_check_args = [arg, test_type_node]
else:
return node
if type_check_function not in tests:
tests.append(type_check_function)
test_nodes.append(
ExprNodes.PythonCapiCallNode(
test_type_node.pos, type_check_function, self.Py_type_check_func_type,
args = [arg],
args = type_check_args,
is_temp = True,
))
......@@ -2128,7 +2135,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
is_temp = node.is_temp,
utility_code = pop_index_utility_code
)
return node
_handle_simple_method_list_pop = _handle_simple_method_object_pop
......@@ -3130,10 +3137,10 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
class FinalOptimizePhase(Visitor.CythonTransform):
"""
This visitor handles several commuting optimizations, and is run
just before the C code generation phase.
The optimizations currently implemented in this class are:
- eliminate None assignment and refcounting for first assignment.
just before the C code generation phase.
The optimizations currently implemented in this class are:
- eliminate None assignment and refcounting for first assignment.
- isinstance -> typecheck for cdef types
- eliminate checks for None and/or types that became redundant after tree changes
"""
......
......@@ -10,10 +10,10 @@ gcc_branch_hints = 1
pre_import = None
docstrings = True
# Decref global variables in this module on exit for garbage collection.
# Decref global variables in this module on exit for garbage collection.
# 0: None, 1+: interned objects, 2+: cdef globals, 3+: types objects
# Mostly for reducing noise for Valgrind, only executes at process exit
# (when all memory will be reclaimed anyways).
# (when all memory will be reclaimed anyways).
generate_cleanup_code = 0
annotate = 0
......@@ -22,35 +22,39 @@ annotate = 0
# to keep going and printing further error messages.
fast_fail = False
# This will convert statements of the form "for i in range(...)"
# This will convert statements of the form "for i in range(...)"
# to "for i from ..." when i is a cdef'd integer type, and the direction
# (i.e. sign of step) can be determined.
# WARNING: This may change the semantics if the range causes assignment to
# (i.e. sign of step) can be determined.
# WARNING: This may change the semantics if the range causes assignment to
# i to overflow. Specifically, if this option is set, an error will be
# raised before the loop is entered, whereas without this option the loop
# will execute until an overflowing value is encountered.
# will execute until an overflowing value is encountered.
convert_range = 1
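Illustrative only: the rewrite that convert_range enables, sketched as comments (Cython/Pyrex syntax; assumes i is a cdef'd integer and the step's sign is known at compile time):

    # cdef int i
    # for i in range(0, n, 2):         # original form
    # for i from 0 <= i < n by 2:      # bounded-loop form the optimisation targets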
# Enable this to allow one to write your_module.foo = ... to overwrite the
# definition if the cpdef function foo, at the cost of an extra dictionary
# lookup on every call.
# If this is 0 it simply creates a wrapper.
# Enable this to allow one to write your_module.foo = ... to overwrite the
# definition if the cpdef function foo, at the cost of an extra dictionary
# lookup on every call.
# If this is 0 it simply creates a wrapper.
lookup_module_cpdef = 0
# This will set local variables to None rather than NULL which may cause
# surpress what would be an UnboundLocalError in pure Python but eliminates
# checking for NULL on every use, and can decref rather than xdecref at the end.
# This will set local variables to None rather than NULL which may cause
# surpress what would be an UnboundLocalError in pure Python but eliminates
# checking for NULL on every use, and can decref rather than xdecref at the end.
# WARNING: This is a work in progress, may currently segfault.
init_local_none = 1
# Append the c file and line number to the traceback for exceptions.
# Append the c file and line number to the traceback for exceptions.
c_line_in_traceback = 1
# Whether or not to embed the Python interpreter, for use in making a
# standalone executable. This will provide a main() method which simply
# executes the body of this module.
# Whether or not to embed the Python interpreter, for use in making a
# standalone executable. This will provide a main() method which simply
# executes the body of this module.
embed = False
# Disables function redefinition, allowing all functions to be declared at
# module creation time. For legacy code only.
disable_function_redefinition = False
# Declare compiler directives
directive_defaults = {
......@@ -76,7 +80,7 @@ directive_defaults = {
'autotestdict.all': False,
'language_level': 2,
'fast_getattr': False, # Undocumented until we come up with a better way to handle this everywhere.
'warn': None,
'warn.undeclared': False,
......@@ -123,7 +127,7 @@ def parse_directive_value(name, value, relaxed_bool=False):
Traceback (most recent call last):
...
ValueError: boundscheck directive must be set to True or False, got 'true'
"""
type = directive_types.get(name)
if not type: return None
......
This diff is collapsed.
......@@ -353,6 +353,10 @@ class PyObjectType(PyrexType):
def can_coerce_to_pyobject(self, env):
return True
def default_coerced_ctype(self):
"The default C type that this Python type coerces to, or None."
return None
def assignable_from(self, src_type):
# except for pointers, conversion will be attempted
return not src_type.is_ptr or src_type.is_string
......@@ -403,7 +407,16 @@ class BuiltinObjectType(PyObjectType):
def __repr__(self):
return "<%s>"% self.cname
def default_coerced_ctype(self):
if self.name == 'bytes':
return c_char_ptr_type
elif self.name == 'bool':
return c_bint_type
elif self.name == 'float':
return c_double_type
return None
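Illustrative only: the default coercions introduced above, written out as plain data for reference (the C type names correspond to c_char_ptr_type, c_bint_type and c_double_type):

    default_coerced_ctypes = {
        'bytes': 'char *',    # c_char_ptr_type
        'bool':  'bint',      # c_bint_type
        'float': 'double',    # c_double_type
    }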
def assignable_from(self, src_type):
if isinstance(src_type, BuiltinObjectType):
return src_type.name == self.name
......@@ -1371,10 +1384,10 @@ impl="""
}
#if %(is_float)s
static CYTHON_INLINE %(real_type)s __Pyx_c_abs%(m)s(%(type)s z) {
#if HAVE_HYPOT
return hypot%(m)s(z.real, z.imag);
#else
#if !defined(HAVE_HYPOT) || defined(_MSC_VER)
return sqrt%(m)s(z.real*z.real + z.imag*z.imag);
#else
return hypot%(m)s(z.real, z.imag);
#endif
}
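For reference, both branches above compute |z| = sqrt(re^2 + im^2); hypot() is the overflow-safe variant, and the reordering simply prefers sqrt() where hypot is unavailable or problematic (the added _MSC_VER check). A quick numerical check in Python:

    import math
    z = complex(3.0, 4.0)
    print(math.hypot(z.real, z.imag))                  # 5.0
    print(math.sqrt(z.real*z.real + z.imag*z.imag))    # 5.0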
static CYTHON_INLINE %(type)s __Pyx_c_pow%(m)s(%(type)s a, %(type)s b) {
......
This diff is collapsed.
......@@ -17,7 +17,7 @@ class TestBufferParsing(CythonTest):
def not_parseable(self, expected_error, s):
e = self.should_fail(lambda: self.fragment(s), Errors.CompileError)
self.assertEqual(expected_error, e.message_only)
def test_basic(self):
t = self.parse(u"cdef object[float, 4, ndim=2, foo=foo] x")
bufnode = t.stats[0].base_type
......@@ -25,7 +25,7 @@ class TestBufferParsing(CythonTest):
self.assertEqual(2, len(bufnode.positional_args))
# print bufnode.dump()
# should put more here...
def test_type_pos(self):
self.parse(u"cdef object[short unsigned int, 3] x")
......@@ -68,7 +68,7 @@ class TestBufferOptions(CythonTest):
self.parse_opts(opts, expect_error=True)
# e = self.should_fail(lambda: self.parse_opts(opts))
self.assertEqual(expected_err, self.error.message_only)
def __test_basic(self):
buf = self.parse_opts(u"unsigned short int, 3")
self.assert_(isinstance(buf.dtype_node, CSimpleBaseTypeNode))
......@@ -80,7 +80,7 @@ class TestBufferOptions(CythonTest):
self.assert_(isinstance(buf.dtype_node, CSimpleBaseTypeNode))
self.assert_(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1)
self.assertEqual(3, buf.ndim)
def __test_ndim(self):
self.parse_opts(u"int, 2")
self.non_parse(ERR_BUF_NDIM, u"int, 'a'")
......
......@@ -12,7 +12,7 @@ class TestDecorator(TransformTest):
def decorated():
pass
""")
self.assertCode(u"""
def decorator(fun):
return fun
......
......@@ -5,12 +5,12 @@ from Cython.Compiler.UtilNodes import *
import Cython.Compiler.Naming as Naming
class TestTreeFragments(CythonTest):
def test_basic(self):
F = self.fragment(u"x = 4")
T = F.copy()
self.assertCode(u"x = 4", T)
def test_copy_is_taken(self):
F = self.fragment(u"if True: x = 4")
T1 = F.root
......@@ -46,7 +46,7 @@ class TestTreeFragments(CythonTest):
v = F.root.stats[1].rhs.operand2.operand1
a = T.stats[1].rhs.operand2.operand1
self.assertEquals(v.pos, a.pos)
def test_temps(self):
TemplateTransform.temp_name_counter = 0
F = self.fragment(u"""
......
......@@ -5,7 +5,7 @@
import re
from StringIO import StringIO
from Scanning import PyrexScanner, StringSourceDescriptor
from Symtab import BuiltinScope, ModuleScope
from Symtab import ModuleScope
import Symtab
import PyrexTypes
from Visitor import VisitorTransform
......@@ -23,18 +23,18 @@ class StringParseContext(Main.Context):
def __init__(self, include_directories, name):
Main.Context.__init__(self, include_directories, {})
self.module_name = name
def find_module(self, module_name, relative_to = None, pos = None, need_pxd = 1):
if module_name != self.module_name:
raise AssertionError("Not yet supporting any cimports/includes from string code snippets")
return ModuleScope(module_name, parent_module = None, context = self)
def parse_from_strings(name, code, pxds={}, level=None, initial_pos=None):
"""
Utility method to parse a (unicode) string of code. This is mostly
used for internal Cython compiler purposes (creating code snippets
that transforms should emit, as well as unit testing).
code - a unicode string containing Cython (module-level) code
name - a descriptive name for the code source (to use in error messages etc.)
"""
......@@ -78,7 +78,7 @@ class ApplyPositionAndCopy(TreeCopier):
def __init__(self, pos):
super(ApplyPositionAndCopy, self).__init__()
self.pos = pos
def visit_Node(self, node):
copy = super(ApplyPositionAndCopy, self).visit_Node(node)
copy.pos = self.pos
......@@ -87,7 +87,7 @@ class ApplyPositionAndCopy(TreeCopier):
class TemplateTransform(VisitorTransform):
"""
Makes a copy of a template tree while doing substitutions.
A dictionary "substitutions" should be passed in when calling
the transform; mapping names to replacement nodes. Then replacement
happens like this:
......@@ -103,11 +103,11 @@ class TemplateTransform(VisitorTransform):
Also a list "temps" should be passed. Any names listed will
be transformed into anonymous, temporary names.
Currently supported for tempnames is:
NameNode
(various function and class definition nodes etc. should be added to this)
Each replacement node gets the position of the substituted node
recursively applied to every member node.
"""
......@@ -148,7 +148,7 @@ class TemplateTransform(VisitorTransform):
c.pos = self.pos
self.visitchildren(c)
return c
def try_substitution(self, node, key):
sub = self.substitutions.get(key)
if sub is not None:
......@@ -157,7 +157,7 @@ class TemplateTransform(VisitorTransform):
return ApplyPositionAndCopy(pos)(sub)
else:
return self.visit_Node(node) # make copy as usual
def visit_NameNode(self, node):
temphandle = self.tempmap.get(node.name)
if temphandle:
......@@ -174,7 +174,7 @@ class TemplateTransform(VisitorTransform):
return self.try_substitution(node, node.expr.name)
else:
return self.visit_Node(node)
def copy_code_tree(node):
return TreeCopier()(node)
......@@ -186,12 +186,12 @@ def strip_common_indent(lines):
minindent = min([len(INDENT_RE.match(x).group(0)) for x in lines])
lines = [x[minindent:] for x in lines]
return lines
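Illustrative only: strip_common_indent removes the smallest shared leading indentation, e.g.:

    lines = ["    if x:", "        y = 1"]
    # minindent == 4, so the result is ["if x:", "    y = 1"]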
class TreeFragment(object):
def __init__(self, code, name="(tree fragment)", pxds={}, temps=[], pipeline=[], level=None, initial_pos=None):
if isinstance(code, unicode):
def fmt(x): return u"\n".join(strip_common_indent(x.split(u"\n")))
def fmt(x): return u"\n".join(strip_common_indent(x.split(u"\n")))
fmt_code = fmt(code)
fmt_pxds = {}
for key, value in pxds.iteritems():
......
......@@ -129,7 +129,7 @@ def handle_descendants(next, token):
for node in result:
for child in iter_recursive(node):
yield child
return select
def handle_attribute(next, token):
......@@ -231,7 +231,7 @@ def logical_and(lhs_selects, rhs_select):
for result_node in rhs_select(subresult):
yield node
return select
operations = {
"@": handle_attribute,
......
This diff is collapsed.
......@@ -2,7 +2,7 @@ void
some_c_function(void)
{
int a, b, c;
a = 1;
b = 2;
}
This diff is collapsed.