Commit 14af74de authored by Robert Bradshaw

Merge branch 'master' of github.com:cython/cython

parents fc537c18 7a57ab27
lib2to3.fixes.fix_unicode
......@@ -68,6 +68,8 @@ Features added
* External C++ classes that overload the assignment operator can be used.
Patch by Ian Henriksen.
* Support operator bool() for C++ classes so they can be used in if statements.
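A minimal sketch of how the two C++ features above can be combined, assuming a hypothetical header smart_ptr.h that defines a SmartPtr[T] class with an overloaded assignment operator and an operator bool():

    # distutils: language = c++
    # Minimal sketch; "smart_ptr.h" and SmartPtr are hypothetical names.
    cdef extern from "smart_ptr.h":
        cdef cppclass SmartPtr[T]:
            SmartPtr()
            SmartPtr[T]& operator=(SmartPtr[T]&)   # overloaded C++ assignment
            bint operator bool()                   # truth value for 'if' tests

    cdef bint holds_value():
        cdef SmartPtr[int] a, b
        a = b        # dispatches to the overloaded operator=
        if a:        # calls operator bool()
            return True
        return False

The 'bool' entry added to supported_overloaded_operators in the parser hunk further down in this diff is what allows the operator bool() declaration to be parsed.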
Bugs fixed
----------
......@@ -118,6 +120,11 @@ Other changes
* Changed mangling scheme in header files generated by ``cdef api``
declarations.
* Installation under CPython 3.3+ no longer requires a pass of the
2to3 tool. This also makes it possible to run Cython in Python
3.3+ from a source checkout without installing it first.
Patch by Petr Viktorin.
0.22.1 (2015-06-20)
===================
......
......@@ -62,9 +62,9 @@ def find_package_base(path):
def cython_compile(path_pattern, options):
pool = None
paths = map(os.path.abspath, extended_iglob(path_pattern))
all_paths = map(os.path.abspath, extended_iglob(path_pattern))
try:
for path in paths:
for path in all_paths:
if options.build_inplace:
base_dir = path
while not os.path.isdir(base_dir) or is_package_dir(base_dir):
......
from __future__ import absolute_import
from __future__ import absolute_import, print_function
import cython
from .. import __version__
......@@ -56,7 +56,7 @@ if sys.version_info[0] < 3:
if _fs_encoding is None:
_fs_encoding = sys.getdefaultencoding()
def encode_filename_in_py2(filename):
if isinstance(filename, unicode):
if not isinstance(filename, bytes):
return filename.encode(_fs_encoding)
return filename
else:
......@@ -163,7 +163,7 @@ distutils_settings = {
'language': transitive_str,
}
@cython.locals(start=long, end=long)
@cython.locals(start=cython.Py_ssize_t, end=cython.Py_ssize_t)
def line_iter(source):
if isinstance(source, basestring):
start = 0
......@@ -254,8 +254,10 @@ class DistutilsInfo(object):
value = getattr(extension, key) + list(value)
setattr(extension, key, value)
@cython.locals(start=long, q=long, single_q=long, double_q=long, hash_mark=long,
end=long, k=long, counter=long, quote_len=long)
@cython.locals(start=cython.Py_ssize_t, q=cython.Py_ssize_t,
single_q=cython.Py_ssize_t, double_q=cython.Py_ssize_t,
hash_mark=cython.Py_ssize_t, end=cython.Py_ssize_t,
k=cython.Py_ssize_t, counter=cython.Py_ssize_t, quote_len=cython.Py_ssize_t)
def strip_string_literals(code, prefix='__Pyx_L'):
"""
Normalizes every string literal to be of the form '__Pyx_Lxxx',
......@@ -876,7 +878,7 @@ def cythonize(module_list, exclude=[], nthreads=0, aliases=None, quiet=False, fo
if exclude_failures:
failed_modules = set()
for c_file, modules in modules_by_cfile.iteritems():
for c_file, modules in modules_by_cfile.items():
if not os.path.exists(c_file):
failed_modules.update(modules)
elif os.path.getsize(c_file) < 200:
......@@ -978,7 +980,7 @@ def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None, raise_on_f
result = compile([pyx_file], options)
if result.num_errors > 0:
any_failures = 1
except (EnvironmentError, PyrexError), e:
except (EnvironmentError, PyrexError) as e:
sys.stderr.write('%s\n' % e)
any_failures = 1
# XXX
......
......@@ -17,15 +17,18 @@ from ..Compiler.Main import Context, CompilationOptions, default_options
from ..Compiler.ParseTreeTransforms import (CythonTransform,
SkipDeclarations, AnalyseDeclarationsTransform, EnvTransform)
from ..Compiler.TreeFragment import parse_from_strings
from ..Compiler.StringEncoding import _unicode
from .Dependencies import strip_string_literals, cythonize, cached_function
from ..Compiler import Pipeline, Nodes
from ..Utils import get_cython_cache_dir
import cython as cython_module
IS_PY3 = sys.version_info >= (3, 0)
# A utility function to convert user-supplied ASCII strings to unicode.
if sys.version_info[0] < 3:
def to_unicode(s):
if not isinstance(s, unicode):
if isinstance(s, bytes):
return s.decode('ascii')
else:
return s
......@@ -143,15 +146,14 @@ def cython_inline(code,
# Parsing from strings not fully supported (e.g. cimports).
print("Could not parse code as a string (to extract unbound symbols).")
cimports = []
for name, arg in kwds.items():
for name, arg in list(kwds.items()):
if arg is cython_module:
cimports.append('\ncimport cython as %s' % name)
del kwds[name]
arg_names = kwds.keys()
arg_names.sort()
arg_names = sorted(kwds)
arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
key = orig_code, arg_sigs, sys.version_info, sys.executable, Cython.__version__
module_name = "_cython_inline_" + hashlib.md5(str(key).encode('utf-8')).hexdigest()
module_name = "_cython_inline_" + hashlib.md5(_unicode(key).encode('utf-8')).hexdigest()
if module_name in sys.modules:
module = sys.modules[module_name]
......@@ -221,27 +223,29 @@ def __invoke(%(params)s):
# overridden with actual value upon the first cython_inline invocation
cython_inline.so_ext = None
non_space = re.compile('[^ ]')
_find_non_space = re.compile('[^ ]').search
def strip_common_indent(code):
min_indent = None
lines = code.split('\n')
lines = code.splitlines()
for line in lines:
match = non_space.search(line)
match = _find_non_space(line)
if not match:
continue # blank
continue # blank
indent = match.start()
if line[indent] == '#':
continue # comment
elif min_indent is None or min_indent > indent:
continue # comment
if min_indent is None or min_indent > indent:
min_indent = indent
for ix, line in enumerate(lines):
match = non_space.search(line)
if not match or line[indent] == '#':
match = _find_non_space(line)
if not match or not line or line[indent:indent+1] == '#':
continue
else:
lines[ix] = line[min_indent:]
lines[ix] = line[min_indent:]
return '\n'.join(lines)
module_statement = re.compile(r'^((cdef +(extern|class))|cimport|(from .+ cimport)|(from .+ import +[*]))')
def extract_func_code(code):
module = []
......@@ -270,7 +274,7 @@ except ImportError:
all[varargs] = arg_values[len(args):]
for name, value in zip(args, arg_values):
all[name] = value
for name, value in kwd_values.items():
for name, value in list(kwd_values.items()):
if name in args:
if name in all:
raise TypeError("Duplicate argument %s" % name)
......@@ -278,7 +282,7 @@ except ImportError:
if kwds is not None:
all[kwds] = kwd_values
elif kwd_values:
raise TypeError("Unexpected keyword arguments: %s" % kwd_values.keys())
raise TypeError("Unexpected keyword arguments: %s" % list(kwd_values))
if defaults is None:
defaults = ()
first_default = len(args) - len(defaults)
......@@ -307,4 +311,7 @@ class RuntimeCompiledFunction(object):
def __call__(self, *args, **kwds):
all = getcallargs(self._f, *args, **kwds)
return cython_inline(self._body, locals=self._f.func_globals, globals=self._f.func_globals, **all)
if IS_PY3:
return cython_inline(self._body, locals=self._f.__globals__, globals=self._f.__globals__, **all)
else:
return cython_inline(self._body, locals=self._f.func_globals, globals=self._f.func_globals, **all)
......@@ -6,8 +6,11 @@ The output is in a strict format, no whitespace or comments from the input
is preserved (and it could not be as it is not present in the code tree).
"""
from Cython.Compiler.Visitor import TreeVisitor
from Cython.Compiler.ExprNodes import *
from __future__ import absolute_import, print_function
from .Compiler.Visitor import TreeVisitor
from .Compiler.ExprNodes import *
class LinesResult(object):
def __init__(self):
......@@ -497,7 +500,7 @@ class CodeWriter(DeclarationWriter):
class PxdWriter(DeclarationWriter):
def __call__(self, node):
print u'\n'.join(self.write(node).lines)
print(u'\n'.join(self.write(node).lines))
return node
def visit_CFuncDefNode(self, node):
......@@ -516,5 +519,3 @@ class PxdWriter(DeclarationWriter):
def visit_StatNode(self, node):
pass
......@@ -11,7 +11,10 @@ from datetime import datetime
from functools import partial
from collections import defaultdict
from xml.sax.saxutils import escape as html_escape
from StringIO import StringIO
try:
from StringIO import StringIO
except ImportError:
from io import StringIO # does not support writing 'str' in Py2
from . import Version
from .Code import CCodeWriter
......@@ -215,7 +218,7 @@ class AnnotationCCodeWriter(CCodeWriter):
def annotate(match):
group_name = match.lastgroup
calls[group_name] += 1
return ur"<span class='%s'>%s</span>" % (
return u"<span class='%s'>%s</span>" % (
group_name, match.group(group_name))
lines = self._htmlify_code(cython_code).splitlines()
......@@ -272,22 +275,22 @@ class AnnotationCCodeWriter(CCodeWriter):
return outlist
_parse_code = re.compile(
ur'(?P<refnanny>__Pyx_X?(?:GOT|GIVE)REF|__Pyx_RefNanny[A-Za-z]+)|'
ur'(?P<trace>__Pyx_Trace[A-Za-z]+)|'
ur'(?:'
ur'(?P<pyx_macro_api>__Pyx_[A-Z][A-Z_]+)|'
ur'(?P<pyx_c_api>__Pyx_[A-Z][a-z_][A-Za-z_]*)|'
ur'(?P<py_macro_api>Py[A-Z][a-z]+_[A-Z][A-Z_]+)|'
ur'(?P<py_c_api>Py[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]*)'
ur')(?=\()|' # look-ahead to exclude subsequent '(' from replacement
ur'(?P<error_goto>(?:(?<=;) *if .* +)?\{__pyx_filename = .*goto __pyx_L\w+;\})'
).sub
_parse_code = re.compile((
br'(?P<refnanny>__Pyx_X?(?:GOT|GIVE)REF|__Pyx_RefNanny[A-Za-z]+)|'
br'(?P<trace>__Pyx_Trace[A-Za-z]+)|'
br'(?:'
br'(?P<pyx_macro_api>__Pyx_[A-Z][A-Z_]+)|'
br'(?P<pyx_c_api>__Pyx_[A-Z][a-z_][A-Za-z_]*)|'
br'(?P<py_macro_api>Py[A-Z][a-z]+_[A-Z][A-Z_]+)|'
br'(?P<py_c_api>Py[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]*)'
br')(?=\()|' # look-ahead to exclude subsequent '(' from replacement
br'(?P<error_goto>(?:(?<=;) *if .* +)?\{__pyx_filename = .*goto __pyx_L\w+;\})'
).decode('ascii')).sub
_replace_pos_comment = re.compile(
# this matches what Cython generates as code line marker comment
ur'^\s*/\*(?:(?:[^*]|\*[^/])*\n)+\s*\*/\s*\n',
br'^\s*/\*(?:(?:[^*]|\*[^/])*\n)+\s*\*/\s*\n'.decode('ascii'),
re.M
).sub
......
......@@ -70,7 +70,7 @@ class EmbedSignature(CythonTransform):
except Exception:
try:
return self._fmt_expr_node(default_val)
except AttributeError, e:
except AttributeError as e:
return '<???>'
def _fmt_arg(self, arg):
......
......@@ -49,25 +49,22 @@ class IntroduceBufferAuxiliaryVars(CythonTransform):
# For all buffers, insert extra variables in the scope.
# The variables are also accessible from the buffer_info
# on the buffer entry
bufvars = [entry for name, entry
in scope.entries.iteritems()
if entry.type.is_buffer]
scope_items = scope.entries.items()
bufvars = [entry for name, entry in scope_items if entry.type.is_buffer]
if len(bufvars) > 0:
bufvars.sort(key=lambda entry: entry.name)
self.buffers_exists = True
memviewslicevars = [entry for name, entry
in scope.entries.iteritems()
if entry.type.is_memoryviewslice]
memviewslicevars = [entry for name, entry in scope_items if entry.type.is_memoryviewslice]
if len(memviewslicevars) > 0:
self.buffers_exists = True
for (name, entry) in scope.entries.iteritems():
for (name, entry) in scope_items:
if name == 'memoryview' and isinstance(entry.utility_code_definition, CythonUtilityCode):
self.using_memoryview = True
break
del scope_items
if isinstance(node, ModuleNode) and len(bufvars) > 0:
# for now...note that pos is wrong
......@@ -143,13 +140,14 @@ def analyse_buffer_options(globalpos, env, posargs, dictargs, defaults=None, nee
if defaults is None:
defaults = buffer_defaults
posargs, dictargs = Interpreter.interpret_compiletime_options(posargs, dictargs, type_env=env, type_args = (0,'dtype'))
posargs, dictargs = Interpreter.interpret_compiletime_options(
posargs, dictargs, type_env=env, type_args=(0, 'dtype'))
if len(posargs) > buffer_positional_options_count:
raise CompileError(posargs[-1][1], ERR_BUF_TOO_MANY)
options = {}
for name, (value, pos) in dictargs.iteritems():
for name, (value, pos) in dictargs.items():
if not name in buffer_options:
raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name)
options[name] = value
......
......@@ -169,7 +169,7 @@ def parse_command_line(args):
options.compiler_directives = Options.parse_directive_list(
x_args, relaxed_bool=True,
current_settings=options.compiler_directives)
except ValueError, e:
except ValueError as e:
sys.stderr.write("Error in compiler directive: %s\n" % e.args[0])
sys.exit(1)
elif option.startswith('--debug'):
......
......@@ -128,7 +128,7 @@ class UtilityCodeBase(object):
del tags['substitute']
try:
code = Template(code).substitute(vars(Naming))
except (KeyError, ValueError), e:
except (KeyError, ValueError) as e:
raise RuntimeError("Error parsing templated utility code of type '%s' at line %d: %s" % (
type, begin_lineno, e))
......@@ -163,7 +163,7 @@ class UtilityCodeBase(object):
if ext in ('.pyx', '.py', '.pxd', '.pxi'):
comment = '#'
strip_comments = partial(re.compile(r'^\s*#.*').sub, '')
rstrip = unicode.rstrip
rstrip = StringEncoding._unicode.rstrip
else:
comment = '/'
strip_comments = partial(re.compile(r'^\s*//.*|/\*[^*]*\*/').sub, '')
......@@ -774,9 +774,8 @@ class FunctionState(object):
error case.
"""
return [(cname, type)
for (type, manage_ref), freelist in self.temps_free.items()
if manage_ref
for cname in freelist]
for (type, manage_ref), freelist in self.temps_free.items() if manage_ref
for cname in freelist]
def start_collecting_temps(self):
"""
......@@ -820,7 +819,7 @@ class PyObjectConst(object):
cython.declare(possible_unicode_identifier=object, possible_bytes_identifier=object,
replace_identifier=object, find_alphanums=object)
possible_unicode_identifier = re.compile(ur"(?![0-9])\w+$", re.U).match
possible_unicode_identifier = re.compile(br"(?![0-9])\w+$".decode('ascii'), re.U).match
possible_bytes_identifier = re.compile(r"(?![0-9])\w+$".encode('ASCII')).match
replace_identifier = re.compile(r'[^a-zA-Z0-9_]+').sub
find_alphanums = re.compile('([a-zA-Z0-9]+)').findall
......@@ -877,10 +876,10 @@ class StringConst(object):
if identifier:
intern = True
elif identifier is None:
if isinstance(text, unicode):
intern = bool(possible_unicode_identifier(text))
else:
if isinstance(text, bytes):
intern = bool(possible_bytes_identifier(text))
else:
intern = bool(possible_unicode_identifier(text))
else:
intern = False
if intern:
......@@ -1303,8 +1302,7 @@ class GlobalState(object):
cleanup.putln("Py_CLEAR(%s.method);" % cname)
def generate_string_constants(self):
c_consts = [ (len(c.cname), c.cname, c)
for c in self.string_const_index.values() ]
c_consts = [(len(c.cname), c.cname, c) for c in self.string_const_index.values()]
c_consts.sort()
py_strings = []
......@@ -2300,9 +2298,8 @@ class PyxCodeWriter(object):
def getvalue(self):
result = self.buffer.getvalue()
if not isinstance(result, unicode):
if isinstance(result, bytes):
result = result.decode(self.encoding)
return result
def putln(self, line, context=None):
......
......@@ -4,6 +4,11 @@
from __future__ import absolute_import
try:
from __builtin__ import basestring as any_string_type
except ImportError:
any_string_type = (bytes, str)
import sys
from ..Utils import open_new_file
......@@ -21,7 +26,7 @@ class PyrexWarning(Exception):
def context(position):
source = position[0]
assert not (isinstance(source, unicode) or isinstance(source, str)), (
assert not (isinstance(source, any_string_type)), (
"Please replace filename strings with Scanning.FileSourceDescriptor instances %r" % source)
try:
F = source.get_lines()
......@@ -167,7 +172,7 @@ def report_error(err):
def error(position, message):
#print "Errors.error:", repr(position), repr(message) ###
#print("Errors.error:", repr(position), repr(message)) ###
if position is None:
raise InternalError(message)
err = CompileError(position, message)
......
......@@ -259,7 +259,7 @@ class ControlFlow(object):
for entry in block.bounded:
block.i_kill |= self.assmts[entry].bit
for assmts in self.assmts.itervalues():
for assmts in self.assmts.values():
self.entry_point.i_gen |= assmts.bit
self.entry_point.i_output = self.entry_point.i_gen
......@@ -602,7 +602,7 @@ def check_definitions(flow, compiler_directives):
node.cf_maybe_null = False
# Find uninitialized references and cf-hints
for node, entry in references.iteritems():
for node, entry in references.items():
if Uninitialized in node.cf_state:
node.cf_maybe_null = True
if not entry.from_closure and len(node.cf_state) == 1:
......
......@@ -13,6 +13,11 @@ if sys.version_info[:2] < (2, 6) or (3, 0) <= sys.version_info[:2] < (3, 2):
sys.stderr.write("Sorry, Cython requires Python 2.6+ or 3.2+, found %d.%d\n" % tuple(sys.version_info[:2]))
sys.exit(1)
try:
from __builtin__ import basestring
except ImportError:
basestring = str
from . import Errors
# Do not import Parsing here, import it when needed, because Parsing imports
# Nodes, which globally needs debug command line options initialized to set a
......@@ -354,7 +359,7 @@ class Context(object):
raise RuntimeError(
"Formal grammer can only be used with compiled Cython with an available pgen.")
ConcreteSyntaxTree.p_module(source_filename)
except UnicodeDecodeError, e:
except UnicodeDecodeError as e:
#import traceback
#traceback.print_exc()
raise self._report_decode_error(source_desc, e)
......@@ -699,7 +704,7 @@ def main(command_line = 0):
result = compile(sources, options)
if result.num_errors > 0:
any_failures = 1
except (EnvironmentError, PyrexError), e:
except (EnvironmentError, PyrexError) as e:
sys.stderr.write(str(e) + '\n')
any_failures = 1
if any_failures:
......
......@@ -790,7 +790,7 @@ def validate_axes_specs(positions, specs, is_c_contig, is_f_contig):
if access == 'ptr':
last_indirect_dimension = idx
for idx, pos, (access, packing) in zip(xrange(len(specs)), positions, specs):
for idx, (pos, (access, packing)) in enumerate(zip(positions, specs)):
if not (access in access_specs and
packing in packing_specs):
......
......@@ -397,7 +397,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if target_file_dir != target_dir and not os.path.exists(target_file_dir):
try:
os.makedirs(target_file_dir)
except OSError, e:
except OSError as e:
import errno
if e.errno != errno.EEXIST:
raise
......@@ -413,7 +413,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
d.setdefault(cython_lineno, []).append(c_lineno + 1)
tb.start('LineNumberMapping')
for cython_lineno, c_linenos in sorted(d.iteritems()):
for cython_lineno, c_linenos in sorted(d.items()):
attrs = {
'c_linenos': ' '.join(map(str, c_linenos)),
'cython_lineno': str(cython_lineno),
......
from __future__ import absolute_import
import sys
import copy
import codecs
from . import TypeSlots
from .ExprNodes import not_a_constant
import cython
cython.declare(UtilityCode=object, EncodedString=object, BytesLiteral=object,
Nodes=object, ExprNodes=object, PyrexTypes=object, Builtin=object,
UtilNodes=object)
UtilNodes=object, _py_int_types=object)
if sys.version_info[0] >= 3:
_py_int_types = int
else:
_py_int_types = (int, long)
from . import Nodes
from . import ExprNodes
......@@ -20,9 +29,6 @@ from .StringEncoding import EncodedString, BytesLiteral
from .Errors import error
from .ParseTreeTransforms import SkipDeclarations
import copy
import codecs
try:
from __builtin__ import reduce
except ImportError:
......@@ -444,7 +450,7 @@ class IterationTransform(Visitor.EnvTransform):
stop = filter_none_node(index.stop)
step = filter_none_node(index.step)
if step:
if not isinstance(step.constant_result, (int,long)) \
if not isinstance(step.constant_result, _py_int_types) \
or step.constant_result == 0 \
or step.constant_result > 0 and not stop \
or step.constant_result < 0 and not start:
......@@ -683,7 +689,7 @@ class IterationTransform(Visitor.EnvTransform):
else:
step = args[2]
step_pos = step.pos
if not isinstance(step.constant_result, (int, long)):
if not isinstance(step.constant_result, _py_int_types):
# cannot determine step direction
return node
step_value = step.constant_result
......@@ -708,8 +714,8 @@ class IterationTransform(Visitor.EnvTransform):
bound1, bound2 = bound2, bound1
abs_step = abs(step_value)
if abs_step != 1:
if (isinstance(bound1.constant_result, (int, long)) and
isinstance(bound2.constant_result, (int, long))):
if (isinstance(bound1.constant_result, _py_int_types) and
isinstance(bound2.constant_result, _py_int_types)):
# calculate final bounds now
if step_value < 0:
begin_value = bound2.constant_result
......@@ -3852,12 +3858,12 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
def _calculate_constant_seq(self, node, sequence_node, factor):
if factor.constant_result != 1 and sequence_node.args:
if isinstance(factor.constant_result, (int, long)) and factor.constant_result <= 0:
if isinstance(factor.constant_result, _py_int_types) and factor.constant_result <= 0:
del sequence_node.args[:]
sequence_node.mult_factor = None
elif sequence_node.mult_factor is not None:
if (isinstance(factor.constant_result, (int, long)) and
isinstance(sequence_node.mult_factor.constant_result, (int, long))):
if (isinstance(factor.constant_result, _py_int_types) and
isinstance(sequence_node.mult_factor.constant_result, _py_int_types)):
value = sequence_node.mult_factor.constant_result * factor.constant_result
sequence_node.mult_factor = ExprNodes.IntNode(
sequence_node.mult_factor.pos,
......@@ -4198,12 +4204,12 @@ class FinalOptimizePhase(Visitor.CythonTransform, Visitor.NodeRefCleanupMixin):
elif function.is_name:
if function.entry.is_builtin:
may_be_a_method = False
elif function.cf_state:
elif function.entry.cf_assignments:
# local functions/classes are definitely not methods
non_method_nodes = (ExprNodes.PyCFunctionNode, ExprNodes.ClassNode, ExprNodes.Py3ClassNode)
may_be_a_method = any(
assignment.rhs and not isinstance(assignment.rhs, non_method_nodes)
for assignment in function.cf_state)
for assignment in function.entry.cf_assignments)
if may_be_a_method:
node = self.replace(node, ExprNodes.PyMethodCallNode.from_node(
node, function=function, arg_tuple=node.arg_tuple, type=node.type))
......
......@@ -257,7 +257,7 @@ def parse_directive_value(name, value, relaxed_bool=False):
Parses value as an option value for the given name and returns
the interpreted value. None is returned if the option does not exist.
>>> print parse_directive_value('nonexisting', 'asdf asdfd')
>>> print(parse_directive_value('nonexisting', 'asdf asdfd'))
None
>>> parse_directive_value('boundscheck', 'True')
True
......
......@@ -6,7 +6,7 @@ import cython
cython.declare(PyrexTypes=object, Naming=object, ExprNodes=object, Nodes=object,
Options=object, UtilNodes=object, LetNode=object,
LetRefNode=object, TreeFragment=object, EncodedString=object,
error=object, warning=object, copy=object)
error=object, warning=object, copy=object, _unicode=object)
from . import PyrexTypes
from . import Naming
......@@ -19,7 +19,7 @@ from .Visitor import VisitorTransform, TreeVisitor
from .Visitor import CythonTransform, EnvTransform, ScopeTrackingTransform
from .UtilNodes import LetNode, LetRefNode, ResultRefNode
from .TreeFragment import TreeFragment
from .StringEncoding import EncodedString
from .StringEncoding import EncodedString, _unicode
from .Errors import error, warning, CompileError, InternalError
from .Code import UtilityCode
......@@ -255,7 +255,7 @@ class PostParse(ScopeTrackingTransform):
newdecls.append(decl)
node.declarators = newdecls
return stats
except PostParseError, e:
except PostParseError as e:
# An error in a cdef clause is ok, simply remove the declaration
# and try to move on to report more errors
self.context.nonfatal_error(e)
......@@ -420,11 +420,11 @@ def sort_common_subsequences(items):
for pos, item in enumerate(items):
key = item[1] # the ResultRefNode which has already been injected into the sequences
new_pos = pos
for i in xrange(pos-1, -1, -1):
for i in range(pos-1, -1, -1):
if lower_than(key, items[i][0]):
new_pos = i
if new_pos != pos:
for i in xrange(pos, new_pos, -1):
for i in range(pos, new_pos, -1):
items[i] = items[i-1]
items[new_pos] = item
......@@ -460,7 +460,7 @@ def flatten_parallel_assignments(input, output):
rhs_args = unpack_string_to_character_literals(rhs)
rhs_size = len(rhs_args)
lhs_targets = [ [] for _ in xrange(rhs_size) ]
lhs_targets = [[] for _ in range(rhs_size)]
starred_assignments = []
for lhs in input[:-1]:
if not lhs.is_sequence_constructor:
......@@ -647,7 +647,7 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
special_methods = set(['declare', 'union', 'struct', 'typedef',
'sizeof', 'cast', 'pointer', 'compiled',
'NULL', 'fused_type', 'parallel'])
special_methods.update(unop_method_nodes.keys())
special_methods.update(unop_method_nodes)
valid_parallel_directives = set([
"parallel",
......@@ -663,7 +663,7 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
self.parallel_directives = {}
directives = copy.deepcopy(Options.directive_defaults)
for key, value in compilation_directive_defaults.items():
directives[unicode(key)] = copy.deepcopy(value)
directives[_unicode(key)] = copy.deepcopy(value)
self.directives = directives
def check_directive_scope(self, pos, directive, scope):
......@@ -934,7 +934,7 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
directives = self._extract_directives(node, 'function')
if not directives:
return node
for name, value in directives.iteritems():
for name, value in directives.items():
if name == 'locals':
node.directive_locals = value
elif name not in ('final', 'staticmethod'):
......@@ -2888,11 +2888,11 @@ class DebugTransform(CythonTransform):
self.tb.start('Globals')
entries = {}
for k, v in node.scope.entries.iteritems():
for k, v in node.scope.entries.items():
if (v.qualified_name not in self.visited and not
v.name.startswith('__pyx_') and not
v.type.is_cfunction and not
v.type.is_extension_type):
v.name.startswith('__pyx_') and not
v.type.is_cfunction and not
v.type.is_extension_type):
entries[k]= v
self.serialize_local_variables(entries)
......
......@@ -13,9 +13,10 @@ cython.declare(Nodes=object, ExprNodes=object, EncodedString=object,
Future=object, Options=object, error=object, warning=object,
Builtin=object, ModuleNode=object, Utils=object,
re=object, _unicode=object, _bytes=object,
partial=object, reduce=object)
partial=object, reduce=object, _IS_PY3=cython.bint)
import re
import sys
from unicodedata import lookup as lookup_unicodechar
from functools import partial, reduce
......@@ -31,6 +32,8 @@ from .. import Utils
from . import Future
from . import Options
_IS_PY3 = sys.version_info[0] >= 3
class Ctx(object):
# Parsing context
......@@ -759,8 +762,6 @@ def wrap_compile_time_constant(pos, value):
return ExprNodes.BoolNode(pos, value=value)
elif isinstance(value, int):
return ExprNodes.IntNode(pos, value=rep)
elif isinstance(value, long):
return ExprNodes.IntNode(pos, value=rep, longness="L")
elif isinstance(value, float):
return ExprNodes.FloatNode(pos, value=rep)
elif isinstance(value, _unicode):
......@@ -775,6 +776,8 @@ def wrap_compile_time_constant(pos, value):
else:
# error already reported
return None
elif not _IS_PY3 and isinstance(value, long):
return ExprNodes.IntNode(pos, value=rep, longness="L")
error(pos, "Invalid type for compile-time constant: %r (type %s)"
% (value, value.__class__.__name__))
return None
......@@ -2543,7 +2546,8 @@ supported_overloaded_operators = cython.declare(set, set([
'+', '-', '*', '/', '%',
'++', '--', '~', '|', '&', '^', '<<', '>>', ',',
'==', '!=', '>=', '>', '<=', '<',
'[]', '()', '!', '='
'[]', '()', '!', '=',
'bool',
]))
def p_c_simple_declarator(s, ctx, empty, is_type, cmethod_flag,
......@@ -2620,6 +2624,13 @@ def p_c_simple_declarator(s, ctx, empty, is_type, cmethod_flag,
s.error("Overloading operator '%s' not yet supported." % op,
fatal=False)
name += op
elif op == 'IDENT':
op = s.systring
if op not in supported_overloaded_operators:
s.error("Overloading operator '%s' not yet supported." % op,
fatal=False)
name = name + ' ' + op
s.next()
result = Nodes.CNameDeclaratorNode(pos,
name = name, cname = cname, default = rhs)
result.calling_convention = calling_convention
......@@ -3057,6 +3068,9 @@ def p_decorators(s):
def p_def_statement(s, decorators=None, is_async_def=False):
# s.sy == 'def'
pos = s.position()
# PEP 492 switches the async/await keywords on in "async def" functions
if is_async_def:
s.enter_async()
s.next()
name = p_ident(s)
s.expect('(')
......@@ -3069,23 +3083,9 @@ def p_def_statement(s, decorators=None, is_async_def=False):
s.next()
return_type_annotation = p_test(s)
# PEP 492 switches the async/await keywords off in simple "def" functions
# and on in "async def" functions
await_was_enabled = s.enable_keyword('await') if is_async_def else s.disable_keyword('await')
async_was_enabled = s.enable_keyword('async') if is_async_def else s.disable_keyword('async')
doc, body = p_suite_with_docstring(s, Ctx(level='function'))
if is_async_def:
if not async_was_enabled:
s.disable_keyword('async')
if not await_was_enabled:
s.disable_keyword('await')
else:
if async_was_enabled:
s.enable_keyword('async')
if await_was_enabled:
s.enable_keyword('await')
s.exit_async()
return Nodes.DefNode(
pos, name=name, args=args, star_arg=star_arg, starstar_arg=starstar_arg,
......@@ -3332,7 +3332,7 @@ def p_compiler_directive_comments(s):
try:
result.update(Options.parse_directive_list(
directives, ignore_unknown=True))
except ValueError, e:
except ValueError as e:
s.error(e.args[0], fatal=False)
s.next()
return result
......@@ -3450,7 +3450,7 @@ def print_parse_tree(f, node, level, key = None):
t = type(node)
if t is tuple:
f.write("(%s @ %s\n" % (node[0], node[1]))
for i in xrange(2, len(node)):
for i in range(2, len(node)):
print_parse_tree(f, node[i], level+1)
f.write("%s)\n" % ind)
return
......@@ -3466,7 +3466,7 @@ def print_parse_tree(f, node, level, key = None):
return
elif t is list:
f.write("[\n")
for i in xrange(len(node)):
for i in range(len(node)):
print_parse_tree(f, node[i], level+1)
f.write("%s]\n" % ind)
return
......
......@@ -15,7 +15,7 @@ from . import Naming
#
def dumptree(t):
# For quick debugging in pipelines
print t.dump()
print(t.dump())
return t
def abort_on_errors(node):
......@@ -57,7 +57,7 @@ def generate_pyx_code_stage_factory(options, result):
def inject_pxd_code_stage_factory(context):
def inject_pxd_code_stage(module_node):
for name, (statlistnode, scope) in context.pxds.iteritems():
for name, (statlistnode, scope) in context.pxds.items():
module_node.merge_in(statlistnode, scope)
return module_node
return inject_pxd_code_stage
......@@ -66,7 +66,7 @@ def use_utility_code_definitions(scope, target, seen=None):
if seen is None:
seen = set()
for entry in scope.entries.itervalues():
for entry in scope.entries.values():
if entry in seen:
continue
......@@ -321,21 +321,21 @@ def run_pipeline(pipeline, source, printtree=True):
if phase is not None:
if DebugFlags.debug_verbose_pipeline:
t = time()
print "Entering pipeline phase %r" % phase
print("Entering pipeline phase %r" % phase)
if not printtree and isinstance(phase, PrintTree):
continue
data = phase(data)
if DebugFlags.debug_verbose_pipeline:
print " %.3f seconds" % (time() - t)
except CompileError, err:
print(" %.3f seconds" % (time() - t))
except CompileError as err:
# err is set
Errors.report_error(err)
error = err
except InternalError, err:
except InternalError as err:
# Only raise if there was not an earlier error
if Errors.num_errors == 0:
raise
error = err
except AbortError, err:
except AbortError as err:
error = err
return (error, data)
......@@ -7,6 +7,11 @@ from __future__ import absolute_import
import copy
import re
try:
reduce
except NameError:
from functools import reduce
from .Code import UtilityCode, LazyUtilityCode, TempitaUtilityCode
from . import StringEncoding
from . import Naming
......@@ -2542,7 +2547,7 @@ class CFuncType(CType):
self.is_strict_signature = is_strict_signature
def __repr__(self):
arg_reprs = map(repr, self.args)
arg_reprs = list(map(repr, self.args))
if self.has_varargs:
arg_reprs.append("...")
if self.exception_value:
......@@ -4129,7 +4134,7 @@ def merge_template_deductions(a, b):
if a is None or b is None:
return None
all = a
for param, value in b.iteritems():
for param, value in b.items():
if param in all:
if a[param] != b[param]:
return None
......
......@@ -4,6 +4,8 @@ import cython
from ..Plex.Scanners cimport Scanner
cdef unicode any_string_prefix, IDENT
cdef get_lexicon()
cdef initial_compile_time_env()
......@@ -33,6 +35,7 @@ cdef class PyrexScanner(Scanner):
cdef public list indentation_stack
cdef public indentation_char
cdef public int bracket_nesting_level
cdef bint async_enabled
cdef public sy
cdef public systring
......@@ -57,5 +60,5 @@ cdef class PyrexScanner(Scanner):
cdef expect_indent(self)
cdef expect_dedent(self)
cdef expect_newline(self, message=*, bint ignore_semicolon=*)
cdef bint enable_keyword(self, name) except -1
cdef bint disable_keyword(self, name) except -1
cdef int enter_async(self) except -1
cdef int exit_async(self) except -1
......@@ -7,7 +7,6 @@ from __future__ import absolute_import
import cython
cython.declare(make_lexicon=object, lexicon=object,
any_string_prefix=unicode, IDENT=unicode,
print_function=object, error=object, warning=object,
os=object, platform=object)
......@@ -317,6 +316,7 @@ class PyrexScanner(Scanner):
self.indentation_stack = [0]
self.indentation_char = None
self.bracket_nesting_level = 0
self.async_enabled = 0
self.begin('INDENT')
self.sy = ''
self.next()
......@@ -493,14 +493,17 @@ class PyrexScanner(Scanner):
if useless_trailing_semicolon is not None:
warning(useless_trailing_semicolon, "useless trailing semicolon")
def enable_keyword(self, name):
if name in self.keywords:
return True # was enabled before
self.keywords.add(name)
return False # was not enabled before
def disable_keyword(self, name):
if name not in self.keywords:
return False # was not enabled before
self.keywords.remove(name)
return True # was enabled before
def enter_async(self):
self.async_enabled += 1
if self.async_enabled == 1:
self.keywords.add('async')
self.keywords.add('await')
def exit_async(self):
assert self.async_enabled > 0
self.async_enabled -= 1
if not self.async_enabled:
self.keywords.discard('await')
self.keywords.discard('async')
if self.sy in ('async', 'await'):
self.sy, self.systring = IDENT, self.context.intern_ustring(self.sy)
......@@ -8,10 +8,10 @@ import re
import sys
if sys.version_info[0] >= 3:
_unicode, _str, _bytes = str, str, bytes
_unicode, _str, _bytes, _unichr = str, str, bytes, chr
IS_PYTHON3 = True
else:
_unicode, _str, _bytes = unicode, str, str
_unicode, _str, _bytes, _unichr = unicode, str, str, unichr
IS_PYTHON3 = False
empty_bytes = _bytes()
......@@ -39,13 +39,13 @@ class UnicodeLiteralBuilder(object):
# wide Unicode character on narrow platform => replace
# by surrogate pair
char_number -= 0x10000
self.chars.append( unichr((char_number // 1024) + 0xD800) )
self.chars.append( unichr((char_number % 1024) + 0xDC00) )
self.chars.append( _unichr((char_number // 1024) + 0xD800) )
self.chars.append( _unichr((char_number % 1024) + 0xDC00) )
else:
self.chars.append( unichr(char_number) )
self.chars.append( _unichr(char_number) )
else:
def append_charval(self, char_number):
self.chars.append( unichr(char_number) )
self.chars.append( _unichr(char_number) )
def append_uescape(self, char_number, escape_string):
self.append_charval(char_number)
......@@ -71,7 +71,7 @@ class BytesLiteralBuilder(object):
self.chars.append(characters)
def append_charval(self, char_number):
self.chars.append( unichr(char_number).encode('ISO-8859-1') )
self.chars.append( _unichr(char_number).encode('ISO-8859-1') )
def append_uescape(self, char_number, escape_string):
self.append(escape_string)
......@@ -289,7 +289,7 @@ def split_string_literal(s, limit=2000):
def encode_pyunicode_string(s):
"""Create Py_UNICODE[] representation of a given unicode string.
"""
s = map(ord, s) + [0]
s = list(map(ord, s)) + [0]
if sys.maxunicode >= 0x10000: # Wide build or Py3.3
utf16, utf32 = [], s
......@@ -311,4 +311,4 @@ def encode_pyunicode_string(s):
if utf16 == utf32:
utf16 = []
return ",".join(map(unicode, utf16)), ",".join(map(unicode, utf32))
return ",".join(map(_unicode, utf16)), ",".join(map(_unicode, utf32))
......@@ -6,6 +6,12 @@ from __future__ import absolute_import
import copy
import re
try:
import __builtin__ as builtins
except ImportError: # Py3
import builtins
from .Errors import warning, error, InternalError
from .StringEncoding import EncodedString
from . import Options, Naming
......@@ -14,8 +20,8 @@ from .PyrexTypes import py_object_type, unspecified_type
from .TypeSlots import \
pyfunction_signature, pymethod_signature, \
get_special_method_signature, get_property_accessor_signature
from . import Code
import __builtin__ as builtins
iso_c99_keywords = set(
['auto', 'break', 'case', 'char', 'const', 'continue', 'default', 'do',
......@@ -338,7 +344,7 @@ class Scope(object):
def merge_in(self, other, merge_unused=True, whitelist=None):
# Use with care...
entries = []
for name, entry in other.entries.iteritems():
for name, entry in other.entries.items():
if not whitelist or name in whitelist:
if entry.used or merge_unused:
entries.append((name, entry))
......@@ -490,7 +496,7 @@ class Scope(object):
try:
type = PyrexTypes.create_typedef_type(name, base_type, cname,
(visibility == 'extern'))
except ValueError, e:
except ValueError as e:
error(pos, e.args[0])
type = PyrexTypes.error_type
entry = self.declare_type(name, type, pos, cname,
......@@ -893,7 +899,7 @@ class BuiltinScope(Scope):
Scope.__init__(self, "__builtin__", PreImportScope(), None)
self.type_names = {}
for name, definition in self.builtin_entries.iteritems():
for name, definition in sorted(self.builtin_entries.items()):
cname, type = definition
self.declare_var(name, type, None, cname)
......
......@@ -9,7 +9,7 @@ Support for parsing strings into code trees.
from __future__ import absolute_import
import re
from StringIO import StringIO
from io import StringIO
from .Scanning import PyrexScanner, StringSourceDescriptor
from .Symtab import ModuleScope
......@@ -17,6 +17,7 @@ from . import PyrexTypes
from .Visitor import VisitorTransform
from .Nodes import Node, StatListNode
from .ExprNodes import NameNode
from .StringEncoding import _unicode
from . import Parsing
from . import Main
from . import UtilNodes
......@@ -59,7 +60,7 @@ def parse_from_strings(name, code, pxds={}, level=None, initial_pos=None,
# to use a unicode string so that code fragments don't have to bother
# with encoding. This means that test code passed in should not have an
# encoding header.
assert isinstance(code, unicode), "unicode code snippets only please"
assert isinstance(code, _unicode), "unicode code snippets only please"
encoding = "UTF-8"
module_name = name
......@@ -198,7 +199,7 @@ def copy_code_tree(node):
return TreeCopier()(node)
_match_indent = re.compile(ur"^ *").match
_match_indent = re.compile(u"^ *").match
def strip_common_indent(lines):
......@@ -214,12 +215,12 @@ class TreeFragment(object):
def __init__(self, code, name=None, pxds={}, temps=[], pipeline=[], level=None, initial_pos=None):
if not name:
name = "(tree fragment)"
if isinstance(code, unicode):
if isinstance(code, _unicode):
def fmt(x): return u"\n".join(strip_common_indent(x.split(u"\n")))
fmt_code = fmt(code)
fmt_pxds = {}
for key, value in pxds.iteritems():
for key, value in pxds.items():
fmt_pxds[key] = fmt(value)
mod = t = parse_from_strings(name, fmt_code, fmt_pxds, level=level, initial_pos=initial_pos)
if level is None:
......
......@@ -9,6 +9,11 @@ from .. import Utils
from .PyrexTypes import py_object_type, unspecified_type
from .Visitor import CythonTransform, EnvTransform
try:
reduce
except NameError:
from functools import reduce
class TypedExprNode(ExprNodes.ExprNode):
# Used for declaring assignments of a specified type without a known entry.
......
......@@ -71,7 +71,7 @@ class Signature(object):
}
type_to_format_map = dict(
(type_, format_) for format_, type_ in format_map.iteritems())
(type_, format_) for format_, type_ in format_map.items())
error_value_map = {
'O': "NULL",
......@@ -127,7 +127,7 @@ class Signature(object):
def function_type(self, self_arg_override=None):
# Construct a C function type descriptor for this signature
args = []
for i in xrange(self.num_fixed_args()):
for i in range(self.num_fixed_args()):
if self_arg_override is not None and self.is_self_arg(i):
assert isinstance(self_arg_override, PyrexTypes.CFuncTypeArg)
args.append(self_arg_override)
......
......@@ -53,12 +53,15 @@ class TempRefNode(AtomicExprNode):
def generate_result_code(self, code):
pass
def generate_assignment_code(self, rhs, code):
def generate_assignment_code(self, rhs, code, overloaded_assignment=False):
if self.type.is_pyobject:
rhs.make_owned_reference(code)
# TODO: analyse control flow to see if this is necessary
code.put_xdecref(self.result(), self.ctype())
code.putln('%s = %s;' % (self.result(), rhs.result_as(self.ctype())))
code.putln('%s = %s;' % (
self.result(),
rhs.result() if overloaded_assignment else rhs.result_as(self.ctype()),
))
rhs.generate_post_assignment_code(code)
rhs.free_temps(code)
......@@ -66,7 +69,7 @@ class CleanupTempRefNode(TempRefNode):
# THIS IS DEPRECATED, USE LetRefNode instead
# handle TempHandle
def generate_assignment_code(self, rhs, code):
def generate_assignment_code(self, rhs, code, overloaded_assignment=False):
pass
def generate_execution_code(self, code):
......@@ -200,12 +203,15 @@ class ResultRefNode(AtomicExprNode):
def generate_disposal_code(self, code):
pass
def generate_assignment_code(self, rhs, code):
def generate_assignment_code(self, rhs, code, overloaded_assignment=False):
if self.type.is_pyobject:
rhs.make_owned_reference(code)
if not self.lhs_of_first_assignment:
code.put_decref(self.result(), self.ctype())
code.putln('%s = %s;' % (self.result(), rhs.result_as(self.ctype())))
code.putln('%s = %s;' % (
self.result(),
rhs.result() if overloaded_assignment else rhs.result_as(self.ctype()),
))
rhs.generate_post_assignment_code(code)
rhs.free_temps(code)
......
......@@ -186,7 +186,7 @@ class CythonUtilityCode(Code.UtilityCodeBase):
entries.pop('__builtins__')
entries.pop('__doc__')
for name, entry in entries.iteritems():
for name, entry in entries.items():
entry.utility_code_definition = self
entry.used = used
......
......@@ -4,8 +4,9 @@
# Tree visitor and transform framework
#
from __future__ import absolute_import
from __future__ import absolute_import, print_function
import sys
import inspect
from . import TypeSlots
......@@ -19,6 +20,14 @@ from . import Future
import cython
cython.declare(_PRINTABLE=tuple)
if sys.version_info[0] >= 3:
_PRINTABLE = (bytes, str, int, float)
else:
_PRINTABLE = (str, unicode, long, int, float)
class TreeVisitor(object):
"""
Base class for writing visitors for a Cython tree, contains utilities for
......@@ -48,9 +57,9 @@ class TreeVisitor(object):
>>> tree = SampleNode(0, SampleNode(1), [SampleNode(2), SampleNode(3)])
>>> class MyVisitor(TreeVisitor):
... def visit_SampleNode(self, node):
... print "in", node.value, self.access_path
... print("in %s %s" % (node.value, self.access_path))
... self.visitchildren(node)
... print "out", node.value
... print("out %s" % node.value)
...
>>> MyVisitor().visit(tree)
in 0 []
......@@ -94,7 +103,7 @@ class TreeVisitor(object):
continue
elif isinstance(value, list):
value = u'[...]/%d' % len(value)
elif not isinstance(value, (str, unicode, long, int, float)):
elif not isinstance(value, _PRINTABLE):
continue
else:
value = repr(value)
......@@ -153,11 +162,11 @@ class TreeVisitor(object):
handler_method = getattr(self, pattern % mro_cls.__name__, None)
if handler_method is not None:
return handler_method
print type(self), cls
print(type(self), cls)
if self.access_path:
print self.access_path
print self.access_path[-1][0].pos
print self.access_path[-1][0].__dict__
print(self.access_path)
print(self.access_path[-1][0].pos)
print(self.access_path[-1][0].__dict__)
raise RuntimeError("Visitor %r does not accept object: %s" % (self, obj))
def visit(self, obj):
......@@ -176,7 +185,7 @@ class TreeVisitor(object):
raise
except Errors.AbortError:
raise
except Exception, e:
except Exception as e:
if DebugFlags.debug_no_exception_intercept:
raise
self._raise_compiler_error(obj, e)
......@@ -240,7 +249,7 @@ class VisitorTransform(TreeVisitor):
"""
def visitchildren(self, parent, attrs=None):
result = self._visitchildren(parent, attrs)
for attr, newnode in result.iteritems():
for attr, newnode in result.items():
if type(newnode) is not list:
setattr(parent, attr, newnode)
else:
......
......@@ -78,8 +78,11 @@ class Plugin(CoveragePlugin):
return CythonModuleTracer(filename, py_file, c_file, self._c_files_map, self._file_path_map)
def file_reporter(self, filename):
if os.path.splitext(filename)[1].lower() not in ('.pyx', '.pxi', '.pxd'):
return None # let coverage.py handle it (e.g. .py files)
# TODO: let coverage.py handle .py files itself
#ext = os.path.splitext(filename)[1].lower()
#if ext == '.py':
# from coverage.python import PythonFileReporter
# return PythonFileReporter(filename)
filename = os.path.abspath(filename)
if self._c_files_map and filename in self._c_files_map:
......
......@@ -61,7 +61,7 @@ class CythonDebugWriter(object):
try:
os.makedirs(self.output_dir)
except OSError, e:
except OSError as e:
if e.errno != errno.EEXIST:
raise
......
......@@ -5,6 +5,8 @@ Note: debug information is already imported by the file generated by
Cython.Debugger.Cygdb.make_command_file()
"""
from __future__ import absolute_import
import os
import re
import sys
......@@ -21,9 +23,10 @@ from test import test_support
import gdb
from Cython.Debugger import libcython
from Cython.Debugger import libpython
from Cython.Debugger.Tests import TestLibCython as test_libcython
from .. import libcython
from .. import libpython
from . import TestLibCython as test_libcython
from ...Utils import add_metaclass
# for some reason sys.argv is missing in gdb
sys.argv = ['gdb']
......@@ -36,7 +39,7 @@ def print_on_call_decorator(func):
try:
return func(self, *args, **kwargs)
except Exception, e:
except Exception as e:
_debug("An exception occurred:", traceback.format_exc(e))
raise
......@@ -45,19 +48,18 @@ def print_on_call_decorator(func):
class TraceMethodCallMeta(type):
def __init__(self, name, bases, dict):
for func_name, func in dict.iteritems():
for func_name, func in dict.items():
if inspect.isfunction(func):
setattr(self, func_name, print_on_call_decorator(func))
@add_metaclass(TraceMethodCallMeta)
class DebugTestCase(unittest.TestCase):
"""
Base class for test cases. On teardown it kills the inferior and unsets
all breakpoints.
"""
__metaclass__ = TraceMethodCallMeta
def __init__(self, name):
super(DebugTestCase, self).__init__(name)
self.cy = libcython.cy
......
......@@ -14,8 +14,8 @@ import gdb
from Cython.Debugger import libcython
from Cython.Debugger import libpython
import test_libcython_in_gdb
from test_libcython_in_gdb import _debug, inferior_python_version
from . import test_libcython_in_gdb
from .test_libcython_in_gdb import _debug, inferior_python_version
class TestPrettyPrinters(test_libcython_in_gdb.DebugTestCase):
......
......@@ -4,6 +4,11 @@ GDB extension that adds Cython support.
from __future__ import print_function
try:
input = raw_input
except NameError:
pass
import sys
import textwrap
import traceback
......@@ -376,7 +381,7 @@ class CythonBase(object):
result = {}
seen = set()
for k, v in pyobject_dict.iteritems():
for k, v in pyobject_dict.items():
result[k.proxyval(seen)] = v
return result
......@@ -400,7 +405,7 @@ class CythonBase(object):
# Closed over free variable
if cur_lineno > cython_func.lineno:
if cyvar.type == PythonObject:
return long(gdb.parse_and_eval(cyvar.cname))
return int(gdb.parse_and_eval(cyvar.cname))
return True
return False
......@@ -726,7 +731,7 @@ class CyImport(CythonCommand):
for marker in module.find('LineNumberMapping'):
cython_lineno = int(marker.attrib['cython_lineno'])
c_linenos = map(int, marker.attrib['c_linenos'].split())
c_linenos = list(map(int, marker.attrib['c_linenos'].split()))
cython_module.lineno_cy2c[cython_lineno] = min(c_linenos)
for c_lineno in c_linenos:
cython_module.lineno_c2cy[c_lineno] = cython_lineno
......@@ -799,7 +804,7 @@ class CyBreak(CythonCommand):
while True:
try:
result = raw_input(
result = input(
"Select a function, press 'a' for all "
"functions or press 'q' or '^D' to quit: ")
except EOFError:
......@@ -844,10 +849,10 @@ class CyBreak(CythonCommand):
def complete(self, text, word):
# Filter init-module functions (breakpoints can be set using
# modulename:linenumber).
names = [n for n, L in self.cy.functions_by_name.iteritems()
if any(not f.is_initmodule_function for f in L)]
qnames = [n for n, f in self.cy.functions_by_qualified_name.iteritems()
if not f.is_initmodule_function]
names = [n for n, L in self.cy.functions_by_name.items()
if any(not f.is_initmodule_function for f in L)]
qnames = [n for n, f in self.cy.functions_by_qualified_name.items()
if not f.is_initmodule_function]
if parameters.complete_unqualified:
all_names = itertools.chain(qnames, names)
......@@ -1137,7 +1142,7 @@ class CyLocals(CythonCommand):
local_cython_vars = cython_function.locals
max_name_length = len(max(local_cython_vars, key=len))
for name, cyvar in sorted(local_cython_vars.iteritems(), key=sortkey):
for name, cyvar in sorted(local_cython_vars.items(), key=sortkey):
if self.is_initialized(self.get_cython_function(), cyvar.name):
value = gdb.parse_and_eval(cyvar.cname)
if not value.is_optimized_out:
......@@ -1170,13 +1175,13 @@ class CyGlobals(CyLocals):
seen = set()
print('Python globals:')
for k, v in sorted(global_python_dict.iteritems(), key=sortkey):
for k, v in sorted(global_python_dict.items(), key=sortkey):
v = v.get_truncated_repr(libpython.MAX_OUTPUT_LEN)
seen.add(k)
print(' %-*s = %s' % (max_name_length, k, v))
print('C globals:')
for name, cyvar in sorted(module_globals.iteritems(), key=sortkey):
for name, cyvar in sorted(module_globals.items(), key=sortkey):
if name not in seen:
try:
value = gdb.parse_and_eval(cyvar.cname)
......@@ -1199,10 +1204,8 @@ class EvaluateOrExecuteCodeMixin(object):
"Fill a remotely allocated dict with values from the Cython C stack"
cython_func = self.get_cython_function()
for name, cyvar in cython_func.locals.iteritems():
if (cyvar.type == PythonObject and
self.is_initialized(cython_func, name)):
for name, cyvar in cython_func.locals.items():
if cyvar.type == PythonObject and self.is_initialized(cython_func, name):
try:
val = gdb.parse_and_eval(cyvar.cname)
except RuntimeError:
......
......@@ -45,6 +45,12 @@ the type names are known to the debugger
The module also extends gdb with some python-specific commands.
'''
try:
input = raw_input
except NameError:
pass
import os
import re
import sys
......@@ -112,7 +118,7 @@ def safety_limit(val):
def safe_range(val):
# As per range, but don't trust the value too much: cap it to a safety
# threshold in case the data was corrupted
return xrange(safety_limit(val))
return range(safety_limit(val))
def write_unicode(file, text):
......@@ -177,6 +183,25 @@ class PrettyPrinterTrackerMeta(type):
all_pretty_typenames.add(self._typename)
# Class decorator that adds a metaclass and recreates the class with it.
# Copied from 'six'. See Cython/Utils.py.
def _add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
@_add_metaclass(PrettyPrinterTrackerMeta)
class PyObjectPtr(object):
"""
Class wrapping a gdb.Value that's a either a (PyObject*) within the
......@@ -189,8 +214,6 @@ class PyObjectPtr(object):
to corrupt data, etc; this is the debugger, after all.
"""
__metaclass__ = PrettyPrinterTrackerMeta
_typename = 'PyObject'
def __init__(self, gdbval, cast_to=None):
......@@ -263,7 +286,7 @@ class PyObjectPtr(object):
return PyTypeObjectPtr(self.field('ob_type'))
def is_null(self):
return 0 == long(self._gdbval)
return 0 == int(self._gdbval)
def is_optimized_out(self):
'''
......@@ -324,7 +347,7 @@ class PyObjectPtr(object):
return '<%s at remote 0x%x>' % (self.tp_name, self.address)
return FakeRepr(self.safe_tp_name(),
long(self._gdbval))
int(self._gdbval))
def write_repr(self, out, visited):
'''
......@@ -426,7 +449,7 @@ class PyObjectPtr(object):
return gdb.lookup_type(cls._typename).pointer()
def as_address(self):
return long(self._gdbval)
return int(self._gdbval)
class PyVarObjectPtr(PyObjectPtr):
......@@ -457,7 +480,7 @@ def _write_instance_repr(out, visited, name, pyop_attrdict, address):
if isinstance(pyop_attrdict, PyDictObjectPtr):
out.write('(')
first = True
for pyop_arg, pyop_val in pyop_attrdict.iteritems():
for pyop_arg, pyop_val in pyop_attrdict.items():
if not first:
out.write(', ')
first = False
......@@ -477,8 +500,7 @@ class InstanceProxy(object):
def __repr__(self):
if isinstance(self.attrdict, dict):
kwargs = ', '.join("%s=%r" % (arg, val)
for arg, val in self.attrdict.iteritems())
kwargs = ', '.join("%s=%r" % (arg, val) for arg, val in self.attrdict.items())
return '<%s(%s) at remote 0x%x>' % (
self.cl_name, kwargs, self.address)
else:
......@@ -547,7 +569,7 @@ class PyTypeObjectPtr(PyObjectPtr):
tp_name = self.safe_tp_name()
# New-style class:
return InstanceProxy(tp_name, attr_dict, long(self._gdbval))
return InstanceProxy(tp_name, attr_dict, int(self._gdbval))
def write_repr(self, out, visited):
# Guard against infinite loops:
......@@ -687,7 +709,7 @@ class PyDictObjectPtr(PyObjectPtr):
def iteritems(self):
'''
Yields a sequence of (PyObjectPtr key, PyObjectPtr value) pairs,
analagous to dict.iteritems()
analogous to dict.items()
'''
for i in safe_range(self.field('ma_mask') + 1):
ep = self.field('ma_table') + i
......@@ -696,6 +718,8 @@ class PyDictObjectPtr(PyObjectPtr):
pyop_key = PyObjectPtr.from_pyobject_ptr(ep['me_key'])
yield (pyop_key, pyop_value)
items = iteritems
def proxyval(self, visited):
# Guard against infinite loops:
if self.as_address() in visited:
......@@ -703,7 +727,7 @@ class PyDictObjectPtr(PyObjectPtr):
visited.add(self.as_address())
result = {}
for pyop_key, pyop_value in self.iteritems():
for pyop_key, pyop_value in self.items():
proxy_key = pyop_key.proxyval(visited)
proxy_value = pyop_value.proxyval(visited)
result[proxy_key] = proxy_value
......@@ -718,7 +742,7 @@ class PyDictObjectPtr(PyObjectPtr):
out.write('{')
first = True
for pyop_key, pyop_value in self.iteritems():
for pyop_key, pyop_value in self.items():
if not first:
out.write(', ')
first = False
......@@ -745,7 +769,7 @@ class PyInstanceObjectPtr(PyObjectPtr):
in_dict = self.pyop_field('in_dict').proxyval(visited)
# Old-style class:
return InstanceProxy(cl_name, in_dict, long(self._gdbval))
return InstanceProxy(cl_name, in_dict, int(self._gdbval))
def write_repr(self, out, visited):
# Guard against infinite loops:
......@@ -830,9 +854,9 @@ class PyLongObjectPtr(PyObjectPtr):
#define PyLong_SHIFT 30
#define PyLong_SHIFT 15
'''
ob_size = long(self.field('ob_size'))
ob_size = int(self.field('ob_size'))
if ob_size == 0:
return long(0)
return int(0)
ob_digit = self.field('ob_digit')
......@@ -918,7 +942,7 @@ class PyFrameObjectPtr(PyObjectPtr):
return
pyop_globals = self.pyop_field('f_globals')
return pyop_globals.iteritems()
return iter(pyop_globals.items())
def iter_builtins(self):
'''
......@@ -929,7 +953,7 @@ class PyFrameObjectPtr(PyObjectPtr):
return
pyop_builtins = self.pyop_field('f_builtins')
return pyop_builtins.iteritems()
return iter(pyop_builtins.items())
def get_var_by_name(self, name):
'''
......@@ -965,7 +989,7 @@ class PyFrameObjectPtr(PyObjectPtr):
if self.is_optimized_out():
return None
f_trace = self.field('f_trace')
if long(f_trace) != 0:
if int(f_trace) != 0:
# we have a non-NULL f_trace:
return self.f_lineno
else:
......@@ -1197,7 +1221,7 @@ class PyUnicodeObjectPtr(PyObjectPtr):
# From unicodeobject.h:
# Py_ssize_t length; /* Length of raw Unicode data in buffer */
# Py_UNICODE *str; /* Raw Unicode buffer */
field_length = long(self.field('length'))
field_length = int(self.field('length'))
field_str = self.field('str')
# Gather a list of ints from the Py_UNICODE array; these are either
......@@ -2315,11 +2339,11 @@ def _pointervalue(gdbval):
"""
# don't convert with int() as it will raise a RuntimeError
if gdbval.address is not None:
return long(gdbval.address)
return int(gdbval.address)
else:
# the address attribute is None sometimes, in which case we can
# still convert the pointer to an int
return long(gdbval)
return int(gdbval)
def pointervalue(gdbval):
......@@ -2511,7 +2535,7 @@ class FixGdbCommand(gdb.Command):
warnings.filterwarnings('ignore', r'.*', RuntimeWarning,
re.escape(__name__))
try:
long(gdb.parse_and_eval("(void *) 0")) == 0
int(gdb.parse_and_eval("(void *) 0")) == 0
except RuntimeError:
pass
# warnings.resetwarnings()
......@@ -2549,7 +2573,7 @@ class PyExec(gdb.Command):
lines = []
while True:
try:
line = raw_input('>')
line = input('>')
except EOFError:
break
else:
......
......@@ -15,6 +15,11 @@ from distutils import log
from distutils.command import build_ext as _build_ext
from distutils import sysconfig
try:
from __builtin__ import basestring
except ImportError:
basestring = str
extension_name_re = _build_ext.extension_name_re
show_compilers = _build_ext.show_compilers
......
......@@ -45,7 +45,7 @@ class Extension(_Extension.Extension):
# Translate pyrex_X to cython_X for backwards compatibility.
had_pyrex_options = False
for key in kw.keys():
for key in list(kw):
if key.startswith('pyrex_'):
had_pyrex_options = True
kw['cython' + key[5:]] = kw.pop(key)
......
......@@ -32,7 +32,7 @@ def nfa_to_dfa(old_machine, debug=None):
# Seed the process using the initial states of the old machine.
# Make the corresponding new states into initial states of the new
# machine with the same names.
for (key, old_state) in old_machine.initial_states.iteritems():
for (key, old_state) in old_machine.initial_states.items():
new_state = state_map.old_to_new(epsilon_closure(old_state))
new_machine.make_initial_state(key, new_state)
# Tricky bit here: we add things to the end of this list while we're
......@@ -40,10 +40,10 @@ def nfa_to_dfa(old_machine, debug=None):
for new_state in new_machine.states:
transitions = TransitionMap()
for old_state in state_map.new_to_old(new_state):
for event, old_target_states in old_state.transitions.iteritems():
for event, old_target_states in old_state.transitions.items():
if event and old_target_states:
transitions.add_set(event, set_epsilon_closure(old_target_states))
for event, old_states in transitions.iteritems():
for event, old_states in transitions.items():
new_machine.add_transitions(new_state, event, state_map.old_to_new(old_states))
if debug:
debug.write("\n===== State Mapping =====\n")
......
......@@ -115,7 +115,7 @@ class Lexicon(object):
tables = None # StateTableMachine
def __init__(self, specifications, debug=None, debug_flags=7, timings=None):
if type(specifications) != types.ListType:
if not isinstance(specifications, list):
raise Errors.InvalidScanner("Scanner definition is not a list")
if timings:
from .Timing import time
......@@ -132,7 +132,7 @@ class Lexicon(object):
self.add_token_to_machine(
nfa, user_initial_state, token, token_number)
token_number += 1
elif type(spec) == types.TupleType:
elif isinstance(spec, tuple):
self.add_token_to_machine(
nfa, default_initial_state, spec, token_number)
token_number += 1
......@@ -180,11 +180,11 @@ class Lexicon(object):
re.build_machine(machine, initial_state, final_state,
match_bol=1, nocase=0)
final_state.set_action(action, priority=-token_number)
except Errors.PlexError, e:
except Errors.PlexError as e:
raise e.__class__("Token number %d: %s" % (token_number, e))
def parse_token_definition(self, token_spec):
if type(token_spec) != types.TupleType:
if not isinstance(token_spec, tuple):
raise Errors.InvalidToken("Token definition is not a tuple")
if len(token_spec) != 2:
raise Errors.InvalidToken("Wrong number of items in token definition")
......
......@@ -12,7 +12,17 @@ import sys
from .Transitions import TransitionMap
LOWEST_PRIORITY = -sys.maxint
try:
from sys import maxsize as maxint
except ImportError:
from sys import maxint
try:
unichr
except NameError:
unichr = chr
LOWEST_PRIORITY = -maxint
class Machine(object):
......@@ -54,7 +64,7 @@ class Machine(object):
file.write("Plex.Machine:\n")
if self.initial_states is not None:
file.write(" Initial states:\n")
for (name, state) in self.initial_states.iteritems():
for (name, state) in sorted(self.initial_states.items()):
file.write(" '%s': %d\n" % (name, state.number))
for s in self.states:
s.dump(file)
......@@ -145,11 +155,11 @@ class FastMachine(object):
for old_state in old_machine.states:
new_state = self.new_state()
old_to_new[old_state] = new_state
for name, old_state in old_machine.initial_states.iteritems():
for name, old_state in old_machine.initial_states.items():
initial_states[name] = old_to_new[old_state]
for old_state in old_machine.states:
new_state = old_to_new[old_state]
for event, old_state_set in old_state.transitions.iteritems():
for event, old_state_set in old_state.transitions.items():
if old_state_set:
new_state[event] = old_to_new[old_state_set.keys()[0]]
else:
......@@ -172,7 +182,7 @@ class FastMachine(object):
def make_initial_state(self, name, state):
self.initial_states[name] = state
def add_transitions(self, state, event, new_state, maxint=sys.maxint):
def add_transitions(self, state, event, new_state, maxint=maxint):
if type(event) is tuple:
code0, code1 = event
if code0 == -maxint:
......@@ -190,7 +200,7 @@ class FastMachine(object):
def dump(self, file):
file.write("Plex.FastMachine:\n")
file.write(" Initial states:\n")
for name, state in self.initial_states.iteritems():
for name, state in sorted(self.initial_states.items()):
file.write(" %s: %s\n" % (repr(name), state['number']))
for state in self.states:
self.dump_state(state, file)
......@@ -208,7 +218,7 @@ class FastMachine(object):
def dump_transitions(self, state, file):
chars_leading_to_state = {}
special_to_state = {}
for (c, s) in state.iteritems():
for (c, s) in state.items():
if len(c) == 1:
chars = chars_leading_to_state.get(id(s), None)
if chars is None:
......
......@@ -9,7 +9,10 @@
from __future__ import absolute_import
import types
from sys import maxint as maxint
try:
from sys import maxsize as maxint
except ImportError:
from sys import maxint
from . import Errors
......@@ -84,9 +87,7 @@ def CodeRanges(code_list):
Given a list of codes as returned by chars_to_ranges, return
an RE which will match a character in any of the ranges.
"""
re_list = []
for i in xrange(0, len(code_list), 2):
re_list.append(CodeRange(code_list[i], code_list[i + 1]))
re_list = [CodeRange(code_list[i], code_list[i + 1]) for i in range(0, len(code_list), 2)]
return Alt(*re_list)
......@@ -304,8 +305,7 @@ class Seq(RE):
def __init__(self, *re_list):
nullable = 1
for i in xrange(len(re_list)):
re = re_list[i]
for i, re in enumerate(re_list):
self.check_re(i, re)
nullable = nullable and re.nullable
self.re_list = re_list
......@@ -329,12 +329,11 @@ class Seq(RE):
else:
s1 = initial_state
n = len(re_list)
for i in xrange(n):
for i, re in enumerate(re_list):
if i < n - 1:
s2 = m.new_state()
else:
s2 = final_state
re = re_list[i]
re.build_machine(m, s1, s2, match_bol, nocase)
s1 = s2
match_bol = re.match_nl or (match_bol and re.nullable)
......
......@@ -104,7 +104,7 @@ class REParser(object):
if self.c == '-' and self.lookahead(1) != ']':
self.next()
c2 = self.get()
for a in xrange(ord(c1), ord(c2) + 1):
for a in range(ord(c1), ord(c2) + 1):
char_list.append(chr(a))
else:
char_list.append(c1)
......
......@@ -6,7 +6,10 @@
from __future__ import absolute_import
from sys import maxint as maxint
try:
from sys import maxsize as maxint
except ImportError:
from sys import maxint
class TransitionMap(object):
......@@ -107,7 +110,7 @@ class TransitionMap(object):
result.append(((code0, code1), set))
code0 = code1
i += 2
for event, set in self.special.iteritems():
for event, set in self.special.items():
if set:
result.append((event, set))
return iter(result)
......@@ -179,7 +182,7 @@ class TransitionMap(object):
map_strs.append(state_set_str(map[i]))
i += 1
special_strs = {}
for event, set in self.special.iteritems():
for event, set in self.special.items():
special_strs[event] = state_set_str(set)
return "[%s]+%s" % (
','.join(map_strs),
......@@ -201,7 +204,7 @@ class TransitionMap(object):
while i < n:
self.dump_range(map[i], map[i + 2], map[i + 1], file)
i += 2
for event, set in self.special.iteritems():
for event, set in self.special.items():
if set:
if not event:
event = 'empty'
......
# cython: language_level=3
from cpython.ref cimport PyObject, Py_INCREF, Py_DECREF, Py_XDECREF, Py_XINCREF
from cpython.exc cimport PyErr_Fetch, PyErr_Restore
from cpython.pystate cimport PyThreadState_Get
......@@ -72,7 +74,7 @@ cdef void report_unraisable(object e=None):
if e is None:
import sys
e = sys.exc_info()[1]
print u"refnanny raised an exception: %s" % e
print(u"refnanny raised an exception: %s" % e)
except:
pass # We absolutely cannot exit with an exception
......@@ -159,9 +161,10 @@ cdef void FinishContext(PyObject** ctx):
context = <Context>ctx[0]
errors = context.end()
if errors:
print u"%s: %s()" % (context.filename.decode('latin1'),
context.name.decode('latin1'))
print errors
print(u"%s: %s()" % (
context.filename.decode('latin1'),
context.name.decode('latin1')))
print(errors)
context = None
except:
report_unraisable()
......
# cython.* namespace for pure mode.
from __future__ import absolute_import
__version__ = "0.23.beta1"
try:
from __builtin__ import basestring
except ImportError:
basestring = str
# BEGIN shameless copy from Cython/minivect/minitypes.py
......@@ -233,7 +240,7 @@ class StructType(CythonType):
for key, value in cast_from.__dict__.items():
setattr(self, key, value)
else:
for key, value in data.iteritems():
for key, value in data.items():
setattr(self, key, value)
def __setattr__(self, key, value):
......@@ -260,7 +267,7 @@ class UnionType(CythonType):
datadict = data
if len(datadict) > 1:
raise AttributeError("Union can only store one field at a time.")
for key, value in datadict.iteritems():
for key, value in datadict.items():
setattr(self, key, value)
def __setattr__(self, key, value):
......@@ -344,7 +351,7 @@ def _specialized_from_args(signatures, args, kwargs):
py_int = typedef(int, "int")
try:
py_long = typedef(long, "long")
except NameError: # Py3
except NameError: # Py3
py_long = typedef(int, "long")
py_float = typedef(float, "float")
py_complex = typedef(complex, "double complex")
......
from cStringIO import StringIO
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO # does not support writing 'str' in Py2
class StringIOTree(object):
......@@ -76,23 +79,23 @@ at that spot and get a new StringIOTree object that is "left behind".
EXAMPLE:
>>> a = StringIOTree()
>>> a.write('first\n')
>>> _= a.write('first\n')
>>> b = a.insertion_point()
>>> a.write('third\n')
>>> b.write('second\n')
>>> _= a.write('third\n')
>>> _= b.write('second\n')
>>> a.getvalue().split()
['first', 'second', 'third']
>>> c = b.insertion_point()
>>> d = c.insertion_point()
>>> d.write('alpha\n')
>>> b.write('gamma\n')
>>> c.write('beta\n')
>>> _= d.write('alpha\n')
>>> _= b.write('gamma\n')
>>> _= c.write('beta\n')
>>> b.getvalue().split()
['second', 'alpha', 'beta', 'gamma']
>>> i = StringIOTree()
>>> d.insert(i)
>>> i.write('inserted\n')
>>> _= i.write('inserted\n')
>>> out = StringIO()
>>> a.copyto(out)
>>> out.getvalue().split()
......
# The original Tempita implements all of its templating code here.
# Moved it to _tempita.py to make the compilation portable.
from _tempita import *
from ._tempita import *
......@@ -29,6 +29,8 @@ can use ``__name='tmpl.html'`` to set the name of the template.
If there are syntax errors ``TemplateError`` will be raised.
"""
from __future__ import absolute_import
import re
import sys
import cgi
......@@ -38,12 +40,10 @@ except ImportError: # Py3
from urllib.parse import quote as url_quote
import os
import tokenize
try:
from io import StringIO
except ImportError:
from cStringIO import StringIO
from Cython.Tempita._looper import looper
from Cython.Tempita.compat3 import bytes, basestring_, next, is_unicode, coerce_text
from io import StringIO
from ._looper import looper
from .compat3 import bytes, unicode_, basestring_, next, is_unicode, coerce_text
__all__ = ['TemplateError', 'Template', 'sub', 'HTMLTemplate',
'sub_html', 'html', 'bunch']
......@@ -202,7 +202,7 @@ class Template(object):
position=None, name=self.name)
templ = self.get_template(inherit_template, self)
self_ = TemplateObject(self.name)
for name, value in defs.iteritems():
for name, value in defs.items():
setattr(self_, name, value)
self_.body = body
ns = ns.copy()
......@@ -298,32 +298,28 @@ class Template(object):
try:
try:
value = eval(code, self.default_namespace, ns)
except SyntaxError, e:
except SyntaxError as e:
raise SyntaxError(
'invalid syntax in expression: %s' % code)
return value
except:
exc_info = sys.exc_info()
e = exc_info[1]
except Exception as e:
if getattr(e, 'args', None):
arg0 = e.args[0]
else:
arg0 = coerce_text(e)
e.args = (self._add_line_info(arg0, pos),)
raise exc_info[0], e, exc_info[2]
raise
def _exec(self, code, ns, pos):
__traceback_hide__ = True
try:
exec code in self.default_namespace, ns
except:
exc_info = sys.exc_info()
e = exc_info[1]
exec(code, self.default_namespace, ns)
except Exception as e:
if e.args:
e.args = (self._add_line_info(e.args[0], pos),)
else:
e.args = (self._add_line_info(None, pos),)
raise exc_info[0], e, exc_info[2]
raise
def _repr(self, value, pos):
__traceback_hide__ = True
......@@ -332,7 +328,7 @@ class Template(object):
return ''
if self._unicode:
try:
value = unicode(value)
value = unicode_(value)
except UnicodeDecodeError:
value = bytes(value)
else:
......@@ -341,11 +337,9 @@ class Template(object):
if (is_unicode(value)
and self.default_encoding):
value = value.encode(self.default_encoding)
except:
exc_info = sys.exc_info()
e = exc_info[1]
except Exception as e:
e.args = (self._add_line_info(e.args[0], pos),)
raise exc_info[0], e, exc_info[2]
raise
else:
if self._unicode and isinstance(value, bytes):
if not self.default_encoding:
......@@ -354,7 +348,7 @@ class Template(object):
'(no default_encoding provided)' % value)
try:
value = value.decode(self.default_encoding)
except UnicodeDecodeError, e:
except UnicodeDecodeError as e:
raise UnicodeDecodeError(
e.encoding,
e.object,
......@@ -391,7 +385,7 @@ def paste_script_template_renderer(content, vars, filename=None):
class bunch(dict):
def __init__(self, **kw):
for name, value in kw.iteritems():
for name, value in kw.items():
setattr(self, name, value)
def __setattr__(self, name, value):
......@@ -413,12 +407,9 @@ class bunch(dict):
return dict.__getitem__(self, key)
def __repr__(self):
items = [
(k, v) for k, v in self.iteritems()]
items.sort()
return '<%s %s>' % (
self.__class__.__name__,
' '.join(['%s=%r' % (k, v) for k, v in items]))
' '.join(['%s=%r' % (k, v) for k, v in sorted(self.items())]))
############################################################
## HTML Templating
......@@ -467,10 +458,8 @@ def url(v):
def attr(**kw):
kw = list(kw.iteritems())
kw.sort()
parts = []
for name, value in kw:
for name, value in sorted(kw.items()):
if value is None:
continue
if name.endswith('_'):
......@@ -549,7 +538,7 @@ class TemplateDef(object):
values = {}
sig_args, var_args, var_kw, defaults = self._func_signature
extra_kw = {}
for name, value in kw.iteritems():
for name, value in kw.items():
if not var_kw and name not in sig_args:
raise TypeError(
'Unexpected argument %s' % name)
......@@ -572,7 +561,7 @@ class TemplateDef(object):
raise TypeError(
'Extra position arguments: %s'
% ', '.join([repr(v) for v in args]))
for name, value_expr in defaults.iteritems():
for name, value_expr in defaults.items():
if name not in values:
values[name] = self._template._eval(
value_expr, self._ns, self._pos)
......
import sys
__all__ = ['b', 'basestring_', 'bytes', 'next', 'is_unicode']
__all__ = ['b', 'basestring_', 'bytes', 'unicode_', 'next', 'is_unicode']
if sys.version < "3":
b = bytes = str
basestring_ = basestring
unicode_ = unicode
else:
def b(s):
......@@ -13,6 +14,7 @@ else:
return bytes(s)
basestring_ = (bytes, str)
bytes = bytes
unicode_ = str
text = str
if sys.version < "3":
......
......@@ -101,7 +101,7 @@ class CythonTest(unittest.TestCase):
try:
func()
self.fail("Expected an exception of type %r" % exc_type)
except exc_type, e:
except exc_type as e:
self.assert_(isinstance(e, exc_type))
return e
......
......@@ -20,7 +20,7 @@ TOOLS_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..',
@contextmanager
def _tempfile(code):
code = dedent(code)
if isinstance(code, unicode):
if not isinstance(code, bytes):
code = code.encode('utf8')
with NamedTemporaryFile(suffix='.py') as f:
......
......@@ -52,7 +52,7 @@ class TestStringIOTree(unittest.TestCase):
self.write_line(10, tree=line_10_insertion_point)
self.write_line(12, tree=line_9_to_12_insertion_point)
self.assertEqual(self.tree.allmarkers(), range(1, 17))
self.assertEqual(self.tree.allmarkers(), list(range(1, 17)))
self.assertEqual(code.strip(), self.tree.getvalue().strip())
......
......@@ -38,12 +38,17 @@ if __name__ == '__main__':
unittest.main(testRunner=xmlrunner.XMLTestRunner(output='test-reports'))
"""
from __future__ import absolute_import
import os
import sys
import time
from unittest import TestResult, _TextTestResult, TextTestRunner
from cStringIO import StringIO
import xml.dom.minidom
try:
from StringIO import StringIO
except ImportError:
from io import StringIO # doesn't accept 'str' in Py2
class XMLDocument(xml.dom.minidom.Document):
......@@ -330,9 +335,10 @@ class _XMLTestResult(_TextTestResult):
class XMLTestRunner(TextTestRunner):
"""A test runner class that outputs the results in JUnit like XML files.
"""
def __init__(self, output='.', stream=sys.stderr, descriptions=True, \
verbose=False, elapsed_times=True):
def __init__(self, output='.', stream=None, descriptions=True, verbose=False, elapsed_times=True):
"Create a new instance of XMLTestRunner."
if stream is None:
stream = sys.stderr
verbosity = (1, 2)[verbose]
TextTestRunner.__init__(self, stream, descriptions, verbosity)
self.output = output
......
......@@ -2,7 +2,9 @@
# This utility provides cython.array and cython.view.memoryview
import cython
from __future__ import absolute_import
cimport cython
# from cpython cimport ...
cdef extern from "Python.h":
......@@ -129,8 +131,8 @@ cdef class array:
if itemsize <= 0:
raise ValueError("itemsize <= 0 for cython.array")
if isinstance(format, unicode):
format = (<unicode>format).encode('ASCII')
if not isinstance(format, bytes):
format = format.encode('ASCII')
self._format = format # keep a reference to the byte string
self.format = self._format
......
......@@ -442,8 +442,6 @@ static CYTHON_INLINE void __pyx_fatalerror(const char *fmt, ...) {
va_list vargs;
char msg[200];
va_start(vargs, fmt);
#ifdef HAVE_STDARG_PROTOTYPES
va_start(vargs, fmt);
#else
......
......@@ -3,6 +3,13 @@
# anywhere else in particular
#
from __future__ import absolute_import
try:
from __builtin__ import basestring
except ImportError:
basestring = str
import os
import sys
import re
......@@ -180,15 +187,14 @@ def path_exists(path):
# file name encodings
def decode_filename(filename):
if isinstance(filename, unicode):
return filename
try:
filename_encoding = sys.getfilesystemencoding()
if filename_encoding is None:
filename_encoding = sys.getdefaultencoding()
filename = filename.decode(filename_encoding)
except UnicodeDecodeError:
pass
if isinstance(filename, bytes):
try:
filename_encoding = sys.getfilesystemencoding()
if filename_encoding is None:
filename_encoding = sys.getdefaultencoding()
filename = filename.decode(filename_encoding)
except UnicodeDecodeError:
pass
return filename
# support for source file encoding detection
......@@ -408,3 +414,20 @@ class LazyStr:
def __radd__(self, left):
return left + self.callback()
# Class decorator that adds a metaclass and recreates the class with it.
# Copied from 'six'.
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
from __future__ import absolute_import, print_function
import sys
f1 = open(sys.argv[1])
f2 = open(sys.argv[2])
try:
if f1.read() != f2.read():
print ("Files differ")
print("Files differ")
sys.exit(1)
else:
print ("Files identical")
print("Files identical")
finally:
f1.close()
f2.close()
# cython: language_level=3
print __name__
print(__name__)
if __name__ == "__main__":
print "Hi, I'm embedded."
print("Hi, I'm embedded.")
else:
print "I'm being imported."
print("I'm being imported.")
# cython: language_level=3
import lcmath
def nCr(n, r):
"""Return the number of ways to choose r elements of a set of n."""
return lcmath.exp( lcmath.lfactorial(n) - lcmath.lfactorial(r)
- lcmath.lfactorial(n-r) )
return lcmath.exp(
lcmath.lfactorial(n) -
lcmath.lfactorial(r) -
lcmath.lfactorial(n-r)
)
if __name__ == "__main__":
import sys
......@@ -11,4 +17,4 @@ if __name__ == "__main__":
sys.stderr.write("USAGE: %s n r\nPrints n-choose-r.\n" % sys.argv[0])
sys.exit(2)
n, r = map(float, sys.argv[1:])
print nCr(n, r)
print(nCr(n, r))
# cython: language_level=3
cdef extern from "math.h":
double c_lgamma "lgamma" (double)
double c_exp "exp" (double)
def exp(n):
"""Return e**n."""
return c_exp(n)
def lfactorial(n):
"""Return an estimate of the log factorial of n."""
return c_lgamma(n+1)
def factorial(n):
"""Return an estimate of the factorial of n."""
return c_exp( c_lgamma(n+1) )
......@@ -21,4 +26,4 @@ if __name__ == "__main__":
sys.stderr.write("USAGE: %s n\nPrints n!.\n" % sys.argv[0])
sys.exit(2)
n, = map(float, sys.argv[1:])
print factorial(n)
print(factorial(n))
# cython: language_level=3
def f(x):
return x**2-x
def integrate_f(a, b, N):
s = 0.0
dx = (b-a)/N
......
# cython: language_level=3
cdef double f(double x) except? -2:
return x**2-x
def integrate_f(double a, double b, int N):
cdef int i
s = 0.0
......
from __future__ import absolute_import, print_function
import timeit
import integrate0, integrate1, integrate2
......@@ -5,9 +7,9 @@ import integrate0, integrate1, integrate2
number = 10
py_time = None
for m in ('integrate0', 'integrate1', 'integrate2'):
print m
print(m)
t = min(timeit.repeat("integrate_f(0.0, 10.0, 10000000)", "from %s import integrate_f" % m, number=number))
if py_time is None:
py_time = t
print " ", t / number, "s"
print " ", py_time / t
print(" ", t / number, "s")
print(" ", py_time / t)
from __future__ import absolute_import, print_function
import os
import sys
......@@ -8,7 +10,7 @@ from Cython.Build import cythonize
# For demo purposes, we build our own tiny library.
try:
print "building libmymath.a"
print("building libmymath.a")
assert os.system("gcc -shared -fPIC -c mymath.c -o mymath.o") == 0
assert os.system("ar rcs libmymath.a mymath.o") == 0
except:
......
cimport numpy
import numpy
cimport numpy as cnp
def sum_of_squares(numpy.ndarray[double, ndim=1] arr):
def sum_of_squares(cnp.ndarray[double, ndim=1] arr):
cdef long N = arr.shape[0]
cdef double ss = 0
for i in range(N):
......
# cython: language_level=3
# distutils: extra_compile_args = -O3
import cython
cimport cython
ctypedef fused INT:
int
......@@ -15,6 +16,7 @@ ctypedef fused C_INT:
unsigned int
unsigned long long
@cython.overflowcheck(False)
def fib(INT n):
"""
......@@ -54,12 +56,13 @@ def collatz(INT n):
cdef INT k = 0
while n != 1:
if n % 2 == 0:
n /= 2
n //= 2
else:
n = 3*n + 1
k += 1
return int(k)
@cython.overflowcheck(True)
@cython.overflowcheck.fold(False)
def collatz_overflow(INT n):
......@@ -74,12 +77,13 @@ def collatz_overflow(INT n):
cdef INT k = 0
while n != 1:
if n % 2 == 0:
n /= 2
n //= 2
else:
n = 3*n + 1
k += 1
return int(k)
@cython.overflowcheck(True)
@cython.overflowcheck.fold(True)
def collatz_overflow_fold(INT n):
......@@ -94,14 +98,13 @@ def collatz_overflow_fold(INT n):
cdef INT k = 0
while n != 1:
if n % 2 == 0:
n /= 2
n //= 2
else:
n = 3*n + 1
k += 1
return int(k)
@cython.overflowcheck(False)
def factorial(INT n):
"""
......@@ -129,7 +132,6 @@ def factorial_overflow(INT n):
return int(res)
@cython.overflowcheck(False)
def most_orthogonal(C_INT[:,::1] vectors):
cdef C_INT n = vectors.shape[0]
......@@ -148,6 +150,7 @@ def most_orthogonal(C_INT[:,::1] vectors):
min_pair = i, j
return vectors[i], vectors[j]
@cython.overflowcheck(True)
@cython.overflowcheck.fold(False)
def most_orthogonal_overflow(C_INT[:,::1] vectors):
......@@ -167,6 +170,7 @@ def most_orthogonal_overflow(C_INT[:,::1] vectors):
min_pair = i, j
return vectors[i], vectors[j]
@cython.overflowcheck(True)
@cython.overflowcheck.fold(True)
def most_orthogonal_overflow_fold(C_INT[:,::1] vectors):
......
from __future__ import absolute_import, print_function
from overflow_perf import *
import sys
......@@ -11,7 +13,7 @@ except ImportError:
def run_tests(N):
global f
for func in most_orthogonal, fib, collatz, factorial:
print func.__name__
print(func.__name__)
for type in ['int', 'unsigned int', 'long long', 'unsigned long long', 'object']:
if func == most_orthogonal:
if type == 'object' or np == None:
......@@ -23,15 +25,16 @@ def run_tests(N):
else:
arg = N
try:
print "%s[%s](%s)" % (func.__name__, type, N)
print("%s[%s](%s)" % (func.__name__, type, N))
with_overflow = my_timeit(globals()[func.__name__ + "_overflow"][type], arg)
no_overflow = my_timeit(func[type], arg)
print "\t%0.04e\t%0.04e\t%0.04f" % (no_overflow, with_overflow, with_overflow / no_overflow)
print("\t%0.04e\t%0.04e\t%0.04f" % (no_overflow, with_overflow, with_overflow / no_overflow))
if func.__name__ + "_overflow_fold" in globals():
with_overflow = my_timeit(globals()[func.__name__ + "_overflow_fold"][type], arg)
print "\t%0.04e\t%0.04e\t%0.04f" % (no_overflow, with_overflow, with_overflow / no_overflow), "(folded)"
print("\t%0.04e\t%0.04e\t%0.04f (folded)" % (
no_overflow, with_overflow, with_overflow / no_overflow))
except OverflowError:
print " ", "Overflow"
print(" ", "Overflow")
def my_timeit(func, N):
global f, arg
......@@ -44,10 +47,11 @@ def my_timeit(func, N):
break
return res / times
params = sys.argv[1:]
if not params:
params = [129, 9, 97]
for arg in params:
print
print "N", arg
print()
print("N", arg)
run_tests(int(arg))
print "starting"
# cython: language_level=3
print("starting")
def primes(int kmax):
# cdef int n, k, i
......@@ -10,11 +12,11 @@ def primes(int kmax):
n = 2
while k < kmax:
i = 0
while i < k and n % p[i] <> 0:
i = i + 1
while i < k and n % p[i] != 0:
i += 1
if i == k:
p[k] = n
k = k + 1
k += 1
result.append(n)
n = n + 1
n += 1
return result
from __future__ import absolute_import, print_function
import sys
from primes import primes
if len(sys.argv) >= 2:
n = int(sys.argv[1])
else:
n = 1000
print primes(n)
print(primes(n))
from __future__ import absolute_import, print_function
from spam import Spam
s = Spam()
print "Created:", s
print("Created:", s)
s.set_amount(42)
print "Amount =", s.get_amount()
print("Amount =", s.get_amount())
s.describe()
s = None
# cython: language_level=3
#
# Example of an extension type.
#
......@@ -9,7 +11,7 @@ cdef class Spam:
self.amount = 0
def __dealloc__(self):
print self.amount, "tons of spam is history."
print(self.amount, "tons of spam is history.")
def get_amount(self):
return self.amount
......@@ -18,4 +20,4 @@ cdef class Spam:
self.amount = new_amount
def describe(self):
print self.amount, "tons of spam!"
print(self.amount, "tons of spam!")
include MANIFEST.in README.txt INSTALL.txt ToDo.txt USAGE.txt CHANGES.rst
include COPYING.txt LICENSE.txt Makefile
include COPYING.txt LICENSE.txt 2to3-fixers.txt Makefile
include .gitrev
include pylintrc
include setup.py
......
......@@ -164,7 +164,7 @@ def _set_configuration_nodistutils(env):
env.AppendUnique(PYEXTLINKFLAGS = env['PYEXT_ALLOW_UNDEFINED'])
def ifnotset(env, name, value):
if not env.has_key(name):
if name not in env:
env[name] = value
def set_configuration(env, use_distutils):
......@@ -205,7 +205,7 @@ def generate(env):
"""Add Builders and construction variables for python extensions to an
Environment."""
if not env.has_key('PYEXT_USE_DISTUTILS'):
if 'PYEXT_USE_DISTUTILS' not in env:
env['PYEXT_USE_DISTUTILS'] = False
# This sets all constructions variables used for pyext builders.
......
from pyximport import *
from .pyximport import *
# replicate docstring
from pyximport import __doc__
from .pyximport import __doc__
......@@ -153,5 +153,5 @@ def pyx_to_dll(filename, ext = None, force_rebuild = 0,
if __name__=="__main__":
pyx_to_dll("dummy.pyx")
import test
from . import test
......@@ -177,7 +177,7 @@ def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_l
sargs.update(setup_args)
build_in_temp=sargs.pop('build_in_temp',build_in_temp)
import pyxbuild
from . import pyxbuild
so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod,
build_in_temp=build_in_temp,
pyxbuild_dir=pyxbuild_dir,
......@@ -217,10 +217,14 @@ def load_module(name, pyxfilename, pyxbuild_dir=None, is_package=False,
mod = imp.load_source(name, pyxfilename)
assert mod.__file__ in (pyxfilename, pyxfilename+'c', pyxfilename+'o'), (mod.__file__, pyxfilename)
else:
tb = sys.exc_info()[2]
import traceback
raise ImportError("Building module %s failed: %s" %
(name,
traceback.format_exception_only(*sys.exc_info()[:2]))), None, sys.exc_info()[2]
exc = ImportError("Building module %s failed: %s" % (
name, traceback.format_exception_only(*sys.exc_info()[:2])))
if sys.version_info[0] >= 3:
raise exc.with_traceback(tb)
else:
exec("raise exc, None, tb", {'exc': exc, 'tb': tb})
return mod
......
import pyximport; pyximport.install(reload_support=True)
from __future__ import absolute_import, print_function
from pyximport import pyximport; pyximport.install(reload_support=True)
import os, sys
import time, shutil
import tempfile
def make_tempdir():
tempdir = os.path.join(tempfile.gettempdir(), "pyrex_temp")
if os.path.exists(tempdir):
......@@ -11,14 +15,17 @@ def make_tempdir():
os.mkdir(tempdir)
return tempdir
def remove_tempdir(tempdir):
shutil.rmtree(tempdir, 0, on_remove_file_error)
def on_remove_file_error(func, path, excinfo):
print "Sorry! Could not remove a temp file:", path
print "Extra information."
print func, excinfo
print "You may want to delete this yourself when you get a chance."
print("Sorry! Could not remove a temp file:", path)
print("Extra information.")
print(func, excinfo)
print("You may want to delete this yourself when you get a chance.")
def test():
pyximport._test_files = []
......@@ -52,14 +59,14 @@ def make_ext(name, filename):
time.sleep(1) # sleep a second to get safer mtimes
open(os.path.join(tempdir, "abc.txt"), "w").write(" ")
print "Here goes the reolad"
print("Here goes the reolad")
reload(dummy)
assert len(pyximport._test_files) == 1, pyximport._test_files
reload(dummy)
assert len(pyximport._test_files) ==0, pyximport._test_files
assert len(pyximport._test_files) == 0, pyximport._test_files
remove_tempdir(tempdir)
if __name__=="__main__":
test()
# reload seems to work for Python 2.3 but not 2.2.
from __future__ import absolute_import, print_function
import time, os, sys
import test_pyximport
from . import test_pyximport
# debugging the 2.2 problem
if 1:
from distutils import sysconfig
try:
......@@ -10,7 +11,8 @@ if 1:
except AttributeError:
pass
import pyxbuild
print pyxbuild.distutils.sysconfig == sysconfig
print(pyxbuild.distutils.sysconfig == sysconfig)
def test():
tempdir = test_pyximport.make_tempdir()
......@@ -28,6 +30,6 @@ def test():
assert hello.x == 2, "Reload should work on Python 2.3 but not 2.2"
test_pyximport.remove_tempdir(tempdir)
if __name__=="__main__":
test()
......@@ -23,21 +23,17 @@ except (ImportError, AttributeError):
IS_CPYTHON = True
IS_PYPY = False
from io import open as io_open
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from io import StringIO # doesn't accept 'str' in Py2
try:
import cPickle as pickle
except ImportError:
import pickle
try:
from io import open as io_open
except ImportError:
from codecs import open as io_open
try:
import threading
except ImportError: # No threads, no problems
......@@ -645,12 +641,11 @@ class CythonCompileTestCase(unittest.TestCase):
def setUp(self):
from Cython.Compiler import Options
self._saved_options = [ (name, getattr(Options, name))
for name in ('warning_errors',
'clear_to_none',
'error_on_unknown_names',
'error_on_uninitialized') ]
self._saved_default_directives = Options.directive_defaults.items()
self._saved_options = [
(name, getattr(Options, name))
for name in ('warning_errors', 'clear_to_none', 'error_on_unknown_names', 'error_on_uninitialized')
]
self._saved_default_directives = list(Options.directive_defaults.items())
Options.warning_errors = self.warning_errors
if sys.version_info >= (3, 4):
Options.directive_defaults['autotestdict'] = False
......@@ -1311,7 +1306,7 @@ def collect_doctests(path, module_prefix, suite, selectors, exclude_selectors):
def package_matches(dirname):
if dirname == 'Debugger' and not include_debugger:
return False
return dirname not in ("Mac", "Distutils", "Plex")
return dirname not in ("Mac", "Distutils", "Plex", "Tempita")
def file_matches(filename):
filename, ext = os.path.splitext(filename)
blacklist = ['libcython', 'libpython', 'test_libcython_in_gdb',
......@@ -1573,9 +1568,8 @@ def refactor_for_py3(distdir, cy3_dir):
# need to convert Cython sources first
import lib2to3.refactor
from distutils.util import copydir_run_2to3
fixers = [ fix for fix in lib2to3.refactor.get_fixers_from_package("lib2to3.fixes")
if fix.split('fix_')[-1] not in ('next',)
]
with open('2to3-fixers.txt') as f:
fixers = [line.strip() for line in f if line.strip()]
if not os.path.exists(cy3_dir):
os.makedirs(cy3_dir)
import distutils.log as dlog
......@@ -1774,10 +1768,9 @@ def main():
WORKDIR = os.path.abspath(options.work_dir)
if sys.version_info[0] >= 3:
options.doctests = False
if options.with_cython:
sys.path.insert(0, options.cython_dir)
if options.with_cython and sys.version_info[0] >= 3:
sys.path.insert(0, options.cython_dir)
if sys.version_info[:2] == (3, 2):
try:
# try if Cython is installed in a Py3 version
import Cython.Compiler.Main
......@@ -1791,12 +1784,7 @@ def main():
# hasn't been refactored yet - do it now
global CY3_DIR
CY3_DIR = cy3_dir = os.path.join(WORKDIR, 'Cy3')
if sys.version_info >= (3,1):
refactor_for_py3(DISTDIR, cy3_dir)
elif os.path.isdir(cy3_dir):
sys.path.insert(0, cy3_dir)
else:
options.with_cython = False
refactor_for_py3(DISTDIR, cy3_dir)
if options.watermark:
import Cython.Compiler.Version
......
......@@ -34,14 +34,13 @@ class sdist(sdist_orig):
sdist_orig.run(self)
add_command_class('sdist', sdist)
if sys.version_info[0] >= 3:
if sys.version_info[:2] == (3, 2):
import lib2to3.refactor
from distutils.command.build_py \
import build_py_2to3 as build_py
# need to convert sources to Py3 on installation
fixers = [ fix for fix in lib2to3.refactor.get_fixers_from_package("lib2to3.fixes")
if fix.split('fix_')[-1] not in ('next',)
]
with open('2to3-fixers.txt') as f:
fixers = [line.strip() for line in f if line.strip()]
build_py.fixer_names = fixers
add_command_class("build_py", build_py)
......@@ -202,7 +201,7 @@ def compile_cython_modules(profile=False, compile_more=False, cython_with_refnan
def build_extension(self, ext, *args, **kargs):
try:
build_ext_orig.build_extension(self, ext, *args, **kargs)
except StandardError:
except Exception:
print("Compilation of '%s' failed" % ext.sources[0])
from Cython.Compiler.Main import compile
from Cython import Utils
......
# mode: error
# tag: cpp
from libcpp.vector cimport vector
def vector_is_none(vector[int] iv):
# TODO: this isn't strictly wrong, so it might be allowed as a 'feature' at some point
if iv is None:
pass
_ERRORS = """
8:10: Invalid types for 'is' (vector[int], Python object)
"""
# mode: error
# tag: cpp
# cpp will convert function arguments to a type if it has suitable constructor
# we do not want that when calling from cython
cdef extern from "no_such_file.cpp" nogil:
cppclass wrapped_int:
long long val
wrapped_int()
wrapped_int(long long val)
wrapped_int& operator=(const wrapped_int &other)
wrapped_int& operator=(const long long other)
long long constructor_overload(const wrapped_int& x)
long long constructor_overload(const wrapped_int x)
cdef long long e = constructor_overload(17)
_ERRORS = u"""
18:40: Cannot assign type 'long' to 'const wrapped_int'
"""
PYTHON -c "import pyximport_test; pyximport_test.test()"
######## pyximport_test.py ########
import os.path
from contextlib import contextmanager
import pyximport
pyximport.install(build_dir=os.path.join(os.path.dirname(__file__), "BUILD"))
@contextmanager
def fails(exc=ImportError):
try:
yield
except exc:
pass
else:
raise RuntimeError("NOT RAISED!")
def test():
with fails():
import compiler_error
with fails():
import syntax_error
with fails():
import runtime_error
######## compiler_error.pyx ########
from __future__ import braces
######## syntax_error.pyx ########
def test {
BRACES!
}
######## runtime_error.pyx ########
raise ValueError()
......@@ -24,6 +24,10 @@ public:
this->val = other.val;
return *this;
}
wrapped_int &operator=(const long long val) {
this->val = val;
return *this;
}
};
......@@ -35,6 +39,7 @@ cdef extern from "assign.cpp" nogil:
wrapped_int()
wrapped_int(long long val)
wrapped_int& operator=(const wrapped_int &other)
wrapped_int& operator=(const long long &other)
######## assignment_overload.pyx ########
......@@ -44,6 +49,7 @@ from assign cimport wrapped_int
def test():
cdef wrapped_int a = wrapped_int(2)
cdef wrapped_int b = wrapped_int(3)
cdef long long c = 4
assert &a != &b
assert a.val != b.val
......@@ -51,3 +57,11 @@ def test():
a = b
assert &a != &b
assert a.val == b.val
a = c
assert a.val == c
a, b, c = 2, 3, 4
a = b = c
assert &a != &b
assert a.val == b.val
assert b.val == c
......@@ -19,6 +19,8 @@ public:
wint() { val = 0; }
wint(long long val) { this->val = val; }
long long &operator()() { return this->val; }
long long operator()(long long i) { return this->val + i; }
long long operator()(long long i, long long j) { return this->val + i + j; }
};
######## call.pxd ########
......@@ -29,6 +31,8 @@ cdef extern from "call.cpp" nogil:
wint()
wint(long long val)
long long& operator()()
long long operator()(long long i)
long long operator()(long long i, long long j)
######## call_stack_allocated.pyx ########
......@@ -39,4 +43,8 @@ def test():
cdef long long b = 3
b = a()
assert b == 4
b = a(1ll)
assert b == 5
b = a(1ll, 1ll)
assert b == 6
......@@ -7,6 +7,7 @@ cimport cython.operator
from cython.operator cimport dereference as deref
from libc.string cimport const_char
from libcpp cimport bool
cdef out(s, result_type=None):
print '%s [%s]' % (s.decode('ascii'), result_type)
......@@ -49,6 +50,12 @@ cdef extern from "cpp_operators_helper.h":
const_char* operator[](int)
const_char* operator()(int)
cppclass TruthClass:
TruthClass()
TruthClass(bool)
bool operator bool()
bool value
def test_unops():
"""
>>> test_unops()
......@@ -148,3 +155,30 @@ def test_index_call():
out(t[0][100], typeof(t[0][100]))
out(t[0](100), typeof(t[0](100)))
del t
def test_bool_op():
"""
>>> test_bool_op()
"""
cdef TruthClass yes = TruthClass(True)
cdef TruthClass no = TruthClass(False)
if yes:
pass
else:
assert False
if no:
assert False
def test_bool_cond():
"""
>>> test_bool_cond()
"""
assert (TruthClass(False) or TruthClass(False)).value == False
assert (TruthClass(False) or TruthClass(True)).value == True
assert (TruthClass(True) or TruthClass(False)).value == True
assert (TruthClass(True) or TruthClass(True)).value == True
assert (TruthClass(False) and TruthClass(False)).value == False
assert (TruthClass(False) and TruthClass(True)).value == False
assert (TruthClass(True) and TruthClass(False)).value == False
assert (TruthClass(True) and TruthClass(True)).value == True
......@@ -45,3 +45,11 @@ public:
BIN_OP(());
};
class TruthClass {
public:
TruthClass() : value(false) {}
TruthClass(bool value) : value(value) {}
operator bool() { return value; }
bool value;
};
......@@ -12,6 +12,8 @@ import unittest
import warnings
import contextlib
from Cython.Compiler import Errors
try:
from types import coroutine as types_coroutine
......@@ -54,7 +56,17 @@ except ImportError:
# compiled exec()
def exec(code_string, l, g):
from Cython.Shadow import inline
ns = inline(code_string, locals=l, globals=g, lib_dir=os.path.dirname(__file__))
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
old_stderr = sys.stderr
try:
sys.stderr = StringIO()
ns = inline(code_string, locals=l, globals=g, lib_dir=os.path.dirname(__file__))
finally:
sys.stderr = old_stderr
g.update(ns)
......@@ -116,6 +128,260 @@ def silence_coro_gc():
gc.collect()
class AsyncBadSyntaxTest(unittest.TestCase):
@contextlib.contextmanager
def assertRaisesRegex(self, exc_type, regex):
# the error messages usually don't match, so we just ignore them
try:
yield
except exc_type:
self.assertTrue(True)
else:
self.assertTrue(False)
def test_badsyntax_9(self):
ns = {}
for comp in {'(await a for a in b)',
'[await a for a in b]',
'{await a for a in b}',
'{await a: a for a in b}'}:
with self.assertRaisesRegex(Errors.CompileError, 'await.*in comprehen'):
exec('async def f():\n\t{0}'.format(comp), ns, ns)
def test_badsyntax_10(self):
# Tests for issue 24619
samples = [
"""async def foo():
def bar(): pass
await = 1
""",
"""async def foo():
def bar(): pass
await = 1
""",
"""async def foo():
def bar(): pass
if 1:
await = 1
""",
"""def foo():
async def bar(): pass
if 1:
await a
""",
"""def foo():
async def bar(): pass
await a
""",
"""def foo():
def baz(): pass
async def bar(): pass
await a
""",
"""def foo():
def baz(): pass
# 456
async def bar(): pass
# 123
await a
""",
"""async def foo():
def baz(): pass
# 456
async def bar(): pass
# 123
await = 2
""",
"""def foo():
def baz(): pass
async def bar(): pass
await a
""",
"""async def foo():
def baz(): pass
async def bar(): pass
await = 2
""",
"""async def foo():
def async(): pass
""",
"""async def foo():
def await(): pass
""",
"""async def foo():
def bar():
await
""",
"""async def foo():
return lambda async: await
""",
"""async def foo():
return lambda a: await
""",
"""await a()""",
"""async def foo(a=await b):
pass
""",
"""async def foo(a:await b):
pass
""",
"""def baz():
async def foo(a=await b):
pass
""",
"""async def foo(async):
pass
""",
"""async def foo():
def bar():
def baz():
async = 1
""",
"""async def foo():
def bar():
def baz():
pass
async = 1
""",
"""def foo():
async def bar():
async def baz():
pass
def baz():
42
async = 1
""",
"""async def foo():
def bar():
def baz():
pass\nawait foo()
""",
"""def foo():
def bar():
async def baz():
pass\nawait foo()
""",
"""async def foo(await):
pass
""",
"""def foo():
async def bar(): pass
await a
""",
"""def foo():
async def bar():
pass\nawait a
"""]
for code in samples:
# assertRaises() differs in Py2.6, so use our own assertRaisesRegex() instead
with self.subTest(code=code), self.assertRaisesRegex(Errors.CompileError, '.'):
exec(code, {}, {})
if not hasattr(unittest.TestCase, 'subTest'):
@contextlib.contextmanager
def subTest(self, code, **kwargs):
try:
yield
except Exception:
print(code)
raise
def test_goodsyntax_1(self):
# Tests for issue 24619
def foo(await):
async def foo(): pass
async def foo():
pass
return await + 1
self.assertEqual(foo(10), 11)
def foo(await):
async def foo(): pass
async def foo(): pass
return await + 2
self.assertEqual(foo(20), 22)
def foo(await):
async def foo(): pass
async def foo(): pass
return await + 2
self.assertEqual(foo(20), 22)
def foo(await):
"""spam"""
async def foo(): \
pass
# 123
async def foo(): pass
# 456
return await + 2
self.assertEqual(foo(20), 22)
def foo(await):
def foo(): pass
def foo(): pass
async def bar(): return await_
await_ = await
try:
bar().send(None)
except StopIteration as ex:
return ex.args[0]
self.assertEqual(foo(42), 42)
async def f(z):
async def g(): pass
await z
await = 1
#self.assertTrue(inspect.iscoroutinefunction(f))
class TokenizerRegrTest(unittest.TestCase):
def test_oneline_defs(self):
......@@ -138,17 +404,6 @@ class TokenizerRegrTest(unittest.TestCase):
self.assertEqual(type(ns['foo']()).__name__, 'coroutine')
#self.assertTrue(inspect.iscoroutinefunction(ns['foo']))
def test_syntax_async_await_as_names(self):
async def enable():
await 123
def disable():
await = 123
async = 'abc'
async def reenable():
await 432
class CoroutineTest(unittest.TestCase):
......@@ -511,8 +766,7 @@ class CoroutineTest(unittest.TestCase):
class Awaitable:
pass
async def foo():
return (await Awaitable())
async def foo(): return (await Awaitable())
with self.assertRaisesRegex(
TypeError, "object Awaitable can't be used in 'await' expression"):
......@@ -599,6 +853,39 @@ class CoroutineTest(unittest.TestCase):
run_async(foo())
def test_await_14(self):
class Wrapper:
# Forces the interpreter to use CoroutineType.__await__
def __init__(self, coro):
self.coro = coro
def __await__(self):
return self.coro.__await__()
class FutureLike:
def __await__(self):
return (yield)
class Marker(Exception):
pass
async def coro1():
try:
return await FutureLike()
except ZeroDivisionError:
raise Marker
async def coro2():
return await Wrapper(coro1())
c = coro2()
c.send(None)
with self.assertRaisesRegex(StopIteration, 'spam'):
c.send('spam')
c = coro2()
c.send(None)
with self.assertRaises(Marker):
c.throw(ZeroDivisionError)
def test_await_iterator(self):
async def foo():
return 123
......