Commit 3ee07211 authored by Robert Bradshaw

merge

parents f7b4f6fe a7bd92e2
......@@ -210,7 +210,7 @@ class CCodeWriter:
storage_class = "static"
if storage_class:
self.put("%s " % storage_class)
if visibility <> 'public':
if visibility != 'public':
dll_linkage = None
self.put(entry.type.declaration_code(entry.cname,
dll_linkage = dll_linkage))
......
......@@ -311,7 +311,7 @@ class ExprNode(Node):
def allocate_target_temps(self, env, rhs):
# Perform temp allocation for the LHS of an assignment.
if debug_temp_alloc:
print self, "Allocating target temps"
print("%s Allocating target temps" % self)
self.allocate_subexpr_temps(env)
self.result_code = self.target_code()
if rhs:
......@@ -325,7 +325,7 @@ class ExprNode(Node):
# is used as the result instead of allocating a new
# one.
if debug_temp_alloc:
print self, "Allocating temps"
print("%s Allocating temps" % self)
self.allocate_subexpr_temps(env)
self.allocate_temp(env, result)
if self.is_temp:
......@@ -335,11 +335,11 @@ class ExprNode(Node):
# Allocate temporary variables for all sub-expressions
# of this node.
if debug_temp_alloc:
print self, "Allocating temps for:", self.subexprs
print("%s Allocating temps for: %s" % (self, self.subexprs))
for node in self.subexpr_nodes():
if node:
if debug_temp_alloc:
print self, "Allocating temps for", node
print("%s Allocating temps for %s" % (self, node))
node.allocate_temps(env)
def allocate_temp(self, env, result = None):
......@@ -350,7 +350,7 @@ class ExprNode(Node):
# is used as the result instead of allocating a new
# one.
if debug_temp_alloc:
print self, "Allocating temp"
print("%s Allocating temp" % self)
if result:
if not self.is_temp:
raise InternalError("Result forced on non-temp node")
......@@ -364,7 +364,7 @@ class ExprNode(Node):
else:
self.result_code = None
if debug_temp_alloc:
print self, "Allocated result", self.result_code
print("%s Allocated result %s" % (self, self.result_code))
else:
self.result_code = self.calculate_result_code()
......@@ -384,7 +384,7 @@ class ExprNode(Node):
# otherwise release results of its sub-expressions.
if self.is_temp:
if debug_temp_alloc:
print self, "Releasing result", self.result_code
print("%s Releasing result %s" % (self, self.result_code))
env.release_temp(self.result_code)
else:
self.release_subexpr_temps(env)
......@@ -489,7 +489,7 @@ class ExprNode(Node):
src = CoerceFromPyTypeNode(dst_type, src, env)
else: # neither src nor dst are py types
# Added the string comparison, since for c types that
# is enough, but SageX gets confused when the types are
# is enough, but Cython gets confused when the types are
# in different files.
if not (str(src.type) == str(dst_type) or dst_type.assignable_from(src_type)):
error(self.pos, "Cannot assign type '%s' to '%s'" %
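For orientation only, a hedged sketch of the situation the comment above describes: two compilation units can each hold their own type object for the same C type, so object-based comparison alone fails even though the rendered type names match. The class and names below are illustrative stand-ins, not Cython internals.

    # Hedged illustration (hypothetical stand-in for a Cython type object).
    class FakeCType:
        def __init__(self, name):
            self.name = name
        def __str__(self):
            return self.name

    t1 = FakeCType("long int")    # declared in one .pxd file
    t2 = FakeCType("long int")    # re-declared in another file
    assert t1 is not t2           # distinct objects...
    assert str(t1) == str(t2)     # ...but the string comparison still matches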
......@@ -588,7 +588,7 @@ class BoolNode(PyConstNode):
def coerce_to(self, dst_type, env):
value = self.value
if dst_type.is_numeric:
return IntNode(self.pos, value=self.value).coerce_to(dst_type, env)
return IntNode(self.pos, value=int(self.value)).coerce_to(dst_type, env)
else:
return PyConstNode.coerce_to(self, dst_type, env)
......@@ -977,8 +977,8 @@ class NameNode(AtomicExprNode):
entry.name,
rhs.py_result()))
if debug_disposal_code:
print "NameNode.generate_assignment_code:"
print "...generating disposal code for", rhs
print("NameNode.generate_assignment_code:")
print("...generating disposal code for %s" % rhs)
rhs.generate_disposal_code(code)
else:
......@@ -991,8 +991,8 @@ class NameNode(AtomicExprNode):
code.put_decref(self.result_code, self.ctype())
code.putln('%s = %s;' % (self.result_code, rhs.result_as(self.ctype())))
if debug_disposal_code:
print "NameNode.generate_assignment_code:"
print "...generating post-assignment code for", rhs
print("NameNode.generate_assignment_code:")
print("...generating post-assignment code for %s" % rhs)
rhs.generate_post_assignment_code(code)
def generate_deletion_code(self, code):
......@@ -2139,8 +2139,8 @@ class SequenceNode(ExprNode):
rhs.generate_disposal_code(code)
for i in range(len(self.args)):
item = self.unpacked_items[i]
unpack_code = "__Pyx_UnpackItem(%s)" % (
self.iterator.py_result())
unpack_code = "__Pyx_UnpackItem(%s, %d)" % (
self.iterator.py_result(), i)
code.putln(
"%s = %s; %s" % (
item.result_code,
......@@ -2153,8 +2153,8 @@ class SequenceNode(ExprNode):
"__Pyx_EndUnpack(%s)" % (
self.iterator.py_result()))
if debug_disposal_code:
print "UnpackNode.generate_assignment_code:"
print "...generating disposal code for", iterator
print("UnpackNode.generate_assignment_code:")
print("...generating disposal code for %s" % iterator)
self.iterator.generate_disposal_code(code)
code.putln("}")
......@@ -2261,7 +2261,7 @@ class ListComprehensionNode(SequenceNode):
def allocate_temps(self, env, result = None):
if debug_temp_alloc:
print self, "Allocating temps"
print("%s Allocating temps" % self)
self.allocate_temp(env, result)
self.loop.analyse_declarations(env)
self.loop.analyse_expressions(env)
......@@ -3578,7 +3578,7 @@ class CoercionNode(ExprNode):
self.pos = arg.pos
self.arg = arg
if debug_coercion:
print self, "Coercing", self.arg
print("%s Coercing %s" % (self, self.arg))
def annotate(self, code):
self.arg.annotate(code)
......@@ -3906,18 +3906,20 @@ bad:
unpacking_utility_code = [
"""
static PyObject *__Pyx_UnpackItem(PyObject *); /*proto*/
static PyObject *__Pyx_UnpackItem(PyObject *, Py_ssize_t index); /*proto*/
static int __Pyx_EndUnpack(PyObject *); /*proto*/
""","""
static void __Pyx_UnpackError(void) {
PyErr_SetString(PyExc_ValueError, "unpack sequence of wrong size");
}
static PyObject *__Pyx_UnpackItem(PyObject *iter) {
static PyObject *__Pyx_UnpackItem(PyObject *iter, Py_ssize_t index) {
PyObject *item;
if (!(item = PyIter_Next(iter))) {
if (!PyErr_Occurred())
__Pyx_UnpackError();
if (!PyErr_Occurred()) {
PyErr_Format(PyExc_ValueError,
#if PY_VERSION_HEX < 0x02050000
"need more than %d values to unpack", (int)index);
#else
"need more than %zd values to unpack", index);
#endif
}
}
return item;
}
......@@ -3926,7 +3928,7 @@ static int __Pyx_EndUnpack(PyObject *iter) {
PyObject *item;
if ((item = PyIter_Next(iter))) {
Py_DECREF(item);
__Pyx_UnpackError();
PyErr_SetString(PyExc_ValueError, "too many values to unpack");
return -1;
}
else if (!PyErr_Occurred())
......
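As a point of reference (not part of the commit), the new index argument lets the generated unpacking code report how many values were actually needed, in the same spirit as CPython's own message for tuple unpacking. A hedged Python 2.x sketch of the wording being emulated:

    # Hedged sketch: the error text the updated helper approximates.
    try:
        a, b, c = (1, 2)      # only two values available for three targets
    except ValueError, e:
        print(e)              # Python 2.x: "need more than 2 values to unpack"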
......@@ -61,7 +61,9 @@ def make_lexicon():
two_oct = octdigit + octdigit
three_oct = octdigit + octdigit + octdigit
two_hex = hexdigit + hexdigit
escapeseq = Str("\\") + (two_oct | three_oct | two_hex | AnyChar)
four_hex = two_hex + two_hex
escapeseq = Str("\\") + (two_oct | three_oct | two_hex |
Str('u') + four_hex | Str('x') + two_hex | AnyChar)
bra = Any("([{")
ket = Any(")]}")
......
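For illustration (assumed, not taken from the commit), the kinds of escape sequences the extended pattern now tokenizes as a single unit rather than as a backslash followed by an arbitrary character:

    # Hedged examples of escape forms covered by the updated rule.
    samples = [
        "\\101",      # two or three octal digits (already handled)
        "\\x41",      # 'x' plus two hex digits
        "\\u0041",    # 'u' plus four hex digits (newly added)
    ]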
......@@ -48,13 +48,13 @@ class Context:
# that module, provided its name is not a dotted name.
debug_find_module = 0
if debug_find_module:
print "Context.find_module: module_name =", module_name, \
"relative_to =", relative_to, "pos =", pos, "need_pxd =", need_pxd
print("Context.find_module: module_name = %s, relative_to = %s, pos = %s, need_pxd = %s" % (
module_name, relative_to, pos, need_pxd))
scope = None
pxd_pathname = None
if "." not in module_name and relative_to:
if debug_find_module:
print "...trying relative import"
print("...trying relative import")
scope = relative_to.lookup_submodule(module_name)
if not scope:
qualified_name = relative_to.qualify_name(module_name)
......@@ -63,28 +63,28 @@ class Context:
scope = relative_to.find_submodule(module_name)
if not scope:
if debug_find_module:
print "...trying absolute import"
print("...trying absolute import")
scope = self
for name in module_name.split("."):
scope = scope.find_submodule(name)
if debug_find_module:
print "...scope =", scope
print("...scope =", scope)
if not scope.pxd_file_loaded:
if debug_find_module:
print "...pxd not loaded"
print("...pxd not loaded")
scope.pxd_file_loaded = 1
if not pxd_pathname:
if debug_find_module:
print "...looking for pxd file"
print("...looking for pxd file")
pxd_pathname = self.find_pxd_file(module_name, pos)
if debug_find_module:
print "......found ", pxd_pathname
print("......found ", pxd_pathname)
if not pxd_pathname and need_pxd:
error(pos, "'%s.pxd' not found" % module_name)
if pxd_pathname:
try:
if debug_find_module:
print "Context.find_module: Parsing", pxd_pathname
print("Context.find_module: Parsing %s" % pxd_pathname)
pxd_tree = self.parse(pxd_pathname, scope.type_names, pxd = 1,
full_module_name = module_name)
pxd_tree.analyse_declarations(scope)
......
......@@ -562,7 +562,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
for entry in env.c_class_entries:
#print "generate_typeobj_definitions:", entry.name
#print "...visibility =", entry.visibility
if entry.visibility <> 'extern':
if entry.visibility != 'extern':
type = entry.type
scope = type.scope
if scope: # could be None if there was an error
......@@ -681,7 +681,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
% scope.mangle_internal("tp_dealloc"))
py_attrs = []
for entry in scope.var_entries:
if entry.type.is_pyobject and entry.name <> "__weakref__":
if entry.type.is_pyobject and entry.name != "__weakref__":
py_attrs.append(entry)
if py_attrs or scope.lookup_here("__weakref__"):
self.generate_self_cast(scope, code)
......@@ -1520,7 +1520,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
def generate_base_type_import_code(self, env, entry, code):
base_type = entry.type.base_type
if base_type and base_type.module_name <> env.qualified_name:
if base_type and base_type.module_name != env.qualified_name:
self.generate_type_import_code(env, base_type, self.pos, code)
def use_type_import_utility_code(self, env):
......@@ -1569,7 +1569,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
typeobj_cname = type.typeobj_cname
scope = type.scope
if scope: # could be None if there was an error
if entry.visibility <> 'extern':
if entry.visibility != 'extern':
for slot in TypeSlots.slot_table:
slot.generate_dynamic_init_code(scope, code)
code.putln(
......
......@@ -943,7 +943,7 @@ class CFuncDefNode(FuncDefNode):
dll_linkage = None
header = self.return_type.declaration_code(entity,
dll_linkage = dll_linkage)
if visibility <> 'private':
if visibility != 'private':
storage_class = "%s " % Naming.extern_c_macro
else:
storage_class = "static "
......@@ -1094,6 +1094,8 @@ class DefNode(FuncDefNode):
if self.signature_has_generic_args():
if self.star_arg:
env.use_utility_code(get_stararg_utility_code)
elif self.signature_has_generic_args():
env.use_utility_code(raise_argtuple_too_long_utility_code)
if not self.signature_has_nongeneric_args():
env.use_utility_code(get_keyword_string_check_utility_code)
elif self.starstar_arg:
......@@ -1566,9 +1568,8 @@ class DefNode(FuncDefNode):
def generate_positional_args_check(self, code, nargs):
code.putln("if (unlikely(PyTuple_GET_SIZE(%s) > %d)) {" % (
Naming.args_cname, nargs))
error_message = "function takes at most %d positional arguments (%d given)"
code.putln("PyErr_Format(PyExc_TypeError, \"%s\", %d, PyTuple_GET_SIZE(%s));" % (
error_message, nargs, Naming.args_cname))
code.putln("__Pyx_RaiseArgtupleTooLong(%d, PyTuple_GET_SIZE(%s));" % (
nargs, Naming.args_cname))
code.putln("return %s;" % self.error_value())
code.putln("}")
......@@ -3147,7 +3148,7 @@ class TryFinallyStatNode(StatNode):
"__pyx_why = 0; goto %s;" % catch_label)
for i in cases_used:
new_label = new_labels[i]
#if new_label and new_label <> "<try>":
#if new_label and new_label != "<try>":
if new_label == new_error_label and self.preserve_exception:
self.put_error_catcher(code,
new_error_label, i+1, catch_label)
......@@ -3550,6 +3551,7 @@ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb) {
Py_INCREF(type);
}
else {
type = 0;
PyErr_SetString(PyExc_TypeError,
"raise: exception must be an old-style class or instance");
goto raise_error;
......@@ -3646,6 +3648,30 @@ static INLINE int __Pyx_SplitStarArg(
}
"""]
#------------------------------------------------------------------------------------
#
# __Pyx_RaiseArgtupleTooLong raises the correct exception when too
# many positional arguments were found. This handles Py_ssize_t
# formatting correctly.
raise_argtuple_too_long_utility_code = [
"""
static INLINE void __Pyx_RaiseArgtupleTooLong(Py_ssize_t num_expected, Py_ssize_t num_found); /*proto*/
""","""
static INLINE void __Pyx_RaiseArgtupleTooLong(
Py_ssize_t num_expected,
Py_ssize_t num_found)
{
const char* error_message =
#if PY_VERSION_HEX < 0x02050000
"function takes at most %d positional arguments (%d given)";
#else
"function takes at most %zd positional arguments (%zd given)";
#endif
PyErr_Format(PyExc_TypeError, error_message, num_expected, num_found);
}
"""]
#------------------------------------------------------------------------------------
#
# __Pyx_CheckKeywordStrings raises an error if non-string keywords
......
......@@ -630,7 +630,7 @@ class CFuncType(CType):
if not self.is_overridable and other_type.is_overridable:
return 0
nargs = len(self.args)
if nargs <> len(other_type.args):
if nargs != len(other_type.args):
return 0
# When comparing C method signatures, the first argument
# is exempt from compatibility checking (the proper check
......@@ -639,9 +639,9 @@ class CFuncType(CType):
if not self.args[i].type.same_as(
other_type.args[i].type):
return 0
if self.has_varargs <> other_type.has_varargs:
if self.has_varargs != other_type.has_varargs:
return 0
if self.optional_arg_count <> other_type.optional_arg_count:
if self.optional_arg_count != other_type.optional_arg_count:
return 0
if not self.return_type.same_as(other_type.return_type):
return 0
......@@ -695,7 +695,7 @@ class CFuncType(CType):
if not other_type.is_cfunction:
return 0
nargs = len(self.args)
if nargs <> len(other_type.args):
if nargs != len(other_type.args):
return 0
for i in range(as_cmethod, nargs):
if not self.args[i].type.subtype_of_resolved_type(other_type.args[i].type):
......@@ -703,9 +703,9 @@ class CFuncType(CType):
else:
self.args[i].needs_type_test = other_type.args[i].needs_type_test \
or not self.args[i].type.same_as(other_type.args[i].type)
if self.has_varargs <> other_type.has_varargs:
if self.has_varargs != other_type.has_varargs:
return 0
if self.optional_arg_count <> other_type.optional_arg_count:
if self.optional_arg_count != other_type.optional_arg_count:
return 0
if not self.return_type.subtype_of_resolved_type(other_type.return_type):
return 0
......
......@@ -42,7 +42,7 @@ def hash_source_file(path):
f = open(path, "rU")
text = f.read()
except IOError, e:
print "Unable to hash scanner source file (%s)" % e
print("Unable to hash scanner source file (%s)" % e)
return ""
finally:
f.close()
......@@ -69,12 +69,12 @@ def open_pickled_lexicon(expected_hash):
result = f
f = None
else:
print "Lexicon hash mismatch:" ###
print " expected", expected_hash ###
print " got ", actual_hash ###
print("Lexicon hash mismatch:") ###
print(" expected " + expected_hash) ###
print(" got " + actual_hash) ###
except IOError, e:
print "Warning: Unable to read pickled lexicon", lexicon_pickle
print e
print("Warning: Unable to read pickled lexicon " + lexicon_pickle)
print(e)
if f:
f.close()
return result
......@@ -89,37 +89,37 @@ def try_to_unpickle_lexicon():
if f:
if notify_lexicon_unpickling:
t0 = time()
print "Unpickling lexicon..."
print("Unpickling lexicon...")
lexicon = pickle.load(f)
f.close()
if notify_lexicon_unpickling:
t1 = time()
print "Done (%.2f seconds)" % (t1 - t0)
print("Done (%.2f seconds)" % (t1 - t0))
def create_new_lexicon():
global lexicon
t0 = time()
print "Creating lexicon..."
print("Creating lexicon...")
lexicon = make_lexicon()
t1 = time()
print "Done (%.2f seconds)" % (t1 - t0)
print("Done (%.2f seconds)" % (t1 - t0))
def pickle_lexicon():
f = None
try:
f = open(lexicon_pickle, "wb")
except IOError:
print "Warning: Unable to save pickled lexicon in", lexicon_pickle
print("Warning: Unable to save pickled lexicon in " + lexicon_pickle)
if f:
if notify_lexicon_pickling:
t0 = time()
print "Pickling lexicon..."
print("Pickling lexicon...")
pickle.dump(lexicon_hash, f, binary_lexicon_pickle)
pickle.dump(lexicon, f, binary_lexicon_pickle)
f.close()
if notify_lexicon_pickling:
t1 = time()
print "Done (%.2f seconds)" % (t1 - t0)
print("Done (%.2f seconds)" % (t1 - t0))
def get_lexicon():
global lexicon
......@@ -284,9 +284,9 @@ class PyrexScanner(Scanner):
self.indentation_char = c
#print "Scanner.indentation_action: setting indent_char to", repr(c)
else:
if self.indentation_char <> c:
if self.indentation_char != c:
self.error("Mixed use of tabs and spaces")
if text.replace(c, "") <> "":
if text.replace(c, "") != "":
self.error("Mixed use of tabs and spaces")
# Figure out how many indents/dedents to do
current_level = self.current_level()
......@@ -304,7 +304,7 @@ class PyrexScanner(Scanner):
self.indentation_stack.pop()
self.produce('DEDENT', '')
#print "...current level now", self.current_level() ###
if new_level <> self.current_level():
if new_level != self.current_level():
self.error("Inconsistent indentation")
def eof_action(self, text):
......@@ -328,7 +328,7 @@ class PyrexScanner(Scanner):
t = self.sy
else:
t = "%s %s" % (self.sy, self.systring)
print "--- %3d %2d %s" % (line, col, t)
print("--- %3d %2d %s" % (line, col, t))
def put_back(self, sy, systring):
self.unread(self.sy, self.systring)
......@@ -380,5 +380,5 @@ class PyrexScanner(Scanner):
def expect_newline(self, message = "Expected a newline"):
# Expect either a newline or end of file
if self.sy <> 'EOF':
if self.sy != 'EOF':
self.expect('NEWLINE', message)
......@@ -304,12 +304,12 @@ class Scope:
return entry
def check_previous_typedef_flag(self, entry, typedef_flag, pos):
if typedef_flag <> entry.type.typedef_flag:
if typedef_flag != entry.type.typedef_flag:
error(pos, "'%s' previously declared using '%s'" % (
entry.name, ("cdef", "ctypedef")[entry.type.typedef_flag]))
def check_previous_visibility(self, entry, visibility, pos):
if entry.visibility <> visibility:
if entry.visibility != visibility:
error(pos, "'%s' previously declared as '%s'" % (
entry.name, entry.visibility))
......@@ -334,7 +334,7 @@ class Scope:
cname = None, visibility = 'private', is_cdef = 0):
# Add an entry for a variable.
if not cname:
if visibility <> 'private':
if visibility != 'private':
cname = name
else:
cname = self.mangle(Naming.var_prefix, name)
......@@ -361,24 +361,24 @@ class Scope:
# Add an entry for a C function.
entry = self.lookup_here(name)
if entry:
if visibility <> 'private' and visibility <> entry.visibility:
if visibility != 'private' and visibility != entry.visibility:
warning(pos, "Function '%s' previously declared as '%s'" % (name, entry.visibility), 1)
if not entry.type.same_as(type):
warning(pos, "Function signature does not match previous declaration", 1)
entry.type = type
else:
if not cname:
if api or visibility <> 'private':
if api or visibility != 'private':
cname = name
else:
cname = self.mangle(Naming.func_prefix, name)
entry = self.add_cfunction(name, type, pos, cname, visibility)
entry.func_cname = cname
if in_pxd and visibility <> 'extern':
if in_pxd and visibility != 'extern':
entry.defined_in_pxd = 1
if api:
entry.api = 1
if not defining and not in_pxd and visibility <> 'extern':
if not defining and not in_pxd and visibility != 'extern':
error(pos, "Non-extern C function declared but not defined")
return entry
......@@ -442,7 +442,7 @@ class Scope:
# Python identifier, it will be interned.
if not entry.pystring_cname:
value = entry.init
if identifier_pattern.match(value):
if identifier_pattern.match(value) and isinstance(value, str):
entry.pystring_cname = self.intern(value)
entry.is_interned = 1
else:
......@@ -577,12 +577,6 @@ class BuiltinScope(Scope):
else:
Scope.__init__(self, "__builtin__", PreImportScope(), None)
for name, definition in self.builtin_functions.iteritems():
if len(definition) < 4: definition.append(None) # exception_value
if len(definition) < 5: definition.append(False) # exception_check
cname, type, arg_types, exception_value, exception_check = definition
function = CFuncType(type, [CFuncTypeArg("", t, None) for t in arg_types], False, exception_value, exception_check)
self.add_cfunction(name, function, None, cname, False)
for name, definition in self.builtin_entries.iteritems():
cname, type = definition
self.declare_var(name, type, None, cname)
......@@ -612,30 +606,13 @@ class BuiltinScope(Scope):
def builtin_scope(self):
return self
# TODO: merge this into builtin_function_table when error handling in Pyrex
# is fixed. Also handle pyrex types as functions.
builtin_functions = {
"cmp": ["PyObject_Compare", c_int_type, (py_object_type, py_object_type), None, True],
"unicode": ["PyObject_Unicode", py_object_type, (py_object_type, ), 0],
"type": ["PyObject_Type", py_object_type, (py_object_type, ), 0],
# "str": ["PyObject_Str", py_object_type, (py_object_type, ), 0],
# "int": ["PyNumber_Int", py_object_type, (py_object_type, ), 0],
# "long": ["PyNumber_Long", py_object_type, (py_object_type, ), 0],
# "float": ["PyNumber_Float", py_object_type, (py_object_type, ), 0],
# "list": ["PyNumber_List", py_object_type, (py_object_type, ), 0],
# "tuple": ["PySequence_Tuple", py_object_type, (py_object_type, ), 0],
}
builtin_entries = {
"int": ["((PyObject*)&PyInt_Type)", py_object_type],
"long": ["((PyObject*)&PyLong_Type)", py_object_type],
"float": ["((PyObject*)&PyFloat_Type)", py_object_type],
"str": ["((PyObject*)&PyString_Type)", py_object_type],
"unicode":["((PyObject*)&PyUnicode_Type)", py_object_type],
"tuple": ["((PyObject*)&PyTuple_Type)", py_object_type],
"list": ["((PyObject*)&PyList_Type)", py_object_type],
"dict": ["((PyObject*)&PyDict_Type)", py_object_type],
......@@ -672,6 +649,7 @@ class ModuleScope(Scope):
# cimported_modules [ModuleScope] Modules imported with cimport
# intern_map {string : string} Mapping from Python names to interned strs
# interned_names [string] Interned names pending generation of declarations
# interned_nums [int/long] Interned numeric constants
# all_pystring_entries [Entry] Python string consts from all scopes
# types_imported {PyrexType : 1} Set of types for which import code generated
......@@ -871,7 +849,7 @@ class ModuleScope(Scope):
entry = None # Will cause an error when we redeclare it
else:
self.check_previous_typedef_flag(entry, typedef_flag, pos)
if base_type <> type.base_type:
if base_type != type.base_type:
error(pos, "Base type does not match previous declaration")
#
# Make a new entry if needed
......@@ -920,17 +898,17 @@ class ModuleScope(Scope):
entry.defined_in_pxd = 1
if implementing: # So that filenames in runtime exceptions refer to
entry.pos = pos # the .pyx file and not the .pxd file
if visibility <> 'private' and entry.visibility <> visibility:
if visibility != 'private' and entry.visibility != visibility:
error(pos, "Class '%s' previously declared as '%s'"
% (name, entry.visibility))
if api:
entry.api = 1
if objstruct_cname:
if type.objstruct_cname and type.objstruct_cname <> objstruct_cname:
if type.objstruct_cname and type.objstruct_cname != objstruct_cname:
error(pos, "Object struct name differs from previous declaration")
type.objstruct_cname = objstruct_cname
if typeobj_cname:
if type.typeobj_cname and type.typeobj_cname <> typeobj_cname:
if type.typeobj_cname and type.typeobj_cname != typeobj_cname:
error(pos, "Type object name differs from previous declaration")
type.typeobj_cname = typeobj_cname
#
......@@ -974,12 +952,12 @@ class ModuleScope(Scope):
#
debug_check_c_classes = 0
if debug_check_c_classes:
print "Scope.check_c_classes: checking scope", self.qualified_name
print("Scope.check_c_classes: checking scope " + self.qualified_name)
for entry in self.c_class_entries:
if debug_check_c_classes:
print "...entry", entry.name, entry
print "......type =", entry.type
print "......visibility =", entry.visibility
print("...entry %s %s" % (entry.name, entry))
print("......type = " + entry.type)
print("......visibility = " + entry.visibility)
type = entry.type
name = entry.name
visibility = entry.visibility
......@@ -987,7 +965,7 @@ class ModuleScope(Scope):
if not type.scope:
error(entry.pos, "C class '%s' is declared but not defined" % name)
# Generate typeobj_cname
if visibility <> 'extern' and not type.typeobj_cname:
if visibility != 'extern' and not type.typeobj_cname:
type.typeobj_cname = self.mangle(Naming.typeobj_prefix, name)
## Generate typeptr_cname
#type.typeptr_cname = self.mangle(Naming.typeptr_prefix, name)
......@@ -1076,7 +1054,7 @@ class StructOrUnionScope(Scope):
if type.is_pyobject and not allow_pyobject:
error(pos,
"C struct/union member cannot be a Python object")
if visibility <> 'private':
if visibility != 'private':
error(pos,
"C struct/union member cannot be declared %s" % visibility)
return entry
......@@ -1170,7 +1148,7 @@ class CClassScope(ClassScope):
def __init__(self, name, outer_scope, visibility):
ClassScope.__init__(self, name, outer_scope)
if visibility <> 'extern':
if visibility != 'extern':
self.method_table_cname = outer_scope.mangle(Naming.methtab_prefix, name)
self.member_table_cname = outer_scope.mangle(Naming.memtab_prefix, name)
self.getset_table_cname = outer_scope.mangle(Naming.gstab_prefix, name)
......
......@@ -147,7 +147,7 @@ class SlotDescriptor:
def generate_dynamic_init_code(self, scope, code):
if self.is_initialised_dynamically:
value = self.slot_code(scope)
if value <> "0":
if value != "0":
code.putln("%s.%s = %s;" % (
scope.parent_type.typeobj_cname,
self.slot_name,
......
......@@ -6,7 +6,7 @@
def print_call_chain(*args):
import sys
print " ".join(map(str, args))
print(" ".join(map(str, args)))
f = sys._getframe(1)
while f:
name = f.f_code.co_name
......@@ -15,6 +15,6 @@ def print_call_chain(*args):
c = getattr(s, "__class__", None)
if c:
name = "%s.%s" % (c.__name__, name)
print "Called from:", name, f.f_lineno
print("Called from: %s %s" % (name, f.f_lineno))
f = f.f_back
print "-" * 70
print("-" * 70)
......@@ -69,10 +69,10 @@ def c_compile(c_file, verbose_flag = 0, cplus = 0, obj_suffix = ".o"):
compiler = compilers[bool(cplus)]
args = [compiler] + compiler_options + include_options + [c_file, "-o", o_file]
if verbose_flag or verbose:
print " ".join(args)
print(" ".join(args))
#print compiler, args ###
status = os.spawnvp(os.P_WAIT, compiler, args)
if status <> 0:
if status != 0:
raise CCompilerError("C compiler returned status %s" % status)
return o_file
......@@ -87,8 +87,8 @@ def c_link_list(obj_files, verbose_flag = 0, cplus = 0):
linker = linkers[bool(cplus)]
args = [linker] + linker_options + obj_files + ["-o", out_file]
if verbose_flag or verbose:
print " ".join(args)
print(" ".join(args))
status = os.spawnvp(os.P_WAIT, linker, args)
if status <> 0:
if status != 0:
raise CCompilerError("Linker returned status %s" % status)
return out_file
......@@ -124,8 +124,8 @@ def test_c_compile(link = 0):
except PyrexError, e:
#print "Caught a PyrexError:" ###
#print repr(e) ###
print "%s.%s:" % (e.__class__.__module__,
e.__class__.__name__), e
print("%s.%s: %s" % (e.__class__.__module__,
e.__class__.__name__, e))
sys.exit(1)
else:
obj = arg
......
......@@ -111,7 +111,7 @@ class Lexicon:
tables = None # StateTableMachine
def __init__(self, specifications, debug = None, debug_flags = 7, timings = None):
if type(specifications) <> types.ListType:
if type(specifications) != types.ListType:
raise Errors.InvalidScanner("Scanner definition is not a list")
if timings:
from Timing import time
......@@ -176,9 +176,9 @@ class Lexicon:
raise e.__class__("Token number %d: %s" % (token_number, e))
def parse_token_definition(self, token_spec):
if type(token_spec) <> types.TupleType:
if type(token_spec) != types.TupleType:
raise Errors.InvalidToken("Token definition is not a tuple")
if len(token_spec) <> 2:
if len(token_spec) != 2:
raise Errors.InvalidToken("Wrong number of items in token definition")
pattern, action = token_spec
if not isinstance(pattern, Regexps.RE):
......
......@@ -182,7 +182,7 @@ class FastMachine:
code0, code1 = event
if code0 == -maxint:
state['else'] = new_state
elif code1 <> maxint:
elif code1 != maxint:
while code0 < code1:
state[chr(code0)] = new_state
code0 = code0 + 1
......
......@@ -152,12 +152,12 @@ class RE:
self.wrong_type(num, value, "Plex.RE instance")
def check_string(self, num, value):
if type(value) <> type(''):
if type(value) != type(''):
self.wrong_type(num, value, "string")
def check_char(self, num, value):
self.check_string(num, value)
if len(value) <> 1:
if len(value) != 1:
raise Errors.PlexValueError("Invalid value for argument %d of Plex.%s."
"Expected a string of length 1, got: %s" % (
num, self.__class__.__name__, repr(value)))
......@@ -192,7 +192,7 @@ class RE:
## def build_machine(self, m, initial_state, final_state, match_bol, nocase):
## c = self.char
## if match_bol and c <> BOL:
## if match_bol and c != BOL:
## s1 = self.build_opt(m, initial_state, BOL)
## else:
## s1 = initial_state
......
......@@ -122,8 +122,8 @@ class Scanner:
action = self.run_machine_inlined()
if action:
if self.trace:
print "Scanner: read: Performing", action, "%d:%d" % (
self.start_pos, self.cur_pos)
print("Scanner: read: Performing %s %d:%d" % (
action, self.start_pos, self.cur_pos))
base = self.buf_start_pos
text = self.buffer[self.start_pos - base : self.cur_pos - base]
return (text, action)
......@@ -163,8 +163,8 @@ class Scanner:
trace = self.trace
while 1:
if trace: #TRACE#
print "State %d, %d/%d:%s -->" % ( #TRACE#
state['number'], input_state, cur_pos, repr(cur_char)), #TRACE#
print("State %d, %d/%d:%s -->" % ( #TRACE#
state['number'], input_state, cur_pos, repr(cur_char))) #TRACE#
# Begin inlined self.save_for_backup()
#action = state.action #@slow
action = state['action'] #@fast
......@@ -179,7 +179,7 @@ class Scanner:
new_state = c and state.get('else') #@fast
if new_state:
if trace: #TRACE#
print "State %d" % new_state['number'] #TRACE#
print("State %d" % new_state['number']) #TRACE#
state = new_state
# Begin inlined: self.next_char()
if input_state == 1:
......@@ -228,7 +228,7 @@ class Scanner:
# End inlined self.next_char()
else: # not new_state
if trace: #TRACE#
print "blocked" #TRACE#
print("blocked") #TRACE#
# Begin inlined: action = self.back_up()
if backup_state:
(action, cur_pos, cur_line, cur_line_start,
......@@ -245,7 +245,7 @@ class Scanner:
self.next_pos = next_pos
if trace: #TRACE#
if action: #TRACE#
print "Doing", action #TRACE#
print("Doing " + action) #TRACE#
return action
# def transition(self):
......@@ -288,7 +288,7 @@ class Scanner:
def next_char(self):
input_state = self.input_state
if self.trace:
print "Scanner: next:", " "*20, "[%d] %d" % (input_state, self.cur_pos),
print("Scanner: next: %s [%d] %d" % (" "*20, input_state, self.cur_pos))
if input_state == 1:
self.cur_pos = self.next_pos
c = self.read_char()
......@@ -314,7 +314,7 @@ class Scanner:
else: # input_state = 5
self.cur_char = ''
if self.trace:
print "--> [%d] %d %s" % (input_state, self.cur_pos, repr(self.cur_char))
print("--> [%d] %d %s" % (input_state, self.cur_pos, repr(self.cur_char)))
# def read_char(self):
# """
......
......@@ -95,9 +95,9 @@ class REParser:
if self.c == ']':
char_list.append(']')
self.next()
while not self.end and self.c <> ']':
while not self.end and self.c != ']':
c1 = self.get()
if self.c == '-' and self.lookahead(1) <> ']':
if self.c == '-' and self.lookahead(1) != ']':
self.next()
c2 = self.get()
for a in xrange(ord(c1), ord(c2) + 1):
......
......@@ -190,7 +190,7 @@ class TransitionMap:
def check(self):
"""Check data structure integrity."""
if not self.map[-3] < self.map[-1]:
print self
print(self)
assert 0
def dump(self, file):
......
......@@ -49,10 +49,10 @@ def c_compile(c_file, verbose_flag = 0, cplus = 0, obj_suffix = ".o"):
compiler = compilers[bool(cplus)]
args = [compiler] + compiler_options + include_options + [c_file, "-o", o_file]
if verbose_flag or verbose:
print " ".join(args)
print(" ".join(args))
#print compiler, args ###
status = os.spawnvp(os.P_WAIT, compiler, args)
if status <> 0:
if status != 0:
raise CCompilerError("C compiler returned status %s" % status)
return o_file
......@@ -67,8 +67,8 @@ def c_link_list(obj_files, verbose_flag = 0, cplus = 0):
linker = linkers[bool(cplus)]
args = [linker] + linker_options + obj_files + ["-o", out_file]
if verbose_flag or verbose:
print " ".join(args)
print(" ".join(args))
status = os.spawnvp(os.P_WAIT, linker, args)
if status <> 0:
if status != 0:
raise CCompilerError("Linker returned status %s" % status)
return out_file