Commit 7f26ba41 authored by scoder, committed by GitHub

Fix many indentation and whitespace issues throughout the code base (GH-3673)

… and enforce them with pycodestyle.
parent 975a43a6
......@@ -28,7 +28,7 @@ if PYLIB_DYN == PYLIB:
# no shared library
PYLIB_DYN = ''
else:
PYLIB_DYN = os.path.splitext(PYLIB_DYN[3:])[0] # 'lib(XYZ).so' -> XYZ
PYLIB_DYN = os.path.splitext(PYLIB_DYN[3:])[0] # 'lib(XYZ).so' -> XYZ
CC = get_config_var('CC', os.environ.get('CC', ''))
CFLAGS = get_config_var('CFLAGS') + ' ' + os.environ.get('CFLAGS', '')
......@@ -65,12 +65,8 @@ def runcmd(cmd, shell=True):
else:
_debug(' '.join(cmd))
try:
import subprocess
except ImportError: # Python 2.3 ...
returncode = os.system(cmd)
else:
returncode = subprocess.call(cmd, shell=shell)
import subprocess
returncode = subprocess.call(cmd, shell=shell)
if returncode:
sys.exit(returncode)
......@@ -105,7 +101,7 @@ def build(input_file, compiler_args=(), force=False):
if not force and os.path.abspath(exe_file) == os.path.abspath(input_file):
raise ValueError("Input and output file names are the same, refusing to overwrite")
if (not force and os.path.exists(exe_file) and os.path.exists(input_file)
and os.path.getmtime(input_file) <= os.path.getmtime(exe_file)):
and os.path.getmtime(input_file) <= os.path.getmtime(exe_file)):
_debug("File is up to date, not regenerating %s", exe_file)
return exe_file
cycompile(input_file, compiler_args)
......
......@@ -1028,7 +1028,7 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
# setup for out of place build directory if enabled
if build_dir:
if os.path.isabs(c_file):
warnings.warn("build_dir has no effect for absolute source paths")
warnings.warn("build_dir has no effect for absolute source paths")
c_file = os.path.join(build_dir, c_file)
dir = os.path.dirname(c_file)
safe_makedirs_once(dir)
......@@ -1289,7 +1289,7 @@ def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
else:
fingerprint_file = zip_fingerprint_file
with contextlib.closing(zipfile.ZipFile(
fingerprint_file + '.tmp', 'w', zipfile_compression_mode)) as zip:
fingerprint_file + '.tmp', 'w', zipfile_compression_mode)) as zip:
for artifact in artifacts:
zip.write(artifact, os.path.basename(artifact))
os.rename(fingerprint_file + '.tmp', fingerprint_file)
......
......@@ -27,14 +27,14 @@ class TestCythonizeArgsParser(TestCase):
empty_containers = ['directives', 'compile_time_env', 'options', 'excludes']
are_none = ['language_level', 'annotate', 'build', 'build_inplace', 'force', 'quiet', 'lenient', 'keep_going', 'no_docstrings']
for opt_name in empty_containers:
if len(getattr(options, opt_name))!=0 and (not opt_name in skip):
if len(getattr(options, opt_name))!=0 and (opt_name not in skip):
self.assertEqual(opt_name,"", msg="For option "+opt_name)
return False
for opt_name in are_none:
if (getattr(options, opt_name) is not None) and (not opt_name in skip):
if (getattr(options, opt_name) is not None) and (opt_name not in skip):
self.assertEqual(opt_name,"", msg="For option "+opt_name)
return False
if options.parallel!=parallel_compiles and (not 'parallel' in skip):
if options.parallel!=parallel_compiles and ('parallel' not in skip):
return False
return True
......@@ -112,8 +112,8 @@ class TestCythonizeArgsParser(TestCase):
def test_directives_wrong(self):
directives = {
'auto_pickle': 42, # for bool type
'auto_pickle': 'NONONO', # for bool type
'auto_pickle': 42, # for bool type
'auto_pickle': 'NONONO', # for bool type
'c_string_type': 'bites',
#'c_string_encoding' : 'a',
#'language_level' : 4,
......@@ -167,14 +167,14 @@ class TestCythonizeArgsParser(TestCase):
self.assertFalse(args)
self.assertTrue(self.are_default(options, ['options']))
self.assertEqual(options.options['docstrings'], True)
self.assertEqual(options.options['buffer_max_dims'], True) # really?
self.assertEqual(options.options['buffer_max_dims'], True) # really?
def test_option_multiple_v2(self):
options, args = self.parse_args(['-s', 'docstrings=True,buffer_max_dims=8'])
self.assertFalse(args)
self.assertTrue(self.are_default(options, ['options']))
self.assertEqual(options.options['docstrings'], True)
self.assertEqual(options.options['buffer_max_dims'], True) # really?
self.assertEqual(options.options['buffer_max_dims'], True) # really?
def test_option_value_yes(self):
options, args = self.parse_args(['-s', 'docstrings=YeS'])
......
......@@ -54,4 +54,3 @@ class TestStripLiterals(CythonTest):
def test_extern(self):
self.t("cdef extern from 'a.h': # comment",
"cdef extern from '_L1_': #_L2_")
......@@ -360,7 +360,7 @@ class StatementWriter(DeclarationWriter):
self.dedent()
def visit_SequenceNode(self, node):
self.comma_separated_list(node.args) # Might need to discover whether we need () around tuples...hmm...
self.comma_separated_list(node.args) # Might need to discover whether we need () around tuples...hmm...
def visit_ExprStatNode(self, node):
self.startline()
......
......@@ -22,7 +22,7 @@ from .. import Utils
class AnnotationCCodeWriter(CCodeWriter):
# also used as marker for detection of complete code emission in tests
COMPLETE_CODE_TITLE = "Complete cythonized code"
......@@ -294,7 +294,7 @@ class AnnotationCCodeWriter(CCodeWriter):
# now the whole c-code if needed:
if self.show_entire_c_code:
outlist.append(u'<p><div class="cython">')
onclick_title = u"<pre class='cython line'{onclick}>+ {title}</pre>\n";
onclick_title = u"<pre class='cython line'{onclick}>+ {title}</pre>\n"
outlist.append(onclick_title.format(
onclick=self._onclick_attr,
title=AnnotationCCodeWriter.COMPLETE_CODE_TITLE,
......
......@@ -201,7 +201,7 @@ class EmbedSignature(CythonTransform):
def visit_CFuncDefNode(self, node):
if not self.current_directives['embedsignature']:
return node
if not node.overridable: # not cpdef FOO(...):
if not node.overridable: # not cpdef FOO(...):
return node
signature = self._fmt_signature(
......
......@@ -85,7 +85,7 @@ class IntroduceBufferAuxiliaryVars(CythonTransform):
aux_var = scope.declare_var(name=None, cname=cname,
type=type, pos=node.pos)
if entry.is_arg:
aux_var.used = True # otherwise, NameNode will mark whether it is used
aux_var.used = True # otherwise, NameNode will mark whether it is used
return aux_var
......@@ -111,9 +111,9 @@ class IntroduceBufferAuxiliaryVars(CythonTransform):
#
# Analysis
#
buffer_options = ("dtype", "ndim", "mode", "negative_indices", "cast") # ordered!
buffer_options = ("dtype", "ndim", "mode", "negative_indices", "cast") # ordered!
buffer_defaults = {"ndim": 1, "mode": "full", "negative_indices": True, "cast": False}
buffer_positional_options_count = 1 # anything beyond this needs keyword argument
buffer_positional_options_count = 1 # anything beyond this needs keyword argument
ERR_BUF_OPTION_UNKNOWN = '"%s" is not a buffer option'
ERR_BUF_TOO_MANY = 'Too many buffer options'
......@@ -146,12 +146,12 @@ def analyse_buffer_options(globalpos, env, posargs, dictargs, defaults=None, nee
options = {}
for name, (value, pos) in dictargs.items():
if not name in buffer_options:
if name not in buffer_options:
raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name)
options[name] = value
for name, (value, pos) in zip(buffer_options, posargs):
if not name in buffer_options:
if name not in buffer_options:
raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name)
if name in options:
raise CompileError(pos, ERR_BUF_DUP % name)
......@@ -159,7 +159,7 @@ def analyse_buffer_options(globalpos, env, posargs, dictargs, defaults=None, nee
# Check that they are all there and copy defaults
for name in buffer_options:
if not name in options:
if name not in options:
try:
options[name] = defaults[name]
except KeyError:
......@@ -298,9 +298,10 @@ def put_unpack_buffer_aux_into_scope(buf_entry, code):
ln = []
for i in range(buf_entry.type.ndim):
for fldname in fldnames:
ln.append("%s.diminfo[%d].%s = %s.rcbuffer->pybuffer.%s[%d];" % \
(pybuffernd_struct, i, fldname,
pybuffernd_struct, fldname, i))
ln.append("%s.diminfo[%d].%s = %s.rcbuffer->pybuffer.%s[%d];" % (
pybuffernd_struct, i, fldname,
pybuffernd_struct, fldname, i,
))
code.putln(' '.join(ln))
def put_init_vars(entry, code):
......@@ -373,7 +374,7 @@ def put_assign_to_buffer(lhs_cname, rhs_cname, buf_entry,
code.putln("{") # Set up necessary stack for getbuffer
code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % buffer_type.dtype.struct_nesting_depth())
getbuffer = get_getbuffer_call(code, "%s", buffer_aux, buffer_type) # fill in object below
getbuffer = get_getbuffer_call(code, "%s", buffer_aux, buffer_type) # fill in object below
if is_initialized:
# Release any existing buffer
......@@ -419,7 +420,7 @@ def put_assign_to_buffer(lhs_cname, rhs_cname, buf_entry,
put_unpack_buffer_aux_into_scope(buf_entry, code)
code.putln('}')
code.putln("}") # Release stack
code.putln("}") # Release stack
def put_buffer_lookup_code(entry, index_signeds, index_cnames, directives,
......
......@@ -56,7 +56,7 @@ class BuiltinAttribute(object):
def __init__(self, py_name, cname=None, field_type=None, field_type_name=None):
self.py_name = py_name
self.cname = cname or py_name
self.field_type_name = field_type_name # can't do the lookup before the type is declared!
self.field_type_name = field_type_name # can't do the lookup before the type is declared!
self.field_type = field_type
def declare_in_type(self, self_type):
......
......@@ -718,9 +718,9 @@ class FunctionState(object):
self.can_trace = False
self.gil_owned = True
self.temps_allocated = [] # of (name, type, manage_ref, static)
self.temps_free = {} # (type, manage_ref) -> list of free vars with same type/managed status
self.temps_used_type = {} # name -> (type, manage_ref)
self.temps_allocated = [] # of (name, type, manage_ref, static)
self.temps_free = {} # (type, manage_ref) -> list of free vars with same type/managed status
self.temps_used_type = {} # name -> (type, manage_ref)
self.temp_counter = 0
self.closure_temps = None
......@@ -891,7 +891,7 @@ class FunctionState(object):
"""
return [(name, type)
for name, type, manage_ref in self.temps_in_use()
if manage_ref and type.is_pyobject]
if manage_ref and type.is_pyobject]
def all_managed_temps(self):
"""Return a list of (cname, type) tuples of refcount-managed Python objects.
......@@ -1096,10 +1096,10 @@ class GlobalState(object):
'h_code',
'filename_table',
'utility_code_proto_before_types',
'numeric_typedefs', # Let these detailed individual parts stay!,
'complex_type_declarations', # as the proper solution is to make a full DAG...
'type_declarations', # More coarse-grained blocks would simply hide
'utility_code_proto', # the ugliness, not fix it
'numeric_typedefs', # Let these detailed individual parts stay!,
'complex_type_declarations', # as the proper solution is to make a full DAG...
'type_declarations', # More coarse-grained blocks would simply hide
'utility_code_proto', # the ugliness, not fix it
'module_declarations',
'typeinfo',
'before_global_var',
......@@ -1135,8 +1135,8 @@ class GlobalState(object):
self.code_config = code_config
self.common_utility_include_dir = common_utility_include_dir
self.parts = {}
self.module_node = module_node # because some utility code generation needs it
# (generating backwards-compatible Get/ReleaseBuffer
self.module_node = module_node # because some utility code generation needs it
# (generating backwards-compatible Get/ReleaseBuffer
self.const_cnames_used = {}
self.string_const_index = {}
......@@ -2084,7 +2084,7 @@ class CCodeWriter(object):
def entry_as_pyobject(self, entry):
type = entry.type
if (not entry.is_self_arg and not entry.type.is_complete()
or entry.type.is_extension_type):
or entry.type.is_extension_type):
return "(PyObject *)" + entry.cname
else:
return entry.cname
......@@ -2386,7 +2386,7 @@ class CCodeWriter(object):
self.putln('__Pyx_RefNannyDeclarations')
def put_setup_refcount_context(self, name, acquire_gil=False):
name = name.as_c_string_literal() # handle unicode names
name = name.as_c_string_literal() # handle unicode names
if acquire_gil:
self.globalstate.use_utility_code(
UtilityCode.load_cached("ForceInitThreads", "ModuleSetupCode.c"))
......@@ -2401,7 +2401,7 @@ class CCodeWriter(object):
qualified_name should be the qualified name of the function.
"""
qualified_name = qualified_name.as_c_string_literal() # handle unicode names
qualified_name = qualified_name.as_c_string_literal() # handle unicode names
format_tuple = (
qualified_name,
Naming.clineno_cname if include_cline else 0,
......
......@@ -172,12 +172,12 @@ def error(position, message):
if position is None:
raise InternalError(message)
err = CompileError(position, message)
if DebugFlags.debug_exception_on_error: raise Exception(err) # debug
if DebugFlags.debug_exception_on_error: raise Exception(err) # debug
report_error(err)
return err
LEVEL = 1 # warn about all errors level 1 or higher
LEVEL = 1 # warn about all errors level 1 or higher
def _write_file_encode(file, line):
try:
......
This diff is collapsed.
......@@ -217,7 +217,7 @@ class ControlFlow(object):
visited.remove(self.entry_point)
for block in visited:
if block.empty():
for parent in block.parents: # Re-parent
for parent in block.parents: # Re-parent
for child in block.children:
parent.add_child(child)
block.detach()
......@@ -455,7 +455,7 @@ class GVContext(object):
start = min(block.positions)
stop = max(block.positions)
srcdescr = start[0]
if not srcdescr in self.sources:
if srcdescr not in self.sources:
self.sources[srcdescr] = list(srcdescr.get_lines())
lines = self.sources[srcdescr]
return '\\n'.join([l.strip() for l in lines[start[1] - 1:stop[1]]])
......@@ -621,7 +621,7 @@ def check_definitions(flow, compiler_directives):
# Unused result
for assmt in assignments:
if (not assmt.refs and not assmt.entry.is_pyclass_attr
and not assmt.entry.in_closure):
and not assmt.entry.in_closure):
if assmt.entry.cf_references and warn_unused_result:
if assmt.is_arg:
messages.warning(assmt.pos, "Unused argument value '%s'" %
......@@ -774,7 +774,7 @@ class ControlFlowAnalysis(CythonTransform):
entry = lhs.entry
else:
entry = self.env.lookup(lhs.name)
if entry is None: # TODO: This shouldn't happen...
if entry is None: # TODO: This shouldn't happen...
return
self.flow.mark_assignment(lhs, rhs, entry)
elif lhs.is_sequence_constructor:
......@@ -1035,7 +1035,7 @@ class ControlFlowAnalysis(CythonTransform):
elif isinstance(node, Nodes.AsyncForStatNode):
# not entirely correct, but good enough for now
self.mark_assignment(node.target, node.item)
else: # Parallel
else: # Parallel
self.mark_assignment(node.target)
# Body block
......
......@@ -658,7 +658,7 @@ class FusedCFuncDefNode(StatListNode):
# instance check body
""")
pyx_code.indent() # indent following code to function body
pyx_code.indent() # indent following code to function body
pyx_code.named_insertion_point("imports")
pyx_code.named_insertion_point("func_defs")
pyx_code.named_insertion_point("local_variable_declarations")
......
......@@ -47,8 +47,8 @@ def interpret_compiletime_options(optlist, optdict, type_env=None, type_args=())
raise CompileError(node.pos, "Type not allowed here.")
else:
if (sys.version_info[0] >=3 and
isinstance(node, StringNode) and
node.unicode_value is not None):
isinstance(node, StringNode) and
node.unicode_value is not None):
return (node.unicode_value, node.pos)
return (node.compile_time_value(empty_scope), node.pos)
......
......@@ -455,7 +455,7 @@ def run_pipeline(source, options, full_module_name=None, context=None):
full_module_name = Utils.decode_filename(full_module_name)
source_ext = os.path.splitext(source)[1]
options.configure_language_defaults(source_ext[1:]) # py/pyx
options.configure_language_defaults(source_ext[1:]) # py/pyx
if context is None:
context = Context.from_options(options)
......@@ -470,7 +470,7 @@ def run_pipeline(source, options, full_module_name=None, context=None):
if options.relative_path_in_code_position_comments:
rel_path = full_module_name.replace('.', os.sep) + source_ext
if not abs_path.endswith(rel_path):
rel_path = source # safety measure to prevent printing incorrect paths
rel_path = source # safety measure to prevent printing incorrect paths
else:
rel_path = abs_path
source_desc = FileSourceDescriptor(abs_path, rel_path)
......
......@@ -168,7 +168,7 @@ def valid_memslice_dtype(dtype, i=0):
valid_memslice_dtype(dtype.base_type, i + 1)) or
dtype.is_numeric or
dtype.is_pyobject or
dtype.is_fused or # accept this as it will be replaced by specializations later
dtype.is_fused or # accept this as it will be replaced by specializations later
(dtype.is_typedef and valid_memslice_dtype(dtype.typedef_base_type))
)
......@@ -655,13 +655,13 @@ def is_cf_contig(specs):
is_c_contig = True
elif (specs[-1] == ('direct','contig') and
all(axis == ('direct','follow') for axis in specs[:-1])):
all(axis == ('direct','follow') for axis in specs[:-1])):
# c_contiguous: 'follow', 'follow', ..., 'follow', 'contig'
is_c_contig = True
elif (len(specs) > 1 and
specs[0] == ('direct','contig') and
all(axis == ('direct','follow') for axis in specs[1:])):
specs[0] == ('direct','contig') and
all(axis == ('direct','follow') for axis in specs[1:])):
# f_contiguous: 'contig', 'follow', 'follow', ..., 'follow'
is_f_contig = True
......@@ -836,7 +836,7 @@ overlapping_utility = load_memview_c_utility("OverlappingSlices", context)
copy_contents_new_utility = load_memview_c_utility(
"MemviewSliceCopyTemplate",
context,
requires=[], # require cython_array_utility_code
requires=[], # require cython_array_utility_code
)
view_utility_code = load_memview_cy_utility(
......
......@@ -155,8 +155,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
self.create_import_star_conversion_utility_code(env)
for name, entry in sorted(env.entries.items()):
if (entry.create_wrapper and entry.scope is env
and entry.is_type and entry.type.is_enum):
entry.type.create_type_wrapper(env)
and entry.is_type and entry.type.is_enum):
entry.type.create_type_wrapper(env)
def process_implementation(self, options, result):
env = self.scope
......@@ -185,7 +185,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
h_vars = h_entries(env.var_entries)
h_funcs = h_entries(env.cfunc_entries)
h_extension_types = h_entries(env.c_class_entries)
if h_types or h_vars or h_funcs or h_extension_types:
if h_types or h_vars or h_funcs or h_extension_types:
result.h_file = replace_suffix_encoded(result.c_file, ".h")
h_code = Code.CCodeWriter()
c_code_config = generate_c_code_config(env, options)
......@@ -330,7 +330,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
for entry in api_vars:
type = CPtrType(entry.type)
cname = env.mangle(Naming.varptr_prefix_api, entry.name)
h_code.putln("static %s = 0;" % type.declaration_code(cname))
h_code.putln("static %s = 0;" % type.declaration_code(cname))
h_code.putln("#define %s (*%s)" % (entry.name, cname))
h_code.put(UtilityCode.load_as_string("PyIdentifierFromString", "ImportExport.c")[0])
if api_vars:
......@@ -865,7 +865,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if not entry.in_cinclude:
#print "generate_type_header_code:", entry.name, repr(entry.type) ###
type = entry.type
if type.is_typedef: # Must test this first!
if type.is_typedef: # Must test this first!
pass
elif type.is_struct_or_union or type.is_cpp_class:
self.generate_struct_union_predeclaration(entry, code)
......@@ -878,7 +878,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if not entry.in_cinclude:
#print "generate_type_header_code:", entry.name, repr(entry.type) ###
type = entry.type
if type.is_typedef: # Must test this first!
if type.is_typedef: # Must test this first!
self.generate_typedef(entry, code)
elif type.is_enum:
self.generate_enum_definition(entry, code)
......@@ -1019,60 +1019,60 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
arg_decls = ["void"]
arg_names = []
if is_implementing:
code.putln("%s(%s) {" % (type.cname, ", ".join(arg_decls)))
if py_attrs:
code.put_ensure_gil()
for attr in py_attrs:
code.put_init_var_to_py_none(attr, nanny=False)
if constructor:
code.putln("%s(%s);" % (constructor.cname, ", ".join(arg_names)))
if py_attrs:
code.put_release_ensured_gil()
code.putln("}")
code.putln("%s(%s) {" % (type.cname, ", ".join(arg_decls)))
if py_attrs:
code.put_ensure_gil()
for attr in py_attrs:
code.put_init_var_to_py_none(attr, nanny=False)
if constructor:
code.putln("%s(%s);" % (constructor.cname, ", ".join(arg_names)))
if py_attrs:
code.put_release_ensured_gil()
code.putln("}")
else:
code.putln("%s(%s);" % (type.cname, ", ".join(arg_decls)))
code.putln("%s(%s);" % (type.cname, ", ".join(arg_decls)))
if destructor or py_attrs or has_virtual_methods:
if has_virtual_methods:
code.put("virtual ")
if is_implementing:
code.putln("~%s() {" % type.cname)
if py_attrs:
code.put_ensure_gil()
if destructor:
code.putln("%s();" % destructor.cname)
if py_attrs:
for attr in py_attrs:
code.put_var_xdecref(attr, nanny=False)
code.put_release_ensured_gil()
code.putln("}")
code.putln("~%s() {" % type.cname)
if py_attrs:
code.put_ensure_gil()
if destructor:
code.putln("%s();" % destructor.cname)
if py_attrs:
for attr in py_attrs:
code.put_var_xdecref(attr, nanny=False)
code.put_release_ensured_gil()
code.putln("}")
else:
code.putln("~%s();" % type.cname)
code.putln("~%s();" % type.cname)
if py_attrs:
# Also need copy constructor and assignment operators.
if is_implementing:
code.putln("%s(const %s& __Pyx_other) {" % (type.cname, type.cname))
code.put_ensure_gil()
for attr in scope.var_entries:
if not attr.type.is_cfunction:
code.putln("%s = __Pyx_other.%s;" % (attr.cname, attr.cname))
code.put_var_incref(attr, nanny=False)
code.put_release_ensured_gil()
code.putln("}")
code.putln("%s& operator=(const %s& __Pyx_other) {" % (type.cname, type.cname))
code.putln("if (this != &__Pyx_other) {")
code.put_ensure_gil()
for attr in scope.var_entries:
if not attr.type.is_cfunction:
code.put_var_xdecref(attr, nanny=False)
code.putln("%s = __Pyx_other.%s;" % (attr.cname, attr.cname))
code.put_var_incref(attr, nanny=False)
code.put_release_ensured_gil()
code.putln("}")
code.putln("return *this;")
code.putln("}")
code.putln("%s(const %s& __Pyx_other) {" % (type.cname, type.cname))
code.put_ensure_gil()
for attr in scope.var_entries:
if not attr.type.is_cfunction:
code.putln("%s = __Pyx_other.%s;" % (attr.cname, attr.cname))
code.put_var_incref(attr, nanny=False)
code.put_release_ensured_gil()
code.putln("}")
code.putln("%s& operator=(const %s& __Pyx_other) {" % (type.cname, type.cname))
code.putln("if (this != &__Pyx_other) {")
code.put_ensure_gil()
for attr in scope.var_entries:
if not attr.type.is_cfunction:
code.put_var_xdecref(attr, nanny=False)
code.putln("%s = __Pyx_other.%s;" % (attr.cname, attr.cname))
code.put_var_incref(attr, nanny=False)
code.put_release_ensured_gil()
code.putln("}")
code.putln("return *this;")
code.putln("}")
else:
code.putln("%s(const %s& __Pyx_other);" % (type.cname, type.cname))
code.putln("%s& operator=(const %s& __Pyx_other);" % (type.cname, type.cname))
code.putln("%s(const %s& __Pyx_other);" % (type.cname, type.cname))
code.putln("%s& operator=(const %s& __Pyx_other);" % (type.cname, type.cname))
code.putln("};")
def generate_enum_definition(self, entry, code):
......@@ -1183,7 +1183,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
# Generate object struct definition for an
# extension type.
if not type.scope:
return # Forward declared but never defined
return # Forward declared but never defined
header, footer = \
self.sue_header_footer(type, "struct", type.objstruct_cname)
code.putln(header)
......@@ -1321,7 +1321,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if entry.visibility != 'extern':
type = entry.type
scope = type.scope
if scope: # could be None if there was an error
if scope: # could be None if there was an error
self.generate_exttype_vtable(scope, code)
self.generate_new_function(scope, code, entry)
self.generate_dealloc_function(scope, code)
......@@ -1769,7 +1769,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
slot_func = scope.mangle_internal("tp_clear")
base_type = scope.parent_type.base_type
if tp_slot.slot_code(scope) != slot_func:
return # never used
return # never used
have_entries, (py_attrs, py_buffers, memoryview_slices) = (
scope.get_refcounted_entries(include_gc_simple=False))
......
......@@ -48,12 +48,11 @@ vtable_prefix = pyrex_prefix + "vtable_"
vtabptr_prefix = pyrex_prefix + "vtabptr_"
vtabstruct_prefix = pyrex_prefix + "vtabstruct_"
unicode_vtabentry_prefix = pyrex_prefix + "Uvtabentry_"
# vtab entries aren't normally manged,
# but punycode names sometimes start with numbers leading
# to a C syntax error
# vtab entries aren't normally mangled,
# but punycode names sometimes start with numbers leading to a C syntax error
unicode_structmember_prefix = pyrex_prefix + "Umember_"
# as above -
# not normally manged but punycode names cause specific problems
# as above -
# not normally mangled but punycode names cause specific problems
opt_arg_prefix = pyrex_prefix + "opt_args_"
convert_func_prefix = pyrex_prefix + "convert_"
closure_scope_prefix = pyrex_prefix + "scope_"
......@@ -110,7 +109,7 @@ gilstate_cname = pyrex_prefix + "state"
skip_dispatch_cname = pyrex_prefix + "skip_dispatch"
empty_tuple = pyrex_prefix + "empty_tuple"
empty_bytes = pyrex_prefix + "empty_bytes"
empty_unicode = pyrex_prefix + "empty_unicode"
empty_unicode = pyrex_prefix + "empty_unicode"
print_function = pyrex_prefix + "print"
print_function_kwargs = pyrex_prefix + "print_kwargs"
cleanup_cname = pyrex_prefix + "module_cleanup"
......@@ -129,7 +128,7 @@ frame_cname = pyrex_prefix + "frame"
frame_code_cname = pyrex_prefix + "frame_code"
binding_cfunc = pyrex_prefix + "binding_PyCFunctionType"
fused_func_prefix = pyrex_prefix + 'fuse_'
quick_temp_cname = pyrex_prefix + "temp" # temp variable for quick'n'dirty temping
quick_temp_cname = pyrex_prefix + "temp" # temp variable for quick'n'dirty temping
tp_dict_version_temp = pyrex_prefix + "tp_dict_version"
obj_dict_version_temp = pyrex_prefix + "obj_dict_version"
type_dict_guard_temp = pyrex_prefix + "type_dict_guard"
......
This diff is collapsed.
......@@ -41,7 +41,7 @@ except ImportError:
try:
from __builtin__ import basestring
except ImportError:
basestring = str # Python 3
basestring = str # Python 3
def load_c_utility(name):
......@@ -1569,7 +1569,7 @@ class EarlyReplaceBuiltinCalls(Visitor.EnvTransform):
utility_code = utility_code)
def _error_wrong_arg_count(self, function_name, node, args, expected=None):
if not expected: # None or 0
if not expected: # None or 0
arg_str = ''
elif isinstance(expected, basestring) or expected > 1:
arg_str = '...'
......@@ -1803,7 +1803,7 @@ class EarlyReplaceBuiltinCalls(Visitor.EnvTransform):
if not yield_expression.is_literal or not yield_expression.type.is_int:
return node
except AttributeError:
return node # in case we don't have a type yet
return node # in case we don't have a type yet
# special case: old Py2 backwards compatible "sum([int_const for ...])"
# can safely be unpacked into a genexpr
......@@ -2236,7 +2236,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
return node
def _error_wrong_arg_count(self, function_name, node, args, expected=None):
if not expected: # None or 0
if not expected: # None or 0
arg_str = ''
elif isinstance(expected, basestring) or expected > 1:
arg_str = '...'
......@@ -2669,7 +2669,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
if cfunc_name is None:
arg_type = arg.type
if ((arg_type.is_extension_type or arg_type.is_builtin_type)
and arg_type.entry.qualified_name in self._ext_types_with_pysize):
and arg_type.entry.qualified_name in self._ext_types_with_pysize):
cfunc_name = 'Py_SIZE'
else:
return node
......@@ -3839,7 +3839,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
if not stop:
# use strlen() to find the string length, just as CPython would
if not string_node.is_name:
string_node = UtilNodes.LetRefNode(string_node) # used twice
string_node = UtilNodes.LetRefNode(string_node) # used twice
temps.append(string_node)
stop = ExprNodes.PythonCapiCallNode(
string_node.pos, "strlen", self.Pyx_strlen_func_type,
......
......@@ -29,9 +29,8 @@ class ShouldBeFromDirective(object):
raise RuntimeError(repr(self))
def __repr__(self):
return (
"Illegal access of '%s' from Options module rather than directive '%s'"
% (self.options_name, self.directive_name))
return "Illegal access of '%s' from Options module rather than directive '%s'" % (
self.options_name, self.directive_name)
"""
......
......@@ -243,7 +243,7 @@ class PostParse(ScopeTrackingTransform):
if decl is not declbase:
raise PostParseError(decl.pos, ERR_INVALID_SPECIALATTR_TYPE)
handler(decl)
continue # Remove declaration
continue # Remove declaration
raise PostParseError(decl.pos, ERR_CDEF_INCLASS)
first_assignment = self.scope_type != 'module'
stats.append(Nodes.SingleAssignmentNode(node.pos,
......@@ -433,7 +433,7 @@ def sort_common_subsequences(items):
return b.is_sequence_constructor and contains(b.args, a)
for pos, item in enumerate(items):
key = item[1] # the ResultRefNode which has already been injected into the sequences
key = item[1] # the ResultRefNode which has already been injected into the sequences
new_pos = pos
for i in range(pos-1, -1, -1):
if lower_than(key, items[i][0]):
......@@ -463,7 +463,7 @@ def flatten_parallel_assignments(input, output):
# recursively, so that nested structures get matched as well.
rhs = input[-1]
if (not (rhs.is_sequence_constructor or isinstance(rhs, ExprNodes.UnicodeNode))
or not sum([lhs.is_sequence_constructor for lhs in input[:-1]])):
or not sum([lhs.is_sequence_constructor for lhs in input[:-1]])):
output.append(input)
return
......@@ -547,7 +547,7 @@ def map_starred_assignment(lhs_targets, starred_assignments, lhs_args, rhs_args)
targets.append(expr)
# the starred target itself, must be assigned a (potentially empty) list
target = lhs_args[starred].target # unpack starred node
target = lhs_args[starred].target # unpack starred node
starred_rhs = rhs_args[starred:]
if lhs_remaining:
starred_rhs = starred_rhs[:-lhs_remaining]
......@@ -594,7 +594,7 @@ class PxdPostParse(CythonTransform, SkipDeclarations):
if (isinstance(node, Nodes.DefNode) and self.scope_type == 'cclass'
and node.name in ('__getbuffer__', '__releasebuffer__')):
err = None # allow these slots
err = None # allow these slots
if isinstance(node, Nodes.CFuncDefNode):
if (u'inline' in node.modifiers and
......@@ -605,7 +605,7 @@ class PxdPostParse(CythonTransform, SkipDeclarations):
elif node.api:
err = self.ERR_NOGO_WITH_INLINE % 'api'
else:
err = None # allow inline function
err = None # allow inline function
else:
err = self.ERR_INLINE_ONLY
......@@ -1724,8 +1724,8 @@ if VALUE is not None:
if stats:
node.body.stats += stats
if (node.visibility != 'extern'
and not node.scope.lookup('__reduce__')
and not node.scope.lookup('__reduce_ex__')):
and not node.scope.lookup('__reduce__')
and not node.scope.lookup('__reduce_ex__')):
self._inject_pickle_methods(node)
return node
......@@ -1873,8 +1873,8 @@ if VALUE is not None:
for decorator in old_decorators:
func = decorator.decorator
if (not func.is_name or
func.name not in ('staticmethod', 'classmethod') or
env.lookup_here(func.name)):
func.name not in ('staticmethod', 'classmethod') or
env.lookup_here(func.name)):
# not a static or classmethod
decorators.append(decorator)
......@@ -2073,7 +2073,7 @@ if VALUE is not None:
# (so it can't happen later).
# Note that we don't return the original node, as it is
# never used after this phase.
if True: # private (default)
if True: # private (default)
return None
self_value = ExprNodes.AttributeNode(
......@@ -2165,7 +2165,7 @@ if VALUE is not None:
if node.name in self.seen_vars_stack[-1]:
entry = self.current_env().lookup(node.name)
if (entry is None or entry.visibility != 'extern'
and not entry.scope.is_c_class_scope):
and not entry.scope.is_c_class_scope):
warning(node.pos, "cdef variable '%s' declared after it is used" % node.name, 2)
self.visitchildren(node)
return node
......@@ -2179,7 +2179,7 @@ if VALUE is not None:
child_node = self.visit(node.node)
if not child_node:
return None
if type(child_node) is list: # Assignment synthesized
if type(child_node) is list: # Assignment synthesized
node.child_node = child_node[0]
return [node] + child_node[1:]
node.node = child_node
......@@ -2539,7 +2539,7 @@ class AlignFunctionDefinitions(CythonTransform):
return None
node = node.as_cfunction(pxd_def)
elif (self.scope.is_module_scope and self.directives['auto_cpdef']
and not node.name in self.imported_names
and node.name not in self.imported_names
and node.is_cdef_func_compatible()):
# FIXME: cpdef-ing should be done in analyse_declarations()
node = node.as_cfunction(scope=self.scope)
......@@ -3131,9 +3131,9 @@ class TransformBuiltinMethods(EnvTransform):
error(self.pos, "Builtin 'vars()' called with wrong number of args, expected 0-1, got %d"
% len(node.args))
if len(node.args) > 0:
return node # nothing to do
return node # nothing to do
return ExprNodes.LocalsExprNode(pos, self.current_scope_node(), lenv)
else: # dir()
else: # dir()
if len(node.args) > 1:
error(self.pos, "Builtin 'dir()' called with wrong number of args, expected 0-1, got %d"
% len(node.args))
......@@ -3186,8 +3186,7 @@ class TransformBuiltinMethods(EnvTransform):
return node
# Inject no-args super
def_node = self.current_scope_node()
if (not isinstance(def_node, Nodes.DefNode) or not def_node.args or
len(self.env_stack) < 2):
if not isinstance(def_node, Nodes.DefNode) or not def_node.args or len(self.env_stack) < 2:
return node
class_node, class_scope = self.env_stack[-2]
if class_scope.is_py_class_scope:
......@@ -3335,8 +3334,8 @@ class ReplaceFusedTypeChecks(VisitorTransform):
def visit_PrimaryCmpNode(self, node):
with Errors.local_errors(ignore=True):
type1 = node.operand1.analyse_as_type(self.local_scope)
type2 = node.operand2.analyse_as_type(self.local_scope)
type1 = node.operand1.analyse_as_type(self.local_scope)
type2 = node.operand2.analyse_as_type(self.local_scope)
if type1 and type2:
false_node = ExprNodes.BoolNode(node.pos, value=False)
......@@ -3502,10 +3501,10 @@ class DebugTransform(CythonTransform):
def visit_NameNode(self, node):
if (self.register_stepinto and
node.type is not None and
node.type.is_cfunction and
getattr(node, 'is_called', False) and
node.entry.func_cname is not None):
node.type is not None and
node.type.is_cfunction and
getattr(node, 'is_called', False) and
node.entry.func_cname is not None):
# don't check node.entry.in_cinclude, as 'cdef extern: ...'
# declared functions are not 'in_cinclude'.
# This means we will list called 'cdef' functions as
......
......@@ -103,7 +103,7 @@ def p_binop_expr(s, ops, p_sub_expr):
if Future.division in s.context.future_directives:
n1.truedivision = True
else:
n1.truedivision = None # unknown
n1.truedivision = None # unknown
return n1
#lambdef: 'lambda' [varargslist] ':' test
......@@ -442,7 +442,7 @@ def p_trailer(s, node1):
return p_call(s, node1)
elif s.sy == '[':
return p_index(s, node1)
else: # s.sy == '.'
else: # s.sy == '.'
s.next()
name = p_ident(s)
return ExprNodes.AttributeNode(pos,
......@@ -2029,7 +2029,7 @@ def p_except_clause(s):
def p_include_statement(s, ctx):
pos = s.position()
s.next() # 'include'
s.next() # 'include'
unicode_include_file_name = p_string_literal(s, 'u')[2]
s.expect_newline("Syntax error in include statement")
if s.compile_time_eval:
......@@ -2195,7 +2195,7 @@ def p_compile_time_expr(s):
def p_DEF_statement(s):
pos = s.position()
denv = s.compile_time_env
s.next() # 'DEF'
s.next() # 'DEF'
name = p_ident(s)
s.expect('=')
expr = p_compile_time_expr(s)
......@@ -2213,7 +2213,7 @@ def p_IF_statement(s, ctx):
denv = s.compile_time_env
result = None
while 1:
s.next() # 'IF' or 'ELIF'
s.next() # 'IF' or 'ELIF'
expr = p_compile_time_expr(s)
s.compile_time_eval = current_eval and bool(expr.compile_time_value(denv))
body = p_suite(s, ctx)
......@@ -2399,7 +2399,7 @@ def p_positional_and_keyword_args(s, end_sy_set, templates = None):
parsed_type = False
if s.sy == 'IDENT' and s.peek()[0] == '=':
ident = s.systring
s.next() # s.sy is '='
s.next() # s.sy is '='
s.next()
if looking_at_expr(s):
arg = p_test(s)
......@@ -2666,7 +2666,7 @@ def p_memoryviewslice_access(s, base_type_node):
return result
def looking_at_name(s):
return s.sy == 'IDENT' and not s.systring in calling_convention_words
return s.sy == 'IDENT' and s.systring not in calling_convention_words
def looking_at_expr(s):
if s.systring in base_type_start_words:
......@@ -2813,7 +2813,7 @@ def p_c_declarator(s, ctx = Ctx(), empty = 0, is_type = 0, cmethod_flag = 0,
pos = s.position()
if s.sy == '[':
result = p_c_array_declarator(s, result)
else: # sy == '('
else: # sy == '('
s.next()
result = p_c_func_declarator(s, pos, ctx, result, cmethod_flag)
cmethod_flag = 0
......@@ -2821,7 +2821,7 @@ def p_c_declarator(s, ctx = Ctx(), empty = 0, is_type = 0, cmethod_flag = 0,
def p_c_array_declarator(s, base):
pos = s.position()
s.next() # '['
s.next() # '['
if s.sy != ']':
dim = p_testlist(s)
else:
......@@ -2830,7 +2830,7 @@ def p_c_array_declarator(s, base):
return Nodes.CArrayDeclaratorNode(pos, base = base, dimension = dim)
def p_c_func_declarator(s, pos, ctx, base, cmethod_flag):
# Opening paren has already been skipped
# Opening paren has already been skipped
args = p_c_arg_list(s, ctx, cmethod_flag = cmethod_flag,
nonempty_declarators = 0)
ellipsis = p_optional_ellipsis(s)
......@@ -2872,7 +2872,7 @@ def p_c_simple_declarator(s, ctx, empty, is_type, cmethod_flag,
assignable = assignable, nonempty = nonempty)
result = Nodes.CPtrDeclaratorNode(pos,
base = base)
elif s.sy == '**': # scanner returns this as a single token
elif s.sy == '**': # scanner returns this as a single token
s.next()
base = p_c_declarator(s, ctx, empty = empty, is_type = is_type,
cmethod_flag = cmethod_flag,
......@@ -2926,7 +2926,7 @@ def p_c_simple_declarator(s, ctx, empty, is_type, cmethod_flag,
fatal=False)
name += op
elif op == 'IDENT':
op = s.systring;
op = s.systring
if op not in supported_overloaded_operators:
s.error("Overloading operator '%s' not yet supported." % op,
fatal=False)
......@@ -3148,7 +3148,7 @@ def p_c_enum_definition(s, pos, ctx):
if s.sy != 'NEWLINE':
p_c_enum_line(s, ctx, items)
else:
s.next() # 'NEWLINE'
s.next() # 'NEWLINE'
s.expect_indent()
while s.sy not in ('DEDENT', 'EOF'):
p_c_enum_line(s, ctx, items)
......@@ -3245,7 +3245,7 @@ def p_fused_definition(s, pos, ctx):
while s.sy != 'DEDENT':
if s.sy != 'pass':
#types.append(p_c_declarator(s))
types.append(p_c_base_type(s)) #, nonempty=1))
types.append(p_c_base_type(s)) #, nonempty=1))
else:
s.next()
......
......@@ -239,7 +239,7 @@ def create_pyx_pipeline(context, options, result, py=False, exclude_classes=()):
test_support.append(TreeAssertVisitor())
if options.gdb_debug:
from ..Debugger import DebugWriter # requires Py2.5+
from ..Debugger import DebugWriter # requires Py2.5+
from .ParseTreeTransforms import DebugTransform
context.gdb_debug_outputwriter = DebugWriter.CythonDebugWriter(
options.output_dir)
......
......@@ -479,9 +479,9 @@ class CTypedefType(BaseType):
"TO_PY_FUNCTION": self.to_py_function}))
return True
elif base_type.is_float:
pass # XXX implement!
pass # XXX implement!
elif base_type.is_complex:
pass # XXX implement!
pass # XXX implement!
pass
elif base_type.is_cpp_string:
cname = "__pyx_convert_PyObject_string_to_py_%s" % type_identifier(self)
......@@ -512,9 +512,9 @@ class CTypedefType(BaseType):
"FROM_PY_FUNCTION": self.from_py_function}))
return True
elif base_type.is_float:
pass # XXX implement!
pass # XXX implement!
elif base_type.is_complex:
pass # XXX implement!
pass # XXX implement!
elif base_type.is_cpp_string:
cname = '__pyx_convert_string_from_py_%s' % type_identifier(self)
context = {
......@@ -1466,7 +1466,7 @@ class BuiltinObjectType(PyObjectType):
def cast_code(self, expr_code, to_object_struct = False):
return "((%s*)%s)" % (
to_object_struct and self.objstruct_cname or self.decl_type, # self.objstruct_cname may be None
to_object_struct and self.objstruct_cname or self.decl_type, # self.objstruct_cname may be None
expr_code)
def py_type_name(self):
......@@ -2342,8 +2342,8 @@ class CComplexType(CNumericType):
def assignable_from(self, src_type):
# Temporary hack/feature disabling, see #441
if (not src_type.is_complex and src_type.is_numeric and src_type.is_typedef
and src_type.typedef_is_external):
return False
and src_type.typedef_is_external):
return False
elif src_type.is_pyobject:
return True
else:
......@@ -2351,8 +2351,8 @@ class CComplexType(CNumericType):
def assignable_from_resolved_type(self, src_type):
return (src_type.is_complex and self.real_type.assignable_from_resolved_type(src_type.real_type)
or src_type.is_numeric and self.real_type.assignable_from_resolved_type(src_type)
or src_type is error_type)
or src_type.is_numeric and self.real_type.assignable_from_resolved_type(src_type)
or src_type is error_type)
def attributes_known(self):
if self.scope is None:
......@@ -2538,7 +2538,7 @@ class CArrayType(CPointerBaseType):
return False
def __hash__(self):
return hash(self.base_type) + 28 # arbitrarily chosen offset
return hash(self.base_type) + 28 # arbitrarily chosen offset
def __repr__(self):
return "<CArrayType %s %s>" % (self.size, repr(self.base_type))
......@@ -2670,7 +2670,7 @@ class CPtrType(CPointerBaseType):
default_value = "0"
def __hash__(self):
return hash(self.base_type) + 27 # arbitrarily chosen offset
return hash(self.base_type) + 27 # arbitrarily chosen offset
def __eq__(self, other):
if isinstance(other, CType) and other.is_ptr:
......@@ -2965,8 +2965,8 @@ class CFuncType(CType):
# is performed elsewhere).
for i in range(as_cmethod, len(other_type.args)):
if not self.args[i].type.same_as(
other_type.args[i].type):
return 0
other_type.args[i].type):
return 0
if self.has_varargs != other_type.has_varargs:
return 0
if not self.return_type.subtype_of_resolved_type(other_type.return_type):
......@@ -3415,7 +3415,7 @@ class CFuncTypeArg(BaseType):
self.annotation = annotation
self.type = type
self.pos = pos
self.needs_type_test = False # TODO: should these defaults be set in analyse_types()?
self.needs_type_test = False # TODO: should these defaults be set in analyse_types()?
def __repr__(self):
return "%s:%s" % (self.name, repr(self.type))
......@@ -3867,18 +3867,18 @@ class CppClassType(CType):
specialized.namespace = self.namespace.specialize(values)
specialized.scope = self.scope.specialize(values, specialized)
if self.cname == 'std::vector':
# vector<bool> is special cased in the C++ standard, and its
# accessors do not necessarily return references to the underlying
# elements (which may be bit-packed).
# http://www.cplusplus.com/reference/vector/vector-bool/
# Here we pretend that the various methods return bool values
# (as the actual returned values are coercable to such, and
# we don't support call expressions as lvalues).
T = values.get(self.templates[0], None)
if T and not T.is_fused and T.empty_declaration_code() == 'bool':
for bit_ref_returner in ('at', 'back', 'front'):
if bit_ref_returner in specialized.scope.entries:
specialized.scope.entries[bit_ref_returner].type.return_type = T
# vector<bool> is special cased in the C++ standard, and its
# accessors do not necessarily return references to the underlying
# elements (which may be bit-packed).
# http://www.cplusplus.com/reference/vector/vector-bool/
# Here we pretend that the various methods return bool values
# (as the actual returned values are coercable to such, and
# we don't support call expressions as lvalues).
T = values.get(self.templates[0], None)
if T and not T.is_fused and T.empty_declaration_code() == 'bool':
for bit_ref_returner in ('at', 'back', 'front'):
if bit_ref_returner in specialized.scope.entries:
specialized.scope.entries[bit_ref_returner].type.return_type = T
return specialized
def deduce_template_params(self, actual):
......@@ -4075,7 +4075,7 @@ class CEnumType(CIntLike, CType):
is_enum = 1
signed = 1
rank = -1 # Ranks below any integer type
rank = -1 # Ranks below any integer type
def __init__(self, name, cname, typedef_flag, namespace=None):
self.name = name
......@@ -4258,14 +4258,14 @@ class ErrorType(PyrexType):
rank_to_type_name = (
"char", # 0
"short", # 1
"int", # 2
"long", # 3
"PY_LONG_LONG", # 4
"float", # 5
"double", # 6
"long double", # 7
"char", # 0
"short", # 1
"int", # 2
"long", # 3
"PY_LONG_LONG", # 4
"float", # 5
"double", # 6
"long double", # 7
)
_rank_to_type_name = list(rank_to_type_name)
......@@ -4475,8 +4475,7 @@ def best_match(arg_types, functions, pos=None, env=None, args=None):
# Check no. of args
max_nargs = len(func_type.args)
min_nargs = max_nargs - func_type.optional_arg_count
if actual_nargs < min_nargs or \
(not func_type.has_varargs and actual_nargs > max_nargs):
if actual_nargs < min_nargs or (not func_type.has_varargs and actual_nargs > max_nargs):
if max_nargs == min_nargs and not func_type.has_varargs:
expectation = max_nargs
elif actual_nargs < min_nargs:
......
......@@ -136,7 +136,7 @@ class SourceDescriptor(object):
_escaped_description = None
_cmp_name = ''
def __str__(self):
assert False # To catch all places where a descriptor is used directly as a filename
assert False # To catch all places where a descriptor is used directly as a filename
def set_file_type_from_name(self, filename):
name, ext = os.path.splitext(filename)
......@@ -344,7 +344,7 @@ class PyrexScanner(Scanner):
def normalize_ident(self, text):
try:
text.encode('ascii') # really just name.isascii but supports Python 2 and 3
text.encode('ascii') # really just name.isascii but supports Python 2 and 3
except UnicodeEncodeError:
text = normalize('NFKC', text)
self.produce(IDENT, text)
......@@ -452,7 +452,7 @@ class PyrexScanner(Scanner):
systring = self.context.intern_ustring(systring)
self.sy = sy
self.systring = systring
if False: # debug_scanner:
if False: # debug_scanner:
_, line, col = self.position()
if not self.systring or self.sy == self.systring:
t = self.sy
......
......@@ -292,7 +292,7 @@ def escape_byte_string(s):
"""
s = _replace_specials(s)
try:
return s.decode("ASCII") # trial decoding: plain ASCII => done
return s.decode("ASCII") # trial decoding: plain ASCII => done
except UnicodeDecodeError:
pass
if IS_PYTHON3:
......@@ -325,7 +325,7 @@ def split_string_literal(s, limit=2000):
while start < len(s):
end = start + limit
if len(s) > end-4 and '\\' in s[end-4:end]:
end -= 4 - s[end-4:end].find('\\') # just before the backslash
end -= 4 - s[end-4:end].find('\\') # just before the backslash
while s[end-1] == '\\':
end -= 1
if end == start:
......
This diff is collapsed.
......@@ -55,7 +55,7 @@ class TestBufferOptions(CythonTest):
root = self.fragment(s, pipeline=[NormalizeTree(self), PostParse(self)]).root
if not expect_error:
vardef = root.stats[0].body.stats[0]
assert isinstance(vardef, CVarDefNode) # use normal assert as this is to validate the test code
assert isinstance(vardef, CVarDefNode) # use normal assert as this is to validate the test code
buftype = vardef.base_type
self.assertTrue(isinstance(buftype, TemplatedTypeNode))
self.assertTrue(isinstance(buftype.base_type_node, CSimpleBaseTypeNode))
......@@ -99,7 +99,7 @@ class TestBufferOptions(CythonTest):
# add exotic and impossible combinations as they come along...
if __name__ == '__main__':
import unittest
unittest.main()
This diff is collapsed.
......@@ -53,11 +53,11 @@ class TestMemviewParsing(CythonTest):
# we also test other similar declarations (buffers, anonymous C arrays)
# since the parsing has to distinguish between them.
def disable_test_no_buf_arg(self): # TODO
def disable_test_no_buf_arg(self): # TODO
self.not_parseable(u"Expected ']'",
u"cdef extern foo(object[int, ndim=2])")
def disable_test_parse_sizeof(self): # TODO
def disable_test_parse_sizeof(self): # TODO
self.parse(u"sizeof(int[NN])")
self.parse(u"sizeof(int[])")
self.parse(u"sizeof(int[][NN])")
......
......@@ -89,7 +89,7 @@ class TestNormalizeTree(TransformTest):
t = self.run_pipeline([NormalizeTree(None)], u"pass")
self.assertTrue(len(t.stats) == 0)
class TestWithTransform(object): # (TransformTest): # Disabled!
class TestWithTransform(object): # (TransformTest): # Disabled!
def test_simplified(self):
t = self.run_pipeline([WithTransform(None)], u"""
......
......@@ -47,7 +47,7 @@ class SignatureMatcherTest(unittest.TestCase):
self.assertMatches(function_types[1], [pt.c_long_type, pt.c_int_type], functions)
def test_cpp_reference_cpp_class(self):
classes = [ cppclasstype("Test%d"%i, []) for i in range(2) ]
classes = [ cppclasstype("Test%d" % i, []) for i in range(2) ]
function_types = [
cfunctype(pt.CReferenceType(classes[0])),
cfunctype(pt.CReferenceType(classes[1])),
......@@ -58,7 +58,7 @@ class SignatureMatcherTest(unittest.TestCase):
self.assertMatches(function_types[1], [classes[1]], functions)
def test_cpp_reference_cpp_class_and_int(self):
classes = [ cppclasstype("Test%d"%i, []) for i in range(2) ]
classes = [ cppclasstype("Test%d" % i, []) for i in range(2) ]
function_types = [
cfunctype(pt.CReferenceType(classes[0]), pt.c_int_type),
cfunctype(pt.CReferenceType(classes[0]), pt.c_long_type),
......
......@@ -8,7 +8,7 @@ def backup_Options():
for name, value in vars(Options).items():
# we need a deep copy of _directive_defaults, because they can be changed
if name == '_directive_defaults':
value = copy.deepcopy(value)
value = copy.deepcopy(value)
backup[name] = value
return backup
......
......@@ -178,7 +178,7 @@ class TemplateTransform(VisitorTransform):
if pos is None: pos = node.pos
return ApplyPositionAndCopy(pos)(sub)
else:
return self.visit_Node(node) # make copy as usual
return self.visit_Node(node) # make copy as usual
def visit_NameNode(self, node):
temphandle = self.tempmap.get(node.name)
......@@ -234,7 +234,7 @@ class TreeFragment(object):
fmt_pxds[key] = fmt(value)
mod = t = parse_from_strings(name, fmt_code, fmt_pxds, level=level, initial_pos=initial_pos)
if level is None:
t = t.body # Make sure a StatListNode is at the top
t = t.body # Make sure a StatListNode is at the top
if not isinstance(t, StatListNode):
t = StatListNode(pos=mod.pos, stats=[t])
for transform in pipeline:
......
......@@ -178,7 +178,7 @@ class MarkParallelAssignments(EnvTransform):
return node
def visit_FromCImportStatNode(self, node):
return node # Can't be assigned to...
return node # Can't be assigned to...
def visit_FromImportStatNode(self, node):
for name, target in node.items:
......@@ -308,10 +308,10 @@ class MarkOverflowingArithmetic(CythonTransform):
def visit_SimpleCallNode(self, node):
if node.function.is_name and node.function.name == 'abs':
# Overflows for minimum value of fixed size ints.
return self.visit_dangerous_node(node)
# Overflows for minimum value of fixed size ints.
return self.visit_dangerous_node(node)
else:
return self.visit_neutral_node(node)
return self.visit_neutral_node(node)
visit_UnopNode = visit_neutral_node
......@@ -370,7 +370,7 @@ class SimpleAssignmentTypeInferer(object):
if enabled == True:
spanning_type = aggressive_spanning_type
elif enabled is None: # safe mode
elif enabled is None: # safe mode
spanning_type = safe_spanning_type
else:
for entry in scope.entries.values():
......
......@@ -788,7 +788,7 @@ ssizessizeargfunc = Signature("Tzz", "O") # typedef PyObject *(*ssizessizeargfu
intobjargproc = Signature("TiO", 'r') # typedef int(*intobjargproc)(PyObject *, int, PyObject *);
ssizeobjargproc = Signature("TzO", 'r') # typedef int(*ssizeobjargproc)(PyObject *, Py_ssize_t, PyObject *);
intintobjargproc = Signature("TiiO", 'r') # typedef int(*intintobjargproc)(PyObject *, int, int, PyObject *);
ssizessizeobjargproc = Signature("TzzO", 'r') # typedef int(*ssizessizeobjargproc)(PyObject *, Py_ssize_t, Py_ssize_t, PyObject *);
ssizessizeobjargproc = Signature("TzzO", 'r') # typedef int(*ssizessizeobjargproc)(PyObject *, Py_ssize_t, Py_ssize_t, PyObject *);
intintargproc = Signature("Tii", 'r')
ssizessizeargproc = Signature("Tzz", 'r')
......@@ -878,7 +878,7 @@ PyNumberMethods = (
MethodSlot(ibinaryfunc, "nb_inplace_multiply", "__imul__"),
MethodSlot(ibinaryfunc, "nb_inplace_divide", "__idiv__", ifdef = PyNumberMethods_Py3_GUARD),
MethodSlot(ibinaryfunc, "nb_inplace_remainder", "__imod__"),
MethodSlot(ibinaryfunc, "nb_inplace_power", "__ipow__"), # actually ternaryfunc!!!
MethodSlot(ibinaryfunc, "nb_inplace_power", "__ipow__"), # actually ternaryfunc!!!
MethodSlot(ibinaryfunc, "nb_inplace_lshift", "__ilshift__"),
MethodSlot(ibinaryfunc, "nb_inplace_rshift", "__irshift__"),
MethodSlot(ibinaryfunc, "nb_inplace_and", "__iand__"),
......@@ -902,15 +902,15 @@ PyNumberMethods = (
PySequenceMethods = (
MethodSlot(lenfunc, "sq_length", "__len__"),
EmptySlot("sq_concat"), # nb_add used instead
EmptySlot("sq_repeat"), # nb_multiply used instead
EmptySlot("sq_concat"), # nb_add used instead
EmptySlot("sq_repeat"), # nb_multiply used instead
SyntheticSlot("sq_item", ["__getitem__"], "0"), #EmptySlot("sq_item"), # mp_subscript used instead
MethodSlot(ssizessizeargfunc, "sq_slice", "__getslice__"),
EmptySlot("sq_ass_item"), # mp_ass_subscript used instead
EmptySlot("sq_ass_item"), # mp_ass_subscript used instead
SyntheticSlot("sq_ass_slice", ["__setslice__", "__delslice__"], "0"),
MethodSlot(cmpfunc, "sq_contains", "__contains__"),
EmptySlot("sq_inplace_concat"), # nb_inplace_add used instead
EmptySlot("sq_inplace_repeat"), # nb_inplace_multiply used instead
EmptySlot("sq_inplace_concat"), # nb_inplace_add used instead
EmptySlot("sq_inplace_repeat"), # nb_inplace_multiply used instead
)
PyMappingMethods = (
......@@ -964,8 +964,8 @@ slot_table = (
MethodSlot(callfunc, "tp_call", "__call__"),
MethodSlot(reprfunc, "tp_str", "__str__"),
SyntheticSlot("tp_getattro", ["__getattr__","__getattribute__"], "0"), #"PyObject_GenericGetAttr"),
SyntheticSlot("tp_setattro", ["__setattr__", "__delattr__"], "0"), #"PyObject_GenericSetAttr"),
SyntheticSlot("tp_getattro", ["__getattr__","__getattribute__"], "0"), #"PyObject_GenericGetAttr"),
SyntheticSlot("tp_setattro", ["__setattr__", "__delattr__"], "0"), #"PyObject_GenericSetAttr"),
SuiteSlot(PyBufferProcs, "PyBufferProcs", "tp_as_buffer"),
......@@ -986,7 +986,7 @@ slot_table = (
MemberTableSlot("tp_members"),
GetSetSlot("tp_getset"),
BaseClassSlot("tp_base"), #EmptySlot("tp_base"),
BaseClassSlot("tp_base"), #EmptySlot("tp_base"),
EmptySlot("tp_dict"),
SyntheticSlot("tp_descr_get", ["__get__"], "0"),
......@@ -995,7 +995,7 @@ slot_table = (
DictOffsetSlot("tp_dictoffset"),
MethodSlot(initproc, "tp_init", "__init__"),
EmptySlot("tp_alloc"), #FixedSlot("tp_alloc", "PyType_GenericAlloc"),
EmptySlot("tp_alloc"), #FixedSlot("tp_alloc", "PyType_GenericAlloc"),
ConstructorSlot("tp_new", "__cinit__"),
EmptySlot("tp_free"),
......
......@@ -45,7 +45,7 @@ class TempRefNode(AtomicExprNode):
def calculate_result_code(self):
result = self.handle.temp
if result is None: result = "<error>" # might be called and overwritten
if result is None: result = "<error>" # might be called and overwritten
return result
def generate_result_code(self, code):
......@@ -150,7 +150,7 @@ class ResultRefNode(AtomicExprNode):
def analyse_types(self, env):
if self.expression is not None:
if not self.expression.type:
self.expression = self.expression.analyse_types(env)
self.expression = self.expression.analyse_types(env)
self.type = self.expression.type
return self
......@@ -175,7 +175,7 @@ class ResultRefNode(AtomicExprNode):
return self.expression.may_be_none()
if self.type is not None:
return self.type.is_pyobject
return True # play safe
return True # play it safe
def is_simple(self):
return True
......
......@@ -196,7 +196,7 @@ class CythonUtilityCode(Code.UtilityCodeBase):
Load a utility code as a string. Returns (proto, implementation)
"""
util = cls.load(util_code_name, from_file, **kwargs)
return util.proto, util.impl # keep line numbers => no lstrip()
return util.proto, util.impl # keep line numbers => no lstrip()
def declare_in_scope(self, dest_scope, used=False, cython_scope=None,
whitelist=None):
......
......@@ -5,8 +5,8 @@ import sys
import errno
try:
from lxml import etree
have_lxml = True
from lxml import etree
have_lxml = True
except ImportError:
have_lxml = False
try:
......
......@@ -334,7 +334,7 @@ class CythonBase(object):
func_name = cyfunc.name
func_cname = cyfunc.cname
func_args = [] # [(arg, f(arg)) for arg in cyfunc.arguments]
func_args = [] # [(arg, f(arg)) for arg in cyfunc.arguments]
else:
source_desc, lineno = self.get_source_desc(frame)
func_name = frame.name()
......@@ -1219,8 +1219,8 @@ class EvaluateOrExecuteCodeMixin(object):
cython_func = self.get_cython_function()
for name, cyvar in cython_func.locals.iteritems():
if (cyvar.type == PythonObject and
self.is_initialized(cython_func, name)):
if (cyvar.type == PythonObject
and self.is_initialized(cython_func, name)):
try:
val = gdb.parse_and_eval(cyvar.cname)
......@@ -1249,8 +1249,8 @@ class EvaluateOrExecuteCodeMixin(object):
def _find_first_cython_or_python_frame(self):
frame = gdb.selected_frame()
while frame:
if (self.is_cython_function(frame) or
self.is_python_function(frame)):
if (self.is_cython_function(frame)
or self.is_python_function(frame)):
frame.select()
return frame
......
......@@ -625,7 +625,7 @@ class PyCFunctionObjectPtr(PyObjectPtr):
_typename = 'PyCFunctionObject'
def proxyval(self, visited):
m_ml = self.field('m_ml') # m_ml is a (PyMethodDef*)
m_ml = self.field('m_ml') # m_ml is a (PyMethodDef*)
try:
ml_name = m_ml['ml_name'].string()
except UnicodeDecodeError:
......@@ -1309,8 +1309,8 @@ class PyUnicodeObjectPtr(PyObjectPtr):
# If sizeof(Py_UNICODE) is 2 here (in gdb), join
# surrogate pairs before calling _unichr_is_printable.
if (i < len(proxy)
and 0xD800 <= ord(ch) < 0xDC00 \
and 0xDC00 <= ord(proxy[i]) <= 0xDFFF):
and 0xD800 <= ord(ch) < 0xDC00
and 0xDC00 <= ord(proxy[i]) <= 0xDFFF):
ch2 = proxy[i]
ucs = ch + ch2
i += 1
......
......@@ -26,17 +26,18 @@ except ImportError:
def _check_stack(path):
try:
for frame in inspect.getouterframes(inspect.currentframe(), 0):
if path in frame[1].replace(os.sep, '/'):
return True
except Exception:
pass
return False
try:
for frame in inspect.getouterframes(inspect.currentframe(), 0):
if path in frame[1].replace(os.sep, '/'):
return True
except Exception:
pass
return False
if (not _check_stack('setuptools/extensions.py')
and not _check_stack('pyximport/pyxbuild.py')
and not _check_stack('Cython/Distutils/build_ext.py')):
and not _check_stack('pyximport/pyxbuild.py')
and not _check_stack('Cython/Distutils/build_ext.py')):
warnings.warn(
"Cython.Distutils.old_build_ext does not properly handle dependencies "
"and is deprecated.")
......@@ -241,7 +242,7 @@ class old_build_ext(_build_ext.build_ext):
includes = list(self.cython_include_dirs)
try:
for i in extension.cython_include_dirs:
if not i in includes:
if i not in includes:
includes.append(i)
except AttributeError:
pass
......@@ -250,7 +251,7 @@ class old_build_ext(_build_ext.build_ext):
# result
extension.include_dirs = list(extension.include_dirs)
for i in extension.include_dirs:
if not i in includes:
if i not in includes:
includes.append(i)
# Set up Cython compiler directives:
......
......@@ -168,7 +168,7 @@ def cast(t, *args, **kwargs):
if isinstance(t, typedef):
return t(*args)
elif isinstance(t, type): #Doesn't work with old-style classes of Python 2.x
elif isinstance(t, type): # Doesn't work with old-style classes of Python 2.x
if len(args) != 1 or not (args[0] is None or isinstance(args[0], t)):
return t(*args)
......@@ -284,7 +284,7 @@ class StructType(CythonType):
if len(data) > 0:
raise ValueError('Cannot accept keyword arguments when casting.')
if type(cast_from) is not type(self):
raise ValueError('Cannot cast from %s'%cast_from)
raise ValueError('Cannot cast from %s' % cast_from)
for key, value in cast_from.__dict__.items():
setattr(self, key, value)
else:
......@@ -310,7 +310,7 @@ class UnionType(CythonType):
elif type(cast_from) is type(self):
datadict = cast_from.__dict__
else:
raise ValueError('Cannot cast from %s'%cast_from)
raise ValueError('Cannot cast from %s' % cast_from)
else:
datadict = data
if len(datadict) > 1:
......
......@@ -336,7 +336,7 @@ class Template(object):
if not isinstance(value, basestring_):
value = coerce_text(value)
if (is_unicode(value)
and self.default_encoding):
and self.default_encoding):
value = value.encode(self.default_encoding)
except Exception as e:
e.args = (self._add_line_info(e.args[0], pos),)
......@@ -724,7 +724,7 @@ def trim_lex(tokens):
else:
next_chunk = tokens[i + 1]
if (not isinstance(next_chunk, basestring_)
or not isinstance(prev, basestring_)):
or not isinstance(prev, basestring_)):
continue
prev_ok = not prev or trail_whitespace_re.search(prev)
if i == 1 and not prev.strip():
......@@ -736,7 +736,7 @@ def trim_lex(tokens):
or (i == len(tokens) - 2 and not next_chunk.strip()))):
if prev:
if ((i == 1 and not prev.strip())
or prev_ok == 'last'):
or prev_ok == 'last'):
tokens[i - 1] = ''
else:
m = trail_whitespace_re.search(prev)
......@@ -888,7 +888,7 @@ def parse_cond(tokens, name, context):
'Missing {{endif}}',
position=start, name=name)
if (isinstance(tokens[0], tuple)
and tokens[0][0] == 'endif'):
and tokens[0][0] == 'endif'):
return ('cond', start) + tuple(pieces), tokens[1:]
next_chunk, tokens = parse_one_cond(tokens, name, context)
pieces.append(next_chunk)
......@@ -950,7 +950,7 @@ def parse_for(tokens, name, context):
'No {{endfor}}',
position=pos, name=name)
if (isinstance(tokens[0], tuple)
and tokens[0][0] == 'endfor'):
and tokens[0][0] == 'endfor'):
return ('for', pos, vars, expr, content), tokens[1:]
next_chunk, tokens = parse_expr(tokens, name, context)
content.append(next_chunk)
......@@ -1010,7 +1010,7 @@ def parse_def(tokens, name, context):
'Missing {{enddef}}',
position=start, name=name)
if (isinstance(tokens[0], tuple)
and tokens[0][0] == 'enddef'):
and tokens[0][0] == 'enddef'):
return ('def', start, func_name, sig, content), tokens[1:]
next_chunk, tokens = parse_expr(tokens, name, context)
content.append(next_chunk)
......@@ -1073,7 +1073,7 @@ def parse_signature(sig_text, name, pos):
raise TemplateError('Invalid signature: (%s)' % sig_text,
position=pos, name=name)
if (not nest_count and
(tok_type == tokenize.ENDMARKER or (tok_type == tokenize.OP and tok_string == ','))):
(tok_type == tokenize.ENDMARKER or (tok_type == tokenize.OP and tok_string == ','))):
default_expr = isolate_expression(sig_text, start_pos, end_pos)
defaults[var_name] = default_expr
sig_args.append(var_name)
......
......@@ -180,7 +180,7 @@ class TreeAssertVisitor(VisitorTransform):
if TreePath.find_first(node, path) is not None:
Errors.error(
node.pos,
"Unexpected path '%s' found in result tree" % path)
"Unexpected path '%s' found in result tree" % path)
self.visitchildren(node)
return node
......
......@@ -87,4 +87,3 @@ class TestCodeWriter(CythonTest):
if __name__ == "__main__":
import unittest
unittest.main()
......@@ -109,8 +109,7 @@ class _XMLTestResult(_TextTestResult):
self.elapsed_times = elapsed_times
self.output_patched = False
def _prepare_callback(self, test_info, target_list, verbose_str,
short_str):
def _prepare_callback(self, test_info, target_list, verbose_str, short_str):
"""Append a _TestInfo to the given target list and sets a callback
method to be called by stopTest method.
"""
......@@ -125,7 +124,7 @@ class _XMLTestResult(_TextTestResult):
self.start_time = self.stop_time = 0
if self.showAll:
self.stream.writeln('(%.3fs) %s' % \
self.stream.writeln('(%.3fs) %s' %
(test_info.get_elapsed_time(), verbose_str))
elif self.dots:
self.stream.write(short_str)
......@@ -300,8 +299,7 @@ class _XMLTestResult(_TextTestResult):
"Generates the XML reports to a given XMLTestRunner object."
all_results = self._get_info_by_testcase()
if type(test_runner.output) == str and not \
os.path.exists(test_runner.output):
if isinstance(test_runner.output, str) and not os.path.exists(test_runner.output):
os.makedirs(test_runner.output)
for suite, tests in all_results.items():
......@@ -321,7 +319,7 @@ class _XMLTestResult(_TextTestResult):
xml_content = doc.toprettyxml(indent='\t')
if type(test_runner.output) is str:
report_file = open('%s%sTEST-%s.xml' % \
report_file = open('%s%sTEST-%s.xml' %
(test_runner.output, os.sep, suite), 'w')
try:
report_file.write(xml_content)
......@@ -348,7 +346,7 @@ class XMLTestRunner(TextTestRunner):
"""Create the TestResult object which will be used to store
information about the executed tests.
"""
return _XMLTestResult(self.stream, self.descriptions, \
return _XMLTestResult(self.stream, self.descriptions,
self.verbosity, self.elapsed_times)
def run(self, test):
......
......@@ -29,7 +29,7 @@ class NN(object):
# print 'class NN'
def __init__(self, ni, nh, no):
# number of input, hidden, and output nodes
self.ni = ni + 1 # +1 for bias node
self.ni = ni + 1 # +1 for bias node
self.nh = nh
self.no = no
......@@ -67,7 +67,7 @@ class NN(object):
for j in range(self.nh):
sum = 0.0
for i in range(self.ni):
sum = sum + self.ai[i] * self.wi[i][j]
sum = sum + self.ai[i] * self.wi[i][j]
self.ah[j] = 1.0/(1.0+math.exp(-sum))
# output activations
......
......@@ -130,7 +130,7 @@ class Spline(object):
I = ii
break
else:
I = dom[1] - 1
I = dom[1] - 1
return I
def __len__(self):
......
......@@ -64,7 +64,7 @@ def get_senh(board, cti):
def get_puzzle(w=w, h=h):
board = [E*x + S*y + (y%2) for y in range(h) for x in range(w)]
board = [E*x + S*y + (y % 2) for y in range(h) for x in range(w)]
cti = dict((board[i], i) for i in range(len(board)))
idos = [[E, E, E, SE], # incremental direction offsets
......@@ -152,4 +152,3 @@ if __name__ == "__main__":
options, args = parser.parse_args()
util.run_benchmark(options, options.num_runs, main)
......@@ -333,7 +333,7 @@ class WorkTask(Task):
pkt.ident = dest
pkt.datum = 0
for i in BUFSIZE_RANGE: # range(BUFSIZE)
for i in BUFSIZE_RANGE: # range(BUFSIZE)
w.count += 1
if w.count > 26:
w.count = 1
......@@ -382,9 +382,9 @@ class Richards(object):
wkq = Packet(wkq , I_DEVB, K_DEV)
HandlerTask(I_HANDLERB, 3000, wkq, TaskState().waitingWithPacket(), HandlerTaskRec())
wkq = None;
DeviceTask(I_DEVA, 4000, wkq, TaskState().waiting(), DeviceTaskRec());
DeviceTask(I_DEVB, 5000, wkq, TaskState().waiting(), DeviceTaskRec());
wkq = None
DeviceTask(I_DEVA, 4000, wkq, TaskState().waiting(), DeviceTaskRec())
DeviceTask(I_DEVB, 5000, wkq, TaskState().waiting(), DeviceTaskRec())
schedule()
......
......@@ -4,5 +4,3 @@ def report_cheese(name):
print("Found cheese: " + name)
cheese.find(report_cheese)
import math
def great_circle(lon1, lat1, lon2, lat2):
radius = 3956 # miles
radius = 3956 # miles
x = math.pi/180.0
a = (90.0 - lat1)*x
......
......@@ -128,7 +128,7 @@ class CythonLexer(RegexLexer):
(r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
(r'[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace),
(r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
(r'', Text, '#pop') # all else: go back
(r'', Text, '#pop') # all else: go back
],
'fromimport': [
(r'(\s+)(c?import)\b', bygroups(Text, Keyword), '#pop'),
......@@ -155,12 +155,12 @@ class CythonLexer(RegexLexer):
],
'dqs': [
(r'"', String, '#pop'),
(r'\\\\|\\"|\\\n', String.Escape), # included here again for raw strings
(r'\\\\|\\"|\\\n', String.Escape), # included here again for raw strings
include('strings')
],
'sqs': [
(r"'", String, '#pop'),
(r"\\\\|\\'|\\\n", String.Escape), # included here again for raw strings
(r"\\\\|\\'|\\\n", String.Escape), # included here again for raw strings
include('strings')
],
'tdqs': [
......
......@@ -119,9 +119,9 @@ def pyx_to_dll(filename, ext=None, force_rebuild=0, build_in_temp=False, pyxbuil
while count < 100:
count += 1
r_path = os.path.join(obj_build_ext.build_lib,
basename + '.reload%s'%count)
basename + '.reload%s' % count)
try:
import shutil # late import / reload_support is: debugging
import shutil # late import / reload_support is: debugging
try:
# Try to unlink first --- if the .so file
# is mmapped by another process,
......@@ -140,7 +140,7 @@ def pyx_to_dll(filename, ext=None, force_rebuild=0, build_in_temp=False, pyxbuil
break
else:
# used up all 100 slots
raise ImportError("reload count for %s reached maximum"%org_path)
raise ImportError("reload count for %s reached maximum" % org_path)
_reloads[org_path]=(timestamp, so_path, count)
return so_path
except KeyboardInterrupt:
......@@ -157,4 +157,3 @@ def pyx_to_dll(filename, ext=None, force_rebuild=0, build_in_temp=False, pyxbuil
if __name__=="__main__":
pyx_to_dll("dummy.pyx")
from . import test
......@@ -191,7 +191,7 @@ def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_l
reload_support=pyxargs.reload_support)
assert os.path.exists(so_path), "Cannot find: %s" % so_path
junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;)
junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;)
junkstuff = glob.glob(junkpath)
for path in junkstuff:
if path != so_path:
......@@ -269,7 +269,7 @@ class PyxImporter(object):
pyxbuild_dir=self.pyxbuild_dir,
inplace=self.inplace,
language_level=self.language_level)
if ty != imp.C_EXTENSION: # only when an extension, check if we have a .pyx next!
if ty != imp.C_EXTENSION: # only when an extension, check if we have a .pyx next!
return None
# find .pyx fast, when .so/.pyd exist --inplace
......
......@@ -66,7 +66,7 @@ def make_ext(name, filename):
assert len(pyximport._test_files)==1, pyximport._test_files
reload(dummy)
time.sleep(1) # sleep a second to get safer mtimes
time.sleep(1) # sleep a second to get safer mtimes
open(os.path.join(tempdir, "abc.txt"), "w").write(" ")
print("Here goes the reload")
reload(dummy)
......
......@@ -22,8 +22,8 @@ def test():
import hello
assert hello.x == 1
time.sleep(1) # sleep to make sure that new "hello.pyx" has later
# timestamp than object file.
time.sleep(1) # sleep to make sure that new "hello.pyx" has later
# timestamp than object file.
open(hello_file, "w").write("x = 2; print x; after = 'after'\n")
reload(hello)
......
......@@ -2,10 +2,21 @@
max-complexity = 10
[pycodestyle]
exclude = .git,build,__pycache__,venv*,TEST*
exclude = .git,build,__pycache__,venv*,TEST*,tests/run/test*.py,Cython/Debugger/libpython.py
max-line-length = 150
format = pylint
select = E711, E714, E501, W291
# See https://pycodestyle.pycqa.org/en/latest/intro.html#configuration
select =
E711, E713, E714, E501, W291, E502, E703,
# indentation
E101, E111, E112, E113, E117
E121, E125, E129,
# E114, E115, E116, E122,
# whitespace
E223, E224, E228, E261, E273, E274, E275,
# E201, E202, E203, E211, E265
# E303, E306,
W1, W2, W3
#ignore = W, E
ignore =
W504,
......
......@@ -4,4 +4,3 @@ import cython
@cython.cclass
class BuiltinRef:
cython.declare(pybuf = 'Py_buffer')
......@@ -3,7 +3,7 @@
def int_literals():
a = 1L # ok
b = 10000000000000L # ok
b = 10000000000000L # ok
c = 1UL
d = 10000000000000UL
e = 10000000000000LL
......
......@@ -9,4 +9,3 @@ _ERRORS = u"""
5:11: undeclared name not builtin: a
5:15: undeclared name not builtin: b
"""
......@@ -238,7 +238,7 @@ C.class_assigned_function = free_function1
__global_arg = True
_D__arg1 = None
_D__global_arg = False # define these because otherwise Cython gives a compile-time error
_D__global_arg = False # define these because otherwise Cython gives a compile-time error
# while Python gives a runtime error (which is difficult to test)
def can_find_global_arg():
"""
......@@ -389,4 +389,4 @@ class CallsRegularFunction:
>>> CallsRegularFunction().call()
1
"""
return regular_function(__x=1) # __x shouldn't be mangled as an argument elsewhere
return regular_function(__x=1) # __x shouldn't be mangled as an argument elsewhere
......@@ -73,4 +73,4 @@ else:
__doc__ = """
>>> True
True
""" # stops Python2 from failing
""" # stops Python2 from failing
......@@ -12,14 +12,14 @@ class Test(object):
try:
print(__something)
except NameError:
print("NameError1") # correct - shouldn't exist
print("NameError1") # correct - shouldn't exist
globals()['__something'] = 'found unmangled'
try:
print(__something)
except NameError:
print("NameError2") # correct - shouldn't exist
print("NameError2") # correct - shouldn't exist
globals()['_Test__something'] = 'found mangled'
try:
print(__something) # should print this
print(__something) # should print this
except NameError:
print("NameError3")
......@@ -24,4 +24,3 @@ def f(a: 1+2==3, b: list, c: this_cant_evaluate, d: "Hello from inside a string"
True
"""
pass
......@@ -576,4 +576,3 @@ def array_init_with_list():
x[12] = 42
return [x[10], x[12]]
......@@ -31,6 +31,6 @@ def test_reraise_error():
... else: print("FAILED")
"""
import sys
if hasattr(sys, 'exc_clear'): # Py2
if hasattr(sys, 'exc_clear'): # Py2
sys.exc_clear()
raise
......@@ -363,7 +363,7 @@ the \'lazy\' dog.\n\
compile(s, "<test>", "exec")
self.assertIn("unexpected EOF", str(cm.exception))
var_annot_global: int # a global annotated is necessary for test_var_annot
var_annot_global: int # a global annotated is necessary for test_var_annot
# custom namespace for testing __annotations__
......@@ -843,7 +843,7 @@ class GrammarTests(unittest.TestCase):
### lambdef: 'lambda' [varargslist] ':' test
l1 = lambda : 0
self.assertEqual(l1(), 0)
l2 = lambda : a[d] # XXX just testing the expression
l2 = lambda : a[d] # XXX just testing the expression
l3 = lambda : [2 < x for x in [-1, 3, 0]]
self.assertEqual(l3(), [0, 1, 0])
l4 = lambda x = lambda y = lambda z=1 : z : y() : x()
......@@ -1819,7 +1819,7 @@ class GrammarTests(unittest.TestCase):
self.assertEqual(sum(b), sum([x for x in range(10)]))
self.assertEqual(sum(x**2 for x in range(10)), sum([x**2 for x in range(10)]))
self.assertEqual(sum(x*x for x in range(10) if x%2), sum([x*x for x in range(10) if x%2]))
self.assertEqual(sum(x*x for x in range(10) if x % 2), sum([x*x for x in range(10) if x % 2]))
self.assertEqual(sum(x for x in (y for y in range(10))), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in (y for y in (z for z in range(10)))), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in [y for y in (z for z in range(10))]), sum([x for x in range(10)]))
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment