Commit e5b5c0a3 authored by scoder, committed by GitHub

Merge pull request #1802 from scoder/_pep525_async_gen

PEP 525: asynchronous generators
parents c02ee648 9a97b2ab
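For context, PEP 525 combines `async def` (PEP 492) with `yield` to form asynchronous generators, which is the Python-level feature this merge teaches Cython to compile. A minimal sketch in plain Python (illustrative names, nothing Cython-specific):

    import asyncio

    async def ticker(n):
        # 'async def' + 'yield' => asynchronous generator (PEP 525)
        for i in range(n):
            await asyncio.sleep(0)      # may await between yields
            yield i

    async def consume():
        result = []
        async for value in ticker(3):   # consumed with 'async for' (PEP 492)
            result.append(value)
        return result

    print(asyncio.get_event_loop().run_until_complete(consume()))  # [0, 1, 2]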
@@ -35,6 +35,7 @@ branches:
   - master
 install:
+  - python -c 'import sys; print("Python %s" % (sys.version,))'
   - CFLAGS="-O2 -ggdb -Wall -Wextra $(python -c 'import sys; print("-fno-strict-aliasing" if sys.version_info[0] == 2 else "")')" python setup.py build
 before_script: ccache -s
......
@@ -632,8 +632,8 @@ class FunctionState(object):
             label += '_' + name
         return label
 
-    def new_yield_label(self):
-        label = self.new_label('resume_from_yield')
+    def new_yield_label(self, expr_type='yield'):
+        label = self.new_label('resume_from_%s' % expr_type)
         num_and_label = (len(self.yield_labels) + 1, label)
         self.yield_labels.append(num_and_label)
         return num_and_label
@@ -1628,7 +1628,7 @@ class CCodeWriter(object):
     # Functions delegated to function scope
     def new_label(self, name=None):    return self.funcstate.new_label(name)
     def new_error_label(self):         return self.funcstate.new_error_label()
-    def new_yield_label(self):         return self.funcstate.new_yield_label()
+    def new_yield_label(self, *args):  return self.funcstate.new_yield_label(*args)
     def get_loop_labels(self):         return self.funcstate.get_loop_labels()
     def set_loop_labels(self, labels): return self.funcstate.set_loop_labels(labels)
     def new_loop_labels(self):         return self.funcstate.new_loop_labels()
......
@@ -2700,8 +2700,7 @@ class IteratorNode(ExprNode):
         code.putln("if (unlikely(!%s)) {" % result_name)
         code.putln("PyObject* exc_type = PyErr_Occurred();")
         code.putln("if (exc_type) {")
-        code.putln("if (likely(exc_type == PyExc_StopIteration ||"
-                   " PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();")
+        code.putln("if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();")
         code.putln("else %s" % code.error_goto(self.pos))
         code.putln("}")
         code.putln("break;")
@@ -7883,9 +7882,8 @@ class ScopedExprNode(ExprNode):
         code.putln('{ /* enter inner scope */')
         py_entries = []
-        for entry in self.expr_scope.var_entries:
+        for _, entry in sorted(item for item in self.expr_scope.entries.items() if item[0]):
             if not entry.in_closure:
-                code.put_var_declaration(entry)
                 if entry.type.is_pyobject and entry.used:
                     py_entries.append(entry)
         if not py_entries:

@@ -7895,14 +7893,14 @@ class ScopedExprNode(ExprNode):
             return
 
         # must free all local Python references at each exit point
-        old_loop_labels = tuple(code.new_loop_labels())
+        old_loop_labels = code.new_loop_labels()
         old_error_label = code.new_error_label()
 
         generate_inner_evaluation_code(code)
 
         # normal (non-error) exit
         for entry in py_entries:
-            code.put_var_decref(entry)
+            code.put_var_xdecref_clear(entry)
 
         # error/loop body exit points
         exit_scope = code.new_label('exit_scope')

@@ -7912,7 +7910,7 @@ class ScopedExprNode(ExprNode):
             if code.label_used(label):
                 code.put_label(label)
                 for entry in py_entries:
-                    code.put_var_decref(entry)
+                    code.put_var_xdecref_clear(entry)
                 code.put_goto(old_label)
         code.put_label(exit_scope)
         code.putln('} /* exit inner scope */')
@@ -9415,10 +9413,11 @@ class YieldExprNode(ExprNode):
     label_num = 0
     is_yield_from = False
     is_await = False
+    in_async_gen = False
     expr_keyword = 'yield'
 
     def analyse_types(self, env):
-        if not self.label_num:
+        if not self.label_num or (self.is_yield_from and self.in_async_gen):
             error(self.pos, "'%s' not supported here" % self.expr_keyword)
         self.is_temp = 1
         if self.arg is not None:

@@ -9449,7 +9448,8 @@ class YieldExprNode(ExprNode):
         Generate the code to return the argument in 'Naming.retval_cname'
         and to continue at the yield label.
         """
-        label_num, label_name = code.new_yield_label()
+        label_num, label_name = code.new_yield_label(
+            self.expr_keyword.replace(' ', '_'))
         code.use_label(label_name)
 
         saved = []
@@ -9469,10 +9469,16 @@ class YieldExprNode(ExprNode):
                 nogil=not code.funcstate.gil_owned)
         code.put_finish_refcount_context()
-        code.putln("/* return from generator, yielding value */")
+        code.putln("/* return from %sgenerator, %sing value */" % (
+            'async ' if self.in_async_gen else '',
+            'await' if self.is_await else 'yield'))
         code.putln("%s->resume_label = %d;" % (
             Naming.generator_cname, label_num))
-        code.putln("return %s;" % Naming.retval_cname)
+        if self.in_async_gen and not self.is_await:
+            # __Pyx__PyAsyncGenValueWrapperNew() steals a reference to the return value
+            code.putln("return __Pyx__PyAsyncGenValueWrapperNew(%s);" % Naming.retval_cname)
+        else:
+            code.putln("return %s;" % Naming.retval_cname)
         code.put_label(label_name)
 
         for cname, save_cname, type in saved:

@@ -9480,27 +9486,19 @@ class YieldExprNode(ExprNode):
             if type.is_pyobject:
                 code.putln('%s->%s = 0;' % (Naming.cur_scope_cname, save_cname))
                 code.put_xgotref(cname)
-        code.putln(code.error_goto_if_null(Naming.sent_value_cname, self.pos))
+        self.generate_sent_value_handling_code(code, Naming.sent_value_cname)
         if self.result_is_used:
             self.allocate_temp_result(code)
             code.put('%s = %s; ' % (self.result(), Naming.sent_value_cname))
             code.put_incref(self.result(), py_object_type)
 
+    def generate_sent_value_handling_code(self, code, value_cname):
+        code.putln(code.error_goto_if_null(value_cname, self.pos))
 
-class YieldFromExprNode(YieldExprNode):
-    # "yield from GEN" expression
-    is_yield_from = True
-    expr_keyword = 'yield from'
-
-    def coerce_yield_argument(self, env):
-        if not self.arg.type.is_string:
-            # FIXME: support C arrays and C++ iterators?
-            error(self.pos, "yielding from non-Python object not supported")
-        self.arg = self.arg.coerce_to_pyobject(env)
-
+
+class _YieldDelegationExprNode(YieldExprNode):
     def yield_from_func(self, code):
-        code.globalstate.use_utility_code(UtilityCode.load_cached("GeneratorYieldFrom", "Coroutine.c"))
-        return "__Pyx_Generator_Yield_From"
+        raise NotImplementedError()
 
     def generate_evaluation_code(self, code, source_cname=None, decref_source=False):
         if source_cname is None:
@@ -9536,13 +9534,29 @@ class YieldFromExprNode(YieldExprNode):
     def handle_iteration_exception(self, code):
         code.putln("PyObject* exc_type = PyErr_Occurred();")
         code.putln("if (exc_type) {")
-        code.putln("if (likely(exc_type == PyExc_StopIteration ||"
-                   " PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();")
+        code.putln("if (likely(exc_type == PyExc_StopIteration || (exc_type != PyExc_GeneratorExit &&"
+                   " __Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)))) PyErr_Clear();")
         code.putln("else %s" % code.error_goto(self.pos))
         code.putln("}")
 
 
-class AwaitExprNode(YieldFromExprNode):
+class YieldFromExprNode(_YieldDelegationExprNode):
+    # "yield from GEN" expression
+    is_yield_from = True
+    expr_keyword = 'yield from'
+
+    def coerce_yield_argument(self, env):
+        if not self.arg.type.is_string:
+            # FIXME: support C arrays and C++ iterators?
+            error(self.pos, "yielding from non-Python object not supported")
+        self.arg = self.arg.coerce_to_pyobject(env)
+
+    def yield_from_func(self, code):
+        code.globalstate.use_utility_code(UtilityCode.load_cached("GeneratorYieldFrom", "Coroutine.c"))
+        return "__Pyx_Generator_Yield_From"
+
+
+class AwaitExprNode(_YieldDelegationExprNode):
     # 'await' expression node
     #
     # arg    ExprNode   the Awaitable value to await

@@ -9561,29 +9575,34 @@ class AwaitExprNode(YieldFromExprNode):
         return "__Pyx_Coroutine_Yield_From"
 
 
+class AIterAwaitExprNode(AwaitExprNode):
+    # 'await' expression node used in async-for loops to support the pre-Py3.5.2 'aiter' protocol
+    def yield_from_func(self, code):
+        code.globalstate.use_utility_code(UtilityCode.load_cached("CoroutineAIterYieldFrom", "Coroutine.c"))
+        return "__Pyx_Coroutine_AIter_Yield_From"
+
+
 class AwaitIterNextExprNode(AwaitExprNode):
     # 'await' expression node as part of 'async for' iteration
     #
     # Breaks out of loop on StopAsyncIteration exception.
 
-    def fetch_iteration_result(self, code):
-        assert code.break_label, "AwaitIterNextExprNode outside of 'async for' loop"
+    def _generate_break(self, code):
         code.globalstate.use_utility_code(UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"))
         code.putln("PyObject* exc_type = PyErr_Occurred();")
-        code.putln("if (exc_type && likely(exc_type == __Pyx_PyExc_StopAsyncIteration ||"
-                   " PyErr_GivenExceptionMatches(exc_type, __Pyx_PyExc_StopAsyncIteration))) {")
+        code.putln("if (unlikely(exc_type && (exc_type == __Pyx_PyExc_StopAsyncIteration || ("
+                   " exc_type != PyExc_StopIteration && exc_type != PyExc_GeneratorExit &&"
+                   " __Pyx_PyErr_GivenExceptionMatches(exc_type, __Pyx_PyExc_StopAsyncIteration))))) {")
         code.putln("PyErr_Clear();")
         code.putln("break;")
         code.putln("}")
 
+    def fetch_iteration_result(self, code):
+        assert code.break_label, "AwaitIterNextExprNode outside of 'async for' loop"
+        self._generate_break(code)
         super(AwaitIterNextExprNode, self).fetch_iteration_result(code)
 
+    def generate_sent_value_handling_code(self, code, value_cname):
+        assert code.break_label, "AwaitIterNextExprNode outside of 'async for' loop"
+        code.putln("if (unlikely(!%s)) {" % value_cname)
+        self._generate_break(code)
+        # all non-break exceptions are errors, as in parent class
+        code.putln(code.error_goto(self.pos))
+        code.putln("}")
 
 
 class GlobalsExprNode(AtomicExprNode):
     type = dict_type
......
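For orientation, the AwaitIterNextExprNode/AIterAwaitExprNode changes above generate the 'async for' iteration protocol from PEP 492. Roughly, in plain Python terms (an illustrative sketch of the protocol, not the generated C; 'aiterable' and 'body' are placeholder names):

    async def async_for_loop(aiterable, body):
        iterator = type(aiterable).__aiter__(aiterable)   # pre-3.5.2 code may additionally need to await this
        while True:
            try:
                item = await type(iterator).__anext__(iterator)
            except StopAsyncIteration:
                break          # the StopAsyncIteration handling generated above
            body(item)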
@@ -717,6 +717,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
         code.putln('static const char * %s= %s;' % (Naming.cfilenm_cname, Naming.file_c_macro))
         code.putln('static const char *%s;' % Naming.filename_cname)
 
+        env.use_utility_code(UtilityCode.load_cached("FastTypeChecks", "ModuleSetupCode.c"))
         if has_np_pythran(env):
             env.use_utility_code(UtilityCode.load_cached("PythranConversion", "CppSupport.cpp"))

@@ -2151,7 +2152,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
         code.putln("%s = PyUnicode_FromStringAndSize(\"\", 0); %s" % (
             Naming.empty_unicode, code.error_goto_if_null(Naming.empty_unicode, self.pos)))
 
-        for ext_type in ('CyFunction', 'FusedFunction', 'Coroutine', 'Generator', 'StopAsyncIteration'):
+        for ext_type in ('CyFunction', 'FusedFunction', 'Coroutine', 'Generator', 'AsyncGen', 'StopAsyncIteration'):
             code.putln("#ifdef __Pyx_%s_USED" % ext_type)
             code.put_error_if_neg(self.pos, "__pyx_%s_init()" % ext_type)
             code.putln("#endif")
......
This diff is collapsed.
@@ -51,7 +51,10 @@ cdef class AlignFunctionDefinitions(CythonTransform):
 
 cdef class YieldNodeCollector(TreeVisitor):
     cdef public list yields
     cdef public list returns
+    cdef public list finallys
     cdef public bint has_return_value
+    cdef public bint has_yield
+    cdef public bint has_await
 
 cdef class MarkClosureVisitor(CythonTransform):
     cdef bint needs_closure
@@ -192,7 +192,7 @@ class PostParse(ScopeTrackingTransform):
         # unpack a lambda expression into the corresponding DefNode
         collector = YieldNodeCollector()
         collector.visitchildren(node.result_expr)
-        if collector.yields or collector.awaits or isinstance(node.result_expr, ExprNodes.YieldExprNode):
+        if collector.has_yield or collector.has_await or isinstance(node.result_expr, ExprNodes.YieldExprNode):
             body = Nodes.ExprStatNode(
                 node.result_expr.pos, expr=node.result_expr)
         else:

@@ -208,11 +208,22 @@ class PostParse(ScopeTrackingTransform):
 
     def visit_GeneratorExpressionNode(self, node):
         # unpack a generator expression into the corresponding DefNode
-        node.def_node = Nodes.DefNode(node.pos, name=node.name,
-                                      doc=None,
-                                      args=[], star_arg=None,
-                                      starstar_arg=None,
-                                      body=node.loop)
+        collector = YieldNodeCollector()
+        collector.visitchildren(node.loop)
+        node.def_node = Nodes.DefNode(
+            node.pos, name=node.name, doc=None,
+            args=[], star_arg=None, starstar_arg=None,
+            body=node.loop, is_async_def=collector.has_await)
+        self.visitchildren(node)
+        return node
+
+    def visit_ComprehensionNode(self, node):
+        # enforce local scope also in Py2 for async generators (seriously, that's a Py3.6 feature...)
+        if not node.has_local_scope:
+            collector = YieldNodeCollector()
+            collector.visitchildren(node.loop)
+            if collector.has_await:
+                node.has_local_scope = True
         self.visitchildren(node)
         return node
@@ -2457,19 +2468,23 @@ class YieldNodeCollector(TreeVisitor):
     def __init__(self):
         super(YieldNodeCollector, self).__init__()
         self.yields = []
-        self.awaits = []
         self.returns = []
+        self.finallys = []
         self.has_return_value = False
+        self.has_yield = False
+        self.has_await = False
 
     def visit_Node(self, node):
         self.visitchildren(node)
 
     def visit_YieldExprNode(self, node):
         self.yields.append(node)
+        self.has_yield = True
         self.visitchildren(node)
 
     def visit_AwaitExprNode(self, node):
-        self.awaits.append(node)
+        self.yields.append(node)
+        self.has_await = True
         self.visitchildren(node)
 
     def visit_ReturnStatNode(self, node):

@@ -2478,6 +2493,10 @@ class YieldNodeCollector(TreeVisitor):
         self.has_return_value = True
         self.returns.append(node)
 
+    def visit_TryFinallyStatNode(self, node):
+        self.visitchildren(node)
+        self.finallys.append(node)
+
     def visit_ClassDefNode(self, node):
         pass

@@ -2513,24 +2532,28 @@ class MarkClosureVisitor(CythonTransform):
         collector.visitchildren(node)
 
         if node.is_async_def:
-            if collector.yields:
-                error(collector.yields[0].pos, "'yield' not allowed in async coroutines (use 'await')")
-            yields = collector.awaits
-        elif collector.yields:
-            if collector.awaits:
-                error(collector.yields[0].pos, "'await' not allowed in generators (use 'yield')")
-            yields = collector.yields
+            coroutine_type = Nodes.AsyncGenNode if collector.has_yield else Nodes.AsyncDefNode
+            if collector.has_yield:
+                for yield_expr in collector.yields + collector.returns:
+                    yield_expr.in_async_gen = True
+        elif collector.has_await:
+            found = next(y for y in collector.yields if y.is_await)
+            error(found.pos, "'await' not allowed in generators (use 'yield')")
+            return node
+        elif collector.has_yield:
+            coroutine_type = Nodes.GeneratorDefNode
         else:
             return node
 
-        for i, yield_expr in enumerate(yields, 1):
+        for i, yield_expr in enumerate(collector.yields, 1):
             yield_expr.label_num = i
-        for retnode in collector.returns:
+        for retnode in collector.returns + collector.finallys:
             retnode.in_generator = True
 
         gbody = Nodes.GeneratorBodyDefNode(
-            pos=node.pos, name=node.name, body=node.body)
-        coroutine = (Nodes.AsyncDefNode if node.is_async_def else Nodes.GeneratorDefNode)(
+            pos=node.pos, name=node.name, body=node.body,
+            is_async_gen_body=node.is_async_def and collector.has_yield)
+        coroutine = coroutine_type(
             pos=node.pos, name=node.name, args=node.args,
             star_arg=node.star_arg, starstar_arg=node.starstar_arg,
             doc=node.doc, decorators=node.decorators,
@@ -2576,24 +2599,28 @@ class CreateClosureClasses(CythonTransform):
     def find_entries_used_in_closures(self, node):
         from_closure = []
         in_closure = []
-        for name, entry in node.local_scope.entries.items():
-            if entry.from_closure:
-                from_closure.append((name, entry))
-            elif entry.in_closure:
-                in_closure.append((name, entry))
+        for scope in node.local_scope.iter_local_scopes():
+            for name, entry in scope.entries.items():
+                if not name:
+                    continue
+                if entry.from_closure:
+                    from_closure.append((name, entry))
+                elif entry.in_closure:
+                    in_closure.append((name, entry))
         return from_closure, in_closure
 
     def create_class_from_scope(self, node, target_module_scope, inner_node=None):
         # move local variables into closure
         if node.is_generator:
-            for entry in node.local_scope.entries.values():
-                if not entry.from_closure:
-                    entry.in_closure = True
+            for scope in node.local_scope.iter_local_scopes():
+                for entry in scope.entries.values():
+                    if not entry.from_closure:
+                        entry.in_closure = True
 
         from_closure, in_closure = self.find_entries_used_in_closures(node)
         in_closure.sort()
 
-        # Now from the begining
+        # Now from the beginning
         node.needs_closure = False
         node.needs_outer_scope = False

@@ -2645,11 +2672,12 @@ class CreateClosureClasses(CythonTransform):
             is_cdef=True)
         node.needs_outer_scope = True
 
         for name, entry in in_closure:
-            closure_entry = class_scope.declare_var(pos=entry.pos,
-                                                    name=entry.name,
-                                                    cname=entry.cname,
-                                                    type=entry.type,
-                                                    is_cdef=True)
+            closure_entry = class_scope.declare_var(
+                pos=entry.pos,
+                name=entry.name if not entry.in_subscope else None,
+                cname=entry.cname,
+                type=entry.type,
+                is_cdef=True)
             if entry.is_declared_generic:
                 closure_entry.is_declared_generic = 1
             node.needs_closure = True
......
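To summarise the classification that MarkClosureVisitor now performs, here is how the function kinds map onto Python-level syntax (illustrative function names; the node class names are the ones used in the diff above):

    async def agen():       # 'async def' + 'yield'        -> Nodes.AsyncGenNode (new here, PEP 525)
        yield 1

    async def coro():       # 'async def' without 'yield'  -> Nodes.AsyncDefNode (PEP 492 coroutine)
        return 1

    def gen():              # plain 'def' + 'yield'        -> Nodes.GeneratorDefNode
        yield 1

    # A plain 'def' that contains 'await' is rejected with
    # "'await' not allowed in generators (use 'yield')".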
@@ -501,7 +501,7 @@ def p_call_parse_args(s, allow_genexp=True):
             break
         s.next()
 
-    if s.sy == 'for':
+    if s.sy in ('for', 'async'):
         if not keyword_args and not last_was_tuple_unpack:
             if len(positional_args) == 1 and len(positional_args[0]) == 1:
                 positional_args = [[p_genexp(s, positional_args[0][0])]]

@@ -1196,7 +1196,7 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw):
 # list_display  ::= "[" [listmaker] "]"
 # listmaker     ::= (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
 # comp_iter     ::= comp_for | comp_if
-# comp_for      ::= "for" expression_list "in" testlist [comp_iter]
+# comp_for      ::= ["async"] "for" expression_list "in" testlist [comp_iter]
 # comp_if       ::= "if" test [comp_iter]
 
 def p_list_maker(s):

@@ -1208,7 +1208,7 @@ def p_list_maker(s):
         return ExprNodes.ListNode(pos, args=[])
 
     expr = p_test_or_starred_expr(s)
-    if s.sy == 'for':
+    if s.sy in ('for', 'async'):
         if expr.is_starred:
             s.error("iterable unpacking cannot be used in comprehension")
         append = ExprNodes.ComprehensionAppendNode(pos, expr=expr)

@@ -1230,7 +1230,7 @@ def p_list_maker(s):
 
 def p_comp_iter(s, body):
-    if s.sy == 'for':
+    if s.sy in ('for', 'async'):
         return p_comp_for(s, body)
     elif s.sy == 'if':
         return p_comp_if(s, body)

@@ -1239,11 +1239,17 @@ def p_comp_iter(s, body):
         return body
 
 def p_comp_for(s, body):
-    # s.sy == 'for'
     pos = s.position()
-    s.next()
-    kw = p_for_bounds(s, allow_testlist=False)
-    kw.update(else_clause = None, body = p_comp_iter(s, body))
+    # [async] for ...
+    is_async = False
+    if s.sy == 'async':
+        is_async = True
+        s.next()
+
+    # s.sy == 'for'
+    s.expect('for')
+    kw = p_for_bounds(s, allow_testlist=False, is_async=is_async)
+    kw.update(else_clause=None, body=p_comp_iter(s, body), is_async=is_async)
     return Nodes.ForStatNode(pos, **kw)
 
 def p_comp_if(s, body):

@@ -1311,7 +1317,7 @@ def p_dict_or_set_maker(s):
         else:
             break
 
-    if s.sy == 'for':
+    if s.sy in ('for', 'async'):
         # dict/set comprehension
         if len(parts) == 1 and isinstance(parts[0], list) and len(parts[0]) == 1:
             item = parts[0][0]

@@ -1441,13 +1447,13 @@ def p_testlist_comp(s):
         s.next()
         exprs = p_test_or_starred_expr_list(s, expr)
         return ExprNodes.TupleNode(pos, args = exprs)
-    elif s.sy == 'for':
+    elif s.sy in ('for', 'async'):
         return p_genexp(s, expr)
     else:
         return expr
 
 def p_genexp(s, expr):
-    # s.sy == 'for'
+    # s.sy == 'async' | 'for'
     loop = p_comp_for(s, Nodes.ExprStatNode(
         expr.pos, expr = ExprNodes.YieldExprNode(expr.pos, arg=expr)))
     return ExprNodes.GeneratorExpressionNode(expr.pos, loop=loop)
......
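The parser changes above accept 'async' wherever a comprehension's 'for' can start, which is what enables PEP 530 style asynchronous comprehensions and generator expressions. A small sketch of the syntax this parses (plain Python 3.6 semantics, illustrative names):

    import asyncio

    async def arange(n):
        for i in range(n):
            yield i

    async def collect():
        squares = [x * x async for x in arange(5)]          # list comprehension with 'async for'
        evens = {x async for x in arange(5) if x % 2 == 0}  # set comprehension
        pairs = {x: x + 1 async for x in arange(5)}         # dict comprehension
        agen = (x + 1 async for x in arange(5))             # asynchronous generator expression
        return squares, evens, pairs, [y async for y in agen]

    print(asyncio.get_event_loop().run_until_complete(collect()))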
@@ -4,8 +4,9 @@
 
 from __future__ import absolute_import
 
-import copy
 import re
+import copy
+import operator
 
 try:
     import __builtin__ as builtins

@@ -88,6 +89,7 @@ class Entry(object):
 # is_arg           boolean    Is the arg of a method
 # is_local         boolean    Is a local variable
 # in_closure       boolean    Is referenced in an inner scope
+# in_subscope      boolean    Belongs to a generator expression scope
 # is_readonly      boolean    Can't be assigned to
 # func_cname       string     C func implementing Python func
 # func_modifiers   [string]   C function modifiers ('inline')

@@ -163,6 +165,7 @@ class Entry(object):
     is_local = 0
     in_closure = 0
     from_closure = 0
+    in_subscope = 0
     is_declared_generic = 0
     is_readonly = 0
     pyfunc_cname = None

@@ -299,6 +302,7 @@ class Scope(object):
     is_py_class_scope = 0
     is_c_class_scope = 0
     is_closure_scope = 0
+    is_genexpr_scope = 0
     is_passthrough = 0
     is_cpp_class_scope = 0
     is_property_scope = 0

@@ -308,6 +312,7 @@ class Scope(object):
     in_cinclude = 0
     nogil = 0
     fused_to_specific = None
+    return_type = None
 
     def __init__(self, name, outer_scope, parent_scope):
         # The outer_scope is the next scope in the lookup chain.

@@ -324,6 +329,7 @@ class Scope(object):
         self.qualified_name = EncodedString(name)
         self.scope_prefix = mangled_name
         self.entries = {}
+        self.subscopes = set()
         self.const_entries = []
         self.type_entries = []
         self.sue_entries = []

@@ -341,7 +347,6 @@ class Scope(object):
         self.obj_to_entry = {}
         self.buffer_entries = []
         self.lambda_defs = []
-        self.return_type = None
         self.id_counters = {}
 
     def __deepcopy__(self, memo):

@@ -419,6 +424,12 @@ class Scope(object):
         """ Return the module-level scope containing this scope. """
         return self.outer_scope.builtin_scope()
 
+    def iter_local_scopes(self):
+        yield self
+        if self.subscopes:
+            for scope in sorted(self.subscopes, key=operator.attrgetter('scope_prefix')):
+                yield scope
+
     def declare(self, name, cname, type, pos, visibility, shadow = 0, is_type = 0, create_wrapper = 0):
         # Create new entry, and add to dictionary if
         # name is not None. Reports a warning if already
@@ -1690,18 +1701,19 @@ class LocalScope(Scope):
         return entry
 
     def mangle_closure_cnames(self, outer_scope_cname):
-        for entry in self.entries.values():
-            if entry.from_closure:
-                cname = entry.outer_entry.cname
-                if self.is_passthrough:
-                    entry.cname = cname
-                else:
-                    if cname.startswith(Naming.cur_scope_cname):
-                        cname = cname[len(Naming.cur_scope_cname)+2:]
-                    entry.cname = "%s->%s" % (outer_scope_cname, cname)
-            elif entry.in_closure:
-                entry.original_cname = entry.cname
-                entry.cname = "%s->%s" % (Naming.cur_scope_cname, entry.cname)
+        for scope in self.iter_local_scopes():
+            for entry in scope.entries.values():
+                if entry.from_closure:
+                    cname = entry.outer_entry.cname
+                    if self.is_passthrough:
+                        entry.cname = cname
+                    else:
+                        if cname.startswith(Naming.cur_scope_cname):
+                            cname = cname[len(Naming.cur_scope_cname)+2:]
+                        entry.cname = "%s->%s" % (outer_scope_cname, cname)
+                elif entry.in_closure:
+                    entry.original_cname = entry.cname
+                    entry.cname = "%s->%s" % (Naming.cur_scope_cname, entry.cname)
 
 
 class GeneratorExpressionScope(Scope):

@@ -1709,12 +1721,19 @@ class GeneratorExpressionScope(Scope):
     to generators, these can be easily inlined in some cases, so all
     we really need is a scope that holds the loop variable(s).
     """
+    is_genexpr_scope = True
+
     def __init__(self, outer_scope):
         name = outer_scope.global_scope().next_id(Naming.genexpr_id_ref)
         Scope.__init__(self, name, outer_scope, outer_scope)
-        self.var_entries = outer_scope.var_entries  # keep declarations outside
         self.directives = outer_scope.directives
         self.genexp_prefix = "%s%d%s" % (Naming.pyrex_prefix, len(name), name)
+        while outer_scope.is_genexpr_scope:
+            outer_scope = outer_scope.outer_scope
+        outer_scope.subscopes.add(self)
 
     def mangle(self, prefix, name):
         return '%s%s' % (self.genexp_prefix, self.parent_scope.mangle(prefix, name))

@@ -1730,8 +1749,9 @@ class GeneratorExpressionScope(Scope):
         # this scope must hold its name exclusively
         cname = '%s%s' % (self.genexp_prefix, self.parent_scope.mangle(Naming.var_prefix, name or self.next_id()))
         entry = self.declare(name, cname, type, pos, visibility)
-        entry.is_variable = 1
-        entry.is_local = 1
+        entry.is_variable = True
+        entry.is_local = True
+        entry.in_subscope = True
         self.var_entries.append(entry)
         self.entries[name] = entry
         return entry
......
@@ -250,8 +250,7 @@ class MarkParallelAssignments(EnvTransform):
     def visit_YieldExprNode(self, node):
         if self.parallel_block_stack:
-            error(node.pos, "Yield not allowed in parallel sections")
+            error(node.pos, "'%s' not allowed in parallel sections" % node.expr_keyword)
         return node
 
     def visit_ReturnStatNode(self, node):
......
This diff is collapsed.
@@ -111,7 +111,7 @@ static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags) {
     {{for type_ptr, getbuffer, releasebuffer in types}}
       {{if getbuffer}}
-        if (PyObject_TypeCheck(obj, {{type_ptr}})) return {{getbuffer}}(obj, view, flags);
+        if (__Pyx_TypeCheck(obj, {{type_ptr}})) return {{getbuffer}}(obj, view, flags);
       {{endif}}
     {{endfor}}

@@ -130,7 +130,7 @@ static void __Pyx_ReleaseBuffer(Py_buffer *view) {
     {{for type_ptr, getbuffer, releasebuffer in types}}
       {{if releasebuffer}}
-        if (PyObject_TypeCheck(obj, {{type_ptr}})) { {{releasebuffer}}(obj, view); return; }
+        if (__Pyx_TypeCheck(obj, {{type_ptr}})) { {{releasebuffer}}(obj, view); return; }
       {{endif}}
     {{endfor}}
......
This diff is collapsed.
@@ -1218,7 +1218,7 @@ static PyObject* __Pyx_Method_ClassMethod(PyObject *method) {
         methoddescr_type = Py_TYPE(meth);
         Py_DECREF(meth);
     }
-    if (PyObject_TypeCheck(method, methoddescr_type)) {
+    if (__Pyx_TypeCheck(method, methoddescr_type)) {
 #endif
         // cdef classes
         PyMethodDescrObject *descr = (PyMethodDescrObject *)method;

@@ -1238,7 +1238,7 @@ static PyObject* __Pyx_Method_ClassMethod(PyObject *method) {
         return PyClassMethod_New(method);
     }
 #ifdef __Pyx_CyFunction_USED
-    else if (PyObject_TypeCheck(method, __pyx_CyFunctionType)) {
+    else if (__Pyx_TypeCheck(method, __pyx_CyFunctionType)) {
         return PyClassMethod_New(method);
     }
 #endif
......
@@ -11,7 +11,15 @@
 
 #if CYTHON_FAST_THREAD_STATE
 #define __Pyx_PyThreadState_declare  PyThreadState *$local_tstate_cname;
-#define __Pyx_PyThreadState_assign  $local_tstate_cname = PyThreadState_GET();
+#if PY_VERSION_HEX >= 0x03050000
+#define __Pyx_PyThreadState_assign  $local_tstate_cname = _PyThreadState_UncheckedGet();
+#elif PY_VERSION_HEX >= 0x03000000
+#define __Pyx_PyThreadState_assign  $local_tstate_cname = PyThreadState_Get();
+#elif PY_VERSION_HEX >= 0x02070000
+#define __Pyx_PyThreadState_assign  $local_tstate_cname = _PyThreadState_Current;
+#else
+#define __Pyx_PyThreadState_assign  $local_tstate_cname = PyThreadState_Get();
+#endif
 #else
 #define __Pyx_PyThreadState_declare
 #define __Pyx_PyThreadState_assign

@@ -31,11 +39,28 @@ static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err);
 
 /////////////// PyErrExceptionMatches ///////////////
 
 #if CYTHON_FAST_THREAD_STATE
+static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
+    Py_ssize_t i, n;
+    n = PyTuple_GET_SIZE(tuple);
+#if PY_MAJOR_VERSION >= 3
+    // the tighter subtype checking in Py3 allows faster out-of-order comparison
+    for (i=0; i<n; i++) {
+        if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
+    }
+#endif
+    for (i=0; i<n; i++) {
+        if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1;
+    }
+    return 0;
+}
+
 static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) {
     PyObject *exc_type = tstate->curexc_type;
     if (exc_type == err) return 1;
     if (unlikely(!exc_type)) return 0;
-    return PyErr_GivenExceptionMatches(exc_type, err);
+    if (unlikely(PyTuple_Check(err)))
+        return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err);
+    return __Pyx_PyErr_GivenExceptionMatches(exc_type, err);
 }
 #endif
......
@@ -26,7 +26,7 @@ static CYTHON_INLINE int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, ...
 #endif
     }
     else {
-        if (likely(PyObject_TypeCheck(obj, type))) return 1;
+        if (likely(__Pyx_TypeCheck(obj, type))) return 1;
     }
     __Pyx_RaiseArgumentTypeInvalid(name, obj, type);
     return 0;
......
@@ -326,7 +326,6 @@
   #define PySet_CheckExact(obj)        (Py_TYPE(obj) == &PySet_Type)
 #endif
 
-#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
 #define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception)
 
 #if PY_MAJOR_VERSION >= 3

@@ -387,15 +386,17 @@
     #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods
     #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)
   #else
+    #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
+  #endif
+#else
+  #define __Pyx_PyType_AsAsync(obj) NULL
+#endif
+
+#ifndef __Pyx_PyAsyncMethodsStruct
     typedef struct {
         unaryfunc am_await;
         unaryfunc am_aiter;
         unaryfunc am_anext;
     } __Pyx_PyAsyncMethodsStruct;
-    #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
-  #endif
-#else
-  #define __Pyx_PyType_AsAsync(obj) NULL
-#endif
 #endif
 
 // restrict
@@ -535,6 +536,110 @@ class __Pyx_FakeReference {
 };
/////////////// FastTypeChecks.proto ///////////////
#if CYTHON_COMPILING_IN_CPYTHON
#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type)
static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b);/*proto*/
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type);/*proto*/
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2);/*proto*/
#else
#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type)
#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2))
#endif
/////////////// FastTypeChecks ///////////////
//@requires: Exceptions.c::PyThreadStateGet
//@requires: Exceptions.c::PyErrFetchRestore
#if CYTHON_COMPILING_IN_CPYTHON
static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) {
while (a) {
a = a->tp_base;
if (a == b)
return 1;
}
return b == &PyBaseObject_Type;
}
static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) {
PyObject *mro;
if (a == b) return 1;
mro = a->tp_mro;
if (likely(mro)) {
Py_ssize_t i, n;
n = PyTuple_GET_SIZE(mro);
for (i = 0; i < n; i++) {
if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b)
return 1;
}
return 0;
}
// should only get here for incompletely initialised types, i.e. never under normal usage patterns
return __Pyx_InBases(a, b);
}
#if PY_MAJOR_VERSION == 2
static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) {
// PyObject_IsSubclass() can recurse and therefore is not safe
PyObject *exception, *value, *tb;
int res;
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
__Pyx_ErrFetch(&exception, &value, &tb);
res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0;
// This function must not fail, so print the error here (which also clears it)
if (unlikely(res == -1)) {
PyErr_WriteUnraisable(err);
res = 0;
}
if (!res) {
res = PyObject_IsSubclass(err, exc_type2);
// This function must not fail, so print the error here (which also clears it)
if (unlikely(res == -1)) {
PyErr_WriteUnraisable(err);
res = 0;
}
}
__Pyx_ErrRestore(exception, value, tb);
return res;
}
#else
static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) {
int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0;
if (!res) {
res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2);
}
return res;
}
#endif
// so far, we only call PyErr_GivenExceptionMatches() with an exception type (not instance) as first argument
// => optimise for that case
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) {
if (likely(err == exc_type)) return 1;
if (likely(PyExceptionClass_Check(err))) {
return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type);
}
return PyErr_GivenExceptionMatches(err, exc_type);
}
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) {
if (likely(err == exc_type1 || err == exc_type2)) return 1;
if (likely(PyExceptionClass_Check(err))) {
return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2);
}
return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2));
}
#endif
 /////////////// MathInitCode ///////////////
 
 #if defined(WIN32) || defined(MS_WINDOWS)
......
@@ -166,8 +166,7 @@ static CYTHON_INLINE PyObject *__Pyx_PyIter_Next2(PyObject* iterator, PyObject* defval) {
     if (defval) {
         PyObject* exc_type = PyErr_Occurred();
         if (exc_type) {
-            if (unlikely(exc_type != PyExc_StopIteration) &&
-                    !PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))
+            if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)))
                 return NULL;
             PyErr_Clear();
         }

@@ -194,7 +193,7 @@ static CYTHON_INLINE int __Pyx_IterFinish(void) {
     PyThreadState *tstate = PyThreadState_GET();
     PyObject* exc_type = tstate->curexc_type;
     if (unlikely(exc_type)) {
-        if (likely(exc_type == PyExc_StopIteration) || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)) {
+        if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) {
             PyObject *exc_value, *exc_tb;
             exc_value = tstate->curexc_value;
             exc_tb = tstate->curexc_traceback;

@@ -917,7 +916,7 @@ static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) {
         PyErr_SetString(PyExc_SystemError, "Missing type object");
         return 0;
     }
-    if (likely(PyObject_TypeCheck(obj, type)))
+    if (likely(__Pyx_TypeCheck(obj, type)))
         return 1;
     PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s",
                  Py_TYPE(obj)->tp_name, type->tp_name);

@@ -1123,7 +1122,7 @@ static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) {
 #if CYTHON_COMPILING_IN_CPYTHON
 #if PY_MAJOR_VERSION >= 3
     // method dscriptor type isn't exported in Py2.x, cannot easily check the type there
-    if (likely(PyObject_TypeCheck(method, &PyMethodDescr_Type)))
+    if (likely(__Pyx_TypeCheck(method, &PyMethodDescr_Type)))
 #endif
     {
         PyMethodDescrObject *descr = (PyMethodDescrObject*) method;

@@ -1269,6 +1268,7 @@ bad:
 
 /////////////// PyObjectCallMethod1.proto ///////////////
 
 static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg); /*proto*/
+static PyObject* __Pyx__PyObject_CallMethod1(PyObject* method, PyObject* arg); /*proto*/
 
 /////////////// PyObjectCallMethod1 ///////////////
 //@requires: PyObjectGetAttrStr

@@ -1276,10 +1276,8 @@ static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg)
 //@requires: PyFunctionFastCall
 //@requires: PyCFunctionFastCall
 
-static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg) {
-    PyObject *method, *result = NULL;
-    method = __Pyx_PyObject_GetAttrStr(obj, method_name);
-    if (unlikely(!method)) goto done;
+static PyObject* __Pyx__PyObject_CallMethod1(PyObject* method, PyObject* arg) {
+    PyObject *result = NULL;
 #if CYTHON_UNPACK_METHODS
     if (likely(PyMethod_Check(method))) {
         PyObject *self = PyMethod_GET_SELF(method);

@@ -1307,7 +1305,6 @@ static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg)
             Py_INCREF(arg);
             PyTuple_SET_ITEM(args, 1, arg);
             Py_INCREF(function);
-            Py_DECREF(method); method = NULL;
             result = __Pyx_PyObject_Call(function, args, NULL);
             Py_DECREF(args);
             Py_DECREF(function);

@@ -1316,6 +1313,15 @@ static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg)
     }
 #endif
     result = __Pyx_PyObject_CallOneArg(method, arg);
+done:
+    return result;
+}
+
+static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg) {
+    PyObject *method, *result = NULL;
+    method = __Pyx_PyObject_GetAttrStr(obj, method_name);
+    if (unlikely(!method)) goto done;
+    result = __Pyx__PyObject_CallMethod1(method, arg);
 done:
     Py_XDECREF(method);
     return result;

@@ -1747,7 +1753,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) {
     }
 #endif
 #ifdef __Pyx_CyFunction_USED
-    if (likely(PyCFunction_Check(func) || PyObject_TypeCheck(func, __pyx_CyFunctionType))) {
+    if (likely(PyCFunction_Check(func) || __Pyx_TypeCheck(func, __pyx_CyFunctionType))) {
 #else
     if (likely(PyCFunction_Check(func))) {
 #endif
......
@@ -1837,6 +1837,9 @@ def main():
     parser.add_option("--exit-ok", dest="exit_ok", default=False,
                       action="store_true",
                       help="exit without error code even on test failures")
+    parser.add_option("--failfast", dest="failfast", default=False,
+                      action="store_true",
+                      help="stop on first failure or error")
     parser.add_option("--root-dir", dest="root_dir", default=os.path.join(DISTDIR, 'tests'),
                       help="working directory")
     parser.add_option("--work-dir", dest="work_dir", default=os.path.join(os.getcwd(), 'TEST_TMP'),

@@ -2137,8 +2140,16 @@ def runtests(options, cmd_args, coverage=None):
             except OSError: pass  # concurrency issue?
         test_runner = XMLTestRunner(output=xml_output_dir,
                                     verbose=options.verbosity > 0)
+        if options.failfast:
+            sys.stderr.write("--failfast not supported with XML runner\n")
     else:
-        test_runner = unittest.TextTestRunner(verbosity=options.verbosity)
+        text_runner_options = {}
+        if options.failfast:
+            if sys.version_info < (2, 7):
+                sys.stderr.write("--failfast not supported with Python < 2.7\n")
+            else:
+                text_runner_options['failfast'] = True
+        test_runner = unittest.TextTestRunner(verbosity=options.verbosity, **text_runner_options)
 
     if options.pyximport_py:
         from pyximport import pyximport
......
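For reference, the new --failfast flag simply forwards to unittest's failfast option (available since Python 2.7); a minimal illustration of the underlying API:

    import unittest

    # equivalent of what runtests.py now passes through when --failfast is given
    runner = unittest.TextTestRunner(verbosity=2, failfast=True)  # stop after the first failure or error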
@@ -168,8 +168,8 @@ e_cython_parallel.pyx:55:9: local variable 'y' referenced before assignment
 e_cython_parallel.pyx:60:6: Reduction operator '*' is inconsistent with previous reduction operator '+'
 e_cython_parallel.pyx:62:36: cython.parallel.parallel() does not take positional arguments
 e_cython_parallel.pyx:65:36: Invalid keyword argument: invalid
-e_cython_parallel.pyx:73:12: Yield not allowed in parallel sections
-e_cython_parallel.pyx:77:16: Yield not allowed in parallel sections
+e_cython_parallel.pyx:73:12: 'yield' not allowed in parallel sections
+e_cython_parallel.pyx:77:16: 'yield' not allowed in parallel sections
 e_cython_parallel.pyx:97:19: Cannot assign to private of outer parallel block
 e_cython_parallel.pyx:98:19: Cannot assign to private of outer parallel block
 e_cython_parallel.pyx:104:6: Reductions not allowed for parallel blocks
......
# mode: error
# tag: pep492, async
async def genexpr(it):
return (await x for x in it)
async def listcomp(it):
return [await x for x in it]
async def setcomp(it):
return {await x for x in it}
async def dictcomp(it):
return {await x:x+1 for x in it}
# NOTE: CPython doesn't allow comprehensions either
_ERRORS = """
5:12: 'await' not allowed in generators (use 'yield')
5:12: 'await' not supported here
"""
# mode: error
# tag: pep492, async
async def foo():
[i async for i in els]
_ERRORS = """
5:7: Expected ']', found 'async'
"""
# mode: error
# tag: pep492, async
async def foo():
yield
_ERRORS = """
5:4: 'yield' not allowed in async coroutines (use 'await')
5:4: 'yield' not supported here
"""
@@ -6,5 +6,4 @@ async def foo():
 
 _ERRORS = """
 5:4: 'yield from' not supported here
-5:4: 'yield' not allowed in async coroutines (use 'await')
 """
@@ -175,7 +175,7 @@ def test_broken_anext():
     >>> testfunc = test_broken_anext()
     >>> try: run_async(testfunc())
     ... except TypeError as exc:
-    ...     assert ' int ' in str(exc)
+    ...     assert ' int' in str(exc)
     ... else:
     ...     print("NOT RAISED!")
     """

@@ -279,9 +279,10 @@ def test_with_for():
     print(I[0])
 
 
-cdef class AI_old:
-    async def __aiter__(self):
-        1/0
+# old-style pre-3.5.2 AIter protocol - no longer supported
+#cdef class AI_old:
+#    async def __aiter__(self):
+#        1/0
 
 
 cdef class AI_new:

@@ -291,9 +292,9 @@ cdef class AI_new:
 
 def test_aiter_raises(AI):
     """
-    >>> test_aiter_raises(AI_old)
-    RAISED
-    0
+    #>>> test_aiter_raises(AI_old)
+    #RAISED
+    #0
     >>> test_aiter_raises(AI_new)
     RAISED
     0
......
@@ -329,6 +329,7 @@ def loop_over_unicode_literal():
         assert uchar in 'abcdefg'
     return cython.typeof(uchar)
 
 def list_comp():
     """
     >>> list_comp()

@@ -339,6 +340,28 @@ def list_comp():
     assert x == 'abc' # don't leak in Py3 code
     return result
def list_comp_iterable(it):
"""
>>> list_comp_iterable([])
[]
>>> list_comp_iterable([0])
[0]
>>> list_comp_iterable([1])
[]
>>> list_comp_iterable([0, 1])
[0]
>>> list_comp_iterable([2])
[4]
>>> list_comp_iterable(range(5))
[0, 4, 8]
"""
x = 'abc'
result = [x*2 for x in it if x % 2 == 0]
assert x == 'abc' # don't leak in Py3 code
return result
 def list_comp_with_lambda():
     """
     >>> list_comp_with_lambda()
......
@@ -97,6 +97,10 @@ def listcomp_as_condition(sequence):
 
 @cython.test_assert_path_exists("//ComprehensionNode")
 def sorted_listcomp(sequence):
     """
+    >>> sorted_listcomp([])
+    []
+    >>> sorted_listcomp([1])
+    [2]
     >>> sorted_listcomp([3,2,4])
     [3, 4, 5]
     """
......
This diff is collapsed.
This diff is collapsed.
@@ -1177,7 +1177,7 @@ class GrammarTests(unittest.TestCase):
         class Done(Exception): pass
 
         class AIter:
-            async def __aiter__(self):
+            def __aiter__(self):
                 return self
             async def __anext__(self):
                 raise StopAsyncIteration
......