Boxiang Sun / cython / Commits

Commit 18691f38, authored May 29, 2015 by Stefan Behnel
Merge branch PEP492

Parents: 8c737902, 734ee793
Showing 29 changed files with 3698 additions and 158 deletions (+3698 / -158):

  CHANGES.rst                                  +6     -2
  Cython/Compiler/Builtin.py                   +7     -0
  Cython/Compiler/Code.py                      +10    -3
  Cython/Compiler/ExprNodes.py                 +168   -38
  Cython/Compiler/FlowControl.py               +6     -0
  Cython/Compiler/ModuleNode.py                +13    -22
  Cython/Compiler/Nodes.py                     +76    -26
  Cython/Compiler/ParseTreeTransforms.py       +43    -23
  Cython/Compiler/Parsing.pxd                  +7     -6
  Cython/Compiler/Parsing.py                   +90    -30
  Cython/Compiler/Pipeline.py                  +1     -1
  Cython/Compiler/Scanning.pxd                 +3     -0
  Cython/Compiler/Scanning.py                  +16    -0
  Cython/Compiler/TypeSlots.py                 +17    -3
  Cython/Parser/Grammar                        +4     -2
  Cython/Utility/Coroutine.c                   +1772  -0
  Cython/Utility/CythonFunction.c              +2     -2
  Cython/Utility/ModuleSetupCode.c             +16    -0
  tests/errors/pep492_badsyntax_async1.pyx     +10    -0
  tests/errors/pep492_badsyntax_async2.pyx     +11    -0
  tests/errors/pep492_badsyntax_async3.pyx     +9     -0
  tests/errors/pep492_badsyntax_async4.pyx     +9     -0
  tests/errors/pep492_badsyntax_async5.pyx     +9     -0
  tests/errors/pep492_badsyntax_async6.pyx     +10    -0
  tests/errors/pep492_badsyntax_async7.pyx     +10    -0
  tests/errors/pep492_badsyntax_async8.pyx     +9     -0
  tests/errors/pep492_badsyntax_async9.pyx     +9     -0
  tests/run/async_iter_pep492.pyx              +298   -0
  tests/run/test_coroutines_pep492.pyx         +1057  -0
CHANGES.rst

@@ -8,6 +8,12 @@ Latest changes
 Features added
 --------------

+* PEP 492 (async/await) was implemented.
+  See https://www.python.org/dev/peps/pep-0492/
+
+* PEP 448 (Additional Unpacking Generalizations) was implemented.
+  See https://www.python.org/dev/peps/pep-0448/
+
 * Support for coverage.py 4.0+ can be enabled by adding the plugin
   "Cython.Coverage" to the ".coveragerc" config file.

@@ -15,8 +21,6 @@ Features added
 * Tracing is supported in ``nogil`` functions/sections and module init code.

-* PEP 448 (Additional Unpacking Generalizations) was implemented.
-
 * When generators are used in a Cython module and the module imports the
   modules "inspect" and/or "asyncio", Cython enables interoperability by
   patching these modules to recognise Cython's internal generator type.
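For orientation, here is a hedged sketch of the PEP 492 syntax that this merge enables in Cython code; the names process, source, lock and transform are illustrative placeholders, not part of the commit:

    # Hypothetical module using the newly supported constructs.
    async def process(source, lock):
        # 'async with' awaits lock.__aenter__() and lock.__aexit__()
        async with lock:
            total = 0
            # 'async for' drives __aiter__()/__anext__() until StopAsyncIteration
            async for item in source:
                total += await transform(item)   # 'await' suspends the coroutine
            return total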
Cython/Compiler/Builtin.py

@@ -398,9 +398,16 @@ def init_builtins():
     init_builtin_structs()
     init_builtin_types()
     init_builtin_funcs()

     builtin_scope.declare_var(
         '__debug__', PyrexTypes.c_const_type(PyrexTypes.c_bint_type),
         pos=None, cname='(!Py_OptimizeFlag)', is_cdef=True)

+    entry = builtin_scope.declare_var(
+        'StopAsyncIteration', PyrexTypes.py_object_type,
+        pos=None, cname='__Pyx_PyExc_StopAsyncIteration')
+    entry.utility_code = UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c")
+
     global list_type, tuple_type, dict_type, set_type, frozenset_type
     global bytes_type, str_type, unicode_type, basestring_type, slice_type
     global float_type, bool_type, type_type, complex_type, bytearray_type
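StopAsyncIteration is registered as a known builtin here (with a C-level fallback name supplied by Coroutine.c) because async iterators raise it to end an 'async for' loop. A minimal sketch of such an iterator, following PEP 492; Counter is a placeholder name, not code from this commit:

    class Counter:
        # counts up to n; raising StopAsyncIteration terminates 'async for'
        def __init__(self, n):
            self.i, self.n = 0, n
        def __aiter__(self):
            return self
        async def __anext__(self):
            if self.i >= self.n:
                raise StopAsyncIteration
            self.i += 1
            return self.i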
Cython/Compiler/Code.py

@@ -49,7 +49,7 @@ non_portable_builtins_map = {
     'basestring'  : ('PY_MAJOR_VERSION >= 3', 'str'),
     'xrange'      : ('PY_MAJOR_VERSION >= 3', 'range'),
     'raw_input'   : ('PY_MAJOR_VERSION >= 3', 'input'),
-    }
+}

 basicsize_builtins_map = {
     # builtins whose type has a different tp_basicsize than sizeof(...)

@@ -63,6 +63,13 @@ uncachable_builtins = [
     '_',  # e.g. gettext
 ]

+special_py_methods = set([
+    '__cinit__', '__dealloc__', '__richcmp__', '__next__',
+    '__await__', '__aiter__', '__anext__',
+    '__getreadbuffer__', '__getwritebuffer__', '__getsegcount__',
+    '__getcharbuffer__', '__getbuffer__', '__releasebuffer__'
+])
+
 modifier_output_mapper = {
     'inline': 'CYTHON_INLINE'
 }.get

@@ -454,7 +461,7 @@ class UtilityCode(UtilityCodeBase):
             '"%s\\n"\n' % line if not line.endswith('\\') or line.endswith('\\\\') else '"%s"\n' % line[:-1]
             for line in content.splitlines())
-        impl = re.sub(r'CSTRING\(\s*"""([^"]+|"[^"])"""\s*\)', split_string, impl)
+        impl = re.sub(r'CSTRING\(\s*"""([^"]*(?:"[^"]+)*)"""\s*\)', split_string, impl)
         assert 'CSTRING(' not in impl
         return impl

@@ -1999,7 +2006,7 @@ class CCodeWriter(object):
     def put_pymethoddef(self, entry, term, allow_skip=True):
         if entry.is_special or entry.name == '__getattribute__':
-            if entry.name not in ['__cinit__', '__dealloc__', '__richcmp__', '__next__',
-                                  '__getreadbuffer__', '__getwritebuffer__', '__getsegcount__',
-                                  '__getcharbuffer__', '__getbuffer__', '__releasebuffer__']:
+            if entry.name not in special_py_methods:
                 if entry.name == '__getattr__' and not self.globalstate.directives['fast_getattr']:
                     pass
                     # Python's typeobject.c will automatically fill in our slot
Cython/Compiler/ExprNodes.py

@@ -2298,6 +2298,7 @@ class IteratorNode(ExprNode):
     counter_cname = None
     cpp_iterator_cname = None
     reversed = False    # currently only used for list/tuple types (see Optimize.py)
+    is_async = False

     subexprs = ['sequence']

@@ -2311,8 +2312,7 @@ class IteratorNode(ExprNode):
             self.analyse_cpp_types(env)
         else:
             self.sequence = self.sequence.coerce_to_pyobject(env)
-            if self.sequence.type is list_type or \
-                    self.sequence.type is tuple_type:
+            if self.sequence.type in (list_type, tuple_type):
                 self.sequence = self.sequence.as_none_safe_node("'NoneType' object is not iterable")
         self.is_temp = 1
         return self

@@ -2400,8 +2400,8 @@ class IteratorNode(ExprNode):
             return
         if sequence_type.is_array or sequence_type.is_ptr:
             raise InternalError("for in carray slice not transformed")
-        is_builtin_sequence = sequence_type is list_type or \
-                              sequence_type is tuple_type
+
+        is_builtin_sequence = sequence_type in (list_type, tuple_type)
         if not is_builtin_sequence:
             # reversed() not currently optimised (see Optimize.py)
             assert not self.reversed, "internal error: reversed() only implemented for list/tuple objects"

@@ -2411,6 +2411,7 @@ class IteratorNode(ExprNode):
                 "if (likely(PyList_CheckExact(%s)) || PyTuple_CheckExact(%s)) {" % (
                     self.sequence.py_result(),
                     self.sequence.py_result()))
+
         if is_builtin_sequence or self.may_be_a_sequence:
             self.counter_cname = code.funcstate.allocate_temp(
                 PyrexTypes.c_py_ssize_t_type, manage_ref=False)

@@ -2421,25 +2422,25 @@ class IteratorNode(ExprNode):
                 init_value = 'PyTuple_GET_SIZE(%s) - 1' % self.result()
             else:
                 init_value = '0'
             code.putln("%s = %s; __Pyx_INCREF(%s); %s = %s;" % (
                 self.result(),
                 self.sequence.py_result(),
                 self.result(),
                 self.counter_cname,
                 init_value))
         if not is_builtin_sequence:
             self.iter_func_ptr = code.funcstate.allocate_temp(self._func_iternext_type, manage_ref=False)
             if self.may_be_a_sequence:
                 code.putln("%s = NULL;" % self.iter_func_ptr)
                 code.putln("} else {")
                 code.put("%s = -1; " % self.counter_cname)
             code.putln("%s = PyObject_GetIter(%s); %s" % (
                 self.result(),
                 self.sequence.py_result(),
                 code.error_goto_if_null(self.result(), self.pos)))
             code.put_gotref(self.py_result())
             # PyObject_GetIter() fails if "tp_iternext" is not set, but the check below
             # makes it visible to the C compiler that the pointer really isn't NULL, so that
             # it can distinguish between the special cases and the generic case

@@ -2553,7 +2554,7 @@ class IteratorNode(ExprNode):
 class NextNode(AtomicExprNode):
     #  Used as part of for statement implementation.
-    #  Implements result = iterator.next()
+    #  Implements result = next(iterator)
     #  Created during analyse_types phase.
     #  The iterator is not owned by this node.
     #

@@ -2563,10 +2564,14 @@ class NextNode(AtomicExprNode):
         AtomicExprNode.__init__(self, iterator.pos)
         self.iterator = iterator

+    def nogil_check(self, env):
+        # ignore - errors (if any) are already handled by IteratorNode
+        pass
+
     def type_dependencies(self, env):
         return self.iterator.type_dependencies(env)

     def infer_type(self, env, iterator_type=None):
         if iterator_type is None:
             iterator_type = self.iterator.infer_type(env)
         if iterator_type.is_ptr or iterator_type.is_array:

@@ -2596,18 +2601,84 @@ class NextNode(AtomicExprNode):
         self.iterator.generate_iter_next_result_code(self.result(), code)


+class AsyncIteratorNode(ExprNode):
+    #  Used as part of 'async for' statement implementation.
+    #
+    #  Implements result = sequence.__aiter__()
+    #
+    #  sequence   ExprNode
+
+    subexprs = ['sequence']
+
+    is_async = True
+    type = py_object_type
+    is_temp = 1
+
+    def infer_type(self, env):
+        return py_object_type
+
+    def analyse_types(self, env):
+        self.sequence = self.sequence.analyse_types(env)
+        if not self.sequence.type.is_pyobject:
+            error(self.pos, "async for loops not allowed on C/C++ types")
+            self.sequence = self.sequence.coerce_to_pyobject(env)
+        return self
+
+    def generate_result_code(self, code):
+        code.globalstate.use_utility_code(UtilityCode.load_cached("AsyncIter", "Coroutine.c"))
+        code.putln("%s = __Pyx_Coroutine_GetAsyncIter(%s); %s" % (
+            self.result(),
+            self.sequence.py_result(),
+            code.error_goto_if_null(self.result(), self.pos)))
+        code.put_gotref(self.result())
+
+
+class AsyncNextNode(AtomicExprNode):
+    #  Used as part of 'async for' statement implementation.
+    #  Implements result = iterator.__anext__()
+    #  Created during analyse_types phase.
+    #  The iterator is not owned by this node.
+    #
+    #  iterator   IteratorNode
+
+    type = py_object_type
+    is_temp = 1
+
+    def __init__(self, iterator):
+        AtomicExprNode.__init__(self, iterator.pos)
+        self.iterator = iterator
+
+    def infer_type(self, env):
+        return py_object_type
+
+    def analyse_types(self, env):
+        return self
+
+    def generate_result_code(self, code):
+        code.globalstate.use_utility_code(UtilityCode.load_cached("AsyncIter", "Coroutine.c"))
+        code.putln("%s = __Pyx_Coroutine_AsyncIterNext(%s); %s" % (
+            self.result(),
+            self.iterator.py_result(),
+            code.error_goto_if_null(self.result(), self.pos)))
+        code.put_gotref(self.result())
+
+
 class WithExitCallNode(ExprNode):
     # The __exit__() call of a 'with' statement.  Used in both the
     # except and finally clauses.

     # with_stat  WithStatNode                the surrounding 'with' statement
     # args       TupleNode or ResultStatNode the exception info tuple
+    # await      AwaitExprNode               the await expression of an 'async with' statement

-    subexprs = ['args']
+    subexprs = ['args', 'await']
     test_if_run = True
+    await = None

     def analyse_types(self, env):
         self.args = self.args.analyse_types(env)
+        if self.await:
+            self.await = self.await.analyse_types(env)
         self.type = PyrexTypes.c_bint_type
         self.is_temp = True
         return self

@@ -2633,6 +2704,14 @@ class WithExitCallNode(ExprNode):
         code.putln(code.error_goto_if_null(result_var, self.pos))
         code.put_gotref(result_var)
+
+        if self.await:
+            # FIXME: result_var temp currently leaks into the closure
+            self.await.generate_evaluation_code(code, source_cname=result_var, decref_source=True)
+            code.putln("%s = %s;" % (result_var, self.await.py_result()))
+            self.await.generate_post_assignment_code(code)
+            self.await.free_temps(code)
+
         if self.result_is_used:
             self.allocate_temp_result(code)
         code.putln("%s = __Pyx_PyObject_IsTrue(%s);" % (self.result(), result_var))

@@ -8593,10 +8672,12 @@ class YieldExprNode(ExprNode):
     type = py_object_type
     label_num = 0
     is_yield_from = False
+    is_await = False
+    expr_keyword = 'yield'

     def analyse_types(self, env):
         if not self.label_num:
-            error(self.pos, "'yield' not supported here")
+            error(self.pos, "'%s' not supported here" % self.expr_keyword)
         self.is_temp = 1
         if self.arg is not None:
             self.arg = self.arg.analyse_types(env)

@@ -8661,6 +8742,7 @@ class YieldExprNode(ExprNode):
 class YieldFromExprNode(YieldExprNode):
     # "yield from GEN" expression
     is_yield_from = True
+    expr_keyword = 'yield from'

     def coerce_yield_argument(self, env):
         if not self.arg.type.is_string:

@@ -8668,16 +8750,23 @@ class YieldFromExprNode(YieldExprNode):
                 error(self.pos, "yielding from non-Python object not supported")
             self.arg = self.arg.coerce_to_pyobject(env)

-    def generate_evaluation_code(self, code):
-        code.globalstate.use_utility_code(UtilityCode.load_cached("YieldFrom", "Generator.c"))
+    def yield_from_func(self, code):
+        code.globalstate.use_utility_code(UtilityCode.load_cached("GeneratorYieldFrom", "Coroutine.c"))
+        return "__Pyx_Generator_Yield_From"

-        self.arg.generate_evaluation_code(code)
-        code.putln("%s = __Pyx_Generator_Yield_From(%s, %s);" % (
+    def generate_evaluation_code(self, code, source_cname=None, decref_source=False):
+        if source_cname is None:
+            self.arg.generate_evaluation_code(code)
+        code.putln("%s = %s(%s, %s);" % (
             Naming.retval_cname,
+            self.yield_from_func(code),
             Naming.generator_cname,
-            self.arg.result_as(py_object_type)))
-        self.arg.generate_disposal_code(code)
-        self.arg.free_temps(code)
+            self.arg.py_result() if source_cname is None else source_cname))
+        if source_cname is None:
+            self.arg.generate_disposal_code(code)
+            self.arg.free_temps(code)
+        elif decref_source:
+            code.put_decref_clear(source_cname, py_object_type)
         code.put_xgotref(Naming.retval_cname)
         code.putln("if (likely(%s)) {" % Naming.retval_cname)

@@ -8685,21 +8774,62 @@ class YieldFromExprNode(YieldExprNode):
         code.putln("} else {")
         # either error or sub-generator has normally terminated: return value => node result
         if self.result_is_used:
-            # YieldExprNode has allocated the result temp for us
-            code.putln("%s = NULL;" % self.result())
-            code.putln("if (unlikely(__Pyx_PyGen_FetchStopIterationValue(&%s) < 0)) %s" % (
-                self.result(), code.error_goto(self.pos)))
-            code.put_gotref(self.result())
+            self.fetch_iteration_result(code)
         else:
-            code.putln("PyObject* exc_type = PyErr_Occurred();")
-            code.putln("if (exc_type) {")
-            code.putln("if (likely(exc_type == PyExc_StopIteration ||"
-                       " PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();")
-            code.putln("else %s" % code.error_goto(self.pos))
-            code.putln("}")
+            self.handle_iteration_exception(code)
         code.putln("}")

+    def fetch_iteration_result(self, code):
+        # YieldExprNode has allocated the result temp for us
+        code.putln("%s = NULL;" % self.result())
+        code.put_error_if_neg(self.pos, "__Pyx_PyGen_FetchStopIterationValue(&%s)" % self.result())
+        code.put_gotref(self.result())
+
+    def handle_iteration_exception(self, code):
+        code.putln("PyObject* exc_type = PyErr_Occurred();")
+        code.putln("if (exc_type) {")
+        code.putln("if (likely(exc_type == PyExc_StopIteration ||"
+                   " PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();")
+        code.putln("else %s" % code.error_goto(self.pos))
+        code.putln("}")
+
+
+class AwaitExprNode(YieldFromExprNode):
+    # 'await' expression node
+    #
+    # arg         ExprNode   the Awaitable value to await
+    # label_num   integer    yield label number
+
+    is_await = True
+    expr_keyword = 'await'
+
+    def coerce_yield_argument(self, env):
+        if self.arg is not None:
+            # FIXME: use same check as in YieldFromExprNode.coerce_yield_argument() ?
+            self.arg = self.arg.coerce_to_pyobject(env)
+
+    def yield_from_func(self, code):
+        code.globalstate.use_utility_code(UtilityCode.load_cached("CoroutineYieldFrom", "Coroutine.c"))
+        return "__Pyx_Coroutine_Yield_From"
+
+
+class AwaitIterNextExprNode(AwaitExprNode):
+    # 'await' expression node as part of 'async for' iteration
+    #
+    # Breaks out of loop on StopAsyncIteration exception.
+
+    def fetch_iteration_result(self, code):
+        assert code.break_label, "AwaitIterNextExprNode outside of 'async for' loop"
+        code.globalstate.use_utility_code(UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"))
+        code.putln("PyObject* exc_type = PyErr_Occurred();")
+        code.putln("if (exc_type && likely(exc_type == __Pyx_PyExc_StopAsyncIteration ||"
+                   " PyErr_GivenExceptionMatches(exc_type, __Pyx_PyExc_StopAsyncIteration))) {")
+        code.putln("PyErr_Clear();")
+        code.putln("break;")
+        code.putln("}")
+        super(AwaitIterNextExprNode, self).fetch_iteration_result(code)
+
+
 class GlobalsExprNode(AtomicExprNode):
     type = dict_type
     is_temp = 1
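The new AsyncIteratorNode, AsyncNextNode and AwaitIterNextExprNode emit calls to __Pyx_Coroutine_GetAsyncIter() and __Pyx_Coroutine_AsyncIterNext() and turn StopAsyncIteration into a loop exit. As a rough orientation, a pure-Python equivalent of that expansion per PEP 492 (simplified; run_async_for and body are placeholder names, not part of the compiler):

    async def run_async_for(seq, body):
        iterator = type(seq).__aiter__(seq)                        # AsyncIteratorNode
        while True:
            try:
                item = await type(iterator).__anext__(iterator)    # await around AsyncNextNode
            except StopAsyncIteration:                             # AwaitIterNextExprNode emits 'break' here
                break
            body(item)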
Cython/Compiler/FlowControl.py

@@ -991,6 +991,9 @@ class ControlFlowAnalysis(CythonTransform):
             self.mark_assignment(target, node.item)

+    def visit_AsyncForStatNode(self, node):
+        return self.visit_ForInStatNode(node)
+
     def visit_ForInStatNode(self, node):
         condition_block = self.flow.nextblock()
         next_block = self.flow.newblock()

@@ -1002,6 +1005,9 @@ class ControlFlowAnalysis(CythonTransform):
         if isinstance(node, Nodes.ForInStatNode):
             self.mark_forloop_target(node)
+        elif isinstance(node, Nodes.AsyncForStatNode):
+            # not entirely correct, but good enough for now
+            self.mark_assignment(node.target, node.item)
         else:
             # Parallel
             self.mark_assignment(node.target)
Cython/Compiler/ModuleNode.py

@@ -2064,24 +2064,17 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
         code.put_setup_refcount_context(header3)

         env.use_utility_code(UtilityCode.load("CheckBinaryVersion", "ModuleSetupCode.c"))
-        code.putln("if ( __Pyx_check_binary_version() < 0) %s" % code.error_goto(self.pos))
+        code.put_error_if_neg(self.pos, "__Pyx_check_binary_version()")

         code.putln("%s = PyTuple_New(0); %s" % (Naming.empty_tuple, code.error_goto_if_null(Naming.empty_tuple, self.pos)))
         code.putln("%s = PyBytes_FromStringAndSize(\"\", 0); %s" % (Naming.empty_bytes, code.error_goto_if_null(Naming.empty_bytes, self.pos)))

-        code.putln("#ifdef __Pyx_CyFunction_USED")
-        code.putln("if (__Pyx_CyFunction_init() < 0) %s" % code.error_goto(self.pos))
-        code.putln("#endif")
-
-        code.putln("#ifdef __Pyx_FusedFunction_USED")
-        code.putln("if (__pyx_FusedFunction_init() < 0) %s" % code.error_goto(self.pos))
-        code.putln("#endif")
-
-        code.putln("#ifdef __Pyx_Generator_USED")
-        code.putln("if (__pyx_Generator_init() < 0) %s" % code.error_goto(self.pos))
-        code.putln("#endif")
+        for ext_type in ('CyFunction', 'FusedFunction', 'Coroutine', 'Generator', 'StopAsyncIteration'):
+            code.putln("#ifdef __Pyx_%s_USED" % ext_type)
+            code.put_error_if_neg(self.pos, "__pyx_%s_init()" % ext_type)
+            code.putln("#endif")

         code.putln("/*--- Library function declarations ---*/")
         env.generate_library_function_declarations(code)

@@ -2097,20 +2090,18 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
         self.generate_module_creation_code(env, code)

         code.putln("/*--- Initialize various global constants etc. ---*/")
-        code.putln(code.error_goto_if_neg("__Pyx_InitGlobals()", self.pos))
+        code.put_error_if_neg(self.pos, "__Pyx_InitGlobals()")

         code.putln("#if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT)")
-        code.putln("if (__Pyx_init_sys_getdefaultencoding_params() < 0) %s" % code.error_goto(self.pos))
+        code.put_error_if_neg(self.pos, "__Pyx_init_sys_getdefaultencoding_params()")
         code.putln("#endif")

         __main__name = code.globalstate.get_py_string_const(EncodedString("__main__"), identifier=True)
         code.putln("if (%s%s) {" % (Naming.module_is_main, self.full_module_name.replace('.', '__')))
-        code.putln('if (PyObject_SetAttrString(%s, "__name__", %s) < 0) %s;' % (env.module_cname, __main__name.cname, code.error_goto(self.pos)))
+        code.put_error_if_neg(self.pos, 'PyObject_SetAttrString(%s, "__name__", %s)' % (env.module_cname, __main__name.cname))
         code.putln("}")

         # set up __file__ and __path__, then add the module to sys.modules

@@ -2118,10 +2109,10 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
         if Options.cache_builtins:
             code.putln("/*--- Builtin init code ---*/")
-            code.putln(code.error_goto_if_neg("__Pyx_InitCachedBuiltins()", self.pos))
+            code.put_error_if_neg(self.pos, "__Pyx_InitCachedBuiltins()")

         code.putln("/*--- Constants init code ---*/")
-        code.putln(code.error_goto_if_neg("__Pyx_InitCachedConstants()", self.pos))
+        code.put_error_if_neg(self.pos, "__Pyx_InitCachedConstants()")

         code.putln("/*--- Global init code ---*/")
         self.generate_global_init_code(env, code)

@@ -2151,7 +2142,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
         code.putln("/*--- Execution code ---*/")
         code.mark_pos(None)

-        code.putln("#ifdef __Pyx_Generator_USED")
+        code.putln("#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)")
         code.put_error_if_neg(self.pos, "__Pyx_patch_abc()")
         code.putln("#endif")
Cython/Compiler/Nodes.py

@@ -1574,9 +1574,11 @@ class FuncDefNode(StatNode, BlockNode):
     #  pymethdef_required boolean     Force Python method struct generation
     #  directive_locals { string : ExprNode } locals defined by cython.locals(...)
     #  directive_returns [ExprNode] type defined by cython.returns(...)
     #  star_arg      PyArgDeclNode or None  * argument
     #  starstar_arg  PyArgDeclNode or None  ** argument
+    #
+    #  is_async_def  boolean          is a Coroutine function
     #
     #  has_fused_arguments  boolean
     #       Whether this cdef function has fused parameters. This is needed
     #       by AnalyseDeclarationsTransform, so it can replace CFuncDefNodes

@@ -1588,6 +1590,7 @@ class FuncDefNode(StatNode, BlockNode):
     pymethdef_required = False
     is_generator = False
     is_generator_body = False
+    is_async_def = False
     modifiers = []
     has_fused_arguments = False
     star_arg = None

@@ -3936,6 +3939,7 @@ class GeneratorDefNode(DefNode):
     #
     is_generator = True
+    is_coroutine = False
     needs_closure = True

     child_attrs = DefNode.child_attrs + ["gbody"]

@@ -3956,8 +3960,9 @@ class GeneratorDefNode(DefNode):
         qualname = code.intern_identifier(self.qualname)

         code.putln('{')
-        code.putln('__pyx_GeneratorObject *gen = __Pyx_Generator_New('
-                   '(__pyx_generator_body_t) %s, (PyObject *) %s, %s, %s); %s' % (
+        code.putln('__pyx_CoroutineObject *gen = __Pyx_%s_New('
+                   '(__pyx_coroutine_body_t) %s, (PyObject *) %s, %s, %s); %s' % (
+            'Coroutine' if self.is_coroutine else 'Generator',
             body_cname, Naming.cur_scope_cname, name, qualname,
             code.error_goto_if_null('gen', self.pos)))
         code.put_decref(Naming.cur_scope_cname, py_object_type)

@@ -3972,13 +3977,18 @@ class GeneratorDefNode(DefNode):
         code.putln('}')

     def generate_function_definitions(self, env, code):
-        env.use_utility_code(UtilityCode.load_cached("Generator", "Generator.c"))
+        env.use_utility_code(UtilityCode.load_cached(
+            'Coroutine' if self.is_coroutine else 'Generator', "Coroutine.c"))

         self.gbody.generate_function_header(code, proto=True)
         super(GeneratorDefNode, self).generate_function_definitions(env, code)
         self.gbody.generate_function_definitions(env, code)


+class AsyncDefNode(GeneratorDefNode):
+    is_coroutine = True
+
+
 class GeneratorBodyDefNode(DefNode):
     # Main code body of a generator implemented as a DefNode.
     #

@@ -4005,7 +4015,7 @@ class GeneratorBodyDefNode(DefNode):
         self.declare_generator_body(env)

     def generate_function_header(self, code, proto=False):
-        header = "static PyObject *%s(__pyx_GeneratorObject *%s, PyObject *%s)" % (
+        header = "static PyObject *%s(__pyx_CoroutineObject *%s, PyObject *%s)" % (
             self.entry.func_cname,
             Naming.generator_cname,
             Naming.sent_value_cname)

@@ -4070,7 +4080,7 @@ class GeneratorBodyDefNode(DefNode):
         code.put_label(code.error_label)
         if Future.generator_stop in env.global_scope().context.future_directives:
             # PEP 479: turn accidental StopIteration exceptions into a RuntimeError
-            code.globalstate.use_utility_code(UtilityCode.load_cached("pep479", "Generator.c"))
+            code.globalstate.use_utility_code(UtilityCode.load_cached("pep479", "Coroutine.c"))
             code.putln("if (unlikely(PyErr_ExceptionMatches(PyExc_StopIteration))) "
                        "__Pyx_Generator_Replace_StopIteration();")
         for cname, type in code.funcstate.all_managed_temps():

@@ -4082,7 +4092,7 @@ class GeneratorBodyDefNode(DefNode):
         code.put_xdecref(Naming.retval_cname, py_object_type)
         code.putln('%s->resume_label = -1;' % Naming.generator_cname)
         # clean up as early as possible to help breaking any reference cycles
-        code.putln('__Pyx_Generator_clear((PyObject*)%s);' % Naming.generator_cname)
+        code.putln('__Pyx_Coroutine_clear((PyObject*)%s);' % Naming.generator_cname)
         code.put_finish_refcount_context()
         code.putln('return NULL;')
         code.putln("}")

@@ -5512,7 +5522,7 @@ class ReturnStatNode(StatNode):
         elif self.in_generator:
             # return value == raise StopIteration(value), but uncatchable
             code.globalstate.use_utility_code(
-                UtilityCode.load_cached("ReturnWithStopIteration", "Generator.c"))
+                UtilityCode.load_cached("ReturnWithStopIteration", "Coroutine.c"))
             code.putln("%s = NULL; __Pyx_ReturnWithStopIteration(%s);" % (
                 Naming.retval_cname,
                 self.value.py_result()))

@@ -6059,40 +6069,49 @@ class DictIterationNextNode(Node):
             target.generate_assignment_code(result, code)

         var.release(code)


 def ForStatNode(pos, **kw):
     if 'iterator' in kw:
-        return ForInStatNode(pos, **kw)
+        if kw['iterator'].is_async:
+            return AsyncForStatNode(pos, **kw)
+        else:
+            return ForInStatNode(pos, **kw)
     else:
         return ForFromStatNode(pos, **kw)


-class ForInStatNode(LoopNode, StatNode):
-    #  for statement
+class _ForInStatNode(LoopNode, StatNode):
+    #  Base class of 'for-in' statements.
     #
     #  target        ExprNode
-    #  iterator      IteratorNode
+    #  iterator      IteratorNode | AwaitExprNode(AsyncIteratorNode)
     #  body          StatNode
     #  else_clause   StatNode
-    #  item          NextNode       used internally
+    #  item          NextNode | AwaitExprNode(AsyncNextNode)
+    #  is_async      boolean        true for 'async for' statements

-    child_attrs = ["target", "iterator", "body", "else_clause"]
+    child_attrs = ["target", "item", "iterator", "body", "else_clause"]
     item = None
+    is_async = False

+    def _create_item_node(self):
+        raise NotImplementedError("must be implemented by subclasses")

     def analyse_declarations(self, env):
-        from . import ExprNodes
         self.target.analyse_target_declaration(env)
         self.body.analyse_declarations(env)
         if self.else_clause:
             self.else_clause.analyse_declarations(env)
-        self.item = ExprNodes.NextNode(self.iterator)
+        self._create_item_node()

     def analyse_expressions(self, env):
         self.target = self.target.analyse_target_types(env)
         self.iterator = self.iterator.analyse_expressions(env)
-        from . import ExprNodes
-        self.item = ExprNodes.NextNode(self.iterator)  # must rewrap after analysis
+        self._create_item_node()  # must rewrap self.item after analysis
         self.item = self.item.analyse_expressions(env)
-        if (self.iterator.type.is_ptr or self.iterator.type.is_array) and \
-                self.target.type.assignable_from(self.iterator.type):
+        if (not self.is_async and
+                (self.iterator.type.is_ptr or self.iterator.type.is_array) and
+                self.target.type.assignable_from(self.iterator.type)):
             # C array slice optimization.
             pass
         else:

@@ -6158,6 +6177,37 @@ class ForInStatNode(LoopNode, StatNode):
         self.item.annotate(code)


+class ForInStatNode(_ForInStatNode):
+    #  'for' statement
+
+    is_async = False
+
+    def _create_item_node(self):
+        from .ExprNodes import NextNode
+        self.item = NextNode(self.iterator)
+
+
+class AsyncForStatNode(_ForInStatNode):
+    #  'async for' statement
+    #
+    #  iterator   AwaitExprNode(AsyncIteratorNode)
+    #  item       AwaitIterNextExprNode(AsyncIteratorNode)
+
+    is_async = True
+
+    def __init__(self, pos, iterator, **kw):
+        assert 'item' not in kw
+        from . import ExprNodes
+        # AwaitExprNodes must appear before running MarkClosureVisitor
+        kw['iterator'] = ExprNodes.AwaitExprNode(iterator.pos, arg=iterator)
+        kw['item'] = ExprNodes.AwaitIterNextExprNode(iterator.pos, arg=None)
+        _ForInStatNode.__init__(self, pos, **kw)
+
+    def _create_item_node(self):
+        from . import ExprNodes
+        self.item.arg = ExprNodes.AsyncNextNode(self.iterator)
+
+
 class ForFromStatNode(LoopNode, StatNode):
     #  for name from expr rel name rel expr
     #

@@ -6444,7 +6494,7 @@ class WithStatNode(StatNode):
         code.putln("%s = __Pyx_PyObject_LookupSpecial(%s, %s); %s" % (
             self.exit_var,
             self.manager.py_result(),
-            code.intern_identifier(EncodedString('__exit__')),
+            code.intern_identifier(EncodedString('__aexit__' if self.is_async else '__exit__')),
             code.error_goto_if_null(self.exit_var, self.pos),
             ))
         code.put_gotref(self.exit_var)

@@ -7108,7 +7158,7 @@ class GILStatNode(NogilTryFinallyStatNode):
         from .ParseTreeTransforms import YieldNodeCollector
         collector = YieldNodeCollector()
         collector.visitchildren(body)
-        if not collector.yields:
+        if not collector.yields and not collector.awaits:
             return

         if state == 'gil':

@@ -7205,8 +7255,8 @@ utility_code_for_cimports = {
 utility_code_for_imports = {
     # utility code used when special modules are imported.
    # TODO: Consider a generic user-level mechanism for importing
-    'asyncio': ("__Pyx_patch_asyncio", "PatchAsyncIO", "Generator.c"),
-    'inspect': ("__Pyx_patch_inspect", "PatchInspect", "Generator.c"),
+    'asyncio': ("__Pyx_patch_asyncio", "PatchAsyncIO", "Coroutine.c"),
+    'inspect': ("__Pyx_patch_inspect", "PatchInspect", "Coroutine.c"),
 }
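WithStatNode now looks up __aexit__ instead of __exit__ when is_async is set, and the exit call is wrapped in an await by WithExitCallNode. As a rough orientation, per PEP 492 an 'async with' block behaves like the following sketch (run_async_with and body are placeholder names, not compiler code):

    async def run_async_with(mgr, body):
        aexit = type(mgr).__aexit__
        value = await type(mgr).__aenter__(mgr)      # awaited __aenter__
        try:
            body(value)
        except BaseException as exc:
            # re-raise unless the awaited __aexit__ swallowed the exception
            if not await aexit(mgr, type(exc), exc, exc.__traceback__):
                raise
        else:
            await aexit(mgr, None, None, None)       # awaited __aexit__ on normal exit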
Cython/Compiler/ParseTreeTransforms.py

@@ -200,7 +200,7 @@ class PostParse(ScopeTrackingTransform):
         node.lambda_name = EncodedString(u'lambda%d' % lambda_id)
         collector = YieldNodeCollector()
         collector.visitchildren(node.result_expr)
-        if collector.yields or isinstance(node.result_expr, ExprNodes.YieldExprNode):
+        if collector.yields or collector.awaits or isinstance(node.result_expr, ExprNodes.YieldExprNode):
             body = Nodes.ExprStatNode(
                 node.result_expr.pos, expr=node.result_expr)
         else:

@@ -1219,15 +1219,19 @@ class WithTransform(CythonTransform, SkipDeclarations):
     def visit_WithStatNode(self, node):
         self.visitchildren(node, 'body')
         pos = node.pos
+        is_async = node.is_async
         body, target, manager = node.body, node.target, node.manager
         node.enter_call = ExprNodes.SimpleCallNode(
             pos, function=ExprNodes.AttributeNode(
                 pos, obj=ExprNodes.CloneNode(manager),
-                attribute=EncodedString('__enter__'),
+                attribute=EncodedString('__aenter__' if is_async else '__enter__'),
                 is_special_lookup=True),
             args=[],
             is_temp=True)
+
+        if is_async:
+            node.enter_call = ExprNodes.AwaitExprNode(pos, arg=node.enter_call)
+
         if target is not None:
             body = Nodes.StatListNode(
                 pos, stats=[

@@ -1245,7 +1249,8 @@ class WithTransform(CythonTransform, SkipDeclarations):
                         pos, operand=ExprNodes.WithExitCallNode(
                             pos, with_stat=node,
                             test_if_run=False,
-                            args=excinfo_target)),
+                            args=excinfo_target,
+                            await=ExprNodes.AwaitExprNode(pos, arg=None) if is_async else None)),
                     body=Nodes.ReraiseStatNode(pos),
                 ),
             ],

@@ -1266,8 +1271,8 @@ class WithTransform(CythonTransform, SkipDeclarations):
                 pos, with_stat=node,
                 test_if_run=True,
                 args=ExprNodes.TupleNode(
-                    pos, args=[ExprNodes.NoneNode(pos) for _ in range(3)])
-            )),
+                    pos, args=[ExprNodes.NoneNode(pos) for _ in range(3)]),
+                await=ExprNodes.AwaitExprNode(pos, arg=None) if is_async else None)),
             handle_error_case=False,
         )
         return node

@@ -2205,6 +2210,7 @@ class YieldNodeCollector(TreeVisitor):
     def __init__(self):
         super(YieldNodeCollector, self).__init__()
         self.yields = []
+        self.awaits = []
         self.returns = []
         self.has_return_value = False

@@ -2215,6 +2221,10 @@ class YieldNodeCollector(TreeVisitor):
         self.yields.append(node)
         self.visitchildren(node)

+    def visit_AwaitExprNode(self, node):
+        self.awaits.append(node)
+        self.visitchildren(node)
+
     def visit_ReturnStatNode(self, node):
         self.visitchildren(node)
         if node.value:

@@ -2250,27 +2260,36 @@ class MarkClosureVisitor(CythonTransform):
         collector = YieldNodeCollector()
         collector.visitchildren(node)
-        if collector.yields:
-            if isinstance(node, Nodes.CFuncDefNode):
-                # Will report error later
-                return node
-            for i, yield_expr in enumerate(collector.yields, 1):
-                yield_expr.label_num = i
-            for retnode in collector.returns:
-                retnode.in_generator = True
-
-            gbody = Nodes.GeneratorBodyDefNode(
-                pos=node.pos, name=node.name, body=node.body)
-            generator = Nodes.GeneratorDefNode(
-                pos=node.pos, name=node.name, args=node.args,
-                star_arg=node.star_arg, starstar_arg=node.starstar_arg,
-                doc=node.doc, decorators=node.decorators,
-                gbody=gbody, lambda_name=node.lambda_name)
-            return generator
-        return node
+
+        if node.is_async_def:
+            if collector.yields:
+                error(collector.yields[0].pos, "'yield' not allowed in async coroutines (use 'await')")
+            yields = collector.awaits
+        elif collector.yields:
+            if collector.awaits:
+                error(collector.yields[0].pos, "'await' not allowed in generators (use 'yield')")
+            yields = collector.yields
+        else:
+            return node
+
+        for i, yield_expr in enumerate(yields, 1):
+            yield_expr.label_num = i
+        for retnode in collector.returns:
+            retnode.in_generator = True
+
+        gbody = Nodes.GeneratorBodyDefNode(
+            pos=node.pos, name=node.name, body=node.body)
+        coroutine = (Nodes.AsyncDefNode if node.is_async_def else Nodes.GeneratorDefNode)(
+            pos=node.pos, name=node.name, args=node.args,
+            star_arg=node.star_arg, starstar_arg=node.starstar_arg,
+            doc=node.doc, decorators=node.decorators,
+            gbody=gbody, lambda_name=node.lambda_name)
+        return coroutine

     def visit_CFuncDefNode(self, node):
+        self.visit_FuncDefNode(node)
         self.needs_closure = False
         self.visitchildren(node)
         node.needs_closure = self.needs_closure
         self.needs_closure = True
         if node.needs_closure and node.overridable:
             error(node.pos, "closures inside cpdef functions not yet supported")
         return node

@@ -2287,6 +2306,7 @@ class MarkClosureVisitor(CythonTransform):
         self.needs_closure = True
         return node

+
 class CreateClosureClasses(CythonTransform):
     # Output closure classes in module scope for all functions
     # that really need it.
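MarkClosureVisitor now distinguishes coroutines from generators and rejects mixing the two keywords. The error tests added in this merge (tests/errors/pep492_badsyntax_async*.pyx) cover cases along these lines; this is a hedged illustration, not one of the committed test files:

    async def broken():
        yield 1   # compile error: "'yield' not allowed in async coroutines (use 'await')"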
Cython/Compiler/Parsing.pxd

@@ -44,6 +44,7 @@ cdef p_typecast(PyrexScanner s)
 cdef p_sizeof(PyrexScanner s)
 cdef p_yield_expression(PyrexScanner s)
 cdef p_yield_statement(PyrexScanner s)
+cdef p_async_statement(PyrexScanner s, ctx, decorators)
 cdef p_power(PyrexScanner s)
 cdef p_new_expr(PyrexScanner s)
 cdef p_trailer(PyrexScanner s, node1)

@@ -107,18 +108,18 @@ cdef p_if_statement(PyrexScanner s, ctx)
 cdef p_if_clause(PyrexScanner s)
 cdef p_else_clause(PyrexScanner s)
 cdef p_while_statement(PyrexScanner s)
-cdef p_for_statement(PyrexScanner s)
-cdef dict p_for_bounds(PyrexScanner s, bint allow_testlist=*)
+cdef p_for_statement(PyrexScanner s, bint is_async=*)
+cdef dict p_for_bounds(PyrexScanner s, bint allow_testlist=*, bint is_async=*)
 cdef p_for_from_relation(PyrexScanner s)
 cdef p_for_from_step(PyrexScanner s)
 cdef p_target(PyrexScanner s, terminator)
 cdef p_for_target(PyrexScanner s)
-cdef p_for_iterator(PyrexScanner s, bint allow_testlist=*)
+cdef p_for_iterator(PyrexScanner s, bint allow_testlist=*, bint is_async=*)
 cdef p_try_statement(PyrexScanner s)
 cdef p_except_clause(PyrexScanner s)
 cdef p_include_statement(PyrexScanner s, ctx)
 cdef p_with_statement(PyrexScanner s)
-cdef p_with_items(PyrexScanner s)
+cdef p_with_items(PyrexScanner s, bint is_async=*)
 cdef p_with_template(PyrexScanner s)
 cdef p_simple_statement(PyrexScanner s, bint first_statement = *)
 cdef p_simple_statement_list(PyrexScanner s, ctx, bint first_statement = *)

@@ -128,7 +129,7 @@ cdef p_IF_statement(PyrexScanner s, ctx)
 cdef p_statement(PyrexScanner s, ctx, bint first_statement = *)
 cdef p_statement_list(PyrexScanner s, ctx, bint first_statement = *)
 cdef p_suite(PyrexScanner s, ctx = *)
-cdef tuple p_suite_with_docstring(PyrexScanner s, ctx, with_doc_only=*)
+cdef tuple p_suite_with_docstring(PyrexScanner s, ctx, bint with_doc_only=*)
 cdef tuple _extract_docstring(node)
 cdef p_positional_and_keyword_args(PyrexScanner s, end_sy_set, templates = *)

@@ -176,7 +177,7 @@ cdef p_c_modifiers(PyrexScanner s)
 cdef p_c_func_or_var_declaration(PyrexScanner s, pos, ctx)
 cdef p_ctypedef_statement(PyrexScanner s, ctx)
 cdef p_decorators(PyrexScanner s)
-cdef p_def_statement(PyrexScanner s, list decorators=*)
+cdef p_def_statement(PyrexScanner s, list decorators=*, bint is_async_def=*)
 cdef p_varargslist(PyrexScanner s, terminator=*, bint annotated = *)
 cdef p_py_arg_decl(PyrexScanner s, bint annotated = *)
 cdef p_class_statement(PyrexScanner s, decorators)
Cython/Compiler/Parsing.py

@@ -55,6 +55,7 @@ class Ctx(object):
         d.update(kwds)
         return ctx

+
 def p_ident(s, message="Expected an identifier"):
     if s.sy == 'IDENT':
         name = s.systring

@@ -350,6 +351,7 @@ def p_sizeof(s):
     s.expect(')')
     return node

+
 def p_yield_expression(s):
     # s.sy == "yield"
     pos = s.position()

@@ -370,19 +372,47 @@ def p_yield_expression(s):
     else:
         return ExprNodes.YieldExprNode(pos, arg=arg)

+
 def p_yield_statement(s):
     # s.sy == "yield"
     yield_expr = p_yield_expression(s)
     return Nodes.ExprStatNode(yield_expr.pos, expr=yield_expr)


-#power: atom trailer* ('**' factor)*
+def p_async_statement(s, ctx, decorators):
+    # s.sy >> 'async' ...
+    if s.sy == 'def':
+        # 'async def' statements aren't allowed in pxd files
+        if 'pxd' in ctx.level:
+            s.error('def statement not allowed here')
+        s.level = ctx.level
+        return p_def_statement(s, decorators, is_async_def=True)
+    elif decorators:
+        s.error("Decorators can only be followed by functions or classes")
+    elif s.sy == 'for':
+        return p_for_statement(s, is_async=True)
+    elif s.sy == 'with':
+        s.next()
+        return p_with_items(s, is_async=True)
+    else:
+        s.error("expected one of 'def', 'for', 'with' after 'async'")
+
+
+#power: atom_expr ('**' factor)*
+#atom_expr: ['await'] atom trailer*

 def p_power(s):
     if s.systring == 'new' and s.peek()[0] == 'IDENT':
         return p_new_expr(s)
+    await_pos = None
+    if s.sy == 'await':
+        await_pos = s.position()
+        s.next()
     n1 = p_atom(s)
     while s.sy in ('(', '[', '.'):
         n1 = p_trailer(s, n1)
+    if await_pos:
+        n1 = ExprNodes.AwaitExprNode(await_pos, arg=n1)
     if s.sy == '**':
         pos = s.position()
         s.next()

@@ -390,6 +420,7 @@ def p_power(s):
         n1 = ExprNodes.binop_node(pos, '**', n1, n2)
     return n1

+
 def p_new_expr(s):
     # s.systring == 'new'.
     pos = s.position()

@@ -1568,23 +1599,25 @@ def p_while_statement(s):
         condition = test, body = body,
         else_clause = else_clause)

-def p_for_statement(s):
+
+def p_for_statement(s, is_async=False):
     # s.sy == 'for'
     pos = s.position()
     s.next()
-    kw = p_for_bounds(s, allow_testlist=True)
+    kw = p_for_bounds(s, allow_testlist=True, is_async=is_async)
     body = p_suite(s)
     else_clause = p_else_clause(s)
-    kw.update(body=body, else_clause=else_clause)
+    kw.update(body=body, else_clause=else_clause, is_async=is_async)
     return Nodes.ForStatNode(pos, **kw)

-def p_for_bounds(s, allow_testlist=True):
+
+def p_for_bounds(s, allow_testlist=True, is_async=False):
     target = p_for_target(s)
     if s.sy == 'in':
         s.next()
-        iterator = p_for_iterator(s, allow_testlist)
+        iterator = p_for_iterator(s, allow_testlist, is_async=is_async)
         return dict(target=target, iterator=iterator)
-    elif not s.in_python_file:
+    elif not s.in_python_file and not is_async:
         if s.sy == 'from':
             s.next()
             bound1 = p_bit_expr(s)

@@ -1654,16 +1687,19 @@ def p_target(s, terminator):
     else:
         return expr

+
 def p_for_target(s):
     return p_target(s, 'in')

-def p_for_iterator(s, allow_testlist=True):
+
+def p_for_iterator(s, allow_testlist=True, is_async=False):
     pos = s.position()
     if allow_testlist:
         expr = p_testlist(s)
     else:
         expr = p_or_test(s)
-    return ExprNodes.IteratorNode(pos, sequence=expr)
+    return (ExprNodes.AsyncIteratorNode if is_async else ExprNodes.IteratorNode)(pos, sequence=expr)

+
 def p_try_statement(s):
     # s.sy == 'try'

@@ -1745,17 +1781,21 @@ def p_include_statement(s, ctx):
     else:
         return Nodes.PassStatNode(pos)

+
 def p_with_statement(s):
-    s.next() # 'with'
+    s.next()  # 'with'
     if s.systring == 'template' and not s.in_python_file:
         node = p_with_template(s)
     else:
         node = p_with_items(s)
     return node

-def p_with_items(s):
+
+def p_with_items(s, is_async=False):
     pos = s.position()
     if not s.in_python_file and s.sy == 'IDENT' and s.systring in ('nogil', 'gil'):
+        if is_async:
+            s.error("with gil/nogil cannot be async")
         state = s.systring
         s.next()
         if s.sy == ',':

@@ -1763,7 +1803,7 @@ def p_with_items(s):
             body = p_with_items(s)
         else:
             body = p_suite(s)
-        return Nodes.GILStatNode(pos, state = state, body = body)
+        return Nodes.GILStatNode(pos, state=state, body=body)
     else:
         manager = p_test(s)
         target = None

@@ -1772,11 +1812,11 @@ def p_with_items(s):
             target = p_starred_expr(s)
         if s.sy == ',':
             s.next()
-            body = p_with_items(s)
+            body = p_with_items(s, is_async=is_async)
         else:
             body = p_suite(s)
-    return Nodes.WithStatNode(pos, manager = manager, target = target, body = body)
+    return Nodes.WithStatNode(pos, manager=manager, target=target, body=body, is_async=is_async)

+
 def p_with_template(s):
     pos = s.position()

@@ -1929,12 +1969,14 @@ def p_statement(s, ctx, first_statement = 0):
             s.error('decorator not allowed here')
         s.level = ctx.level
         decorators = p_decorators(s)
-        bad_toks = 'def', 'cdef', 'cpdef', 'class'
-        if not ctx.allow_struct_enum_decorator and s.sy not in bad_toks:
-            s.error("Decorators can only be followed by functions or classes")
+        if not ctx.allow_struct_enum_decorator and s.sy not in ('def', 'cdef', 'cpdef', 'class'):
+            if s.sy == 'IDENT' and s.systring == 'async':
+                pass  # handled below
+            else:
+                s.error("Decorators can only be followed by functions or classes")
     elif s.sy == 'pass' and cdef_flag:
         # empty cdef block
-        return p_pass_statement(s, with_newline = 1)
+        return p_pass_statement(s, with_newline=1)

     overridable = 0
     if s.sy == 'cdef':

@@ -1948,11 +1990,11 @@ def p_statement(s, ctx, first_statement = 0):
         if ctx.level not in ('module', 'module_pxd', 'function', 'c_class', 'c_class_pxd'):
             s.error('cdef statement not allowed here')
         s.level = ctx.level
-        node = p_cdef_statement(s, ctx(overridable = overridable))
+        node = p_cdef_statement(s, ctx(overridable=overridable))
         if decorators is not None:
-            tup = Nodes.CFuncDefNode, Nodes.CVarDefNode, Nodes.CClassDefNode
+            tup = (Nodes.CFuncDefNode, Nodes.CVarDefNode, Nodes.CClassDefNode)
             if ctx.allow_struct_enum_decorator:
-                tup += Nodes.CStructOrUnionDefNode, Nodes.CEnumDefNode
+                tup += (Nodes.CStructOrUnionDefNode, Nodes.CEnumDefNode)
             if not isinstance(node, tup):
                 s.error("Decorators can only be followed by functions or classes")
             node.decorators = decorators

@@ -1995,9 +2037,25 @@ def p_statement(s, ctx, first_statement = 0):
                 return p_try_statement(s)
             elif s.sy == 'with':
                 return p_with_statement(s)
+            elif s.sy == 'async':
+                s.next()
+                return p_async_statement(s, ctx, decorators)
             else:
+                if s.sy == 'IDENT' and s.systring == 'async':
+                    # PEP 492 enables the async/await keywords when it spots "async def ..."
+                    s.next()
+                    if s.sy == 'def':
+                        s.enable_keyword('async')
+                        s.enable_keyword('await')
+                        result = p_async_statement(s, ctx, decorators)
+                        s.disable_keyword('await')
+                        s.disable_keyword('async')
+                        return result
+                    elif decorators:
+                        s.error("Decorators can only be followed by functions or classes")
+                    s.put_back('IDENT', 'async')
                 return p_simple_statement_list(s, ctx, first_statement=first_statement)

 def p_statement_list(s, ctx, first_statement = 0):
     # Parse a series of statements separated by newlines.

@@ -3002,7 +3060,8 @@ def p_decorators(s):
         s.expect_newline("Expected a newline after decorator")
     return decorators

-def p_def_statement(s, decorators=None):
+
+def p_def_statement(s, decorators=None, is_async_def=False):
     # s.sy == 'def'
     pos = s.position()
     s.next()

@@ -3017,10 +3076,11 @@ def p_def_statement(s, decorators=None):
         s.next()
         return_type_annotation = p_test(s)
     doc, body = p_suite_with_docstring(s, Ctx(level='function'))
-    return Nodes.DefNode(pos, name = name, args = args,
-        star_arg = star_arg, starstar_arg = starstar_arg,
-        doc = doc, body = body, decorators = decorators,
-        return_type_annotation = return_type_annotation)
+    return Nodes.DefNode(
+        pos, name=name, args=args,
+        star_arg=star_arg, starstar_arg=starstar_arg,
+        doc=doc, body=body, decorators=decorators,
+        is_async_def=is_async_def, return_type_annotation=return_type_annotation)

 def p_varargslist(s, terminator=')', annotated=1):
     args = p_c_arg_list(s, in_pyfunc=1, nonempty_declarators=1,
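Because 'async' and 'await' are only switched on while an 'async def' is being parsed, and 'async' is otherwise put back as a plain identifier, existing code that uses these words as names should keep compiling. A hedged sketch of that behaviour; fetch() is a placeholder:

    async = 1            # still a normal identifier at module level
    result = async + 1   # parsed via the put_back('IDENT', 'async') path

    async def download(url):
        return await fetch(url)   # inside the coroutine body both words act as keywords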
Cython/Compiler/Pipeline.py

@@ -172,12 +172,12 @@ def create_pipeline(context, mode, exclude_classes=()):
         InterpretCompilerDirectives(context, context.compiler_directives),
         ParallelRangeTransform(context),
         AdjustDefByDirectives(context),
+        WithTransform(context),
         MarkClosureVisitor(context),
         _align_function_definitions,
         RemoveUnreachableCode(context),
         ConstantFolding(),
         FlattenInListTransform(),
-        WithTransform(context),
         DecoratorTransform(context),
         ForwardDeclareTypes(context),
         AnalyseDeclarationsTransform(context),
Cython/Compiler/Scanning.pxd

@@ -30,6 +30,7 @@ cdef class PyrexScanner(Scanner):
     cdef public bint in_python_file
     cdef public source_encoding
     cdef set keywords
+    cdef public dict keywords_stack
     cdef public list indentation_stack
     cdef public indentation_char
     cdef public int bracket_nesting_level

@@ -57,3 +58,5 @@ cdef class PyrexScanner(Scanner):
     cdef expect_indent(self)
     cdef expect_dedent(self)
     cdef expect_newline(self, message=*, bint ignore_semicolon=*)
+    cdef enable_keyword(self, name)
+    cdef disable_keyword(self, name)
Cython/Compiler/Scanning.py

@@ -319,6 +319,7 @@ class PyrexScanner(Scanner):
             self.in_python_file = False
             self.keywords = set(pyx_reserved_words)
         self.trace = trace_scanner
+        self.keywords_stack = {}
        self.indentation_stack = [0]
        self.indentation_char = None
        self.bracket_nesting_level = 0

@@ -497,3 +498,18 @@ class PyrexScanner(Scanner):
             self.expect('NEWLINE', message)
         if useless_trailing_semicolon is not None:
             warning(useless_trailing_semicolon, "useless trailing semicolon")
+
+    def enable_keyword(self, name):
+        if name in self.keywords_stack:
+            self.keywords_stack[name] += 1
+        else:
+            self.keywords_stack[name] = 1
+            self.keywords.add(name)
+
+    def disable_keyword(self, name):
+        count = self.keywords_stack.get(name, 1)
+        if count == 1:
+            self.keywords.discard(name)
+            del self.keywords_stack[name]
+        else:
+            self.keywords_stack[name] = count - 1
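enable_keyword()/disable_keyword() reference-count each keyword so that nested activations only drop the keyword once the outermost user is done. A small standalone sketch of that counting behaviour, mirroring the methods added above (module-level functions instead of scanner methods):

    keywords, keywords_stack = set(), {}

    def enable_keyword(name):
        if name in keywords_stack:
            keywords_stack[name] += 1
        else:
            keywords_stack[name] = 1
            keywords.add(name)

    def disable_keyword(name):
        count = keywords_stack.get(name, 1)
        if count == 1:
            keywords.discard(name)
            del keywords_stack[name]
        else:
            keywords_stack[name] = count - 1

    enable_keyword('await'); enable_keyword('await')   # nested activation
    disable_keyword('await')
    assert 'await' in keywords        # one level is still active
    disable_keyword('await')
    assert 'await' not in keywords    # fully released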
Cython/Compiler/TypeSlots.py

@@ -431,8 +431,8 @@ class SuiteSlot(SlotDescriptor):
     #
     #  sub_slots   [SlotDescriptor]

-    def __init__(self, sub_slots, slot_type, slot_name):
-        SlotDescriptor.__init__(self, slot_name)
+    def __init__(self, sub_slots, slot_type, slot_name, ifdef=None):
+        SlotDescriptor.__init__(self, slot_name, ifdef=ifdef)
         self.sub_slots = sub_slots
         self.slot_type = slot_type
         substructures.append(self)

@@ -454,6 +454,8 @@ class SuiteSlot(SlotDescriptor):
     def generate_substructure(self, scope, code):
         if not self.is_empty(scope):
             code.putln("")
+            if self.ifdef:
+                code.putln("#if %s" % self.ifdef)
             code.putln(
                 "static %s %s = {" % (
                     self.slot_type,

@@ -461,6 +463,8 @@ class SuiteSlot(SlotDescriptor):
             for slot in self.sub_slots:
                 slot.generate(scope, code)
             code.putln("};")
+            if self.ifdef:
+                code.putln("#endif")

 substructures = []   # List of all SuiteSlot instances

@@ -748,6 +752,12 @@ PyBufferProcs = (
     MethodSlot(releasebufferproc, "bf_releasebuffer", "__releasebuffer__")
 )

+PyAsyncMethods = (
+    MethodSlot(unaryfunc, "am_await", "__await__"),
+    MethodSlot(unaryfunc, "am_aiter", "__aiter__"),
+    MethodSlot(unaryfunc, "am_anext", "__anext__"),
+)
+
 #------------------------------------------------------------------------------------------
 #
 #  The main slot table.  This table contains descriptors for all the

@@ -761,7 +771,11 @@ slot_table = (
     EmptySlot("tp_print"), #MethodSlot(printfunc, "tp_print", "__print__"),
     EmptySlot("tp_getattr"),
     EmptySlot("tp_setattr"),
-    MethodSlot(cmpfunc, "tp_compare", "__cmp__", py3 = '<RESERVED>'),
+
+    # tp_compare (Py2) / tp_reserved (Py3<3.5) / tp_as_async (Py3.5+) is always used as tp_as_async in Py3
+    MethodSlot(cmpfunc, "tp_compare", "__cmp__", ifdef="PY_MAJOR_VERSION < 3"),
+    SuiteSlot(PyAsyncMethods, "__Pyx_PyAsyncMethodsStruct", "tp_as_async", ifdef="PY_MAJOR_VERSION >= 3"),
+
     MethodSlot(reprfunc, "tp_repr", "__repr__"),
     SuiteSlot(PyNumberMethods, "PyNumberMethods", "tp_as_number"),
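With the new PyAsyncMethods suite slot, __await__, __aiter__ and __anext__ defined on a cdef class are emitted into a __Pyx_PyAsyncMethodsStruct used for tp_as_async on Python 3. A hedged .pyx sketch of an awaitable extension type; Ready is a placeholder and the awaitable protocol follows PEP 492, not this commit's test files:

    cdef class Ready:
        cdef object value

        def __init__(self, value):
            self.value = value

        def __await__(self):
            # __await__ must return an iterator (PEP 492); a generator method works
            return self._step()

        def _step(self):
            if False:
                yield            # makes _step a generator without ever suspending
            return self.value    # becomes the result of 'await Ready(x)'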
Cython/Parser/Grammar

@@ -13,7 +13,8 @@ eval_input: testlist NEWLINE* ENDMARKER
 decorator: '@' dotted_PY_NAME [ '(' [arglist] ')' ] NEWLINE
 decorators: decorator+
-decorated: decorators (classdef | funcdef | cdef_stmt)
+decorated: decorators (classdef | funcdef | async_funcdef | cdef_stmt)
+async_funcdef: 'async' funcdef
 funcdef: 'def' PY_NAME parameters ['->' test] ':' suite
 parameters: '(' [typedargslist] ')'
 typedargslist: (tfpdef ['=' (test | '*')] (',' tfpdef ['=' (test | '*')])* [','

@@ -96,7 +97,8 @@ shift_expr: arith_expr (('<<'|'>>') arith_expr)*
 arith_expr: term (('+'|'-') term)*
 term: factor (('*'|'/'|'%'|'//') factor)*
 factor: ('+'|'-'|'~') factor | power | address | size_of | cast
-power: atom trailer* ['**' factor]
+power: atom_expr ['**' factor]
+atom_expr: ['await'] atom trailer*
 atom: ('(' [yield_expr|testlist_comp] ')' |
        '[' [testlist_comp] ']' |
        '{' [dictorsetmaker] '}' |
Cython/Utility/
Generator
.c
→
Cython/Utility/
Coroutine
.c
View file @
18691f38
//////////////////// YieldFrom.proto ////////////////////
////////////////////
Generator
YieldFrom.proto ////////////////////
static
CYTHON_INLINE
PyObject
*
__Pyx_Generator_Yield_From
(
__pyx_
Generator
Object
*
gen
,
PyObject
*
source
);
static
CYTHON_INLINE
PyObject
*
__Pyx_Generator_Yield_From
(
__pyx_
Coroutine
Object
*
gen
,
PyObject
*
source
);
//////////////////// YieldFrom ////////////////////
////////////////////
Generator
YieldFrom ////////////////////
//@requires: Generator
static
CYTHON_INLINE
PyObject
*
__Pyx_Generator_Yield_From
(
__pyx_
Generator
Object
*
gen
,
PyObject
*
source
)
{
static
CYTHON_INLINE
PyObject
*
__Pyx_Generator_Yield_From
(
__pyx_
Coroutine
Object
*
gen
,
PyObject
*
source
)
{
PyObject
*
source_gen
,
*
retval
;
source_gen
=
PyObject_GetIter
(
source
);
if
(
unlikely
(
!
source_gen
))
if
(
unlikely
(
!
source_gen
))
{
#ifdef __Pyx_Coroutine_USED
#if CYTHON_COMPILING_IN_CPYTHON
// avoid exception instantiation if possible
if
(
PyErr_Occurred
()
==
PyExc_TypeError
#else
if
(
PyErr_ExceptionMatches
(
PyExc_TypeError
)
#endif
&&
__Pyx_Coroutine_CheckExact
(
source
))
{
PyErr_Clear
();
// TODO: this should only happen for types.coroutine()ed generators, but we can't determine that here
source_gen
=
__Pyx_Coroutine_await
(
source
);
}
else
#endif
return
NULL
;
}
// source_gen is now the iterator, make the first next() call
retval
=
Py_TYPE
(
source_gen
)
->
tp_iternext
(
source_gen
);
if
(
likely
(
retval
))
{
...
...
@@ -21,6 +35,187 @@ static CYTHON_INLINE PyObject* __Pyx_Generator_Yield_From(__pyx_GeneratorObject
}
//////////////////// CoroutineYieldFrom.proto ////////////////////

static CYTHON_INLINE PyObject* __Pyx_Coroutine_Yield_From(__pyx_CoroutineObject *gen, PyObject *source);

//////////////////// CoroutineYieldFrom ////////////////////
//@requires: Coroutine
//@requires: GetAwaitIter

static CYTHON_INLINE PyObject* __Pyx_Coroutine_Yield_From(__pyx_CoroutineObject *gen, PyObject *source) {
    PyObject *retval;
    if (__Pyx_Coroutine_CheckExact(source)) {
        retval = __Pyx_Generator_Next(source);
        if (retval) {
            Py_INCREF(source);
            gen->yieldfrom = source;
            return retval;
        }
    } else {
        PyObject *source_gen = __Pyx__Coroutine_GetAwaitableIter(source);
        if (unlikely(!source_gen))
            return NULL;
        // source_gen is now the iterator, make the first next() call
        if (__Pyx_Coroutine_CheckExact(source_gen)) {
            retval = __Pyx_Generator_Next(source_gen);
        } else {
            retval = Py_TYPE(source_gen)->tp_iternext(source_gen);
        }
        if (retval) {
            gen->yieldfrom = source_gen;
            return retval;
        }
        Py_DECREF(source_gen);
    }
    return NULL;
}

//////////////////// GetAwaitIter.proto ////////////////////

static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAwaitableIter(PyObject *o); /*proto*/
static PyObject *__Pyx__Coroutine_GetAwaitableIter(PyObject *o); /*proto*/

//////////////////// GetAwaitIter ////////////////////
//@requires: Coroutine
//@requires: ObjectHandling.c::PyObjectGetAttrStr
//@requires: ObjectHandling.c::PyObjectCallNoArg
//@requires: ObjectHandling.c::PyObjectCallOneArg

static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAwaitableIter(PyObject *o) {
#ifdef __Pyx_Coroutine_USED
    if (__Pyx_Coroutine_CheckExact(o)) {
        Py_INCREF(o);
        return o;
    }
#endif
    return __Pyx__Coroutine_GetAwaitableIter(o);
}

// adapted from genobject.c in Py3.5
static PyObject *__Pyx__Coroutine_GetAwaitableIter(PyObject *obj) {
    PyObject *res;
#if PY_MAJOR_VERSION >= 3
    __Pyx_PyAsyncMethodsStruct* am = __Pyx_PyType_AsAsync(obj);
    if (likely(am && am->am_await)) {
        res = (*am->am_await)(obj);
    } else
#endif
    {
#if PY_VERSION_HEX >= 0x030500B1
#if CYTHON_COMPILING_IN_CPYTHON
        if (PyGen_CheckCoroutineExact(obj)) {
            // Python generator marked with "@types.coroutine" decorator
            Py_INCREF(obj);
            return obj;
        }
#endif
        // no slot => no method
        goto slot_error;
#else
        PyObject *method = __Pyx_PyObject_GetAttrStr(obj, PYIDENT("__await__"));
        if (unlikely(!method)) goto slot_error;
#if CYTHON_COMPILING_IN_CPYTHON
        if (likely(PyMethod_Check(method))) {
            PyObject *self = PyMethod_GET_SELF(method);
            if (likely(self)) {
                PyObject *function = PyMethod_GET_FUNCTION(method);
                res = __Pyx_PyObject_CallOneArg(function, self);
            } else
                res = __Pyx_PyObject_CallNoArg(method);
        } else
#endif
        res = __Pyx_PyObject_CallNoArg(method);
        Py_DECREF(method);
#endif
    }
    if (unlikely(!res)) goto bad;
    if (!PyIter_Check(res)) {
        PyErr_Format(PyExc_TypeError,
                     "__await__() returned non-iterator of type '%.100s'",
                     Py_TYPE(res)->tp_name);
        Py_CLEAR(res);
    } else {
        int is_coroutine = 0;
#ifdef __Pyx_Coroutine_USED
        is_coroutine |= __Pyx_Coroutine_CheckExact(res);
#endif
#if PY_VERSION_HEX >= 0x030500B1
        is_coroutine |= PyGen_CheckCoroutineExact(res);
#endif
        if (unlikely(is_coroutine)) {
            /* __await__ must return an *iterator*, not
               a coroutine or another awaitable (see PEP 492) */
            PyErr_SetString(PyExc_TypeError, "__await__() returned a coroutine");
            Py_CLEAR(res);
        }
    }
    return res;
slot_error:
    PyErr_Format(PyExc_TypeError,
                 "object %.100s can't be used in 'await' expression",
                 Py_TYPE(obj)->tp_name);
bad:
    return NULL;
}
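
The helper above resolves `await x` by preferring the `am_await` slot and otherwise calling `__await__()`, and it insists that the result is a plain iterator rather than another coroutine. A rough pure-Python sketch of that lookup (an approximation for illustration only; `get_awaitable_iter` and `Awaitable42` are made-up names, and the coroutine-rejection branch is omitted):

def get_awaitable_iter(obj):
    # prefer the type-level __await__ and require an iterator result,
    # mirroring the checks done by __Pyx__Coroutine_GetAwaitableIter
    await_method = getattr(type(obj), '__await__', None)
    if await_method is None:
        raise TypeError(
            "object %.100s can't be used in 'await' expression" % type(obj).__name__)
    res = await_method(obj)
    if not hasattr(res, '__next__'):
        raise TypeError(
            "__await__() returned non-iterator of type '%.100s'" % type(res).__name__)
    return res

class Awaitable42:
    def __await__(self):
        yield            # suspend once
        return 42        # delivered to the awaiter via StopIteration.value

it = get_awaitable_iter(Awaitable42())
next(it)                 # runs up to the yield
try:
    next(it)
except StopIteration as exc:
    assert exc.value == 42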
//////////////////// AsyncIter.proto ////////////////////

static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAsyncIter(PyObject *o); /*proto*/
static CYTHON_INLINE PyObject *__Pyx_Coroutine_AsyncIterNext(PyObject *o); /*proto*/

//////////////////// AsyncIter ////////////////////
//@requires: GetAwaitIter
//@requires: ObjectHandling.c::PyObjectCallMethod0

static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAsyncIter(PyObject *obj) {
#if PY_MAJOR_VERSION >= 3
    __Pyx_PyAsyncMethodsStruct* am = __Pyx_PyType_AsAsync(obj);
    if (likely(am && am->am_aiter)) {
        return (*am->am_aiter)(obj);
    }
#endif
#if PY_VERSION_HEX < 0x030500B1
    {
        PyObject *iter = __Pyx_PyObject_CallMethod0(obj, PYIDENT("__aiter__"));
        if (likely(iter)) return iter;
        // FIXME: for the sake of a nicely conforming exception message, assume any AttributeError meant '__aiter__'
        if (!PyErr_ExceptionMatches(PyExc_AttributeError)) return NULL;
    }
#endif
    PyErr_Format(PyExc_TypeError,
                 "'async for' requires an object with __aiter__ method, got %.100s",
                 Py_TYPE(obj)->tp_name);
    return NULL;
}

static CYTHON_INLINE PyObject *__Pyx_Coroutine_AsyncIterNext(PyObject *obj) {
#if PY_MAJOR_VERSION >= 3
    __Pyx_PyAsyncMethodsStruct* am = __Pyx_PyType_AsAsync(obj);
    if (likely(am && am->am_anext)) {
        return (*am->am_anext)(obj);
    }
#endif
#if PY_VERSION_HEX < 0x030500B1
    {
        PyObject *value = __Pyx_PyObject_CallMethod0(obj, PYIDENT("__anext__"));
        if (likely(value)) return value;
    }
    // FIXME: for the sake of a nicely conforming exception message, assume any AttributeError meant '__anext__'
    if (PyErr_ExceptionMatches(PyExc_AttributeError))
#endif
    PyErr_Format(PyExc_TypeError,
                 "'async for' requires an object with __anext__ method, got %.100s",
                 Py_TYPE(obj)->tp_name);
    return NULL;
}
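
These two helpers back `async for`, which obtains the iterator through `__aiter__()` once and then calls `__anext__()` per step until `StopAsyncIteration` is raised. A minimal Python sketch of that protocol, driven by hand without an event loop (assuming Python 3.5.2+ semantics where `__aiter__` returns the iterator directly; the `Countdown` and `consume` names are invented for this example):

class Countdown:
    # minimal async iterator: __aiter__ returns self, __anext__ is a coroutine
    # that signals exhaustion by raising StopAsyncIteration
    def __init__(self, start):
        self.n = start

    def __aiter__(self):
        return self

    async def __anext__(self):
        if self.n <= 0:
            raise StopAsyncIteration
        self.n -= 1
        return self.n + 1

async def consume():
    seen = []
    async for value in Countdown(3):
        seen.append(value)
    return seen

coro = consume()
try:
    coro.send(None)               # nothing suspends, so one send() runs it to completion
except StopIteration as exc:
    assert exc.value == [3, 2, 1]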
//////////////////// pep479.proto ////////////////////

static void __Pyx_Generator_Replace_StopIteration(void); /*proto*/
...
...
@@ -40,16 +235,13 @@ static void __Pyx_Generator_Replace_StopIteration(void) {
}
//////////////////// Generator.proto ////////////////////
#define __Pyx_Generator_USED
#include <structmember.h>
#include <frameobject.h>
//////////////////// CoroutineBase.proto ////////////////////

typedef PyObject *(*__pyx_generator_body_t)(PyObject *, PyObject *);
typedef PyObject *(*__pyx_coroutine_body_t)(PyObject *, PyObject *);

typedef struct {
    PyObject_HEAD
    __pyx_generator_body_t body;
    __pyx_coroutine_body_t body;
    PyObject *closure;
    PyObject *exc_type;
    PyObject *exc_value;
...
...
@@ -62,37 +254,61 @@ typedef struct {
    int resume_label;
    // using T_BOOL for property below requires char value
    char is_running;
} __pyx_GeneratorObject;
} __pyx_CoroutineObject;

static PyTypeObject *__pyx_GeneratorType = 0;
static __pyx_GeneratorObject *__Pyx_Generator_New(__pyx_generator_body_t body, PyObject *closure,
                                                  PyObject *name, PyObject *qualname);
static int __pyx_Generator_init(void);
static int __Pyx_Generator_clear(PyObject *self);
static __pyx_CoroutineObject *__Pyx__Coroutine_New(PyTypeObject *type, __pyx_coroutine_body_t body,
                                                   PyObject *closure, PyObject *name, PyObject *qualname); /*proto*/
static int __Pyx_Coroutine_clear(PyObject *self); /*proto*/

#if 1 || PY_VERSION_HEX < 0x030300B0
static int __Pyx_PyGen_FetchStopIterationValue(PyObject **pvalue);
static int __Pyx_PyGen_FetchStopIterationValue(PyObject **pvalue); /*proto*/
#else
#define __Pyx_PyGen_FetchStopIterationValue(pvalue) PyGen_FetchStopIterationValue(pvalue)
#endif

//////////////////// Generator ////////////////////
//////////////////// Coroutine.proto ////////////////////

#define __Pyx_Coroutine_USED
static PyTypeObject *__pyx_CoroutineType = 0;
#define __Pyx_Coroutine_CheckExact(obj) (Py_TYPE(obj) == __pyx_CoroutineType)
#define __Pyx_Coroutine_New(body, closure, name, qualname) \
    __Pyx__Coroutine_New(__pyx_CoroutineType, body, closure, name, qualname)
static int __pyx_Coroutine_init(void); /*proto*/
static PyObject *__Pyx_Coroutine_await(PyObject *coroutine); /*proto*/

//////////////////// Generator.proto ////////////////////

#define __Pyx_Generator_USED
static PyTypeObject *__pyx_GeneratorType = 0;
#define __Pyx_Generator_CheckExact(obj) (Py_TYPE(obj) == __pyx_GeneratorType)
#define __Pyx_Generator_New(body, closure, name, qualname) \
    __Pyx__Coroutine_New(__pyx_GeneratorType, body, closure, name, qualname)
static int __pyx_Generator_init(void); /*proto*/

//////////////////// CoroutineBase ////////////////////
//@requires: Exceptions.c::PyErrFetchRestore
//@requires: Exceptions.c::SwapException
//@requires: Exceptions.c::RaiseException
//@requires: ObjectHandling.c::PyObjectCallMethod1
//@requires: ObjectHandling.c::PyObjectGetAttrStr
//@requires: CommonTypes.c::FetchCommonType
//@requires: PatchGeneratorABC

#include <structmember.h>
#include <frameobject.h>

static PyObject *__Pyx_Generator_Next(PyObject *self);
static PyObject *__Pyx_Generator_Send(PyObject *self, PyObject *value);
static PyObject *__Pyx_Generator_Close(PyObject *self);
static PyObject *__Pyx_Generator_Throw(PyObject *gen, PyObject *args);
static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value);
static PyObject *__Pyx_Coroutine_Close(PyObject *self);
static PyObject *__Pyx_Coroutine_Throw(PyObject *gen, PyObject *args);

#define __Pyx_Generator_CheckExact(obj) (Py_TYPE(obj) == __pyx_GeneratorType)
#define __Pyx_Generator_Undelegate(gen) Py_CLEAR((gen)->yieldfrom)
#define __Pyx_Coroutine_Undelegate(gen) Py_CLEAR((gen)->yieldfrom)

// If StopIteration exception is set, fetches its 'value'
// attribute if any, otherwise sets pvalue to None.
...
...
@@ -135,6 +351,23 @@ static int __Pyx_PyGen_FetchStopIterationValue(PyObject **pvalue) {
        if (!ev) {
            Py_INCREF(Py_None);
            ev = Py_None;
        } else if (PyTuple_Check(ev)) {
            // however, if it's a tuple, it is interpreted as separate constructor arguments (surprise!)
            if (PyTuple_GET_SIZE(ev) >= 1) {
                PyObject *value;
#if CYTHON_COMPILING_IN_CPYTHON
                value = PySequence_ITEM(ev, 0);
#else
                value = PyTuple_GET_ITEM(ev, 0);
                Py_INCREF(value);
#endif
                Py_DECREF(ev);
                ev = value;
            } else {
                Py_INCREF(Py_None);
                Py_DECREF(ev);
                ev = Py_None;
            }
        }
        Py_XDECREF(tb);
        Py_DECREF(et);
...
...
@@ -190,7 +423,7 @@ static int __Pyx_PyGen_FetchStopIterationValue(PyObject **pvalue) {
#endif
static
CYTHON_INLINE
void
__Pyx_
Generator_ExceptionClear
(
__pyx_Generator
Object
*
self
)
{
void
__Pyx_
Coroutine_ExceptionClear
(
__pyx_Coroutine
Object
*
self
)
{
PyObject
*
exc_type
=
self
->
exc_type
;
PyObject
*
exc_value
=
self
->
exc_value
;
PyObject
*
exc_traceback
=
self
->
exc_traceback
;
...
...
@@ -205,7 +438,7 @@ void __Pyx_Generator_ExceptionClear(__pyx_GeneratorObject *self) {
}
static
CYTHON_INLINE
int
__Pyx_
Generator_CheckRunning
(
__pyx_Generator
Object
*
gen
)
{
int
__Pyx_
Coroutine_CheckRunning
(
__pyx_Coroutine
Object
*
gen
)
{
if
(
unlikely
(
gen
->
is_running
))
{
PyErr_SetString
(
PyExc_ValueError
,
"generator already executing"
);
...
...
@@ -215,7 +448,7 @@ int __Pyx_Generator_CheckRunning(__pyx_GeneratorObject *gen) {
}
static
CYTHON_INLINE
PyObject
*
__Pyx_
Generator_SendEx
(
__pyx_Generator
Object
*
self
,
PyObject
*
value
)
{
PyObject
*
__Pyx_
Coroutine_SendEx
(
__pyx_Coroutine
Object
*
self
,
PyObject
*
value
)
{
PyObject
*
retval
;
assert
(
!
self
->
is_running
);
...
...
@@ -254,7 +487,7 @@ PyObject *__Pyx_Generator_SendEx(__pyx_GeneratorObject *self, PyObject *value) {
__Pyx_ExceptionSwap
(
&
self
->
exc_type
,
&
self
->
exc_value
,
&
self
->
exc_traceback
);
}
else
{
__Pyx_
Generator
_ExceptionClear
(
self
);
__Pyx_
Coroutine
_ExceptionClear
(
self
);
}
self
->
is_running
=
1
;
...
...
@@ -277,14 +510,14 @@ PyObject *__Pyx_Generator_SendEx(__pyx_GeneratorObject *self, PyObject *value) {
}
#endif
}
else
{
__Pyx_
Generator
_ExceptionClear
(
self
);
__Pyx_
Coroutine
_ExceptionClear
(
self
);
}
return
retval
;
}
static
CYTHON_INLINE
PyObject
*
__Pyx_
Generator
_MethodReturn
(
PyObject
*
retval
)
{
PyObject
*
__Pyx_
Coroutine
_MethodReturn
(
PyObject
*
retval
)
{
if
(
unlikely
(
!
retval
&&
!
PyErr_Occurred
()))
{
// method call must not terminate with NULL without setting an exception
PyErr_SetNone
(
PyExc_StopIteration
);
...
...
@@ -293,55 +526,41 @@ PyObject *__Pyx_Generator_MethodReturn(PyObject *retval) {
}
static
CYTHON_INLINE
PyObject
*
__Pyx_
Generator_FinishDelegation
(
__pyx_Generator
Object
*
gen
)
{
PyObject
*
__Pyx_
Coroutine_FinishDelegation
(
__pyx_Coroutine
Object
*
gen
)
{
PyObject
*
ret
;
PyObject
*
val
=
NULL
;
__Pyx_
Generator
_Undelegate
(
gen
);
__Pyx_
Coroutine
_Undelegate
(
gen
);
__Pyx_PyGen_FetchStopIterationValue
(
&
val
);
// val == NULL on failure => pass on exception
ret
=
__Pyx_
Generator
_SendEx
(
gen
,
val
);
ret
=
__Pyx_
Coroutine
_SendEx
(
gen
,
val
);
Py_XDECREF
(
val
);
return
ret
;
}
static
PyObject
*
__Pyx_Generator_Next
(
PyObject
*
self
)
{
__pyx_GeneratorObject
*
gen
=
(
__pyx_GeneratorObject
*
)
self
;
PyObject
*
yf
=
gen
->
yieldfrom
;
if
(
unlikely
(
__Pyx_Generator_CheckRunning
(
gen
)))
return
NULL
;
if
(
yf
)
{
PyObject
*
ret
;
// FIXME: does this really need an INCREF() ?
//Py_INCREF(yf);
// YieldFrom code ensures that yf is an iterator
gen
->
is_running
=
1
;
ret
=
Py_TYPE
(
yf
)
->
tp_iternext
(
yf
);
gen
->
is_running
=
0
;
//Py_DECREF(yf);
if
(
likely
(
ret
))
{
return
ret
;
}
return
__Pyx_Generator_FinishDelegation
(
gen
);
}
return
__Pyx_Generator_SendEx
(
gen
,
Py_None
);
}
static
PyObject
*
__Pyx_Generator_Send
(
PyObject
*
self
,
PyObject
*
value
)
{
static
PyObject
*
__Pyx_Coroutine_Send
(
PyObject
*
self
,
PyObject
*
value
)
{
PyObject
*
retval
;
__pyx_
GeneratorObject
*
gen
=
(
__pyx_Generator
Object
*
)
self
;
__pyx_
CoroutineObject
*
gen
=
(
__pyx_Coroutine
Object
*
)
self
;
PyObject
*
yf
=
gen
->
yieldfrom
;
if
(
unlikely
(
__Pyx_
Generator
_CheckRunning
(
gen
)))
if
(
unlikely
(
__Pyx_
Coroutine
_CheckRunning
(
gen
)))
return
NULL
;
if
(
yf
)
{
PyObject
*
ret
;
// FIXME: does this really need an INCREF() ?
//Py_INCREF(yf);
gen
->
is_running
=
1
;
#ifdef __Pyx_Generator_USED
if
(
__Pyx_Generator_CheckExact
(
yf
))
{
ret
=
__Pyx_Generator_Send
(
yf
,
value
);
}
else
{
ret
=
__Pyx_Coroutine_Send
(
yf
,
value
);
}
else
#endif
#ifdef __Pyx_Coroutine_USED
if
(
__Pyx_Coroutine_CheckExact
(
yf
))
{
ret
=
__Pyx_Coroutine_Send
(
yf
,
value
);
}
else
#endif
{
if
(
value
==
Py_None
)
ret
=
PyIter_Next
(
yf
);
ret
=
PyIter_Next
(
yf
);
// FIXME!
else
ret
=
__Pyx_PyObject_CallMethod1
(
yf
,
PYIDENT
(
"send"
),
value
);
}
...
...
@@ -350,24 +569,34 @@ static PyObject *__Pyx_Generator_Send(PyObject *self, PyObject *value) {
if
(
likely
(
ret
))
{
return
ret
;
}
retval
=
__Pyx_
Generator
_FinishDelegation
(
gen
);
retval
=
__Pyx_
Coroutine
_FinishDelegation
(
gen
);
}
else
{
retval
=
__Pyx_
Generator
_SendEx
(
gen
,
value
);
retval
=
__Pyx_
Coroutine
_SendEx
(
gen
,
value
);
}
return
__Pyx_
Generator
_MethodReturn
(
retval
);
return
__Pyx_
Coroutine
_MethodReturn
(
retval
);
}
// This helper function is used by gen_close and gen_throw to
// close a subiterator being delegated to by yield-from.
static
int
__Pyx_
Generator_CloseIter
(
__pyx_Generator
Object
*
gen
,
PyObject
*
yf
)
{
static
int
__Pyx_
Coroutine_CloseIter
(
__pyx_Coroutine
Object
*
gen
,
PyObject
*
yf
)
{
PyObject
*
retval
=
NULL
;
int
err
=
0
;
#ifdef __Pyx_Generator_USED
if
(
__Pyx_Generator_CheckExact
(
yf
))
{
retval
=
__Pyx_
Generator
_Close
(
yf
);
retval
=
__Pyx_
Coroutine
_Close
(
yf
);
if
(
!
retval
)
return
-
1
;
}
else
{
}
else
#endif
#ifdef __Pyx_Coroutine_USED
if
(
__Pyx_Coroutine_CheckExact
(
yf
))
{
retval
=
__Pyx_Coroutine_Close
(
yf
);
if
(
!
retval
)
return
-
1
;
}
else
#endif
{
PyObject
*
meth
;
gen
->
is_running
=
1
;
meth
=
__Pyx_PyObject_GetAttrStr
(
yf
,
PYIDENT
(
"close"
));
...
...
@@ -388,24 +617,46 @@ static int __Pyx_Generator_CloseIter(__pyx_GeneratorObject *gen, PyObject *yf) {
return
err
;
}
static
PyObject
*
__Pyx_Generator_Close
(
PyObject
*
self
)
{
__pyx_GeneratorObject
*
gen
=
(
__pyx_GeneratorObject
*
)
self
;
static
PyObject
*
__Pyx_Generator_Next
(
PyObject
*
self
)
{
__pyx_CoroutineObject
*
gen
=
(
__pyx_CoroutineObject
*
)
self
;
PyObject
*
yf
=
gen
->
yieldfrom
;
if
(
unlikely
(
__Pyx_Coroutine_CheckRunning
(
gen
)))
return
NULL
;
if
(
yf
)
{
PyObject
*
ret
;
// FIXME: does this really need an INCREF() ?
//Py_INCREF(yf);
// YieldFrom code ensures that yf is an iterator
gen
->
is_running
=
1
;
ret
=
Py_TYPE
(
yf
)
->
tp_iternext
(
yf
);
gen
->
is_running
=
0
;
//Py_DECREF(yf);
if
(
likely
(
ret
))
{
return
ret
;
}
return
__Pyx_Coroutine_FinishDelegation
(
gen
);
}
return
__Pyx_Coroutine_SendEx
(
gen
,
Py_None
);
}
static
PyObject
*
__Pyx_Coroutine_Close
(
PyObject
*
self
)
{
__pyx_CoroutineObject
*
gen
=
(
__pyx_CoroutineObject
*
)
self
;
PyObject
*
retval
,
*
raised_exception
;
PyObject
*
yf
=
gen
->
yieldfrom
;
int
err
=
0
;
if
(
unlikely
(
__Pyx_
Generator
_CheckRunning
(
gen
)))
if
(
unlikely
(
__Pyx_
Coroutine
_CheckRunning
(
gen
)))
return
NULL
;
if
(
yf
)
{
Py_INCREF
(
yf
);
err
=
__Pyx_
Generator
_CloseIter
(
gen
,
yf
);
__Pyx_
Generator
_Undelegate
(
gen
);
err
=
__Pyx_
Coroutine
_CloseIter
(
gen
,
yf
);
__Pyx_
Coroutine
_Undelegate
(
gen
);
Py_DECREF
(
yf
);
}
if
(
err
==
0
)
PyErr_SetNone
(
PyExc_GeneratorExit
);
retval
=
__Pyx_
Generator
_SendEx
(
gen
,
NULL
);
retval
=
__Pyx_
Coroutine
_SendEx
(
gen
,
NULL
);
if
(
retval
)
{
Py_DECREF
(
retval
);
PyErr_SetString
(
PyExc_RuntimeError
,
...
...
@@ -427,8 +678,8 @@ static PyObject *__Pyx_Generator_Close(PyObject *self) {
return
NULL
;
}
static
PyObject
*
__Pyx_
Generator
_Throw
(
PyObject
*
self
,
PyObject
*
args
)
{
__pyx_
GeneratorObject
*
gen
=
(
__pyx_Generator
Object
*
)
self
;
static
PyObject
*
__Pyx_
Coroutine
_Throw
(
PyObject
*
self
,
PyObject
*
args
)
{
__pyx_
CoroutineObject
*
gen
=
(
__pyx_Coroutine
Object
*
)
self
;
PyObject
*
typ
;
PyObject
*
tb
=
NULL
;
PyObject
*
val
=
NULL
;
...
...
@@ -437,24 +688,32 @@ static PyObject *__Pyx_Generator_Throw(PyObject *self, PyObject *args) {
if
(
!
PyArg_UnpackTuple
(
args
,
(
char
*
)
"throw"
,
1
,
3
,
&
typ
,
&
val
,
&
tb
))
return
NULL
;
if
(
unlikely
(
__Pyx_
Generator
_CheckRunning
(
gen
)))
if
(
unlikely
(
__Pyx_
Coroutine
_CheckRunning
(
gen
)))
return
NULL
;
if
(
yf
)
{
PyObject
*
ret
;
Py_INCREF
(
yf
);
if
(
PyErr_GivenExceptionMatches
(
typ
,
PyExc_GeneratorExit
))
{
int
err
=
__Pyx_
Generator
_CloseIter
(
gen
,
yf
);
int
err
=
__Pyx_
Coroutine
_CloseIter
(
gen
,
yf
);
Py_DECREF
(
yf
);
__Pyx_
Generator
_Undelegate
(
gen
);
__Pyx_
Coroutine
_Undelegate
(
gen
);
if
(
err
<
0
)
return
__Pyx_
Generator_MethodReturn
(
__Pyx_Generator
_SendEx
(
gen
,
NULL
));
return
__Pyx_
Coroutine_MethodReturn
(
__Pyx_Coroutine
_SendEx
(
gen
,
NULL
));
goto
throw_here
;
}
gen
->
is_running
=
1
;
#ifdef __Pyx_Generator_USED
if
(
__Pyx_Generator_CheckExact
(
yf
))
{
ret
=
__Pyx_Generator_Throw
(
yf
,
args
);
}
else
{
ret
=
__Pyx_Coroutine_Throw
(
yf
,
args
);
}
else
#endif
#ifdef __Pyx_Coroutine_USED
if
(
__Pyx_Coroutine_CheckExact
(
yf
))
{
ret
=
__Pyx_Coroutine_Throw
(
yf
,
args
);
}
else
#endif
{
PyObject
*
meth
=
__Pyx_PyObject_GetAttrStr
(
yf
,
PYIDENT
(
"throw"
));
if
(
unlikely
(
!
meth
))
{
Py_DECREF
(
yf
);
...
...
@@ -463,7 +722,7 @@ static PyObject *__Pyx_Generator_Throw(PyObject *self, PyObject *args) {
return
NULL
;
}
PyErr_Clear
();
__Pyx_
Generator
_Undelegate
(
gen
);
__Pyx_
Coroutine
_Undelegate
(
gen
);
gen
->
is_running
=
0
;
goto
throw_here
;
}
...
...
@@ -473,17 +732,17 @@ static PyObject *__Pyx_Generator_Throw(PyObject *self, PyObject *args) {
gen
->
is_running
=
0
;
Py_DECREF
(
yf
);
if
(
!
ret
)
{
ret
=
__Pyx_
Generator
_FinishDelegation
(
gen
);
ret
=
__Pyx_
Coroutine
_FinishDelegation
(
gen
);
}
return
__Pyx_
Generator
_MethodReturn
(
ret
);
return
__Pyx_
Coroutine
_MethodReturn
(
ret
);
}
throw_here:
__Pyx_Raise
(
typ
,
val
,
tb
,
NULL
);
return
__Pyx_
Generator_MethodReturn
(
__Pyx_Generator
_SendEx
(
gen
,
NULL
));
return
__Pyx_
Coroutine_MethodReturn
(
__Pyx_Coroutine
_SendEx
(
gen
,
NULL
));
}
static
int
__Pyx_
Generator
_traverse
(
PyObject
*
self
,
visitproc
visit
,
void
*
arg
)
{
__pyx_
GeneratorObject
*
gen
=
(
__pyx_Generator
Object
*
)
self
;
static
int
__Pyx_
Coroutine
_traverse
(
PyObject
*
self
,
visitproc
visit
,
void
*
arg
)
{
__pyx_
CoroutineObject
*
gen
=
(
__pyx_Coroutine
Object
*
)
self
;
Py_VISIT
(
gen
->
closure
);
Py_VISIT
(
gen
->
classobj
);
...
...
@@ -494,8 +753,8 @@ static int __Pyx_Generator_traverse(PyObject *self, visitproc visit, void *arg)
return
0
;
}
static
int
__Pyx_
Generator
_clear
(
PyObject
*
self
)
{
__pyx_
GeneratorObject
*
gen
=
(
__pyx_Generator
Object
*
)
self
;
static
int
__Pyx_
Coroutine
_clear
(
PyObject
*
self
)
{
__pyx_
CoroutineObject
*
gen
=
(
__pyx_Coroutine
Object
*
)
self
;
Py_CLEAR
(
gen
->
closure
);
Py_CLEAR
(
gen
->
classobj
);
...
...
@@ -508,8 +767,8 @@ static int __Pyx_Generator_clear(PyObject *self) {
return
0
;
}
static
void
__Pyx_
Generator
_dealloc
(
PyObject
*
self
)
{
__pyx_
GeneratorObject
*
gen
=
(
__pyx_Generator
Object
*
)
self
;
static
void
__Pyx_
Coroutine
_dealloc
(
PyObject
*
self
)
{
__pyx_
CoroutineObject
*
gen
=
(
__pyx_Coroutine
Object
*
)
self
;
PyObject_GC_UnTrack
(
gen
);
if
(
gen
->
gi_weakreflist
!=
NULL
)
...
...
@@ -531,14 +790,14 @@ static void __Pyx_Generator_dealloc(PyObject *self) {
PyObject_GC_UnTrack
(
self
);
}
__Pyx_
Generator
_clear
(
self
);
__Pyx_
Coroutine
_clear
(
self
);
PyObject_GC_Del
(
gen
);
}
static
void
__Pyx_
Generator
_del
(
PyObject
*
self
)
{
static
void
__Pyx_
Coroutine
_del
(
PyObject
*
self
)
{
PyObject
*
res
;
PyObject
*
error_type
,
*
error_value
,
*
error_traceback
;
__pyx_
GeneratorObject
*
gen
=
(
__pyx_Generator
Object
*
)
self
;
__pyx_
CoroutineObject
*
gen
=
(
__pyx_Coroutine
Object
*
)
self
;
if
(
gen
->
resume_label
<=
0
)
return
;
...
...
@@ -552,7 +811,7 @@ static void __Pyx_Generator_del(PyObject *self) {
// Save the current exception, if any.
__Pyx_ErrFetch
(
&
error_type
,
&
error_value
,
&
error_traceback
);
res
=
__Pyx_
Generator
_Close
(
self
);
res
=
__Pyx_
Coroutine
_Close
(
self
);
if
(
res
==
NULL
)
PyErr_WriteUnraisable
(
self
);
...
...
@@ -599,14 +858,14 @@ static void __Pyx_Generator_del(PyObject *self) {
}
static
PyObject
*
__Pyx_
Generator_get_name
(
__pyx_Generator
Object
*
self
)
__Pyx_
Coroutine_get_name
(
__pyx_Coroutine
Object
*
self
)
{
Py_INCREF
(
self
->
gi_name
);
return
self
->
gi_name
;
}
static
int
__Pyx_
Generator_set_name
(
__pyx_Generator
Object
*
self
,
PyObject
*
value
)
__Pyx_
Coroutine_set_name
(
__pyx_Coroutine
Object
*
self
,
PyObject
*
value
)
{
PyObject
*
tmp
;
...
...
@@ -627,14 +886,14 @@ __Pyx_Generator_set_name(__pyx_GeneratorObject *self, PyObject *value)
}
static
PyObject
*
__Pyx_
Generator_get_qualname
(
__pyx_Generator
Object
*
self
)
__Pyx_
Coroutine_get_qualname
(
__pyx_Coroutine
Object
*
self
)
{
Py_INCREF
(
self
->
gi_qualname
);
return
self
->
gi_qualname
;
}
static
int
__Pyx_
Generator_set_qualname
(
__pyx_Generator
Object
*
self
,
PyObject
*
value
)
__Pyx_
Coroutine_set_qualname
(
__pyx_Coroutine
Object
*
self
,
PyObject
*
value
)
{
PyObject
*
tmp
;
...
...
@@ -654,39 +913,149 @@ __Pyx_Generator_set_qualname(__pyx_GeneratorObject *self, PyObject *value)
return
0
;
}
static
PyGetSetDef
__pyx_
Generator
_getsets
[]
=
{
{(
char
*
)
"__name__"
,
(
getter
)
__Pyx_
Generator_get_name
,
(
setter
)
__Pyx_Generator
_set_name
,
static
PyGetSetDef
__pyx_
Coroutine
_getsets
[]
=
{
{(
char
*
)
"__name__"
,
(
getter
)
__Pyx_
Coroutine_get_name
,
(
setter
)
__Pyx_Coroutine
_set_name
,
(
char
*
)
PyDoc_STR
(
"name of the generator"
),
0
},
{(
char
*
)
"__qualname__"
,
(
getter
)
__Pyx_
Generator_get_qualname
,
(
setter
)
__Pyx_Generator
_set_qualname
,
{(
char
*
)
"__qualname__"
,
(
getter
)
__Pyx_
Coroutine_get_qualname
,
(
setter
)
__Pyx_Coroutine
_set_qualname
,
(
char
*
)
PyDoc_STR
(
"qualified name of the generator"
),
0
},
{
0
,
0
,
0
,
0
,
0
}
};
static
PyMemberDef
__pyx_
Generator
_memberlist
[]
=
{
{(
char
*
)
"gi_running"
,
T_BOOL
,
offsetof
(
__pyx_
Generator
Object
,
is_running
),
READONLY
,
NULL
},
static
PyMemberDef
__pyx_
Coroutine
_memberlist
[]
=
{
{(
char
*
)
"gi_running"
,
T_BOOL
,
offsetof
(
__pyx_
Coroutine
Object
,
is_running
),
READONLY
,
NULL
},
{
0
,
0
,
0
,
0
,
0
}
};
static
PyMethodDef
__pyx_Generator_methods
[]
=
{
{
"send"
,
(
PyCFunction
)
__Pyx_Generator_Send
,
METH_O
,
0
},
{
"throw"
,
(
PyCFunction
)
__Pyx_Generator_Throw
,
METH_VARARGS
,
0
},
{
"close"
,
(
PyCFunction
)
__Pyx_Generator_Close
,
METH_NOARGS
,
0
},
static
__pyx_CoroutineObject
*
__Pyx__Coroutine_New
(
PyTypeObject
*
type
,
__pyx_coroutine_body_t
body
,
PyObject
*
closure
,
PyObject
*
name
,
PyObject
*
qualname
)
{
__pyx_CoroutineObject
*
gen
=
PyObject_GC_New
(
__pyx_CoroutineObject
,
type
);
if
(
gen
==
NULL
)
return
NULL
;
gen
->
body
=
body
;
gen
->
closure
=
closure
;
Py_XINCREF
(
closure
);
gen
->
is_running
=
0
;
gen
->
resume_label
=
0
;
gen
->
classobj
=
NULL
;
gen
->
yieldfrom
=
NULL
;
gen
->
exc_type
=
NULL
;
gen
->
exc_value
=
NULL
;
gen
->
exc_traceback
=
NULL
;
gen
->
gi_weakreflist
=
NULL
;
Py_XINCREF
(
qualname
);
gen
->
gi_qualname
=
qualname
;
Py_XINCREF
(
name
);
gen
->
gi_name
=
name
;
PyObject_GC_Track
(
gen
);
return
gen
;
}
//////////////////// Coroutine ////////////////////
//@requires: CoroutineBase
//@requires: PatchGeneratorABC
static
void
__Pyx_Coroutine_check_and_dealloc
(
PyObject
*
self
)
{
__pyx_CoroutineObject
*
gen
=
(
__pyx_CoroutineObject
*
)
self
;
if
(
gen
->
resume_label
==
0
&&
!
PyErr_Occurred
())
{
#if PY_VERSION_HEX >= 0x03030000 || defined(PyErr_WarnFormat)
PyErr_WarnFormat
(
PyExc_RuntimeWarning
,
1
,
"coroutine '%.50S' was never awaited"
,
gen
->
gi_qualname
);
#else
PyObject
*
msg
,
*
qualname
;
char
*
cname
,
*
cmsg
;
#if PY_MAJOR_VERSION >= 3
qualname
=
PyUnicode_AsUTF8String
(
gen
->
gi_qualname
);
if
(
likely
(
qualname
))
{
cname
=
PyBytes_AS_STRING
(
qualname
);
}
else
{
PyErr_Clear
();
cname
=
(
char
*
)
"?"
;
}
msg
=
PyBytes_FromFormat
(
#else
qualname
=
gen
->
gi_qualname
;
cname
=
PyString_AS_STRING
(
qualname
);
msg
=
PyString_FromFormat
(
#endif
"coroutine '%.50s' was never awaited"
,
cname
);
#if PY_MAJOR_VERSION >= 3
Py_XDECREF
(
qualname
);
#endif
if
(
unlikely
(
!
msg
))
{
PyErr_Clear
();
cmsg
=
(
char
*
)
"coroutine was never awaited"
;
}
else
{
#if PY_MAJOR_VERSION >= 3
cmsg
=
PyBytes_AS_STRING
(
msg
);
#else
cmsg
=
PyString_AS_STRING
(
msg
);
#endif
}
if
(
unlikely
(
PyErr_WarnEx
(
PyExc_RuntimeWarning
,
cmsg
,
1
)
<
0
))
PyErr_WriteUnraisable
(
self
);
Py_XDECREF
(
msg
);
#endif
}
__Pyx_Coroutine_dealloc
(
self
);
}
#if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1
static
PyObject
*
__Pyx_Coroutine_compare
(
PyObject
*
obj
,
PyObject
*
other
,
int
op
)
{
PyObject
*
result
;
switch
(
op
)
{
case
Py_EQ
:
result
=
(
other
==
obj
)
?
Py_True
:
Py_False
;
break
;
case
Py_NE
:
result
=
(
other
!=
obj
)
?
Py_True
:
Py_False
;
break
;
default:
result
=
Py_NotImplemented
;
}
Py_INCREF
(
result
);
return
result
;
}
#endif
static
PyObject
*
__Pyx_Coroutine_await
(
PyObject
*
self
)
{
Py_INCREF
(
self
);
return
self
;
}
static
PyMethodDef
__pyx_Coroutine_methods
[]
=
{
{
"send"
,
(
PyCFunction
)
__Pyx_Coroutine_Send
,
METH_O
,
0
},
{
"throw"
,
(
PyCFunction
)
__Pyx_Coroutine_Throw
,
METH_VARARGS
,
0
},
{
"close"
,
(
PyCFunction
)
__Pyx_Coroutine_Close
,
METH_NOARGS
,
0
},
#if PY_VERSION_HEX < 0x030500B1
{
"__await__"
,
(
PyCFunction
)
__Pyx_Coroutine_await
,
METH_NOARGS
,
0
},
#endif
{
0
,
0
,
0
,
0
}
};
static
PyTypeObject
__pyx_GeneratorType_type
=
{
#if PY_MAJOR_VERSION >= 3
static
__Pyx_PyAsyncMethodsStruct
__pyx_Coroutine_as_async
=
{
__Pyx_Coroutine_await
,
/*am_await*/
0
,
/*am_aiter*/
0
,
/*am_anext*/
};
#endif
static
PyTypeObject
__pyx_CoroutineType_type
=
{
PyVarObject_HEAD_INIT
(
0
,
0
)
"
generator
"
,
/*tp_name*/
sizeof
(
__pyx_
Generator
Object
),
/*tp_basicsize*/
"
coroutine
"
,
/*tp_name*/
sizeof
(
__pyx_
Coroutine
Object
),
/*tp_basicsize*/
0
,
/*tp_itemsize*/
(
destructor
)
__Pyx_
Generator
_dealloc
,
/*tp_dealloc*/
(
destructor
)
__Pyx_
Coroutine_check_and
_dealloc
,
/*tp_dealloc*/
0
,
/*tp_print*/
0
,
/*tp_getattr*/
0
,
/*tp_setattr*/
#if PY_MAJOR_VERSION
<
3
0
,
/*tp_compare
*/
#if PY_MAJOR_VERSION
>=
3
&
__pyx_Coroutine_as_async
,
/*tp_as_async (tp_reserved)
*/
#else
0
,
/*reserved*/
0
,
/*
tp_
reserved*/
#endif
0
,
/*tp_repr*/
0
,
/*tp_as_number*/
...
...
@@ -700,15 +1069,21 @@ static PyTypeObject __pyx_GeneratorType_type = {
0
,
/*tp_as_buffer*/
Py_TPFLAGS_DEFAULT
|
Py_TPFLAGS_HAVE_GC
|
Py_TPFLAGS_HAVE_FINALIZE
,
/*tp_flags*/
0
,
/*tp_doc*/
(
traverseproc
)
__Pyx_
Generator
_traverse
,
/*tp_traverse*/
(
traverseproc
)
__Pyx_
Coroutine
_traverse
,
/*tp_traverse*/
0
,
/*tp_clear*/
#if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1
// in order to (mis-)use tp_reserved above, we must also implement tp_richcompare
__Pyx_Coroutine_compare
,
/*tp_richcompare*/
#else
0
,
/*tp_richcompare*/
offsetof
(
__pyx_GeneratorObject
,
gi_weakreflist
),
/*tp_weaklistoffset*/
#endif
offsetof
(
__pyx_CoroutineObject
,
gi_weakreflist
),
/*tp_weaklistoffset*/
// no tp_iter() as iterator is only available through __await__()
0
,
/*tp_iter*/
(
iternextfunc
)
__Pyx_Generator_Next
,
/*tp_iternext*/
__pyx_
Generator
_methods
,
/*tp_methods*/
__pyx_
Generator
_memberlist
,
/*tp_members*/
__pyx_
Generator
_getsets
,
/*tp_getset*/
__pyx_
Coroutine
_methods
,
/*tp_methods*/
__pyx_
Coroutine
_memberlist
,
/*tp_members*/
__pyx_
Coroutine
_getsets
,
/*tp_getset*/
0
,
/*tp_base*/
0
,
/*tp_dict*/
0
,
/*tp_descr_get*/
...
...
@@ -727,41 +1102,92 @@ static PyTypeObject __pyx_GeneratorType_type = {
#if PY_VERSION_HEX >= 0x030400a1
0
,
/*tp_del*/
#else
__Pyx_
Generator
_del
,
/*tp_del*/
__Pyx_
Coroutine
_del
,
/*tp_del*/
#endif
0
,
/*tp_version_tag*/
#if PY_VERSION_HEX >= 0x030400a1
__Pyx_
Generator
_del
,
/*tp_finalize*/
__Pyx_
Coroutine
_del
,
/*tp_finalize*/
#endif
};
static
__pyx_GeneratorObject
*
__Pyx_Generator_New
(
__pyx_generator_body_t
body
,
PyObject
*
closure
,
PyObject
*
name
,
PyObject
*
qualname
)
{
__pyx_GeneratorObject
*
gen
=
PyObject_GC_New
(
__pyx_GeneratorObject
,
__pyx_GeneratorType
);
static
int
__pyx_Coroutine_init
(
void
)
{
// on Windows, C-API functions can't be used in slots statically
__pyx_CoroutineType_type
.
tp_getattro
=
PyObject_GenericGetAttr
;
if
(
gen
==
NULL
)
return
NULL
;
__pyx_CoroutineType
=
__Pyx_FetchCommonType
(
&
__pyx_CoroutineType_type
);
if
(
unlikely
(
!
__pyx_CoroutineType
))
{
return
-
1
;
}
return
0
;
}
gen
->
body
=
body
;
gen
->
closure
=
closure
;
Py_XINCREF
(
closure
);
gen
->
is_running
=
0
;
gen
->
resume_label
=
0
;
gen
->
classobj
=
NULL
;
gen
->
yieldfrom
=
NULL
;
gen
->
exc_type
=
NULL
;
gen
->
exc_value
=
NULL
;
gen
->
exc_traceback
=
NULL
;
gen
->
gi_weakreflist
=
NULL
;
Py_XINCREF
(
qualname
);
gen
->
gi_qualname
=
qualname
;
Py_XINCREF
(
name
);
gen
->
gi_name
=
name
;
//////////////////// Generator ////////////////////
//@requires: CoroutineBase
//@requires: PatchGeneratorABC
PyObject_GC_Track
(
gen
);
return
gen
;
}
static
PyMethodDef
__pyx_Generator_methods
[]
=
{
{
"send"
,
(
PyCFunction
)
__Pyx_Coroutine_Send
,
METH_O
,
0
},
{
"throw"
,
(
PyCFunction
)
__Pyx_Coroutine_Throw
,
METH_VARARGS
,
0
},
{
"close"
,
(
PyCFunction
)
__Pyx_Coroutine_Close
,
METH_NOARGS
,
0
},
{
0
,
0
,
0
,
0
}
};
static
PyTypeObject
__pyx_GeneratorType_type
=
{
PyVarObject_HEAD_INIT
(
0
,
0
)
"generator"
,
/*tp_name*/
sizeof
(
__pyx_CoroutineObject
),
/*tp_basicsize*/
0
,
/*tp_itemsize*/
(
destructor
)
__Pyx_Coroutine_dealloc
,
/*tp_dealloc*/
0
,
/*tp_print*/
0
,
/*tp_getattr*/
0
,
/*tp_setattr*/
0
,
/*tp_compare / tp_as_async*/
0
,
/*tp_repr*/
0
,
/*tp_as_number*/
0
,
/*tp_as_sequence*/
0
,
/*tp_as_mapping*/
0
,
/*tp_hash*/
0
,
/*tp_call*/
0
,
/*tp_str*/
0
,
/*tp_getattro*/
0
,
/*tp_setattro*/
0
,
/*tp_as_buffer*/
Py_TPFLAGS_DEFAULT
|
Py_TPFLAGS_HAVE_GC
|
Py_TPFLAGS_HAVE_FINALIZE
,
/*tp_flags*/
0
,
/*tp_doc*/
(
traverseproc
)
__Pyx_Coroutine_traverse
,
/*tp_traverse*/
0
,
/*tp_clear*/
0
,
/*tp_richcompare*/
offsetof
(
__pyx_CoroutineObject
,
gi_weakreflist
),
/*tp_weaklistoffset*/
0
,
/*tp_iter*/
(
iternextfunc
)
__Pyx_Generator_Next
,
/*tp_iternext*/
__pyx_Generator_methods
,
/*tp_methods*/
__pyx_Coroutine_memberlist
,
/*tp_members*/
__pyx_Coroutine_getsets
,
/*tp_getset*/
0
,
/*tp_base*/
0
,
/*tp_dict*/
0
,
/*tp_descr_get*/
0
,
/*tp_descr_set*/
0
,
/*tp_dictoffset*/
0
,
/*tp_init*/
0
,
/*tp_alloc*/
0
,
/*tp_new*/
0
,
/*tp_free*/
0
,
/*tp_is_gc*/
0
,
/*tp_bases*/
0
,
/*tp_mro*/
0
,
/*tp_cache*/
0
,
/*tp_subclasses*/
0
,
/*tp_weaklist*/
#if PY_VERSION_HEX >= 0x030400a1
0
,
/*tp_del*/
#else
__Pyx_Coroutine_del
,
/*tp_del*/
#endif
0
,
/*tp_version_tag*/
#if PY_VERSION_HEX >= 0x030400a1
__Pyx_Coroutine_del
,
/*tp_finalize*/
#endif
};
static
int
__pyx_Generator_init
(
void
)
{
// on Windows, C-API functions can't be used in slots statically
...
...
@@ -769,7 +1195,7 @@ static int __pyx_Generator_init(void) {
__pyx_GeneratorType_type
.
tp_iter
=
PyObject_SelfIter
;
__pyx_GeneratorType
=
__Pyx_FetchCommonType
(
&
__pyx_GeneratorType_type
);
if
(
__pyx_GeneratorType
==
NULL
)
{
if
(
unlikely
(
!
__pyx_GeneratorType
)
)
{
return
-
1
;
}
return
0
;
...
...
@@ -778,45 +1204,70 @@ static int __pyx_Generator_init(void) {

/////////////// ReturnWithStopIteration.proto ///////////////

#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030500B1
// CPython 3.3 <= x < 3.5b1 crash in yield-from when the StopIteration is not instantiated
#define __Pyx_ReturnWithStopIteration(value) \
    if (value == Py_None) PyErr_SetNone(PyExc_StopIteration); else __Pyx__ReturnWithStopIteration(value)
static void __Pyx__ReturnWithStopIteration(PyObject *value); /*proto*/
#else
#define __Pyx_ReturnWithStopIteration(value) PyErr_SetObject(PyExc_StopIteration, value)
#endif

/////////////// ReturnWithStopIteration ///////////////
//@requires: Exceptions.c::PyErrFetchRestore
//@substitute: naming

// 1) Instantiating an exception just to pass back a value is costly.
// 2) CPython 3.3 <= x < 3.5b1 crash in yield-from when the StopIteration is not instantiated.
// 3) Passing a tuple as value into PyErr_SetObject() passes its items on as arguments.
// 4) If there is currently an exception being handled, we need to chain it.

#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030500B1
static void __Pyx__ReturnWithStopIteration(PyObject *value) {
    PyObject *exc, *args;
    args = PyTuple_New(1);
    if (!args) return;
    Py_INCREF(value);
    PyTuple_SET_ITEM(args, 0, value);
#if CYTHON_COMPILING_IN_CPYTHON
    if ((PY_VERSION_HEX >= 0x03030000 && PY_VERSION_HEX < 0x030500B1) || PyTuple_Check(value)) {
        args = PyTuple_New(1);
        if (unlikely(!args)) return;
        Py_INCREF(value);
        PyTuple_SET_ITEM(args, 0, value);
        exc = PyType_Type.tp_call(PyExc_StopIteration, args, NULL);
        Py_DECREF(args);
        if (!exc) return;
    } else {
        // it's safe to avoid instantiating the exception
        Py_INCREF(value);
        exc = value;
    }
    if (!PyThreadState_GET()->exc_type) {
        // no chaining needed => avoid the overhead in PyErr_SetObject()
        Py_INCREF(PyExc_StopIteration);
        __Pyx_ErrRestore(PyExc_StopIteration, exc, NULL);
        return;
    }
#else
    args = PyTuple_Pack(1, value);
    if (unlikely(!args)) return;
    exc = PyObject_Call(PyExc_StopIteration, args, NULL);
    Py_DECREF(args);
    if (!exc) return;
    Py_INCREF(PyExc_StopIteration);
    PyErr_Restore(PyExc_StopIteration, exc, NULL);
}
    if (unlikely(!exc)) return;
#endif
    PyErr_SetObject(PyExc_StopIteration, exc);
    Py_DECREF(exc);
}
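
The comments above are the whole story: a coroutine's or generator's return value rides on `StopIteration.value`, and tuple values need special care because passing them to the exception constructor spreads them as separate arguments. A small illustration of the observable CPython behaviour that this helper has to reproduce (plain Python, not this C code):

def gen():
    yield 1
    return (2, 3)     # the return value is carried by StopIteration

g = gen()
assert next(g) == 1
try:
    next(g)
except StopIteration as exc:
    # .value keeps the tuple intact, while .args shows it as a single constructor argument
    assert exc.value == (2, 3)
    assert exc.args == ((2, 3),)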
//////////////////// PatchModuleWithGenerator.proto ////////////////////
//////////////////// PatchModuleWithCoroutine.proto ////////////////////

static PyObject* __Pyx_Generator_patch_module(PyObject* module, const char* py_code); /*proto*/
static PyObject* __Pyx_Coroutine_patch_module(PyObject* module, const char* py_code); /*proto*/

//////////////////// PatchModuleWithGenerator ////////////////////
//////////////////// PatchModuleWithCoroutine ////////////////////
//@substitute: naming

static PyObject* __Pyx_Generator_patch_module(PyObject* module, const char* py_code) {
#ifdef __Pyx_Generator_USED
static PyObject* __Pyx_Coroutine_patch_module(PyObject* module, const char* py_code) {
#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)
    PyObject *globals, *result_obj;
    globals = PyDict_New();
    if (unlikely(!globals)) goto ignore;
#ifdef __Pyx_Coroutine_USED
    if (unlikely(PyDict_SetItemString(globals, "_cython_coroutine_type",
                                      (PyObject*)__pyx_CoroutineType) < 0)) goto ignore;
#endif
#ifdef __Pyx_Generator_USED
    if (unlikely(PyDict_SetItemString(globals, "_cython_generator_type",
                                      (PyObject*)__pyx_GeneratorType) < 0)) goto ignore;
#endif
    if (unlikely(PyDict_SetItemString(globals, "_module", module) < 0)) goto ignore;
    if (unlikely(PyDict_SetItemString(globals, "__builtins__", $builtins_cname) < 0)) goto ignore;
    result_obj = PyRun_String(py_code, Py_file_input, globals, globals);
...
...
@@ -847,10 +1298,11 @@ ignore:
static int __Pyx_patch_abc(void); /*proto*/

//////////////////// PatchGeneratorABC ////////////////////
//@requires: PatchModuleWithGenerator
//@requires: PatchModuleWithCoroutine

static int __Pyx_patch_abc(void) {
#if defined(__Pyx_Generator_USED) && (!defined(CYTHON_PATCH_ABC) || CYTHON_PATCH_ABC)
#if (defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)) && \
        (!defined(CYTHON_PATCH_ABC) || CYTHON_PATCH_ABC)
    static int abc_patched = 0;
    if (!abc_patched) {
        PyObject *module;
...
...
@@ -859,19 +1311,15 @@ static int __Pyx_patch_abc(void) {
            PyErr_WriteUnraisable(NULL);
            if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning,
                    ((PY_VERSION_HEX >= 0x03030000) ?
                     "Cython module failed to patch collections.abc.Generator" :
                     "Cython module failed to patch collections.Generator"), 1) < 0)) {
                     "Cython module failed to patch collections.abc module" :
                     "Cython module failed to patch collections module"), 1) < 0)) {
                return -1;
            }
        } else {
            PyObject *abc = PyObject_GetAttrString(module, "Generator");
            if (abc) {
                abc_patched = 1;
                Py_DECREF(abc);
            } else {
                PyErr_Clear();
                module = __Pyx_Generator_patch_module(module, CSTRING("""\
                module = __Pyx_Coroutine_patch_module(module,
#ifdef __Pyx_Generator_USED
                    CSTRING("""\
def mk_gen():
    from abc import abstractmethod
...
...
@@ -926,22 +1374,122 @@ def mk_gen():
    generator = type((lambda: (yield))())
    Generator.register(generator)
    Generator.register(_cython_generator_type)
    return Generator
_module.Generator = mk_gen()
try:
    Generator = _module.Generator
except AttributeError:
    Generator = _module.Generator = mk_gen()
    Generator.register(_cython_generator_type)
"""
                    )
                );
                abc_patched = 1;
                if (unlikely(!module)) return -1;
            }
#endif
#ifdef __Pyx_Coroutine_USED
                    CSTRING("""\
def mk_awaitable():
    from abc import abstractmethod, ABCMeta
"""
                    )
#if PY_MAJOR_VERSION >= 3
                    CSTRING("""\
    class Awaitable(metaclass=ABCMeta):
"""
                    )
#else
                    CSTRING("""\
    class Awaitable(object):
        __metaclass__ = ABCMeta
"""
                    )
#endif
                    CSTRING("""\
        __slots__ = ()

        @abstractmethod
        def __await__(self):
            yield

        @classmethod
        def __subclasshook__(cls, C):
            if cls is Awaitable:
                for B in C.__mro__:
                    if '__await__' in B.__dict__:
                        if B.__dict__['__await__']:
                            return True
                        break
            return NotImplemented
    return Awaitable

try:
    Awaitable = _module.Awaitable
except AttributeError:
    Awaitable = _module.Awaitable = mk_awaitable()

def mk_coroutine():
    from abc import abstractmethod, ABCMeta

    class Coroutine(Awaitable):
        __slots__ = ()

        @abstractmethod
        def send(self, value):
            '''Send a value into the coroutine.
            Return next yielded value or raise StopIteration.
            '''
            raise StopIteration

        @abstractmethod
        def throw(self, typ, val=None, tb=None):
            '''Raise an exception in the coroutine.
            Return next yielded value or raise StopIteration.
            '''
            if val is None:
                if tb is None:
                    raise typ
                val = typ()
            if tb is not None:
                val = val.with_traceback(tb)
            raise val

        def close(self):
            '''Raise GeneratorExit inside coroutine.
            '''
            try:
                self.throw(GeneratorExit)
            except (GeneratorExit, StopIteration):
                pass
            else:
                raise RuntimeError('coroutine ignored GeneratorExit')

        @classmethod
        def __subclasshook__(cls, C):
            if cls is Coroutine:
                mro = C.__mro__
                for method in ('__await__', 'send', 'throw', 'close'):
                    for base in mro:
                        if method in base.__dict__:
                            break
                    else:
                        return NotImplemented
                return True
            return NotImplemented
    return Coroutine

try:
    Coroutine = _module.Coroutine
except AttributeError:
    Coroutine = _module.Coroutine = mk_coroutine()
Coroutine.register(_cython_coroutine_type)
"""
                    )
#endif
                );
                abc_patched = 1;
                if (unlikely(!module)) return -1;
                Py_DECREF(module);
            }
        }
#else
    // avoid "unused" warning for __Pyx_Generator_patch_module()
    if (0) __Pyx_Generator_patch_module(NULL, NULL);
    // avoid "unused" warning for __Pyx_Coroutine_patch_module()
    if (0) __Pyx_Coroutine_patch_module(NULL, NULL);
#endif
    return 0;
}
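
The effect of the embedded Python code above is simply ABC registration: Cython's internal coroutine/generator types get registered with (or structurally recognised by) the Awaitable/Coroutine/Generator ABCs so that isinstance/issubclass checks keep working. A hedged illustration of the same two mechanisms with ordinary Python classes (assuming Python 3.5+, where collections.abc already ships Awaitable and Coroutine; the class names are invented):

import collections.abc as abc

class MyAwaitable:
    # not derived from abc.Awaitable, but structurally compatible
    def __await__(self):
        yield

# recognised via Awaitable.__subclasshook__, no registration needed
assert isinstance(MyAwaitable(), abc.Awaitable)

class OpaqueType:
    pass

# explicit registration, which is what the patched module does for the
# internal coroutine/generator types
abc.Coroutine.register(OpaqueType)
assert issubclass(OpaqueType, abc.Coroutine)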
...
...
@@ -953,40 +1501,68 @@ _module.Generator = mk_gen()
static
PyObject
*
__Pyx_patch_asyncio
(
PyObject
*
module
);
/*proto*/
//////////////////// PatchAsyncIO ////////////////////
//@requires: PatchModuleWithGenerator
//@requires: ImportExport.c::Import
//@requires: PatchModuleWithCoroutine
//@requires: PatchInspect
static
PyObject
*
__Pyx_patch_asyncio
(
PyObject
*
module
)
{
#if defined(__Pyx_Generator_USED) && (!defined(CYTHON_PATCH_ASYNCIO) || CYTHON_PATCH_ASYNCIO)
#if PY_VERSION_HEX < 0x030500B1 && \
(defined(__Pyx_Coroutine_USED) || defined(__Pyx_Generator_USED)) && \
(!defined(CYTHON_PATCH_ASYNCIO) || CYTHON_PATCH_ASYNCIO)
PyObject
*
patch_module
=
NULL
;
static
int
asyncio_patched
=
0
;
if
(
unlikely
((
!
asyncio_patched
)
&&
module
))
{
PyObject
*
package
;
package
=
__Pyx_Import
(
PYIDENT
(
"asyncio.coroutines"
),
NULL
,
0
);
if
(
package
)
{
patch_module
=
__Pyx_
Generator
_patch_module
(
patch_module
=
__Pyx_
Coroutine
_patch_module
(
PyObject_GetAttrString
(
package
,
"coroutines"
),
CSTRING
(
"""\
old_types = getattr(_module, '_COROUTINE_TYPES', None)
if old_types is not None and _cython_generator_type not in old_types:
_module._COROUTINE_TYPES = type(old_types) (tuple(old_types) + (_cython_generator_type,))
coro_types = getattr(_module, '_COROUTINE_TYPES', None)
"""
)
#ifdef __Pyx_Coroutine_USED
CSTRING
(
"""\
if coro_types is not None and _cython_coroutine_type not in coro_types:
coro_types = type(coro_types) (tuple(coro_types) + (_cython_coroutine_type,))
"""
)
#endif
#ifdef __Pyx_Generator_USED
CSTRING
(
"""\
if coro_types is not None and _cython_generator_type not in coro_types:
coro_types = type(coro_types) (tuple(coro_types) + (_cython_generator_type,))
"""
)
#endif
CSTRING
(
"""
_module._COROUTINE_TYPES = coro_types
"""
)
);
#if PY_VERSION_HEX < 0x03050000
#if PY_VERSION_HEX < 0x03050000
}
else
{
// Py3.4 used to have asyncio.tasks instead of asyncio.coroutines
PyErr_Clear
();
package
=
__Pyx_Import
(
PYIDENT
(
"asyncio.tasks"
),
NULL
,
0
);
if
(
unlikely
(
!
package
))
goto
asyncio_done
;
patch_module
=
__Pyx_
Generator
_patch_module
(
patch_module
=
__Pyx_
Coroutine
_patch_module
(
PyObject_GetAttrString
(
package
,
"tasks"
),
CSTRING
(
"""\
if (hasattr(_module, 'iscoroutine') and
getattr(_module.iscoroutine, '_cython_generator_type', None) is not _cython_generator_type):
def cy_wrap(orig_func, cython_generator_type=_cython_generator_type, type=type):
def cy_iscoroutine(obj): return type(obj) is cython_generator_type or orig_func(obj)
cy_iscoroutine._cython_generator_type = cython_generator_type
return cy_iscoroutine
_module.iscoroutine = cy_wrap(_module.iscoroutine)
if hasattr(_module, 'iscoroutine'):
old_coroutine_types = getattr(_module.iscoroutine, '_cython_coroutine_types', None)
if old_coroutine_types is None or not isinstance(old_coroutine_types, list):
old_coroutine_types = []
def cy_wrap(orig_func, type=type, cython_coroutine_types=old_coroutine_types):
def cy_iscoroutine(obj): return type(obj) in cython_coroutine_types or orig_func(obj)
cy_iscoroutine._cython_coroutine_types = cython_coroutine_types
return cy_iscoroutine
_module.iscoroutine = cy_wrap(_module.iscoroutine)
"""
)
#ifdef __Pyx_Coroutine_USED
CSTRING
(
"""\
if _cython_coroutine_type not in old_coroutine_types: old_coroutine_types.append(_cython_coroutine_type)
"""
)
#endif
#ifdef __Pyx_Generator_USED
CSTRING
(
"""\
if _cython_generator_type not in old_coroutine_types: old_coroutine_types.append(_cython_generator_type)
"""
)
#endif
);
#endif
}
...
...
@@ -1023,8 +1599,8 @@ ignore:
module
=
NULL
;
}
#else
// avoid "unused" warning for __Pyx_
Generator
_patch_module()
if
(
0
)
return
__Pyx_
Generator
_patch_module
(
module
,
NULL
);
// avoid "unused" warning for __Pyx_
Coroutine
_patch_module()
if
(
0
)
return
__Pyx_
Coroutine
_patch_module
(
module
,
NULL
);
#endif
return
module
;
}
...
...
@@ -1036,14 +1612,16 @@ ignore:
static PyObject* __Pyx_patch_inspect(PyObject* module); /*proto*/

//////////////////// PatchInspect ////////////////////
//@requires: PatchModuleWithGenerator
//@requires: PatchModuleWithCoroutine

static PyObject* __Pyx_patch_inspect(PyObject* module) {
#if defined(__Pyx_Generator_USED) && (!defined(CYTHON_PATCH_INSPECT) || CYTHON_PATCH_INSPECT)
#if (defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)) && (!defined(CYTHON_PATCH_INSPECT) || CYTHON_PATCH_INSPECT)
    static int inspect_patched = 0;
    if (unlikely((!inspect_patched) && module)) {
        module = __Pyx_Generator_patch_module(module, CSTRING("""\
        module = __Pyx_Coroutine_patch_module(module,
#ifdef __Pyx_Generator_USED
            CSTRING("""\
if getattr(_module.isgenerator, '_cython_generator_type', None) is not _cython_generator_type:
    def cy_wrap(orig_func, cython_generator_type=_cython_generator_type, type=type):
        def cy_isgenerator(obj): return type(obj) is cython_generator_type or orig_func(obj)
...
...
@@ -1051,12 +1629,144 @@ if getattr(_module.isgenerator, '_cython_generator_type', None) is not _cython_g
        return cy_isgenerator
    _module.isgenerator = cy_wrap(_module.isgenerator)
"""
            )
#endif
#ifdef __Pyx_Coroutine_USED
            CSTRING("""\
try:
    _module.iscoroutine
except AttributeError:
    def cy_wrap(cython_coroutine_type=_cython_coroutine_type, type=type):
        try:
            from collections.abc import Coroutine
        except ImportError:
            from collections import Coroutine
        def cy_iscoroutine(obj): return isinstance(obj, Coroutine)
        return cy_iscoroutine
    try:
        _module.iscoroutine = cy_wrap()
    except ImportError:
        pass

try:
    _module.isawaitable
except AttributeError:
    def cy_wrap(cython_coroutine_type=_cython_coroutine_type, type=type):
        try:
            from collections.abc import Awaitable
        except ImportError:
            from collections import Awaitable
        def cy_isawaitable(obj): return isinstance(obj, Awaitable)
        return cy_isawaitable
    try:
        _module.isawaitable = cy_wrap()
    except ImportError:
        pass
"""
            )
#endif
        );
        inspect_patched = 1;
    }
#else
    // avoid "unused" warning for __Pyx_Generator_patch_module()
    if (0) return __Pyx_Generator_patch_module(module, NULL);
    // avoid "unused" warning for __Pyx_Coroutine_patch_module()
    if (0) return __Pyx_Coroutine_patch_module(module, NULL);
#endif
    return module;
}
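
The inspect patch above only supplies `iscoroutine()`/`isawaitable()` fallbacks (built on the ABCs) when the running Python lacks them. On Python 3.5+, where the functions exist natively, the checks a caller relies on look like this (a small usage sketch; `coro` and `CustomAwaitable` are invented names):

import inspect

async def coro():
    return 1

class CustomAwaitable:
    def __await__(self):
        yield

c = coro()
assert inspect.iscoroutine(c)                   # native coroutine object
assert inspect.isawaitable(c)                   # every coroutine is awaitable
assert inspect.isawaitable(CustomAwaitable())   # duck-typed via collections.abc.Awaitable
assert not inspect.iscoroutine(CustomAwaitable())
c.close()                                       # avoid the "never awaited" warning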
//////////////////// StopAsyncIteration.proto ////////////////////
#define __Pyx_StopAsyncIteration_USED
static
PyObject
*
__Pyx_PyExc_StopAsyncIteration
;
static
int
__pyx_StopAsyncIteration_init
(
void
);
/*proto*/
//////////////////// StopAsyncIteration ////////////////////
#if PY_VERSION_HEX < 0x030500B1
static
PyTypeObject
__Pyx__PyExc_StopAsyncIteration_type
=
{
PyVarObject_HEAD_INIT
(
0
,
0
)
"StopAsyncIteration"
,
/*tp_name*/
sizeof
(
PyBaseExceptionObject
),
/*tp_basicsize*/
0
,
/*tp_itemsize*/
0
,
/*tp_dealloc*/
0
,
/*tp_print*/
0
,
/*tp_getattr*/
0
,
/*tp_setattr*/
#if PY_MAJOR_VERSION < 3
0
,
/*tp_compare*/
#else
0
,
/*reserved*/
#endif
0
,
/*tp_repr*/
0
,
/*tp_as_number*/
0
,
/*tp_as_sequence*/
0
,
/*tp_as_mapping*/
0
,
/*tp_hash*/
0
,
/*tp_call*/
0
,
/*tp_str*/
0
,
/*tp_getattro*/
0
,
/*tp_setattro*/
0
,
/*tp_as_buffer*/
Py_TPFLAGS_DEFAULT
|
Py_TPFLAGS_BASETYPE
|
Py_TPFLAGS_HAVE_GC
,
/*tp_flags*/
PyDoc_STR
(
"Signal the end from iterator.__anext__()."
),
/*tp_doc*/
0
,
/*tp_traverse*/
0
,
/*tp_clear*/
0
,
/*tp_richcompare*/
0
,
/*tp_weaklistoffset*/
0
,
/*tp_iter*/
0
,
/*tp_iternext*/
0
,
/*tp_methods*/
0
,
/*tp_members*/
0
,
/*tp_getset*/
0
,
/*tp_base*/
0
,
/*tp_dict*/
0
,
/*tp_descr_get*/
0
,
/*tp_descr_set*/
0
,
/*tp_dictoffset*/
0
,
/*tp_init*/
0
,
/*tp_alloc*/
0
,
/*tp_new*/
0
,
/*tp_free*/
0
,
/*tp_is_gc*/
0
,
/*tp_bases*/
0
,
/*tp_mro*/
0
,
/*tp_cache*/
0
,
/*tp_subclasses*/
0
,
/*tp_weaklist*/
0
,
/*tp_del*/
0
,
/*tp_version_tag*/
#if PY_VERSION_HEX >= 0x030400a1
0
,
/*tp_finalize*/
#endif
};
#endif
static
int
__pyx_StopAsyncIteration_init
(
void
)
{
#if PY_VERSION_HEX >= 0x030500B1
__Pyx_PyExc_StopAsyncIteration
=
PyExc_StopAsyncIteration
;
#else
PyObject
*
builtins
=
PyEval_GetBuiltins
();
if
(
likely
(
builtins
))
{
PyObject
*
exc
=
PyMapping_GetItemString
(
builtins
,
"StopAsyncIteration"
);
if
(
exc
)
{
__Pyx_PyExc_StopAsyncIteration
=
exc
;
return
0
;
}
}
PyErr_Clear
();
__Pyx__PyExc_StopAsyncIteration_type
.
tp_traverse
=
((
PyTypeObject
*
)
PyExc_BaseException
)
->
tp_traverse
;
__Pyx__PyExc_StopAsyncIteration_type
.
tp_clear
=
((
PyTypeObject
*
)
PyExc_BaseException
)
->
tp_clear
;
__Pyx__PyExc_StopAsyncIteration_type
.
tp_dictoffset
=
((
PyTypeObject
*
)
PyExc_BaseException
)
->
tp_dictoffset
;
__Pyx__PyExc_StopAsyncIteration_type
.
tp_base
=
(
PyTypeObject
*
)
PyExc_Exception
;
__Pyx_PyExc_StopAsyncIteration
=
(
PyObject
*
)
__Pyx_FetchCommonType
(
&
__Pyx__PyExc_StopAsyncIteration_type
);
if
(
unlikely
(
!
__Pyx_PyExc_StopAsyncIteration
))
return
-
1
;
if
(
builtins
&&
unlikely
(
PyMapping_SetItemString
(
builtins
,
"StopAsyncIteration"
,
__Pyx_PyExc_StopAsyncIteration
)
<
0
))
return
-
1
;
#endif
return
0
;
}
Cython/Utility/CythonFunction.c
...
...
@@ -68,7 +68,7 @@ static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m,
                                                              PyObject *dict);

static int __Pyx_CyFunction_init(void);
static int __pyx_CyFunction_init(void);

//////////////////// CythonFunction ////////////////////
//@substitute: naming
...
...
@@ -693,7 +693,7 @@ static PyTypeObject __pyx_CyFunctionType_type = {
};

static int __Pyx_CyFunction_init(void) {
static int __pyx_CyFunction_init(void) {
#if !CYTHON_COMPILING_IN_PYPY
    // avoid a useless level of call indirection
    __pyx_CyFunctionType_type.tp_call = PyCFunction_Call;
...
...
Cython/Utility/ModuleSetupCode.c
...
...
@@ -184,6 +184,22 @@
  #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
#endif

// backport of PyAsyncMethods from Py3.5 to older Py3.x versions
// (mis-)using the "tp_reserved" type slot which is re-activated as "tp_as_async" in Py3.5
#if PY_VERSION_HEX >= 0x030500B1
#define __Pyx_PyAsyncMethodsStruct PyAsyncMethods
#define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)
#elif PY_MAJOR_VERSION >= 3
typedef struct {
    unaryfunc am_await;
    unaryfunc am_aiter;
    unaryfunc am_anext;
} __Pyx_PyAsyncMethodsStruct;
#define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
#else
#define __Pyx_PyType_AsAsync(obj) NULL
#endif

/* inline attribute */
#ifndef CYTHON_INLINE
  #if defined(__GNUC__)
...
...
tests/errors/pep492_badsyntax_async1.pyx (new file, 0 → 100644)

# mode: error
# tag: pep492, async

async def foo():
    def foo(a=await list()):
        pass

_ERRORS = """
5:14: 'await' not supported here
"""

tests/errors/pep492_badsyntax_async2.pyx (new file, 0 → 100644)

# mode: error
# tag: pep492, async

async def foo():
    def foo(a:await list()):
        pass

_ERRORS = """
5:14: 'await' not supported here
5:14: 'await' not supported here
"""

tests/errors/pep492_badsyntax_async3.pyx (new file, 0 → 100644)

# mode: error
# tag: pep492, async

async def foo():
    [i async for i in els]

_ERRORS = """
5:7: Expected ']', found 'async'
"""

tests/errors/pep492_badsyntax_async4.pyx (new file, 0 → 100644)

# mode: error
# tag: pep492, async

async def foo():
    async def foo():
        await list()

_ERRORS = """
# ??? - this fails in CPython, not sure why ...
"""

tests/errors/pep492_badsyntax_async5.pyx (new file, 0 → 100644)

# mode: error
# tag: pep492, async

def foo():
    await list()

_ERRORS = """
5:10: Syntax error in simple statement list
"""

tests/errors/pep492_badsyntax_async6.pyx (new file, 0 → 100644)

# mode: error
# tag: pep492, async

async def foo():
    yield

_ERRORS = """
5:4: 'yield' not allowed in async coroutines (use 'await')
5:4: 'yield' not supported here
"""

tests/errors/pep492_badsyntax_async7.pyx (new file, 0 → 100644)

# mode: error
# tag: pep492, async

async def foo():
    yield from []

_ERRORS = """
5:4: 'yield from' not supported here
5:4: 'yield' not allowed in async coroutines (use 'await')
"""

tests/errors/pep492_badsyntax_async8.pyx (new file, 0 → 100644)

# mode: error
# tag: pep492, async

async def foo():
    await await fut

_ERRORS = """
5:10: Expected an identifier or literal
"""

tests/errors/pep492_badsyntax_async9.pyx (new file, 0 → 100644)

# mode: error
# tag: pep492, async

async def foo():
    await

_ERRORS = """
5:9: Expected an identifier or literal
"""
tests/run/async_iter_pep492.pyx (new file, 0 → 100644)
# mode: run
# tag: pep492, asyncfor, await

import sys

if sys.version_info >= (3, 5, 0, 'beta'):
    # pass Cython implemented AsyncIter() into a Python async-for loop
    __doc__ = u"""
    >>> def test_py35():
    ...     buffer = []
    ...     async def coro():
    ...         async for i1, i2 in AsyncIter(1):
    ...             buffer.append(i1 + i2)
    ...     return coro, buffer

    >>> testfunc, buffer = test_py35()
    >>> buffer
    []
    >>> yielded, _ = run_async(testfunc(), check_type=False)
    >>> yielded == [i * 100 for i in range(1, 11)] or yielded
    True
    >>> buffer == [i*2 for i in range(1, 101)] or buffer
    True
    """


cdef class AsyncYieldFrom:
    cdef object obj

    def __init__(self, obj):
        self.obj = obj

    def __await__(self):
        yield from self.obj


cdef class AsyncYield:
    cdef object value

    def __init__(self, value):
        self.value = value

    def __await__(self):
        yield self.value


def run_async(coro, check_type='coroutine'):
    if check_type:
        assert coro.__class__.__name__ == check_type, \
            'type(%s) != %s' % (coro.__class__, check_type)

    buffer = []
    result = None
    while True:
        try:
            buffer.append(coro.send(None))
        except StopIteration as ex:
            result = ex.args[0] if ex.args else None
            break
    return buffer, result
cdef
class
AsyncIter
:
cdef
long
i
cdef
long
aiter_calls
cdef
long
max_iter_calls
def
__init__
(
self
,
long
max_iter_calls
=
1
):
self
.
i
=
0
self
.
aiter_calls
=
0
self
.
max_iter_calls
=
max_iter_calls
async
def
__aiter__
(
self
):
self
.
aiter_calls
+=
1
return
self
async
def
__anext__
(
self
):
self
.
i
+=
1
assert
self
.
aiter_calls
<=
self
.
max_iter_calls
if
not
(
self
.
i
%
10
):
await
AsyncYield
(
self
.
i
*
10
)
if
self
.
i
>
100
:
raise
StopAsyncIteration
return
self
.
i
,
self
.
i
def
test_for_1
():
"""
>>> testfunc, buffer = test_for_1()
>>> buffer
[]
>>> yielded, _ = run_async(testfunc())
>>> yielded == [i * 100 for i in range(1, 11)] or yielded
True
>>> buffer == [i*2 for i in range(1, 101)] or buffer
True
"""
buffer
=
[]
async
def
test1
():
async
for
i1
,
i2
in
AsyncIter
(
1
):
buffer
.
append
(
i1
+
i2
)
return
test1
,
buffer
def
test_for_2
():
"""
>>> testfunc, buffer = test_for_2()
>>> buffer
[]
>>> yielded, _ = run_async(testfunc())
>>> yielded == [100, 200] or yielded
True
>>> buffer == [i for i in range(1, 21)] + ['end'] or buffer
True
"""
buffer
=
[]
async
def
test2
():
nonlocal
buffer
async
for
i
in
AsyncIter
(
2
):
buffer
.
append
(
i
[
0
])
if
i
[
0
]
==
20
:
break
else
:
buffer
.
append
(
'what?'
)
buffer
.
append
(
'end'
)
return
test2
,
buffer
def
test_for_3
():
"""
>>> testfunc, buffer = test_for_3()
>>> buffer
[]
>>> yielded, _ = run_async(testfunc())
>>> yielded == [i * 100 for i in range(1, 11)] or yielded
True
>>> buffer == [i for i in range(1, 21)] + ['what?', 'end'] or buffer
True
"""
buffer
=
[]
async
def
test3
():
nonlocal
buffer
async
for
i
in
AsyncIter
(
3
):
if
i
[
0
]
>
20
:
continue
buffer
.
append
(
i
[
0
])
else
:
buffer
.
append
(
'what?'
)
buffer
.
append
(
'end'
)
return
test3
,
buffer
cdef
class
NonAwaitableFromAnext
:
async
def
__aiter__
(
self
):
return
self
def
__anext__
(
self
):
return
123
def
test_broken_anext
():
"""
>>> testfunc = test_broken_anext()
>>> try: run_async(testfunc())
... except TypeError as exc:
... assert ' int ' in str(exc)
... else:
... print("NOT RAISED!")
"""
async
def
foo
():
async
for
i
in
NonAwaitableFromAnext
():
print
(
'never going to happen'
)
return
foo
cdef
class
Manager
:
cdef
readonly
list
counter
def
__init__
(
self
,
counter
):
self
.
counter
=
counter
async
def
__aenter__
(
self
):
self
.
counter
[
0
]
+=
10000
async
def
__aexit__
(
self
,
*
args
):
self
.
counter
[
0
]
+=
100000
cdef
class
Iterable
:
cdef
long
i
def
__init__
(
self
):
self
.
i
=
0
async
def
__aiter__
(
self
):
return
self
async
def
__anext__
(
self
):
if
self
.
i
>
10
:
raise
StopAsyncIteration
self
.
i
+=
1
return
self
.
i
def
test_with_for
():
"""
>>> test_with_for()
111011
333033
20555255
"""
I
=
[
0
]
manager
=
Manager
(
I
)
iterable
=
Iterable
()
mrefs_before
=
sys
.
getrefcount
(
manager
)
irefs_before
=
sys
.
getrefcount
(
iterable
)
async
def
main
():
async
with
manager
:
async
for
i
in
iterable
:
I
[
0
]
+=
1
I
[
0
]
+=
1000
run_async
(
main
())
print
(
I
[
0
])
assert
sys
.
getrefcount
(
manager
)
==
mrefs_before
assert
sys
.
getrefcount
(
iterable
)
==
irefs_before
##############
async
def
main
():
nonlocal
I
async
with
Manager
(
I
):
async
for
i
in
Iterable
():
I
[
0
]
+=
1
I
[
0
]
+=
1000
async
with
Manager
(
I
):
async
for
i
in
Iterable
():
I
[
0
]
+=
1
I
[
0
]
+=
1000
run_async
(
main
())
print
(
I
[
0
])
##############
async
def
main
():
async
with
Manager
(
I
):
I
[
0
]
+=
100
async
for
i
in
Iterable
():
I
[
0
]
+=
1
else
:
I
[
0
]
+=
10000000
I
[
0
]
+=
1000
async
with
Manager
(
I
):
I
[
0
]
+=
100
async
for
i
in
Iterable
():
I
[
0
]
+=
1
else
:
I
[
0
]
+=
10000000
I
[
0
]
+=
1000
run_async
(
main
())
print
(
I
[
0
])
cdef
class
AI
:
async
def
__aiter__
(
self
):
1
/
0
def
test_aiter_raises
():
"""
>>> test_aiter_raises()
RAISED
0
"""
CNT
=
0
async
def
foo
():
nonlocal
CNT
async
for
i
in
AI
():
CNT
+=
1
CNT
+=
10
try
:
run_async
(
foo
())
except
ZeroDivisionError
:
print
(
"RAISED"
)
else
:
print
(
"NOT RAISED"
)
return
CNT
tests/run/test_coroutines_pep492.pyx (new file, 0 → 100644)
# cython: language_level=3, binding=True
# mode: run
# tag: pep492, asyncfor, await
import re
import gc
import sys
#import types
import os.path
import inspect
import unittest
import warnings
import contextlib
# fake types.coroutine() decorator
class types_coroutine(object):
    def __init__(self, gen):
        self._gen = gen

    class as_coroutine(object):
        def __init__(self, gen):
            self._gen = gen
            self.send = gen.send
            self.throw = gen.throw
            self.close = gen.close

        def __await__(self):
            return self._gen

        def __iter__(self):
            return self._gen

    def __call__(self, *args, **kwargs):
        return self.as_coroutine(self._gen(*args, **kwargs))
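
# --- Editor's sketch (not part of the original test file) --------------------
# How the fake decorator above is meant to be used: wrapping a generator
# function makes each call return an as_coroutine object whose __await__ hands
# the underlying generator to "await", mirroring types.coroutine().  The names
# _ticker/_awaits_ticker are illustrative only.
@types_coroutine
def _ticker():
    yield 'tick'

async def _awaits_ticker():
    await _ticker()     # delegates into the generator via as_coroutine.__await__
# ------------------------------------------------------------------------------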
# compiled exec()
def exec(code_string, l, g):
    from Cython.Shadow import inline
    ns = inline(code_string, locals=l, globals=g, lib_dir=os.path.dirname(__file__))
    g.update(ns)
class AsyncYieldFrom:
    def __init__(self, obj):
        self.obj = obj

    def __await__(self):
        yield from self.obj


class AsyncYield:
    def __init__(self, value):
        self.value = value

    def __await__(self):
        yield self.value
def run_async(coro):
    #assert coro.__class__ is types.GeneratorType
    assert coro.__class__.__name__ in ('coroutine', 'as_coroutine')

    buffer = []
    result = None
    while True:
        try:
            buffer.append(coro.send(None))
        except StopIteration as ex:
            result = ex.args[0] if ex.args else None
            break
    return buffer, result
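
# --- Editor's sketch (not part of the original test file) --------------------
# run_async() above drives a coroutine by repeatedly calling coro.send(None):
# every value the coroutine yields to its outermost caller is collected, and
# the coroutine's return value arrives via StopIteration.  _run_async_demo is
# an illustrative name; it relies only on the AsyncYield helper defined above.
async def _run_async_demo():
    await AsyncYield('tick')    # 'tick' is surfaced to the driving send() loop
    return 'done'               # becomes the `result` half of run_async()

# Expected: run_async(_run_async_demo()) == (['tick'], 'done')
# ------------------------------------------------------------------------------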
@contextlib.contextmanager
def silence_coro_gc():
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        yield
        gc.collect()
class TokenizerRegrTest(unittest.TestCase):

    def test_oneline_defs(self):
        buf = []
        for i in range(500):
            buf.append('def i{i}(): return {i}'.format(i=i))
        buf = '\n'.join(buf)

        # Test that 500 consequent, one-line defs is OK
        ns = {}
        exec(buf, ns, ns)
        self.assertEqual(ns['i499'](), 499)

        # Test that 500 consequent, one-line defs *and*
        # one 'async def' following them is OK
        buf += '\nasync def foo():\n    return'
        ns = {}
        exec(buf, ns, ns)
        self.assertEqual(ns['i499'](), 499)
        self.assertEqual(type(ns['foo']()).__name__, 'coroutine')
        #self.assertTrue(inspect.iscoroutinefunction(ns['foo']))
class CoroutineTest(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        # never mark warnings as "already seen" to prevent them from being suppressed
        from warnings import simplefilter
        simplefilter("always")

    @contextlib.contextmanager
    def assertRaises(self, exc_type):
        try:
            yield
        except exc_type:
            self.assertTrue(True)
        else:
            self.assertTrue(False)

    @contextlib.contextmanager
    def assertRaisesRegex(self, exc_type, regex):
        # the error messages usually don't match, so we just ignore them
        try:
            yield
        except exc_type:
            self.assertTrue(True)
        else:
            self.assertTrue(False)

    @contextlib.contextmanager
    def assertWarnsRegex(self, exc_type, regex):
        from warnings import catch_warnings
        with catch_warnings(record=True) as log:
            yield

        first_match = None
        for warning in log:
            w = warning.message
            if not isinstance(w, exc_type):
                continue
            if first_match is None:
                first_match = w
            if re.search(regex, str(w)):
                self.assertTrue(True)
                return

        if first_match is None:
            self.assertTrue(False, "no warning was raised of type '%s'" % exc_type.__name__)
        else:
            self.assertTrue(False, "'%s' did not match '%s'" % (first_match, regex))

    def assertRegex(self, value, regex):
        self.assertTrue(re.search(regex, str(value)),
                        "'%s' did not match '%s'" % (value, regex))
    def test_gen_1(self):
        def gen(): yield
        self.assertFalse(hasattr(gen, '__await__'))

    def test_func_1(self):
        async def foo():
            return 10

        f = foo()
        self.assertEqual(f.__class__.__name__, 'coroutine')
        #self.assertIsInstance(f, types.GeneratorType)
        #self.assertTrue(bool(foo.__code__.co_flags & 0x80))
        #self.assertTrue(bool(foo.__code__.co_flags & 0x20))
        #self.assertTrue(bool(f.gi_code.co_flags & 0x80))
        #self.assertTrue(bool(f.gi_code.co_flags & 0x20))
        self.assertEqual(run_async(f), ([], 10))

        def bar(): pass
        self.assertFalse(bool(bar.__code__.co_flags & 0x80))
    # TODO
    def __test_func_2(self):
        async def foo():
            raise StopIteration

        with self.assertRaisesRegex(
                RuntimeError, "generator raised StopIteration"):
            run_async(foo())

    def test_func_3(self):
        async def foo():
            raise StopIteration

        with silence_coro_gc():
            self.assertRegex(repr(foo()), '^<coroutine object.* at 0x.*>$')

    def test_func_4(self):
        async def foo():
            raise StopIteration

        check = lambda: self.assertRaisesRegex(
            TypeError, "coroutine-objects do not support iteration")

        with check():
            list(foo())

        with check():
            tuple(foo())

        with check():
            sum(foo())

        with check():
            iter(foo())

        # in Cython: not iterable, but an iterator ...
        #with check():
        #    next(foo())

        with silence_coro_gc(), check():
            for i in foo():
                pass

        with silence_coro_gc(), check():
            [i for i in foo()]
    def test_func_5(self):
        @types_coroutine
        def bar():
            yield 1

        async def foo():
            await bar()

        check = lambda: self.assertRaisesRegex(
            TypeError, "coroutine-objects do not support iteration")

        with check():
            for el in foo():
                pass

        # the following should pass without an error
        for el in bar():
            self.assertEqual(el, 1)
        self.assertEqual([el for el in bar()], [1])
        self.assertEqual(tuple(bar()), (1,))
        self.assertEqual(next(iter(bar())), 1)

    def test_func_6(self):
        @types_coroutine
        def bar():
            yield 1
            yield 2

        async def foo():
            await bar()

        f = foo()
        self.assertEqual(f.send(None), 1)
        self.assertEqual(f.send(None), 2)
        with self.assertRaises(StopIteration):
            f.send(None)
    # TODO (or not? see test_func_8() below)
    def __test_func_7(self):
        async def bar():
            return 10

        def foo():
            yield from bar()

        with silence_coro_gc(), self.assertRaisesRegex(
                TypeError, "cannot 'yield from' a coroutine object from a generator"):
            list(foo())

    def test_func_8(self):
        @types_coroutine
        def bar():
            return (yield from foo())

        async def foo():
            return 'spam'

        self.assertEqual(run_async(bar()), ([], 'spam'))

    def test_func_9(self):
        async def foo():
            pass

        with self.assertWarnsRegex(
                RuntimeWarning, "coroutine '.*test_func_9.*foo' was never awaited"):
            foo()
            gc.collect()
    def test_await_1(self):
        async def foo():
            await 1
        with self.assertRaisesRegex(TypeError, "object int can.t.*await"):
            run_async(foo())

    def test_await_2(self):
        async def foo():
            await []
        with self.assertRaisesRegex(TypeError, "object list can.t.*await"):
            run_async(foo())

    def test_await_3(self):
        async def foo():
            await AsyncYieldFrom([1, 2, 3])

        self.assertEqual(run_async(foo()), ([1, 2, 3], None))

    def test_await_4(self):
        async def bar():
            return 42

        async def foo():
            return await bar()

        self.assertEqual(run_async(foo()), ([], 42))
    def test_await_5(self):
        class Awaitable:
            def __await__(self):
                return

        async def foo():
            return (await Awaitable())

        with self.assertRaisesRegex(
                TypeError, "__await__.*returned non-iterator of type"):
            run_async(foo())

    def test_await_6(self):
        class Awaitable:
            def __await__(self):
                return iter([52])

        async def foo():
            return (await Awaitable())

        self.assertEqual(run_async(foo()), ([52], None))

    def test_await_7(self):
        class Awaitable:
            def __await__(self):
                yield 42
                return 100

        async def foo():
            return (await Awaitable())

        self.assertEqual(run_async(foo()), ([42], 100))

    def test_await_8(self):
        class Awaitable:
            pass

        async def foo():
            return (await Awaitable())

        with self.assertRaisesRegex(
                TypeError, "object Awaitable can't be used in 'await' expression"):
            run_async(foo())
    def test_await_9(self):
        def wrap():
            return bar

        async def bar():
            return 42

        async def foo():
            b = bar()

            db = {'b': lambda: wrap}

            class DB:
                b = staticmethod(wrap)

            return (await bar() + await wrap()() + await db['b']()()() +
                    await bar() * 1000 + await DB.b()())

        async def foo2():
            return -await bar()

        self.assertEqual(run_async(foo()), ([], 42168))
        self.assertEqual(run_async(foo2()), ([], -42))

    def test_await_10(self):
        async def baz():
            return 42

        async def bar():
            return baz()

        async def foo():
            return await (await bar())

        self.assertEqual(run_async(foo()), ([], 42))

    def test_await_11(self):
        def ident(val):
            return val

        async def bar():
            return 'spam'

        async def foo():
            return ident(val=await bar())

        async def foo2():
            return await bar(), 'ham'

        self.assertEqual(run_async(foo2()), ([], ('spam', 'ham')))
    def test_await_12(self):
        async def coro():
            return 'spam'

        class Awaitable:
            def __await__(self):
                return coro()

        async def foo():
            return await Awaitable()

        with self.assertRaisesRegex(
                TypeError, "__await__\(\) returned a coroutine"):
            run_async(foo())

    def test_await_13(self):
        class Awaitable:
            def __await__(self):
                return self

        async def foo():
            return await Awaitable()

        with self.assertRaisesRegex(
                TypeError, "__await__.*returned non-iterator of type"):
            run_async(foo())
    def test_with_1(self):
        class Manager:
            def __init__(self, name):
                self.name = name

            async def __aenter__(self):
                await AsyncYieldFrom(['enter-1-' + self.name,
                                      'enter-2-' + self.name])
                return self

            async def __aexit__(self, *args):
                await AsyncYieldFrom(['exit-1-' + self.name,
                                      'exit-2-' + self.name])

                if self.name == 'B':
                    return True

        async def foo():
            async with Manager("A") as a, Manager("B") as b:
                await AsyncYieldFrom([('managers', a.name, b.name)])
                1/0

        f = foo()
        result, _ = run_async(f)

        self.assertEqual(
            result, ['enter-1-A', 'enter-2-A', 'enter-1-B', 'enter-2-B',
                     ('managers', 'A', 'B'),
                     'exit-1-B', 'exit-2-B', 'exit-1-A', 'exit-2-A']
        )

        async def foo():
            async with Manager("A") as a, Manager("C") as c:
                await AsyncYieldFrom([('managers', a.name, c.name)])
                1/0

        with self.assertRaises(ZeroDivisionError):
            run_async(foo())
    def test_with_2(self):
        class CM:
            def __aenter__(self):
                pass

        async def foo():
            async with CM():
                pass

        with self.assertRaisesRegex(AttributeError, '__aexit__'):
            run_async(foo())

    def test_with_3(self):
        class CM:
            def __aexit__(self):
                pass

        async def foo():
            async with CM():
                pass

        with self.assertRaisesRegex(AttributeError, '__aenter__'):
            run_async(foo())

    def test_with_4(self):
        class CM:
            def __enter__(self):
                pass

            def __exit__(self):
                pass

        async def foo():
            async with CM():
                pass

        with self.assertRaisesRegex(AttributeError, '__aexit__'):
            run_async(foo())

    def test_with_5(self):
        # While this test doesn't make a lot of sense,
        # it's a regression test for an early bug with opcodes
        # generation
        class CM:
            async def __aenter__(self):
                return self

            async def __aexit__(self, *exc):
                pass

        async def func():
            async with CM():
                assert (1, ) == 1

        with self.assertRaises(AssertionError):
            run_async(func())
    def test_with_6(self):
        class CM:
            def __aenter__(self):
                return 123

            def __aexit__(self, *e):
                return 456

        async def foo():
            async with CM():
                pass

        with self.assertRaisesRegex(
                TypeError, "object int can't be used in 'await' expression"):
            # it's important that __aexit__ wasn't called
            run_async(foo())

    def test_with_7(self):
        class CM:
            async def __aenter__(self):
                return self

            def __aexit__(self, *e):
                return 444

        async def foo():
            async with CM():
                1/0

        try:
            run_async(foo())
        except TypeError as exc:
            self.assertRegex(
                exc.args[0], "object int can't be used in 'await' expression")
            if sys.version_info[0] >= 3:
                self.assertTrue(exc.__context__ is not None)
                self.assertTrue(isinstance(exc.__context__, ZeroDivisionError))
        else:
            self.fail('invalid asynchronous context manager did not fail')
    def test_with_8(self):
        CNT = 0

        class CM:
            async def __aenter__(self):
                return self

            def __aexit__(self, *e):
                return 456

        async def foo():
            nonlocal CNT
            async with CM():
                CNT += 1

        with self.assertRaisesRegex(
                TypeError, "object int can't be used in 'await' expression"):
            run_async(foo())

        self.assertEqual(CNT, 1)

    def test_with_9(self):
        CNT = 0

        class CM:
            async def __aenter__(self):
                return self

            async def __aexit__(self, *e):
                1/0

        async def foo():
            nonlocal CNT
            async with CM():
                CNT += 1

        with self.assertRaises(ZeroDivisionError):
            run_async(foo())

        self.assertEqual(CNT, 1)
    def test_with_10(self):
        CNT = 0

        class CM:
            async def __aenter__(self):
                return self

            async def __aexit__(self, *e):
                1/0

        async def foo():
            nonlocal CNT
            async with CM():
                async with CM():
                    raise RuntimeError

        try:
            run_async(foo())
        except ZeroDivisionError as exc:
            pass
            # FIXME!
            #if sys.version_info[0] >= 3:
            #    self.assertTrue(exc.__context__ is not None)
            #    self.assertTrue(isinstance(exc.__context__, ZeroDivisionError))
            #    self.assertTrue(isinstance(exc.__context__.__context__, RuntimeError))
        else:
            self.fail('exception from __aexit__ did not propagate')

    def test_with_11(self):
        CNT = 0

        class CM:
            async def __aenter__(self):
                raise NotImplementedError

            async def __aexit__(self, *e):
                1/0

        async def foo():
            nonlocal CNT
            async with CM():
                raise RuntimeError

        try:
            run_async(foo())
        except NotImplementedError as exc:
            if sys.version_info[0] >= 3:
                self.assertTrue(exc.__context__ is None)
        else:
            self.fail('exception from __aenter__ did not propagate')
    def test_with_12(self):
        CNT = 0

        class CM:
            async def __aenter__(self):
                return self

            async def __aexit__(self, *e):
                return True

        async def foo():
            nonlocal CNT
            async with CM() as cm:
                self.assertIs(cm.__class__, CM)
                raise RuntimeError

        run_async(foo())

    def test_with_13(self):
        CNT = 0

        class CM:
            async def __aenter__(self):
                1/0

            async def __aexit__(self, *e):
                return True

        async def foo():
            nonlocal CNT
            CNT += 1
            async with CM():
                CNT += 1000
            CNT += 10000

        with self.assertRaises(ZeroDivisionError):
            run_async(foo())
        self.assertEqual(CNT, 1)
    def test_for_1(self):
        aiter_calls = 0

        class AsyncIter:
            def __init__(self):
                self.i = 0

            async def __aiter__(self):
                nonlocal aiter_calls
                aiter_calls += 1
                return self

            async def __anext__(self):
                self.i += 1

                if not (self.i % 10):
                    await AsyncYield(self.i * 10)

                if self.i > 100:
                    raise StopAsyncIteration

                return self.i, self.i

        buffer = []
        async def test1():
            async for i1, i2 in AsyncIter():
                buffer.append(i1 + i2)

        yielded, _ = run_async(test1())
        # Make sure that __aiter__ was called only once
        self.assertEqual(aiter_calls, 1)
        self.assertEqual(yielded, [i * 100 for i in range(1, 11)])
        self.assertEqual(buffer, [i*2 for i in range(1, 101)])

        buffer = []
        async def test2():
            nonlocal buffer
            async for i in AsyncIter():
                buffer.append(i[0])
                if i[0] == 20:
                    break
            else:
                buffer.append('what?')
            buffer.append('end')

        yielded, _ = run_async(test2())
        # Make sure that __aiter__ was called only once
        self.assertEqual(aiter_calls, 2)
        self.assertEqual(yielded, [100, 200])
        self.assertEqual(buffer, [i for i in range(1, 21)] + ['end'])

        buffer = []
        async def test3():
            nonlocal buffer
            async for i in AsyncIter():
                if i[0] > 20:
                    continue
                buffer.append(i[0])
            else:
                buffer.append('what?')
            buffer.append('end')

        yielded, _ = run_async(test3())
        # Make sure that __aiter__ was called only once
        self.assertEqual(aiter_calls, 3)
        self.assertEqual(yielded, [i * 100 for i in range(1, 11)])
        self.assertEqual(buffer, [i for i in range(1, 21)] + ['what?', 'end'])
    def test_for_2(self):
        tup = (1, 2, 3)
        refs_before = sys.getrefcount(tup)

        async def foo():
            async for i in tup:
                print('never going to happen')

        with self.assertRaisesRegex(
                TypeError, "async for' requires an object.*__aiter__.*tuple"):
            run_async(foo())

        self.assertEqual(sys.getrefcount(tup), refs_before)
    def test_for_3(self):
        class I:
            def __aiter__(self):
                return self

        aiter = I()
        refs_before = sys.getrefcount(aiter)

        async def foo():
            async for i in aiter:
                print('never going to happen')

        with self.assertRaisesRegex(
                TypeError,
                "async for' received an invalid object.*__aiter.*\: I"):
run_async(foo())
self.assertEqual(sys.getrefcount(aiter), refs_before)
def test_for_4(self):
class I:
async def __aiter__(self):
return self
def __anext__(self):
return ()
aiter = I()
refs_before = sys.getrefcount(aiter)
async def foo():
async for i in aiter:
print('never going to happen')
with self.assertRaisesRegex(
TypeError,
"
async
for
' received an invalid object.*__anext__.*tuple"):
run_async(foo())
self.assertEqual(sys.getrefcount(aiter), refs_before)
def test_for_5(self):
class I:
async def __aiter__(self):
return self
def __anext__(self):
return 123
async def foo():
async for i in I():
                print('never going to happen')
with self.assertRaisesRegex(
TypeError,
"async for'
received
an
invalid
object
.
*
__anext
.
*
int
"):
run_async(foo())
def test_for_6(self):
I = 0
class Manager:
async def __aenter__(self):
nonlocal I
I += 10000
async def __aexit__(self, *args):
nonlocal I
I += 100000
class Iterable:
def __init__(self):
self.i = 0
async def __aiter__(self):
return self
async def __anext__(self):
if self.i > 10:
raise StopAsyncIteration
self.i += 1
return self.i
##############
manager = Manager()
iterable = Iterable()
mrefs_before = sys.getrefcount(manager)
irefs_before = sys.getrefcount(iterable)
async def main():
nonlocal I
async with manager:
async for i in iterable:
I += 1
I += 1000
run_async(main())
self.assertEqual(I, 111011)
self.assertEqual(sys.getrefcount(manager), mrefs_before)
self.assertEqual(sys.getrefcount(iterable), irefs_before)
##############
async def main():
nonlocal I
async with Manager():
async for i in Iterable():
I += 1
I += 1000
async with Manager():
async for i in Iterable():
I += 1
I += 1000
run_async(main())
self.assertEqual(I, 333033)
##############
async def main():
nonlocal I
async with Manager():
I += 100
async for i in Iterable():
I += 1
else:
I += 10000000
I += 1000
async with Manager():
I += 100
async for i in Iterable():
I += 1
else:
I += 10000000
I += 1000
run_async(main())
self.assertEqual(I, 20555255)
def test_for_7(self):
CNT = 0
class AI:
async def __aiter__(self):
1/0
async def foo():
nonlocal CNT
async for i in AI():
CNT += 1
CNT += 10
with self.assertRaises(ZeroDivisionError):
run_async(foo())
self.assertEqual(CNT, 0)
class CoroAsyncIOCompatTest(unittest.TestCase):
def test_asyncio_1(self):
import asyncio
class MyException(Exception):
pass
buffer = []
class CM:
async def __aenter__(self):
buffer.append(1)
await asyncio.sleep(0.01)
buffer.append(2)
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await asyncio.sleep(0.01)
buffer.append(exc_type.__name__)
async def f():
async with CM() as c:
await asyncio.sleep(0.01)
raise MyException
buffer.append('unreachable')
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
loop.run_until_complete(f())
except MyException:
pass
finally:
loop.close()
asyncio.set_event_loop(None)
self.assertEqual(buffer, [1, 2, 'MyException'])
class SysSetCoroWrapperTest(unittest.TestCase):
def test_set_wrapper_1(self):
async def foo():
return 'spam'
wrapped = None
def wrap(gen):
nonlocal wrapped
wrapped = gen
return gen
self.assertIsNone(sys.get_coroutine_wrapper())
sys.set_coroutine_wrapper(wrap)
self.assertIs(sys.get_coroutine_wrapper(), wrap)
try:
f = foo()
self.assertTrue(wrapped)
self.assertEqual(run_async(f), ([], 'spam'))
finally:
sys.set_coroutine_wrapper(None)
self.assertIsNone(sys.get_coroutine_wrapper())
wrapped = None
with silence_coro_gc():
foo()
self.assertFalse(wrapped)
# disable some tests that only apply to CPython
# TODO?
if True or sys.version_info < (3, 5):
SysSetCoroWrapperTest = None
try:
import asyncio
except ImportError:
CoroAsyncIOCompatTest = None
if __name__=="
__main__
":
unittest.main()