Boxiang Sun / cython / Commits / 19f7caa7

Commit 19f7caa7 authored Oct 04, 2009 by Stefan Behnel

    merge

Parents: 6430e5b4, 4c11531e

Showing 15 changed files with 476 additions and 30 deletions
Cython/Compiler/AnalysedTreeTransforms.py    +79   -0
Cython/Compiler/Code.py                       +9   -4
Cython/Compiler/DebugFlags.py                 +1   -1
Cython/Compiler/ExprNodes.py                 +40   -2
Cython/Compiler/Main.py                       +2   -0
Cython/Compiler/ModuleNode.py                 +1   -1
Cython/Compiler/Options.py                    +6   -0
Cython/Compiler/ParseTreeTransforms.py       +26   -8
Cython/Compiler/PyrexTypes.py                 +2   -2
Cython/Compiler/Visitor.py                   +34   -0
Cython/Includes/numpy.pxd                   +154  -12
tests/errors/e_doctesthack.pyx                +9   -0
tests/run/division_T384.pyx                  +20   -0
tests/run/doctesthack.pyx                    +69   -0
tests/run/doctesthack_skip.pyx               +24   -0
Cython/Compiler/AnalysedTreeTransforms.py (new file, 0 → 100644)

from Cython.Compiler.Visitor import VisitorTransform, ScopeTrackingTransform, TreeVisitor
from Nodes import StatListNode, SingleAssignmentNode
from ExprNodes import (DictNode, DictItemNode, NameNode, UnicodeNode, NoneNode,
                       ExprNode, AttributeNode, ModuleRefNode, DocstringRefNode)
from PyrexTypes import py_object_type
from Builtin import dict_type
from StringEncoding import EncodedString
import Naming

class DoctestHackTransform(ScopeTrackingTransform):
    # Handles doctesthack directive

    def visit_ModuleNode(self, node):
        self.scope_type = 'module'
        self.scope_node = node
        if self.current_directives['doctesthack']:
            assert isinstance(node.body, StatListNode)

            # First see if __test__ is already created
            if u'__test__' in node.scope.entries:
                # Do nothing
                return node

            pos = node.pos

            self.tests = []
            self.testspos = node.pos

            test_dict_entry = node.scope.declare_var(EncodedString(u'__test__'),
                                                     py_object_type,
                                                     pos,
                                                     visibility='public')
            create_test_dict_assignment = SingleAssignmentNode(pos,
                lhs=NameNode(pos, name=EncodedString(u'__test__'),
                             entry=test_dict_entry),
                rhs=DictNode(pos, key_value_pairs=self.tests))
            self.visitchildren(node)
            node.body.stats.append(create_test_dict_assignment)
        return node

    def add_test(self, testpos, name, func_ref_node):
        # func_ref_node must evaluate to the function object containing
        # the docstring, BUT it should not be the function itself (which
        # would lead to a new *definition* of the function)
        pos = self.testspos
        keystr = u'%s (line %d)' % (name, testpos[1])
        key = UnicodeNode(pos, value=EncodedString(keystr))
        value = DocstringRefNode(pos, func_ref_node)
        self.tests.append(DictItemNode(pos, key=key, value=value))

    def visit_FuncDefNode(self, node):
        if node.doc:
            pos = self.testspos
            if self.scope_type == 'module':
                parent = ModuleRefNode(pos)
                name = node.entry.name
            elif self.scope_type in ('pyclass', 'cclass'):
                mod = ModuleRefNode(pos)
                if self.scope_type == 'pyclass':
                    clsname = self.scope_node.name
                else:
                    clsname = self.scope_node.class_name
                parent = AttributeNode(pos, obj=mod,
                                       attribute=clsname,
                                       type=py_object_type,
                                       is_py_attr=True,
                                       is_temp=True)
                name = "%s.%s" % (clsname, node.entry.name)
            getfunc = AttributeNode(pos, obj=parent,
                                    attribute=node.entry.name,
                                    type=py_object_type,
                                    is_py_attr=True,
                                    is_temp=True)
            self.add_test(node.pos, name, getfunc)
        return node
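For orientation, here is a minimal sketch of what the transform does from the user's side, based on the add_test() key format above and the tests added at the bottom of this commit; the function name and docstring below are illustrative, not from the diff. With the directive enabled, every function or method that carries a docstring is registered in a module-level __test__ dict keyed as "name (line N)", which is what the standard doctest module consults.

#cython: doctesthack=True

def myfunc():
    """
    >>> myfunc()
    42
    """
    return 42

# After the transform runs, the module conceptually ends with an assignment like
#     __test__ = {u"myfunc (line 3)": <docstring of myfunc>}
# built out of DictNode / DocstringRefNode, so doctest.testmod() finds the test.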
Cython/Compiler/Code.py

@@ -24,7 +24,9 @@ class UtilityCode(object):
     #
     # hashes/equals by instance
 
-    def __init__(self, proto=None, impl=None, init=None, cleanup=None, requires=None):
+    def __init__(self, proto=None, impl=None, init=None, cleanup=None, requires=None,
+                 proto_block='utility_code_proto'):
+        # proto_block: Which code block to dump prototype in. See GlobalState.
         self.proto = proto
         self.impl = impl
         self.init = init

@@ -32,6 +34,7 @@ class UtilityCode(object):
         self.requires = requires
         self._cache = {}
         self.specialize_list = []
+        self.proto_block = proto_block
 
     def specialize(self, pyrex_type=None, **data):
         # Dicts aren't hashable...

@@ -51,7 +54,7 @@ class UtilityCode(object):
                 none_or_sub(self.impl, data),
                 none_or_sub(self.init, data),
                 none_or_sub(self.cleanup, data),
-                requires)
+                requires, self.proto_block)
             self.specialize_list.append(s)
             return s

@@ -60,7 +63,7 @@ class UtilityCode(object):
             for dependency in self.requires:
                 output.use_utility_code(dependency)
         if self.proto:
-            output['utility_code_proto'].put(self.proto)
+            output[self.proto_block].put(self.proto)
         if self.impl:
             output['utility_code_def'].put(self.impl)
         if self.init:

@@ -390,8 +393,10 @@ class GlobalState(object):
     code_layout = [
         'h_code',
-        'utility_code_proto',
+        'complex_numbers_utility_code',
+        'utility_code_proto_before_types',
         'type_declarations',
+        'utility_code_proto',
         'module_declarations',
         'typeinfo',
         'before_global_var',
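As a quick illustration (mine, not part of the diff), the new keyword lets a utility snippet request that its prototype be emitted in an earlier section of the generated C file; this is exactly how the ModuleNode.py and PyrexTypes.py changes below use it. The macro body here is a made-up placeholder, only the proto_block value comes from the code_layout list added above.

example_utility_code = UtilityCode(
    proto="#define MY_HELPER_MACRO(x) (x)",   # illustrative C prototype text
    impl="",
    proto_block='utility_code_proto_before_types')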
Cython/Compiler/DebugFlags.py

@@ -10,7 +10,7 @@ debug_temp_code_comments = 0
 debug_trace_code_generation = 0
 
 # Do not replace exceptions with user-friendly error messages
-debug_no_exception_intercept = 0
+debug_no_exception_intercept = 1
 
 # Print a message each time a new stage in the pipeline is entered
 debug_verbose_pipeline = 0
Cython/Compiler/ExprNodes.py

@@ -3503,6 +3503,8 @@ class DictNode(ExprNode):
     #  obj_conversion_errors    [PyrexError]   used internally
 
     subexprs = ['key_value_pairs']
+    is_temp = 1
+    type = dict_type
 
     type = dict_type
     obj_conversion_errors = []

@@ -3521,12 +3523,10 @@ class DictNode(ExprNode):
     def analyse_types(self, env):
         hold_errors()
-        self.type = dict_type
         for item in self.key_value_pairs:
             item.analyse_types(env)
         self.obj_conversion_errors = held_errors()
         release_errors(ignore=True)
-        self.is_temp = 1
 
     def coerce_to(self, dst_type, env):
         if dst_type.is_pyobject:

@@ -5489,6 +5489,44 @@ class CloneNode(CoercionNode):
         pass
 
+
+class ModuleRefNode(ExprNode):
+    # Simple returns the module object
+
+    type = py_object_type
+    is_temp = False
+    subexprs = []
+
+    def analyse_types(self, env):
+        pass
+
+    def calculate_result_code(self):
+        return Naming.module_cname
+
+    def generate_result_code(self, code):
+        pass
+
+
+class DocstringRefNode(ExprNode):
+    # Extracts the docstring of the body element
+
+    subexprs = ['body']
+    type = py_object_type
+    is_temp = True
+
+    def __init__(self, pos, body):
+        ExprNode.__init__(self, pos)
+        assert body.type.is_pyobject
+        self.body = body
+
+    def analyse_types(self, env):
+        pass
+
+    def generate_result_code(self, code):
+        code.putln('%s = __Pyx_GetAttrString(%s, "__doc__");' %
+                   (self.result(), self.body.result()))
+        code.put_gotref(self.result())
+
 
 #------------------------------------------------------------------------------------
 #
 #  Runtime support code
Cython/Compiler/Main.py

@@ -88,6 +88,7 @@ class Context(object):
         from ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
         from ParseTreeTransforms import InterpretCompilerDirectives, TransformBuiltinMethods
         from ParseTreeTransforms import AlignFunctionDefinitions, GilCheck
+        from AnalysedTreeTransforms import DoctestHackTransform
         from AutoDocTransforms import EmbedSignature
         from Optimize import FlattenInListTransform, SwitchTransform, IterationTransform
         from Optimize import OptimizeBuiltinCalls, ConstantFolding, FinalOptimizePhase

@@ -126,6 +127,7 @@ class Context(object):
             WithTransform(self),
             DecoratorTransform(self),
             AnalyseDeclarationsTransform(self),
+            DoctestHackTransform(self),
             EmbedSignature(self),
             TransformBuiltinMethods(self),
             IntroduceBufferAuxiliaryVars(self),
Cython/Compiler/ModuleNode.py

@@ -2515,4 +2515,4 @@ packed_struct_utility_code = UtilityCode(proto="""
 #else
 #define __Pyx_PACKED
 #endif
-""", impl="")
+""", impl="", proto_block='utility_code_proto_before_types')
Cython/Compiler/Options.py

@@ -68,6 +68,7 @@ option_defaults = {
     'c99_complex' : False, # Don't use macro wrappers for complex arith, not sure what to name this...
     'callspec' : "",
     'profile': False,
+    'doctesthack': False,
 }
 
 # Override types possibilities above, if needed

@@ -77,6 +78,11 @@ for key, val in option_defaults.items():
     if key not in option_types:
         option_types[key] = type(val)
 
+option_scopes = { # defaults to available everywhere
+    # 'module', 'function', 'class', 'with statement'
+    'doctesthack' : ('module',)
+}
+
 def parse_option_value(name, value):
     """
     Parses value as an option value for the given name and returns
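A short illustration of my own, matching the check_directive_scope() method added to ParseTreeTransforms.py below: a directive listed in option_scopes is only legal in the named scopes, while anything not listed is allowed everywhere.

legal_scopes = option_scopes.get('doctesthack', None)   # ('module',)
print('module' in legal_scopes)      # True  -> accepted at module scope
print('function' in legal_scopes)    # False -> "... not allowed in function scope" error
print(option_scopes.get('profile', None))   # None -> unrestricted, usable anywhere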
Cython/Compiler/ParseTreeTransforms.py

@@ -338,14 +338,26 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
         self.cython_module_names = set()
         self.option_names = {}
 
+    def check_directive_scope(self, pos, directive, scope):
+        legal_scopes = Options.option_scopes.get(directive, None)
+        if legal_scopes and scope not in legal_scopes:
+            self.context.nonfatal_error(PostParseError(pos, 'The %s compiler directive '
+                                        'is not allowed in %s scope' % (directive, scope)))
+            return False
+        else:
+            return True
+
     # Set up processing and handle the cython: comments.
     def visit_ModuleNode(self, node):
         options = copy.copy(Options.option_defaults)
         for key, value in self.compilation_option_overrides.iteritems():
+            if not self.check_directive_scope(node.pos, key, 'module'):
+                self.wrong_scope_error(node.pos, key, 'module')
+                del self.compilation_option_overrides[key]
+                continue
             if key in node.option_comments and node.option_comments[key] != value:
                 warning(node.pos, "Compiler directive differs between environment and file header; this will change "
                         "in Cython 0.12. See http://article.gmane.org/gmane.comp.python.cython.devel/5233", 2)
                 break
         options.update(node.option_comments)
         options.update(self.compilation_option_overrides)
         self.options = options

@@ -465,7 +477,6 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
     # Handle decorators
     def visit_FuncDefNode(self, node):
         options = []
         if node.decorators:
             # Split the decorators into two lists -- real decorators and options
             realdecs = []

@@ -485,6 +496,9 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
             options.reverse() # Decorators coming first take precedence
             for option in options:
                 name, value = option
+                legal_scopes = Options.option_scopes.get(name, None)
+                if not self.check_directive_scope(node.pos, name, 'function'):
+                    continue
                 if name in optdict and isinstance(optdict[name], dict):
                     # only keywords can be merged, everything else
                     # overrides completely

@@ -503,7 +517,9 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
                 if option is not None and option[0] == u'locals':
                     node.directive_locals = option[1]
                 else:
-                    raise PostParseError(dec.pos, "Cdef functions can only take cython.locals() decorator.")
+                    self.context.nonfatal_error(PostParseError(dec.pos,
+                        "Cdef functions can only take cython.locals() decorator."))
+                    continue
         return node
 
     # Handle with statements

@@ -511,10 +527,12 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
         option = self.try_to_parse_option(node.manager)
         if option is not None:
             if node.target is not None:
-                raise PostParseError(node.pos, "Compiler option with statements cannot contain 'as'")
+                self.context.nonfatal_error(
+                    PostParseError(node.pos, "Compiler option with statements cannot contain 'as'"))
             else:
                 name, value = option
                 if self.check_directive_scope(node.pos, name, 'with statement'):
                     return self.visit_with_options(node.body, {name: value})
         else:
             return self.visit_Node(node)
 
 class WithTransform(CythonTransform, SkipDeclarations):
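For reference, a rough sketch (mine, not from the diff) of the three directive forms this transform interprets: the module header comment handled via option_comments in visit_ModuleNode, the decorator form handled in visit_FuncDefNode, and the with-statement form handled just above. cython.cdivision is used as the example because the tests added in this commit use it; whether a particular directive is honoured in every form depends on the directive and on option_scopes.

#cython: cdivision=False            # must be the first line of the file; module-level form

cimport cython

@cython.cdivision(False)            # decorator form, attached to a single function
def f(x):
    with cython.cdivision(False):   # with-statement form, scoped to a block
        return x // 2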
Cython/Compiler/PyrexTypes.py

@@ -612,7 +612,7 @@ static INLINE %(type)s __Pyx_PyInt_As%(SignWord)s%(TypeName)s(PyObject* x) {
     }
     return (%(type)s)__Pyx_PyInt_As%(SignWord)sLong(x);
 }
-""")
+""") #fool emacs: '
 
 c_long_from_py_function = UtilityCode(proto="""

@@ -1013,7 +1013,7 @@ proto="""
 }
 #endif
-""")
+""", proto_block='complex_numbers_utility_code')
 
 class CArrayType(CType):
Cython/Compiler/Visitor.py

@@ -250,6 +250,9 @@ class VisitorTransform(TreeVisitor):
 class CythonTransform(VisitorTransform):
     """
     Certain common conventions and utilitues for Cython transforms.
+     - Sets up the context of the pipeline in self.context
+     - Tracks directives in effect in self.current_directives
     """
     def __init__(self, context):
         super(CythonTransform, self).__init__()

@@ -272,6 +275,37 @@ class CythonTransform(VisitorTransform):
         self.visitchildren(node)
         return node
 
+
+class ScopeTrackingTransform(CythonTransform):
+    # Keeps track of type of scopes
+    scope_type = None # can be either of 'module', 'function', 'cclass', 'pyclass'
+    scope_node = None
+
+    def visit_ModuleNode(self, node):
+        self.scope_type = 'module'
+        self.scope_node = node
+        self.visitchildren(node)
+        return node
+
+    def visit_scope(self, node, scope_type):
+        prev = self.scope_type, self.scope_node
+        self.scope_type = scope_type
+        self.scope_node = node
+        self.visitchildren(node)
+        self.scope_type, self.scope_node = prev
+        return node
+
+    def visit_CClassDefNode(self, node):
+        return self.visit_scope(node, 'cclass')
+
+    def visit_PyClassDefNode(self, node):
+        return self.visit_scope(node, 'pyclass')
+
+    def visit_FuncDefNode(self, node):
+        return self.visit_scope(node, 'function')
+
+    def visit_CStructOrUnionDefNode(self, node):
+        return self.visit_scope(node, 'struct')
+
 
 class RecursiveNodeReplacer(VisitorTransform):
     """
     Recursively replace all occurrences of a node in a subtree by
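The new DoctestHackTransform above is the first user of this base class. As a rough sketch of the pattern (hypothetical, not part of the commit), a subclass overrides only the visit methods it cares about and reads self.scope_type / self.scope_node to learn where the current node lives; like DoctestHackTransform, it is assumed to run after declaration analysis so that node.entry exists.

class FunctionLoggerTransform(ScopeTrackingTransform):
    # Hypothetical transform: collects (scope_type, function name) pairs.
    def __init__(self, context):
        super(FunctionLoggerTransform, self).__init__(context)
        self.seen = []

    def visit_FuncDefNode(self, node):
        # scope_type is one of 'module', 'function', 'cclass', 'pyclass' here
        self.seen.append((self.scope_type, node.entry.name))
        # delegate to the base machinery so nested scopes are tracked too
        return self.visit_scope(node, 'function')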
Cython/Includes/numpy.pxd

@@ -17,6 +17,7 @@
 DEF _buffer_format_string_len = 255
 
 cimport python_buffer as pybuf
+from python_object cimport PyObject
 cimport stdlib
 cimport stdio

@@ -134,6 +135,11 @@ cdef extern from "numpy/arrayobject.h":
         # Use through macros
         pass
 
+    ctypedef struct PyArrayObject:
+        # For use in situations where ndarray can't replace PyArrayObject*,
+        # like PyArrayObject**.
+        pass
+
     ctypedef class numpy.ndarray [object PyArrayObject]:
         cdef __cythonbufferdefaults__ = {"mode": "strided"}

@@ -167,11 +173,11 @@ cdef extern from "numpy/arrayobject.h":
             if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)
                 and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)):
-                raise ValueError("ndarray is not C contiguous")
+                raise ValueError(u"ndarray is not C contiguous")
 
             if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)
                 and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)):
-                raise ValueError("ndarray is not Fortran contiguous")
+                raise ValueError(u"ndarray is not Fortran contiguous")
 
             info.buf = PyArray_DATA(self)
             info.ndim = ndim

@@ -209,7 +215,7 @@ cdef extern from "numpy/arrayobject.h":
             t = descr.type_num
             if ((descr.byteorder == '>' and little_endian) or
                 (descr.byteorder == '<' and not little_endian)):
-                raise ValueError("Non-native byte order not supported")
+                raise ValueError(u"Non-native byte order not supported")
             if   t == NPY_BYTE:   f = "b"
             elif t == NPY_UBYTE:  f = "B"
             elif t == NPY_SHORT:  f = "h"

@@ -228,7 +234,7 @@ cdef extern from "numpy/arrayobject.h":
             elif t == NPY_CLONGDOUBLE: f = "Zg"
             elif t == NPY_OBJECT:      f = "O"
             else:
-                raise ValueError("unknown dtype code in numpy.pxd (%d)" % t)
+                raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
             info.format = f
             return
         else:

@@ -463,6 +469,14 @@ cdef extern from "numpy/arrayobject.h":
         object PyArray_Take(ndarray ap, object items, int axis)
         object PyArray_Put(ndarray ap, object items, object values)
 
+        void PyArray_MultiIter_RESET(broadcast multi) nogil
+        void PyArray_MultiIter_NEXT(broadcast multi) nogil
+        void PyArray_MultiIter_GOTO(broadcast multi, npy_intp dest) nogil
+        void PyArray_MultiIter_GOTO1D(broadcast multi, npy_intp ind) nogil
+        void* PyArray_MultiIter_DATA(broadcast multi, npy_intp i) nogil
+        void PyArray_MultiIter_NEXTi(broadcast multi, npy_intp i) nogil
+        bint PyArray_MultiIter_NOTDONE(broadcast multi) nogil
+
     # Functions from __multiarray_api.h
 
     # Functions taking dtype and returning object/ndarray are disabled

@@ -528,6 +542,7 @@ cdef extern from "numpy/arrayobject.h":
     double PyArray_GetPriority (object, double)
     object PyArray_IterNew (object)
     object PyArray_MultiIterNew (int, ...)
     int PyArray_PyIntAsInt (object)
     npy_intp PyArray_PyIntAsIntp (object)
     int PyArray_Broadcast (broadcast)

@@ -639,7 +654,6 @@ cdef extern from "numpy/arrayobject.h":
     int PyArray_CompareString (char *, char *, size_t)
 
 # Typedefs that matches the runtime dtype objects in
 # the numpy module.

@@ -687,6 +701,21 @@ ctypedef npy_clongdouble clongdouble_t
 ctypedef npy_cdouble complex_t
 
+cdef inline object PyArray_MultiIterNew1(a):
+    return PyArray_MultiIterNew(1, <void*>a)
+
+cdef inline object PyArray_MultiIterNew2(a, b):
+    return PyArray_MultiIterNew(2, <void*>a, <void*>b)
+
+cdef inline object PyArray_MultiIterNew3(a, b, c):
+    return PyArray_MultiIterNew(3, <void*>a, <void*>b, <void*>c)
+
+cdef inline object PyArray_MultiIterNew4(a, b, c, d):
+    return PyArray_MultiIterNew(4, <void*>a, <void*>b, <void*>c, <void*>d)
+
+cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):
+    return PyArray_MultiIterNew(5, <void*>a, <void*>b, <void*>c, <void*>d, <void*>e)
+
 cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL:
     # Recursive utility function used in __getbuffer__ to get format
     # string. The new location in the format string is returned.

@@ -703,11 +732,11 @@ cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset
         child, new_offset = fields
 
         if (end - f) - (new_offset - offset[0]) < 15:
-            raise RuntimeError("Format string allocated too short, see comment in numpy.pxd")
+            raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
 
         if ((child.byteorder == '>' and little_endian) or
             (child.byteorder == '<' and not little_endian)):
-            raise ValueError("Non-native byte order not supported")
+            raise ValueError(u"Non-native byte order not supported")
             # One could encode it in the format string and have Cython
             # complain instead, BUT: < and > in format strings also imply
             # standardized sizes for datatypes, and we rely on native in

@@ -727,7 +756,7 @@ cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset
         if not PyDataType_HASFIELDS(child):
            t = child.type_num
            if end - f < 5:
-                raise RuntimeError("Format string allocated too short.")
+                raise RuntimeError(u"Format string allocated too short.")
 
            # Until ticket #99 is fixed, use integers to avoid warnings
            if   t == NPY_BYTE:   f[0] = 98 #"b"

@@ -748,7 +777,7 @@ cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset
            elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg
            elif t == NPY_OBJECT:      f[0] = 79 #"O"
            else:
-                raise ValueError("unknown dtype code in numpy.pxd (%d)" % t)
+                raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
            f += 1
        else:
            # Cython ignores struct boundary information ("T{...}"),

@@ -756,3 +785,116 @@ cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset
            f = _util_dtypestring(child, f, end, offset)
     return f
 
+
+#
+# ufunc API
+#
+
+cdef extern from "numpy/ufuncobject.h":
+
+    ctypedef void (*PyUFuncGenericFunction) (char **, npy_intp *, npy_intp *, void *)
+
+    ctypedef extern class numpy.ufunc [object PyUFuncObject]:
+        cdef:
+            int nin, nout, nargs
+            int identity
+            PyUFuncGenericFunction *functions
+            void **data
+            int ntypes
+            int check_return
+            char *name, *types
+            char *doc
+            void *ptr
+            PyObject *obj
+            PyObject *userloops
+
+    cdef enum:
+        PyUFunc_Zero
+        PyUFunc_One
+        PyUFunc_None
+        UFUNC_ERR_IGNORE
+        UFUNC_ERR_WARN
+        UFUNC_ERR_RAISE
+        UFUNC_ERR_CALL
+        UFUNC_ERR_PRINT
+        UFUNC_ERR_LOG
+        UFUNC_MASK_DIVIDEBYZERO
+        UFUNC_MASK_OVERFLOW
+        UFUNC_MASK_UNDERFLOW
+        UFUNC_MASK_INVALID
+        UFUNC_SHIFT_DIVIDEBYZERO
+        UFUNC_SHIFT_OVERFLOW
+        UFUNC_SHIFT_UNDERFLOW
+        UFUNC_SHIFT_INVALID
+        UFUNC_FPE_DIVIDEBYZERO
+        UFUNC_FPE_OVERFLOW
+        UFUNC_FPE_UNDERFLOW
+        UFUNC_FPE_INVALID
+        UFUNC_ERR_DEFAULT
+        UFUNC_ERR_DEFAULT2
+
+    object PyUFunc_FromFuncAndData(PyUFuncGenericFunction *, void **, char *,
+                                   int, int, int, int, char *, char *, int)
+    int PyUFunc_RegisterLoopForType(ufunc, int, PyUFuncGenericFunction, int *, void *)
+    int PyUFunc_GenericFunction \
+        (ufunc, PyObject *, PyObject *, PyArrayObject **)
+    void PyUFunc_f_f_As_d_d \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_d_d \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_f_f \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_g_g \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_F_F_As_D_D \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_F_F \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_D_D \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_G_G \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_O_O \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_ff_f_As_dd_d \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_ff_f \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_dd_d \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_gg_g \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_FF_F_As_DD_D \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_DD_D \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_FF_F \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_GG_G \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_OO_O \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_O_O_method \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_OO_O_method \
+         (char **, npy_intp *, npy_intp *, void *)
+    void PyUFunc_On_Om \
+         (char **, npy_intp *, npy_intp *, void *)
+    int PyUFunc_GetPyValues \
+        (char *, int *, int *, PyObject **)
+    int PyUFunc_checkfperr \
+        (int, PyObject *, int *)
+    void PyUFunc_clearfperr()
+    int PyUFunc_getfperr()
+    int PyUFunc_handlefperr \
+        (int, PyObject *, int, int *)
+    int PyUFunc_ReplaceLoopBySignature \
+        (ufunc, PyUFuncGenericFunction, int *, PyUFuncGenericFunction *)
+    object PyUFunc_FromFuncAndDataAndSignature \
+             (PyUFuncGenericFunction *, void **, char *, int, int, int,
+              int, char *, char *, int, char *)
+
+    void import_ufunc()
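The newly exposed PyArray_MultiIterNewN helpers and PyArray_MultiIter_* macros follow numpy's usual broadcasting-iterator pattern. As a hedged sketch (not part of the commit; the function and variable names are mine), a user module could consume them roughly like this, assuming all three arrays are float64 and that out already has the broadcast shape; error handling is omitted.

cimport numpy as np

np.import_array()   # required before using the numpy C API

def broadcast_sum(a, b, out):
    # Iterate a, b and out together under numpy broadcasting rules.
    cdef np.broadcast multi = np.PyArray_MultiIterNew3(a, b, out)
    while np.PyArray_MultiIter_NOTDONE(multi):
        (<double*>np.PyArray_MultiIter_DATA(multi, 2))[0] = \
            (<double*>np.PyArray_MultiIter_DATA(multi, 0))[0] + \
            (<double*>np.PyArray_MultiIter_DATA(multi, 1))[0]
        np.PyArray_MultiIter_NEXT(multi)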
tests/errors/e_doctesthack.pyx (new file, 0 → 100644)

cimport cython

@cython.doctesthack(False)
def foo():
    pass

_ERRORS = u"""
4:0: The doctesthack compiler directive is not allowed in function scope
"""
tests/run/division_T384.pyx (new file, 0 → 100644)

"""
>>> test(3)
(3+1j)
"""

cimport cython

ctypedef Py_ssize_t index_t

ctypedef double complex mycomplex

ctypedef struct MyStruct:
    mycomplex a, b

@cython.cdivision(False)
def test(index_t x):
    cdef index_t y = x // 2
    cdef MyStruct s
    s.a = x + y*1j
    return s.a
tests/run/doctesthack.pyx (new file, 0 → 100644)

#cython: doctesthack=True

"""
Tests doctesthack compiler directive.

The doctests are actually run as part of this test;
which makes the test flow a bit untraditional. Both
module test and individual tests are run; finally,
all_tests_run() is executed which does final validation.

>>> items = __test__.items()
>>> items.sort()
>>> for key, value in items:
...     print key, ';', value
MyCdefClass.method (line 67) ; >>> add_log("cdef class method")
MyClass.method (line 57) ; >>> add_log("class method")
doc_without_test (line 39) ; Some docs
mycpdeffunc (line 45) ; >>> add_log("cpdef")
myfunc (line 36) ; >>> add_log("def")
"""

log = []

def all_tests_run():
    log.sort()
    assert log == [u'cdef class method', u'class method', u'cpdef', u'def'], log

def add_log(s):
    log.append(unicode(s))
    if len(log) == len(__test__):
        # Final per-function doctest executed
        all_tests_run()

def myfunc():
    """>>> add_log("def")"""

def doc_without_test():
    """Some docs"""

def nodocstring():
    pass

cpdef mycpdeffunc():
    """>>> add_log("cpdef")"""


class MyClass:
    """
    Needs no hack

    >>> True
    True
    """
    def method(self):
        """>>> add_log("class method")"""


cdef class MyCdefClass:
    """
    Needs no hack

    >>> True
    True
    """
    def method(self):
        """>>> add_log("cdef class method")"""
tests/run/doctesthack_skip.pyx (new file, 0 → 100644)

#cython: doctesthack=True

"""
Tests that doctesthack doesn't come into effect when
a __test__ is defined manually.

If this doesn't work, then the function doctest should fail.

>>> True
True
"""

def func():
    """
    >>> True
    False
    """

__test__ = {
    u"one" : """
>>> True
True
"""
}