Commit 6f714f53, authored Aug 12, 2013 by Robert Bradshaw
Merge branch 'master' into common-types
Parents: eaecf7a0 ebfddaba
Showing 8 changed files with 246 additions and 196 deletions.
Cython/Build/Cythonize.py                        +182    -0
Cython/Utility/ImportExport.c                      +2    -2
bin/cythonize                                      +5  -183
docs/src/userguide/language_basics.rst             +8    -1
runtests.py                                        +2    -0
tests/build/common_include_dir.srctree             +2    -2
tests/build/cythonize_script.srctree              +10    -8
tests/build/cythonize_script_package.srctree      +35    -0
Cython/Build/Cythonize.py
(new file, mode 100644)
#!/usr/bin/env python

import os
import sys
import glob

from distutils.core import setup

from Cython.Build.Dependencies import cythonize, extended_iglob
from Cython.Utils import is_package_dir
from Cython.Compiler import Options

try:
    import multiprocessing
    parallel_compiles = int(multiprocessing.cpu_count() * 1.5)
except ImportError:
    multiprocessing = None
    parallel_compiles = 0


class _FakePool(object):
    def map_async(self, func, args):
        from itertools import imap
        for _ in imap(func, args):
            pass

    def close(self):
        pass

    def terminate(self):
        pass

    def join(self):
        pass


def parse_directives(option, name, value, parser):
    dest = option.dest
    old_directives = dict(getattr(parser.values, dest,
                                  Options.directive_defaults))
    directives = Options.parse_directive_list(
        value, relaxed_bool=True, current_settings=old_directives)
    setattr(parser.values, dest, directives)


def parse_options(option, name, value, parser):
    dest = option.dest
    options = dict(getattr(parser.values, dest, {}))
    for opt in value.split(','):
        if '=' in opt:
            n, v = opt.split('=', 1)
            v = v.lower() not in ('false', 'f', '0', 'no')
        else:
            n, v = opt, True
        options[n] = v
    setattr(parser.values, dest, options)


def find_package_base(path):
    base_dir, package_path = os.path.split(path)
    while os.path.isfile(os.path.join(base_dir, '__init__.py')):
        base_dir, parent = os.path.split(base_dir)
        package_path = '%s/%s' % (parent, package_path)
    return base_dir, package_path


def cython_compile(path_pattern, options):
    pool = None
    paths = map(os.path.abspath, extended_iglob(path_pattern))
    try:
        for path in paths:
            if options.build_inplace:
                base_dir = path
                while not os.path.isdir(base_dir) or is_package_dir(base_dir):
                    base_dir = os.path.dirname(base_dir)
            else:
                base_dir = None

            if os.path.isdir(path):
                # recursively compiling a package
                paths = [os.path.join(path, '**', '*.%s' % ext)
                         for ext in ('py', 'pyx')]
            else:
                # assume it's a file(-like thing)
                paths = [path]

            cwd = os.getcwd()
            try:
                if base_dir:
                    os.chdir(base_dir)
                ext_modules = cythonize(
                    paths,
                    nthreads=options.parallel,
                    exclude_failures=options.keep_going,
                    exclude=options.excludes,
                    compiler_directives=options.directives,
                    **options.options)
            finally:
                if base_dir:
                    os.chdir(cwd)

            if ext_modules and options.build:
                if len(ext_modules) > 1 and options.parallel > 1:
                    if pool is None:
                        try:
                            pool = multiprocessing.Pool(options.parallel)
                        except OSError:
                            pool = _FakePool()
                    pool.map_async(run_distutils, [
                        (base_dir, [ext]) for ext in ext_modules])
                else:
                    run_distutils((base_dir, ext_modules))
    except:
        if pool is not None:
            pool.terminate()
        raise
    else:
        if pool is not None:
            pool.close()
            pool.join()


def run_distutils(args):
    base_dir, ext_modules = args
    sys.argv[1:] = ['build_ext', '-i']
    cwd = os.getcwd()
    try:
        if base_dir:
            os.chdir(base_dir)
        setup(ext_modules=ext_modules)
    finally:
        if base_dir:
            os.chdir(cwd)


def parse_args(args):
    from optparse import OptionParser
    parser = OptionParser(usage='%prog [options] [sources and packages]+')
    parser.add_option('-X', '--directive', metavar='NAME=VALUE,...',
                      dest='directives', action='callback', callback=parse_directives,
                      default={}, help='set a compiler directive')
    parser.add_option('-s', '--option', metavar='NAME=VALUE',
                      dest='options', action='callback', callback=parse_options,
                      default={}, help='set a cythonize option')
    parser.add_option('-x', '--exclude', metavar='PATTERN',
                      dest='excludes', action='append', default=[],
                      help='exclude certain file patterns from the compilation')
    parser.add_option('-b', '--build', dest='build', action='store_true',
                      help='build extension modules using distutils')
    parser.add_option('-i', '--inplace', dest='build_inplace', action='store_true',
                      help='build extension modules in place using distutils (implies -b)')
    parser.add_option('-j', '--parallel', dest='parallel', metavar='N', type=int,
                      default=parallel_compiles,
                      help=('run builds in N parallel jobs (default: %d)' %
                            parallel_compiles or 1))
    parser.add_option('--lenient', dest='lenient', action='store_true',
                      help='increase Python compatibility by ignoring some compile time errors')
    parser.add_option('-k', '--keep-going', dest='keep_going', action='store_true',
                      help='compile as much as possible, ignore compilation failures')

    options, args = parser.parse_args(args)
    if not args:
        parser.error("no source files provided")
    if options.build_inplace:
        options.build = True
    if multiprocessing is None:
        options.parallel = 0
    return options, args


def main(args=None):
    options, paths = parse_args(args)

    if options.lenient:
        # increase Python compatibility by ignoring compile time errors
        Options.error_on_unknown_names = False
        Options.error_on_uninitialized = False

    for path in paths:
        cython_compile(path, options)


if __name__ == '__main__':
    main()
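For orientation, a minimal usage sketch of the new module (not part of the commit; the package name below is hypothetical, and the flags are the ones defined in parse_args() above):

# Equivalent of running:  python -m Cython.Build.Cythonize -i -j 2 mypkg
# i.e. compile the hypothetical package "mypkg" in place with 2 parallel build jobs.
from Cython.Build.Cythonize import main

main(['-i', '-j', '2', 'mypkg'])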
Cython/Utility/ImportExport.c
@@ -150,7 +150,7 @@ bad:
 /////////////// SetPackagePathFromImportLib.proto ///////////////
-#if PY_MAJOR_VERSION >= 3
+#if PY_VERSION_HEX >= 0x03030000
 static int __Pyx_SetPackagePathFromImportLib(const char* parent_package_name, PyObject* module_name);
 #else
 #define __Pyx_SetPackagePathFromImportLib(a, b) 0

@@ -160,7 +160,7 @@ static int __Pyx_SetPackagePathFromImportLib(const char* parent_package_name, Py
 //@requires: ObjectHandling.c::PyObjectGetAttrStr
 //@substitute: naming
-#if PY_MAJOR_VERSION >= 3
+#if PY_VERSION_HEX >= 0x03030000
 static int __Pyx_SetPackagePathFromImportLib(const char* parent_package_name, PyObject* module_name) {
     PyObject *importlib, *loader, *osmod, *ossep, *parts, *package_path;
     PyObject *path = NULL, *file_path = NULL;
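The guard tightens from "any Python 3" to "CPython 3.3 or later" (0x03030000 is the hex version code of 3.3.0), presumably because the helper relies on importlib machinery available from 3.3. A rough Python-level analogue of the new check, purely as an illustration and not part of the change (the flag name is hypothetical):

import sys

# PY_VERSION_HEX >= 0x03030000 corresponds to requiring CPython >= 3.3
HAVE_IMPORTLIB_PACKAGE_PATH = sys.hexversion >= 0x03030000  # hypothetical flag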
bin/cythonize
Most of this script is deleted (+5, -183); its implementation moves to Cython/Build/Cythonize.py above. The old contents shown in the diff:

#!/usr/bin/env python

import os
import sys
import glob

#
# command line frontend for cythonize()
#

from distutils.core import setup

from Cython.Build import cythonize
from Cython.Utils import find_root_package_dir, is_package_dir
from Cython.Compiler import Options

try:
    import multiprocessing
    parallel_compiles = int(multiprocessing.cpu_count() * 1.5)
except ImportError:
    multiprocessing = None
    parallel_compiles = 0


class _FakePool(object):
    def map_async(self, func, args):
        from itertools import imap
        for _ in imap(func, args):
            pass

    def close(self):
        pass

    def terminate(self):
        pass

    def join(self):
        pass


def parse_directives(option, name, value, parser):
    dest = option.dest
    old_directives = dict(getattr(parser.values, dest,
                                  Options.directive_defaults))
    directives = Options.parse_directive_list(
        value, relaxed_bool=True, current_settings=old_directives)
    setattr(parser.values, dest, directives)


def parse_options(option, name, value, parser):
    dest = option.dest
    options = dict(getattr(parser.values, dest, {}))
    for opt in value.split(','):
        if '=' in opt:
            n, v = opt.split('=', 1)
            v = v.lower() not in ('false', 'f', '0', 'no')
        else:
            n, v = opt, True
        options[n] = v
    setattr(parser.values, dest, options)


def find_package_base(path):
    base_dir, package_path = os.path.split(path)
    while os.path.isfile(os.path.join(base_dir, '__init__.py')):
        base_dir, parent = os.path.split(base_dir)
        package_path = '%s/%s' % (parent, package_path)
    return base_dir, package_path


def cython_compile(path_pattern, options):
    pool = None
    try:
        for path in glob.iglob(path_pattern):
            path = os.path.abspath(path)
            if options.build_inplace:
                if is_package_dir(path):
                    base_dir = find_root_package_dir(path)
                else:
                    base_dir = path
            else:
                base_dir = None

            if os.path.isdir(path):
                # recursively compiling a package
                paths = [os.path.join(path, '**', '*.%s' % ext)
                         for ext in ('py', 'pyx')]
            else:
                # assume it's a file(-like thing)
                paths = [path]

            cwd = os.getcwd()
            try:
                if base_dir:
                    os.chdir(base_dir)
                ext_modules = cythonize(
                    paths,
                    nthreads=options.parallel,
                    exclude_failures=options.keep_going,
                    exclude=options.excludes,
                    compiler_directives=options.directives,
                    **options.options)
            finally:
                if base_dir:
                    os.chdir(cwd)

            if options.build:
                if len(ext_modules) > 1 and options.parallel:
                    if pool is None:
                        try:
                            pool = multiprocessing.Pool(options.parallel)
                        except OSError:
                            pool = _FakePool()
                    pool.map_async(run_distutils, [
                        (base_dir, [ext]) for ext in ext_modules])
                else:
                    run_distutils((base_dir, ext_modules))
    except:
        if pool is not None:
            pool.terminate()
        raise
    else:
        if pool is not None:
            pool.close()
    finally:
        if pool is not None:
            pool.join()


def run_distutils(args):
    base_dir, ext_modules = args
    sys.argv[1:] = ['build_ext', '-i']
    cwd = os.getcwd()
    try:
        if base_dir:
            os.chdir(base_dir)
        setup(ext_modules=ext_modules)
    finally:
        if base_dir:
            os.chdir(cwd)


def parse_args(args):
    from optparse import OptionParser
    parser = OptionParser(usage='%prog [options] [sources and packages]+')
    parser.add_option('-X', '--directive', metavar='NAME=VALUE,...',
                      dest='directives', action='callback', callback=parse_directives,
                      default={}, help='set a compiler directive')
    parser.add_option('-s', '--option', metavar='NAME=VALUE',
                      dest='options', action='callback', callback=parse_options,
                      default={}, help='set a cythonize option')
    parser.add_option('-x', '--exclude', metavar='PATTERN',
                      dest='excludes', action='append', default=[],
                      help='exclude certain file patterns from the compilation')
    parser.add_option('-b', '--build', dest='build', action='store_true',
                      help='build extension modules using distutils')
    parser.add_option('-i', '--inplace', dest='build_inplace', action='store_true',
                      help='build extension modules in place using distutils (implies -b)')
    parser.add_option('-j', '--parallel', dest='parallel', metavar='N', type=int,
                      default=parallel_compiles,
                      help=('run builds in N parallel jobs (default: %d)' %
                            parallel_compiles or 1))
    parser.add_option('--lenient', dest='lenient', action='store_true',
                      help='increase Python compatibility by ignoring some compile time errors')
    parser.add_option('-k', '--keep-going', dest='keep_going', action='store_true',
                      help='compile as much as possible, ignore compilation failures')

    options, args = parser.parse_args(args)
    if not args:
        parser.error("no source files provided")
    if options.build_inplace:
        options.build = True
    if multiprocessing is None:
        options.parallel = 0
    return options, args


def main(args=None):
    options, paths = parse_args(args)

    if options.lenient:
        # increase Python compatibility by ignoring compile time errors
        Options.error_on_unknown_names = False
        Options.error_on_uninitialized = False

    for path in paths:
        cython_compile(path, options)


if __name__ == '__main__':
    main()

The lines added by this commit make the script a thin wrapper that delegates to the new module:

from Cython.Build.Cythonize import main
main()
docs/src/userguide/language_basics.rst
@@ -117,7 +117,14 @@ using normal C declaration syntax. For example,::
 
 When a parameter of a Python function is declared to have a C data type, it is
 passed in as a Python object and automatically converted to a C value, if
-possible. Automatic conversion is currently only possible for numeric types,
+possible. In other words, the definition of `spam` above is equivalent to writing::
+
+    def spam(python_i, python_s):
+        int i = python_i
+        char* s = python_s
+        ...
+
+Automatic conversion is currently only possible for numeric types,
 string types and structs (composed recusively of any of these types);
 attempting to use any other type for the parameter of a
 Python function will result in a compile-time error.
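A small illustrative sketch of the behaviour this paragraph describes (hypothetical module and call values, not part of the documentation change):

# spam.pyx -- a Python-visible function with C-typed parameters
def spam(int i, char* s):
    pass

# From Python:
#   spam(3, b"abc")        # 3 is converted to a C int, b"abc" to a char*
#   spam("three", b"abc")  # no automatic conversion from str to int -> TypeError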
runtests.py
@@ -216,6 +216,8 @@ VER_DEP_MODULES = {
                        'run.relativeimport_star_T542',
                        'run.initial_file_path', # relative import
                        'run.pynumber_subtype_conversion', # bug in Py2.4
+                       'build.cythonize_script', # python2.4 -m a.b.c
+                       'build.cythonize_script_package', # python2.4 -m a.b.c
                        ]),
     (2,6) : (operator.lt, lambda x: x in ['run.print_function',
                        'run.language_level', # print function
tests/build/common_include_dir.srctree
@@ -20,9 +20,9 @@ from distutils.core import setup
 # (In particular, TravisCI does not support spawning processes from tests.)
 try:
     import multiprocessing
-    multiprocessing.Pool(2)
+    multiprocessing.Pool(2).close()
     nthreads = 2
-except:
+except (ImportError, OSError):
     nthreads = 0
 
 setup(
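For clarity, the probe as it reads after this change (a consolidated sketch of the lines touched above; nthreads is presumably passed on to cythonize() later in the test's setup script):

# Check that worker processes can really be started before enabling parallel
# cythonization; some CI environments (e.g. TravisCI) can import
# multiprocessing but fail with OSError when a Pool is actually created.
try:
    import multiprocessing
    multiprocessing.Pool(2).close()   # probe only; close the pool right away
    nthreads = 2
except (ImportError, OSError):
    nthreads = 0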
tests/build/cythonize_script.srctree
 '''
-PYTHON ../../../bin/cythonize -i pkg
-PYTHON -c "import pkg.sub.test; assert pkg.sub.test.TEST == 'pkg.sub.test'; assert '.py' not in pkg.sub.test.__file__"
+PYTHON -m Cython.Build.Cythonize -i '**/*_test.py'
+PYTHON -c "import cy_test; assert cy_test.TEST == 'cy_test', cy_test.TEST; assert '.py' not in cy_test.__file__, cy_test.__file__"
+PYTHON -c "import pkg.cy_test; assert pkg.cy_test.TEST == 'pkg.cy_test', pkg.cy_test.TEST; assert '.py' not in pkg.cy_test.__file__, pkg.cy_test.__file__"
+PYTHON -c "import pkg.sub.cy_test; assert pkg.sub.cy_test.TEST == 'pkg.sub.cy_test', pkg.sub.cy_test.TEST; assert '.py' not in pkg.sub.cy_test.__file__, pkg.cy_test.__file__"
 '''
 
-######## test.py ########
+######## cy_test.py ########
 
-TEST = 'test'
+TEST = 'cy_test'
 
 ######## pkg/__init__.py ########
 
-######## pkg/test.py ########
+######## pkg/cy_test.py ########
 
-TEST = 'pkg.test'
+TEST = 'pkg.cy_test'
 
 ######## pkg/sub/__init__.py ########
 
-######## pkg/sub/test.py ########
+######## pkg/sub/cy_test.py ########
 
 # cython: language_level=3
-TEST = 'pkg.sub.test'
+TEST = 'pkg.sub.cy_test'
 ustring = 'abc'
tests/build/cythonize_script_package.srctree
(new file, mode 100644)
'''
PYTHON -m Cython.Build.Cythonize -i pkg -j1
PYTHON package_test.py
'''

######## package_test.py ########

import sys
if sys.version_info[0] < 3 or sys.version_info >= (3,3):
    # __init__.py compilation isn't supported in Py 3.[012]
    import pkg.sub.test
    assert pkg.sub.test.TEST == 'pkg.sub.test'
    assert '.py' not in pkg.sub.test.__file__

######## test.py ########

TEST = 'test'

######## pkg/__init__.py ########

######## pkg/test.py ########

TEST = 'pkg.test'

######## pkg/sub/__init__.py ########

######## pkg/sub/test.py ########

# cython: language_level=3
TEST = 'pkg.sub.test'
ustring = 'abc'
assert isinstance(ustring, unicode)