Commit 3efa9f9a authored by scoder's avatar scoder Committed by GitHub

Merge branch 'master' into readonly_buffers

parents c09f9afe 829d7bbe
language: python os: linux
dist: trusty dist: trusty
sudo: false sudo: false
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gdb
- python-dbg
- python3-dbg
- gcc-6
- g++-6
# GCC-7 currently takes 5-7 *minutes* to download on travis
#- gcc-7
#- g++-7
cache: cache:
pip: true pip: true
directories: directories:
- $HOME/.ccache - $HOME/.ccache
language: python
python: python:
- 2.7 - 2.7
- 3.6 - 3.6
...@@ -25,45 +40,86 @@ env: ...@@ -25,45 +40,86 @@ env:
- CCACHE_SLOPPINESS=pch_defines,time_macros - CCACHE_SLOPPINESS=pch_defines,time_macros
- CCACHE_COMPRESS=1 - CCACHE_COMPRESS=1
- CCACHE_MAXSIZE=100M - CCACHE_MAXSIZE=100M
- PATH="/usr/lib/ccache:$PATH" - PATH="/usr/lib/ccache:$HOME/gcc-symlinks:$PATH"
matrix: matrix:
- BACKEND=c - BACKEND=c
- BACKEND=cpp - BACKEND=cpp
matrix:
include:
#- python: 3.7-dev
# env: BACKEND=c PY=3 CC=gcc-7
- os: osx
osx_image: xcode6.4
env: BACKEND=c PY=2
python: 2
language: c
compiler: clang
cache: false
- os: osx
osx_image: xcode6.4
env: BACKEND=cpp PY=2
python: 2
language: cpp
compiler: clang
cache: false
- os: osx
osx_image: xcode6.4
env: BACKEND=c PY=3
python: 3
language: c
compiler: clang
cache: false
- os: osx
osx_image: xcode6.4
env: BACKEND=cpp PY=3
python: 3
language: cpp
compiler: clang
cache: false
allow_failures:
- python: pypy
- python: pypy3
- python: 3.7-dev
exclude:
- python: pypy
env: BACKEND=cpp
- python: pypy3
env: BACKEND=cpp
branches: branches:
only: only:
- master - master
- release - release
before_install:
- |
if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then
mkdir "$HOME/gcc-symlinks"
ln -s /usr/bin/gcc-6 $HOME/gcc-symlinks/gcc
ln -s /usr/bin/g++-6 $HOME/gcc-symlinks/g++
if [ -n "$CC" ]; then "$CC" --version; else gcc --version; fi
fi
- |
if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then # Install Miniconda
curl -s -o miniconda.sh https://repo.continuum.io/miniconda/Miniconda$PY-latest-MacOSX-x86_64.sh;
bash miniconda.sh -b -p $HOME/miniconda && rm miniconda.sh;
export PATH="$HOME/miniconda/bin:$PATH"; hash -r;
#conda install --quiet --yes nomkl --file=test-requirements.txt --file=test-requirements-cpython.txt;
fi
install: install:
- python -c 'import sys; print("Python %s" % (sys.version,))' - python -c 'import sys; print("Python %s" % (sys.version,))'
- if [ -n "${TRAVIS_PYTHON_VERSION##*-dev}" -a -n "${TRAVIS_PYTHON_VERSION##2.6*}" ]; then pip install -r test-requirements.txt $( [ -z "${TRAVIS_PYTHON_VERSION##pypy*}" ] || echo " -r test-requirements-cpython.txt" ) ; fi - if [ -n "${TRAVIS_PYTHON_VERSION##*-dev}" -a -n "${TRAVIS_PYTHON_VERSION##2.6*}" ]; then pip install -r test-requirements.txt $( [ -z "${TRAVIS_PYTHON_VERSION##pypy*}" ] || echo " -r test-requirements-cpython.txt" ) ; fi
- CFLAGS="-O2 -ggdb -Wall -Wextra $(python -c 'import sys; print("-fno-strict-aliasing" if sys.version_info[0] == 2 else "")')" python setup.py build - CFLAGS="-O2 -ggdb -Wall -Wextra $(python -c 'import sys; print("-fno-strict-aliasing" if sys.version_info[0] == 2 else "")')" python setup.py build
before_script: ccache -s before_script: ccache -s || true
script: script:
- PYTHON_DBG="python$( python -c 'import sys; print("%d.%d" % sys.version_info[:2])' )-dbg" - PYTHON_DBG="python$( python -c 'import sys; print("%d.%d" % sys.version_info[:2])' )-dbg"
- if $PYTHON_DBG -V >&2; then CFLAGS="-O0 -ggdb" $PYTHON_DBG runtests.py -vv Debugger --backends=$BACKEND; fi - if $PYTHON_DBG -V >&2; then CFLAGS="-O0 -ggdb" $PYTHON_DBG runtests.py -vv Debugger --backends=$BACKEND; fi
- if [ false && "$BACKEND" = "cpp" ]; then pip install pythran; fi # disabled: needs Pythran > 0.8.1 - if [ "$BACKEND" = "cpp" -a -n "${TRAVIS_PYTHON_VERSION##2.6*}" ]; then pip install pythran; fi
- CFLAGS="-O2 -ggdb -Wall -Wextra $(python -c 'import sys; print("-fno-strict-aliasing" if sys.version_info[0] == 2 else "")')" python setup.py build_ext -i - CFLAGS="-O2 -ggdb -Wall -Wextra $(python -c 'import sys; print("-fno-strict-aliasing" if sys.version_info[0] == 2 else "")')" python setup.py build_ext -i
- CFLAGS="-O0 -ggdb -Wall -Wextra" python runtests.py -vv -x Debugger --backends=$BACKEND -j4 - CFLAGS="-O0 -ggdb -Wall -Wextra" python runtests.py -vv -x Debugger --backends=$BACKEND -j7
matrix:
allow_failures:
- python: pypy
- python: pypy3
- python: 3.6-dev
- python: 3.7-dev
exclude:
- python: pypy
env: BACKEND=cpp
- python: pypy3
env: BACKEND=cpp
addons:
apt:
packages:
- gdb
- python-dbg
- python3-dbg
This diff is collapsed.
...@@ -3,9 +3,17 @@ from __future__ import absolute_import, print_function ...@@ -3,9 +3,17 @@ from __future__ import absolute_import, print_function
import cython import cython
from .. import __version__ from .. import __version__
import os
import shutil
import hashlib
import subprocess
import collections import collections
import re, os, sys, time import re, sys, time
from glob import iglob from glob import iglob
from io import open as io_open
from os.path import relpath as _relpath
from distutils.extension import Extension
from distutils.util import strtobool
try: try:
import gzip import gzip
...@@ -14,34 +22,6 @@ try: ...@@ -14,34 +22,6 @@ try:
except ImportError: except ImportError:
gzip_open = open gzip_open = open
gzip_ext = '' gzip_ext = ''
import shutil
import subprocess
import os
try:
import hashlib
except ImportError:
import md5 as hashlib
try:
from io import open as io_open
except ImportError:
from codecs import open as io_open
try:
from os.path import relpath as _relpath
except ImportError:
# Py<2.6
def _relpath(path, start=os.path.curdir):
if not path:
raise ValueError("no path specified")
start_list = os.path.abspath(start).split(os.path.sep)
path_list = os.path.abspath(path).split(os.path.sep)
i = len(os.path.commonprefix([start_list, path_list]))
rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
if not rel_list:
return os.path.curdir
return os.path.join(*rel_list)
try: try:
import pythran import pythran
...@@ -50,9 +30,6 @@ try: ...@@ -50,9 +30,6 @@ try:
except: except:
PythranAvailable = False PythranAvailable = False
from distutils.extension import Extension
from distutils.util import strtobool
from .. import Utils from .. import Utils
from ..Utils import (cached_function, cached_method, path_exists, from ..Utils import (cached_function, cached_method, path_exists,
safe_makedirs, copy_file_to_dir_if_newer, is_package_dir) safe_makedirs, copy_file_to_dir_if_newer, is_package_dir)
...@@ -777,11 +754,11 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet= ...@@ -777,11 +754,11 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
cython_sources = [s for s in pattern.sources cython_sources = [s for s in pattern.sources
if os.path.splitext(s)[1] in ('.py', '.pyx')] if os.path.splitext(s)[1] in ('.py', '.pyx')]
if cython_sources: if cython_sources:
filepattern = cython_sources[0] filepattern = cython_sources[0]
if len(cython_sources) > 1: if len(cython_sources) > 1:
print("Warning: Multiple cython sources found for extension '%s': %s\n" print("Warning: Multiple cython sources found for extension '%s': %s\n"
"See http://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html " "See http://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html "
"for sharing declarations among Cython files." % (pattern.name, cython_sources)) "for sharing declarations among Cython files." % (pattern.name, cython_sources))
else: else:
# ignore non-cython modules # ignore non-cython modules
module_list.append(pattern) module_list.append(pattern)
...@@ -800,7 +777,6 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet= ...@@ -800,7 +777,6 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
for file in nonempty(sorted(extended_iglob(filepattern)), "'%s' doesn't match any files" % filepattern): for file in nonempty(sorted(extended_iglob(filepattern)), "'%s' doesn't match any files" % filepattern):
if os.path.abspath(file) in to_exclude: if os.path.abspath(file) in to_exclude:
continue continue
pkg = deps.package(file)
module_name = deps.fully_qualified_name(file) module_name = deps.fully_qualified_name(file)
if '*' in name: if '*' in name:
if module_name in explicit_modules: if module_name in explicit_modules:
...@@ -809,6 +785,9 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet= ...@@ -809,6 +785,9 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
print("Warning: Extension name '%s' does not match fully qualified name '%s' of '%s'" % ( print("Warning: Extension name '%s' does not match fully qualified name '%s' of '%s'" % (
name, module_name, file)) name, module_name, file))
module_name = name module_name = name
if module_name == 'cython':
raise ValueError('cython is a special module, cannot be used as a module name')
if module_name not in seen: if module_name not in seen:
try: try:
...@@ -850,8 +829,13 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet= ...@@ -850,8 +829,13 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
module_metadata[module_name] = metadata module_metadata[module_name] = metadata
if file not in m.sources: if file not in m.sources:
# Old setuptools unconditionally replaces .pyx with .c # Old setuptools unconditionally replaces .pyx with .c/.cpp
m.sources.remove(file.rsplit('.')[0] + '.c') target_file = os.path.splitext(file)[0] + ('.cpp' if m.language == 'c++' else '.c')
try:
m.sources.remove(target_file)
except ValueError:
# never seen this in the wild, but probably better to warn about this unexpected case
print("Warning: Cython source file not found in sources list, adding %s" % file)
m.sources.insert(0, file) m.sources.insert(0, file)
seen.add(name) seen.add(name)
return module_list, module_metadata return module_list, module_metadata
...@@ -916,22 +900,33 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, ...@@ -916,22 +900,33 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
deps = create_dependency_tree(ctx, quiet=quiet) deps = create_dependency_tree(ctx, quiet=quiet)
build_dir = getattr(options, 'build_dir', None) build_dir = getattr(options, 'build_dir', None)
modules_by_cfile = {} def copy_to_build_dir(filepath, root=os.getcwd()):
filepath_abs = os.path.abspath(filepath)
if os.path.isabs(filepath):
filepath = filepath_abs
if filepath_abs.startswith(root):
# distutil extension depends are relative to cwd
mod_dir = join_path(build_dir,
os.path.dirname(_relpath(filepath, root)))
copy_once_if_newer(filepath_abs, mod_dir)
modules_by_cfile = collections.defaultdict(list)
to_compile = [] to_compile = []
for m in module_list: for m in module_list:
if build_dir: if build_dir:
root = os.getcwd() # distutil extension depends are relative to cwd
def copy_to_build_dir(filepath, root=root):
filepath_abs = os.path.abspath(filepath)
if os.path.isabs(filepath):
filepath = filepath_abs
if filepath_abs.startswith(root):
mod_dir = join_path(build_dir,
os.path.dirname(_relpath(filepath, root)))
copy_once_if_newer(filepath_abs, mod_dir)
for dep in m.depends: for dep in m.depends:
copy_to_build_dir(dep) copy_to_build_dir(dep)
cy_sources = [
source for source in m.sources
if os.path.splitext(source)[1] in ('.pyx', '.py')]
if len(cy_sources) == 1:
# normal "special" case: believe the Extension module name to allow user overrides
full_module_name = m.name
else:
# infer FQMN from source files
full_module_name = None
new_sources = [] new_sources = []
for source in m.sources: for source in m.sources:
base, ext = os.path.splitext(source) base, ext = os.path.splitext(source)
...@@ -976,13 +971,12 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, ...@@ -976,13 +971,12 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
fingerprint = deps.transitive_fingerprint(source, extra) fingerprint = deps.transitive_fingerprint(source, extra)
else: else:
fingerprint = None fingerprint = None
to_compile.append((priority, source, c_file, fingerprint, quiet, to_compile.append((
options, not exclude_failures, module_metadata.get(m.name))) priority, source, c_file, fingerprint, quiet,
options, not exclude_failures, module_metadata.get(m.name),
full_module_name))
new_sources.append(c_file) new_sources.append(c_file)
if c_file not in modules_by_cfile: modules_by_cfile[c_file].append(m)
modules_by_cfile[c_file] = [m]
else:
modules_by_cfile[c_file].append(m)
else: else:
new_sources.append(source) new_sources.append(source)
if build_dir: if build_dir:
...@@ -1098,17 +1092,15 @@ else: ...@@ -1098,17 +1092,15 @@ else:
# TODO: Share context? Issue: pyx processing leaks into pxd module # TODO: Share context? Issue: pyx processing leaks into pxd module
@record_results @record_results
def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None, raise_on_failure=True, embedded_metadata=None, progress=""): def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
from ..Compiler.Main import compile, default_options raise_on_failure=True, embedded_metadata=None, full_module_name=None,
progress=""):
from ..Compiler.Main import compile_single, default_options
from ..Compiler.Errors import CompileError, PyrexError from ..Compiler.Errors import CompileError, PyrexError
if fingerprint: if fingerprint:
if not os.path.exists(options.cache): if not os.path.exists(options.cache):
try: safe_makedirs(options.cache)
os.mkdir(options.cache)
except:
if not os.path.exists(options.cache):
raise
# Cython-generated c files are highly compressible. # Cython-generated c files are highly compressible.
# (E.g. a compression ratio of about 10 for Sage). # (E.g. a compression ratio of about 10 for Sage).
fingerprint_file = join_path( fingerprint_file = join_path(
...@@ -1136,7 +1128,7 @@ def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None, raise_on_f ...@@ -1136,7 +1128,7 @@ def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None, raise_on_f
any_failures = 0 any_failures = 0
try: try:
result = compile([pyx_file], options) result = compile_single(pyx_file, options, full_module_name=full_module_name)
if result.num_errors > 0: if result.num_errors > 0:
any_failures = 1 any_failures = 1
except (EnvironmentError, PyrexError) as e: except (EnvironmentError, PyrexError) as e:
......
...@@ -53,8 +53,10 @@ import re ...@@ -53,8 +53,10 @@ import re
import sys import sys
import time import time
import copy import copy
import distutils.log
import textwrap import textwrap
try: try:
reload reload
except NameError: # Python 3 except NameError: # Python 3
...@@ -236,6 +238,11 @@ class CythonMagics(Magics): ...@@ -236,6 +238,11 @@ class CythonMagics(Magics):
help=("Enable profile guided optimisation in the C compiler. " help=("Enable profile guided optimisation in the C compiler. "
"Compiles the cell twice and executes it in between to generate a runtime profile.") "Compiles the cell twice and executes it in between to generate a runtime profile.")
) )
@magic_arguments.argument(
'--verbose', dest='quiet', action='store_false', default=True,
help=("Print debug information like generated .c/.cpp file location "
"and exact gcc/g++ command invoked.")
)
@cell_magic @cell_magic
def cython(self, line, cell): def cython(self, line, cell):
"""Compile and import everything from a Cython code cell. """Compile and import everything from a Cython code cell.
...@@ -282,7 +289,6 @@ class CythonMagics(Magics): ...@@ -282,7 +289,6 @@ class CythonMagics(Magics):
args = magic_arguments.parse_argstring(self.cython, line) args = magic_arguments.parse_argstring(self.cython, line)
code = cell if cell.endswith('\n') else cell + '\n' code = cell if cell.endswith('\n') else cell + '\n'
lib_dir = os.path.join(get_ipython_cache_dir(), 'cython') lib_dir = os.path.join(get_ipython_cache_dir(), 'cython')
quiet = True
key = (code, line, sys.version_info, sys.executable, cython_version) key = (code, line, sys.version_info, sys.executable, cython_version)
if not os.path.exists(lib_dir): if not os.path.exists(lib_dir):
...@@ -311,7 +317,7 @@ class CythonMagics(Magics): ...@@ -311,7 +317,7 @@ class CythonMagics(Magics):
extension = None extension = None
if need_cythonize: if need_cythonize:
extensions = self._cythonize(module_name, code, lib_dir, args, quiet=quiet) extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet)
assert len(extensions) == 1 assert len(extensions) == 1
extension = extensions[0] extension = extensions[0]
self._code_cache[key] = module_name self._code_cache[key] = module_name
...@@ -319,7 +325,8 @@ class CythonMagics(Magics): ...@@ -319,7 +325,8 @@ class CythonMagics(Magics):
if args.pgo: if args.pgo:
self._profile_pgo_wrapper(extension, lib_dir) self._profile_pgo_wrapper(extension, lib_dir)
self._build_extension(extension, lib_dir, pgo_step_name='use' if args.pgo else None) self._build_extension(extension, lib_dir, pgo_step_name='use' if args.pgo else None,
quiet=args.quiet)
module = imp.load_dynamic(module_name, module_path) module = imp.load_dynamic(module_name, module_path)
self._import_all(module) self._import_all(module)
...@@ -386,7 +393,7 @@ class CythonMagics(Magics): ...@@ -386,7 +393,7 @@ class CythonMagics(Magics):
so_module_path = os.path.join(lib_dir, pgo_module_name + self.so_ext) so_module_path = os.path.join(lib_dir, pgo_module_name + self.so_ext)
imp.load_dynamic(pgo_module_name, so_module_path) imp.load_dynamic(pgo_module_name, so_module_path)
def _cythonize(self, module_name, code, lib_dir, args, quiet=False): def _cythonize(self, module_name, code, lib_dir, args, quiet=True):
pyx_file = os.path.join(lib_dir, module_name + '.pyx') pyx_file = os.path.join(lib_dir, module_name + '.pyx')
pyx_file = py3compat.cast_bytes_py2(pyx_file, encoding=sys.getfilesystemencoding()) pyx_file = py3compat.cast_bytes_py2(pyx_file, encoding=sys.getfilesystemencoding())
...@@ -422,10 +429,17 @@ class CythonMagics(Magics): ...@@ -422,10 +429,17 @@ class CythonMagics(Magics):
except CompileError: except CompileError:
return None return None
def _build_extension(self, extension, lib_dir, temp_dir=None, pgo_step_name=None): def _build_extension(self, extension, lib_dir, temp_dir=None, pgo_step_name=None, quiet=True):
build_extension = self._get_build_extension( build_extension = self._get_build_extension(
extension, lib_dir=lib_dir, temp_dir=temp_dir, pgo_step_name=pgo_step_name) extension, lib_dir=lib_dir, temp_dir=temp_dir, pgo_step_name=pgo_step_name)
build_extension.run() old_threshold = None
try:
if not quiet:
old_threshold = distutils.log.set_threshold(distutils.log.DEBUG)
build_extension.run()
finally:
if not quiet and old_threshold is not None:
distutils.log.set_threshold(old_threshold)
def _add_pgo_flags(self, build_extension, step_name, temp_dir): def _add_pgo_flags(self, build_extension, step_name, temp_dir):
compiler_type = build_extension.compiler.compiler_type compiler_type = build_extension.compiler.compiler_type
......
...@@ -3,25 +3,38 @@ ...@@ -3,25 +3,38 @@
"""Tests for the Cython magics extension.""" """Tests for the Cython magics extension."""
from __future__ import absolute_import
import os import os
import sys import sys
from contextlib import contextmanager
from Cython.Build import IpythonMagic
from Cython.TestUtils import CythonTest
try: try:
from IPython.testing.globalipapp import get_ipython import IPython.testing.globalipapp
from IPython.utils import py3compat from IPython.utils import py3compat
except: except ImportError:
__test__ = False # Disable tests and fake helpers for initialisation below.
class _py3compat(object):
def str_to_unicode(self, s):
return s
py3compat = _py3compat()
def skip_if_not_installed(_):
return None
else:
def skip_if_not_installed(c):
return c
try: try:
# disable IPython history thread to avoid having to clean it up # disable IPython history thread before it gets started to avoid having to clean it up
from IPython.core.history import HistoryManager from IPython.core.history import HistoryManager
HistoryManager.enabled = False HistoryManager.enabled = False
except ImportError: except ImportError:
pass pass
from Cython.TestUtils import CythonTest
ip = get_ipython()
code = py3compat.str_to_unicode("""\ code = py3compat.str_to_unicode("""\
def f(x): def f(x):
return 2*x return 2*x
...@@ -61,19 +74,27 @@ else: ...@@ -61,19 +74,27 @@ else:
return _skip_win32 return _skip_win32
@skip_if_not_installed
class TestIPythonMagic(CythonTest): class TestIPythonMagic(CythonTest):
@classmethod
def setUpClass(cls):
CythonTest.setUpClass()
cls._ip = IPython.testing.globalipapp.get_ipython()
def setUp(self): def setUp(self):
CythonTest.setUp(self) CythonTest.setUp(self)
ip.extension_manager.load_extension('cython') self._ip.extension_manager.load_extension('cython')
def test_cython_inline(self): def test_cython_inline(self):
ip = self._ip
ip.ex('a=10; b=20') ip.ex('a=10; b=20')
result = ip.run_cell_magic('cython_inline', '', 'return a+b') result = ip.run_cell_magic('cython_inline', '', 'return a+b')
self.assertEqual(result, 30) self.assertEqual(result, 30)
@skip_win32('Skip on Windows') @skip_win32('Skip on Windows')
def test_cython_pyximport(self): def test_cython_pyximport(self):
ip = self._ip
module_name = '_test_cython_pyximport' module_name = '_test_cython_pyximport'
ip.run_cell_magic('cython_pyximport', module_name, code) ip.run_cell_magic('cython_pyximport', module_name, code)
ip.ex('g = f(10)') ip.ex('g = f(10)')
...@@ -87,12 +108,14 @@ class TestIPythonMagic(CythonTest): ...@@ -87,12 +108,14 @@ class TestIPythonMagic(CythonTest):
pass pass
def test_cython(self): def test_cython(self):
ip = self._ip
ip.run_cell_magic('cython', '', code) ip.run_cell_magic('cython', '', code)
ip.ex('g = f(10)') ip.ex('g = f(10)')
self.assertEqual(ip.user_ns['g'], 20.0) self.assertEqual(ip.user_ns['g'], 20.0)
def test_cython_name(self): def test_cython_name(self):
# The Cython module named 'mymodule' defines the function f. # The Cython module named 'mymodule' defines the function f.
ip = self._ip
ip.run_cell_magic('cython', '--name=mymodule', code) ip.run_cell_magic('cython', '--name=mymodule', code)
# This module can now be imported in the interactive namespace. # This module can now be imported in the interactive namespace.
ip.ex('import mymodule; g = mymodule.f(10)') ip.ex('import mymodule; g = mymodule.f(10)')
...@@ -100,6 +123,7 @@ class TestIPythonMagic(CythonTest): ...@@ -100,6 +123,7 @@ class TestIPythonMagic(CythonTest):
def test_cython_language_level(self): def test_cython_language_level(self):
# The Cython cell defines the functions f() and call(). # The Cython cell defines the functions f() and call().
ip = self._ip
ip.run_cell_magic('cython', '', cython3_code) ip.run_cell_magic('cython', '', cython3_code)
ip.ex('g = f(10); h = call(10)') ip.ex('g = f(10); h = call(10)')
if sys.version_info[0] < 3: if sys.version_info[0] < 3:
...@@ -111,6 +135,7 @@ class TestIPythonMagic(CythonTest): ...@@ -111,6 +135,7 @@ class TestIPythonMagic(CythonTest):
def test_cython3(self): def test_cython3(self):
# The Cython cell defines the functions f() and call(). # The Cython cell defines the functions f() and call().
ip = self._ip
ip.run_cell_magic('cython', '-3', cython3_code) ip.run_cell_magic('cython', '-3', cython3_code)
ip.ex('g = f(10); h = call(10)') ip.ex('g = f(10); h = call(10)')
self.assertEqual(ip.user_ns['g'], 2.0 / 10.0) self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
...@@ -118,13 +143,16 @@ class TestIPythonMagic(CythonTest): ...@@ -118,13 +143,16 @@ class TestIPythonMagic(CythonTest):
def test_cython2(self): def test_cython2(self):
# The Cython cell defines the functions f() and call(). # The Cython cell defines the functions f() and call().
ip = self._ip
ip.run_cell_magic('cython', '-2', cython3_code) ip.run_cell_magic('cython', '-2', cython3_code)
ip.ex('g = f(10); h = call(10)') ip.ex('g = f(10); h = call(10)')
self.assertEqual(ip.user_ns['g'], 2 // 10) self.assertEqual(ip.user_ns['g'], 2 // 10)
self.assertEqual(ip.user_ns['h'], 2 // 10) self.assertEqual(ip.user_ns['h'], 2 // 10)
@skip_win32('Skip on Windows')
def test_cython3_pgo(self): def test_cython3_pgo(self):
# The Cython cell defines the functions f() and call(). # The Cython cell defines the functions f() and call().
ip = self._ip
ip.run_cell_magic('cython', '-3 --pgo', pgo_cython3_code) ip.run_cell_magic('cython', '-3 --pgo', pgo_cython3_code)
ip.ex('g = f(10); h = call(10); main()') ip.ex('g = f(10); h = call(10); main()')
self.assertEqual(ip.user_ns['g'], 2.0 / 10.0) self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
...@@ -132,6 +160,7 @@ class TestIPythonMagic(CythonTest): ...@@ -132,6 +160,7 @@ class TestIPythonMagic(CythonTest):
@skip_win32('Skip on Windows') @skip_win32('Skip on Windows')
def test_extlibs(self): def test_extlibs(self):
ip = self._ip
code = py3compat.str_to_unicode(""" code = py3compat.str_to_unicode("""
from libc.math cimport sin from libc.math cimport sin
x = sin(0.0) x = sin(0.0)
...@@ -139,3 +168,45 @@ x = sin(0.0) ...@@ -139,3 +168,45 @@ x = sin(0.0)
ip.user_ns['x'] = 1 ip.user_ns['x'] = 1
ip.run_cell_magic('cython', '-l m', code) ip.run_cell_magic('cython', '-l m', code)
self.assertEqual(ip.user_ns['x'], 0) self.assertEqual(ip.user_ns['x'], 0)
def test_cython_verbose(self):
ip = self._ip
ip.run_cell_magic('cython', '--verbose', code)
ip.ex('g = f(10)')
self.assertEqual(ip.user_ns['g'], 20.0)
def test_cython_verbose_thresholds(self):
@contextmanager
def mock_distutils():
class MockLog:
DEBUG = 1
INFO = 2
thresholds = [INFO]
def set_threshold(self, val):
self.thresholds.append(val)
return self.thresholds[-2]
new_log = MockLog()
old_log = IpythonMagic.distutils.log
try:
IpythonMagic.distutils.log = new_log
yield new_log
finally:
IpythonMagic.distutils.log = old_log
ip = self._ip
with mock_distutils() as verbose_log:
ip.run_cell_magic('cython', '--verbose', code)
ip.ex('g = f(10)')
self.assertEqual(ip.user_ns['g'], 20.0)
self.assertEquals([verbose_log.INFO, verbose_log.DEBUG, verbose_log.INFO],
verbose_log.thresholds)
with mock_distutils() as normal_log:
ip.run_cell_magic('cython', '', code)
ip.ex('g = f(10)')
self.assertEqual(ip.user_ns['g'], 20.0)
self.assertEquals([normal_log.INFO], normal_log.thresholds)
...@@ -363,7 +363,7 @@ class CodeWriter(DeclarationWriter): ...@@ -363,7 +363,7 @@ class CodeWriter(DeclarationWriter):
self.dedent() self.dedent()
def visit_IfStatNode(self, node): def visit_IfStatNode(self, node):
# The IfClauseNode is handled directly without a seperate match # The IfClauseNode is handled directly without a separate match
# for clariy. # for clariy.
self.startline(u"if ") self.startline(u"if ")
self.visit(node.if_clauses[0].condition) self.visit(node.if_clauses[0].condition)
......
...@@ -326,7 +326,7 @@ def put_acquire_arg_buffer(entry, code, pos): ...@@ -326,7 +326,7 @@ def put_acquire_arg_buffer(entry, code, pos):
code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % entry.type.dtype.struct_nesting_depth()) code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % entry.type.dtype.struct_nesting_depth())
code.putln(code.error_goto_if("%s == -1" % getbuffer, pos)) code.putln(code.error_goto_if("%s == -1" % getbuffer, pos))
code.putln("}") code.putln("}")
# An exception raised in arg parsing cannot be catched, so no # An exception raised in arg parsing cannot be caught, so no
# need to care about the buffer then. # need to care about the buffer then.
put_unpack_buffer_aux_into_scope(entry, code) put_unpack_buffer_aux_into_scope(entry, code)
...@@ -617,7 +617,7 @@ class GetAndReleaseBufferUtilityCode(object): ...@@ -617,7 +617,7 @@ class GetAndReleaseBufferUtilityCode(object):
def mangle_dtype_name(dtype): def mangle_dtype_name(dtype):
# Use prefixes to seperate user defined types from builtins # Use prefixes to separate user defined types from builtins
# (consider "typedef float unsigned_int") # (consider "typedef float unsigned_int")
if dtype.is_pyobject: if dtype.is_pyobject:
return "object" return "object"
...@@ -636,7 +636,7 @@ def get_type_information_cname(code, dtype, maxdepth=None): ...@@ -636,7 +636,7 @@ def get_type_information_cname(code, dtype, maxdepth=None):
and return the name of the type info struct. and return the name of the type info struct.
Structs with two floats of the same size are encoded as complex numbers. Structs with two floats of the same size are encoded as complex numbers.
One can seperate between complex numbers declared as struct or with native One can separate between complex numbers declared as struct or with native
encoding by inspecting to see if the fields field of the type is encoding by inspecting to see if the fields field of the type is
filled in. filled in.
""" """
......
...@@ -328,7 +328,10 @@ builtin_types_table = [ ...@@ -328,7 +328,10 @@ builtin_types_table = [
("set", "PySet_Type", [BuiltinMethod("__contains__", "TO", "b", "PySequence_Contains"), ("set", "PySet_Type", [BuiltinMethod("__contains__", "TO", "b", "PySequence_Contains"),
BuiltinMethod("clear", "T", "r", "PySet_Clear"), BuiltinMethod("clear", "T", "r", "PySet_Clear"),
# discard() and remove() have a special treatment for unhashable values # discard() and remove() have a special treatment for unhashable values
# BuiltinMethod("discard", "TO", "r", "PySet_Discard"), BuiltinMethod("discard", "TO", "r", "__Pyx_PySet_Discard",
utility_code=UtilityCode.load("py_set_discard", "Optimize.c")),
BuiltinMethod("remove", "TO", "r", "__Pyx_PySet_Remove",
utility_code=UtilityCode.load("py_set_remove", "Optimize.c")),
# update is actually variadic (see Github issue #1645) # update is actually variadic (see Github issue #1645)
# BuiltinMethod("update", "TO", "r", "__Pyx_PySet_Update", # BuiltinMethod("update", "TO", "r", "__Pyx_PySet_Update",
# utility_code=UtilityCode.load_cached("PySet_Update", "Builtins.c")), # utility_code=UtilityCode.load_cached("PySet_Update", "Builtins.c")),
......
This diff is collapsed.
...@@ -12,7 +12,7 @@ class ExtractPxdCode(VisitorTransform): ...@@ -12,7 +12,7 @@ class ExtractPxdCode(VisitorTransform):
The result is a tuple (StatListNode, ModuleScope), i.e. The result is a tuple (StatListNode, ModuleScope), i.e.
everything that is needed from the pxd after it is processed. everything that is needed from the pxd after it is processed.
A purer approach would be to seperately compile the pxd code, A purer approach would be to separately compile the pxd code,
but the result would have to be slightly more sophisticated but the result would have to be slightly more sophisticated
than pure strings (functions + wanted interned strings + than pure strings (functions + wanted interned strings +
wanted utility code + wanted cached objects) so for now this wanted utility code + wanted cached objects) so for now this
......
This diff is collapsed.
...@@ -341,14 +341,6 @@ class NameAssignment(object): ...@@ -341,14 +341,6 @@ class NameAssignment(object):
return self.entry.type return self.entry.type
return self.inferred_type return self.inferred_type
def __getstate__(self):
return (self.lhs, self.rhs, self.entry, self.pos,
self.refs, self.is_arg, self.is_deletion, self.inferred_type)
def __setstate__(self, state):
(self.lhs, self.rhs, self.entry, self.pos,
self.refs, self.is_arg, self.is_deletion, self.inferred_type) = state
class StaticAssignment(NameAssignment): class StaticAssignment(NameAssignment):
"""Initialised at declaration time, e.g. stack allocation.""" """Initialised at declaration time, e.g. stack allocation."""
......
...@@ -127,9 +127,6 @@ class FusedCFuncDefNode(StatListNode): ...@@ -127,9 +127,6 @@ class FusedCFuncDefNode(StatListNode):
# len(permutations)) # len(permutations))
# import pprint; pprint.pprint([d for cname, d in permutations]) # import pprint; pprint.pprint([d for cname, d in permutations])
if self.node.entry in env.cfunc_entries:
env.cfunc_entries.remove(self.node.entry)
# Prevent copying of the python function # Prevent copying of the python function
self.orig_py_func = orig_py_func = self.node.py_func self.orig_py_func = orig_py_func = self.node.py_func
self.node.py_func = None self.node.py_func = None
...@@ -139,12 +136,26 @@ class FusedCFuncDefNode(StatListNode): ...@@ -139,12 +136,26 @@ class FusedCFuncDefNode(StatListNode):
fused_types = self.node.type.get_fused_types() fused_types = self.node.type.get_fused_types()
self.fused_compound_types = fused_types self.fused_compound_types = fused_types
new_cfunc_entries = []
for cname, fused_to_specific in permutations: for cname, fused_to_specific in permutations:
copied_node = copy.deepcopy(self.node) copied_node = copy.deepcopy(self.node)
# Make the types in our CFuncType specific # Make the types in our CFuncType specific.
type = copied_node.type.specialize(fused_to_specific) type = copied_node.type.specialize(fused_to_specific)
entry = copied_node.entry entry = copied_node.entry
type.specialize_entry(entry, cname)
# Reuse existing Entries (e.g. from .pxd files).
for i, orig_entry in enumerate(env.cfunc_entries):
if entry.cname == orig_entry.cname and type.same_as_resolved_type(orig_entry.type):
copied_node.entry = env.cfunc_entries[i]
if not copied_node.entry.func_cname:
copied_node.entry.func_cname = entry.func_cname
entry = copied_node.entry
type = entry.type
break
else:
new_cfunc_entries.append(entry)
copied_node.type = type copied_node.type = type
entry.type, type.entry = type, entry entry.type, type.entry = type, entry
...@@ -165,9 +176,6 @@ class FusedCFuncDefNode(StatListNode): ...@@ -165,9 +176,6 @@ class FusedCFuncDefNode(StatListNode):
self._specialize_function_args(copied_node.cfunc_declarator.args, self._specialize_function_args(copied_node.cfunc_declarator.args,
fused_to_specific) fused_to_specific)
type.specialize_entry(entry, cname)
env.cfunc_entries.append(entry)
# If a cpdef, declare all specialized cpdefs (this # If a cpdef, declare all specialized cpdefs (this
# also calls analyse_declarations) # also calls analyse_declarations)
copied_node.declare_cpdef_wrapper(env) copied_node.declare_cpdef_wrapper(env)
...@@ -181,6 +189,14 @@ class FusedCFuncDefNode(StatListNode): ...@@ -181,6 +189,14 @@ class FusedCFuncDefNode(StatListNode):
if not self.replace_fused_typechecks(copied_node): if not self.replace_fused_typechecks(copied_node):
break break
# replace old entry with new entries
try:
cindex = env.cfunc_entries.index(self.node.entry)
except ValueError:
env.cfunc_entries.extend(new_cfunc_entries)
else:
env.cfunc_entries[cindex:cindex+1] = new_cfunc_entries
if orig_py_func: if orig_py_func:
self.py_func = self.make_fused_cpdef(orig_py_func, env, self.py_func = self.make_fused_cpdef(orig_py_func, env,
is_def=False) is_def=False)
...@@ -209,7 +225,7 @@ class FusedCFuncDefNode(StatListNode): ...@@ -209,7 +225,7 @@ class FusedCFuncDefNode(StatListNode):
""" """
Create a new local scope for the copied node and append it to Create a new local scope for the copied node and append it to
self.nodes. A new local scope is needed because the arguments with the self.nodes. A new local scope is needed because the arguments with the
fused types are aready in the local scope, and we need the specialized fused types are already in the local scope, and we need the specialized
entries created after analyse_declarations on each specialized version entries created after analyse_declarations on each specialized version
of the (CFunc)DefNode. of the (CFunc)DefNode.
f2s is a dict mapping each fused type to its specialized version f2s is a dict mapping each fused type to its specialized version
...@@ -422,7 +438,7 @@ class FusedCFuncDefNode(StatListNode): ...@@ -422,7 +438,7 @@ class FusedCFuncDefNode(StatListNode):
if dtype is not None: if dtype is not None:
itemsize = dtype.itemsize itemsize = dtype.itemsize
kind = ord(dtype.kind) kind = ord(dtype.kind)
# We only support the endianess of the current compiler # We only support the endianness of the current compiler
byteorder = dtype.byteorder byteorder = dtype.byteorder
if byteorder == "<" and not __Pyx_Is_Little_Endian(): if byteorder == "<" and not __Pyx_Is_Little_Endian():
arg_is_pythran_compatible = False arg_is_pythran_compatible = False
......
...@@ -18,12 +18,12 @@ try: ...@@ -18,12 +18,12 @@ try:
except ImportError: except ImportError:
basestring = str basestring = str
from . import Errors
# Do not import Parsing here, import it when needed, because Parsing imports # Do not import Parsing here, import it when needed, because Parsing imports
# Nodes, which globally needs debug command line options initialized to set a # Nodes, which globally needs debug command line options initialized to set a
# conditional metaclass. These options are processed by CmdLine called from # conditional metaclass. These options are processed by CmdLine called from
# main() in this file. # main() in this file.
# import Parsing # import Parsing
from . import Errors
from .StringEncoding import EncodedString from .StringEncoding import EncodedString
from .Scanning import PyrexScanner, FileSourceDescriptor from .Scanning import PyrexScanner, FileSourceDescriptor
from .Errors import PyrexError, CompileError, error, warning from .Errors import PyrexError, CompileError, error, warning
...@@ -38,6 +38,7 @@ module_name_pattern = re.compile(r"[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_ ...@@ -38,6 +38,7 @@ module_name_pattern = re.compile(r"[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_
verbose = 0 verbose = 0
class CompilationData(object): class CompilationData(object):
# Bundles the information that is passed from transform to transform. # Bundles the information that is passed from transform to transform.
# (For now, this is only) # (For now, this is only)
...@@ -52,6 +53,7 @@ class CompilationData(object): ...@@ -52,6 +53,7 @@ class CompilationData(object):
# result CompilationResult # result CompilationResult
pass pass
class Context(object): class Context(object):
# This class encapsulates the context needed for compiling # This class encapsulates the context needed for compiling
# one or more Cython implementation files along with their # one or more Cython implementation files along with their
...@@ -239,7 +241,7 @@ class Context(object): ...@@ -239,7 +241,7 @@ class Context(object):
pxd = self.search_include_directories(qualified_name, ".pxd", pos, sys_path=sys_path) pxd = self.search_include_directories(qualified_name, ".pxd", pos, sys_path=sys_path)
if pxd is None: # XXX Keep this until Includes/Deprecated is removed if pxd is None: # XXX Keep this until Includes/Deprecated is removed
if (qualified_name.startswith('python') or if (qualified_name.startswith('python') or
qualified_name in ('stdlib', 'stdio', 'stl')): qualified_name in ('stdlib', 'stdio', 'stl')):
standard_include_path = os.path.abspath(os.path.normpath( standard_include_path = os.path.abspath(os.path.normpath(
os.path.join(os.path.dirname(__file__), os.path.pardir, 'Includes'))) os.path.join(os.path.dirname(__file__), os.path.pardir, 'Includes')))
deprecated_include_path = os.path.join(standard_include_path, 'Deprecated') deprecated_include_path = os.path.join(standard_include_path, 'Deprecated')
...@@ -356,7 +358,7 @@ class Context(object): ...@@ -356,7 +358,7 @@ class Context(object):
from ..Parser import ConcreteSyntaxTree from ..Parser import ConcreteSyntaxTree
except ImportError: except ImportError:
raise RuntimeError( raise RuntimeError(
"Formal grammer can only be used with compiled Cython with an available pgen.") "Formal grammar can only be used with compiled Cython with an available pgen.")
ConcreteSyntaxTree.p_module(source_filename) ConcreteSyntaxTree.p_module(source_filename)
except UnicodeDecodeError as e: except UnicodeDecodeError as e:
#import traceback #import traceback
...@@ -426,6 +428,7 @@ class Context(object): ...@@ -426,6 +428,7 @@ class Context(object):
pass pass
result.c_file = None result.c_file = None
def get_output_filename(source_filename, cwd, options): def get_output_filename(source_filename, cwd, options):
if options.cplus: if options.cplus:
c_suffix = ".cpp" c_suffix = ".cpp"
...@@ -441,6 +444,7 @@ def get_output_filename(source_filename, cwd, options): ...@@ -441,6 +444,7 @@ def get_output_filename(source_filename, cwd, options):
else: else:
return suggested_file_name return suggested_file_name
def create_default_resultobj(compilation_source, options): def create_default_resultobj(compilation_source, options):
result = CompilationResult() result = CompilationResult()
result.main_source_file = compilation_source.source_desc.filename result.main_source_file = compilation_source.source_desc.filename
...@@ -451,6 +455,7 @@ def create_default_resultobj(compilation_source, options): ...@@ -451,6 +455,7 @@ def create_default_resultobj(compilation_source, options):
result.embedded_metadata = options.embedded_metadata result.embedded_metadata = options.embedded_metadata
return result return result
def run_pipeline(source, options, full_module_name=None, context=None): def run_pipeline(source, options, full_module_name=None, context=None):
from . import Pipeline from . import Pipeline
...@@ -496,11 +501,11 @@ def run_pipeline(source, options, full_module_name=None, context=None): ...@@ -496,11 +501,11 @@ def run_pipeline(source, options, full_module_name=None, context=None):
return result return result
#------------------------------------------------------------------------ # ------------------------------------------------------------------------
# #
# Main Python entry points # Main Python entry points
# #
#------------------------------------------------------------------------ # ------------------------------------------------------------------------
class CompilationSource(object): class CompilationSource(object):
""" """
...@@ -512,6 +517,7 @@ class CompilationSource(object): ...@@ -512,6 +517,7 @@ class CompilationSource(object):
self.full_module_name = full_module_name self.full_module_name = full_module_name
self.cwd = cwd self.cwd = cwd
class CompilationOptions(object): class CompilationOptions(object):
""" """
Options to the Cython compiler: Options to the Cython compiler:
...@@ -678,13 +684,14 @@ def compile_multiple(sources, options): ...@@ -678,13 +684,14 @@ def compile_multiple(sources, options):
processed.add(source) processed.add(source)
return results return results
def compile(source, options = None, full_module_name = None, **kwds): def compile(source, options = None, full_module_name = None, **kwds):
""" """
compile(source [, options], [, <option> = <value>]...) compile(source [, options], [, <option> = <value>]...)
Compile one or more Pyrex implementation files, with optional timestamp Compile one or more Pyrex implementation files, with optional timestamp
checking and recursing on dependecies. The source argument may be a string checking and recursing on dependencies. The source argument may be a string
or a sequence of strings If it is a string and no recursion or timestamp or a sequence of strings. If it is a string and no recursion or timestamp
checking is requested, a CompilationResult is returned, otherwise a checking is requested, a CompilationResult is returned, otherwise a
CompilationResultSet is returned. CompilationResultSet is returned.
""" """
...@@ -694,14 +701,17 @@ def compile(source, options = None, full_module_name = None, **kwds): ...@@ -694,14 +701,17 @@ def compile(source, options = None, full_module_name = None, **kwds):
else: else:
return compile_multiple(source, options) return compile_multiple(source, options)
#------------------------------------------------------------------------
# ------------------------------------------------------------------------
# #
# Main command-line entry point # Main command-line entry point
# #
#------------------------------------------------------------------------ # ------------------------------------------------------------------------
def setuptools_main(): def setuptools_main():
return main(command_line = 1) return main(command_line = 1)
def main(command_line = 0): def main(command_line = 0):
args = sys.argv[1:] args = sys.argv[1:]
any_failures = 0 any_failures = 0
...@@ -727,12 +737,11 @@ def main(command_line = 0): ...@@ -727,12 +737,11 @@ def main(command_line = 0):
sys.exit(1) sys.exit(1)
# ------------------------------------------------------------------------
#------------------------------------------------------------------------
# #
# Set the default options depending on the platform # Set the default options depending on the platform
# #
#------------------------------------------------------------------------ # ------------------------------------------------------------------------
default_options = dict( default_options = dict(
show_version = 0, show_version = 0,
......
...@@ -390,19 +390,15 @@ def get_memoryview_flag(access, packing): ...@@ -390,19 +390,15 @@ def get_memoryview_flag(access, packing):
return 'contiguous' return 'contiguous'
def get_is_contig_func_name(c_or_f, ndim): def get_is_contig_func_name(contig_type, ndim):
return "__pyx_memviewslice_is_%s_contig%d" % (c_or_f, ndim) assert contig_type in ('C', 'F')
return "__pyx_memviewslice_is_contig_%s%d" % (contig_type, ndim)
def get_is_contig_utility(c_contig, ndim): def get_is_contig_utility(contig_type, ndim):
C = dict(context, ndim=ndim) assert contig_type in ('C', 'F')
if c_contig: C = dict(context, ndim=ndim, contig_type=contig_type)
utility = load_memview_c_utility("MemviewSliceIsCContig", C, utility = load_memview_c_utility("MemviewSliceCheckContig", C, requires=[is_contig_utility])
requires=[is_contig_utility])
else:
utility = load_memview_c_utility("MemviewSliceIsFContig", C,
requires=[is_contig_utility])
return utility return utility
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
...@@ -15,6 +15,7 @@ from . import ExprNodes ...@@ -15,6 +15,7 @@ from . import ExprNodes
from . import Nodes from . import Nodes
from . import Options from . import Options
from . import Builtin from . import Builtin
from . import Errors
from .Visitor import VisitorTransform, TreeVisitor from .Visitor import VisitorTransform, TreeVisitor
from .Visitor import CythonTransform, EnvTransform, ScopeTrackingTransform from .Visitor import CythonTransform, EnvTransform, ScopeTrackingTransform
...@@ -632,7 +633,7 @@ class TrackNumpyAttributes(VisitorTransform, SkipDeclarations): ...@@ -632,7 +633,7 @@ class TrackNumpyAttributes(VisitorTransform, SkipDeclarations):
visit_Node = VisitorTransform.recurse_to_children visit_Node = VisitorTransform.recurse_to_children
class InterpretCompilerDirectives(CythonTransform, SkipDeclarations): class InterpretCompilerDirectives(CythonTransform):
""" """
After parsing, directives can be stored in a number of places: After parsing, directives can be stored in a number of places:
- #cython-comments at the top of the file (stored in ModuleNode) - #cython-comments at the top of the file (stored in ModuleNode)
...@@ -857,6 +858,11 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations): ...@@ -857,6 +858,11 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
node.cython_attribute = directive node.cython_attribute = directive
return node return node
def visit_NewExprNode(self, node):
self.visit(node.cppclass)
self.visitchildren(node)
return node
def try_to_parse_directives(self, node): def try_to_parse_directives(self, node):
# If node is the contents of an directive (in a with statement or # If node is the contents of an directive (in a with statement or
# decorator), returns a list of (directivename, value) pairs. # decorator), returns a list of (directivename, value) pairs.
...@@ -987,7 +993,7 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations): ...@@ -987,7 +993,7 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
def visit_CVarDefNode(self, node): def visit_CVarDefNode(self, node):
directives = self._extract_directives(node, 'function') directives = self._extract_directives(node, 'function')
if not directives: if not directives:
return node return self.visit_Node(node)
for name, value in directives.items(): for name, value in directives.items():
if name == 'locals': if name == 'locals':
node.directive_locals = value node.directive_locals = value
...@@ -1027,7 +1033,8 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations): ...@@ -1027,7 +1033,8 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
directives = [] directives = []
realdecs = [] realdecs = []
both = [] both = []
for dec in node.decorators: # Decorators coming first take precedence.
for dec in node.decorators[::-1]:
new_directives = self.try_to_parse_directives(dec.decorator) new_directives = self.try_to_parse_directives(dec.decorator)
if new_directives is not None: if new_directives is not None:
for directive in new_directives: for directive in new_directives:
...@@ -1037,15 +1044,17 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations): ...@@ -1037,15 +1044,17 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
directives.append(directive) directives.append(directive)
if directive[0] == 'staticmethod': if directive[0] == 'staticmethod':
both.append(dec) both.append(dec)
# Adapt scope type based on decorators that change it.
if directive[0] == 'cclass' and scope_name == 'class':
scope_name = 'cclass'
else: else:
realdecs.append(dec) realdecs.append(dec)
if realdecs and isinstance(node, (Nodes.CFuncDefNode, Nodes.CClassDefNode, Nodes.CVarDefNode)): if realdecs and (scope_name == 'cclass' or
isinstance(node, (Nodes.CFuncDefNode, Nodes.CClassDefNode, Nodes.CVarDefNode))):
raise PostParseError(realdecs[0].pos, "Cdef functions/classes cannot take arbitrary decorators.") raise PostParseError(realdecs[0].pos, "Cdef functions/classes cannot take arbitrary decorators.")
else: node.decorators = realdecs[::-1] + both[::-1]
node.decorators = realdecs + both
# merge or override repeated directives # merge or override repeated directives
optdict = {} optdict = {}
directives.reverse() # Decorators coming first take precedence
for directive in directives: for directive in directives:
name, value = directive name, value = directive
if name in optdict: if name in optdict:
...@@ -1292,7 +1301,7 @@ class WithTransform(CythonTransform, SkipDeclarations): ...@@ -1292,7 +1301,7 @@ class WithTransform(CythonTransform, SkipDeclarations):
pos, with_stat=node, pos, with_stat=node,
test_if_run=False, test_if_run=False,
args=excinfo_target, args=excinfo_target,
await=ExprNodes.AwaitExprNode(pos, arg=None) if is_async else None)), await_expr=ExprNodes.AwaitExprNode(pos, arg=None) if is_async else None)),
body=Nodes.ReraiseStatNode(pos), body=Nodes.ReraiseStatNode(pos),
), ),
], ],
...@@ -1314,7 +1323,7 @@ class WithTransform(CythonTransform, SkipDeclarations): ...@@ -1314,7 +1323,7 @@ class WithTransform(CythonTransform, SkipDeclarations):
test_if_run=True, test_if_run=True,
args=ExprNodes.TupleNode( args=ExprNodes.TupleNode(
pos, args=[ExprNodes.NoneNode(pos) for _ in range(3)]), pos, args=[ExprNodes.NoneNode(pos) for _ in range(3)]),
await=ExprNodes.AwaitExprNode(pos, arg=None) if is_async else None)), await_expr=ExprNodes.AwaitExprNode(pos, arg=None) if is_async else None)),
handle_error_case=False, handle_error_case=False,
) )
return node return node
...@@ -1385,10 +1394,15 @@ class DecoratorTransform(ScopeTrackingTransform, SkipDeclarations): ...@@ -1385,10 +1394,15 @@ class DecoratorTransform(ScopeTrackingTransform, SkipDeclarations):
elif decorator.is_attribute and decorator.obj.name in properties: elif decorator.is_attribute and decorator.obj.name in properties:
handler_name = self._map_property_attribute(decorator.attribute) handler_name = self._map_property_attribute(decorator.attribute)
if handler_name: if handler_name:
assert decorator.obj.name == node.name if decorator.obj.name != node.name:
if len(node.decorators) > 1: # CPython does not generate an error or warning, but not something useful either.
error(decorator_node.pos,
"Mismatching property names, expected '%s', got '%s'" % (
decorator.obj.name, node.name))
elif len(node.decorators) > 1:
return self._reject_decorated_property(node, decorator_node) return self._reject_decorated_property(node, decorator_node)
return self._add_to_property(properties, node, handler_name, decorator_node) else:
return self._add_to_property(properties, node, handler_name, decorator_node)
# we clear node.decorators, so we need to set the # we clear node.decorators, so we need to set the
# is_staticmethod/is_classmethod attributes now # is_staticmethod/is_classmethod attributes now
...@@ -1535,6 +1549,13 @@ class ForwardDeclareTypes(CythonTransform): ...@@ -1535,6 +1549,13 @@ class ForwardDeclareTypes(CythonTransform):
def visit_CClassDefNode(self, node): def visit_CClassDefNode(self, node):
if node.class_name not in self.module_scope.entries: if node.class_name not in self.module_scope.entries:
node.declare(self.module_scope) node.declare(self.module_scope)
# Expand fused methods of .pxd declared types to construct the final vtable order.
type = self.module_scope.entries[node.class_name].type
if type is not None and type.is_extension_type and not type.is_builtin_type and type.scope:
scope = type.scope
for entry in scope.cfunc_entries:
if entry.type and entry.type.is_fused:
entry.type.get_all_specialized_function_types()
return node return node
...@@ -1859,7 +1880,7 @@ if VALUE is not None: ...@@ -1859,7 +1880,7 @@ if VALUE is not None:
def visit_FuncDefNode(self, node): def visit_FuncDefNode(self, node):
""" """
Analyse a function and its body, as that hasn't happend yet. Also Analyse a function and its body, as that hasn't happened yet. Also
analyse the directive_locals set by @cython.locals(). analyse the directive_locals set by @cython.locals().
Then, if we are a function with fused arguments, replace the function Then, if we are a function with fused arguments, replace the function
...@@ -1922,6 +1943,8 @@ if VALUE is not None: ...@@ -1922,6 +1943,8 @@ if VALUE is not None:
binding = self.current_directives.get('binding') binding = self.current_directives.get('binding')
rhs = ExprNodes.PyCFunctionNode.from_defnode(node, binding) rhs = ExprNodes.PyCFunctionNode.from_defnode(node, binding)
node.code_object = rhs.code_object node.code_object = rhs.code_object
if node.is_generator:
node.gbody.code_object = node.code_object
if env.is_py_class_scope: if env.is_py_class_scope:
rhs.binding = True rhs.binding = True
...@@ -2048,7 +2071,7 @@ if VALUE is not None: ...@@ -2048,7 +2071,7 @@ if VALUE is not None:
# Some nodes are no longer needed after declaration # Some nodes are no longer needed after declaration
# analysis and can be dropped. The analysis was performed # analysis and can be dropped. The analysis was performed
# on these nodes in a seperate recursive process from the # on these nodes in a separate recursive process from the
# enclosing function or module, so we can simply drop them. # enclosing function or module, so we can simply drop them.
def visit_CDeclaratorNode(self, node): def visit_CDeclaratorNode(self, node):
# necessary to ensure that all CNameDeclaratorNodes are visited. # necessary to ensure that all CNameDeclaratorNodes are visited.
...@@ -2573,10 +2596,27 @@ class MarkClosureVisitor(CythonTransform): ...@@ -2573,10 +2596,27 @@ class MarkClosureVisitor(CythonTransform):
collector.visitchildren(node) collector.visitchildren(node)
if node.is_async_def: if node.is_async_def:
coroutine_type = Nodes.AsyncGenNode if collector.has_yield else Nodes.AsyncDefNode coroutine_type = Nodes.AsyncDefNode
if collector.has_yield: if collector.has_yield:
coroutine_type = Nodes.AsyncGenNode
for yield_expr in collector.yields + collector.returns: for yield_expr in collector.yields + collector.returns:
yield_expr.in_async_gen = True yield_expr.in_async_gen = True
elif node.decorators:
# evaluate @asyncio.coroutine() decorator at compile time if it's the inner-most one
# TODO: better decorator validation: should come from imported module
decorator = node.decorators[-1].decorator
if decorator.is_name and decorator.name == 'coroutine':
pass
elif decorator.is_attribute and decorator.attribute == 'coroutine':
if decorator.obj.is_name and decorator.obj.name in ('types', 'asyncio'):
pass
else:
decorator = None
else:
decorator = None
if decorator is not None:
node.decorators.pop()
coroutine_type = Nodes.IterableAsyncDefNode
elif collector.has_await: elif collector.has_await:
found = next(y for y in collector.yields if y.is_await) found = next(y for y in collector.yields if y.is_await)
error(found.pos, "'await' not allowed in generators (use 'yield')") error(found.pos, "'await' not allowed in generators (use 'yield')")
...@@ -3140,8 +3180,9 @@ class ReplaceFusedTypeChecks(VisitorTransform): ...@@ -3140,8 +3180,9 @@ class ReplaceFusedTypeChecks(VisitorTransform):
return self.transform(node) return self.transform(node)
def visit_PrimaryCmpNode(self, node): def visit_PrimaryCmpNode(self, node):
type1 = node.operand1.analyse_as_type(self.local_scope) with Errors.local_errors(ignore=True):
type2 = node.operand2.analyse_as_type(self.local_scope) type1 = node.operand1.analyse_as_type(self.local_scope)
type2 = node.operand2.analyse_as_type(self.local_scope)
if type1 and type2: if type1 and type2:
false_node = ExprNodes.BoolNode(node.pos, value=False) false_node = ExprNodes.BoolNode(node.pos, value=False)
......
...@@ -2713,6 +2713,7 @@ special_basic_c_types = cython.declare(dict, { ...@@ -2713,6 +2713,7 @@ special_basic_c_types = cython.declare(dict, {
"ssize_t" : (2, 0), "ssize_t" : (2, 0),
"size_t" : (0, 0), "size_t" : (0, 0),
"ptrdiff_t" : (2, 0), "ptrdiff_t" : (2, 0),
"Py_tss_t" : (1, 0),
}) })
sign_and_longness_words = cython.declare( sign_and_longness_words = cython.declare(
...@@ -3084,9 +3085,13 @@ def p_cdef_extern_block(s, pos, ctx): ...@@ -3084,9 +3085,13 @@ def p_cdef_extern_block(s, pos, ctx):
ctx.namespace = p_string_literal(s, 'u')[2] ctx.namespace = p_string_literal(s, 'u')[2]
if p_nogil(s): if p_nogil(s):
ctx.nogil = 1 ctx.nogil = 1
body = p_suite(s, ctx)
# Use "docstring" as verbatim string to include
verbatim_include, body = p_suite_with_docstring(s, ctx, True)
return Nodes.CDefExternNode(pos, return Nodes.CDefExternNode(pos,
include_file = include_file, include_file = include_file,
verbatim_include = verbatim_include,
body = body, body = body,
namespace = ctx.namespace) namespace = ctx.namespace)
...@@ -3438,19 +3443,15 @@ def p_c_class_definition(s, pos, ctx): ...@@ -3438,19 +3443,15 @@ def p_c_class_definition(s, pos, ctx):
as_name = class_name as_name = class_name
objstruct_name = None objstruct_name = None
typeobj_name = None typeobj_name = None
base_class_module = None bases = None
base_class_name = None
if s.sy == '(': if s.sy == '(':
s.next() positional_args, keyword_args = p_call_parse_args(s, allow_genexp=False)
base_class_path = [p_ident(s)] if keyword_args:
while s.sy == '.': s.error("C classes cannot take keyword bases.")
s.next() bases, _ = p_call_build_packed_args(pos, positional_args, keyword_args)
base_class_path.append(p_ident(s)) if bases is None:
if s.sy == ',': bases = ExprNodes.TupleNode(pos, args=[])
s.error("C class may only have one base class", fatal=False)
s.expect(')')
base_class_module = ".".join(base_class_path[:-1])
base_class_name = base_class_path[-1]
if s.sy == '[': if s.sy == '[':
if ctx.visibility not in ('public', 'extern') and not ctx.api: if ctx.visibility not in ('public', 'extern') and not ctx.api:
error(s.position(), "Name options only allowed for 'public', 'api', or 'extern' C class") error(s.position(), "Name options only allowed for 'public', 'api', or 'extern' C class")
...@@ -3490,8 +3491,7 @@ def p_c_class_definition(s, pos, ctx): ...@@ -3490,8 +3491,7 @@ def p_c_class_definition(s, pos, ctx):
module_name = ".".join(module_path), module_name = ".".join(module_path),
class_name = class_name, class_name = class_name,
as_name = as_name, as_name = as_name,
base_class_module = base_class_module, bases = bases,
base_class_name = base_class_name,
objstruct_name = objstruct_name, objstruct_name = objstruct_name,
typeobj_name = typeobj_name, typeobj_name = typeobj_name,
in_pxd = ctx.level == 'module_pxd', in_pxd = ctx.level == 'module_pxd',
......
This diff is collapsed.
# cython: language_level=3
from __future__ import absolute_import from __future__ import absolute_import
from .PyrexTypes import CType, CTypedefType, CStructOrUnionType from .PyrexTypes import CType, CTypedefType, CStructOrUnionType
...@@ -58,45 +60,56 @@ def pythran_unaryop_type(op, type_): ...@@ -58,45 +60,56 @@ def pythran_unaryop_type(op, type_):
op, pythran_type(type_)) op, pythran_type(type_))
@cython.cfunc
def _index_access(index_code, indices):
indexing = ",".join([index_code(idx) for idx in indices])
return ('[%s]' if len(indices) == 1 else '(%s)') % indexing
def _index_type_code(index_with_type):
idx, index_type = index_with_type
if idx.is_slice:
if idx.step.is_none:
func = "contiguous_slice"
n = 2
else:
func = "slice"
n = 3
return "pythonic::types::%s(%s)" % (
func, ",".join(["0"]*n))
elif index_type.is_int:
return "std::declval<%s>()" % index_type.sign_and_name()
elif index_type.is_pythran_expr:
return "std::declval<%s>()" % index_type.pythran_type
raise ValueError("unsupported indexing type %s!" % index_type)
def _index_code(idx):
if idx.is_slice:
values = idx.start, idx.stop, idx.step
if idx.step.is_none:
func = "contiguous_slice"
values = values[:2]
else:
func = "slice"
return "pythonic::types::%s(%s)" % (
func, ",".join((v.pythran_result() for v in values)))
elif idx.type.is_int:
return to_pythran(idx)
elif idx.type.is_pythran_expr:
return idx.pythran_result()
raise ValueError("unsupported indexing type %s" % idx.type)
def pythran_indexing_type(type_, indices): def pythran_indexing_type(type_, indices):
def index_code(idx): return type_remove_ref("decltype(std::declval<%s>()%s)" % (
if idx.is_slice: pythran_type(type_),
if idx.step.is_none: _index_access(_index_type_code, indices),
func = "contiguous_slice" ))
n = 2
else:
func = "slice"
n = 3
return "pythonic::types::%s(%s)" % (
func, ",".join(["0"]*n))
elif idx.type.is_int:
return "std::declval<%s>()" % idx.type.sign_and_name()
elif idx.type.is_pythran_expr:
return "std::declval<%s>()" % idx.type.pythran_type
raise ValueError("unsupported indexing type %s!" % idx.type)
indexing = ",".join(index_code(idx) for idx in indices)
return type_remove_ref("decltype(std::declval<%s>()[%s])" % (pythran_type(type_), indexing))
def pythran_indexing_code(indices): def pythran_indexing_code(indices):
def index_code(idx): return _index_access(_index_code, indices)
if idx.is_slice:
values = idx.start, idx.stop, idx.step
if idx.step.is_none:
func = "contiguous_slice"
values = values[:2]
else:
func = "slice"
return "pythonic::types::%s(%s)" % (
func, ",".join((v.pythran_result() for v in values)))
elif idx.type.is_int:
return to_pythran(idx)
elif idx.type.is_pythran_expr:
return idx.pythran_result()
raise ValueError("unsupported indexing type %s" % idx.type)
return ",".join(index_code(idx) for idx in indices)
def pythran_func_type(func, args): def pythran_func_type(func, args):
......
This diff is collapsed.
...@@ -191,6 +191,14 @@ def bytes_literal(s, encoding): ...@@ -191,6 +191,14 @@ def bytes_literal(s, encoding):
return s return s
def encoded_string(s, encoding):
    """Wrap *s* (text or bytes) in an ``EncodedString``.

    When *encoding* is not None it is recorded on the wrapper's
    ``encoding`` attribute; otherwise the wrapper keeps its default.
    """
    assert isinstance(s, (_unicode, bytes))
    wrapped = EncodedString(s)
    if encoding is not None:
        wrapped.encoding = encoding
    return wrapped
char_from_escape_sequence = { char_from_escape_sequence = {
r'\a' : u'\a', r'\a' : u'\a',
r'\b' : u'\b', r'\b' : u'\b',
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
# #
# Nodes used as utilities and support for transforms etc. # Nodes used as utilities and support for transforms etc.
# These often make up sets including both Nodes and ExprNodes # These often make up sets including both Nodes and ExprNodes
# so it is convenient to have them in a seperate module. # so it is convenient to have them in a separate module.
# #
from __future__ import absolute_import from __future__ import absolute_import
...@@ -267,6 +267,9 @@ class EvalWithTempExprNode(ExprNodes.ExprNode, LetNodeMixin): ...@@ -267,6 +267,9 @@ class EvalWithTempExprNode(ExprNodes.ExprNode, LetNodeMixin):
def infer_type(self, env): def infer_type(self, env):
return self.subexpression.infer_type(env) return self.subexpression.infer_type(env)
def may_be_none(self):
    # Pure delegation: this node can evaluate to None exactly when its
    # wrapped subexpression can.
    return self.subexpression.may_be_none()
def result(self): def result(self):
return self.subexpression.result() return self.subexpression.result()
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
.. include:: ../../CHANGES.rst
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment