Commit 5d2bcb8d authored by Jason Madden, committed by GitHub

Merge pull request #1050 from gevent/pypy-appveyor

Use setuptools to build libuv, and enable it on Windows (especially for PyPy)
parents fc34e2ff 8fba2912
......@@ -101,8 +101,7 @@
- Add initial *experimental* support for using libuv as a backend
instead of libev, controlled by setting the environment variable
``GEVENT_CORE_CFFI_ONLY=libuv`` before importing gevent. This only
works on POSIX systems and it still suffers a number of limitations
compared to libev, notably:
suffers a number of limitations compared to libev, notably:
- Timers (such as ``gevent.sleep`` and ``gevent.Timeout``) only
support a resolution of 1ms. Attempting to use something smaller
......@@ -133,8 +132,9 @@
than either of the libev implementations (cython or CFFI), so
pressure on the garbage collector will be higher.
- The build system may not be correctly producing embedded static
libraries, at least on OS X. libuv 1.18 or higher is required.
- The build system does not support using a system libuv; the
embedded copy must be used. Using setuptools to compile libuv was
the most portable method found.
- If anything unexpected happens, libuv likes to ``abort()`` the
entire process instead of reporting an error. For example, closing
......@@ -142,7 +142,7 @@
process to be exited.
Feedback and pull requests are welcome, especially to address the
issues mentioned above and for Windows support.
issues mentioned above.
Again, this is extremely experimental and all of it is subject to
change.
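A minimal usage sketch (an assumption drawn from the entry above: the
variable must be set before gevent is first imported, and ``gevent.core``
mirrors whichever backend was selected, so printing its ``loop`` class is
just a convenient way to confirm the choice)::

    import os
    os.environ.setdefault('GEVENT_CORE_CFFI_ONLY', 'libuv')

    import gevent  # the backend is chosen at import time
    from gevent import core
    print(core.loop)  # expect the libuv-based loop class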
......
......@@ -117,8 +117,8 @@ PY27=$(BUILD_RUNTIMES)/snakepit/python2.7.14
PY34=$(BUILD_RUNTIMES)/snakepit/python3.4.7
PY35=$(BUILD_RUNTIMES)/snakepit/python3.5.4
PY36=$(BUILD_RUNTIMES)/snakepit/python3.6.2
PYPY=$(BUILD_RUNTIMES)/snakepit/pypy580
PYPY3=$(BUILD_RUNTIMES)/snakepit/pypy3.5_580
PYPY=$(BUILD_RUNTIMES)/snakepit/pypy590
PYPY3=$(BUILD_RUNTIMES)/snakepit/pypy3.5_590
TOOLS=$(BUILD_RUNTIMES)/tools
......@@ -151,20 +151,18 @@ $(PYPY):
$(PYPY3):
scripts/install.sh pypy3
PIP?=$(BUILD_RUNTIMES)/versions/$(PYTHON)/bin/pip
develop:
ls -l $(BUILD_RUNTIMES)/snakepit/
echo pip is at `which $(PIP)`
echo python is at `which $(PYTHON)`
# First install a newer pip so that it can use the wheel cache
# (only needed until travis upgrades pip to 7.x; note that the 3.5
# environment uses pip 7.1 by default)
${PIP} install -U pip setuptools
python -m pip install -U pip setuptools
# Then start installing our deps so they can be cached. Note that use of --build-options / --global-options / --install-options
# disables the cache.
# We need wheel>=0.26 on Python 3.5. See previous revisions.
${PIP} install -U -r dev-requirements.txt
python -m pip install -U -r dev-requirements.txt
lint-py27: $(PY27)
PYTHON=python2.7.14 PATH=$(BUILD_RUNTIMES)/versions/python2.7.14/bin:$(PATH) make develop travis_test_linters
......@@ -177,22 +175,23 @@ test-py278: $(PY278)
PYTHON=python2.7.8 PATH=$(BUILD_RUNTIMES)/versions/python2.7.8/bin:$(PATH) make develop toxtest
test-py34: $(PY34)
PYTHON=python3.4.7 PIP=pip PATH=$(BUILD_RUNTIMES)/versions/python3.4.7/bin:$(PATH) make develop toxtest
PYTHON=python3.4.7 PATH=$(BUILD_RUNTIMES)/versions/python3.4.7/bin:$(PATH) make develop toxtest
test-py35: $(PY35)
PYTHON=python3.5.4 PIP=pip PATH=$(BUILD_RUNTIMES)/versions/python3.5.4/bin:$(PATH) make develop fulltoxtest
PYTHON=python3.5.4 PATH=$(BUILD_RUNTIMES)/versions/python3.5.4/bin:$(PATH) make develop fulltoxtest
test-py36: $(PY36)
PYTHON=python3.6.2 PIP=pip PATH=$(BUILD_RUNTIMES)/versions/python3.6.2/bin:$(PATH) make develop toxtest
PYTHON=python3.6.2 PATH=$(BUILD_RUNTIMES)/versions/python3.6.2/bin:$(PATH) make develop toxtest
test-py36-libuv: $(PY36)
GEVENT_CORE_CFFI_ONLY=libuv make test-py36
test-pypy: $(PYPY)
PYTHON=$(PYPY) PIP=pip PATH=$(BUILD_RUNTIMES)/versions/pypy580/bin:$(PATH) make develop toxtest
ls $(BUILD_RUNTIMES)/versions/pypy590/bin/
PYTHON=$(PYPY) PATH=$(BUILD_RUNTIMES)/versions/pypy590/bin:$(PATH) make develop toxtest
test-pypy3: $(PYPY3)
PYTHON=$(PYPY3) PIP=pip PATH=$(BUILD_RUNTIMES)/versions/pypy3.5_580/bin:$(PATH) make develop toxtest
PYTHON=$(PYPY3) PATH=$(BUILD_RUNTIMES)/versions/pypy3.5_590/bin:$(PATH) make develop toxtest
test-py27-cffi: $(PY27)
GEVENT_CORE_CFFI_ONLY=1 PYTHON=python2.7.14 PATH=$(BUILD_RUNTIMES)/versions/python2.7.14/bin:$(PATH) make develop toxtest
......
# -*- coding: utf-8 -*-
"""
libuv build utilities.
"""
from __future__ import print_function, absolute_import, division
import ast
import os
import platform
import subprocess
import sys
from _setuputils import WIN
from _setuputils import DEFINE_MACROS
from _setuputils import Extension
from _setuputils import dep_abspath
from _setuputils import system
from _setuputils import glob_many
from _setuputils import should_embed
from _setuputils import _parse_environ
from distutils import log # pylint:disable=no-name-in-module
from distutils.errors import DistutilsError # pylint: disable=no-name-in-module,import-error
# Inspired by code from https://github.com/saghul/pyuv
LIBUV_EMBED = should_embed('libuv')
if WIN and not LIBUV_EMBED:
raise DistutilsError('using a system provided libuv is unsupported on Windows')
LIBUV_INCLUDE_DIR = dep_abspath('libuv', 'include')
# Set this to force dynamic linking of libuv, even when building the
# embedded copy. This is most useful when upgrading/changing libuv
# in place.
LIBUV_DYNAMIC_EMBED = _parse_environ("GEVENT_LIBUV_DYNAMIC_EMBED")
if LIBUV_DYNAMIC_EMBED is None:
# Not set in the environment. Are we developing?
# This is convenient for my workflow
# XXX Is there a better way to get this?
# This probably doesn't work for 'pip install -e .'
# or dev-requirements.txt
if 'develop' in sys.argv:
LIBUV_DYNAMIC_EMBED = LIBUV_EMBED
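# A hedged usage sketch (shell, not part of this module): to exercise the
# flag above while changing the embedded libuv in place, something like
#
#     GEVENT_LIBUV_DYNAMIC_EMBED=1 python setup.py develop
#
# is assumed to be the intended workflow; _parse_environ is assumed to
# treat any set, truthy value as enabling the flag.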
LIBUV_LIBRARIES = []
if sys.platform.startswith('linux'):
LIBUV_LIBRARIES.append('rt')
elif WIN:
LIBUV_LIBRARIES.append('advapi32')
LIBUV_LIBRARIES.append('iphlpapi')
LIBUV_LIBRARIES.append('psapi')
LIBUV_LIBRARIES.append('shell32')
LIBUV_LIBRARIES.append('userenv')
LIBUV_LIBRARIES.append('ws2_32')
elif sys.platform.startswith('freebsd'):
LIBUV_LIBRARIES.append('kvm')
if not LIBUV_EMBED or LIBUV_DYNAMIC_EMBED:
LIBUV_LIBRARIES.append('uv')
def prepare_windows_env(env):
env.pop('VS140COMNTOOLS', None)
env.pop('VS120COMNTOOLS', None)
env.pop('VS110COMNTOOLS', None)
if sys.version_info < (3, 3):
env.pop('VS100COMNTOOLS', None)
env['GYP_MSVS_VERSION'] = '2008'
else:
env['GYP_MSVS_VERSION'] = '2010'
if not env.get('PYTHON', '').endswith('.exe'):
env.pop('PYTHON', None)
if env.get('PYTHON'):
log.info("Using python from env %s", env['PYTHON'])
return # Already manually set by user.
if sys.version_info[:2] == (2, 7):
env['PYTHON'] = sys.executable
return # The current executable is fine.
# Check whether `python` on PATH is the right one. If we executed
# `python` directly, the current executable might be used, so we
# delegate this to cmd.
cmd = ['cmd.exe', '/C', 'python', '-c', 'import sys; '
'v = str(sys.version_info[:2]); sys.stdout.write(v); '
'sys.stdout.flush()']
try:
sub = subprocess.Popen(cmd, stdout=subprocess.PIPE)
stdout, _ = sub.communicate()
version = ast.literal_eval(stdout.decode(sys.stdout.encoding).strip()) # pylint:disable=no-member
if version == (2, 7):
return # Python on PATH is fine
except OSError:
pass
# Check default install locations
path = os.path.join('%SYSTEMDRIVE%', 'Python27', 'python.exe')
path = os.path.expandvars(path)
if os.path.isfile(path):
log.info('Using "%s" to build libuv...' % path)
env['PYTHON'] = path
# Things needed for run_with_env.cmd
# What we're building for, not what we're building with
# (because it's just the C library)
env['PYTHON_VERSION'] = str(sys.version_info[0]) + '.' + str(sys.version_info[1]) + ".x"
# XXX: Just guessing here. Is PYTHON_ARCH correct?
if 'PYTHON_ARCH' not in env:
env['PYTHON_ARCH'] = '64' if platform.architecture()[0] == '64bit' else '32'
from distutils.msvc9compiler import query_vcvarsall # pylint:disable=import-error,no-name-in-module
if sys.version_info[:2] >= (3, 5):
version = 14
else:
version = 9 # for 2.7, but probably not right for 3.4?
env.update(query_vcvarsall(version))
else:
raise DistutilsError('No appropriate Python version found. An '
'installation of 2.7 is required to '
'build libuv. You can set the environment '
'variable "PYTHON" to point to a custom '
'installation location.')
# This is a dummy extension that serves to let us hook into
# when we need to compile libuv
LIBUV = Extension(name='gevent.libuv._libuv',
sources=['src/gevent/libuv/_libuv.c'],
include_dirs=[LIBUV_INCLUDE_DIR],
libraries=LIBUV_LIBRARIES,
define_macros=list(DEFINE_MACROS),
depends=glob_many('deps/libuv/src/*.[ch]'))
if LIBUV_EMBED:
libuv_dir = dep_abspath('libuv')
if WIN:
libuv_library_dir = os.path.join(libuv_dir, 'Release', 'lib')
libuv_lib = os.path.join(libuv_library_dir, 'libuv.lib')
LIBUV.extra_link_args.extend(['/NODEFAULTLIB:libcmt', '/LTCG'])
LIBUV.extra_objects.append(libuv_lib)
else:
libuv_library_dir = os.path.join(libuv_dir, '.libs')
libuv_lib = os.path.join(libuv_library_dir, 'libuv.a')
if not LIBUV_DYNAMIC_EMBED:
LIBUV.extra_objects.append(libuv_lib)
else:
LIBUV.library_dirs.append(libuv_library_dir)
LIBUV.extra_link_args.extend(["-Wl,-rpath", libuv_library_dir])
def configure_libuv(_bext, _ext):
def build_libuv():
cflags = '-fPIC'
env = os.environ.copy()
env['CFLAGS'] = ' '.join(x for x in (cflags,
env.get('CFLAGS', None),
env.get('ARCHFLAGS', None))
if x)
# Since we're building a static library, if link-time-optimization is requested, it
# results in failure to properly create the library archive. This goes unnoticed on
# OS X until import time because of '-undefined dynamic_lookup'. On the Raspberry
# Pi, it causes the linker to crash.
if '-flto' in env['CFLAGS']:
log.info("Removing LTO")
env['CFLAGS'] = env['CFLAGS'].replace('-flto', '')
log.info('Building libuv with cflags %s', env['CFLAGS'])
if WIN:
prepare_windows_env(env)
libuv_arch = {'32bit': 'x86', '64bit': 'x64'}[platform.architecture()[0]]
system(["cmd", "/E:ON", "/V:ON", "/C", "..\\..\\appveyor\\run_with_env.cmd",
'vcbuild.bat', libuv_arch, 'release'],
cwd=libuv_dir,
env=env,
shell=False)
else:
# autogen: requires automake and libtool installed
system(['./autogen.sh'],
cwd=libuv_dir,
env=env)
# On OS X, the linker will link to the full path
# of the library libuv *as encoded in the dylib it finds*.
# So the libdir is important and must match the actual location
# of the dynamic library if we want to dynamically link to it.
# Otherwise, we wind up linking to /usr/local/lib/libuv.dylib by
# default, which can't be found. `otool -D libuv.dylib` will show
# this name, and `otool -L src/gevent/libuv/_corecffi.so` will show
# what got linked. Note that this approach results in libuv.dylib
# apparently linking to *itself*, which is weird, but not harmful
system(['./configure', '--libdir=' + libuv_library_dir],
cwd=libuv_dir,
env=env)
system(['make'],
cwd=libuv_dir,
env=env)
if not os.path.exists(libuv_lib):
log.info('libuv needs to be compiled.')
build_libuv()
else:
log.info('No need to build libuv.')
if LIBUV_EMBED:
LIBUV.configure = configure_libuv
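# A hedged sketch of how the ``configure`` hook above is assumed to be
# consumed: setup.py builds with ConfiguringBuildExt (imported there from
# _setuputils), which is presumed to do roughly
#
#     configure = getattr(ext, 'configure', None)
#     if configure is not None:
#         configure(build_ext_cmd, ext)  # for LIBUV: configure_libuv -> build_libuv()
#
# before falling back to the normal distutils build_extension machinery.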
......@@ -10,12 +10,11 @@ environment:
# Pre-installed Python versions, which Appveyor may upgrade to
# a later point release.
# XXX: Sadly, PyPy won't link CFFI on Win32
# - PYTHON: "C:\\pypy-4.0.1-win32"
# PYTHON_ID: "pypy"
# PYTHON_EXE: pypy
# PYTHON_VERSION: "2.7.x"
# PYTHON_ARCH: "32"
- PYTHON: "C:\\pypy2-v5.9.0-win32"
PYTHON_ID: "pypy"
PYTHON_EXE: pypy
PYTHON_VERSION: "2.7.x"
PYTHON_ARCH: "32"
- PYTHON: "C:\\Python36-x64"
PYTHON_VERSION: "3.6.x" # currently 3.6.0
......@@ -66,6 +65,10 @@ environment:
# PYTHON_ARCH: "32"
# PYTHON_EXE: python
matrix:
allow_failures:
- PYTHON_ID: "pypy"
install:
# If there is a newer build queued for the same PR, cancel this one.
# The AppVeyor 'rollout builds' option is supposed to serve the same
......@@ -91,14 +94,11 @@ install:
New-Item -ItemType directory -Path "$env:PYTMP" | Out-Null;
}
if ("${env:PYTHON_ID}" -eq "pypy") {
if (!(Test-Path "${env:PYTMP}\pypy-4.0.1-win32.zip")) {
(New-Object Net.WebClient).DownloadFile('https://bitbucket.org/pypy/pypy/downloads/pypy-4.0.1-win32.zip', "${env:PYTMP}\pypy-4.0.1-win32.zip");
if (!(Test-Path "${env:PYTMP}\pypy2-v5.9.0-win32.zip")) {
(New-Object Net.WebClient).DownloadFile('https://bitbucket.org/pypy/pypy/downloads/pypy2-v5.9.0-win32.zip', "${env:PYTMP}\pypy2-v5.9.0-win32.zip");
}
7z x -y "${env:PYTMP}\pypy-4.0.1-win32.zip" -oC:\ | Out-Null;
if (!(Test-Path "${env:PYTMP}\get-pip.py")) {
(New-Object Net.WebClient).DownloadFile('https://bootstrap.pypa.io/get-pip.py', "${env:PYTMP}\get-pip.py");
}
& "${env:PYTHON}\pypy.exe" "${env:PYTMP}\get-pip.py";
7z x -y "${env:PYTMP}\pypy2-v5.9.0-win32.zip" -oC:\ | Out-Null;
& "${env:PYTHON}\pypy.exe" "-mensurepip";
}
elseif (-not(Test-Path($env:PYTHON))) {
......@@ -123,9 +123,13 @@ install:
# compiled extensions and are not provided as pre-built wheel packages,
# pip will build them from source using the MSVC compiler matching the
# target Python version and architecture
# NOTE: psutil won't install under PyPy.
- "%CMD_IN_ENV% pip install -U wheel cython greenlet psutil"
# Note that psutil won't build under PyPy on Windows.
- "%CMD_IN_ENV% pip install -U setuptools wheel cython greenlet cffi"
- ps:
if ("${env:PYTHON_ID}" -ne "pypy") {
pip install psutil | Out-Null;
}
- ps: "if(Test-Path(\"${env:PYTHON}\\bin\")) {ls ${env:PYTHON}\\bin;}"
- ps: "if(Test-Path(\"${env:PYTHON}\\Scripts\")) {ls ${env:PYTHON}\\Scripts;}"
......
......@@ -157,7 +157,9 @@ goto run
@rem Build the sln with msbuild.
:msbuild-found
msbuild uv.sln /t:%target% /p:Configuration=%config% /p:Platform="%msbuild_platform%" /clp:NoSummary;NoItemAndPropertyList;Verbosity=minimal /nologo
msbuild /ver
where msbuild
msbuild uv.sln /t:%target% /p:Configuration=%config% /p:Platform="%msbuild_platform%" /clp:NoSummary;NoItemAndPropertyList;Verbosity=minimal
if errorlevel 1 exit /b 1
:run
......
......@@ -8,8 +8,7 @@ coverage>=4.0
coveralls>=1.0
cffi
futures
# Makes tests faster, but causes issues on the old
# linux version Travis CI uses. We have a workaround.
# Makes tests faster
psutil
# For viewing README.rst (restview --long-description),
# CONTRIBUTING.rst, etc.
......
......@@ -3,19 +3,23 @@ from __future__ import print_function
import gevent
from gevent import subprocess
import sys
# run 2 jobs in parallel
p1 = subprocess.Popen(['uname'], stdout=subprocess.PIPE)
p2 = subprocess.Popen(['ls'], stdout=subprocess.PIPE)
if sys.platform.startswith("win"):
print("Unable to run on windows")
else:
# run 2 jobs in parallel
p1 = subprocess.Popen(['uname'], stdout=subprocess.PIPE)
p2 = subprocess.Popen(['ls'], stdout=subprocess.PIPE)
gevent.wait([p1, p2], timeout=2)
gevent.wait([p1, p2], timeout=2)
# print the results (if available)
if p1.poll() is not None:
print('uname: %r' % p1.stdout.read())
else:
print('uname: job is still running')
if p2.poll() is not None:
print('ls: %r' % p2.stdout.read())
else:
print('ls: job is still running')
# print the results (if available)
if p1.poll() is not None:
print('uname: %r' % p1.stdout.read())
else:
print('uname: job is still running')
if p2.poll() is not None:
print('ls: %r' % p2.stdout.read())
else:
print('ls: job is still running')
......@@ -97,10 +97,10 @@ for var in "$@"; do
install 3.6.2 python3.6.2
;;
pypy)
install pypy2.7-5.8.0 pypy580
install pypy2.7-5.9.0 pypy590
;;
pypy3)
install pypy3.5-5.8.0 pypy3.5_580
install pypy3.5-5.9.0 pypy3.5_590
;;
esac
done
......@@ -7,7 +7,7 @@ import os
from _setuputils import read
from _setuputils import read_version
from _setuputils import system
from _setuputils import PYPY, WIN, CFFI_WIN_BUILD_ANYWAY
from _setuputils import PYPY, WIN
from _setuputils import IGNORE_CFFI
from _setuputils import ConfiguringBuildExt
from _setuputils import MakeSdist
......@@ -19,15 +19,6 @@ from _setuputils import BuildFailed
from setuptools import Extension, setup
from setuptools import find_packages
if PYPY and WIN and not CFFI_WIN_BUILD_ANYWAY:
# We can't properly handle (hah!) file-descriptors and
# handle mapping on Windows/CFFI, because the file needed,
# libev_vfd.h, can't be included, linked, and used: it uses
# Python API functions, and you're not supposed to do that from
# CFFI code. Plus I could never get the libraries= line to ffi.compile()
# correct to make linking work.
raise Exception("Unable to install on PyPy/Windows")
if WIN:
# Make sure the env vars that make.cmd needs are set
if not os.environ.get('PYTHON_EXE'):
......@@ -52,7 +43,6 @@ from _setuplibev import CORE
from _setupares import ARES
from _setuplibuv import LIBUV
SEMAPHORE = Extension(name="gevent._semaphore",
sources=["src/gevent/gevent._semaphore.c"])
......@@ -68,13 +58,23 @@ EXT_MODULES = [
LOCAL,
]
cffi_modules = [
'src/gevent/libev/_corecffi_build.py:ffi',
]
LIBEV_CFFI_MODULE = 'src/gevent/libev/_corecffi_build.py:ffi'
LIBUV_CFFI_MODULE = 'src/gevent/libuv/_corecffi_build.py:ffi'
cffi_modules = []
if not WIN:
EXT_MODULES.append(LIBUV)
cffi_modules.append('src/gevent/libuv/_corecffi_build.py:ffi')
# We can't properly handle (hah!) file-descriptors and
# handle mapping on Windows/CFFI with libev, because the file needed,
# libev_vfd.h, can't be included, linked, and used: it uses
# Python API functions, and you're not supposed to do that from
# CFFI code. Plus I could never get the libraries= line to ffi.compile()
# correct to make linking work.
cffi_modules.append(
LIBEV_CFFI_MODULE
)
cffi_modules.append(LIBUV_CFFI_MODULE)
if PYPY:
install_requires = []
......@@ -124,7 +124,7 @@ if ((len(sys.argv) >= 2
'--version',
'clean',
'--long-description')))
or __name__ != '__main__'):
or __name__ != '__main__'):
_BUILDING = False
......@@ -135,7 +135,8 @@ def run_setup(ext_modules, run_make):
# to build the CFFI module. We need to configure libev
# because the CORE Extension won't.
# TODO: Generalize this.
system(libev_configure_command)
if LIBEV_CFFI_MODULE in cffi_modules and not WIN:
system(libev_configure_command)
MakeSdist.make()
......
......@@ -11,6 +11,7 @@ import sys
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] >= 3
PYPY = hasattr(sys, 'pypy_version_info')
WIN = sys.platform.startswith("win")
## Types
......@@ -21,7 +22,7 @@ if PY3:
else:
import __builtin__ # pylint:disable=import-error
string_types = __builtin__.basestring,
string_types = (__builtin__.basestring,)
text_type = __builtin__.unicode
integer_types = (int, __builtin__.long)
......
......@@ -177,7 +177,7 @@ if sys.version_info[0] >= 3:
integer_types = (int,)
else:
import __builtin__ # pylint:disable=import-error
basestring = __builtin__.basestring,
basestring = (__builtin__.basestring,)
integer_types = (int, __builtin__.long)
......
......@@ -19,8 +19,10 @@ except ImportError:
if lib == 'libuv':
from gevent.libuv import loop as _core
else:
from gevent.libev import corecffi as _core
try:
from gevent.libev import corecffi as _core
except ImportError:
from gevent.libuv import loop as _core
copy_globals(_core, globals())
......
......@@ -124,7 +124,7 @@ if sys.version_info[0] >= 3:
integer_types = (int,)
else:
import __builtin__ # pylint:disable=import-error
basestring = __builtin__.basestring,
basestring = (__builtin__.basestring,)
integer_types = (int, __builtin__.long)
......
......@@ -45,41 +45,202 @@ _cdef = _cdef.replace('#define GEVENT_UV_OS_SOCK_T int', '')
_cdef = _cdef.replace('GEVENT_ST_NLINK_T', st_nlink_type())
_cdef = _cdef.replace("GEVENT_STRUCT_DONE _;", '...;')
_cdef = _cdef.replace("GEVENT_UV_OS_SOCK_T", 'int' if not WIN else 'SOCKET')
# if sys.platform.startswith('win'):
# # We must have the vfd_open, etc, functions on
# # Windows. But on other platforms, going through
# # CFFI to just return the file-descriptor is slower
# # than just doing it in Python, so we check for and
# # workaround their absence in corecffi.py
# _cdef += """
# typedef int... vfd_socket_t;
# int vfd_open(vfd_socket_t);
# vfd_socket_t vfd_get(int);
# void vfd_free(int);
# """
# uv_os_sock_t is int on POSIX and SOCKET on Win32, but socket is
# just another name for handle, which is just another name for 'void*'
# which we will treat as an 'unsigned long' or 'unsigned long long'
# since it comes through 'fileno()' where it has been cast as an int.
_void_pointer_as_integer = 'unsigned long' if system_bits() == 32 else 'unsigned long long'
_cdef = _cdef.replace("GEVENT_UV_OS_SOCK_T", 'int' if not WIN else _void_pointer_as_integer)
setup_py_dir = os.path.abspath(os.path.join(thisdir, '..', '..', '..'))
libuv_dir = os.path.abspath(os.path.join(setup_py_dir, 'deps', 'libuv'))
sys.path.append(setup_py_dir)
include_dirs = [
LIBUV_INCLUDE_DIRS = [
thisdir, # libev_vfd.h
os.path.join(libuv_dir, 'include'),
os.path.join(libuv_dir, 'src'),
]
# Initially based on https://github.com/saghul/pyuv/blob/v1.x/setup_libuv.py
def _libuv_source(rel_path):
# Certain versions of setuptools, notably on windows, are *very*
# picky about what we feed to sources= "setup() arguments must
# *always* be /-separated paths relative to the setup.py
# directory, *never* absolute paths." POSIX doesn't have that issue.
path = os.path.join('deps', 'libuv', 'src', rel_path)
return path
LIBUV_SOURCES = [
_libuv_source('fs-poll.c'),
_libuv_source('inet.c'),
_libuv_source('threadpool.c'),
_libuv_source('uv-common.c'),
_libuv_source('version.c'),
]
from _setuplibuv import LIBUV_LIBRARIES # pylint:disable=import-error
from _setuplibuv import LIBUV # pylint:disable=import-error
if WIN:
LIBUV_SOURCES += [
_libuv_source('win/async.c'),
_libuv_source('win/core.c'),
_libuv_source('win/detect-wakeup.c'),
_libuv_source('win/dl.c'),
_libuv_source('win/error.c'),
_libuv_source('win/fs-event.c'),
_libuv_source('win/fs.c'),
# getaddrinfo.c refers to ConvertInterfaceIndexToLuid
# and ConvertInterfaceLuidToNameA, which are supposedly in iphlpapi.h
# and iphlpapi.lib/dll. But on Windows 10 with Python 3.5 and VC 14 (Visual Studio 2015),
# I get an undefined warning from the compiler for those functions and
# a link error from the linker, so this file can't be included.
# This is possibly because the functions are defined for Windows Vista, and
# Python 3.5 builds with an earlier SDK?
# Fortunately we don't use those functions.
#_libuv_source('win/getaddrinfo.c'),
# getnameinfo.c refers to uv__getaddrinfo_translate_error from
# getaddrinfo.c, which we don't have.
#_libuv_source('win/getnameinfo.c'),
_libuv_source('win/handle.c'),
_libuv_source('win/loop-watcher.c'),
_libuv_source('win/pipe.c'),
_libuv_source('win/poll.c'),
_libuv_source('win/process-stdio.c'),
_libuv_source('win/process.c'),
_libuv_source('win/req.c'),
_libuv_source('win/signal.c'),
_libuv_source('win/snprintf.c'),
_libuv_source('win/stream.c'),
_libuv_source('win/tcp.c'),
_libuv_source('win/thread.c'),
_libuv_source('win/timer.c'),
_libuv_source('win/tty.c'),
_libuv_source('win/udp.c'),
_libuv_source('win/util.c'),
_libuv_source('win/winapi.c'),
_libuv_source('win/winsock.c'),
]
else:
LIBUV_SOURCES += [
_libuv_source('unix/async.c'),
_libuv_source('unix/core.c'),
_libuv_source('unix/dl.c'),
_libuv_source('unix/fs.c'),
_libuv_source('unix/getaddrinfo.c'),
_libuv_source('unix/getnameinfo.c'),
_libuv_source('unix/loop-watcher.c'),
_libuv_source('unix/loop.c'),
_libuv_source('unix/pipe.c'),
_libuv_source('unix/poll.c'),
_libuv_source('unix/process.c'),
_libuv_source('unix/signal.c'),
_libuv_source('unix/stream.c'),
_libuv_source('unix/tcp.c'),
_libuv_source('unix/thread.c'),
_libuv_source('unix/timer.c'),
_libuv_source('unix/tty.c'),
_libuv_source('unix/udp.c'),
]
if sys.platform.startswith('linux'):
LIBUV_SOURCES += [
_libuv_source('unix/linux-core.c'),
_libuv_source('unix/linux-inotify.c'),
_libuv_source('unix/linux-syscalls.c'),
_libuv_source('unix/procfs-exepath.c'),
_libuv_source('unix/proctitle.c'),
_libuv_source('unix/sysinfo-loadavg.c'),
_libuv_source('unix/sysinfo-memory.c'),
]
elif sys.platform == 'darwin':
LIBUV_SOURCES += [
_libuv_source('unix/bsd-ifaddrs.c'),
_libuv_source('unix/darwin.c'),
_libuv_source('unix/darwin-proctitle.c'),
_libuv_source('unix/fsevents.c'),
_libuv_source('unix/kqueue.c'),
_libuv_source('unix/proctitle.c'),
]
elif sys.platform.startswith(('freebsd', 'dragonfly')):
LIBUV_SOURCES += [
_libuv_source('unix/bsd-ifaddrs.c'),
_libuv_source('unix/freebsd.c'),
_libuv_source('unix/kqueue.c'),
_libuv_source('unix/posix-hrtime.c'),
]
elif sys.platform.startswith('openbsd'):
LIBUV_SOURCES += [
_libuv_source('unix/bsd-ifaddrs.c'),
_libuv_source('unix/kqueue.c'),
_libuv_source('unix/openbsd.c'),
_libuv_source('unix/posix-hrtime.c'),
]
elif sys.platform.startswith('netbsd'):
LIBUV_SOURCES += [
_libuv_source('unix/bsd-ifaddrs.c'),
_libuv_source('unix/kqueue.c'),
_libuv_source('unix/netbsd.c'),
_libuv_source('unix/posix-hrtime.c'),
]
elif sys.platform.startswith('sunos'):
LIBUV_SOURCES += [
_libuv_source('unix/no-proctitle.c'),
_libuv_source('unix/sunos.c'),
]
LIBUV_MACROS = []
def _define_macro(name, value):
LIBUV_MACROS.append((name, value))
LIBUV_LIBRARIES = []
def _add_library(name):
LIBUV_LIBRARIES.append(name)
if sys.platform != 'win32':
_define_macro('_LARGEFILE_SOURCE', 1)
_define_macro('_FILE_OFFSET_BITS', 64)
if sys.platform.startswith('linux'):
_add_library('dl')
_add_library('rt')
elif sys.platform == 'darwin':
_define_macro('_DARWIN_USE_64_BIT_INODE', 1)
_define_macro('_DARWIN_UNLIMITED_SELECT', 1)
elif sys.platform.startswith('netbsd'):
_add_library('kvm')
elif sys.platform.startswith('sunos'):
_define_macro('__EXTENSIONS__', 1)
_define_macro('_XOPEN_SOURCE', 500)
_add_library('kstat')
_add_library('nsl')
_add_library('sendfile')
_add_library('socket')
elif WIN:
_define_macro('_GNU_SOURCE', 1)
_define_macro('WIN32', 1)
_define_macro('_CRT_SECURE_NO_DEPRECATE', 1)
_define_macro('_CRT_NONSTDC_NO_DEPRECATE', 1)
_define_macro('_WIN32_WINNT', '0x0600')
_add_library('advapi32')
_add_library('iphlpapi')
_add_library('psapi')
_add_library('shell32')
_add_library('user32')
_add_library('userenv')
_add_library('ws2_32')
ffi.cdef(_cdef)
ffi.set_source('gevent.libuv._corecffi', _source,
include_dirs=include_dirs,
library_dirs=LIBUV.library_dirs,
extra_objects=list(LIBUV.extra_objects),
extra_link_args=list(LIBUV.extra_link_args),
ffi.set_source('gevent.libuv._corecffi',
_source,
sources=LIBUV_SOURCES,
depends=LIBUV_SOURCES,
include_dirs=LIBUV_INCLUDE_DIRS,
libraries=list(LIBUV_LIBRARIES))
if __name__ == '__main__':
......
......@@ -163,6 +163,11 @@ typedef struct uv_fs_poll_s uv_fs_poll_t;
// callbacks with the same signature
// XXX: Note that these, and all callbacks, are defined to take
// a void* or handle* instead of the more specific, correct,
// value. This allows us to use the same gevent_generic_callback
// without having to do a bunch of casts everywhere. This does produce
// minor warnings when compiling the CFFI extension, though.
typedef void (*uv_close_cb)(uv_handle_t *handle);
typedef void (*uv_idle_cb)(void *handle);
typedef void (*uv_timer_cb)(void *handle);
......@@ -295,10 +300,16 @@ int uv_signal_stop(uv_signal_t *handle);
// Unix any file descriptor that would be accepted by poll(2) can be
// used.
int uv_poll_init(uv_loop_t *loop, uv_poll_t *handle, int fd);
// Initialize the handle using a socket descriptor. On Unix this is identical to uv_poll_init(). On windows it takes a SOCKET handle.
// The socket is set to non-blocking mode.
// On Windows, how to get the SOCKET type defined?
//int uv_poll_init_socket(uv_loop_t* loop, uv_poll_t* handle, GEVENT_UV_OS_SOCK_T socket);
// Initialize the handle using a socket descriptor. On Unix this is
// identical to uv_poll_init(). On windows it takes a SOCKET handle;
// SOCKET handles are another name for HANDLE objects in win32, and
// those are defined as PVOID, even though they are not actually
// pointers (they're small integers). CPython and PyPy both return
// the SOCKET (as cast to an int) from the socket.fileno() method.
// libuv uses ``uv_os_sock_t`` for this type, which is defined as an
// int on unix.
int uv_poll_init_socket(uv_loop_t* loop, uv_poll_t* handle, GEVENT_UV_OS_SOCK_T socket);
int uv_poll_start(uv_poll_t *handle, int events, uv_poll_cb cb);
int uv_poll_stop(uv_poll_t *handle);
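// A hedged illustration (not part of this cdef): the Python-side loop
// implementation is assumed to pass the value of ``sock.fileno()`` straight
// through CFFI for this call, e.g.
//
//     poll_handle = ffi.new('uv_poll_t*')        # hypothetical names
//     libuv.uv_poll_init_socket(loop_ptr, poll_handle, sock.fileno())
//
// On Windows that integer is really a SOCKET (a HANDLE value), which is why
// _corecffi_build.py widens GEVENT_UV_OS_SOCK_T to an unsigned (long) long.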
......
/* Generated by Cython 0.24 */
#define PY_SSIZE_T_CLEAN
#include "Python.h"
#ifndef Py_PYTHON_H
#error Python headers needed to compile C extensions, please install development version of Python.
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03020000)
#error Cython requires Python 2.6+ or Python 3.2+.
#else
#define CYTHON_ABI "0_24"
#include <stddef.h>
#ifndef offsetof
#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
#endif
#if !defined(WIN32) && !defined(MS_WINDOWS)
#ifndef __stdcall
#define __stdcall
#endif
#ifndef __cdecl
#define __cdecl
#endif
#ifndef __fastcall
#define __fastcall
#endif
#endif
#ifndef DL_IMPORT
#define DL_IMPORT(t) t
#endif
#ifndef DL_EXPORT
#define DL_EXPORT(t) t
#endif
#ifndef PY_LONG_LONG
#define PY_LONG_LONG LONG_LONG
#endif
#ifndef Py_HUGE_VAL
#define Py_HUGE_VAL HUGE_VAL
#endif
#ifdef PYPY_VERSION
#define CYTHON_COMPILING_IN_PYPY 1
#define CYTHON_COMPILING_IN_CPYTHON 0
#else
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_CPYTHON 1
#endif
#if !defined(CYTHON_USE_PYLONG_INTERNALS) && CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x02070000
#define CYTHON_USE_PYLONG_INTERNALS 1
#endif
#if CYTHON_USE_PYLONG_INTERNALS
#include "longintrepr.h"
#undef SHIFT
#undef BASE
#undef MASK
#endif
#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag)
#define Py_OptimizeFlag 0
#endif
#define __PYX_BUILD_PY_SSIZE_T "n"
#define CYTHON_FORMAT_SSIZE_T "z"
#if PY_MAJOR_VERSION < 3
#define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
#define __Pyx_DefaultClassType PyClass_Type
#else
#define __Pyx_BUILTIN_MODULE_NAME "builtins"
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
#define __Pyx_DefaultClassType PyType_Type
#endif
#ifndef Py_TPFLAGS_CHECKTYPES
#define Py_TPFLAGS_CHECKTYPES 0
#endif
#ifndef Py_TPFLAGS_HAVE_INDEX
#define Py_TPFLAGS_HAVE_INDEX 0
#endif
#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
#define Py_TPFLAGS_HAVE_NEWBUFFER 0
#endif
#ifndef Py_TPFLAGS_HAVE_FINALIZE
#define Py_TPFLAGS_HAVE_FINALIZE 0
#endif
#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
#define CYTHON_PEP393_ENABLED 1
#define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\
0 : _PyUnicode_Ready((PyObject *)(op)))
#define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
#define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
#define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u)
#define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
#define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
#else
#define CYTHON_PEP393_ENABLED 0
#define __Pyx_PyUnicode_READY(op) (0)
#define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u)
#define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i]))
#define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE))
#define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u))
#define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i]))
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u))
#endif
#if CYTHON_COMPILING_IN_PYPY
#define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b)
#define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b)
#else
#define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b)
#define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\
PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b))
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains)
#define PyUnicode_Contains(u, s) PySequence_Contains(u, s)
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format)
#define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt)
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
#define PyObject_Malloc(s) PyMem_Malloc(s)
#define PyObject_Free(p) PyMem_Free(p)
#define PyObject_Realloc(p) PyMem_Realloc(p)
#endif
#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b))
#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b))
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b)
#else
#define __Pyx_PyString_Format(a, b) PyString_Format(a, b)
#endif
#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII)
#define PyObject_ASCII(o) PyObject_Repr(o)
#endif
#if PY_MAJOR_VERSION >= 3
#define PyBaseString_Type PyUnicode_Type
#define PyStringObject PyUnicodeObject
#define PyString_Type PyUnicode_Type
#define PyString_Check PyUnicode_Check
#define PyString_CheckExact PyUnicode_CheckExact
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
#define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
#else
#define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj))
#define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj))
#endif
#ifndef PySet_CheckExact
#define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
#endif
#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
#if PY_MAJOR_VERSION >= 3
#define PyIntObject PyLongObject
#define PyInt_Type PyLong_Type
#define PyInt_Check(op) PyLong_Check(op)
#define PyInt_CheckExact(op) PyLong_CheckExact(op)
#define PyInt_FromString PyLong_FromString
#define PyInt_FromUnicode PyLong_FromUnicode
#define PyInt_FromLong PyLong_FromLong
#define PyInt_FromSize_t PyLong_FromSize_t
#define PyInt_FromSsize_t PyLong_FromSsize_t
#define PyInt_AsLong PyLong_AsLong
#define PyInt_AS_LONG PyLong_AS_LONG
#define PyInt_AsSsize_t PyLong_AsSsize_t
#define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask
#define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
#define PyNumber_Int PyNumber_Long
#endif
#if PY_MAJOR_VERSION >= 3
#define PyBoolObject PyLongObject
#endif
#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY
#ifndef PyUnicode_InternFromString
#define PyUnicode_InternFromString(s) PyUnicode_FromString(s)
#endif
#endif
#if PY_VERSION_HEX < 0x030200A4
typedef long Py_hash_t;
#define __Pyx_PyInt_FromHash_t PyInt_FromLong
#define __Pyx_PyInt_AsHash_t PyInt_AsLong
#else
#define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
#define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : PyInstanceMethod_New(func))
#else
#define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
#endif
#if PY_VERSION_HEX >= 0x030500B1
#define __Pyx_PyAsyncMethodsStruct PyAsyncMethods
#define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)
#elif CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3
typedef struct {
unaryfunc am_await;
unaryfunc am_aiter;
unaryfunc am_anext;
} __Pyx_PyAsyncMethodsStruct;
#define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
#else
#define __Pyx_PyType_AsAsync(obj) NULL
#endif
#ifndef CYTHON_RESTRICT
#if defined(__GNUC__)
#define CYTHON_RESTRICT __restrict__
#elif defined(_MSC_VER) && _MSC_VER >= 1400
#define CYTHON_RESTRICT __restrict
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define CYTHON_RESTRICT restrict
#else
#define CYTHON_RESTRICT
#endif
#endif
#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)
#ifndef CYTHON_INLINE
#if defined(__GNUC__)
#define CYTHON_INLINE __inline__
#elif defined(_MSC_VER)
#define CYTHON_INLINE __inline
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define CYTHON_INLINE inline
#else
#define CYTHON_INLINE
#endif
#endif
#if defined(WIN32) || defined(MS_WINDOWS)
#define _USE_MATH_DEFINES
#endif
#include <math.h>
#ifdef NAN
#define __PYX_NAN() ((float) NAN)
#else
static CYTHON_INLINE float __PYX_NAN() {
float value;
memset(&value, 0xFF, sizeof(value));
return value;
}
#endif
#define __PYX_ERR(f_index, lineno, Ln_error) \
{ \
__pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \
}
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
#define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)
#else
#define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y)
#define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)
#endif
#ifndef __PYX_EXTERN_C
#ifdef __cplusplus
#define __PYX_EXTERN_C extern "C"
#else
#define __PYX_EXTERN_C extern
#endif
#endif
#define __PYX_HAVE__gevent__libuv___libuv
#define __PYX_HAVE_API__gevent__libuv___libuv
#ifdef _OPENMP
#include <omp.h>
#endif /* _OPENMP */
#ifdef PYREX_WITHOUT_ASSERTIONS
#define CYTHON_WITHOUT_ASSERTIONS
#endif
#ifndef CYTHON_UNUSED
# if defined(__GNUC__)
# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
# define CYTHON_UNUSED __attribute__ ((__unused__))
# else
# define CYTHON_UNUSED
# endif
# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
# define CYTHON_UNUSED __attribute__ ((__unused__))
# else
# define CYTHON_UNUSED
# endif
#endif
#ifndef CYTHON_NCP_UNUSED
# if CYTHON_COMPILING_IN_CPYTHON
# define CYTHON_NCP_UNUSED
# else
# define CYTHON_NCP_UNUSED CYTHON_UNUSED
# endif
#endif
typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding;
const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0
#define __PYX_DEFAULT_STRING_ENCODING ""
#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
#define __Pyx_uchar_cast(c) ((unsigned char)c)
#define __Pyx_long_cast(x) ((long)x)
#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\
(sizeof(type) < sizeof(Py_ssize_t)) ||\
(sizeof(type) > sizeof(Py_ssize_t) &&\
likely(v < (type)PY_SSIZE_T_MAX ||\
v == (type)PY_SSIZE_T_MAX) &&\
(!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\
v == (type)PY_SSIZE_T_MIN))) ||\
(sizeof(type) == sizeof(Py_ssize_t) &&\
(is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\
v == (type)PY_SSIZE_T_MAX))) )
#if defined (__cplusplus) && __cplusplus >= 201103L
#include <cstdlib>
#define __Pyx_sst_abs(value) std::abs(value)
#elif SIZEOF_INT >= SIZEOF_SIZE_T
#define __Pyx_sst_abs(value) abs(value)
#elif SIZEOF_LONG >= SIZEOF_SIZE_T
#define __Pyx_sst_abs(value) labs(value)
#elif defined (_MSC_VER) && defined (_M_X64)
#define __Pyx_sst_abs(value) _abs64(value)
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define __Pyx_sst_abs(value) llabs(value)
#elif defined (__GNUC__)
#define __Pyx_sst_abs(value) __builtin_llabs(value)
#else
#define __Pyx_sst_abs(value) ((value<0) ? -value : value)
#endif
static CYTHON_INLINE char* __Pyx_PyObject_AsString(PyObject*);
static CYTHON_INLINE char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length);
#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s))
#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l)
#define __Pyx_PyBytes_FromString PyBytes_FromString
#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize
static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*);
#if PY_MAJOR_VERSION < 3
#define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString
#define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
#else
#define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString
#define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize
#endif
#define __Pyx_PyObject_AsSString(s) ((signed char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_AsUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s)
#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s)
#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s)
#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s)
#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s)
#if PY_MAJOR_VERSION < 3
static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u)
{
const Py_UNICODE *u_end = u;
while (*u_end++) ;
return (size_t)(u_end - u - 1);
}
#else
#define __Pyx_Py_UNICODE_strlen Py_UNICODE_strlen
#endif
#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u))
#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode
#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode
#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj)
#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None)
#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False))
static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);
static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x);
static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
#if CYTHON_COMPILING_IN_CPYTHON
#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
#else
#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x)
#endif
#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x))
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x))
#else
#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x))
#endif
#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x))
#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
static int __Pyx_sys_getdefaultencoding_not_ascii;
static int __Pyx_init_sys_getdefaultencoding_params(void) {
PyObject* sys;
PyObject* default_encoding = NULL;
PyObject* ascii_chars_u = NULL;
PyObject* ascii_chars_b = NULL;
const char* default_encoding_c;
sys = PyImport_ImportModule("sys");
if (!sys) goto bad;
default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL);
Py_DECREF(sys);
if (!default_encoding) goto bad;
default_encoding_c = PyBytes_AsString(default_encoding);
if (!default_encoding_c) goto bad;
if (strcmp(default_encoding_c, "ascii") == 0) {
__Pyx_sys_getdefaultencoding_not_ascii = 0;
} else {
char ascii_chars[128];
int c;
for (c = 0; c < 128; c++) {
ascii_chars[c] = c;
}
__Pyx_sys_getdefaultencoding_not_ascii = 1;
ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL);
if (!ascii_chars_u) goto bad;
ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL);
if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) {
PyErr_Format(
PyExc_ValueError,
"This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.",
default_encoding_c);
goto bad;
}
Py_DECREF(ascii_chars_u);
Py_DECREF(ascii_chars_b);
}
Py_DECREF(default_encoding);
return 0;
bad:
Py_XDECREF(default_encoding);
Py_XDECREF(ascii_chars_u);
Py_XDECREF(ascii_chars_b);
return -1;
}
#endif
#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3
#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL)
#else
#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL)
#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
static char* __PYX_DEFAULT_STRING_ENCODING;
static int __Pyx_init_sys_getdefaultencoding_params(void) {
PyObject* sys;
PyObject* default_encoding = NULL;
char* default_encoding_c;
sys = PyImport_ImportModule("sys");
if (!sys) goto bad;
default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL);
Py_DECREF(sys);
if (!default_encoding) goto bad;
default_encoding_c = PyBytes_AsString(default_encoding);
if (!default_encoding_c) goto bad;
__PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c));
if (!__PYX_DEFAULT_STRING_ENCODING) goto bad;
strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c);
Py_DECREF(default_encoding);
return 0;
bad:
Py_XDECREF(default_encoding);
return -1;
}
#endif
#endif
/* Test for GCC > 2.95 */
#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)))
#define likely(x) __builtin_expect(!!(x), 1)
#define unlikely(x) __builtin_expect(!!(x), 0)
#else /* !__GNUC__ or GCC < 2.95 */
#define likely(x) (x)
#define unlikely(x) (x)
#endif /* __GNUC__ */
static PyObject *__pyx_m;
static PyObject *__pyx_d;
static PyObject *__pyx_b;
static PyObject *__pyx_empty_tuple;
static PyObject *__pyx_empty_bytes;
static PyObject *__pyx_empty_unicode;
static int __pyx_lineno;
static int __pyx_clineno = 0;
static const char * __pyx_cfilenm= __FILE__;
static const char *__pyx_filename;
static const char *__pyx_f[] = {
"gevent/libuv/_libuv.py",
};
/*--- Type declarations ---*/
/* --- Runtime support code (head) --- */
/* Refnanny.proto */
#ifndef CYTHON_REFNANNY
#define CYTHON_REFNANNY 0
#endif
#if CYTHON_REFNANNY
typedef struct {
void (*INCREF)(void*, PyObject*, int);
void (*DECREF)(void*, PyObject*, int);
void (*GOTREF)(void*, PyObject*, int);
void (*GIVEREF)(void*, PyObject*, int);
void* (*SetupContext)(const char*, int, const char*);
void (*FinishContext)(void**);
} __Pyx_RefNannyAPIStruct;
static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname);
#define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;
#ifdef WITH_THREAD
#define __Pyx_RefNannySetupContext(name, acquire_gil)\
if (acquire_gil) {\
PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\
__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
PyGILState_Release(__pyx_gilstate_save);\
} else {\
__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
}
#else
#define __Pyx_RefNannySetupContext(name, acquire_gil)\
__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)
#endif
#define __Pyx_RefNannyFinishContext()\
__Pyx_RefNanny->FinishContext(&__pyx_refnanny)
#define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0)
#define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0)
#define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0)
#define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0)
#else
#define __Pyx_RefNannyDeclarations
#define __Pyx_RefNannySetupContext(name, acquire_gil)
#define __Pyx_RefNannyFinishContext()
#define __Pyx_INCREF(r) Py_INCREF(r)
#define __Pyx_DECREF(r) Py_DECREF(r)
#define __Pyx_GOTREF(r)
#define __Pyx_GIVEREF(r)
#define __Pyx_XINCREF(r) Py_XINCREF(r)
#define __Pyx_XDECREF(r) Py_XDECREF(r)
#define __Pyx_XGOTREF(r)
#define __Pyx_XGIVEREF(r)
#endif
#define __Pyx_XDECREF_SET(r, v) do {\
PyObject *tmp = (PyObject *) r;\
r = v; __Pyx_XDECREF(tmp);\
} while (0)
#define __Pyx_DECREF_SET(r, v) do {\
PyObject *tmp = (PyObject *) r;\
r = v; __Pyx_DECREF(tmp);\
} while (0)
#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0)
#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0)
/* CodeObjectCache.proto */
typedef struct {
PyCodeObject* code_object;
int code_line;
} __Pyx_CodeObjectCacheEntry;
struct __Pyx_CodeObjectCache {
int count;
int max_count;
__Pyx_CodeObjectCacheEntry* entries;
};
static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL};
static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line);
static PyCodeObject *__pyx_find_code_object(int code_line);
static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
/* AddTraceback.proto */
static void __Pyx_AddTraceback(const char *funcname, int c_line,
int py_line, const char *filename);
/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
/* CIntFromPy.proto */
static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *);
/* CIntFromPy.proto */
static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *);
/* CheckBinaryVersion.proto */
static int __Pyx_check_binary_version(void);
/* InitStrings.proto */
static int __Pyx_InitStrings(__Pyx_StringTabEntry *t);
/* Module declarations from 'gevent.libuv._libuv' */
#define __Pyx_MODULE_NAME "gevent.libuv._libuv"
int __pyx_module_is_main_gevent__libuv___libuv = 0;
/* Implementation of 'gevent.libuv._libuv' */
static const char __pyx_k_main[] = "__main__";
static const char __pyx_k_test[] = "__test__";
static PyObject *__pyx_n_s_main;
static PyObject *__pyx_n_s_test;
static PyMethodDef __pyx_methods[] = {
{0, 0, 0, 0}
};
#if PY_MAJOR_VERSION >= 3
static struct PyModuleDef __pyx_moduledef = {
#if PY_VERSION_HEX < 0x03020000
{ PyObject_HEAD_INIT(NULL) NULL, 0, NULL },
#else
PyModuleDef_HEAD_INIT,
#endif
"_libuv",
0, /* m_doc */
-1, /* m_size */
__pyx_methods /* m_methods */,
NULL, /* m_reload */
NULL, /* m_traverse */
NULL, /* m_clear */
NULL /* m_free */
};
#endif
static __Pyx_StringTabEntry __pyx_string_tab[] = {
{&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1},
{&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1},
{0, 0, 0, 0, 0, 0, 0}
};
static int __Pyx_InitCachedBuiltins(void) {
return 0;
}
static int __Pyx_InitCachedConstants(void) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0);
__Pyx_RefNannyFinishContext();
return 0;
}
static int __Pyx_InitGlobals(void) {
if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
return 0;
__pyx_L1_error:;
return -1;
}
#if PY_MAJOR_VERSION < 3
PyMODINIT_FUNC init_libuv(void); /*proto*/
PyMODINIT_FUNC init_libuv(void)
#else
PyMODINIT_FUNC PyInit__libuv(void); /*proto*/
PyMODINIT_FUNC PyInit__libuv(void)
#endif
{
PyObject *__pyx_t_1 = NULL;
__Pyx_RefNannyDeclarations
#if CYTHON_REFNANNY
__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
if (!__Pyx_RefNanny) {
PyErr_Clear();
__Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
if (!__Pyx_RefNanny)
Py_FatalError("failed to import 'refnanny' module");
}
#endif
__Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit__libuv(void)", 0);
if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
__pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error)
__pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error)
__pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error)
#ifdef __Pyx_CyFunction_USED
if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
#endif
#ifdef __Pyx_FusedFunction_USED
if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
#endif
#ifdef __Pyx_Coroutine_USED
if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
#endif
#ifdef __Pyx_Generator_USED
if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
#endif
#ifdef __Pyx_StopAsyncIteration_USED
if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
#endif
/*--- Library function declarations ---*/
/*--- Threads initialization code ---*/
#if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
#ifdef WITH_THREAD /* Python build with threading support? */
PyEval_InitThreads();
#endif
#endif
/*--- Module creation code ---*/
#if PY_MAJOR_VERSION < 3
__pyx_m = Py_InitModule4("_libuv", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m);
#else
__pyx_m = PyModule_Create(&__pyx_moduledef);
#endif
if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error)
__pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error)
Py_INCREF(__pyx_d);
__pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error)
#if CYTHON_COMPILING_IN_PYPY
Py_INCREF(__pyx_b);
#endif
if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
/*--- Initialize various global constants etc. ---*/
if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
#if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT)
if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
#endif
if (__pyx_module_is_main_gevent__libuv___libuv) {
if (PyObject_SetAttrString(__pyx_m, "__name__", __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
}
#if PY_MAJOR_VERSION >= 3
{
PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error)
if (!PyDict_GetItemString(modules, "gevent.libuv._libuv")) {
if (unlikely(PyDict_SetItemString(modules, "gevent.libuv._libuv", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error)
}
}
#endif
/*--- Builtin init code ---*/
if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
/*--- Constants init code ---*/
if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
/*--- Global init code ---*/
/*--- Variable export code ---*/
/*--- Function export code ---*/
/*--- Type init code ---*/
/*--- Type import code ---*/
/*--- Variable import code ---*/
/*--- Function import code ---*/
/*--- Execution code ---*/
#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)
if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
#endif
/* "gevent/libuv/_libuv.py":1
* # Empty module, exists to build libuv library # <<<<<<<<<<<<<<
*/
__pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
/*--- Wrapped vars code ---*/
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
if (__pyx_m) {
if (__pyx_d) {
__Pyx_AddTraceback("init gevent.libuv._libuv", __pyx_clineno, __pyx_lineno, __pyx_filename);
}
Py_DECREF(__pyx_m); __pyx_m = 0;
} else if (!PyErr_Occurred()) {
PyErr_SetString(PyExc_ImportError, "init gevent.libuv._libuv");
}
__pyx_L0:;
__Pyx_RefNannyFinishContext();
#if PY_MAJOR_VERSION < 3
return;
#else
return __pyx_m;
#endif
}
/* --- Runtime support code --- */
/* Refnanny */
#if CYTHON_REFNANNY
static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
PyObject *m = NULL, *p = NULL;
void *r = NULL;
m = PyImport_ImportModule((char *)modname);
if (!m) goto end;
p = PyObject_GetAttrString(m, (char *)"RefNannyAPI");
if (!p) goto end;
r = PyLong_AsVoidPtr(p);
end:
Py_XDECREF(p);
Py_XDECREF(m);
return (__Pyx_RefNannyAPIStruct *)r;
}
#endif
/* CodeObjectCache */
static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) {
int start = 0, mid = 0, end = count - 1;
if (end >= 0 && code_line > entries[end].code_line) {
return count;
}
while (start < end) {
mid = start + (end - start) / 2;
if (code_line < entries[mid].code_line) {
end = mid;
} else if (code_line > entries[mid].code_line) {
start = mid + 1;
} else {
return mid;
}
}
if (code_line <= entries[mid].code_line) {
return mid;
} else {
return mid + 1;
}
}
static PyCodeObject *__pyx_find_code_object(int code_line) {
PyCodeObject* code_object;
int pos;
if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) {
return NULL;
}
pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) {
return NULL;
}
code_object = __pyx_code_cache.entries[pos].code_object;
Py_INCREF(code_object);
return code_object;
}
static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
int pos, i;
__Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries;
if (unlikely(!code_line)) {
return;
}
if (unlikely(!entries)) {
entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry));
if (likely(entries)) {
__pyx_code_cache.entries = entries;
__pyx_code_cache.max_count = 64;
__pyx_code_cache.count = 1;
entries[0].code_line = code_line;
entries[0].code_object = code_object;
Py_INCREF(code_object);
}
return;
}
pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) {
PyCodeObject* tmp = entries[pos].code_object;
entries[pos].code_object = code_object;
Py_DECREF(tmp);
return;
}
if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
int new_max = __pyx_code_cache.max_count + 64;
entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
__pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry));
if (unlikely(!entries)) {
return;
}
__pyx_code_cache.entries = entries;
__pyx_code_cache.max_count = new_max;
}
for (i=__pyx_code_cache.count; i>pos; i--) {
entries[i] = entries[i-1];
}
entries[pos].code_line = code_line;
entries[pos].code_object = code_object;
__pyx_code_cache.count++;
Py_INCREF(code_object);
}
/* AddTraceback */
#include "compile.h"
#include "frameobject.h"
#include "traceback.h"
static PyCodeObject* __Pyx_CreateCodeObjectForTraceback(
const char *funcname, int c_line,
int py_line, const char *filename) {
PyCodeObject *py_code = 0;
PyObject *py_srcfile = 0;
PyObject *py_funcname = 0;
#if PY_MAJOR_VERSION < 3
py_srcfile = PyString_FromString(filename);
#else
py_srcfile = PyUnicode_FromString(filename);
#endif
if (!py_srcfile) goto bad;
if (c_line) {
#if PY_MAJOR_VERSION < 3
py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
#else
py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
#endif
}
else {
#if PY_MAJOR_VERSION < 3
py_funcname = PyString_FromString(funcname);
#else
py_funcname = PyUnicode_FromString(funcname);
#endif
}
if (!py_funcname) goto bad;
py_code = __Pyx_PyCode_New(
0,
0,
0,
0,
0,
__pyx_empty_bytes, /*PyObject *code,*/
__pyx_empty_tuple, /*PyObject *consts,*/
__pyx_empty_tuple, /*PyObject *names,*/
__pyx_empty_tuple, /*PyObject *varnames,*/
__pyx_empty_tuple, /*PyObject *freevars,*/
__pyx_empty_tuple, /*PyObject *cellvars,*/
py_srcfile, /*PyObject *filename,*/
py_funcname, /*PyObject *name,*/
py_line,
__pyx_empty_bytes /*PyObject *lnotab*/
);
Py_DECREF(py_srcfile);
Py_DECREF(py_funcname);
return py_code;
bad:
Py_XDECREF(py_srcfile);
Py_XDECREF(py_funcname);
return NULL;
}
static void __Pyx_AddTraceback(const char *funcname, int c_line,
int py_line, const char *filename) {
PyCodeObject *py_code = 0;
PyFrameObject *py_frame = 0;
py_code = __pyx_find_code_object(c_line ? c_line : py_line);
if (!py_code) {
py_code = __Pyx_CreateCodeObjectForTraceback(
funcname, c_line, py_line, filename);
if (!py_code) goto bad;
__pyx_insert_code_object(c_line ? c_line : py_line, py_code);
}
py_frame = PyFrame_New(
PyThreadState_GET(), /*PyThreadState *tstate,*/
py_code, /*PyCodeObject *code,*/
__pyx_d, /*PyObject *globals,*/
0 /*PyObject *locals*/
);
if (!py_frame) goto bad;
py_frame->f_lineno = py_line;
PyTraceBack_Here(py_frame);
bad:
Py_XDECREF(py_code);
Py_XDECREF(py_frame);
}
/* CIntToPy */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
const long neg_one = (long) -1, const_zero = (long) 0;
const int is_unsigned = neg_one > const_zero;
if (is_unsigned) {
if (sizeof(long) < sizeof(long)) {
return PyInt_FromLong((long) value);
} else if (sizeof(long) <= sizeof(unsigned long)) {
return PyLong_FromUnsignedLong((unsigned long) value);
} else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
}
} else {
if (sizeof(long) <= sizeof(long)) {
return PyInt_FromLong((long) value);
} else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
return PyLong_FromLongLong((PY_LONG_LONG) value);
}
}
{
int one = 1; int little = (int)*(unsigned char *)&one;
unsigned char *bytes = (unsigned char *)&value;
return _PyLong_FromByteArray(bytes, sizeof(long),
little, !is_unsigned);
}
}
/* CIntFromPyVerify */
#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\
__PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0)
#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\
__PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1)
#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\
{\
func_type value = func_value;\
if (sizeof(target_type) < sizeof(func_type)) {\
if (unlikely(value != (func_type) (target_type) value)) {\
func_type zero = 0;\
if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\
return (target_type) -1;\
if (is_unsigned && unlikely(value < zero))\
goto raise_neg_overflow;\
else\
goto raise_overflow;\
}\
}\
return (target_type) value;\
}
/* CIntFromPy */
static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) {
const long neg_one = (long) -1, const_zero = (long) 0;
const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_Check(x))) {
if (sizeof(long) < sizeof(long)) {
__PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x))
} else {
long val = PyInt_AS_LONG(x);
if (is_unsigned && unlikely(val < 0)) {
goto raise_neg_overflow;
}
return (long) val;
}
} else
#endif
if (likely(PyLong_Check(x))) {
if (is_unsigned) {
#if CYTHON_USE_PYLONG_INTERNALS
const digit* digits = ((PyLongObject*)x)->ob_digit;
switch (Py_SIZE(x)) {
case 0: return (long) 0;
case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0])
case 2:
if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) {
return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
}
}
break;
case 3:
if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) {
return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
}
}
break;
case 4:
if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) {
return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
}
}
break;
}
#endif
#if CYTHON_COMPILING_IN_CPYTHON
if (unlikely(Py_SIZE(x) < 0)) {
goto raise_neg_overflow;
}
#else
{
int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
if (unlikely(result < 0))
return (long) -1;
if (unlikely(result == 1))
goto raise_neg_overflow;
}
#endif
if (sizeof(long) <= sizeof(unsigned long)) {
__PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x))
} else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
__PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
}
} else {
#if CYTHON_USE_PYLONG_INTERNALS
const digit* digits = ((PyLongObject*)x)->ob_digit;
switch (Py_SIZE(x)) {
case 0: return (long) 0;
case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0]))
case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0])
case -2:
if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
}
}
break;
case 2:
if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
}
}
break;
case -3:
if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
}
}
break;
case 3:
if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
}
}
break;
case -4:
if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
}
}
break;
case 4:
if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
}
}
break;
}
#endif
if (sizeof(long) <= sizeof(long)) {
__PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x))
} else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
__PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x))
}
}
{
#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
PyErr_SetString(PyExc_RuntimeError,
"_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
#else
long val;
PyObject *v = __Pyx_PyNumber_IntOrLong(x);
#if PY_MAJOR_VERSION < 3
if (likely(v) && !PyLong_Check(v)) {
PyObject *tmp = v;
v = PyNumber_Long(tmp);
Py_DECREF(tmp);
}
#endif
if (likely(v)) {
int one = 1; int is_little = (int)*(unsigned char *)&one;
unsigned char *bytes = (unsigned char *)&val;
int ret = _PyLong_AsByteArray((PyLongObject *)v,
bytes, sizeof(val),
is_little, !is_unsigned);
Py_DECREF(v);
if (likely(!ret))
return val;
}
#endif
return (long) -1;
}
} else {
long val;
PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
if (!tmp) return (long) -1;
val = __Pyx_PyInt_As_long(tmp);
Py_DECREF(tmp);
return val;
}
raise_overflow:
PyErr_SetString(PyExc_OverflowError,
"value too large to convert to long");
return (long) -1;
raise_neg_overflow:
PyErr_SetString(PyExc_OverflowError,
"can't convert negative value to long");
return (long) -1;
}
/* CIntFromPy */
static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) {
const int neg_one = (int) -1, const_zero = (int) 0;
const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_Check(x))) {
if (sizeof(int) < sizeof(long)) {
__PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x))
} else {
long val = PyInt_AS_LONG(x);
if (is_unsigned && unlikely(val < 0)) {
goto raise_neg_overflow;
}
return (int) val;
}
} else
#endif
if (likely(PyLong_Check(x))) {
if (is_unsigned) {
#if CYTHON_USE_PYLONG_INTERNALS
const digit* digits = ((PyLongObject*)x)->ob_digit;
switch (Py_SIZE(x)) {
case 0: return (int) 0;
case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0])
case 2:
if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) {
return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
}
}
break;
case 3:
if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) {
return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
}
}
break;
case 4:
if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) {
return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
}
}
break;
}
#endif
#if CYTHON_COMPILING_IN_CPYTHON
if (unlikely(Py_SIZE(x) < 0)) {
goto raise_neg_overflow;
}
#else
{
int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
if (unlikely(result < 0))
return (int) -1;
if (unlikely(result == 1))
goto raise_neg_overflow;
}
#endif
if (sizeof(int) <= sizeof(unsigned long)) {
__PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x))
} else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) {
__PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
}
} else {
#if CYTHON_USE_PYLONG_INTERNALS
const digit* digits = ((PyLongObject*)x)->ob_digit;
switch (Py_SIZE(x)) {
case 0: return (int) 0;
case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0]))
case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0])
case -2:
if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
}
}
break;
case 2:
if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
}
}
break;
case -3:
if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
}
}
break;
case 3:
if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
}
}
break;
case -4:
if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
}
}
break;
case 4:
if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
}
}
break;
}
#endif
if (sizeof(int) <= sizeof(long)) {
__PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x))
} else if (sizeof(int) <= sizeof(PY_LONG_LONG)) {
__PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x))
}
}
{
#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
PyErr_SetString(PyExc_RuntimeError,
"_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
#else
int val;
PyObject *v = __Pyx_PyNumber_IntOrLong(x);
#if PY_MAJOR_VERSION < 3
if (likely(v) && !PyLong_Check(v)) {
PyObject *tmp = v;
v = PyNumber_Long(tmp);
Py_DECREF(tmp);
}
#endif
if (likely(v)) {
int one = 1; int is_little = (int)*(unsigned char *)&one;
unsigned char *bytes = (unsigned char *)&val;
int ret = _PyLong_AsByteArray((PyLongObject *)v,
bytes, sizeof(val),
is_little, !is_unsigned);
Py_DECREF(v);
if (likely(!ret))
return val;
}
#endif
return (int) -1;
}
} else {
int val;
PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
if (!tmp) return (int) -1;
val = __Pyx_PyInt_As_int(tmp);
Py_DECREF(tmp);
return val;
}
raise_overflow:
PyErr_SetString(PyExc_OverflowError,
"value too large to convert to int");
return (int) -1;
raise_neg_overflow:
PyErr_SetString(PyExc_OverflowError,
"can't convert negative value to int");
return (int) -1;
}
/* CheckBinaryVersion */
static int __Pyx_check_binary_version(void) {
char ctversion[4], rtversion[4];
PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION);
PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion());
if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) {
char message[200];
PyOS_snprintf(message, sizeof(message),
"compiletime version %s of module '%.100s' "
"does not match runtime version %s",
ctversion, __Pyx_MODULE_NAME, rtversion);
return PyErr_WarnEx(NULL, message, 1);
}
return 0;
}
/* InitStrings */
static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
while (t->p) {
#if PY_MAJOR_VERSION < 3
if (t->is_unicode) {
*t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
} else if (t->intern) {
*t->p = PyString_InternFromString(t->s);
} else {
*t->p = PyString_FromStringAndSize(t->s, t->n - 1);
}
#else
if (t->is_unicode | t->is_str) {
if (t->intern) {
*t->p = PyUnicode_InternFromString(t->s);
} else if (t->encoding) {
*t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
} else {
*t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
}
} else {
*t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);
}
#endif
if (!*t->p)
return -1;
++t;
}
return 0;
}
static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) {
return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str));
}
static CYTHON_INLINE char* __Pyx_PyObject_AsString(PyObject* o) {
Py_ssize_t ignore;
return __Pyx_PyObject_AsStringAndSize(o, &ignore);
}
static CYTHON_INLINE char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
#if CYTHON_COMPILING_IN_CPYTHON && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT)
if (
#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
__Pyx_sys_getdefaultencoding_not_ascii &&
#endif
PyUnicode_Check(o)) {
#if PY_VERSION_HEX < 0x03030000
char* defenc_c;
PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL);
if (!defenc) return NULL;
defenc_c = PyBytes_AS_STRING(defenc);
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
{
char* end = defenc_c + PyBytes_GET_SIZE(defenc);
char* c;
for (c = defenc_c; c < end; c++) {
if ((unsigned char) (*c) >= 128) {
PyUnicode_AsASCIIString(o);
return NULL;
}
}
}
#endif
*length = PyBytes_GET_SIZE(defenc);
return defenc_c;
#else
if (__Pyx_PyUnicode_READY(o) == -1) return NULL;
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
if (PyUnicode_IS_ASCII(o)) {
*length = PyUnicode_GET_LENGTH(o);
return PyUnicode_AsUTF8(o);
} else {
PyUnicode_AsASCIIString(o);
return NULL;
}
#else
return PyUnicode_AsUTF8AndSize(o, length);
#endif
#endif
} else
#endif
#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE))
if (PyByteArray_Check(o)) {
*length = PyByteArray_GET_SIZE(o);
return PyByteArray_AS_STRING(o);
} else
#endif
{
char* result;
int r = PyBytes_AsStringAndSize(o, &result, length);
if (unlikely(r < 0)) {
return NULL;
} else {
return result;
}
}
}
static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {
int is_true = x == Py_True;
if (is_true | (x == Py_False) | (x == Py_None)) return is_true;
else return PyObject_IsTrue(x);
}
static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) {
PyNumberMethods *m;
const char *name = NULL;
PyObject *res = NULL;
#if PY_MAJOR_VERSION < 3
if (PyInt_Check(x) || PyLong_Check(x))
#else
if (PyLong_Check(x))
#endif
return __Pyx_NewRef(x);
m = Py_TYPE(x)->tp_as_number;
#if PY_MAJOR_VERSION < 3
if (m && m->nb_int) {
name = "int";
res = PyNumber_Int(x);
}
else if (m && m->nb_long) {
name = "long";
res = PyNumber_Long(x);
}
#else
if (m && m->nb_int) {
name = "int";
res = PyNumber_Long(x);
}
#endif
if (res) {
#if PY_MAJOR_VERSION < 3
if (!PyInt_Check(res) && !PyLong_Check(res)) {
#else
if (!PyLong_Check(res)) {
#endif
PyErr_Format(PyExc_TypeError,
"__%.4s__ returned non-%.4s (type %.200s)",
name, name, Py_TYPE(res)->tp_name);
Py_DECREF(res);
return NULL;
}
}
else if (!PyErr_Occurred()) {
PyErr_SetString(PyExc_TypeError,
"an integer is required");
}
return res;
}
static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {
Py_ssize_t ival;
PyObject *x;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_CheckExact(b))) {
if (sizeof(Py_ssize_t) >= sizeof(long))
return PyInt_AS_LONG(b);
else
return PyInt_AsSsize_t(b);
}
#endif
if (likely(PyLong_CheckExact(b))) {
#if CYTHON_USE_PYLONG_INTERNALS
const digit* digits = ((PyLongObject*)b)->ob_digit;
const Py_ssize_t size = Py_SIZE(b);
if (likely(__Pyx_sst_abs(size) <= 1)) {
ival = likely(size) ? digits[0] : 0;
if (size == -1) ival = -ival;
return ival;
} else {
switch (size) {
case 2:
if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
}
break;
case -2:
if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
}
break;
case 3:
if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
}
break;
case -3:
if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
}
break;
case 4:
if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
}
break;
case -4:
if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
}
break;
}
}
#endif
return PyLong_AsSsize_t(b);
}
x = PyNumber_Index(b);
if (!x) return -1;
ival = PyInt_AsSsize_t(x);
Py_DECREF(x);
return ival;
}
static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) {
return PyInt_FromSize_t(ival);
}
#endif /* Py_PYTHON_H */
# Empty module, exists to build libuv library
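The generated C file above is compiled from the one-line Cython module shown on the preceding line; its only job is to give setuptools an extension to build, so that the embedded libuv sources get compiled and linked by the ordinary extension machinery. A rough sketch of how such an extension could be declared follows. The module name matches the one in the diff, but the source paths, include directories, and use of glob are illustrative assumptions, not the project's actual setup.py.

# Illustrative setup.py fragment: paths and source lists are assumptions,
# chosen only to show the shape of the trick described above.
import glob
from setuptools import setup, Extension

LIBUV_SOURCES = glob.glob('deps/libuv/src/*.c')              # hypothetical layout

libuv_ext = Extension(
    'gevent.libuv._libuv',                                   # the empty module above
    sources=['src/gevent/libuv/_libuv.c'] + LIBUV_SOURCES,   # hypothetical paths
    include_dirs=['deps/libuv/include', 'deps/libuv/src'],   # hypothetical paths
)

setup(name='libuv-build-example', ext_modules=[libuv_ext])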
......@@ -185,6 +185,13 @@ class loop(AbstractLoop):
"""
Return all the handles that are open and their ref status.
"""
# XXX: Disabled because, at least on Windows, calling this often
# produces `SystemError: ffi.from_handle(): dead or bogus handle
# object`, and sometimes that crashes the process.
return []
def _really_debug(self):
handle_state = namedtuple("HandleState",
['handle',
'watcher',
......
......@@ -3,6 +3,7 @@
from __future__ import absolute_import, print_function
import functools
import sys
import weakref
import gevent.libuv._corecffi as _corecffi # pylint:disable=no-name-in-module,import-error
......@@ -10,7 +11,6 @@ import gevent.libuv._corecffi as _corecffi # pylint:disable=no-name-in-module,im
ffi = _corecffi.ffi
libuv = _corecffi.lib
from gevent._ffi import watcher as _base
_closing_handles = set()
......@@ -28,11 +28,10 @@ def _dbg(*args, **kwargs):
def _pid_dbg(*args, **kwargs):
import os
import sys
kwargs['file'] = sys.stderr
print(os.getpid(), *args, **kwargs)
# _dbg = _pid_dbg
#_dbg = _pid_dbg
_events = [(libuv.UV_READABLE, "READ"),
(libuv.UV_WRITABLE, "WRITE")]
......@@ -224,14 +223,21 @@ class io(_base.IoMixin, watcher):
def _watcher_ffi_start(self):
self._watcher_start(self._watcher, self._events, self._watcher_callback)
if sys.platform.startswith('win32'):
# We can only handle sockets. We smuggle the SOCKET through disguised
# as a fileno
_watcher_init = watcher._LIB.uv_poll_init_socket
class _multiplexwatcher(object):
callback = None
args = ()
pass_events = False
ref = True
def __init__(self, events, watcher):
self.events = events
self.callback = None
self.args = ()
self.pass_events = False
self.ref = True
# References:
# These objects keep the original IO object alive;
......@@ -242,6 +248,7 @@ class io(_base.IoMixin, watcher):
self._watcher_ref = watcher
def start(self, callback, *args, **kwargs):
_dbg("Starting IO multiplex watcher for", self.fd, callback)
self.pass_events = kwargs.get("pass_events")
self.callback = callback
self.args = args
......@@ -251,6 +258,7 @@ class io(_base.IoMixin, watcher):
watcher._io_start()
def stop(self):
_dbg("Stopping IO multiplex watcher for", self.fd, self.callback)
self.callback = None
self.pass_events = None
self.args = None
......@@ -310,14 +318,22 @@ class io(_base.IoMixin, watcher):
# the reader, we get a LoopExit. So we can't return here and arguably shouldn't print it
# either. The negative events mask will match the watcher's mask.
# See test__fileobject.py:Test.test_newlines for an example.
# On Windows (at least with PyPy), we can get ENOTSOCK (socket operation on non-socket)
# if a socket gets closed. If we don't pass the events on, we hang.
# See test__makefile_ref.TestSSL for examples.
# return
#_dbg("Callback event for watcher", self._fd, "event", events)
for watcher_ref in self._multiplex_watchers:
watcher = watcher_ref()
if not watcher or not watcher.callback:
continue
if events & watcher.events:
#_dbg("Event for watcher", self._fd, events, watcher.events, events & watcher.events)
send_event = (events & watcher.events) or events < 0
if send_event:
if not watcher.pass_events:
watcher.callback(*watcher.args)
else:
......
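The watcher.py hunks above implement one shared libuv poll handle that fans events out to several logical watchers: each logical watcher is held only by a weak reference, its callback fires when the delivered event mask overlaps its own, and a negative mask (libuv's way of reporting an error such as ENOTSOCK) is delivered to every started watcher so nothing hangs waiting for an event that will never arrive. A minimal, self-contained sketch of that dispatch pattern is below; the class and attribute names are illustrative, not gevent's API.

import weakref

READ, WRITE = 1, 2  # stand-ins for libuv.UV_READABLE / libuv.UV_WRITABLE

class LogicalWatcher(object):
    def __init__(self, events):
        self.events = events
        self.callback = None
        self.args = ()
        self.pass_events = False

    def start(self, callback, *args, **kwargs):
        self.pass_events = kwargs.get('pass_events', False)
        self.callback = callback
        self.args = args

    def stop(self):
        self.callback = None
        self.args = None
        self.pass_events = False

class SharedPollHandle(object):
    def __init__(self):
        # Weak references: a logical watcher that is no longer referenced
        # elsewhere should not be kept alive by the shared handle.
        self._watcher_refs = []

    def add(self, watcher):
        self._watcher_refs.append(weakref.ref(watcher))

    def dispatch(self, events):
        # A negative event mask signals an error; deliver it to every
        # started watcher so none of them blocks forever.
        for ref in self._watcher_refs:
            watcher = ref()
            if watcher is None or watcher.callback is None:
                continue
            if (events & watcher.events) or events < 0:
                if watcher.pass_events:
                    watcher.callback(events, *watcher.args)
                else:
                    watcher.callback(*watcher.args)

def on_readable():
    print("fd is readable")

handle = SharedPollHandle()
reader = LogicalWatcher(READ)
reader.start(on_readable)
handle.add(reader)
handle.dispatch(READ)   # fires on_readable
handle.dispatch(-1)     # error case: every started watcher is notified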
-----BEGIN PRIVATE KEY-----
MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAOoy7/QOtTjQ0niE
6uDcTwtkC0R2Tvy1AjVnXohCntZfdzbTGDoYTgXSOLsP8A697jUiJ8VCePGH50xG
Z4DKnAF3a9O3a9nr2pLXb0iY3XOMv+YEBii7CfI+3oxFYgCl0sMgHzDD2ZTVYAsm
DWgLUVsE2gHEccRwrM2tPf2EgR+FAgMBAAECgYEA3qyfyYVSeTrTYxO93x6ZaVMu
A2IZp9zSxMQL9bKiI2GRj+cV2ebSCGbg2btFnD6qBor7FWsmYz+8g6FNN/9sY4az
61rMqMtQvLBe+7L8w70FeTze4qQ4Y1oQri0qD6tBWhDVlpnbI5Py9bkZKD67yVUk
elcEA/5x4PrYXkuqsAECQQD80NjT0mDvaY0JOOaQFSEpMv6QiUA8GGX8Xli7IoKb
tAolPG8rQBa+qSpcWfDMTrWw/aWHuMEEQoP/bVDH9W4FAkEA7SYQbBAKnojZ5A3G
kOHdV7aeivRQxQk/JN8Fb8oKB9Csvpv/BsuGxPKXHdhFa6CBTTsNRtHQw/szPo4l
xMIjgQJAPoMxqibR+0EBM6+TKzteSL6oPXsCnBl4Vk/J5vPgkbmR7KUl4+7j8N8J
b2554TrxKEN/w7CGYZRE6UrRd7ATNQJAWD7Yz41sli+wfPdPU2xo1BHljyl4wMk/
EPZYbI/PCbdyAH/F935WyQTIjNeEhZc1Zkq6FwdOWw8ns3hrv3rKgQJAHXv1BqUa
czGPIFxX2TNoqtcl6/En4vrxVB1wzsfzkkDAg98kBl7qsF+S3qujSzKikjeaVbI2
/CyWR2P3yLtOmA==
-----END PRIVATE KEY-----
-----BEGIN CERTIFICATE-----
MIIDcjCCAtugAwIBAgIJAN5dc9TOWjB7MA0GCSqGSIb3DQEBCwUAMF0xCzAJBgNV
BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u
IFNvZnR3YXJlIEZvdW5kYXRpb24xEDAOBgNVBAMMB2FsbHNhbnMwHhcNMTYwODA1
MTAyMTExWhcNMjYwODAzMTAyMTExWjBdMQswCQYDVQQGEwJYWTEXMBUGA1UEBwwO
Q2FzdGxlIEFudGhyYXgxIzAhBgNVBAoMGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0
aW9uMRAwDgYDVQQDDAdhbGxzYW5zMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB
gQDqMu/0DrU40NJ4hOrg3E8LZAtEdk78tQI1Z16IQp7WX3c20xg6GE4F0ji7D/AO
ve41IifFQnjxh+dMRmeAypwBd2vTt2vZ69qS129ImN1zjL/mBAYouwnyPt6MRWIA
pdLDIB8ww9mU1WALJg1oC1FbBNoBxHHEcKzNrT39hIEfhQIDAQABo4IBODCCATQw
ggEwBgNVHREEggEnMIIBI4IHYWxsc2Fuc6AeBgMqAwSgFwwVc29tZSBvdGhlciBp
ZGVudGlmaWVyoDUGBisGAQUCAqArMCmgEBsOS0VSQkVST1MuUkVBTE2hFTAToAMC
AQGhDDAKGwh1c2VybmFtZYEQdXNlckBleGFtcGxlLm9yZ4IPd3d3LmV4YW1wbGUu
b3JnpGcwZTELMAkGA1UEBhMCWFkxFzAVBgNVBAcMDkNhc3RsZSBBbnRocmF4MSMw
IQYDVQQKDBpQeXRob24gU29mdHdhcmUgRm91bmRhdGlvbjEYMBYGA1UEAwwPZGly
bmFtZSBleGFtcGxlhhdodHRwczovL3d3dy5weXRob24ub3JnL4cEfwAAAYcQAAAA
AAAAAAAAAAAAAAAAAYgEKgMEBTANBgkqhkiG9w0BAQsFAAOBgQAy16h+F+nOmeiT
VWR0fc8F/j6FcadbLseAUaogcC15OGxCl4UYpLV88HBkABOoGCpP155qwWTwOrdG
iYPGJSusf1OnJEbvzFejZf6u078bPd9/ZL4VWLjv+FPGkjd+N+/OaqMvgj8Lu99f
3Y/C4S7YbHxxwff6C6l2Xli+q6gnuQ==
-----END CERTIFICATE-----
-----BEGIN PRIVATE KEY-----
MIICdQIBADANBgkqhkiG9w0BAQEFAASCAl8wggJbAgEAAoGBANcLaMB7T/Wi9DBc
PltGzgt8cxsv55m7PQPHMZvn6Ke8xmNqcmEzib8opRwKGrCV6TltKeFlNSg8dwQK
Tl4ktyTkGCVweRQJ37AkBayvEBml5s+QD4vlhqkJPsL/Nsd+fnqngOGc5+59+C6r
s3XpiLlF5ah/z8q92Mnw54nypw1JAgMBAAECgYBE3t2Mj7GbDLZB6rj5yKJioVfI
BD6bSJEQ7bGgqdQkLFwpKMU7BiN+ekjuwvmrRkesYZ7BFgXBPiQrwhU5J28Tpj5B
EOMYSIOHfzdalhxDGM1q2oK9LDFiCotTaSdEzMYadel5rmKXJ0zcK2Jho0PCuECf
tf/ghRxK+h1Hm0tKgQJBAO6MdGDSmGKYX6/5kPDje7we/lSLorSDkYmV0tmVShsc
JxgaGaapazceA/sHL3Myx7Eenkip+yPYDXEDFvAKNDECQQDmxsT9NOp6mo7ISvky
GFr2vVHsJ745BMWoma4rFjPBVnS8RkgK+b2EpDCdZSrQ9zw2r8sKTgrEyrDiGTEg
wJyZAkA8OOc0flYMJg2aHnYR6kwVjPmGHI5h5gk648EMPx0rROs1sXkiUwkHLCOz
HvhCq+Iv+9vX2lnVjbiu/CmxRdIxAkA1YEfzoKeTD+hyXxTgB04Sv5sRGegfXAEz
i8gC4zG5R/vcCA1lrHmvEiLEZL/QcT6WD3bQvVg0SAU9ZkI8pxARAkA7yqMSvP1l
gJXy44R+rzpLYb1/PtiLkIkaKG3x9TUfPnfD2jY09fPkZlfsRU3/uS09IkhSwimV
d5rWoljEfdou
-----END PRIVATE KEY-----
-----BEGIN CERTIFICATE-----
MIICXTCCAcagAwIBAgIJALVQzebTtrXFMA0GCSqGSIb3DQEBBQUAMGIxCzAJBgNV
BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u
IFNvZnR3YXJlIEZvdW5kYXRpb24xFTATBgNVBAMMDGZha2Vob3N0bmFtZTAeFw0x
NDExMjMxNzAwMDdaFw0yNDExMjAxNzAwMDdaMGIxCzAJBgNVBAYTAlhZMRcwFQYD
VQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9uIFNvZnR3YXJlIEZv
dW5kYXRpb24xFTATBgNVBAMMDGZha2Vob3N0bmFtZTCBnzANBgkqhkiG9w0BAQEF
AAOBjQAwgYkCgYEA1wtowHtP9aL0MFw+W0bOC3xzGy/nmbs9A8cxm+fop7zGY2py
YTOJvyilHAoasJXpOW0p4WU1KDx3BApOXiS3JOQYJXB5FAnfsCQFrK8QGaXmz5AP
i+WGqQk+wv82x35+eqeA4Zzn7n34LquzdemIuUXlqH/Pyr3YyfDnifKnDUkCAwEA
AaMbMBkwFwYDVR0RBBAwDoIMZmFrZWhvc3RuYW1lMA0GCSqGSIb3DQEBBQUAA4GB
AKuay3vDKfWzt5+ch/HHBsert84ISot4fUjzXDA/oOgTOEjVcSShHxqNShMOW1oA
QYBpBB/5Kx5RkD/w6imhucxt2WQPRgjX4x4bwMipVH/HvFDp03mG51/Cpi1TyZ74
El7qa/Pd4lHhOLzMKBA6503fpeYSFUIBxZbGLqylqRK7
-----END CERTIFICATE-----
......@@ -20,7 +20,7 @@ from test import test_support
from test.test_support import HOST, HOSTv6
threading = test_support.import_module('threading')
TIMEOUT = 3
# the dummy data returned by server over the data channel when
# RETR, LIST and NLST commands are issued
RETR_DATA = 'abcde12345\r\n' * 1000
......@@ -223,6 +223,7 @@ class DummyFTPServer(asyncore.dispatcher, threading.Thread):
self.active = False
self.active_lock = threading.Lock()
self.host, self.port = self.socket.getsockname()[:2]
self.handler_instance = None
def start(self):
assert not self.active
......@@ -246,8 +247,7 @@ class DummyFTPServer(asyncore.dispatcher, threading.Thread):
def handle_accept(self):
conn, addr = self.accept()
self.handler = self.handler(conn)
self.close()
self.handler_instance = self.handler(conn)
def handle_connect(self):
self.close()
......@@ -262,7 +262,8 @@ class DummyFTPServer(asyncore.dispatcher, threading.Thread):
if ssl is not None:
CERTFILE = os.path.join(os.path.dirname(__file__), "keycert.pem")
CERTFILE = os.path.join(os.path.dirname(__file__), "keycert3.pem")
CAFILE = os.path.join(os.path.dirname(__file__), "pycacert.pem")
class SSLConnection(object, asyncore.dispatcher):
"""An asyncore.dispatcher subclass supporting TLS/SSL."""
......@@ -271,23 +272,25 @@ if ssl is not None:
_ssl_closing = False
def secure_connection(self):
self.socket = ssl.wrap_socket(self.socket, suppress_ragged_eofs=False,
certfile=CERTFILE, server_side=True,
do_handshake_on_connect=False,
ssl_version=ssl.PROTOCOL_SSLv23)
socket = ssl.wrap_socket(self.socket, suppress_ragged_eofs=False,
certfile=CERTFILE, server_side=True,
do_handshake_on_connect=False,
ssl_version=ssl.PROTOCOL_SSLv23)
self.del_channel()
self.set_socket(socket)
self._ssl_accepting = True
def _do_ssl_handshake(self):
try:
self.socket.do_handshake()
except ssl.SSLError, err:
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return
elif err.args[0] == ssl.SSL_ERROR_EOF:
return self.handle_close()
raise
except socket.error, err:
except socket.error as err:
if err.args[0] == errno.ECONNABORTED:
return self.handle_close()
else:
......@@ -297,18 +300,21 @@ if ssl is not None:
self._ssl_closing = True
try:
self.socket = self.socket.unwrap()
except ssl.SSLError, err:
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return
except socket.error, err:
except socket.error as err:
# Any "socket error" corresponds to a SSL_ERROR_SYSCALL return
# from OpenSSL's SSL_shutdown(), corresponding to a
# closed socket condition. See also:
# http://www.mail-archive.com/openssl-users@openssl.org/msg60710.html
pass
self._ssl_closing = False
super(SSLConnection, self).close()
if getattr(self, '_ccc', False) is False:
super(SSLConnection, self).close()
else:
pass
def handle_read_event(self):
if self._ssl_accepting:
......@@ -329,7 +335,7 @@ if ssl is not None:
def send(self, data):
try:
return super(SSLConnection, self).send(data)
except ssl.SSLError, err:
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_EOF, ssl.SSL_ERROR_ZERO_RETURN,
ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
......@@ -339,13 +345,13 @@ if ssl is not None:
def recv(self, buffer_size):
try:
return super(SSLConnection, self).recv(buffer_size)
except ssl.SSLError, err:
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return ''
return b''
if err.args[0] in (ssl.SSL_ERROR_EOF, ssl.SSL_ERROR_ZERO_RETURN):
self.handle_close()
return ''
return b''
raise
def handle_error(self):
......@@ -355,6 +361,8 @@ if ssl is not None:
if (isinstance(self.socket, ssl.SSLSocket) and
self.socket._sslobj is not None):
self._do_ssl_shutdown()
else:
super(SSLConnection, self).close()
class DummyTLS_DTPHandler(SSLConnection, DummyDTPHandler):
......@@ -462,12 +470,12 @@ class TestFTPClass(TestCase):
def test_rename(self):
self.client.rename('a', 'b')
self.server.handler.next_response = '200'
self.server.handler_instance.next_response = '200'
self.assertRaises(ftplib.error_reply, self.client.rename, 'a', 'b')
def test_delete(self):
self.client.delete('foo')
self.server.handler.next_response = '199'
self.server.handler_instance.next_response = '199'
self.assertRaises(ftplib.error_reply, self.client.delete, 'foo')
def test_size(self):
......@@ -484,10 +492,6 @@ class TestFTPClass(TestCase):
dir = self.client.cwd('/foo')
self.assertEqual(dir, '250 cwd ok')
def test_mkd(self):
dir = self.client.mkd('/foo')
self.assertEqual(dir, '/foo')
def test_pwd(self):
dir = self.client.pwd()
self.assertEqual(dir, 'pwd ok')
......@@ -519,7 +523,7 @@ class TestFTPClass(TestCase):
def test_storbinary(self):
f = StringIO.StringIO(RETR_DATA)
self.client.storbinary('stor', f)
self.assertEqual(self.server.handler.last_received_data, RETR_DATA)
self.assertEqual(self.server.handler_instance.last_received_data, RETR_DATA)
# test new callback arg
flag = []
f.seek(0)
......@@ -531,12 +535,12 @@ class TestFTPClass(TestCase):
for r in (30, '30'):
f.seek(0)
self.client.storbinary('stor', f, rest=r)
self.assertEqual(self.server.handler.rest, str(r))
self.assertEqual(self.server.handler_instance.rest, str(r))
def test_storlines(self):
f = StringIO.StringIO(RETR_DATA.replace('\r\n', '\n'))
self.client.storlines('stor', f)
self.assertEqual(self.server.handler.last_received_data, RETR_DATA)
self.assertEqual(self.server.handler_instance.last_received_data, RETR_DATA)
# test new callback arg
flag = []
f.seek(0)
......@@ -555,14 +559,14 @@ class TestFTPClass(TestCase):
def test_makeport(self):
self.client.makeport()
# IPv4 is in use, just make sure send_eprt has not been used
self.assertEqual(self.server.handler.last_received_cmd, 'port')
self.assertEqual(self.server.handler_instance.last_received_cmd, 'port')
def test_makepasv(self):
host, port = self.client.makepasv()
conn = socket.create_connection((host, port), 10)
conn.close()
# IPv4 is in use, just make sure send_epsv has not been used
self.assertEqual(self.server.handler.last_received_cmd, 'pasv')
self.assertEqual(self.server.handler_instance.last_received_cmd, 'pasv')
def test_line_too_long(self):
self.assertRaises(ftplib.Error, self.client.sendcmd,
......@@ -604,13 +608,13 @@ class TestIPv6Environment(TestCase):
def test_makeport(self):
self.client.makeport()
self.assertEqual(self.server.handler.last_received_cmd, 'eprt')
self.assertEqual(self.server.handler_instance.last_received_cmd, 'eprt')
def test_makepasv(self):
host, port = self.client.makepasv()
conn = socket.create_connection((host, port), 10)
conn.close()
self.assertEqual(self.server.handler.last_received_cmd, 'epsv')
self.assertEqual(self.server.handler_instance.last_received_cmd, 'epsv')
def test_transfer(self):
def retr():
......@@ -646,7 +650,7 @@ class TestTLS_FTPClass(TestCase):
def setUp(self):
self.server = DummyTLS_FTPServer((HOST, 0))
self.server.start()
self.client = ftplib.FTP_TLS(timeout=10)
self.client = ftplib.FTP_TLS(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
def tearDown(self):
......@@ -693,12 +697,65 @@ class TestTLS_FTPClass(TestCase):
def test_auth_ssl(self):
try:
self.client.ssl_version = ssl.PROTOCOL_SSLv3
self.client.ssl_version = ssl.PROTOCOL_SSLv23
self.client.auth()
self.assertRaises(ValueError, self.client.auth)
finally:
self.client.ssl_version = ssl.PROTOCOL_TLSv1
def test_context(self):
self.client.quit()
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.assertRaises(ValueError, ftplib.FTP_TLS, keyfile=CERTFILE,
context=ctx)
self.assertRaises(ValueError, ftplib.FTP_TLS, certfile=CERTFILE,
context=ctx)
self.assertRaises(ValueError, ftplib.FTP_TLS, certfile=CERTFILE,
keyfile=CERTFILE, context=ctx)
self.client = ftplib.FTP_TLS(context=ctx, timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.assertNotIsInstance(self.client.sock, ssl.SSLSocket)
self.client.auth()
self.assertIs(self.client.sock.context, ctx)
self.assertIsInstance(self.client.sock, ssl.SSLSocket)
self.client.prot_p()
sock = self.client.transfercmd('list')
try:
self.assertIs(sock.context, ctx)
self.assertIsInstance(sock, ssl.SSLSocket)
finally:
sock.close()
def test_check_hostname(self):
self.client.quit()
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.check_hostname = True
ctx.load_verify_locations(CAFILE)
self.client = ftplib.FTP_TLS(context=ctx, timeout=TIMEOUT)
# 127.0.0.1 doesn't match SAN
self.client.connect(self.server.host, self.server.port)
with self.assertRaises(ssl.CertificateError):
self.client.auth()
# exception quits connection
self.client.connect(self.server.host, self.server.port)
self.client.prot_p()
with self.assertRaises(ssl.CertificateError):
self.client.transfercmd("list").close()
self.client.quit()
self.client.connect("localhost", self.server.port)
self.client.auth()
self.client.quit()
self.client.connect("localhost", self.server.port)
self.client.prot_p()
self.client.transfercmd("list").close()
class TestTimeouts(TestCase):
......
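test_check_hostname above exercises the standard-library pattern of an ssl.SSLContext that both verifies the peer certificate against a CA file and requires the certificate to name the host being contacted. A minimal client-side sketch of that setup follows; the CA path, host, and port are placeholders rather than values from the test suite, and it assumes a TLS server is actually listening at that address.

import socket
import ssl

CAFILE = "pycacert.pem"   # placeholder: path to the CA certificate

context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_REQUIRED   # peer must present a verifiable certificate
context.check_hostname = True             # and it must name the host we asked for
context.load_verify_locations(CAFILE)

# Connecting by a name the certificate actually carries succeeds; connecting
# by an address it does not carry (for example 127.0.0.1 when only "localhost"
# is in the SAN) raises ssl.CertificateError, which is what the test asserts.
raw = socket.create_connection(("localhost", 21))
tls = context.wrap_socket(raw, server_hostname="localhost")
tls.close()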
import httplib
import itertools
import array
import httplib
import StringIO
import socket
import errno
import os
import tempfile
import unittest
TestCase = unittest.TestCase
from test import test_support
here = os.path.dirname(__file__)
# Self-signed cert file for 'localhost'
CERT_localhost = os.path.join(here, 'keycert.pem')
# Self-signed cert file for 'fakehostname'
CERT_fakehostname = os.path.join(here, 'keycert2.pem')
# Self-signed cert file for self-signed.pythontest.net
CERT_selfsigned_pythontestdotnet = os.path.join(here, 'selfsigned_pythontestdotnet.pem')
HOST = test_support.HOST
class FakeSocket:
......@@ -17,6 +27,7 @@ class FakeSocket:
self.text = text
self.fileclass = fileclass
self.data = ''
self.file_closed = False
self.host = host
self.port = port
......@@ -26,7 +37,13 @@ class FakeSocket:
def makefile(self, mode, bufsize=None):
if mode != 'r' and mode != 'rb':
raise httplib.UnimplementedFileMode()
return self.fileclass(self.text)
# keep the file around so we can check how much was read from it
self.file = self.fileclass(self.text)
self.file.close = self.file_close  # nerf close()
return self.file
def file_close(self):
self.file_closed = True
def close(self):
pass
......@@ -107,21 +124,59 @@ class HeaderTests(TestCase):
self.content_length = kv[1].strip()
list.append(self, item)
# POST with empty body
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn._buffer = ContentLengthChecker()
conn.request('POST', '/', '')
self.assertEqual(conn._buffer.content_length, '0',
'Header Content-Length not set')
# PUT request with empty body
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn._buffer = ContentLengthChecker()
conn.request('PUT', '/', '')
self.assertEqual(conn._buffer.content_length, '0',
'Header Content-Length not set')
# Here, we're testing that methods expecting a body get a
# content-length set to zero if the body is empty (either None or '')
bodies = (None, '')
methods_with_body = ('PUT', 'POST', 'PATCH')
for method, body in itertools.product(methods_with_body, bodies):
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn._buffer = ContentLengthChecker()
conn.request(method, '/', body)
self.assertEqual(
conn._buffer.content_length, '0',
'Header Content-Length incorrect on {}'.format(method)
)
# For these methods, we make sure that content-length is not set when
# the body is None because it might cause unexpected behaviour on the
# server.
methods_without_body = (
'GET', 'CONNECT', 'DELETE', 'HEAD', 'OPTIONS', 'TRACE',
)
for method in methods_without_body:
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn._buffer = ContentLengthChecker()
conn.request(method, '/', None)
self.assertEqual(
conn._buffer.content_length, None,
'Header Content-Length set for empty body on {}'.format(method)
)
# If the body is set to '', that's considered to be "present but
# empty" rather than "missing", so content length would be set, even
# for methods that don't expect a body.
for method in methods_without_body:
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn._buffer = ContentLengthChecker()
conn.request(method, '/', '')
self.assertEqual(
conn._buffer.content_length, '0',
'Header Content-Length incorrect on {}'.format(method)
)
# If the body is set, make sure Content-Length is set.
for method in itertools.chain(methods_without_body, methods_with_body):
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn._buffer = ContentLengthChecker()
conn.request(method, '/', ' ')
self.assertEqual(
conn._buffer.content_length, '1',
'Header Content-Length incorrect on {}'.format(method)
)
def test_putheader(self):
conn = httplib.HTTPConnection('example.com')
......@@ -130,9 +185,36 @@ class HeaderTests(TestCase):
conn.putheader('Content-length',42)
self.assertIn('Content-length: 42', conn._buffer)
conn.putheader('Foo', ' bar ')
self.assertIn(b'Foo: bar ', conn._buffer)
conn.putheader('Bar', '\tbaz\t')
self.assertIn(b'Bar: \tbaz\t', conn._buffer)
conn.putheader('Authorization', 'Bearer mytoken')
self.assertIn(b'Authorization: Bearer mytoken', conn._buffer)
conn.putheader('IterHeader', 'IterA', 'IterB')
self.assertIn(b'IterHeader: IterA\r\n\tIterB', conn._buffer)
conn.putheader('LatinHeader', b'\xFF')
self.assertIn(b'LatinHeader: \xFF', conn._buffer)
conn.putheader('Utf8Header', b'\xc3\x80')
self.assertIn(b'Utf8Header: \xc3\x80', conn._buffer)
conn.putheader('C1-Control', b'next\x85line')
self.assertIn(b'C1-Control: next\x85line', conn._buffer)
conn.putheader('Embedded-Fold-Space', 'is\r\n allowed')
self.assertIn(b'Embedded-Fold-Space: is\r\n allowed', conn._buffer)
conn.putheader('Embedded-Fold-Tab', 'is\r\n\tallowed')
self.assertIn(b'Embedded-Fold-Tab: is\r\n\tallowed', conn._buffer)
conn.putheader('Key Space', 'value')
self.assertIn(b'Key Space: value', conn._buffer)
conn.putheader('KeySpace ', 'value')
self.assertIn(b'KeySpace : value', conn._buffer)
conn.putheader(b'Nonbreak\xa0Space', 'value')
self.assertIn(b'Nonbreak\xa0Space: value', conn._buffer)
conn.putheader(b'\xa0NonbreakSpace', 'value')
self.assertIn(b'\xa0NonbreakSpace: value', conn._buffer)
def test_ipv6host_header(self):
# Default host header on IPv6 transaction should wrapped by [] if
# its actual IPv6 address
# Default host header on IPv6 transaction should be wrapped by [] if
# it is an IPv6 address
expected = 'GET /foo HTTP/1.1\r\nHost: [2001::]:81\r\n' \
'Accept-Encoding: identity\r\n\r\n'
conn = httplib.HTTPConnection('[2001::]:81')
......@@ -149,6 +231,159 @@ class HeaderTests(TestCase):
conn.request('GET', '/foo')
self.assertTrue(sock.data.startswith(expected))
def test_malformed_headers_coped_with(self):
# Issue 19996
body = "HTTP/1.1 200 OK\r\nFirst: val\r\n: nval\r\nSecond: val\r\n\r\n"
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.getheader('First'), 'val')
self.assertEqual(resp.getheader('Second'), 'val')
def test_malformed_truncation(self):
# Other malformed header lines, especially without colons, used to
# cause the rest of the header section to be truncated
resp = (
b'HTTP/1.1 200 OK\r\n'
b'Public-Key-Pins: \n'
b'pin-sha256="xxx=";\n'
b'report-uri="https://..."\r\n'
b'Transfer-Encoding: chunked\r\n'
b'\r\n'
b'4\r\nbody\r\n0\r\n\r\n'
)
resp = httplib.HTTPResponse(FakeSocket(resp))
resp.begin()
self.assertIsNotNone(resp.getheader('Public-Key-Pins'))
self.assertEqual(resp.getheader('Transfer-Encoding'), 'chunked')
self.assertEqual(resp.read(), b'body')
def test_blank_line_forms(self):
# Test that both CRLF and LF blank lines can terminate the header
# section and start the body
for blank in (b'\r\n', b'\n'):
resp = b'HTTP/1.1 200 OK\r\n' b'Transfer-Encoding: chunked\r\n'
resp += blank
resp += b'4\r\nbody\r\n0\r\n\r\n'
resp = httplib.HTTPResponse(FakeSocket(resp))
resp.begin()
self.assertEqual(resp.getheader('Transfer-Encoding'), 'chunked')
self.assertEqual(resp.read(), b'body')
resp = b'HTTP/1.0 200 OK\r\n' + blank + b'body'
resp = httplib.HTTPResponse(FakeSocket(resp))
resp.begin()
self.assertEqual(resp.read(), b'body')
# A blank line ending in CR is not treated as the end of the HTTP
# header section, therefore header fields following it should be
# parsed if possible
resp = (
b'HTTP/1.1 200 OK\r\n'
b'\r'
b'Name: value\r\n'
b'Transfer-Encoding: chunked\r\n'
b'\r\n'
b'4\r\nbody\r\n0\r\n\r\n'
)
resp = httplib.HTTPResponse(FakeSocket(resp))
resp.begin()
self.assertEqual(resp.getheader('Transfer-Encoding'), 'chunked')
self.assertEqual(resp.read(), b'body')
# No header fields nor blank line
resp = b'HTTP/1.0 200 OK\r\n'
resp = httplib.HTTPResponse(FakeSocket(resp))
resp.begin()
self.assertEqual(resp.read(), b'')
def test_from_line(self):
# The parser handles "From" lines specially, so test that this does not
# affect parsing the rest of the header section
resp = (
b'HTTP/1.1 200 OK\r\n'
b'From start\r\n'
b' continued\r\n'
b'Name: value\r\n'
b'From middle\r\n'
b' continued\r\n'
b'Transfer-Encoding: chunked\r\n'
b'From end\r\n'
b'\r\n'
b'4\r\nbody\r\n0\r\n\r\n'
)
resp = httplib.HTTPResponse(FakeSocket(resp))
resp.begin()
self.assertIsNotNone(resp.getheader('Name'))
self.assertEqual(resp.getheader('Transfer-Encoding'), 'chunked')
self.assertEqual(resp.read(), b'body')
resp = (
b'HTTP/1.0 200 OK\r\n'
b'From alone\r\n'
b'\r\n'
b'body'
)
resp = httplib.HTTPResponse(FakeSocket(resp))
resp.begin()
self.assertEqual(resp.read(), b'body')
def test_parse_all_octets(self):
# Ensure no valid header field octet breaks the parser
body = (
b'HTTP/1.1 200 OK\r\n'
b"!#$%&'*+-.^_`|~: value\r\n" # Special token characters
b'VCHAR: ' + bytearray(range(0x21, 0x7E + 1)) + b'\r\n'
b'obs-text: ' + bytearray(range(0x80, 0xFF + 1)) + b'\r\n'
b'obs-fold: text\r\n'
b' folded with space\r\n'
b'\tfolded with tab\r\n'
b'Content-Length: 0\r\n'
b'\r\n'
)
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.getheader('Content-Length'), '0')
self.assertEqual(resp.getheader("!#$%&'*+-.^_`|~"), 'value')
vchar = ''.join(map(chr, range(0x21, 0x7E + 1)))
self.assertEqual(resp.getheader('VCHAR'), vchar)
self.assertIsNotNone(resp.getheader('obs-text'))
folded = resp.getheader('obs-fold')
self.assertTrue(folded.startswith('text'))
self.assertIn(' folded with space', folded)
self.assertTrue(folded.endswith('folded with tab'))
def test_invalid_headers(self):
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket('')
conn.putrequest('GET', '/')
# http://tools.ietf.org/html/rfc7230#section-3.2.4, whitespace is no
# longer allowed in header names
cases = (
(b'Invalid\r\nName', b'ValidValue'),
(b'Invalid\rName', b'ValidValue'),
(b'Invalid\nName', b'ValidValue'),
(b'\r\nInvalidName', b'ValidValue'),
(b'\rInvalidName', b'ValidValue'),
(b'\nInvalidName', b'ValidValue'),
(b' InvalidName', b'ValidValue'),
(b'\tInvalidName', b'ValidValue'),
(b'Invalid:Name', b'ValidValue'),
(b':InvalidName', b'ValidValue'),
(b'ValidName', b'Invalid\r\nValue'),
(b'ValidName', b'Invalid\rValue'),
(b'ValidName', b'Invalid\nValue'),
(b'ValidName', b'InvalidValue\r\n'),
(b'ValidName', b'InvalidValue\r'),
(b'ValidName', b'InvalidValue\n'),
)
for name, value in cases:
with self.assertRaisesRegexp(ValueError, 'Invalid header'):
conn.putheader(name, value)
class BasicTest(TestCase):
def test_status_lines(self):
......@@ -262,6 +497,13 @@ class BasicTest(TestCase):
if resp.read() != "":
self.fail("Did not expect response from HEAD request")
def test_too_many_headers(self):
headers = '\r\n'.join('Header%d: foo' % i for i in xrange(200)) + '\r\n'
text = ('HTTP/1.1 200 OK\r\n' + headers)
s = FakeSocket(text)
r = httplib.HTTPResponse(s)
self.assertRaises(httplib.HTTPException, r.begin)
def test_send_file(self):
expected = 'GET /foo HTTP/1.1\r\nHost: example.com\r\n' \
'Accept-Encoding: identity\r\nContent-Length:'
......@@ -272,6 +514,22 @@ class BasicTest(TestCase):
conn.sock = sock
conn.request('GET', '/foo', body)
self.assertTrue(sock.data.startswith(expected))
self.assertIn('def test_send_file', sock.data)
def test_send_tempfile(self):
expected = ('GET /foo HTTP/1.1\r\nHost: example.com\r\n'
'Accept-Encoding: identity\r\nContent-Length: 9\r\n\r\n'
'fake\ndata')
with tempfile.TemporaryFile() as body:
body.write('fake\ndata')
body.seek(0)
conn = httplib.HTTPConnection('example.com')
sock = FakeSocket(body)
conn.sock = sock
conn.request('GET', '/foo', body)
self.assertEqual(sock.data, expected)
def test_send(self):
expected = 'this is a test this is only a test'
......@@ -381,7 +639,7 @@ class BasicTest(TestCase):
self.assertTrue(hasattr(resp,'fileno'),
'HTTPResponse should expose a fileno attribute')
# Test lines overflowing the max line size (_MAXLINE in http.client)
# Test lines overflowing the max line size (_MAXLINE in httplib)
def test_overflowing_status_line(self):
self.skipTest("disabled for HTTP 0.9 support")
......@@ -418,12 +676,42 @@ class BasicTest(TestCase):
self.assertEqual(resp.read(), '')
self.assertTrue(resp.isclosed())
def test_error_leak(self):
# Test that the socket is not leaked if getresponse() fails
conn = httplib.HTTPConnection('example.com')
response = []
class Response(httplib.HTTPResponse):
def __init__(self, *pos, **kw):
response.append(self) # Avoid garbage collector closing the socket
httplib.HTTPResponse.__init__(self, *pos, **kw)
conn.response_class = Response
conn.sock = FakeSocket('') # Emulate server dropping connection
conn.request('GET', '/')
self.assertRaises(httplib.BadStatusLine, conn.getresponse)
self.assertTrue(response)
#self.assertTrue(response[0].closed)
self.assertTrue(conn.sock.file_closed)
def test_proxy_tunnel_without_status_line(self):
# Issue 17849: If a proxy tunnel is created that does not return
# a status code, fail.
body = 'hello world'
conn = httplib.HTTPConnection('example.com', strict=False)
conn.set_tunnel('foo')
conn.sock = FakeSocket(body)
with self.assertRaisesRegexp(socket.error, "Invalid response"):
conn._tunnel()
class OfflineTest(TestCase):
def test_responses(self):
self.assertEqual(httplib.responses[httplib.NOT_FOUND], "Not Found")
class SourceAddressTest(TestCase):
class TestServerMixin:
"""A limited socket server mixin.
This is used by test cases for testing http connection end points.
"""
def setUp(self):
self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.port = test_support.bind_port(self.serv)
......@@ -438,6 +726,7 @@ class SourceAddressTest(TestCase):
self.serv.close()
self.serv = None
class SourceAddressTest(TestServerMixin, TestCase):
def testHTTPConnectionSourceAddress(self):
self.conn = httplib.HTTPConnection(HOST, self.port,
source_address=('', self.source_port))
......@@ -449,11 +738,29 @@ class SourceAddressTest(TestCase):
def testHTTPSConnectionSourceAddress(self):
self.conn = httplib.HTTPSConnection(HOST, self.port,
source_address=('', self.source_port))
# We don't test anything here other the constructor not barfing as
# We don't test anything here other than the constructor not barfing as
# this code doesn't deal with setting up an active running SSL server
# for an ssl_wrapped connect() to actually return from.
class HTTPTest(TestServerMixin, TestCase):
def testHTTPConnection(self):
self.conn = httplib.HTTP(host=HOST, port=self.port, strict=None)
self.conn.connect()
self.assertEqual(self.conn._conn.host, HOST)
self.assertEqual(self.conn._conn.port, self.port)
def testHTTPWithConnectHostPort(self):
testhost = 'unreachable.test.domain'
testport = '80'
self.conn = httplib.HTTP(host=testhost, port=testport)
self.conn.connect(host=HOST, port=self.port)
self.assertNotEqual(self.conn._conn.host, testhost)
self.assertNotEqual(self.conn._conn.port, testport)
self.assertEqual(self.conn._conn.host, HOST)
self.assertEqual(self.conn._conn.port, self.port)
class TimeoutTest(TestCase):
PORT = None
......@@ -500,35 +807,138 @@ class TimeoutTest(TestCase):
httpConn.close()
class HTTPSTimeoutTest(TestCase):
# XXX Here should be tests for HTTPS, there isn't any right now!
class HTTPSTest(TestCase):
def setUp(self):
if not hasattr(httplib, 'HTTPSConnection'):
self.skipTest('ssl support required')
def make_server(self, certfile):
from test.ssl_servers import make_https_server
return make_https_server(self, certfile=certfile)
def test_attributes(self):
# simple test to check it's storing it
if hasattr(httplib, 'HTTPSConnection'):
h = httplib.HTTPSConnection(HOST, TimeoutTest.PORT, timeout=30)
self.assertEqual(h.timeout, 30)
# simple test to check it's storing the timeout
h = httplib.HTTPSConnection(HOST, TimeoutTest.PORT, timeout=30)
self.assertEqual(h.timeout, 30)
def test_networked(self):
# Default settings: requires a valid cert from a trusted CA
import ssl
test_support.requires('network')
with test_support.transient_internet('self-signed.pythontest.net'):
h = httplib.HTTPSConnection('self-signed.pythontest.net', 443)
with self.assertRaises(ssl.SSLError) as exc_info:
h.request('GET', '/')
self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED')
def test_networked_noverification(self):
# Switch off cert verification
import ssl
test_support.requires('network')
with test_support.transient_internet('self-signed.pythontest.net'):
context = ssl._create_stdlib_context()
h = httplib.HTTPSConnection('self-signed.pythontest.net', 443,
context=context)
h.request('GET', '/')
resp = h.getresponse()
self.assertIn('nginx', resp.getheader('server'))
@test_support.system_must_validate_cert
def test_networked_trusted_by_default_cert(self):
# Default settings: requires a valid cert from a trusted CA
test_support.requires('network')
with test_support.transient_internet('www.python.org'):
h = httplib.HTTPSConnection('www.python.org', 443)
h.request('GET', '/')
resp = h.getresponse()
content_type = resp.getheader('content-type')
self.assertIn('text/html', content_type)
def test_networked_good_cert(self):
# We feed the server's cert as a validating cert
import ssl
test_support.requires('network')
with test_support.transient_internet('self-signed.pythontest.net'):
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERT_selfsigned_pythontestdotnet)
h = httplib.HTTPSConnection('self-signed.pythontest.net', 443, context=context)
h.request('GET', '/')
resp = h.getresponse()
server_string = resp.getheader('server')
self.assertIn('nginx', server_string)
def test_networked_bad_cert(self):
# We feed a "CA" cert that is unrelated to the server's cert
import ssl
test_support.requires('network')
with test_support.transient_internet('self-signed.pythontest.net'):
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERT_localhost)
h = httplib.HTTPSConnection('self-signed.pythontest.net', 443, context=context)
with self.assertRaises(ssl.SSLError) as exc_info:
h.request('GET', '/')
self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED')
def test_local_unknown_cert(self):
# The custom cert isn't known to the default trust bundle
import ssl
server = self.make_server(CERT_localhost)
h = httplib.HTTPSConnection('localhost', server.port)
with self.assertRaises(ssl.SSLError) as exc_info:
h.request('GET', '/')
self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED')
def test_local_good_hostname(self):
# The (valid) cert validates the HTTP hostname
import ssl
server = self.make_server(CERT_localhost)
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERT_localhost)
h = httplib.HTTPSConnection('localhost', server.port, context=context)
h.request('GET', '/nonexistent')
resp = h.getresponse()
self.assertEqual(resp.status, 404)
def test_local_bad_hostname(self):
# The (valid) cert doesn't validate the HTTP hostname
import ssl
server = self.make_server(CERT_fakehostname)
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.check_hostname = True
context.load_verify_locations(CERT_fakehostname)
h = httplib.HTTPSConnection('localhost', server.port, context=context)
with self.assertRaises(ssl.CertificateError):
h.request('GET', '/')
h.close()
# With context.check_hostname=False, the mismatch is ignored
context.check_hostname = False
h = httplib.HTTPSConnection('localhost', server.port, context=context)
h.request('GET', '/nonexistent')
resp = h.getresponse()
self.assertEqual(resp.status, 404)
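# Illustrative sketch (an assumption, not part of the vendored test): hostname
# verification is the check ssl.match_hostname() performs against the
# certificate's subjectAltName/commonName, and context.check_hostname = False
# (used just above) skips exactly that step. The cert dict here is a made-up
# minimal example; ssl.match_hostname requires Python 2.7.9+.
import ssl
_cert = {'subject': ((('commonName', 'fakehostname'),),)}
try:
    ssl.match_hostname(_cert, 'localhost')
except ssl.CertificateError:
    pass  # mismatch detected, mirroring the CertificateError asserted above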
@unittest.skipIf(not hasattr(httplib, 'HTTPS'), 'httplib.HTTPS not available')
def test_host_port(self):
# Check invalid host_port
# Note that httplib does not accept user:password@ in the host-port.
for hp in ("www.python.org:abc", "user:password@www.python.org"):
self.assertRaises(httplib.InvalidURL, httplib.HTTP, hp)
self.assertRaises(httplib.InvalidURL, httplib.HTTPSConnection, hp)
for hp, h, p in (("[fe80::207:e9ff:fe9b]:8000", "fe80::207:e9ff:fe9b",
8000),
("pypi.python.org:443", "pypi.python.org", 443),
("pypi.python.org", "pypi.python.org", 443),
("pypi.python.org:", "pypi.python.org", 443),
("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 443)):
http = httplib.HTTPS(hp)
c = http._conn
if h != c.host:
self.fail("Host incorrectly parsed: %s != %s" % (h, c.host))
if p != c.port:
self.fail("Port incorrectly parsed: %s != %s" % (p, c.host))
for hp, h, p in (("[fe80::207:e9ff:fe9b]:8000",
"fe80::207:e9ff:fe9b", 8000),
("www.python.org:443", "www.python.org", 443),
("www.python.org:", "www.python.org", 443),
("www.python.org", "www.python.org", 443),
("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 443),
("[fe80::207:e9ff:fe9b]:", "fe80::207:e9ff:fe9b",
443)):
c = httplib.HTTPSConnection(hp)
self.assertEqual(h, c.host)
self.assertEqual(p, c.port)
class TunnelTests(TestCase):
......@@ -556,10 +966,12 @@ class TunnelTests(TestCase):
self.assertEqual(conn.sock.host, 'proxy.com')
self.assertEqual(conn.sock.port, 80)
self.assertTrue('CONNECT destination.com' in conn.sock.data)
self.assertTrue('Host: destination.com' in conn.sock.data)
self.assertIn('CONNECT destination.com', conn.sock.data)
# issue22095
self.assertNotIn('Host: destination.com:None', conn.sock.data)
self.assertIn('Host: destination.com', conn.sock.data)
self.assertTrue('Host: proxy.com' not in conn.sock.data)
self.assertNotIn('Host: proxy.com', conn.sock.data)
conn.close()
......@@ -570,9 +982,11 @@ class TunnelTests(TestCase):
self.assertTrue('Host: destination.com' in conn.sock.data)
@test_support.reap_threads
def test_main(verbose=None):
test_support.run_unittest(HeaderTests, OfflineTest, BasicTest, TimeoutTest,
HTTPSTimeoutTest, SourceAddressTest, TunnelTests)
HTTPTest, HTTPSTest, SourceAddressTest,
TunnelTests)
if __name__ == '__main__':
test_main()
......@@ -8,6 +8,7 @@ import os
import sys
import re
import base64
import ntpath
import shutil
import urllib
import httplib
......@@ -177,6 +178,12 @@ class BaseHTTPServerTestCase(BaseTestCase):
self.send_header('Connection', 'close')
self.end_headers()
def do_SEND_ERROR(self):
self.send_error(int(self.path[1:]))
def do_HEAD(self):
self.send_error(int(self.path[1:]))
def setUp(self):
BaseTestCase.setUp(self)
self.con = httplib.HTTPConnection('localhost', self.PORT)
......@@ -275,6 +282,38 @@ class BaseHTTPServerTestCase(BaseTestCase):
res = self.con.getresponse()
self.assertEqual(res.status, 999)
def test_send_error(self):
allow_transfer_encoding_codes = (205, 304)
for code in (101, 102, 204, 205, 304):
self.con.request('SEND_ERROR', '/{}'.format(code))
res = self.con.getresponse()
self.assertEqual(code, res.status)
self.assertEqual(None, res.getheader('Content-Length'))
self.assertEqual(None, res.getheader('Content-Type'))
if code not in allow_transfer_encoding_codes:
self.assertEqual(None, res.getheader('Transfer-Encoding'))
data = res.read()
self.assertEqual(b'', data)
def test_head_via_send_error(self):
allow_transfer_encoding_codes = (205, 304)
for code in (101, 200, 204, 205, 304):
self.con.request('HEAD', '/{}'.format(code))
res = self.con.getresponse()
self.assertEqual(code, res.status)
if code == 200:
self.assertEqual(None, res.getheader('Content-Length'))
self.assertIn('text/html', res.getheader('Content-Type'))
else:
self.assertEqual(None, res.getheader('Content-Length'))
self.assertEqual(None, res.getheader('Content-Type'))
if code not in allow_transfer_encoding_codes:
self.assertEqual(None, res.getheader('Transfer-Encoding'))
data = res.read()
self.assertEqual(b'', data)
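# Illustrative sketch (an assumption, not part of the vendored test): the reason
# send_error() must drop Content-Length/Content-Type and send no body for these
# codes is that HTTP forbids a message body for 1xx, 204 and 304 responses.
# body_allowed() is a hypothetical helper mirroring that rule.
def body_allowed(status):
    return not (100 <= status < 200 or status in (204, 304))

assert not body_allowed(101)
assert not body_allowed(204)
assert not body_allowed(304)
assert body_allowed(200)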
class SimpleHTTPServerTestCase(BaseTestCase):
class request_handler(NoLogRequestHandler, SimpleHTTPRequestHandler):
......@@ -288,6 +327,7 @@ class SimpleHTTPServerTestCase(BaseTestCase):
self.data = 'We are the knights who say Ni!'
self.tempdir = tempfile.mkdtemp(dir=basetempdir)
self.tempdir_name = os.path.basename(self.tempdir)
self.base_url = '/' + self.tempdir_name
temp = open(os.path.join(self.tempdir, 'test'), 'wb')
temp.write(self.data)
temp.close()
......@@ -312,33 +352,39 @@ class SimpleHTTPServerTestCase(BaseTestCase):
def test_get(self):
# constructs the path relative to the root directory of the HTTPServer
response = self.request(self.tempdir_name + '/test')
response = self.request(self.base_url + '/test')
self.check_status_and_reason(response, 200, data=self.data)
# check for trailing "/" which should return 404. See Issue17324
response = self.request(self.tempdir_name + '/test/')
response = self.request(self.base_url + '/test/')
self.check_status_and_reason(response, 404)
response = self.request(self.tempdir_name + '/')
response = self.request(self.base_url + '/')
self.check_status_and_reason(response, 200)
response = self.request(self.tempdir_name)
response = self.request(self.base_url)
self.check_status_and_reason(response, 301)
response = self.request(self.base_url + '/?hi=2')
self.check_status_and_reason(response, 200)
response = self.request(self.base_url + '?hi=1')
self.check_status_and_reason(response, 301)
self.assertEqual(response.getheader("Location"),
self.base_url + "/?hi=1")
response = self.request('/ThisDoesNotExist')
self.check_status_and_reason(response, 404)
response = self.request('/' + 'ThisDoesNotExist' + '/')
self.check_status_and_reason(response, 404)
with open(os.path.join(self.tempdir_name, 'index.html'), 'w') as fp:
response = self.request('/' + self.tempdir_name + '/')
response = self.request(self.base_url + '/')
self.check_status_and_reason(response, 200)
# chmod() doesn't work as expected on Windows, and filesystem
# permissions are ignored by root on Unix.
if os.name == 'posix' and os.geteuid() != 0:
os.chmod(self.tempdir, 0)
response = self.request(self.tempdir_name + '/')
response = self.request(self.base_url + '/')
self.check_status_and_reason(response, 404)
os.chmod(self.tempdir, 0755)
def test_head(self):
response = self.request(
self.tempdir_name + '/test', method='HEAD')
self.base_url + '/test', method='HEAD')
self.check_status_and_reason(response, 200)
self.assertEqual(response.getheader('content-length'),
str(len(self.data)))
......@@ -349,11 +395,27 @@ class SimpleHTTPServerTestCase(BaseTestCase):
response = self.request('/', method='FOO')
self.check_status_and_reason(response, 501)
# requests must be case sensitive, so this should fail too
response = self.request('/', method='get')
response = self.request('/', method='custom')
self.check_status_and_reason(response, 501)
response = self.request('/', method='GETs')
self.check_status_and_reason(response, 501)
def test_path_without_leading_slash(self):
response = self.request(self.tempdir_name + '/test')
self.check_status_and_reason(response, 200, data=self.data)
response = self.request(self.tempdir_name + '/test/')
self.check_status_and_reason(response, 404)
response = self.request(self.tempdir_name + '/')
self.check_status_and_reason(response, 200)
response = self.request(self.tempdir_name)
self.check_status_and_reason(response, 301)
response = self.request(self.tempdir_name + '/?hi=2')
self.check_status_and_reason(response, 200)
response = self.request(self.tempdir_name + '?hi=1')
self.check_status_and_reason(response, 301)
self.assertEqual(response.getheader("Location"),
self.tempdir_name + "/?hi=1")
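# Illustrative sketch (an assumption about the behaviour exercised above, not the
# handler's actual code): a directory requested without a trailing slash gets a
# 301, and the fix checked here builds the Location target with urlparse so the
# query string survives the added '/'. redirect_location() is a hypothetical
# mirror of that computation.
import urlparse

def redirect_location(path):
    scheme, netloc, urlpath, query, fragment = urlparse.urlsplit(path)
    return urlparse.urlunsplit((scheme, netloc, urlpath + '/', query, fragment))

assert redirect_location('/some-temp-dir?hi=1') == '/some-temp-dir/?hi=1'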
cgi_file1 = """\
#!%s
......@@ -375,6 +437,16 @@ print "%%s, %%s, %%s" %% (form.getfirst("spam"), form.getfirst("eggs"),
form.getfirst("bacon"))
"""
cgi_file4 = """\
#!%s
import os
print("Content-type: text/html")
print("")
print(os.environ["%s"])
"""
@unittest.skipIf(hasattr(os, 'geteuid') and os.geteuid() == 0,
"This test can't be run reliably as root (issue #13308).")
......@@ -386,7 +458,9 @@ class CGIHTTPServerTestCase(BaseTestCase):
BaseTestCase.setUp(self)
self.parent_dir = tempfile.mkdtemp()
self.cgi_dir = os.path.join(self.parent_dir, 'cgi-bin')
self.cgi_child_dir = os.path.join(self.cgi_dir, 'child-dir')
os.mkdir(self.cgi_dir)
os.mkdir(self.cgi_child_dir)
# The shebang line should be pure ASCII: use symlink if possible.
# See issue #7668.
......@@ -411,6 +485,16 @@ class CGIHTTPServerTestCase(BaseTestCase):
file2.write(cgi_file2 % self.pythonexe)
os.chmod(self.file2_path, 0777)
self.file3_path = os.path.join(self.cgi_child_dir, 'file3.py')
with open(self.file3_path, 'w') as file3:
file3.write(cgi_file1 % self.pythonexe)
os.chmod(self.file3_path, 0777)
self.file4_path = os.path.join(self.cgi_dir, 'file4.py')
with open(self.file4_path, 'w') as file4:
file4.write(cgi_file4 % (self.pythonexe, 'QUERY_STRING'))
os.chmod(self.file4_path, 0o777)
self.cwd = os.getcwd()
os.chdir(self.parent_dir)
......@@ -422,6 +506,9 @@ class CGIHTTPServerTestCase(BaseTestCase):
os.remove(self.nocgi_path)
os.remove(self.file1_path)
os.remove(self.file2_path)
os.remove(self.file3_path)
os.remove(self.file4_path)
os.rmdir(self.cgi_child_dir)
os.rmdir(self.cgi_dir)
os.rmdir(self.parent_dir)
finally:
......@@ -516,6 +603,24 @@ class CGIHTTPServerTestCase(BaseTestCase):
self.assertEqual((b'Hello World\n', 'text/html', 200),
(res.read(), res.getheader('Content-type'), res.status))
def test_nested_cgi_path_issue21323(self):
res = self.request('/cgi-bin/child-dir/file3.py')
self.assertEqual((b'Hello World\n', 'text/html', 200),
(res.read(), res.getheader('Content-type'), res.status))
def test_query_with_multiple_question_mark(self):
res = self.request('/cgi-bin/file4.py?a=b?c=d')
self.assertEqual(
(b'a=b?c=d\n', 'text/html', 200),
(res.read(), res.getheader('Content-type'), res.status))
def test_query_with_continuous_slashes(self):
res = self.request('/cgi-bin/file4.py?k=aa%2F%2Fbb&//q//p//=//a//b//')
self.assertEqual(
(b'k=aa%2F%2Fbb&//q//p//=//a//b//\n',
'text/html', 200),
(res.read(), res.getheader('Content-type'), res.status))
class SimpleHTTPRequestHandlerTestCase(unittest.TestCase):
""" Test url parsing """
......@@ -538,6 +643,25 @@ class SimpleHTTPRequestHandlerTestCase(unittest.TestCase):
path = self.handler.translate_path('//filename?foo=bar')
self.assertEqual(path, self.translated)
def test_windows_colon(self):
import SimpleHTTPServer
with test_support.swap_attr(SimpleHTTPServer.os, 'path', ntpath):
path = self.handler.translate_path('c:c:c:foo/filename')
path = path.replace(ntpath.sep, os.sep)
self.assertEqual(path, self.translated)
path = self.handler.translate_path('\\c:../filename')
path = path.replace(ntpath.sep, os.sep)
self.assertEqual(path, self.translated)
path = self.handler.translate_path('c:\\c:..\\foo/filename')
path = path.replace(ntpath.sep, os.sep)
self.assertEqual(path, self.translated)
path = self.handler.translate_path('c:c:foo\\c:c:bar/filename')
path = path.replace(ntpath.sep, os.sep)
self.assertEqual(path, self.translated)
def test_main(verbose=None):
try:
......
......@@ -29,9 +29,9 @@ class SelectTestCase(unittest.TestCase):
self.assertIsNot(w, x)
def test_select(self):
cmd = 'for i in 0 1 2 3 4 5 6 7 8 9; do echo testing...; sleep 0.1; done'
cmd = 'for i in 0 1 2 3 4 5 6 7 8 9; do echo testing...; sleep 1; done'
p = os.popen(cmd, 'r')
for tout in (0, 0.1, 0.2, 0.4, 0.8, 1.6) + (None,)*10:
for tout in (0, 1, 2, 4, 8, 16) + (None,)*10:
if test_support.verbose:
print 'timeout =', tout
rfd, wfd, xfd = select.select([p], [], [], tout)
......
......@@ -179,31 +179,31 @@ class DebuggingServerTests(unittest.TestCase):
def testBasic(self):
# connect
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
smtp.quit()
def testNOOP(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
expected = (250, 'Ok')
self.assertEqual(smtp.noop(), expected)
smtp.quit()
def testRSET(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
expected = (250, 'Ok')
self.assertEqual(smtp.rset(), expected)
smtp.quit()
def testNotImplemented(self):
# EHLO isn't implemented in DebuggingServer
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
expected = (502, 'Error: command "EHLO" not implemented')
self.assertEqual(smtp.ehlo(), expected)
smtp.quit()
def testVRFY(self):
# VRFY isn't implemented in DebuggingServer
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
expected = (502, 'Error: command "VRFY" not implemented')
self.assertEqual(smtp.vrfy('nobody@nowhere.com'), expected)
self.assertEqual(smtp.verify('nobody@nowhere.com'), expected)
......@@ -212,21 +212,21 @@ class DebuggingServerTests(unittest.TestCase):
def testSecondHELO(self):
# check that a second HELO returns a message that it's a duplicate
# (this behavior is specific to smtpd.SMTPChannel)
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
smtp.helo()
expected = (503, 'Duplicate HELO/EHLO')
self.assertEqual(smtp.helo(), expected)
smtp.quit()
def testHELP(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
self.assertEqual(smtp.help(), 'Error: command "HELP" not implemented')
smtp.quit()
def testSend(self):
# connect and send mail
m = 'A test message'
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
smtp.sendmail('John', 'Sally', m)
# XXX(nnorwitz): this test is flaky and dies with a bad file descriptor
# in asyncore. This sleep might help, but should really be fixed
......@@ -292,6 +292,33 @@ class BadHELOServerTests(unittest.TestCase):
HOST, self.port, 'localhost', 3)
@unittest.skipUnless(threading, 'Threading required for this test.')
class TooLongLineTests(unittest.TestCase):
respdata = '250 OK' + ('.' * smtplib._MAXLINE * 2) + '\n'
def setUp(self):
self.old_stdout = sys.stdout
self.output = StringIO.StringIO()
sys.stdout = self.output
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(15)
self.port = test_support.bind_port(self.sock)
servargs = (self.evt, self.respdata, self.sock)
threading.Thread(target=server, args=servargs).start()
self.evt.wait()
self.evt.clear()
def tearDown(self):
self.evt.wait()
sys.stdout = self.old_stdout
def testLineTooLong(self):
self.assertRaises(smtplib.SMTPResponseException, smtplib.SMTP,
HOST, self.port, 'localhost', 3)
sim_users = {'Mr.A@somewhere.com':'John A',
'Ms.B@somewhere.com':'Sally B',
'Mrs.C@somewhereesle.com':'Ruth C',
......@@ -507,11 +534,27 @@ class SMTPSimTests(unittest.TestCase):
#TODO: add tests for correct AUTH method fallback now that the
#test infrastructure can support it.
def test_quit_resets_greeting(self):
smtp = smtplib.SMTP(HOST, self.port,
local_hostname='localhost',
timeout=15)
code, message = smtp.ehlo()
self.assertEqual(code, 250)
self.assertIn('size', smtp.esmtp_features)
smtp.quit()
self.assertNotIn('size', smtp.esmtp_features)
smtp.connect(HOST, self.port)
self.assertNotIn('size', smtp.esmtp_features)
smtp.ehlo_or_helo_if_needed()
self.assertIn('size', smtp.esmtp_features)
smtp.quit()
def test_main(verbose=None):
test_support.run_unittest(GeneralTests, DebuggingServerTests,
NonConnectingTests,
BadHELOServerTests, SMTPSimTests)
BadHELOServerTests, SMTPSimTests,
TooLongLineTests)
if __name__ == '__main__':
test_main()
......@@ -2,6 +2,7 @@ import unittest
from test import test_support
import errno
import itertools
import socket
import select
import time
......@@ -11,9 +12,14 @@ import sys
import os
import array
import contextlib
from weakref import proxy
import signal
import math
import weakref
try:
import _socket
except ImportError:
_socket = None
def try_address(host, port=0, family=socket.AF_INET):
"""Try to bind a socket on the given host:port and return True
......@@ -80,7 +86,7 @@ class ThreadableTest:
clientTearDown ()
Any new test functions within the class must then define
tests in pairs, where the test name is preceeded with a
tests in pairs, where the test name is preceded with a
'_' to indicate the client portion of the test. Ex:
def testFoo(self):
......@@ -243,9 +249,22 @@ class SocketPairTest(unittest.TestCase, ThreadableTest):
class GeneralModuleTests(unittest.TestCase):
@unittest.skipUnless(_socket is not None, 'need _socket module')
def test_csocket_repr(self):
s = _socket.socket(_socket.AF_INET, _socket.SOCK_STREAM)
try:
expected = ('<socket object, fd=%s, family=%s, type=%s, protocol=%s>'
% (s.fileno(), s.family, s.type, s.proto))
self.assertEqual(repr(s), expected)
finally:
s.close()
expected = ('<socket object, fd=-1, family=%s, type=%s, protocol=%s>'
% (s.family, s.type, s.proto))
self.assertEqual(repr(s), expected)
def test_weakref(self):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
p = proxy(s)
p = weakref.proxy(s)
self.assertEqual(p.fileno(), s.fileno())
s.close()
s = None
......@@ -257,6 +276,14 @@ class GeneralModuleTests(unittest.TestCase):
else:
self.fail('Socket proxy still exists')
def test_weakref__sock(self):
s = socket.socket()._sock
w = weakref.ref(s)
self.assertIs(w(), s)
del s
test_support.gc_collect()
self.assertIsNone(w())
def testSocketError(self):
# Testing socket module exceptions
def raise_error(*args, **kwargs):
......@@ -273,7 +300,7 @@ class GeneralModuleTests(unittest.TestCase):
"Error raising socket exception.")
def testSendtoErrors(self):
# Testing that sendto doens't masks failures. See #10169.
# Testing that sendto doesn't mask failures. See #10169.
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.addCleanup(s.close)
s.bind(('', 0))
......@@ -603,17 +630,24 @@ class GeneralModuleTests(unittest.TestCase):
sock.close()
def test_getsockaddrarg(self):
host = '0.0.0.0'
port = self._get_unused_port(bind_address=host)
sock = socket.socket()
self.addCleanup(sock.close)
port = test_support.find_unused_port()
big_port = port + 65536
neg_port = port - 65536
sock = socket.socket()
try:
self.assertRaises((OverflowError, ValueError), sock.bind, (host, big_port))
self.assertRaises((OverflowError, ValueError), sock.bind, (host, neg_port))
sock.bind((host, port))
finally:
sock.close()
self.assertRaises((OverflowError, ValueError), sock.bind, (HOST, big_port))
self.assertRaises((OverflowError, ValueError), sock.bind, (HOST, neg_port))
# Since find_unused_port() is inherently subject to race conditions, we
# call it a couple times if necessary.
for i in itertools.count():
port = test_support.find_unused_port()
try:
sock.bind((HOST, port))
except OSError as e:
if e.errno != errno.EADDRINUSE or i == 5:
raise
else:
break
@unittest.skipUnless(os.name == "nt", "Windows specific")
def test_sock_ioctl(self):
......@@ -677,7 +711,7 @@ class GeneralModuleTests(unittest.TestCase):
pass
def check_sendall_interrupted(self, with_timeout):
# socketpair() is not stricly required, but it makes things easier.
# socketpair() is not strictly required, but it makes things easier.
if not hasattr(signal, 'alarm') or not hasattr(socket, 'socketpair'):
self.skipTest("signal.alarm and socket.socketpair required for this test")
# Our signal handlers clobber the C errno by calling a math function
......@@ -706,7 +740,6 @@ class GeneralModuleTests(unittest.TestCase):
c.close()
s.close()
@unittest.skip("Needs fix in CFFI; hangs forever")
def test_sendall_interrupted(self):
self.check_sendall_interrupted(False)
......@@ -797,7 +830,7 @@ class BasicTCPTest(SocketConnectedTest):
self.serv_conn.sendall(big_chunk)
@unittest.skipUnless(hasattr(socket, 'fromfd'),
'socket.fromfd not availble')
'socket.fromfd not available')
def testFromFd(self):
# Testing fromfd()
fd = self.cli_conn.fileno()
......@@ -1292,15 +1325,11 @@ class NetworkConnectionNoServer(unittest.TestCase):
def mocked_socket_module(self):
"""Return a socket which times out on connect"""
old_socket = socket.socket
import gevent.socket
old_g_socket = gevent.socket.socket
socket.socket = self.MockSocket
gevent.socket.socket = self.MockSocket
try:
yield
finally:
socket.socket = old_socket
gevent.socket.socket = old_g_socket
def test_connect(self):
port = test_support.find_unused_port()
......@@ -1734,7 +1763,7 @@ class TIPCThreadableTest(unittest.TestCase, ThreadableTest):
self.conn, self.connaddr = self.srv.accept()
def clientSetUp(self):
# The is a hittable race between serverExplicitReady() and the
# There is a hittable race between serverExplicitReady() and the
# accept() call; sleep a little while to avoid it, otherwise
# we could get an exception
time.sleep(0.1)
......
......@@ -158,6 +158,8 @@ class SocketServerTest(unittest.TestCase):
if verbose: print "waiting for server"
server.shutdown()
t.join()
server.server_close()
self.assertRaises(socket.error, server.socket.fileno)
if verbose: print "done"
def stream_examine(self, proto, addr):
......@@ -173,6 +175,8 @@ class SocketServerTest(unittest.TestCase):
def dgram_examine(self, proto, addr):
s = socket.socket(proto, socket.SOCK_DGRAM)
if HAVE_UNIX_SOCKETS and proto == socket.AF_UNIX:
s.bind(self.pickaddr(proto))
s.sendto(TEST_STR, addr)
buf = data = receive(s, 100)
while data and '\n' not in buf:
......@@ -267,27 +271,24 @@ class SocketServerTest(unittest.TestCase):
# Make sure select was called again:
self.assertGreater(mock_select.called, 1)
# Alas, on Linux (at least) recvfrom() doesn't return a meaningful
# client address so this cannot work:
# @requires_unix_sockets
# def test_UnixDatagramServer(self):
# self.run_server(SocketServer.UnixDatagramServer,
# SocketServer.DatagramRequestHandler,
# self.dgram_examine)
#
# @requires_unix_sockets
# def test_ThreadingUnixDatagramServer(self):
# self.run_server(SocketServer.ThreadingUnixDatagramServer,
# SocketServer.DatagramRequestHandler,
# self.dgram_examine)
#
# @requires_unix_sockets
# @requires_forking
# def test_ForkingUnixDatagramServer(self):
# self.run_server(SocketServer.ForkingUnixDatagramServer,
# SocketServer.DatagramRequestHandler,
# self.dgram_examine)
@requires_unix_sockets
def test_UnixDatagramServer(self):
self.run_server(SocketServer.UnixDatagramServer,
SocketServer.DatagramRequestHandler,
self.dgram_examine)
@requires_unix_sockets
def test_ThreadingUnixDatagramServer(self):
self.run_server(SocketServer.ThreadingUnixDatagramServer,
SocketServer.DatagramRequestHandler,
self.dgram_examine)
@requires_unix_sockets
@requires_forking
def test_ForkingUnixDatagramServer(self):
self.run_server(ForkingUnixDatagramServer,
SocketServer.DatagramRequestHandler,
self.dgram_examine)
@reap_threads
def test_shutdown(self):
......@@ -314,6 +315,40 @@ class SocketServerTest(unittest.TestCase):
for t, s in threads:
t.join()
def test_tcpserver_bind_leak(self):
# Issue #22435: the server socket wouldn't be closed if bind()/listen()
# failed.
# Create many servers for which bind() will fail, to see if this results
# in FD exhaustion.
for i in range(1024):
with self.assertRaises(OverflowError):
SocketServer.TCPServer((HOST, -1),
SocketServer.StreamRequestHandler)
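# Illustrative sketch (an assumption about the fix behind issue #22435, not
# SocketServer's actual code): a server whose bind()/listen() raises should close
# its listening socket before propagating the error; otherwise each failed
# construction in the loop above would leak one file descriptor.
import socket as _socket

class _SafeBindServer(object):
    def __init__(self, address):
        self.socket = _socket.socket(_socket.AF_INET, _socket.SOCK_STREAM)
        try:
            self.socket.bind(address)   # (HOST, -1) above makes this raise
            self.socket.listen(5)
        except Exception:
            self.socket.close()         # the part the fix adds
            raise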
class MiscTestCase(unittest.TestCase):
def test_shutdown_request_called_if_verify_request_false(self):
# Issue #26309: BaseServer should call shutdown_request even if
# verify_request is False
class MyServer(SocketServer.TCPServer):
def verify_request(self, request, client_address):
return False
shutdown_called = 0
def shutdown_request(self, request):
self.shutdown_called += 1
SocketServer.TCPServer.shutdown_request(self, request)
server = MyServer((HOST, 0), SocketServer.StreamRequestHandler)
s = socket.socket(server.address_family, socket.SOCK_STREAM)
s.connect(server.server_address)
s.close()
server.handle_request()
self.assertEqual(server.shutdown_called, 1)
server.server_close()
def test_main():
if imp.lock_held():
......
......@@ -26,6 +26,9 @@ ssl = support.import_module("ssl")
PROTOCOLS = sorted(ssl._PROTOCOL_NAMES)
HOST = support.HOST
IS_LIBRESSL = ssl.OPENSSL_VERSION.startswith('LibreSSL')
IS_OPENSSL_1_1 = not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0)
def data_file(*name):
return os.path.join(os.path.dirname(__file__), *name)
......@@ -57,6 +60,8 @@ CRLFILE = data_file("revocation.crl")
SIGNED_CERTFILE = data_file("keycert3.pem")
SIGNED_CERTFILE2 = data_file("keycert4.pem")
SIGNING_CA = data_file("pycacert.pem")
# cert with all kinds of subject alt names
ALLSANFILE = data_file("allsans.pem")
REMOTE_HOST = "self-signed.pythontest.net"
REMOTE_ROOT_CERT = data_file("selfsigned_pythontestdotnet.pem")
......@@ -164,7 +169,6 @@ class BasicSocketTests(unittest.TestCase):
self.assertIn(ssl.HAS_SNI, {True, False})
self.assertIn(ssl.HAS_ECDH, {True, False})
def test_random(self):
v = ssl.RAND_status()
if support.verbose:
......@@ -245,6 +249,27 @@ class BasicSocketTests(unittest.TestCase):
self.assertEqual(p['subjectAltName'], san)
def test_parse_all_sans(self):
p = ssl._ssl._test_decode_cert(ALLSANFILE)
self.assertEqual(p['subjectAltName'],
(
('DNS', 'allsans'),
('othername', '<unsupported>'),
('othername', '<unsupported>'),
('email', 'user@example.org'),
('DNS', 'www.example.org'),
('DirName',
((('countryName', 'XY'),),
(('localityName', 'Castle Anthrax'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'dirname example'),))),
('URI', 'https://www.python.org/'),
('IP Address', '127.0.0.1'),
('IP Address', '0:0:0:0:0:0:0:1\n'),
('Registered ID', '1.2.3.4.5')
)
)
def test_DER_to_PEM(self):
with open(CAFILE_CACERT, 'r') as f:
pem = f.read()
......@@ -281,9 +306,9 @@ class BasicSocketTests(unittest.TestCase):
self.assertGreaterEqual(status, 0)
self.assertLessEqual(status, 15)
# Version string as returned by {Open,Libre}SSL, the format might change
if "LibreSSL" in s:
self.assertTrue(s.startswith("LibreSSL {:d}.{:d}".format(major, minor)),
(s, t))
if IS_LIBRESSL:
self.assertTrue(s.startswith("LibreSSL {:d}".format(major)),
(s, t, hex(n)))
else:
self.assertTrue(s.startswith("OpenSSL {:d}.{:d}.{:d}".format(major, minor, fix)),
(s, t))
......@@ -742,15 +767,15 @@ class ContextTests(unittest.TestCase):
def test_options(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
# OP_ALL | OP_NO_SSLv2 | OP_NO_SSLv3 is the default value
self.assertEqual(ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3,
ctx.options)
default = (ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3)
if not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0):
default |= ssl.OP_NO_COMPRESSION
self.assertEqual(default, ctx.options)
ctx.options |= ssl.OP_NO_TLSv1
self.assertEqual(ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1,
ctx.options)
self.assertEqual(default | ssl.OP_NO_TLSv1, ctx.options)
if can_clear_options():
ctx.options = (ctx.options & ~ssl.OP_NO_SSLv2) | ssl.OP_NO_TLSv1
self.assertEqual(ssl.OP_ALL | ssl.OP_NO_TLSv1 | ssl.OP_NO_SSLv3,
ctx.options)
ctx.options = (ctx.options & ~ssl.OP_NO_TLSv1)
self.assertEqual(default, ctx.options)
ctx.options = 0
self.assertEqual(0, ctx.options)
else:
......@@ -1088,6 +1113,7 @@ class ContextTests(unittest.TestCase):
self.assertRaises(TypeError, ctx.load_default_certs, 'SERVER_AUTH')
@unittest.skipIf(sys.platform == "win32", "not-Windows specific")
@unittest.skipIf(IS_LIBRESSL, "LibreSSL doesn't support env vars")
def test_load_default_certs_env(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
with support.EnvironmentVarGuard() as env:
......@@ -1534,7 +1560,6 @@ class NetworkedTests(unittest.TestCase):
sys.stdout.write("%s\n" % x)
else:
self.fail("Got server certificate %s for %s:%s!" % (pem, host, port))
pem = ssl.get_server_certificate((host, port),
ca_certs=cert)
if not pem:
......@@ -2489,8 +2514,6 @@ else:
def test_asyncore_server(self):
"""Check the example asyncore integration."""
indata = "TEST MESSAGE of mixed case\n"
if support.verbose:
sys.stdout.write("\n")
......@@ -2783,7 +2806,7 @@ else:
with closing(context.wrap_socket(socket.socket())) as s:
self.assertIs(s.version(), None)
s.connect((HOST, server.port))
self.assertEqual(s.version(), "TLSv1")
self.assertEqual(s.version(), 'TLSv1')
self.assertIs(s.version(), None)
@unittest.skipUnless(ssl.HAS_ECDH, "test requires ECDH-enabled OpenSSL")
......@@ -2925,24 +2948,36 @@ else:
(['http/3.0', 'http/4.0'], None)
]
for client_protocols, expected in protocol_tests:
server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
server_context.load_cert_chain(CERTFILE)
server_context.set_alpn_protocols(server_protocols)
client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
client_context.load_cert_chain(CERTFILE)
client_context.set_alpn_protocols(client_protocols)
stats = server_params_test(client_context, server_context,
chatty=True, connectionchatty=True)
msg = "failed trying %s (s) and %s (c).\n" \
"was expecting %s, but got %%s from the %%s" \
% (str(server_protocols), str(client_protocols),
str(expected))
client_result = stats['client_alpn_protocol']
self.assertEqual(client_result, expected, msg % (client_result, "client"))
server_result = stats['server_alpn_protocols'][-1] \
if len(stats['server_alpn_protocols']) else 'nothing'
self.assertEqual(server_result, expected, msg % (server_result, "server"))
try:
stats = server_params_test(client_context,
server_context,
chatty=True,
connectionchatty=True)
except ssl.SSLError as e:
stats = e
if expected is None and IS_OPENSSL_1_1:
# OpenSSL 1.1.0 raises handshake error
self.assertIsInstance(stats, ssl.SSLError)
else:
msg = "failed trying %s (s) and %s (c).\n" \
"was expecting %s, but got %%s from the %%s" \
% (str(server_protocols), str(client_protocols),
str(expected))
client_result = stats['client_alpn_protocol']
self.assertEqual(client_result, expected,
msg % (client_result, "client"))
server_result = stats['server_alpn_protocols'][-1] \
if len(stats['server_alpn_protocols']) else 'nothing'
self.assertEqual(server_result, expected,
msg % (server_result, "server"))
def test_selected_npn_protocol(self):
# selected_npn_protocol() is None unless NPN is used
......
......@@ -20,7 +20,6 @@ except ImportError:
threading = None
mswindows = (sys.platform == "win32")
PYPY = hasattr(sys, 'pypy_version_info')
#
# Depends on the following external programs: Python
......@@ -33,16 +32,6 @@ PYPY = hasattr(sys, 'pypy_version_info')
# SETBINARY = ''
try:
mkstemp = tempfile.mkstemp
except AttributeError:
# tempfile.mkstemp is not available
def mkstemp():
"""Replacement for mkstemp, calling mktemp."""
fname = tempfile.mktemp()
return os.open(fname, os.O_RDWR|os.O_CREAT), fname
class BaseTestCase(unittest.TestCase):
def setUp(self):
# Try to minimize the number of children we have so this test
......@@ -194,8 +183,8 @@ class ProcessTestCase(BaseTestCase):
p.wait()
self.assertEqual(p.returncode, 47)
@unittest.skipIf(sysconfig.is_python_build() or PYPY,
"need an installed Python. See #7774. Also fails to get sys.prefix on stock PyPy")
@unittest.skipIf(sysconfig.is_python_build(),
"need an installed Python. See #7774")
def test_executable_without_cwd(self):
# For a normal installation, it should work without 'cwd'
# argument. For test runs in the build directory, see #7774.
......@@ -296,6 +285,27 @@ class ProcessTestCase(BaseTestCase):
tf.seek(0)
self.assertStderrEqual(tf.read(), "strawberry")
def test_stderr_redirect_with_no_stdout_redirect(self):
# test stderr=STDOUT while stdout=None (not set)
# - grandchild prints to stderr
# - child redirects grandchild's stderr to its stdout
# - the parent should get grandchild's stderr in child's stdout
p = subprocess.Popen([sys.executable, "-c",
'import sys, subprocess;'
'rc = subprocess.call([sys.executable, "-c",'
' "import sys;"'
' "sys.stderr.write(\'42\')"],'
' stderr=subprocess.STDOUT);'
'sys.exit(rc)'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
# NOTE: stdout should get stderr from grandchild
self.assertStderrEqual(stdout, b'42')
self.assertStderrEqual(stderr, b'') # should be empty
self.assertEqual(p.returncode, 0)
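# Illustrative sketch (not one of the vendored tests): the plain case of
# stderr=subprocess.STDOUT is that the child's stderr goes wherever its stdout
# goes; the test above covers the subtler case where stdout itself was left
# unredirected in the same Popen call.
import subprocess, sys
_p = subprocess.Popen([sys.executable, '-c', 'import sys; sys.stderr.write("42")'],
                      stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
_out, _ = _p.communicate()
assert _out == '42'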
def test_stdout_stderr_pipe(self):
# capture stdout and stderr to the same pipe
p = subprocess.Popen([sys.executable, "-c",
......@@ -416,7 +426,7 @@ class ProcessTestCase(BaseTestCase):
def test_communicate_pipe_fd_leak(self):
fd_directory = '/proc/%d/fd' % os.getpid()
num_fds_before_popen = len(os.listdir(fd_directory))
p = subprocess.Popen([sys.executable, "-c", "print()"],
p = subprocess.Popen([sys.executable, "-c", "print('')"],
stdout=subprocess.PIPE)
p.communicate()
num_fds_after_communicate = len(os.listdir(fd_directory))
......@@ -669,9 +679,9 @@ class ProcessTestCase(BaseTestCase):
def test_handles_closed_on_exception(self):
# If CreateProcess exits with an error, ensure the
# duplicate output handles are released
ifhandle, ifname = mkstemp()
ofhandle, ofname = mkstemp()
efhandle, efname = mkstemp()
ifhandle, ifname = tempfile.mkstemp()
ofhandle, ofname = tempfile.mkstemp()
efhandle, efname = tempfile.mkstemp()
try:
subprocess.Popen (["*"], stdin=ifhandle, stdout=ofhandle,
stderr=efhandle)
......@@ -861,7 +871,7 @@ class POSIXProcessTestCase(BaseTestCase):
def test_args_string(self):
# args is a string
f, fname = mkstemp()
f, fname = tempfile.mkstemp()
os.write(f, "#!/bin/sh\n")
os.write(f, "exec '%s' -c 'import sys; sys.exit(47)'\n" %
sys.executable)
......@@ -905,7 +915,7 @@ class POSIXProcessTestCase(BaseTestCase):
def test_call_string(self):
# call() function with string argument on UNIX
f, fname = mkstemp()
f, fname = tempfile.mkstemp()
os.write(f, "#!/bin/sh\n")
os.write(f, "exec '%s' -c 'import sys; sys.exit(47)'\n" %
sys.executable)
......@@ -1061,7 +1071,7 @@ class POSIXProcessTestCase(BaseTestCase):
def check_swap_fds(self, stdin_no, stdout_no, stderr_no):
# open up some temporary files
temps = [mkstemp() for i in range(3)]
temps = [tempfile.mkstemp() for i in range(3)]
temp_fds = [fd for fd, fname in temps]
try:
# unlink the files -- we won't need to reopen them
......@@ -1384,7 +1394,7 @@ class CommandsWithSpaces (BaseTestCase):
def setUp(self):
super(CommandsWithSpaces, self).setUp()
f, fname = mkstemp(".py", "te st")
f, fname = tempfile.mkstemp(".py", "te st")
self.fname = fname.lower ()
os.write(f, b"import sys;"
b"sys.stdout.write('%d %s' % (len(sys.argv), [a.lower () for a in sys.argv]))"
......
......@@ -248,8 +248,8 @@ class ReadTests(TestCase):
func = getattr(telnet, func_name)
self.assertRaises(EOFError, func)
# read_eager and read_very_eager make the same gaurantees
# (they behave differently but we only test the gaurantees)
# read_eager and read_very_eager make the same guarantees
# (they behave differently but we only test the guarantees)
def test_read_very_eager_A(self):
self._test_read_any_eager_A('read_very_eager')
def test_read_very_eager_B(self):
......
......@@ -234,7 +234,12 @@ class TestForkInThread(unittest.TestCase):
if pid == 0: # child
os.close(self.read_fd)
os.write(self.write_fd, "OK")
sys.exit(0)
# Exiting the thread normally in the child process can leave
# any additional threads (such as the one started by
# importing _tkinter) still running, and this can prevent
# the half-zombie child process from being cleaned up. See
# Issue #26456.
os._exit(0)
else: # parent
os.close(self.write_fd)
......
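# Illustrative sketch (a general pattern, assumed rather than taken from the test
# file): os._exit() ends the forked child outright, whereas sys.exit() raised in
# a non-main thread only ends that thread, so the child could linger as a
# half-zombie (Issue #26456).
import os

def fork_and_exit_child():
    pid = os.fork()            # POSIX-only, as in the test above
    if pid == 0:
        os._exit(0)            # never sys.exit() here
    os.waitpid(pid, 0)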
......@@ -51,7 +51,7 @@ class TestThread(threading.Thread):
self.nrunning.inc()
if verbose:
print self.nrunning.get(), 'tasks are running'
self.testcase.assertTrue(self.nrunning.get() <= 3)
self.testcase.assertLessEqual(self.nrunning.get(), 3)
time.sleep(delay)
if verbose:
......@@ -59,7 +59,7 @@ class TestThread(threading.Thread):
with self.mutex:
self.nrunning.dec()
self.testcase.assertTrue(self.nrunning.get() >= 0)
self.testcase.assertGreaterEqual(self.nrunning.get(), 0)
if verbose:
print '%s is finished. %d tasks are running' % (
self.name, self.nrunning.get())
......@@ -92,25 +92,25 @@ class ThreadTests(BaseTestCase):
for i in range(NUMTASKS):
t = TestThread("<thread %d>"%i, self, sema, mutex, numrunning)
threads.append(t)
self.assertEqual(t.ident, None)
self.assertTrue(re.match('<TestThread\(.*, initial\)>', repr(t)))
self.assertIsNone(t.ident)
self.assertRegexpMatches(repr(t), r'^<TestThread\(.*, initial\)>$')
t.start()
if verbose:
print 'waiting for all tasks to complete'
for t in threads:
t.join(NUMTASKS)
self.assertTrue(not t.is_alive())
self.assertFalse(t.is_alive())
self.assertNotEqual(t.ident, 0)
self.assertFalse(t.ident is None)
self.assertTrue(re.match('<TestThread\(.*, \w+ -?\d+\)>', repr(t)))
self.assertIsNotNone(t.ident)
self.assertRegexpMatches(repr(t), r'^<TestThread\(.*, \w+ -?\d+\)>$')
if verbose:
print 'all tasks done'
self.assertEqual(numrunning.get(), 0)
def test_ident_of_no_threading_threads(self):
# The ident still must work for the main thread and dummy threads.
self.assertFalse(threading.currentThread().ident is None)
self.assertIsNotNone(threading.currentThread().ident)
def f():
ident.append(threading.currentThread().ident)
done.set()
......@@ -118,7 +118,7 @@ class ThreadTests(BaseTestCase):
ident = []
thread.start_new_thread(f, ())
done.wait()
self.assertFalse(ident[0] is None)
self.assertIsNotNone(ident[0])
# Kill the "immortal" _DummyThread
del threading._active[ident[0]]
......@@ -237,7 +237,7 @@ class ThreadTests(BaseTestCase):
self.assertTrue(ret)
if verbose:
print " verifying worker hasn't exited"
self.assertTrue(not t.finished)
self.assertFalse(t.finished)
if verbose:
print " attempting to raise asynch exception in worker"
result = set_async_exc(ctypes.c_long(t.id), exception)
......@@ -706,52 +706,6 @@ class ThreadJoinOnShutdown(BaseTestCase):
output = "end of worker thread\nend of main thread\n"
self.assertScriptHasOutput(script, output)
@unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug")
@unittest.skipIf(sys.pypy_version_info[:2] >= (2,6),
"This test was removed in CPython 2.7.9, and fails under PyPy 2.6 "
"with 'RuntimeError: stream lock is not held' in random_io")
def test_6_daemon_threads(self):
# Check that a daemon thread cannot crash the interpreter on shutdown
# by manipulating internal structures that are being disposed of in
# the main thread.
script = """if True:
import os
import random
import sys
import time
import threading
thread_has_run = set()
def random_io():
'''Loop for a while sleeping random tiny amounts and doing some I/O.'''
while True:
in_f = open(os.__file__, 'rb')
stuff = in_f.read(200)
null_f = open(os.devnull, 'wb')
null_f.write(stuff)
time.sleep(random.random() / 1995)
null_f.close()
in_f.close()
thread_has_run.add(threading.current_thread())
def main():
count = 0
for _ in range(40):
new_thread = threading.Thread(target=random_io)
new_thread.daemon = True
new_thread.start()
count += 1
while len(thread_has_run) < count:
time.sleep(0.001)
# Trigger process shutdown
sys.exit(0)
main()
"""
rc, out, err = assert_python_ok('-c', script)
self.assertFalse(err)
@unittest.skipUnless(hasattr(os, 'fork'), "needs os.fork()")
@unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug")
def test_reinit_tls_after_fork(self):
......@@ -791,7 +745,7 @@ class ThreadJoinOnShutdown(BaseTestCase):
def generator():
while 1:
yield "genereator"
yield "generator"
def callback():
if callback.gen is None:
......@@ -838,6 +792,85 @@ class ThreadingExceptionTests(BaseTestCase):
thread.start()
self.assertRaises(RuntimeError, setattr, thread, "daemon", True)
def test_print_exception(self):
script = r"""if 1:
import threading
import time
running = False
def run():
global running
running = True
while running:
time.sleep(0.01)
1.0/0.0
t = threading.Thread(target=run)
t.start()
while not running:
time.sleep(0.01)
running = False
t.join()
"""
rc, out, err = assert_python_ok("-c", script)
self.assertEqual(out, '')
self.assertIn("Exception in thread", err)
self.assertIn("Traceback (most recent call last):", err)
self.assertIn("ZeroDivisionError", err)
self.assertNotIn("Unhandled exception", err)
def test_print_exception_stderr_is_none_1(self):
script = r"""if 1:
import sys
import threading
import time
running = False
def run():
global running
running = True
while running:
time.sleep(0.01)
1.0/0.0
t = threading.Thread(target=run)
t.start()
while not running:
time.sleep(0.01)
sys.stderr = None
running = False
t.join()
"""
rc, out, err = assert_python_ok("-c", script)
self.assertEqual(out, '')
self.assertIn("Exception in thread", err)
self.assertIn("Traceback (most recent call last):", err)
self.assertIn("ZeroDivisionError", err)
self.assertNotIn("Unhandled exception", err)
def test_print_exception_stderr_is_none_2(self):
script = r"""if 1:
import sys
import threading
import time
running = False
def run():
global running
running = True
while running:
time.sleep(0.01)
1.0/0.0
sys.stderr = None
t = threading.Thread(target=run)
t.start()
while not running:
time.sleep(0.01)
running = False
t.join()
"""
rc, out, err = assert_python_ok("-c", script)
self.assertEqual(out, '')
self.assertNotIn("Unhandled exception", err)
class LockTests(lock_tests.LockTests):
locktype = staticmethod(threading.Lock)
......@@ -849,7 +882,7 @@ class EventTests(lock_tests.EventTests):
eventtype = staticmethod(threading.Event)
class ConditionAsRLockTests(lock_tests.RLockTests):
# An Condition uses an RLock by default and exports its API.
# Condition uses an RLock by default and exports its API.
locktype = staticmethod(threading.Condition)
class ConditionTests(lock_tests.ConditionTests):
......
import unittest
from doctest import DocTestSuite
from test import test_support
from test import test_support as support
import weakref
import gc
# Modules under test
_thread = test_support.import_module('thread')
threading = test_support.import_module('threading')
_thread = support.import_module('thread')
threading = support.import_module('threading')
import _threading_local
......@@ -35,7 +35,6 @@ class BaseLocalTest:
del t
gc.collect()
gc.collect() # needed when run through gevent; why?
self.assertEqual(len(weaklist), n)
# XXX _threading_local keeps the local of the last stopped thread alive.
......@@ -64,14 +63,9 @@ class BaseLocalTest:
# Simply check that the variable is correctly set
self.assertEqual(local.x, i)
threads= []
for i in range(10):
t = threading.Thread(target=f, args=(i,))
t.start()
threads.append(t)
for t in threads:
t.join()
with support.start_threads(threading.Thread(target=f, args=(i,))
for i in range(10)):
pass
def test_derived_cycle_dealloc(self):
# http://bugs.python.org/issue6990
......@@ -174,7 +168,7 @@ class BaseLocalTest:
obj = cls()
obj.x = 5
self.assertEqual(obj.__dict__, {'x': 5})
if test_support.check_impl_detail():
if support.check_impl_detail():
with self.assertRaises(AttributeError):
obj.__dict__ = {}
with self.assertRaises(AttributeError):
......@@ -203,7 +197,7 @@ class ThreadLocalTest(unittest.TestCase, BaseLocalTest):
wr = weakref.ref(x)
del x
gc.collect()
self.assertIs(wr(), None)
self.assertIsNone(wr())
class PyThreadingLocalTest(unittest.TestCase, BaseLocalTest):
_local = _threading_local.local
......@@ -230,7 +224,7 @@ def test_main():
setUp=setUp, tearDown=tearDown)
)
test_support.run_unittest(suite)
support.run_unittest(suite)
if __name__ == '__main__':
test_main()
"""Regresssion tests for urllib"""
"""Regression tests for urllib"""
import collections
import urllib
import httplib
import io
import unittest
import os
import sys
import mimetools
import tempfile
import StringIO
from test import test_support
from base64 import b64encode
......@@ -21,37 +22,43 @@ def hexescape(char):
return "%" + hex_repr
class FakeHTTPMixin(object):
def fakehttp(self, fakedata):
class FakeSocket(StringIO.StringIO):
def fakehttp(fakedata):
class FakeSocket(io.BytesIO):
def sendall(self, data):
FakeHTTPConnection.buf = data
def sendall(self, data):
FakeHTTPConnection.buf = data
def makefile(self, *args, **kwds):
return self
def makefile(self, *args, **kwds):
return self
def read(self, amt=None):
if self.closed:
return b""
return io.BytesIO.read(self, amt)
def read(self, amt=None):
if self.closed:
return ""
return StringIO.StringIO.read(self, amt)
def readline(self, length=None):
if self.closed:
return b""
return io.BytesIO.readline(self, length)
def readline(self, length=None):
if self.closed:
return ""
return StringIO.StringIO.readline(self, length)
class FakeHTTPConnection(httplib.HTTPConnection):
class FakeHTTPConnection(httplib.HTTPConnection):
# buffer to store data for verification in urlopen tests.
buf = ""
# buffer to store data for verification in urlopen tests.
buf = ""
def connect(self):
self.sock = FakeSocket(self.fakedata)
self.__class__.fakesock = self.sock
FakeHTTPConnection.fakedata = fakedata
def connect(self):
self.sock = FakeSocket(fakedata)
return FakeHTTPConnection
class FakeHTTPMixin(object):
def fakehttp(self, fakedata):
assert httplib.HTTP._connection_class == httplib.HTTPConnection
httplib.HTTP._connection_class = FakeHTTPConnection
httplib.HTTP._connection_class = fakehttp(fakedata)
def unfakehttp(self):
httplib.HTTP._connection_class = httplib.HTTPConnection
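# Illustrative sketch (an assumption about the intent of the rewrite above, not
# code from the test file): turning fakehttp() into a factory gives every call
# its own connection class carrying its own canned bytes, instead of mutating one
# shared class attribute between tests.
def _make_fake_connection(canned_bytes):
    class _FakeConnection(object):
        fakedata = canned_bytes
    return _FakeConnection

_a = _make_fake_connection("HTTP/1.0 200 OK\r\n\r\nfirst")
_b = _make_fake_connection("HTTP/1.0 404 Not Found\r\n\r\nsecond")
assert _a.fakedata != _b.fakedata   # per-call state, no cross-test leakage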
......@@ -158,8 +165,71 @@ class ProxyTests(unittest.TestCase):
# getproxies_environment use lowered case truncated (no '_proxy') keys
self.assertEqual('localhost', proxies['no'])
# List of no_proxies with space.
self.env.set('NO_PROXY', 'localhost, anotherdomain.com, newdomain.com')
self.env.set('NO_PROXY', 'localhost, anotherdomain.com, newdomain.com:1234')
self.assertTrue(urllib.proxy_bypass_environment('anotherdomain.com'))
self.assertTrue(urllib.proxy_bypass_environment('anotherdomain.com:8888'))
self.assertTrue(urllib.proxy_bypass_environment('newdomain.com:1234'))
def test_proxy_cgi_ignore(self):
try:
self.env.set('HTTP_PROXY', 'http://somewhere:3128')
proxies = urllib.getproxies_environment()
self.assertEqual('http://somewhere:3128', proxies['http'])
self.env.set('REQUEST_METHOD', 'GET')
proxies = urllib.getproxies_environment()
self.assertNotIn('http', proxies)
finally:
self.env.unset('REQUEST_METHOD')
self.env.unset('HTTP_PROXY')
def test_proxy_bypass_environment_host_match(self):
bypass = urllib.proxy_bypass_environment
self.env.set('NO_PROXY',
'localhost, anotherdomain.com, newdomain.com:1234')
self.assertTrue(bypass('localhost'))
self.assertTrue(bypass('LocalHost')) # MixedCase
self.assertTrue(bypass('LOCALHOST')) # UPPERCASE
self.assertTrue(bypass('newdomain.com:1234'))
self.assertTrue(bypass('anotherdomain.com:8888'))
self.assertTrue(bypass('www.newdomain.com:1234'))
self.assertFalse(bypass('prelocalhost'))
self.assertFalse(bypass('newdomain.com')) # no port
self.assertFalse(bypass('newdomain.com:1235')) # wrong port
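# Illustrative sketch (a hypothetical helper, not urllib's actual implementation):
# the rules asserted above are case-insensitive host matching, suffix matching so
# subdomains are bypassed too, and an optional port in the NO_PROXY entry that
# must match exactly when present.
def _bypasses(host, no_proxy):
    hostonly = host.split(':')[0].lower()
    for entry in (e.strip().lower() for e in no_proxy.split(',')):
        if not entry:
            continue
        if ':' in entry:   # entry carries a port: compare against the full host:port
            if host.lower() == entry or host.lower().endswith('.' + entry):
                return True
        elif hostonly == entry or hostonly.endswith('.' + entry):
            return True
    return False

assert _bypasses('LOCALHOST', 'localhost, newdomain.com:1234')
assert _bypasses('www.newdomain.com:1234', 'localhost, newdomain.com:1234')
assert not _bypasses('newdomain.com', 'localhost, newdomain.com:1234')
assert not _bypasses('newdomain.com:1235', 'localhost, newdomain.com:1234')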
class ProxyTests_withOrderedEnv(unittest.TestCase):
def setUp(self):
# We need to test conditions where variable order _is_ significant
self._saved_env = os.environ
# Monkey patch os.environ, start with empty fake environment
os.environ = collections.OrderedDict()
def tearDown(self):
os.environ = self._saved_env
def test_getproxies_environment_prefer_lowercase(self):
# Test lowercase preference with removal
os.environ['no_proxy'] = ''
os.environ['No_Proxy'] = 'localhost'
self.assertFalse(urllib.proxy_bypass_environment('localhost'))
self.assertFalse(urllib.proxy_bypass_environment('arbitrary'))
os.environ['http_proxy'] = ''
os.environ['HTTP_PROXY'] = 'http://somewhere:3128'
proxies = urllib.getproxies_environment()
self.assertEqual({}, proxies)
# Test lowercase preference of proxy bypass and correct matching including ports
os.environ['no_proxy'] = 'localhost, noproxy.com, my.proxy:1234'
os.environ['No_Proxy'] = 'xyz.com'
self.assertTrue(urllib.proxy_bypass_environment('localhost'))
self.assertTrue(urllib.proxy_bypass_environment('noproxy.com:5678'))
self.assertTrue(urllib.proxy_bypass_environment('my.proxy:1234'))
self.assertFalse(urllib.proxy_bypass_environment('my.proxy'))
self.assertFalse(urllib.proxy_bypass_environment('arbitrary'))
# Test lowercase preference with replacement
os.environ['http_proxy'] = 'http://somewhere:3128'
os.environ['Http_Proxy'] = 'http://somewhereelse:3128'
proxies = urllib.getproxies_environment()
self.assertEqual('http://somewhere:3128', proxies['http'])
class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin):
......@@ -209,10 +279,26 @@ Connection: close
Content-Type: text/html; charset=iso-8859-1
""")
try:
self.assertRaises(IOError, urllib.urlopen, "http://python.org/")
msg = "Redirection to url 'file:"
with self.assertRaisesRegexp(IOError, msg):
urllib.urlopen("http://python.org/")
finally:
self.unfakehttp()
def test_redirect_limit_independent(self):
# Ticket #12923: make sure independent requests each use their
# own retry limit.
for i in range(urllib.FancyURLopener().maxtries):
self.fakehttp(b'''HTTP/1.1 302 Found
Location: file://guidocomputer.athome.com:/python/license
Connection: close
''')
try:
self.assertRaises(IOError, urllib.urlopen,
"http://something")
finally:
self.unfakehttp()
def test_empty_socket(self):
# urlopen() raises IOError if the underlying socket does not send any
# data. (#1680230)
......@@ -773,21 +859,55 @@ class Pathname_Tests(unittest.TestCase):
class Utility_Tests(unittest.TestCase):
"""Testcase to test the various utility functions in the urllib."""
# In Python 3 this test class is moved to test_urlparse.
def test_splittype(self):
splittype = urllib.splittype
self.assertEqual(splittype('type:opaquestring'), ('type', 'opaquestring'))
self.assertEqual(splittype('opaquestring'), (None, 'opaquestring'))
self.assertEqual(splittype(':opaquestring'), (None, ':opaquestring'))
self.assertEqual(splittype('type:'), ('type', ''))
self.assertEqual(splittype('type:opaque:string'), ('type', 'opaque:string'))
def test_splithost(self):
splithost = urllib.splithost
self.assertEqual(splithost('//www.example.org:80/foo/bar/baz.html'),
('www.example.org:80', '/foo/bar/baz.html'))
self.assertEqual(splithost('//www.example.org:80'),
('www.example.org:80', ''))
self.assertEqual(splithost('/foo/bar/baz.html'),
(None, '/foo/bar/baz.html'))
def test_splituser(self):
splituser = urllib.splituser
self.assertEqual(splituser('User:Pass@www.python.org:080'),
('User:Pass', 'www.python.org:080'))
self.assertEqual(splituser('@www.python.org:080'),
('', 'www.python.org:080'))
self.assertEqual(splituser('www.python.org:080'),
(None, 'www.python.org:080'))
self.assertEqual(splituser('User:Pass@'),
('User:Pass', ''))
self.assertEqual(splituser('User@example.com:Pass@www.python.org:080'),
('User@example.com:Pass', 'www.python.org:080'))
def test_splitpasswd(self):
"""Some of the password examples are not sensible, but it is added to
confirming to RFC2617 and addressing issue4675.
"""
self.assertEqual(('user', 'ab'),urllib.splitpasswd('user:ab'))
self.assertEqual(('user', 'a\nb'),urllib.splitpasswd('user:a\nb'))
self.assertEqual(('user', 'a\tb'),urllib.splitpasswd('user:a\tb'))
self.assertEqual(('user', 'a\rb'),urllib.splitpasswd('user:a\rb'))
self.assertEqual(('user', 'a\fb'),urllib.splitpasswd('user:a\fb'))
self.assertEqual(('user', 'a\vb'),urllib.splitpasswd('user:a\vb'))
self.assertEqual(('user', 'a:b'),urllib.splitpasswd('user:a:b'))
self.assertEqual(('user', 'a b'),urllib.splitpasswd('user:a b'))
self.assertEqual(('user 2', 'ab'),urllib.splitpasswd('user 2:ab'))
self.assertEqual(('user+1', 'a+b'),urllib.splitpasswd('user+1:a+b'))
# Some of the password examples are not sensible, but they are included to
# conform to RFC 2617 and to address issue4675.
splitpasswd = urllib.splitpasswd
self.assertEqual(splitpasswd('user:ab'), ('user', 'ab'))
self.assertEqual(splitpasswd('user:a\nb'), ('user', 'a\nb'))
self.assertEqual(splitpasswd('user:a\tb'), ('user', 'a\tb'))
self.assertEqual(splitpasswd('user:a\rb'), ('user', 'a\rb'))
self.assertEqual(splitpasswd('user:a\fb'), ('user', 'a\fb'))
self.assertEqual(splitpasswd('user:a\vb'), ('user', 'a\vb'))
self.assertEqual(splitpasswd('user:a:b'), ('user', 'a:b'))
self.assertEqual(splitpasswd('user:a b'), ('user', 'a b'))
self.assertEqual(splitpasswd('user 2:ab'), ('user 2', 'ab'))
self.assertEqual(splitpasswd('user+1:a+b'), ('user+1', 'a+b'))
self.assertEqual(splitpasswd('user:'), ('user', ''))
self.assertEqual(splitpasswd('user'), ('user', None))
self.assertEqual(splitpasswd(':ab'), ('', 'ab'))
def test_splitport(self):
splitport = urllib.splitport
......@@ -796,6 +916,9 @@ class Utility_Tests(unittest.TestCase):
self.assertEqual(splitport('parrot:'), ('parrot', None))
self.assertEqual(splitport('127.0.0.1'), ('127.0.0.1', None))
self.assertEqual(splitport('parrot:cheese'), ('parrot:cheese', None))
self.assertEqual(splitport('[::1]:88'), ('[::1]', '88'))
self.assertEqual(splitport('[::1]'), ('[::1]', None))
self.assertEqual(splitport(':88'), ('', '88'))
def test_splitnport(self):
splitnport = urllib.splitnport
......@@ -809,6 +932,59 @@ class Utility_Tests(unittest.TestCase):
self.assertEqual(splitnport('parrot:cheese'), ('parrot', None))
self.assertEqual(splitnport('parrot:cheese', 55), ('parrot', None))
def test_splitquery(self):
# Normal cases are exercised by other tests; ensure that we also
# catch cases with no query specified (testcase ensuring coverage)
splitquery = urllib.splitquery
self.assertEqual(splitquery('http://python.org/fake?foo=bar'),
('http://python.org/fake', 'foo=bar'))
self.assertEqual(splitquery('http://python.org/fake?foo=bar?'),
('http://python.org/fake?foo=bar', ''))
self.assertEqual(splitquery('http://python.org/fake'),
('http://python.org/fake', None))
self.assertEqual(splitquery('?foo=bar'), ('', 'foo=bar'))
def test_splittag(self):
splittag = urllib.splittag
self.assertEqual(splittag('http://example.com?foo=bar#baz'),
('http://example.com?foo=bar', 'baz'))
self.assertEqual(splittag('http://example.com?foo=bar#'),
('http://example.com?foo=bar', ''))
self.assertEqual(splittag('#baz'), ('', 'baz'))
self.assertEqual(splittag('http://example.com?foo=bar'),
('http://example.com?foo=bar', None))
self.assertEqual(splittag('http://example.com?foo=bar#baz#boo'),
('http://example.com?foo=bar#baz', 'boo'))
def test_splitattr(self):
splitattr = urllib.splitattr
self.assertEqual(splitattr('/path;attr1=value1;attr2=value2'),
('/path', ['attr1=value1', 'attr2=value2']))
self.assertEqual(splitattr('/path;'), ('/path', ['']))
self.assertEqual(splitattr(';attr1=value1;attr2=value2'),
('', ['attr1=value1', 'attr2=value2']))
self.assertEqual(splitattr('/path'), ('/path', []))
def test_splitvalue(self):
# Normal cases are exercised by other tests; test pathological cases
# with no key/value pairs. (testcase ensuring coverage)
splitvalue = urllib.splitvalue
self.assertEqual(splitvalue('foo=bar'), ('foo', 'bar'))
self.assertEqual(splitvalue('foo='), ('foo', ''))
self.assertEqual(splitvalue('=bar'), ('', 'bar'))
self.assertEqual(splitvalue('foobar'), ('foobar', None))
self.assertEqual(splitvalue('foo=bar=baz'), ('foo', 'bar=baz'))
def test_toBytes(self):
result = urllib.toBytes(u'http://www.python.org')
self.assertEqual(result, 'http://www.python.org')
self.assertRaises(UnicodeError, urllib.toBytes,
test_support.u(r'http://www.python.org/medi\u00e6val'))
def test_unwrap(self):
url = urllib.unwrap('<URL:type://host/path>')
self.assertEqual(url, 'type://host/path')
class URLopener_Tests(unittest.TestCase):
"""Testcase to test the open method of URLopener class."""
......@@ -924,6 +1100,8 @@ def test_main():
Pathname_Tests,
Utility_Tests,
URLopener_Tests,
ProxyTests,
ProxyTests_withOrderedEnv,
#FTPWrapperTests,
)
......
import unittest
from test import test_support
from test import test_urllib
import os
import socket
import StringIO
import urllib2
from urllib2 import Request, OpenerDirector
from urllib2 import Request, OpenerDirector, AbstractDigestAuthHandler
import httplib
try:
import ssl
except ImportError:
ssl = None
# XXX
# Request
......@@ -20,7 +27,7 @@ class TrivialTests(unittest.TestCase):
self.assertRaises(ValueError, urllib2.urlopen, 'bogus url')
# XXX Name hacking to get this to work on Windows.
fname = os.path.abspath(urllib2.__file__).replace('\\', '/')
fname = os.path.abspath(urllib2.__file__).replace(os.sep, '/')
# And more hacking to get it to work on MacOS. This assumes
# urllib.pathname2url works, unfortunately...
......@@ -47,6 +54,14 @@ class TrivialTests(unittest.TestCase):
for string, list in tests:
self.assertEqual(urllib2.parse_http_list(string), list)
@unittest.skipUnless(ssl, "ssl module required")
def test_cafile_and_context(self):
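# Passing both cafile and context is ambiguous, so urlopen is expected to raise ValueError.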
context = ssl.create_default_context()
with self.assertRaises(ValueError):
urllib2.urlopen(
"https://localhost", cafile="/nonexistent/path", context=context
)
def test_request_headers_dict():
"""
......@@ -278,8 +293,8 @@ class MockHTTPClass:
self.req_headers = []
self.data = None
self.raise_on_endheaders = False
self.sock = None
self._tunnel_headers = {}
self.sock = None # gevent: compat with 2.7.0+
def __call__(self, host, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.host = host
......@@ -408,7 +423,7 @@ class MockHTTPHandler(urllib2.BaseHandler):
self._count = 0
self.requests = []
def http_open(self, req):
import mimetools, httplib, copy
import mimetools, copy
from StringIO import StringIO
self.requests.append(copy.deepcopy(req))
if self._count == 0:
......@@ -1026,6 +1041,22 @@ class HandlerTests(unittest.TestCase):
fp = o.open('http://www.example.com')
self.assertEqual(fp.geturl(), redirected_url.strip())
def test_redirect_no_path(self):
# Issue 14132: Relative redirect strips original path
real_class = httplib.HTTPConnection
response1 = b"HTTP/1.1 302 Found\r\nLocation: ?query\r\n\r\n"
httplib.HTTPConnection = test_urllib.fakehttp(response1)
self.addCleanup(setattr, httplib, "HTTPConnection", real_class)
urls = iter(("/path", "/path?query"))
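# The first request goes to /path; the relative '?query' redirect must keep that path.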
def request(conn, method, url, *pos, **kw):
self.assertEqual(url, next(urls))
real_class.request(conn, method, url, *pos, **kw)
# Change response for subsequent connection
conn.__class__.fakedata = b"HTTP/1.1 200 OK\r\n\r\nHello!"
httplib.HTTPConnection.request = request
fp = urllib2.urlopen("http://python.org/path")
self.assertEqual(fp.geturl(), "http://python.org/path?query")
def test_proxy(self):
o = OpenerDirector()
ph = urllib2.ProxyHandler(dict(http="proxy.example.com:3128"))
......@@ -1280,6 +1311,16 @@ class MiscTests(unittest.TestCase):
else:
self.assertTrue(False)
def test_unsupported_algorithm(self):
handler = AbstractDigestAuthHandler()
with self.assertRaises(ValueError) as exc:
handler.get_algorithm_impls('invalid')
self.assertEqual(
str(exc.exception),
"Unsupported digest authentication algorithm 'invalid'"
)
class RequestTests(unittest.TestCase):
def setUp(self):
......@@ -1340,6 +1381,11 @@ class RequestTests(unittest.TestCase):
req = Request(url)
self.assertEqual(req.get_full_url(), url)
def test_private_attributes(self):
self.assertFalse(hasattr(self.get, '_Request__r_xxx'))
# Issue #6500: infinite recursion
self.assertFalse(hasattr(self.get, '_Request__r_method'))
def test_HTTPError_interface(self):
"""
Issue 13211 reveals that HTTPError didn't implement the URLError
......
import os
import base64
import urlparse
import urllib2
import BaseHTTPServer
......@@ -9,6 +11,17 @@ from test import test_support
mimetools = test_support.import_module('mimetools', deprecated=True)
threading = test_support.import_module('threading')
try:
import ssl
except ImportError:
ssl = None
here = os.path.dirname(__file__)
# Self-signed cert file for 'localhost'
CERT_localhost = os.path.join(here, 'keycert.pem')
# Self-signed cert file for 'fakehostname'
CERT_fakehostname = os.path.join(here, 'keycert2.pem')
# Loopback http server infrastructure
class LoopbackHttpServer(BaseHTTPServer.HTTPServer):
......@@ -23,7 +36,7 @@ class LoopbackHttpServer(BaseHTTPServer.HTTPServer):
# Set the timeout of our listening socket really low so
# that we can stop the server easily.
self.socket.settimeout(1.0)
self.socket.settimeout(0.1)
def get_request(self):
"""BaseHTTPServer method, overridden."""
......@@ -66,6 +79,46 @@ class LoopbackHttpServerThread(threading.Thread):
# Authentication infrastructure
class BasicAuthHandler(BaseHTTPServer.BaseHTTPRequestHandler):
"""Handler for performing Basic Authentication."""
# Server side values
USER = "testUser"
PASSWD = "testPass"
REALM = "Test"
USER_PASSWD = "%s:%s" % (USER, PASSWD)
ENCODED_AUTH = base64.b64encode(USER_PASSWD)
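# Pre-computed value compared against the Authorization header ("Basic " + ENCODED_AUTH).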
def __init__(self, *args, **kwargs):
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
def log_message(self, format, *args):
# Suppress the HTTP Console log output
pass
def do_HEAD(self):
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
def do_AUTHHEAD(self):
self.send_response(401)
self.send_header("WWW-Authenticate", "Basic realm=\"%s\"" % self.REALM)
self.send_header("Content-type", "text/html")
self.end_headers()
def do_GET(self):
if self.headers.getheader("Authorization") == None:
self.do_AUTHHEAD()
self.wfile.write("No Auth Header Received")
elif self.headers.getheader(
"Authorization") == "Basic " + self.ENCODED_AUTH:
self.wfile.write("It works!")
else:
# Unauthorized Request
self.do_AUTHHEAD()
class DigestAuthHandler:
"""Handler for performing digest authentication."""
......@@ -228,6 +281,45 @@ class BaseTestCase(unittest.TestCase):
test_support.threading_cleanup(*self._threads)
class BasicAuthTests(BaseTestCase):
USER = "testUser"
PASSWD = "testPass"
INCORRECT_PASSWD = "Incorrect"
REALM = "Test"
def setUp(self):
super(BasicAuthTests, self).setUp()
# With Basic Authentication
def http_server_with_basic_auth_handler(*args, **kwargs):
return BasicAuthHandler(*args, **kwargs)
self.server = LoopbackHttpServerThread(http_server_with_basic_auth_handler)
self.server_url = 'http://127.0.0.1:%s' % self.server.port
self.server.start()
self.server.ready.wait()
def tearDown(self):
self.server.stop()
super(BasicAuthTests, self).tearDown()
def test_basic_auth_success(self):
ah = urllib2.HTTPBasicAuthHandler()
ah.add_password(self.REALM, self.server_url, self.USER, self.PASSWD)
urllib2.install_opener(urllib2.build_opener(ah))
try:
self.assertTrue(urllib2.urlopen(self.server_url))
except urllib2.HTTPError:
self.fail("Basic Auth Failed for url: %s" % self.server_url)
except Exception:
raise
def test_basic_auth_httperror(self):
ah = urllib2.HTTPBasicAuthHandler()
ah.add_password(self.REALM, self.server_url, self.USER,
self.INCORRECT_PASSWD)
urllib2.install_opener(urllib2.build_opener(ah))
self.assertRaises(urllib2.HTTPError, urllib2.urlopen, self.server_url)
class ProxyAuthTests(BaseTestCase):
URL = "http://localhost"
......@@ -237,9 +329,18 @@ class ProxyAuthTests(BaseTestCase):
def setUp(self):
super(ProxyAuthTests, self).setUp()
# Ignore proxy bypass settings in the environment.
def restore_environ(old_environ):
os.environ.clear()
os.environ.update(old_environ)
self.addCleanup(restore_environ, os.environ.copy())
os.environ['NO_PROXY'] = ''
os.environ['no_proxy'] = ''
self.digest_auth_handler = DigestAuthHandler()
self.digest_auth_handler.set_users({self.USER: self.PASSWD})
self.digest_auth_handler.set_realm(self.REALM)
# With Digest Authentication
def create_fake_proxy_handler(*args, **kwargs):
return FakeProxyHandler(self.digest_auth_handler, *args, **kwargs)
......@@ -352,6 +453,19 @@ class TestUrlopen(BaseTestCase):
urllib2.install_opener(opener)
super(TestUrlopen, self).setUp()
def urlopen(self, url, data=None, **kwargs):
l = []
f = urllib2.urlopen(url, data, **kwargs)
try:
# Exercise various methods
l.extend(f.readlines(200))
l.append(f.readline())
l.append(f.read(1024))
l.append(f.read())
finally:
f.close()
return b"".join(l)
def start_server(self, responses):
handler = GetRequestHandler(responses)
......@@ -362,6 +476,16 @@ class TestUrlopen(BaseTestCase):
handler.port = port
return handler
def start_https_server(self, responses=None, **kwargs):
if not hasattr(urllib2, 'HTTPSHandler'):
self.skipTest('ssl support required')
from test.ssl_servers import make_https_server
if responses is None:
responses = [(200, [], b"we care a bit")]
handler = GetRequestHandler(responses)
server = make_https_server(self, handler_class=handler, **kwargs)
handler.port = server.port
return handler
def test_redirection(self):
expected_response = 'We got here...'
......@@ -432,6 +556,49 @@ class TestUrlopen(BaseTestCase):
finally:
self.server.stop()
def test_https(self):
handler = self.start_https_server()
context = ssl.create_default_context(cafile=CERT_localhost)
data = self.urlopen("https://localhost:%s/bizarre" % handler.port, context=context)
self.assertEqual(data, b"we care a bit")
def test_https_with_cafile(self):
handler = self.start_https_server(certfile=CERT_localhost)
# Good cert
data = self.urlopen("https://localhost:%s/bizarre" % handler.port,
cafile=CERT_localhost)
self.assertEqual(data, b"we care a bit")
# Bad cert
with self.assertRaises(urllib2.URLError):
self.urlopen("https://localhost:%s/bizarre" % handler.port,
cafile=CERT_fakehostname)
# Good cert, but mismatching hostname
handler = self.start_https_server(certfile=CERT_fakehostname)
with self.assertRaises(ssl.CertificateError):
self.urlopen("https://localhost:%s/bizarre" % handler.port,
cafile=CERT_fakehostname)
def test_https_with_cadefault(self):
handler = self.start_https_server(certfile=CERT_localhost)
# Self-signed cert should fail verification with system certificate store
with self.assertRaises(urllib2.URLError):
self.urlopen("https://localhost:%s/bizarre" % handler.port,
cadefault=True)
def test_https_sni(self):
if ssl is None:
self.skipTest("ssl module required")
if not ssl.HAS_SNI:
self.skipTest("SNI support required in OpenSSL")
sni_name = [None]
def cb_sni(ssl_sock, server_name, initial_context):
sni_name[0] = server_name
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.set_servername_callback(cb_sni)
handler = self.start_https_server(context=context, certfile=CERT_localhost)
context = ssl.create_default_context(cafile=CERT_localhost)
self.urlopen("https://localhost:%s" % handler.port, context=context)
self.assertEqual(sni_name[0], "localhost")
def test_sending_headers(self):
handler = self.start_server([(200, [], "we don't care")])
......@@ -544,7 +711,7 @@ def test_main():
# the next line.
#test_support.requires("network")
test_support.run_unittest(ProxyAuthTests, TestUrlopen)
test_support.run_unittest(BasicAuthTests, ProxyAuthTests, TestUrlopen)
if __name__ == "__main__":
test_main()
......@@ -80,11 +80,11 @@ class CloseSocketTest(unittest.TestCase):
# delve deep into response to fetch socket._socketobject
response = _urlopen_with_retry("http://www.example.com/")
abused_fileobject = response.fp
#self.assertIs(abused_fileobject.__class__, socket._fileobject) JAM gevent: disable
# self.assertIs(abused_fileobject.__class__, socket._fileobject) # gevent: disable
httpresponse = abused_fileobject._sock
self.assertIs(httpresponse.__class__, httplib.HTTPResponse)
fileobject = httpresponse.fp
#self.assertIs(fileobject.__class__, socket._fileobject) JAM gevent: disable
# self.assertIs(fileobject.__class__, socket._fileobject) # gevent: disable
self.assertTrue(not fileobject.closed)
response.close()
......@@ -102,11 +102,9 @@ class OtherNetworkTests(unittest.TestCase):
def test_ftp(self):
urls = [
'ftp://ftp.kernel.org/pub/linux/kernel/README',
'ftp://ftp.kernel.org/pub/linux/kernel/non-existent-file',
#'ftp://ftp.kernel.org/pub/leenox/kernel/test',
'ftp://gatekeeper.research.compaq.com/pub/DEC/SRC'
'/research-reports/00README-Legal-Rules-Regs',
'ftp://ftp.debian.org/debian/README',
('ftp://ftp.debian.org/debian/non-existent-file',
None, urllib2.URLError),
]
self._test_urls(urls, self._extra_handlers())
......@@ -155,12 +153,12 @@ class OtherNetworkTests(unittest.TestCase):
## self._test_urls(urls, self._extra_handlers()+[bauth, dauth])
def test_urlwithfrag(self):
urlwith_frag = "https://docs.python.org/2/glossary.html#glossary"
urlwith_frag = "http://www.pythontest.net/index.html#frag"
with test_support.transient_internet(urlwith_frag):
req = urllib2.Request(urlwith_frag)
res = urllib2.urlopen(req)
self.assertEqual(res.geturl(),
"https://docs.python.org/2/glossary.html#glossary")
"http://www.pythontest.net/index.html#frag")
def test_fileno(self):
req = urllib2.Request("http://www.example.com")
......@@ -255,6 +253,7 @@ class TimeoutTest(unittest.TestCase):
with test_support.transient_internet(url, timeout=None):
u = _urlopen_with_retry(url)
self.assertIsNone(u.fp._sock.fp._sock.gettimeout())
u.close()
def test_http_default_timeout(self):
self.assertIsNone(socket.getdefaulttimeout())
......@@ -266,6 +265,7 @@ class TimeoutTest(unittest.TestCase):
finally:
socket.setdefaulttimeout(None)
self.assertEqual(u.fp._sock.fp._sock.gettimeout(), 60)
u.close()
def test_http_no_timeout(self):
self.assertIsNone(socket.getdefaulttimeout())
......@@ -277,20 +277,23 @@ class TimeoutTest(unittest.TestCase):
finally:
socket.setdefaulttimeout(None)
self.assertIsNone(u.fp._sock.fp._sock.gettimeout())
u.close()
def test_http_timeout(self):
url = "http://www.example.com"
with test_support.transient_internet(url):
u = _urlopen_with_retry(url, timeout=120)
self.assertEqual(u.fp._sock.fp._sock.gettimeout(), 120)
u.close()
FTP_HOST = "ftp://ftp.mirror.nl/pub/gnu/"
FTP_HOST = 'ftp://ftp.debian.org/debian/'
def test_ftp_basic(self):
self.assertIsNone(socket.getdefaulttimeout())
with test_support.transient_internet(self.FTP_HOST, timeout=None):
u = _urlopen_with_retry(self.FTP_HOST)
self.assertIsNone(u.fp.fp._sock.gettimeout())
u.close()
def test_ftp_default_timeout(self):
self.assertIsNone(socket.getdefaulttimeout())
......@@ -301,6 +304,7 @@ class TimeoutTest(unittest.TestCase):
finally:
socket.setdefaulttimeout(None)
self.assertEqual(u.fp.fp._sock.gettimeout(), 60)
u.close()
def test_ftp_no_timeout(self):
self.assertIsNone(socket.getdefaulttimeout(),)
......@@ -311,11 +315,16 @@ class TimeoutTest(unittest.TestCase):
finally:
socket.setdefaulttimeout(None)
self.assertIsNone(u.fp.fp._sock.gettimeout())
u.close()
def test_ftp_timeout(self):
with test_support.transient_internet(self.FTP_HOST):
u = _urlopen_with_retry(self.FTP_HOST, timeout=60)
try:
u = _urlopen_with_retry(self.FTP_HOST, timeout=60)
except:
raise
self.assertEqual(u.fp.fp._sock.gettimeout(), 60)
u.close()
def test_main():
......
from __future__ import nested_scopes # Backward compat for 2.1
from unittest import TestCase
from wsgiref.util import setup_testing_defaults
from wsgiref.headers import Headers
from wsgiref.handlers import BaseHandler, BaseCGIHandler
from wsgiref import util
from wsgiref.validate import validator
from wsgiref.simple_server import WSGIServer, WSGIRequestHandler, demo_app
from wsgiref.simple_server import WSGIServer, WSGIRequestHandler
from wsgiref.simple_server import make_server
from StringIO import StringIO
from SocketServer import BaseServer
import os
import re
import sys
......@@ -113,6 +113,11 @@ class IntegrationTests(TestCase):
out, err = run_amock()
self.check_hello(out)
def test_request_length(self):
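# A request line longer than 64KB must be rejected with 414 Request-URI Too Long.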
out, err = run_amock(data="GET " + ("x" * 65537) + " HTTP/1.0\n\n")
self.assertEqual(out.splitlines()[0],
"HTTP/1.0 414 Request-URI Too Long")
def test_validated_hello(self):
out, err = run_amock(validator(hello_app))
# the middleware doesn't support len(), so content-length isn't there
......
......@@ -42,7 +42,12 @@ import _six as six
PYPY = hasattr(sys, 'pypy_version_info')
VERBOSE = sys.argv.count('-v') > 1
LIBUV = os.getenv('GEVENT_CORE_CFFI_ONLY') == 'libuv' # XXX: Formalize this better
WIN = sys.platform.startswith("win")
LINUX = sys.platform.startswith('linux')
# XXX: Formalize this better
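# libuv is assumed when explicitly requested, on PyPy/Windows (which only builds the libuv loop), or when gevent.core exposes it.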
LIBUV = os.getenv('GEVENT_CORE_CFFI_ONLY') == 'libuv' or (PYPY and WIN) or hasattr(gevent.core, 'libuv')
if '--debug-greentest' in sys.argv:
sys.argv.remove('--debug-greentest')
......@@ -57,7 +62,7 @@ OPTIONAL_MODULES = ['resolver_ares']
# on particular platforms; they generally contain partial
# implementations completed in different modules.
PLATFORM_SPECIFIC_SUFFIXES = ['2', '279', '3']
if sys.platform.startswith('win'):
if WIN:
PLATFORM_SPECIFIC_SUFFIXES.append('posix')
PY2 = None
......@@ -88,7 +93,7 @@ elif sys.version_info[0] == 2:
PYPY3 = PYPY and PY3
if sys.platform.startswith('win'):
if WIN:
NON_APPLICABLE_SUFFIXES.append("posix")
# This is intimately tied to FileObjectPosix
NON_APPLICABLE_SUFFIXES.append("fileobject2")
......@@ -103,6 +108,11 @@ def _do_not_skip(reason):
return f
return dec
if WIN:
skipOnWindows = unittest.skip
else:
skipOnWindows = _do_not_skip
if RUNNING_ON_APPVEYOR:
# See comments scattered around about timeouts and the timer
# resolution available on appveyor (lots of jitter). this
......@@ -370,11 +380,16 @@ class TestCaseMetaClass(type):
# Travis is slow and overloaded; Appveyor used to be faster, but
# as of Dec 2015 it's almost always slower and/or has much worse timer
# resolution
CI_TIMEOUT = 7
if PY3 and PYPY:
# pypy3 is very slow right now
CI_TIMEOUT = 10
LOCAL_TIMEOUT = 1
CI_TIMEOUT = 10
if (PY3 and PYPY) or (PYPY and WIN and LIBUV):
# pypy3 is very slow right now,
# as is PyPy2 on windows (which only has libuv)
CI_TIMEOUT = 15
if PYPY and WIN and LIBUV:
# slow and flaky timeouts
LOCAL_TIMEOUT = CI_TIMEOUT
else:
LOCAL_TIMEOUT = 1
DEFAULT_LOCAL_HOST_ADDR = 'localhost'
DEFAULT_LOCAL_HOST_ADDR6 = DEFAULT_LOCAL_HOST_ADDR
......@@ -410,6 +425,14 @@ class TestCase(TestCaseMetaClass("NewBase", (BaseTestCase,), {})):
if hasattr(self, 'cleanup'):
self.cleanup()
self._error = self._none
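# Close tracked resources before the close_on_teardown list is dropped; subclasses may override the hook.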
self._tearDownCloseOnTearDown()
try:
del self.close_on_teardown
except AttributeError:
pass
super(TestCase, self).tearDown()
def _tearDownCloseOnTearDown(self):
# XXX: Should probably reverse this
for x in self.close_on_teardown:
close = getattr(x, 'close', x)
......@@ -417,11 +440,7 @@ class TestCase(TestCaseMetaClass("NewBase", (BaseTestCase,), {})):
close()
except Exception:
pass
try:
del self.close_on_teardown
except AttributeError:
pass
super(TestCase, self).tearDown()
@classmethod
def setUpClass(cls):
......@@ -464,6 +483,7 @@ class TestCase(TestCaseMetaClass("NewBase", (BaseTestCase,), {})):
return splitext(basename(self.modulename))[0] + '.' + self.testcasename
_none = (None, None, None)
# (context, kind, value)
_error = _none
def expect_one_error(self):
......@@ -471,12 +491,12 @@ class TestCase(TestCaseMetaClass("NewBase", (BaseTestCase,), {})):
self._old_handle_error = gevent.get_hub().handle_error
gevent.get_hub().handle_error = self._store_error
def _store_error(self, where, type, value, tb):
def _store_error(self, where, t, value, tb):
del tb
if self._error != self._none:
gevent.get_hub().parent.throw(type, value)
gevent.get_hub().parent.throw(t, value)
else:
self._error = (where, type, value)
self._error = (where, t, value)
def peek_error(self):
return self._error
......@@ -487,18 +507,25 @@ class TestCase(TestCaseMetaClass("NewBase", (BaseTestCase,), {})):
finally:
self._error = self._none
def assert_error(self, type=None, value=None, error=None, where_type=None):
def assert_error(self, kind=None, value=None, error=None, where_type=None):
if error is None:
error = self.get_error()
if type is not None:
assert issubclass(error[1], type), error
econtext, ekind, evalue = error
if kind is not None:
self.assertIsInstance(kind, type)
try:
assert issubclass(ekind, kind), error
except TypeError as e:
# Seen on PyPy on Windows
print("TYPE ERROR", e, ekind, kind, type(kind))
raise
if value is not None:
if isinstance(value, str):
assert str(error[2]) == value, error
self.assertEqual(str(evalue), value)
else:
assert error[2] is value, error
self.assertIs(evalue, value)
if where_type is not None:
self.assertIsInstance(error[0], where_type)
self.assertIsInstance(econtext, where_type)
return error
if RUNNING_ON_APPVEYOR:
......@@ -777,18 +804,24 @@ def disabled_gc():
import re
# Linux/OS X/BSD platforms can implement this by calling out to lsof
def _run_lsof():
import tempfile
pid = os.getpid()
fd, tmpname = tempfile.mkstemp('get_open_files')
os.close(fd)
lsof_command = 'lsof -p %s > %s' % (pid, tmpname)
if os.system(lsof_command):
raise OSError("lsof failed")
with open(tmpname) as fobj:
data = fobj.read().strip()
os.remove(tmpname)
return data
if WIN:
def _run_lsof():
raise unittest.SkipTest("lsof not expected on Windows")
else:
def _run_lsof():
import tempfile
pid = os.getpid()
fd, tmpname = tempfile.mkstemp('get_open_files')
os.close(fd)
lsof_command = 'lsof -p %s > %s' % (pid, tmpname)
if os.system(lsof_command):
# XXX: This prints to the console an annoying message: 'lsof is not recognized'
raise unittest.SkipTest("lsof failed")
with open(tmpname) as fobj:
data = fobj.read().strip()
os.remove(tmpname)
return data
def default_get_open_files(pipes=False):
data = _run_lsof()
......@@ -822,7 +855,7 @@ def default_get_number_open_files():
else:
try:
return len(get_open_files(pipes=True)) - 1
except (OSError, AssertionError):
except (OSError, AssertionError, unittest.SkipTest):
return 0
lsof_get_open_files = default_get_open_files
......@@ -846,6 +879,11 @@ else:
Return a list of popenfile and pconn objects.
Note that other than `fd`, they have different attributes.
.. important:: If you want to find open sockets on Windows
and Linux, the socket must at least be listening
(socket.listen(1)). Unlike the lsof implementation, this will only
return sockets in such a state.
"""
results = dict()
process = psutil.Process()
......@@ -863,9 +901,6 @@ else:
# num_fds is unix only. Is num_handles close enough on Windows?
return 0
if RUNNING_ON_TRAVIS:
# XXX: Note: installing psutil on the travis linux vm caused failures in test__makefile_refs.
get_open_files = lsof_get_open_files
if PYPY:
......
......@@ -152,6 +152,10 @@ if PYPY:
## Unknown; can't reproduce locally on OS X
'FLAKY test_subprocess.py', # timeouts on one test.
## PyPy3 5.9.0-beta seems to have dropped recvmsg_into?
'test_socket.py',
'FLAKY test_ssl.py',
]
......
......@@ -15,6 +15,12 @@ import re
TRAVIS = os.environ.get("TRAVIS") == "true"
OSX = sys.platform == 'darwin'
PYPY = hasattr(sys, 'pypy_version_info')
WIN = sys.platform.startswith("win")
# XXX: Formalize this better
LIBUV = os.getenv('GEVENT_CORE_CFFI_ONLY') == 'libuv' or (PYPY and WIN)
# By default, test cases are expected to switch and emit warnings if there was none
# If a test is found in this list, it's expected not to switch.
......@@ -184,7 +190,7 @@ if 'thread' in os.getenv('GEVENT_FILE', ''):
]
if os.getenv('GEVENT_CORE_CFFI_ONLY') == 'libuv':
if LIBUV:
# epoll appears to work with these just fine in some cases;
# kqueue (at least on OS X, the only tested kqueue system)
# never does (failing with abort())
......@@ -218,6 +224,75 @@ if os.getenv('GEVENT_CORE_CFFI_ONLY') == 'libuv':
'test_selectors.PollSelectorTestCase.test_timeout',
]
if WIN and PYPY:
# From PyPy2-v5.9.0, using its version of tests,
# which do work on darwin (and possibly linux?).
# I can't reproduce these failures in a local VM running Windows 10
# and the same PyPy version.
disabled_tests += [
# appears to timeout?
'test_threading.ThreadTests.test_finalize_with_trace',
'test_asyncore.DispatcherWithSendTests_UsePoll.test_send',
'test_asyncore.DispatcherWithSendTests.test_send',
# These, which use asyncore, fail with
# 'NoneType is not iterable' on 'conn, addr = self.accept()'
# That returns None when the underlying socket raises
# EWOULDBLOCK, which it will do because it's set to non-blocking
# both by gevent and by libuv (at the level below python's knowledge)
# I can *sometimes* reproduce these locally; it seems to be some sort
# of race condition.
'test_ftplib.TestFTPClass.test_acct',
'test_ftplib.TestFTPClass.test_all_errors',
'test_ftplib.TestFTPClass.test_cwd',
'test_ftplib.TestFTPClass.test_delete',
'test_ftplib.TestFTPClass.test_dir',
'test_ftplib.TestFTPClass.test_exceptions',
'test_ftplib.TestFTPClass.test_getwelcome',
'test_ftplib.TestFTPClass.test_line_too_long',
'test_ftplib.TestFTPClass.test_login',
'test_ftplib.TestFTPClass.test_makepasv',
'test_ftplib.TestFTPClass.test_mkd',
'test_ftplib.TestFTPClass.test_nlst',
'test_ftplib.TestFTPClass.test_pwd',
'test_ftplib.TestFTPClass.test_quit',
'test_ftplib.TestFTPClass.test_makepasv',
'test_ftplib.TestFTPClass.test_rename',
'test_ftplib.TestFTPClass.test_retrbinary',
'test_ftplib.TestFTPClass.test_retrbinary_rest',
'test_ftplib.TestFTPClass.test_retrlines',
'test_ftplib.TestFTPClass.test_retrlines_too_long',
'test_ftplib.TestFTPClass.test_rmd',
'test_ftplib.TestFTPClass.test_sanitize',
'test_ftplib.TestFTPClass.test_set_pasv',
'test_ftplib.TestFTPClass.test_size',
'test_ftplib.TestFTPClass.test_storbinary',
'test_ftplib.TestFTPClass.test_storbinary_rest',
'test_ftplib.TestFTPClass.test_storlines',
'test_ftplib.TestFTPClass.test_storlines_too_long',
'test_ftplib.TestFTPClass.test_voidcmd',
# This one times out
'test_ftplib.TestFTPClass.test_makeport',
# More unexpected timeouts
'test_smtplib.TooLongLineTests.testLineTooLong',
'test_smtplib.GeneralTests.testTimeoutValue',
'test_ssl.ContextTests.test__https_verify_envvar',
'test_subprocess.ProcessTestCase.test_check_output',
'test_telnetlib.ReadTests.test_read_eager_A',
# A timeout, possibly because of the way we handle interrupts?
'test_socketserver.SocketServerTest.test_InterruptedServerSelectCall',
# This one might be like 'test_urllib2_localnet.TestUrlopen.test_https_with_cafile'?
'test_httpservers.BaseHTTPServerTestCase.test_command',
# But on Windows, our gc fix for that doesn't work anyway
# so we have to disable it.
'test_urllib2_localnet.TestUrlopen.test_https_with_cafile',
]
def _make_run_with_original(mod_name, func_name):
@contextlib.contextmanager
def with_orig():
......@@ -474,7 +549,7 @@ if hasattr(sys, 'pypy_version_info') and sys.version_info[:2] >= (3, 3):
if hasattr(sys, 'pypy_version_info') and sys.pypy_version_info[:4] in ( # pylint:disable=no-member
(5, 8, 0, 'beta'),
(5, 8, 0, 'beta'), (5, 9, 0, 'beta'),
):
# 3.5 is beta. Hard to say what are real bugs in us vs real bugs in pypy.
# For that reason, we pin these patches exactly to the version in use.
......@@ -517,6 +592,8 @@ if hasattr(sys, 'pypy_version_info') and sys.pypy_version_info[:4] in ( # pylint
'test_threading.ThreadJoinOnShutdown.test_1_join_on_shutdown',
]
if hasattr(sys, 'pypy_version_info'):
wrapped_tests.update({
# XXX: gevent: The error that was raised by that last call
# left a socket open on the server or client. The server gets
......
......@@ -21,7 +21,8 @@ class TestWatchers(unittest.TestCase):
def test_io(self):
if sys.platform == 'win32':
Error = IOError
# libev raises IOError, libuv raises ValueError
Error = (IOError, ValueError)
win32 = True
else:
Error = ValueError
......
......@@ -17,77 +17,86 @@ EV_USE_INOTIFY = getattr(gevent.core, 'EV_USE_INOTIFY', None)
WIN = sys.platform.startswith('win')
try:
open(filename, 'wb', buffering=0).close()
assert os.path.exists(filename), filename
def write():
with open(filename, 'wb', buffering=0) as f:
f.write(b'x')
start = time.time()
greenlet = gevent.spawn_later(DELAY, write)
# If we don't specify an interval, we default to zero.
# libev interprets that as meaning to use its default interval,
# which is about 5 seconds. If we go below its minimum check
# threshold, it bumps it up to the minimum.
watcher = hub.loop.stat(filename, interval=-1)
assert watcher.path == filename, (watcher.path, filename)
filenames = filename if isinstance(filename, bytes) else filename.encode('ascii')
assert watcher._paths == filenames, (watcher._paths, filenames)
assert watcher.interval == -1
def check_attr(name, none):
# Deals with the complex behaviour of the 'attr' and 'prev'
# attributes on Windows. This codifies it, rather than simply letting
# the test fail, so we know exactly when and what changes it.
try:
x = getattr(watcher, name)
except ImportError:
if WIN:
# the 'posix' module is not available
pass
def test():
try:
open(filename, 'wb', buffering=0).close()
assert os.path.exists(filename), filename
def write():
with open(filename, 'wb', buffering=0) as f:
f.write(b'x')
start = time.time()
greenlet = gevent.spawn_later(DELAY, write)
# If we don't specify an interval, we default to zero.
# libev interprets that as meaning to use its default interval,
# which is about 5 seconds. If we go below its minimum check
# threshold, it bumps it up to the minimum.
watcher = hub.loop.stat(filename, interval=-1)
assert watcher.path == filename, (watcher.path, filename)
filenames = filename if isinstance(filename, bytes) else filename.encode('ascii')
assert watcher._paths == filenames, (watcher._paths, filenames)
assert watcher.interval == -1
def check_attr(name, none):
# Deals with the complex behaviour of the 'attr' and 'prev'
# attributes on Windows. This codifies it, rather than simply letting
# the test fail, so we know exactly when and what changes it.
try:
x = getattr(watcher, name)
except ImportError:
if WIN:
# the 'posix' module is not available
pass
else:
raise
else:
raise
else:
if WIN:
# The ImportError is only raised for the first time;
# after that, the attribute starts returning None
assert x is None, "Only None is supported on Windows"
if none:
assert x is None, x
else:
assert x is not None, x
with gevent.Timeout(5 + DELAY + 0.5):
hub.wait(watcher)
reaction = time.time() - start - DELAY
print('Watcher %s reacted after %.4f seconds (write)' % (watcher, reaction))
if reaction >= DELAY and EV_USE_INOTIFY:
print('WARNING: inotify failed (write)')
assert reaction >= 0.0, 'Watcher %s reacted too early (write): %.3fs' % (watcher, reaction)
check_attr('attr', False)
check_attr('prev', False)
# The watcher interval changed after it started; -1 is illegal
assert watcher.interval != -1, watcher.interval
greenlet.join()
gevent.spawn_later(DELAY, os.unlink, filename)
start = time.time()
with gevent.Timeout(5 + DELAY + 0.5):
hub.wait(watcher)
reaction = time.time() - start - DELAY
print('Watcher %s reacted after %.4f seconds (unlink)' % (watcher, reaction))
if reaction >= DELAY and EV_USE_INOTIFY:
print('WARNING: inotify failed (unlink)')
assert reaction >= 0.0, 'Watcher %s reacted too early (unlink): %.3fs' % (watcher, reaction)
check_attr('attr', True)
check_attr('prev', False)
finally:
if os.path.exists(filename):
os.unlink(filename)
if WIN:
# The ImportError is only raised for the first time;
# after that, the attribute starts returning None
assert x is None, "Only None is supported on Windows"
if none:
assert x is None, x
else:
assert x is not None, x
with gevent.Timeout(5 + DELAY + 0.5):
hub.wait(watcher)
now = time.time()
if now - start - DELAY <= 0.0:
# Sigh. This is especially true on PyPy.
assert WIN, ("Bad timer resolution expected on Windows, test is useless", start, now)
return
reaction = now - start - DELAY
print('Watcher %s reacted after %.4f seconds (write)' % (watcher, reaction))
if reaction >= DELAY and EV_USE_INOTIFY:
print('WARNING: inotify failed (write)')
assert reaction >= 0.0, 'Watcher %s reacted too early (write): %.3fs' % (watcher, reaction)
check_attr('attr', False)
check_attr('prev', False)
# The watcher interval changed after it started; -1 is illegal
assert watcher.interval != -1, watcher.interval
greenlet.join()
gevent.spawn_later(DELAY, os.unlink, filename)
start = time.time()
with gevent.Timeout(5 + DELAY + 0.5):
hub.wait(watcher)
reaction = time.time() - start - DELAY
print('Watcher %s reacted after %.4f seconds (unlink)' % (watcher, reaction))
if reaction >= DELAY and EV_USE_INOTIFY:
print('WARNING: inotify failed (unlink)')
assert reaction >= 0.0, 'Watcher %s reacted too early (unlink): %.3fs' % (watcher, reaction)
check_attr('attr', True)
check_attr('prev', False)
finally:
if os.path.exists(filename):
os.unlink(filename)
if __name__ == '__main__':
test()
......@@ -68,7 +68,7 @@ class Test(greentest.TestCase):
loop.destroy()
@greentest.skipOnAppVeyor("Stdout can't be watched on Win32")
@greentest.skipOnWindows("Stdout can't be watched on Win32")
def test_reuse_io(self):
loop = core.loop(default=False)
......
......@@ -6,20 +6,22 @@ import ssl
import threading
import unittest
import errno
import weakref
import greentest
from greentest import TestCase
dirname = os.path.dirname(os.path.abspath(__file__))
certfile = os.path.join(dirname, '2.7/keycert.pem')
pid = os.getpid()
import sys
PY3 = sys.version_info[0] >= 3
PY3 = greentest.PY3
PYPY = greentest.PYPY
fd_types = int
if PY3:
long = int
fd_types = (int, long)
WIN = sys.platform.startswith("win")
WIN = greentest.WIN
from greentest import get_open_files
try:
......@@ -28,7 +30,7 @@ except ImportError:
psutil = None
class Test(TestCase):
class Test(greentest.TestCase):
extra_allowed_open_states = ()
......@@ -105,12 +107,27 @@ class Test(TestCase):
def make_open_socket(self):
s = socket.socket()
s.bind(('127.0.0.1', 0))
if WIN:
# Windows doesn't show as open until this
self._close_on_teardown(s)
if WIN or greentest.LINUX:
# Windows and linux (with psutil) doesn't show as open until
# we call listen (linux with lsof accepts either)
s.listen(1)
self.assert_open(s, s.fileno())
return s
def _close_on_teardown(self, resource):
# Keeping raw sockets alive keeps SSL sockets
# from being closed too, at least on CPython, so we
# need to use weakrefs
if 'close_on_teardown' not in self.__dict__:
self.close_on_teardown = []
self.close_on_teardown.append(weakref.ref(resource))
return resource
def _tearDownCloseOnTearDown(self):
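# Dereference the weakrefs collected by _close_on_teardown; anything already collected is dropped.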
self.close_on_teardown = [r() for r in self.close_on_teardown if r() is not None]
super(Test, self)._tearDownCloseOnTearDown()
self.close_on_teardown = []
class TestSocket(Test):
......@@ -239,12 +256,55 @@ class TestSocket(Test):
listener.close()
class TestSSL(Test):
def _ssl_connect_task(self, connector, port):
connector.connect(('127.0.0.1', port))
try:
# Note: We get ResourceWarning about 'x'
# on Python 3 if we don't join the spawned thread
x = ssl.wrap_socket(connector)
except socket.error:
# Observed on Windows with PyPy2 5.9.0 and libuv:
# if we don't switch in a timely enough fashion,
# the server side runs ahead of us and closes
# our socket first, so this fails.
pass
else:
self._close_on_teardown(x)
def _make_ssl_connect_task(self, connector, port):
t = threading.Thread(target=self._ssl_connect_task, args=(connector, port))
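# Daemon thread so a stuck TLS handshake cannot keep the test process alive.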
t.daemon = True
return t
def __cleanup(self, task, *sockets):
# workaround for test_server_makefile1, test_server_makefile2,
# test_server_simple, test_serverssl_makefile1.
# On PyPy on Linux, it is important to join the SSL Connect
# Task FIRST, before closing the sockets. If we do it after
# (which makes more sense) we hang. It's not clear why, except
# that it has something to do with context switches. Inserting a call to
# gevent.sleep(0.1) instead of joining the task has the same
# effect. If the previous tests hang, then later tests can fail with
# SSLError: unknown alert type.
# XXX: Why do those two things happen?
# On PyPy on macOS, we don't have that problem and can use the
# more logical order.
task.join()
for s in sockets:
s.close()
def test_simple_close(self):
s = self.make_open_socket()
fileno = s.fileno()
s = ssl.wrap_socket(s)
self._close_on_teardown(s)
fileno = s.fileno()
self.assert_open(s, fileno)
s.close()
......@@ -252,9 +312,9 @@ class TestSSL(Test):
def test_makefile1(self):
s = self.make_open_socket()
fileno = s.fileno()
s = ssl.wrap_socket(s)
self._close_on_teardown(s)
fileno = s.fileno()
self.assert_open(s, fileno)
f = s.makefile()
......@@ -269,6 +329,7 @@ class TestSSL(Test):
fileno = s.fileno()
s = ssl.wrap_socket(s)
self._close_on_teardown(s)
fileno = s.fileno()
self.assert_open(s, fileno)
f = s.makefile()
......@@ -288,24 +349,20 @@ class TestSSL(Test):
connector = socket.socket()
self._close_on_teardown(connector)
def connect():
connector.connect(('127.0.0.1', port))
x = ssl.wrap_socket(connector)
self._close_on_teardown(x)
t = threading.Thread(target=connect)
t = self._make_ssl_connect_task(connector, port)
t.start()
try:
client_socket, _addr = listener.accept()
self._close_on_teardown(client_socket)
client_socket = ssl.wrap_socket(client_socket, keyfile=certfile, certfile=certfile, server_side=True)
self._close_on_teardown(client_socket)
fileno = client_socket.fileno()
self.assert_open(client_socket, fileno)
client_socket.close()
self.assert_closed(client_socket, fileno)
finally:
t.join()
listener.close()
self.__cleanup(t, listener, connector)
def test_server_makefile1(self):
listener = socket.socket()
......@@ -314,20 +371,18 @@ class TestSSL(Test):
port = listener.getsockname()[1]
listener.listen(1)
connector = socket.socket()
self._close_on_teardown(connector)
def connect():
connector.connect(('127.0.0.1', port))
x = ssl.wrap_socket(connector)
self._close_on_teardown(x)
t = threading.Thread(target=connect)
t = self._make_ssl_connect_task(connector, port)
t.start()
try:
client_socket, _addr = listener.accept()
self._close_on_teardown(client_socket)
client_socket = ssl.wrap_socket(client_socket, keyfile=certfile, certfile=certfile, server_side=True)
self._close_on_teardown(client_socket)
fileno = client_socket.fileno()
self.assert_open(client_socket, fileno)
f = client_socket.makefile()
......@@ -337,8 +392,8 @@ class TestSSL(Test):
f.close()
self.assert_closed(client_socket, fileno)
finally:
t.join()
connector.close()
self.__cleanup(t, listener, connector)
def test_server_makefile2(self):
listener = socket.socket()
......@@ -349,17 +404,14 @@ class TestSSL(Test):
connector = socket.socket()
self._close_on_teardown(connector)
def connect():
connector.connect(('127.0.0.1', port))
x = ssl.wrap_socket(connector)
self._close_on_teardown(x)
t = threading.Thread(target=connect)
t = self._make_ssl_connect_task(connector, port)
t.start()
try:
client_socket, _addr = listener.accept()
self._close_on_teardown(client_socket)
client_socket = ssl.wrap_socket(client_socket, keyfile=certfile, certfile=certfile, server_side=True)
self._close_on_teardown(client_socket)
fileno = client_socket.fileno()
self.assert_open(client_socket, fileno)
f = client_socket.makefile()
......@@ -370,9 +422,7 @@ class TestSSL(Test):
client_socket.close()
self.assert_closed(client_socket, fileno)
finally:
t.join()
listener.close()
connector.close()
self.__cleanup(t, connector, listener, client_socket)
def test_serverssl_makefile1(self):
listener = socket.socket()
......@@ -385,12 +435,7 @@ class TestSSL(Test):
connector = socket.socket()
self._close_on_teardown(connector)
def connect():
connector.connect(('127.0.0.1', port))
x = ssl.wrap_socket(connector)
self._close_on_teardown(x)
t = threading.Thread(target=connect)
t = self._make_ssl_connect_task(connector, port)
t.start()
try:
......@@ -404,9 +449,7 @@ class TestSSL(Test):
f.close()
self.assert_closed(client_socket, fileno)
finally:
t.join()
listener.close()
connector.close()
self.__cleanup(t, listener, connector)
def test_serverssl_makefile2(self):
listener = socket.socket()
......@@ -425,6 +468,7 @@ class TestSSL(Test):
connector.close()
t = threading.Thread(target=connect)
t.daemon = True
t.start()
try:
......@@ -443,8 +487,7 @@ class TestSSL(Test):
client_socket.close()
self.assert_closed(client_socket, fileno)
finally:
t.join()
listener.close()
self.__cleanup(t, listener)
if __name__ == '__main__':
......
......@@ -3,13 +3,13 @@ import _six as six
from os import pipe
import gevent
from gevent import os
from greentest import TestCase, main
from greentest import TestCase, main, CI_TIMEOUT
from gevent import Greenlet, joinall
class TestOS_tp(TestCase):
__timeout__ = 5
__timeout__ = CI_TIMEOUT
def pipe(self):
return pipe()
......@@ -83,7 +83,7 @@ if hasattr(os, 'fork_and_watch'):
class TestForkAndWatch(TestCase):
__timeout__ = 5
__timeout__ = CI_TIMEOUT
def test_waitpid_all(self):
# Cover this specific case.
......
......@@ -6,9 +6,10 @@ import greentest
# XXX also test: send, sendall, recvfrom, recvfrom_into, sendto
def readall(socket, address):
while socket.recv(1024):
def readall(sock, _):
while sock.recv(1024):
pass
sock.close()
class Test(greentest.TestCase):
......@@ -32,7 +33,7 @@ class Test(greentest.TestCase):
receiver.join(timeout=0.1)
assert receiver.ready(), receiver
self.assertEqual(receiver.value, None)
assert isinstance(receiver.exception, socket.error)
self.assertIsInstance(receiver.exception, socket.error)
self.assertEqual(receiver.exception.errno, socket.EBADF)
finally:
receiver.kill()
......@@ -47,6 +48,7 @@ class Test(greentest.TestCase):
self.assertRaises(AssertionError, sock.recv, 25)
finally:
receiver.kill()
sock.close()
if __name__ == '__main__':
......
import sys
from _six import xrange
if 'runtestcase' in sys.argv[1:]:
import gevent
import gevent.subprocess
gevent.spawn(sys.exit, 'bye')
gevent.subprocess.Popen('python -c "1/0"'.split())
# Look closely, this doesn't actually do anything, that's a string
# not a division
gevent.subprocess.Popen([sys.executable, '-c', '"1/0"'])
gevent.sleep(1)
else:
import subprocess
for _ in xrange(5):
out, err = subprocess.Popen([sys.executable, __file__, 'runtestcase'], stderr=subprocess.PIPE).communicate()
if b'refs' in err:
for _ in range(5):
out, err = subprocess.Popen([sys.executable, '-W', 'ignore',
__file__, 'runtestcase'],
stderr=subprocess.PIPE).communicate()
if b'refs' in err: # Something to do with debug mode python builds?
assert err.startswith(b'bye'), repr(err)
else:
assert err.strip() == b'bye', repr(err)
......@@ -15,7 +15,8 @@ PYPY = hasattr(sys, 'pypy_version_info')
class TestCase(greentest.TestCase):
# These generally need more time
__timeout__ = greentest.CI_TIMEOUT
pool = None
def cleanup(self):
......@@ -107,7 +108,7 @@ def sqr_random_sleep(x):
TIMEOUT1, TIMEOUT2, TIMEOUT3 = 0.082, 0.035, 0.14
class _AbstractPoolTest(TestCase):
__timeout__ = 5
size = 1
ClassUnderTest = ThreadPool
......@@ -256,7 +257,7 @@ class TestPool3(TestPool):
class TestPool10(TestPool):
size = 10
__timeout__ = 5
# class TestJoinSleep(greentest.GenericGetTestCase):
......
......@@ -54,7 +54,7 @@ def run_many(tests, expected=(), failfast=False, quiet=False):
passed = {}
NWORKERS = min(len(tests), NWORKERS) or 1
print('thread pool size: %s' % NWORKERS)
print('thread pool size:', NWORKERS, '\n')
pool = ThreadPool(NWORKERS)
util.BUFFER_OUTPUT = NWORKERS > 1
......