Commit d29d50ae authored by Jason Madden

Add deploy wheel automation to Travis.

Refs #1555

Let testing work in manylinux when test.support isn't installed.

If test.support can't be imported, many of the stdlib tests can't run either.
parent c3d03463
......@@ -11,10 +11,13 @@ dist: xenial
group: travis_latest
os: linux
env:
global:
- BUILD_RUNTIMES=$HOME/.runtimes
- PYTHONHASHSEED=8675309
- PYTHONUNBUFFERED=1
- PYTHONDONTWRITEBYTECODE=1
- PIP_UPGRADE_STRATEGY=eager
- CC="ccache gcc"
- CCACHE_NOCPP2=true
......@@ -28,9 +31,13 @@ env:
- CPPFLAGS="-I$BUILD_LIBS/include -DEV_VERIFY=3"
- LDFLAGS="-L$BUILD_LIBS/lib"
- LD_LIBRARY_PATH="$BUILD_LIBS/lib"
# Uploading built wheels for releases.
# TWINE_PASSWORD is encrypted and stored directly in the
# travis repo settings.
- TWINE_USERNAME="__token__"
# Note that this list is again *manually* expanded
# for the 'cache' stage. The benefit of doing this (instead of
# for the 'build-gevent' stage. The benefit of doing this (instead of
# only listing a single version here, or using a different 'language')
# is that we get separate caches per version. This keeps them small,
# and prevents stepping on each other when we change Python minor
......@@ -51,25 +58,6 @@ env:
- TRAVIS_PYTHON_VERSION=pypy3.6
- TRAVIS_PYTHON_VERSION=2.7 GEVENTSETUP_EMBED=0 GEVENTSETUP_EV_VERIFY=3
before_install:
- export PATH=$BUILD_RUNTIMES/snakepit/$TRAVIS_PYTHON_VERSION.d/bin:$PATH
- export G_SITE=$BUILD_RUNTIMES/snakepit/$TRAVIS_PYTHON_VERSION.d/lib/*/site-packages/
# Restore the configure caches
- if [ -f $BUILD_LIBS/config.cache.ares ]; then cp $BUILD_LIBS/config.cache.ares deps/c-ares/config.cache ; fi
- if [ -f $BUILD_LIBS/config.cache.libev ]; then cp $BUILD_LIBS/config.cache.libev deps/libev/config.cache ; fi
- if [ -f $BUILD_LIBS/config.cache.libuv ]; then cp $BUILD_LIBS/config.cache.libuv deps/libuv/config.cache ; fi
before_script:
# Show some details of interest
- python --version
- >
python -c "import greenlet; print(greenlet, greenlet.__version__)"
- >
python -c "import gevent.core; print(gevent.core.loop)"
- >
python -c "import gevent.ares; print(gevent.ares)"
cache:
pip: true
directories:
......@@ -77,16 +65,43 @@ cache:
- $HOME/.runtimes
- $HOME/.wheelhouse
- $HOME/.ccache
- $HOME/.libs
- $BUILD_LIBS
- $HOME/Library/Caches/pip
before_cache:
- rm -f $HOME/.cache/pip/log/debug.log
# Store the configure caches. Having a cache can speed up c-ares
# configure from 2-3 minutes to 20 seconds.
- mkdir -p $BUILD_LIBS
- cp deps/c-ares/config.cache $BUILD_LIBS/config.cache.ares
- cp deps/libev/config.cache $BUILD_LIBS/config.cache.libev
- cp deps/libuv/config.cache $BUILD_LIBS/config.cache.libuv
- if [ -f deps/c-ares/config.cache ]; then cp deps/c-ares/config.cache $BUILD_LIBS/config.cache.ares ; fi
- if [ -f deps/libev/config.cache ]; then cp deps/libev/config.cache $BUILD_LIBS/config.cache.libev ; fi
- if [ -f deps/libuv/config.cache ]; then cp deps/libuv/config.cache $BUILD_LIBS/config.cache.libuv ; fi
before_install:
- export PATH=$BUILD_RUNTIMES/snakepit/$TRAVIS_PYTHON_VERSION.d/bin:$PATH
- export G_SITE=$BUILD_RUNTIMES/snakepit/$TRAVIS_PYTHON_VERSION.d/lib/*/site-packages/
# Restore the configure caches
- if [ -f $BUILD_LIBS/config.cache.ares ]; then cp $BUILD_LIBS/config.cache.ares deps/c-ares/config.cache ; fi
- if [ -f $BUILD_LIBS/config.cache.libev ]; then cp $BUILD_LIBS/config.cache.libev deps/libev/config.cache ; fi
- if [ -f $BUILD_LIBS/config.cache.libuv ]; then cp $BUILD_LIBS/config.cache.libuv deps/libuv/config.cache ; fi
- |
if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
whoami
mkdir -p $HOME/Library/Caches/pip
chown -R `whoami` $HOME/Library/Caches/pip
git clone https://github.com/MacPython/terryfy
source terryfy/travis_tools.sh
get_python_environment $TERRYFY_PYTHON venv
unset CC
fi
before_script:
# Show some details of interest
- |
python --version
python -c 'import greenlet; print(greenlet, greenlet.__version__)'
python -c 'import gevent.core; print(gevent.core.loop)'
python -c 'import gevent.ares; print(gevent.ares)'
# Installing is taken care of by the first stage.
......@@ -112,15 +127,30 @@ script:
# Submit coverage info
after_success:
- python -m coverage combine
- python -m coverage report -i
- python -m coveralls
- python -m coverage combine || true
- python -m coverage report -i || true
- python -m coveralls || true
- |
if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
ls -l dist
twine check dist/*
if [[ $TRAVIS_TAG ]]; then
twine upload --skip-existing dist/*
fi
fi
- |
if [[ -n "$DOCKER_IMAGE" ]]; then
ls -l wheelhouse
twine check wheelhouse/*
if [[ $TRAVIS_TAG ]]; then
twine upload --skip-existing wheelhouse/*
fi
fi
stages:
- build-gevent
- test
- test-ares
jobs:
......@@ -139,7 +169,7 @@ jobs:
# First, the build dependencies (see setup.cfg)
# so that we don't have to use build isolation and can better use the cache;
# Note that we can't use -U for cffi and greenlet on PyPy.
- &build-gevent-deps pip install -U setuptools wheel && pip install -U 'cffi;platform_python_implementation=="CPython"' cython 'greenlet;platform_python_implementation=="CPython"'
- &build-gevent-deps pip install -U setuptools wheel twine && pip install -U 'cffi;platform_python_implementation=="CPython"' cython 'greenlet;platform_python_implementation=="CPython"'
# Next, build the wheel *in place*. This helps ccache, and also lets us cache the configure
# output (pip install uses a random temporary directory, making this difficult)
- python setup.py bdist_wheel
......@@ -194,7 +224,26 @@ jobs:
- python -c 'import gevent.libuv.loop as CF; assert not CF.libuv.LIBUV_EMBED'
# Ok, now we switch to the test stage. These are all in addition
# to the jobs created by the matrix (and should override the `script` command).
# to the jobs created by the matrix (and should override the
# `script` command).
# The manylinux builds and tests.
# These take a while, so get them started while others proceed in parallel.
- stage: test
name: 64-bit manylinux wheels (all Pythons)
services: docker
env: DOCKER_IMAGE=quay.io/pypa/manylinux2010_x86_64
install: docker pull $DOCKER_IMAGE
script: bash scripts/releases/make-manylinux
before_script: pip install -U twine
- stage: test
name: 32-bit manylinux wheels (all Pythons)
services: docker
env: DOCKER_IMAGE=quay.io/pypa/manylinux2010_i686 PRE_CMD=linux32
install: docker pull $DOCKER_IMAGE
script: bash scripts/releases/make-manylinux
before_script: pip install -U twine
# Lint the code. Because this is a separate job, even if it fails fast
# the tests will still run. Put it at the top for fast feedback.
......@@ -314,6 +363,44 @@ jobs:
# env: TRAVIS_PYTHON_VERSION=pypy3.6
# name: dnspythonpypy36
###
# Python on macOS.
# These take a while, and tend to gum up the works getting started.
# TODO: maybe these and manylinux builds should be their own stage?
###
# It's important to use 'macpython' builds to get the least
# restrictive wheel tag. It's also important to avoid
# 'homebrew 3' because it floats instead of being a specific
# version.
# TODO: This could probably be better broken into stages and just
# generally added to the matrix.
- &test-base-osx
name: Python 2.7 wheels for MacOS
os: osx
osx_image: xcode11.3
compiler: clang
# We require at least 2.7.15 to upload wheels.
# See https://github.com/zopefoundation/BTrees/issues/113
env: TERRYFY_PYTHON='macpython 2.7.17'
install:
- mkdir -p $BUILD_LIBS/lib
- *build-gevent-deps
- python setup.py bdist_wheel
- pip uninstall -y gevent
- pip install -U --no-compile `ls dist/gevent*whl`[test]
- <<: *test-base-osx
name: Python 3.6 wheels for MacOS
env: TERRYFY_PYTHON='macpython 3.6.8'
- <<: *test-base-osx
name: Python 3.7 wheels for MacOS
env: TERRYFY_PYTHON='macpython 3.7.6'
- <<: *test-base-osx
name: Python 3.8 wheels for MacOS
env: TERRYFY_PYTHON='macpython 3.8.1'
notifications:
email: false
......@@ -320,6 +320,9 @@ if sys.platform == 'darwin':
# libuv
'-Wno-tautological-compare',
'-Wno-implicit-function-declaration',
# libev
'-Wno-unused-value',
'-Wno-macro-redefined',
)
## Distutils extensions
......
......@@ -27,7 +27,7 @@ environment:
secure: m1SSFQeqMo/O65xd0tKYT5wBnqVHSmX8nWw5aui+i65GWODEkXbRp4QQha9PLNtiQvYBTO4tAkWaimWhEHZyx6Zun/0yEcBmovFkLaLWXKdekIDcX+Oi0aefP1t4yoo8C9amOLMAV/t229P7hzqCUDHB9JHvhOWbBMsCJ4VCIKCCiMObUeSazD4lBwgiwMHHfyfMF+xWs24jc5D/7a/xpSghvzoaoLl/gBDCy1op2tsuAIgJCF0P1YjUP/bzVNqmjMa2bG5pQY5n8mskuTTqdA==
matrix:
matrix:
# Pre-installed Python versions, which Appveyor may upgrade to
# a later point release.
......
Let CI (Travis and Appveyor) build and upload release wheels for
Windows, macOS and manylinux. As part of this, (a subset of) gevent's
tests can now run even if the standard library's ``test.support`` module has
been stripped.
......@@ -5,6 +5,15 @@
set -e
export PYTHONUNBUFFERED=1
export PYTHONDONTWRITEBYTECODE=1
# Use a fixed hash seed for reproducibility
export PYTHONHASHSEED=8675309
# Disable tests that use external network resources;
# too often we get failures to resolve DNS names or failures
# to connect on AppVeyor.
export GEVENTTEST_USE_RESOURCES="-network"
export CI=1
export TRAVIS=true
export GEVENT_MANYLINUX=1
if [ -d /gevent -a -d /opt/python ]; then
# Running inside docker
......@@ -18,9 +27,13 @@ if [ -d /gevent -a -d /opt/python ]; then
cd /tmp/build
git clone /gevent gevent
cd gevent
PATH=$variant/bin:$PATH $variant/bin/pip wheel . -w dist
PATH=$variant/bin:$PATH $variant/bin/python -mpip wheel . -w dist
auditwheel repair --plat manylinux2010_x86_64 dist/gevent*.whl
cp wheelhouse/gevent*.whl /gevent/wheelhouse
PATH=$variant/bin:$PATH $variant/bin/python -mpip install -U --no-compile `ls dist/gevent*whl`[test]
PATH=$variant/bin:$PATH $variant/bin/python -mgevent.tests
cd /gevent
rm -rf /tmp/build
done
......@@ -29,3 +42,4 @@ if [ -d /gevent -a -d /opt/python ]; then
fi
docker run --rm -ti -v "$(pwd):/gevent" quay.io/pypa/manylinux2010_x86_64 /gevent/scripts/releases/$(basename $0)
ls -l wheelhouse
......@@ -461,10 +461,14 @@ class socket(_socketcommon.SocketMixin):
if hasattr(_socket.socket, 'recvmsg_into'):
def recvmsg_into(self, *args):
def recvmsg_into(self, buffers, *args):
while True:
try:
return self._sock.recvmsg_into(*args)
if args:
# The C code is sensitive about whether extra arguments are
# passed or not.
return self._sock.recvmsg_into(buffers, *args)
return self._sock.recvmsg_into(buffers)
except error as ex:
if ex.args[0] != EWOULDBLOCK or self.timeout == 0.0:
raise
......
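For context, the stdlib call being wrapped here takes a list of buffers plus optional ancillary-buffer-size and flags arguments, which is why the wrapper forwards the extras only when they were actually supplied. A minimal usage sketch of the underlying stdlib API (Unix-only, since recvmsg_into is not available everywhere, hence the hasattr guard above); this is illustrative, not gevent code:

    # Usage sketch of socket.recvmsg_into (stdlib API, POSIX-only). It fills
    # the given buffers in place and returns (nbytes, ancdata, msg_flags,
    # address); ancbufsize and flags are the optional extra arguments.
    import socket

    a, b = socket.socketpair()
    try:
        a.sendall(b'hello')
        buf = bytearray(5)
        nbytes, ancdata, msg_flags, addr = b.recvmsg_into([buf])
        assert bytes(buf[:nbytes]) == b'hello'
    finally:
        a.close()
        b.close()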
......@@ -89,6 +89,7 @@ from .skipping import skipOnPy3
from .skipping import skipWithoutResource
from .skipping import skipWithoutExternalNetwork
from .skipping import skipOnPy2
from .skipping import skipOnManylinux
from .exception import ExpectedException
......
......@@ -1239,6 +1239,13 @@ disabled_tests += [
'test_ssl.BasicSocketTests.test_openssl_version'
]
if TRAVIS and OSX:
disabled_tests += [
# This sometimes produces OSError: Errno 40: Message too long
'test_socket.RecvmsgIntoTCPTest.testRecvmsgIntoGenerator',
]
# Now build up the data structure we'll use to actually find disabled tests
# to avoid a linear scan for every file (it seems the list could get quite large)
# (First, freeze the source list to make sure it isn't modified anywhere)
......
......@@ -38,8 +38,6 @@ def get_ALL_RESOURCES():
# shouldn't be enabled by default or when asking for "all" resources.
# ALL_RESOURCES is the list of resources enabled by default or with "all" resources.
try:
# 3.6 and 3.7
from test.libregrtest import ALL_RESOURCES
......@@ -64,7 +62,10 @@ def get_ALL_RESOURCES():
'xpickle'
)
return list(ALL_RESOURCES)
return list(ALL_RESOURCES) + [
# Do we test the stdlib monkey-patched?
'gevent_monkey',
]
def parse_resources(resource_str=None):
......
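The GEVENTTEST_USE_RESOURCES value exported in the manylinux script above ("-network") is what parse_resources consumes, together with the new 'gevent_monkey' entry added to ALL_RESOURCES. The parser itself is not shown in this diff; purely to illustrate the convention (a leading '-' disables a resource, a bare name enables one), a hypothetical sketch might look like this:

    # Hypothetical sketch only -- NOT gevent's parse_resources. It merely
    # illustrates the "-name disables, plain name enables" convention used by
    # strings such as GEVENTTEST_USE_RESOURCES="-network".
    def parse_resource_string(resource_str, all_resources):
        enabled = set(all_resources)          # assume the defaults start enabled
        for token in filter(None, resource_str.split(',')):
            if token == 'all':
                enabled.update(all_resources)
            elif token.startswith('-'):
                enabled.discard(token[1:])
            else:
                enabled.add(token)
        return enabled

    print(parse_resource_string('-network', ['network', 'gevent_monkey']))
    # prints: {'gevent_monkey'}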
......@@ -35,6 +35,7 @@ def _do_not_skip(reason):
skipOnWindows = _do_not_skip
skipOnAppVeyor = _do_not_skip
skipOnCI = _do_not_skip
skipOnManylinux = _do_not_skip
skipOnPyPy = _do_not_skip
skipOnPyPyOnCI = _do_not_skip
......@@ -76,6 +77,8 @@ if sysinfo.RUNNING_ON_APPVEYOR:
if sysinfo.RUNNING_ON_CI:
skipOnCI = unittest.skip
if sysinfo.RUNNING_ON_MANYLINUX:
skipOnManylinux = unittest.skip
if sysinfo.PYPY:
skipOnPyPy = unittest.skip
......
......@@ -20,14 +20,17 @@
"""
A re-export of the support module from Python's test package, with some
version compatibility shims and overrides.
The manylinux docker images do not include test.support at all, for space reasons,
so we need to be vaguely functional to run tests in that environment.
"""
import sys
# Proxy through, so that changes to this module reflect in the
# real module too. In 3.7, this is natively supported.
# This breaks static analysis (pylint), so we configure pylint
# to ignore this module.
# Proxy through, so that changes to this module reflect in the real
# module too. (In 3.7, this is natively supported with __getattr__ at
# module scope.) This breaks static analysis (pylint), so we configure
# pylint to ignore this module.
class _Default(object):
# A descriptor-like object that will
......@@ -39,10 +42,10 @@ class _Default(object):
class _ModuleProxy(object):
__slots__ = ('_orig_mod', '_stdlib_support')
__slots__ = ('_this_mod', '_stdlib_support')
def __init__(self):
self._orig_mod = sys.modules[__name__]
self._this_mod = sys.modules[__name__]
self._stdlib_support = None
def __get_stdlib_support(self):
......@@ -52,14 +55,17 @@ class _ModuleProxy(object):
# *and* in 2.7.14 (but with a BWC module)
from test import support as stdlib_support
except ImportError:
try:
from test import test_support as stdlib_support
except ImportError:
stdlib_support = None
self._stdlib_support = stdlib_support
return self._stdlib_support
def __getattr__(self, name):
try:
local_val = getattr(self._orig_mod, name)
local_val = getattr(self._this_mod, name)
except AttributeError:
return getattr(self.__get_stdlib_support(), name)
......@@ -77,14 +83,63 @@ class _ModuleProxy(object):
# Setting it deletes it from this module, so that
# we then continue to fall through to the original module.
try:
delattr(self._orig_mod, name)
setattr(self.__get_stdlib_support(), name, value)
except AttributeError:
setattr(self._this_mod, name, value)
else:
try:
delattr(self._this_mod, name)
except AttributeError:
pass
setattr(self.__get_stdlib_support(), name, value)
def __repr__(self):
return repr(self._this_mod)
# When is this not defined in test.support?
HOSTv6 = _Default('::1')
HOST = _Default("localhost")
HOSTv4 = _Default("127.0.0.1")
verbose = _Default(False)
@_Default
def is_resource_enabled(_):
return False
@_Default
def bind_port(sock, host=None): # pragma: no cover
import socket
host = host if host is not None else sys.modules[__name__].HOST
if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM:
if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'):
sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) # pylint:disable=no-member
sock.bind((host, 0))
port = sock.getsockname()[1]
return port
@_Default
def find_unused_port(family=None, socktype=None): # pragma: no cover
import socket
family = family or socket.AF_INET
socktype = socktype or socket.SOCK_STREAM
tempsock = socket.socket(family, socktype)
try:
port = sys.modules[__name__].bind_port(tempsock)
finally:
tempsock.close()
del tempsock
return port
@_Default
def threading_setup():
return []
@_Default
def threading_cleanup(*_):
pass
@_Default
def reap_children():
pass
# Set by resources.setup_resources()
gevent_has_setup_resources = False
......
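The comments above describe the whole trick: this module pretends to be test.support, preferring the real stdlib module when it is importable and otherwise falling back to the _Default values defined locally, because the manylinux images ship without the test package. A condensed, self-contained sketch of that idea (illustrative names only, not gevent's actual _ModuleProxy/_Default classes):

    # Illustrative sketch only -- not gevent's implementation. Attribute
    # lookups prefer the local module, then the real test.support (which may
    # be missing on manylinux), then a small table of hard-coded defaults.
    import sys

    _DEFAULTS = {'HOST': 'localhost', 'HOSTv4': '127.0.0.1', 'verbose': False}

    class _SupportProxy(object):
        def __init__(self, local_module):
            self._local = local_module

        def __getattr__(self, name):
            if hasattr(self._local, name):        # anything defined locally wins
                return getattr(self._local, name)
            try:
                from test import support as stdlib_support
            except ImportError:                   # stripped-down Python (manylinux)
                stdlib_support = None
            if stdlib_support is not None and hasattr(stdlib_support, name):
                return getattr(stdlib_support, name)
            try:
                return _DEFAULTS[name]            # last-ditch fallback
            except KeyError:
                raise AttributeError(name)

    # Swap the proxy into sys.modules so "from ... import HOST" keeps working
    # whether or not test.support is installed.
    sys.modules[__name__] = _SupportProxy(sys.modules[__name__])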
......@@ -113,16 +113,20 @@ else:
RUNNING_ON_TRAVIS = os.environ.get('TRAVIS')
RUNNING_ON_APPVEYOR = os.environ.get('APPVEYOR')
RUNNING_ON_CI = RUNNING_ON_TRAVIS or RUNNING_ON_APPVEYOR
RUNNING_ON_MANYLINUX = os.environ.get('GEVENT_MANYLINUX')
if RUNNING_ON_APPVEYOR:
# We can't exec corecext on appveyor if we haven't run setup.py in
# 'develop' mode (i.e., we install)
NON_APPLICABLE_SUFFIXES += ('corecext',)
EXPECT_POOR_TIMER_RESOLUTION = (PYPY3
EXPECT_POOR_TIMER_RESOLUTION = (
PYPY3
or RUNNING_ON_APPVEYOR
or (LIBUV and PYPY)
or RUN_COVERAGE)
or RUN_COVERAGE
or (OSX and RUNNING_ON_CI)
)
CONN_ABORTED_ERRORS = []
......
......@@ -35,6 +35,7 @@ TIMEOUT = 100 # seconds
DEFAULT_NWORKERS = int(os.environ.get('NWORKERS') or max(cpu_count() - 1, 4))
if DEFAULT_NWORKERS > 10:
DEFAULT_NWORKERS = 10
SUGGESTED_NWORKERS = DEFAULT_NWORKERS
if RUN_LEAKCHECKS:
# Capturing the stats takes time, and we run each
......@@ -146,7 +147,10 @@ class Runner(object):
util.sleep(self.TIME_WAIT_SPAWN)
def __call__(self):
util.log("Running tests in parallel with concurrency %s" % (self._worker_count,),)
util.log("Running tests in parallel with concurrency %s %s." % (
self._worker_count,
util._colorize('number', '(concurrency available: %d)' % SUGGESTED_NWORKERS)
),)
# Setting global state, in theory we can be used multiple times.
# This is fine as long as we are single threaded and call these
# sequentially.
......@@ -405,7 +409,7 @@ def report(total, failed, passed, exit=True, took=None,
print_list(passed_unexpected)
if failed:
util.log('\n%s/%s tests failed%s', len(failed), total, took)
util.log('\n%s/%s tests failed%s', len(failed), total, took, color='warning')
for name in failed:
if matches(configured_failing_tests, name, include_flaky=True):
......@@ -414,7 +418,7 @@ def report(total, failed, passed, exit=True, took=None,
failed_unexpected.append(name)
if failed_expected:
util.log('\n%s/%s expected failures', len(failed_expected), total)
util.log('\n%s/%s expected failures', len(failed_expected), total, color='warning')
print_list(failed_expected)
if failed_unexpected:
......
......@@ -108,26 +108,15 @@ def log(message, *args, **kwargs):
:keyword str color: One of the values from _colorscheme
"""
with output_lock: # pylint:disable=not-context-manager
color = kwargs.pop('color', 'normal')
try:
if args:
string = message % args
else:
string = message
except Exception:
traceback.print_exc()
try:
string = '%r %% %r\n\n' % (message, args)
except Exception:
pass
try:
string = _colorize('exception', string)
sys.stderr.write(string)
except Exception:
traceback.print_exc()
else:
string = _colorize(color, string)
with output_lock: # pylint:disable=not-context-manager
sys.stderr.write(string + '\n')
def debug(message, *args, **kwargs):
......
......@@ -15,7 +15,7 @@ from gevent.testing.sysinfo import RESOLVER_NOT_SYSTEM
from gevent.testing.sysinfo import PYPY
from gevent.testing.sysinfo import PY3
from gevent.testing.sysinfo import PY35
from gevent.testing.sysinfo import OSX
from gevent.testing.sysinfo import LIBUV
IGNORED_TESTS = []
......@@ -279,6 +279,15 @@ if PYPY:
'test__pywsgi.py',
]
if PY3 and TRAVIS:
FAILING_TESTS += [
## ---
## Unknown; can't reproduce locally on OS X
'FLAKY test_subprocess.py', # timeouts on one test.
]
if TRAVIS and (PYPY or OSX):
IGNORED_TESTS += [
# XXX Re-enable these when we have more time to investigate.
# This test, which normally takes ~60s, sometimes
......@@ -291,15 +300,6 @@ if PYPY:
'test__threading_2.py',
]
if PY3 and TRAVIS:
FAILING_TESTS += [
## ---
## Unknown; can't reproduce locally on OS X
'FLAKY test_subprocess.py', # timeouts on one test.
]
if LIBUV:
if sys.platform.startswith("darwin"):
FAILING_TESTS += [
......
......@@ -11,11 +11,7 @@ except ImportError:
import threading
import unittest
try:
from test import support
except ImportError:
from test import test_support as support
from gevent.testing import support
from gevent.testing.testcase import TimeAssertMixin
def _wait():
......
......@@ -7,6 +7,7 @@ import atexit
from gevent.testing import util
from gevent.testing import sysinfo
from gevent.testing.support import is_resource_enabled
TIMEOUT = 120
......@@ -19,6 +20,11 @@ def get_absolute_pythonpath():
def TESTRUNNER(tests=None):
if not is_resource_enabled('gevent_monkey'):
util.log('WARNING: Testing monkey-patched stdlib has been disabled',
color="suboptimal-behaviour")
return
try:
test_dir, version_test_dir = util.find_stdlib_tests()
except util.NoSetupPyFound as e:
......
......@@ -108,7 +108,7 @@ class Test(greentest.TestCase):
@greentest.skipOnAppVeyor("Timing is flaky, especially under Py 3.4/64-bit")
@greentest.skipOnPyPy3OnCI("Timing is flaky, especially under Py 3.4/64-bit")
@greentest.reraises_flaky_timeout(Timeout)
@greentest.reraises_flaky_timeout((Timeout, AssertionError))
def test_api(self):
# Nothing happens if with-block finishes before the timeout expires
t = Timeout(DELAY * 2)
......
......@@ -12,6 +12,7 @@ import gevent.testing as greentest
from gevent.testing.params import DEFAULT_BIND_ADDR_TUPLE
from gevent.testing.params import DEFAULT_CONNECT
from gevent.testing.sockets import tcp_listener
from gevent.testing.skipping import skipOnManylinux
dirname = os.path.dirname(os.path.abspath(__file__))
certfile = os.path.join(dirname, '2_7_keycert.pem')
......@@ -247,6 +248,7 @@ class TestSocket(Test):
@greentest.skipOnAppVeyor("This sometimes times out for no apparent reason.")
@skipOnManylinux("For some reason manylinux doesn't see the open files all the time.")
class TestSSL(Test):
def _ssl_connect_task(self, connector, port, accepted_event):
......@@ -407,6 +409,7 @@ class TestSSL(Test):
f.close()
self.assert_closed(client_socket, fileno)
@skipOnManylinux("Doesn't see the file open")
def test_serverssl_makefile2(self):
raw_listener = tcp_listener(backlog=1)
port = raw_listener.getsockname()[1]
......
......@@ -533,7 +533,7 @@ class TestSpawn(greentest.TestCase):
self.assertEqual(len(p), 1)
p.spawn(gevent.sleep, 0.1) # this spawn blocks until the old one finishes
self.assertEqual(len(p), 1)
gevent.sleep(0.19 if not greentest.RUNNING_ON_APPVEYOR else 0.5)
gevent.sleep(0.19 if not greentest.EXPECT_POOR_TIMER_RESOLUTION else 0.5)
self.assertEqual(len(p), 0)
def testSpawnAndWait(self):
......
......@@ -458,12 +458,14 @@ class TestCreateConnection(greentest.TestCase):
with self.assertRaisesRegex(
socket.error,
# We really expect "connection refused". It's unclear
# where/why we would get '[errno -2] name or service not known'
# but it seems some systems generate that.
# https://github.com/gevent/gevent/issues/1389
# Somehow extremely rarely we've also seen 'address already in use',
# which makes even less sense.
'refused|not known|already in use'
# where/why we would get '[errno -2] name or service
# not known' but it seems some systems generate that.
# https://github.com/gevent/gevent/issues/1389 Somehow
# extremely rarely we've also seen 'address already in
# use', which makes even less sense. The manylinux
# 2010 environment produces 'errno 99 Cannot assign
# requested address', which, I guess?
'refused|not known|already in use|assign'
):
socket.create_connection(
(greentest.DEFAULT_BIND_ADDR, connect_port),
......
......@@ -49,6 +49,13 @@ class TestSSL(test__socket.TestTCP):
# to send a very large amount to make it timeout
_test_sendall_data = data_sent = b'hello' * 100000000
test_sendall_array = greentest.skipOnManylinux("Sometimes misses data")(
test__socket.TestTCP.test_sendall_array
)
test_sendall_str = greentest.skipOnManylinux("Sometimes misses data")(
test__socket.TestTCP.test_sendall_str
)
@greentest.skipOnWindows("Not clear why we're skipping")
def test_ssl_sendall_timeout0(self):
# Issue #317: SSL_WRITE_PENDING in some corner cases
......
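The two assignments above re-bind inherited test methods through a skip decorator, so only this SSL subclass skips them on manylinux while the plain-socket base class still runs them. A minimal illustration of the same pattern with plain unittest (the class and test names here are made up):

    # Minimal illustration of re-binding an inherited test through a skip
    # decorator; unittest.skip wraps a copy, so Base.test_thing still runs.
    import unittest

    class Base(unittest.TestCase):
        def test_thing(self):
            self.assertTrue(True)

    class Derived(Base):
        # Skipped only here; the base class's version is unaffected.
        test_thing = unittest.skip("demonstration only")(Base.test_thing)

    if __name__ == '__main__':
        unittest.main()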
......@@ -28,11 +28,8 @@ exec(setup_)
setup_3 = '\n'.join(' %s' % line for line in setup_.split('\n'))
setup_4 = '\n'.join(' %s' % line for line in setup_.split('\n'))
try:
from test.support import verbose
except ImportError:
from test.test_support import verbose
from gevent.testing import support
verbose = support.verbose
import random
import re
......