Commit db7b3671 authored by Jason Madden

Merge pull request #766 from gevent/drop-py26

Drop support for python 2.6.
parents 7be7dedc dbcc9520
[pep8]
ignore=E702,E265,E402,E731,E266,E261,W503,E129
max_line_length=160
-exclude=.runtimes,.eggs,.tox,.git,build,2.6,2.7,2.7pypy,3.3,3.5,test_support.py,test_queue.py,patched_tests_setup.py,test_threading_2.py,lock_tests.py,_sslgte279.py,3.4
+exclude=.runtimes,.eggs,.tox,.git,build,2.6,2.7,2.7.8,2.7pypy,3.3,3.5,test_support.py,test_queue.py,patched_tests_setup.py,test_threading_2.py,lock_tests.py,_sslgte279.py,3.4
@@ -13,7 +13,7 @@ env:
- TASK=test-pypy
- TASK=lint-py27
- TASK=test-py35
-  - TASK=test-py26
+  - TASK=test-py278
- TASK=test-py27
- TASK=test-py33
- TASK=test-py34
......
@@ -62,7 +62,7 @@ test_prelim:
make bench
toxtest: test_prelim
-cd greentest && GEVENT_RESOLVER=thread python testrunner.py --config ../known_failures.py
+cd greentest && GEVENT_RESOLVER=thread ${PYTHON} testrunner.py --config ../known_failures.py
fulltoxtest: test_prelim
cd greentest && GEVENT_RESOLVER=thread ${PYTHON} testrunner.py --config ../known_failures.py
@@ -92,7 +92,7 @@ travis_test_linters:
BUILD_RUNTIMES?=$(PWD)/.runtimes
-PY26=$(BUILD_RUNTIMES)/snakepit/python2.6
+PY278=$(BUILD_RUNTIMES)/snakepit/python2.7.8
PY27=$(BUILD_RUNTIMES)/snakepit/python2.7
PY33=$(BUILD_RUNTIMES)/snakepit/python3.3
PY34=$(BUILD_RUNTIMES)/snakepit/python3.4
@@ -109,8 +109,8 @@ TOOL_PYTHON=$(TOOL_VIRTUALENV)/bin/python
TOOL_PIP=$(TOOL_VIRTUALENV)/bin/pip
TOOL_INSTALL=$(TOOL_PIP) install --upgrade
-$(PY26):
-scripts/install.sh 2.6
+$(PY278):
+scripts/install.sh 2.7.8
$(PY27):
scripts/install.sh 2.7
@@ -130,7 +130,8 @@ $(PYPY):
PIP?=$(BUILD_RUNTIMES)/versions/$(PYTHON)/bin/pip
develop:
-echo $(PIP) $(PYTHON)
+echo pip is at `which $(PIP)`
+echo python is at `which $(PYTHON)`
# First install a newer pip so that it can use the wheel cache
# (only needed until travis upgrades pip to 7.x; note that the 3.5
# environment uses pip 7.1 by default)
@@ -138,9 +139,7 @@ develop:
# Then start installing our deps so they can be cached. Note that use of --build-options / --global-options / --install-options
# disables the cache.
# We need wheel>=0.26 on Python 3.5. See previous revisions.
-${PIP} install -U wheel
-${PIP} install -U tox cython cffi greenlet pep8 pyflakes "coverage>=4.0" "coveralls>=1.0"
-${PYTHON} setup.py develop
+${PIP} install -U -r dev-requirements.txt
lint-py27: $(PY27)
PYTHON=python2.7 PATH=$(BUILD_RUNTIMES)/versions/python2.7/bin:$(PATH) make develop travis_test_linters
@@ -148,8 +147,9 @@ lint-py27: $(PY27)
test-py27: $(PY27)
PYTHON=python2.7 PATH=$(BUILD_RUNTIMES)/versions/python2.7/bin:$(PATH) make develop fulltoxtest
-test-py26: $(PY26)
-PYTHON=python2.6 PATH=$(BUILD_RUNTIMES)/versions/python2.6/bin:$(PATH) make develop fulltoxtest
+test-py278: $(PY278)
+ls $(BUILD_RUNTIMES)/versions/python2.7.8/bin/
+PYTHON=python2.7.8 PATH=$(BUILD_RUNTIMES)/versions/python2.7.8/bin:$(PATH) make develop fulltoxtest
test-py33: $(PY33)
PYTHON=python3.3 PATH=$(BUILD_RUNTIMES)/versions/python3.3/bin:$(PATH) make develop fulltoxtest
......
@@ -27,7 +27,7 @@ Check out the detailed changelog_ for this version.
Get gevent
==========
-Gevent runs on Python >= 2.6, Python >= 3.3, or PyPy >= 4.0.1
+Gevent runs on Python >= 2.7, Python >= 3.3, or PyPy >= 4.0.1
(but not PyPy3) (*Note*: PyPy is not supported in Windows). On all
platforms, installing setuptools is recommended (this is done
automatically if working in a virtual environment).
......
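For illustration only (not part of this commit's diff): a minimal sketch of the kind of program the interpreters listed above would run, using gevent's documented ``spawn``/``joinall`` API. The URLs and the 10-second timeout are arbitrary placeholders::

    # Minimal sketch; assumes gevent on one of the supported interpreters above.
    import gevent
    from gevent import monkey
    monkey.patch_all()  # make blocking stdlib calls cooperative

    import urllib2  # patched above, so each request yields to the hub on I/O (Python 2)

    def fetch(url):
        # Runs in its own greenlet; the return value becomes job.value.
        return urllib2.urlopen(url).read()

    urls = ['http://www.gevent.org', 'http://www.python.org']  # placeholders
    jobs = [gevent.spawn(fetch, u) for u in urls]
    gevent.joinall(jobs, timeout=10)
    print([len(job.value) for job in jobs if job.value is not None])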
@@ -7,6 +7,7 @@
1.2a1 (unreleased)
==================
+- Remove support for Python 2.6. See :pr:`766`.
- Update libev to version 4.22.
- Update tblib to 1.3.0.
- Update c-ares to version 1.11.0 (`release notes <https://raw.githubusercontent.com/c-ares/c-ares/cares-1_11_0/RELEASE-NOTES>`_).
......
@@ -22,10 +22,10 @@ Features include:
Installation and Requirements
=============================
-`gevent 1.1`_ runs on Python 2 and Python 3. Versions 2.6 and 2.7 of
-Python 2 are supported, and versions 3.3, 3.4, and 3.5 of Python 3 are
+`gevent 1.1`_ runs on Python 2 and Python 3. Version 2.7 of
+Python 2 is supported, and versions 3.3, 3.4, and 3.5 of Python 3 are
supported. (Users of older versions of Python 2 need to install gevent
-1.0.x; Python 3 is not supported by 1.0.) gevent requires the
+1.0.x (2.5) or 1.1.x (2.6); Python 3 is not supported by 1.0.) gevent requires the
greenlet__ library.
gevent 1.1 also runs on PyPy 2.6.1 and above, although 4.0 or above is
......
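As an aside (an illustrative sketch, not code from gevent's build), the support floor described above is the sort of thing a packaging guard enforces; the guard below and its message text are hypothetical::

    import sys

    # Hypothetical guard reflecting the support matrix above: 2.7 on Python 2,
    # 3.3+ on Python 3; older interpreters are pointed at the legacy releases.
    if sys.version_info[0] == 2 and sys.version_info < (2, 7):
        raise RuntimeError("This gevent release needs Python 2.7; "
                           "use gevent 1.0.x on 2.5 or 1.1.x on 2.6.")
    if sys.version_info[0] == 3 and sys.version_info < (3, 3):
        raise RuntimeError("This gevent release needs Python 3.3 or newer.")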
"""
Various tests for synchronization primitives.
"""
import sys
import time
from thread import start_new_thread, get_ident
import threading
import unittest
from test import test_support as support
def _wait():
# A crude wait/yield function not relying on synchronization primitives.
time.sleep(0.01)
class Bunch(object):
"""
A bunch of threads.
"""
def __init__(self, f, n, wait_before_exit=False):
"""
Construct a bunch of `n` threads running the same function `f`.
If `wait_before_exit` is True, the threads won't terminate until
do_finish() is called.
"""
self.f = f
self.n = n
self.started = []
self.finished = []
self._can_exit = not wait_before_exit
def task():
tid = get_ident()
self.started.append(tid)
try:
f()
finally:
self.finished.append(tid)
while not self._can_exit:
_wait()
for i in range(n):
start_new_thread(task, ())
def wait_for_started(self):
while len(self.started) < self.n:
_wait()
def wait_for_finished(self):
while len(self.finished) < self.n:
_wait()
def do_finish(self):
self._can_exit = True
class BaseTestCase(unittest.TestCase):
def setUp(self):
self._threads = support.threading_setup()
def tearDown(self):
support.threading_cleanup(*self._threads)
support.reap_children()
class BaseLockTests(BaseTestCase):
"""
Tests for both recursive and non-recursive locks.
"""
def test_constructor(self):
lock = self.locktype()
del lock
def test_acquire_destroy(self):
lock = self.locktype()
lock.acquire()
del lock
def test_acquire_release(self):
lock = self.locktype()
lock.acquire()
lock.release()
del lock
def test_try_acquire(self):
lock = self.locktype()
self.assertTrue(lock.acquire(False))
lock.release()
def test_try_acquire_contended(self):
lock = self.locktype()
lock.acquire()
result = []
def f():
result.append(lock.acquire(False))
Bunch(f, 1).wait_for_finished()
self.assertFalse(result[0])
lock.release()
def test_acquire_contended(self):
lock = self.locktype()
lock.acquire()
N = 5
def f():
lock.acquire()
lock.release()
b = Bunch(f, N)
b.wait_for_started()
_wait()
self.assertEqual(len(b.finished), 0)
lock.release()
b.wait_for_finished()
self.assertEqual(len(b.finished), N)
def test_with(self):
lock = self.locktype()
def f():
lock.acquire()
lock.release()
def _with(err=None):
with lock:
if err is not None:
raise err
_with()
# Check the lock is unacquired
Bunch(f, 1).wait_for_finished()
self.assertRaises(TypeError, _with, TypeError)
# Check the lock is unacquired
Bunch(f, 1).wait_for_finished()
def test_thread_leak(self):
# The lock shouldn't leak a Thread instance when used from a foreign
# (non-threading) thread.
lock = self.locktype()
def f():
lock.acquire()
lock.release()
n = len(threading.enumerate())
# We run many threads in the hope that existing thread ids won't
# be recycled.
Bunch(f, 15).wait_for_finished()
self.assertEqual(n, len(threading.enumerate()))
class LockTests(BaseLockTests):
"""
Tests for non-recursive, weak locks
(which can be acquired and released from different threads).
"""
def test_reacquire(self):
# Lock needs to be released before re-acquiring.
lock = self.locktype()
phase = []
def f():
lock.acquire()
phase.append(None)
lock.acquire()
phase.append(None)
start_new_thread(f, ())
while len(phase) == 0:
_wait()
_wait()
self.assertEqual(len(phase), 1)
lock.release()
while len(phase) == 1:
_wait()
self.assertEqual(len(phase), 2)
def test_different_thread(self):
# Lock can be released from a different thread.
lock = self.locktype()
lock.acquire()
def f():
lock.release()
b = Bunch(f, 1)
b.wait_for_finished()
lock.acquire()
lock.release()
class RLockTests(BaseLockTests):
"""
Tests for recursive locks.
"""
def test_reacquire(self):
lock = self.locktype()
lock.acquire()
lock.acquire()
lock.release()
lock.acquire()
lock.release()
lock.release()
def test_release_unacquired(self):
# Cannot release an unacquired lock
lock = self.locktype()
self.assertRaises(RuntimeError, lock.release)
lock.acquire()
lock.acquire()
lock.release()
lock.acquire()
lock.release()
lock.release()
self.assertRaises(RuntimeError, lock.release)
def test_different_thread(self):
# Cannot release from a different thread
lock = self.locktype()
def f():
lock.acquire()
b = Bunch(f, 1, True)
try:
self.assertRaises(RuntimeError, lock.release)
finally:
b.do_finish()
def test__is_owned(self):
lock = self.locktype()
self.assertFalse(lock._is_owned())
lock.acquire()
self.assertTrue(lock._is_owned())
lock.acquire()
self.assertTrue(lock._is_owned())
result = []
def f():
result.append(lock._is_owned())
Bunch(f, 1).wait_for_finished()
self.assertFalse(result[0])
lock.release()
self.assertTrue(lock._is_owned())
lock.release()
self.assertFalse(lock._is_owned())
class EventTests(BaseTestCase):
"""
Tests for Event objects.
"""
def test_is_set(self):
evt = self.eventtype()
self.assertFalse(evt.is_set())
evt.set()
self.assertTrue(evt.is_set())
evt.set()
self.assertTrue(evt.is_set())
evt.clear()
self.assertFalse(evt.is_set())
evt.clear()
self.assertFalse(evt.is_set())
def _check_notify(self, evt):
# All threads get notified
N = 5
results1 = []
results2 = []
def f():
evt.wait()
results1.append(evt.is_set())
evt.wait()
results2.append(evt.is_set())
b = Bunch(f, N)
b.wait_for_started()
_wait()
self.assertEqual(len(results1), 0)
evt.set()
b.wait_for_finished()
self.assertEqual(results1, [True] * N)
self.assertEqual(results2, [True] * N)
def test_notify(self):
evt = self.eventtype()
self._check_notify(evt)
# Another time, after an explicit clear()
evt.set()
evt.clear()
self._check_notify(evt)
def test_timeout(self):
evt = self.eventtype()
results1 = []
results2 = []
N = 5
def f():
evt.wait(0.0)
results1.append(evt.is_set())
t1 = time.time()
evt.wait(0.2)
r = evt.is_set()
t2 = time.time()
results2.append((r, t2 - t1))
Bunch(f, N).wait_for_finished()
self.assertEqual(results1, [False] * N)
for r, dt in results2:
self.assertFalse(r)
self.assertTrue(dt >= 0.2, dt)
# The event is set
results1 = []
results2 = []
evt.set()
Bunch(f, N).wait_for_finished()
self.assertEqual(results1, [True] * N)
for r, dt in results2:
self.assertTrue(r)
class ConditionTests(BaseTestCase):
"""
Tests for condition variables.
"""
def test_acquire(self):
cond = self.condtype()
# By default we have an RLock: the condition can be acquired multiple
# times.
cond.acquire()
cond.acquire()
cond.release()
cond.release()
lock = threading.Lock()
cond = self.condtype(lock)
cond.acquire()
self.assertFalse(lock.acquire(False))
cond.release()
self.assertTrue(lock.acquire(False))
self.assertFalse(cond.acquire(False))
lock.release()
with cond:
self.assertFalse(lock.acquire(False))
def test_unacquired_wait(self):
cond = self.condtype()
self.assertRaises(RuntimeError, cond.wait)
def test_unacquired_notify(self):
cond = self.condtype()
self.assertRaises(RuntimeError, cond.notify)
def _check_notify(self, cond):
N = 5
results1 = []
results2 = []
phase_num = 0
def f():
cond.acquire()
cond.wait()
cond.release()
results1.append(phase_num)
cond.acquire()
cond.wait()
cond.release()
results2.append(phase_num)
b = Bunch(f, N)
b.wait_for_started()
_wait()
self.assertEqual(results1, [])
# Notify 3 threads at first
cond.acquire()
cond.notify(3)
_wait()
phase_num = 1
cond.release()
while len(results1) < 3:
_wait()
self.assertEqual(results1, [1] * 3)
self.assertEqual(results2, [])
# Notify 5 threads: they might be in their first or second wait
cond.acquire()
cond.notify(5)
_wait()
phase_num = 2
cond.release()
while len(results1) + len(results2) < 8:
_wait()
self.assertEqual(results1, [1] * 3 + [2] * 2)
self.assertEqual(results2, [2] * 3)
# Notify all threads: they are all in their second wait
cond.acquire()
cond.notify_all()
_wait()
phase_num = 3
cond.release()
while len(results2) < 5:
_wait()
self.assertEqual(results1, [1] * 3 + [2] * 2)
self.assertEqual(results2, [2] * 3 + [3] * 2)
b.wait_for_finished()
def test_notify(self):
cond = self.condtype()
self._check_notify(cond)
# A second time, to check internal state is still ok.
self._check_notify(cond)
def test_timeout(self):
cond = self.condtype()
results = []
N = 5
def f():
cond.acquire()
t1 = time.time()
cond.wait(0.2)
t2 = time.time()
cond.release()
results.append(t2 - t1)
Bunch(f, N).wait_for_finished()
self.assertEqual(len(results), 5)
for dt in results:
self.assertTrue(dt >= 0.2, dt)
class BaseSemaphoreTests(BaseTestCase):
"""
Common tests for {bounded, unbounded} semaphore objects.
"""
def test_constructor(self):
self.assertRaises(ValueError, self.semtype, value = -1)
self.assertRaises(ValueError, self.semtype, value = -sys.maxint)
def test_acquire(self):
sem = self.semtype(1)
sem.acquire()
sem.release()
sem = self.semtype(2)
sem.acquire()
sem.acquire()
sem.release()
sem.release()
def test_acquire_destroy(self):
sem = self.semtype()
sem.acquire()
del sem
def test_acquire_contended(self):
sem = self.semtype(7)
sem.acquire()
N = 10
results1 = []
results2 = []
phase_num = 0
def f():
sem.acquire()
results1.append(phase_num)
sem.acquire()
results2.append(phase_num)
b = Bunch(f, 10)
b.wait_for_started()
while len(results1) + len(results2) < 6:
_wait()
self.assertEqual(results1 + results2, [0] * 6)
phase_num = 1
for i in range(7):
sem.release()
while len(results1) + len(results2) < 13:
_wait()
self.assertEqual(sorted(results1 + results2), [0] * 6 + [1] * 7)
phase_num = 2
for i in range(6):
sem.release()
while len(results1) + len(results2) < 19:
_wait()
self.assertEqual(sorted(results1 + results2), [0] * 6 + [1] * 7 + [2] * 6)
# The semaphore is still locked
self.assertFalse(sem.acquire(False))
# Final release, to let the last thread finish
sem.release()
b.wait_for_finished()
def test_try_acquire(self):
sem = self.semtype(2)
self.assertTrue(sem.acquire(False))
self.assertTrue(sem.acquire(False))
self.assertFalse(sem.acquire(False))
sem.release()
self.assertTrue(sem.acquire(False))
def test_try_acquire_contended(self):
sem = self.semtype(4)
sem.acquire()
results = []
def f():
results.append(sem.acquire(False))
results.append(sem.acquire(False))
Bunch(f, 5).wait_for_finished()
# There can be a thread switch between acquiring the semaphore and
# appending the result, therefore results will not necessarily be
# ordered.
self.assertEqual(sorted(results), [False] * 7 + [True] * 3 )
def test_default_value(self):
# The default initial value is 1.
sem = self.semtype()
sem.acquire()
def f():
sem.acquire()
sem.release()
b = Bunch(f, 1)
b.wait_for_started()
_wait()
self.assertFalse(b.finished)
sem.release()
b.wait_for_finished()
def test_with(self):
sem = self.semtype(2)
def _with(err=None):
with sem:
self.assertTrue(sem.acquire(False))
sem.release()
with sem:
self.assertFalse(sem.acquire(False))
if err:
raise err
_with()
self.assertTrue(sem.acquire(False))
sem.release()
self.assertRaises(TypeError, _with, TypeError)
self.assertTrue(sem.acquire(False))
sem.release()
class SemaphoreTests(BaseSemaphoreTests):
"""
Tests for unbounded semaphores.
"""
def test_release_unacquired(self):
# Unbounded releases are allowed and increment the semaphore's value
sem = self.semtype(1)
sem.release()
sem.acquire()
sem.acquire()
sem.release()
class BoundedSemaphoreTests(BaseSemaphoreTests):
"""
Tests for bounded semaphores.
"""
def test_release_unacquired(self):
# Cannot go past the initial value
sem = self.semtype()
self.assertRaises(ValueError, sem.release)
sem.acquire()
sem.release()
self.assertRaises(ValueError, sem.release)
-----BEGIN CERTIFICATE-----
MIIFxzCCA6+gAwIBAgIJALnlnf5uzTkIMA0GCSqGSIb3DQEBCwUAMEsxCzAJBgNV
BAYTAkRFMRcwFQYDVQQKEw5zY2hva29rZWtzLm9yZzEjMCEGCSqGSIb3DQEJARYU
aGFubm9Ac2Nob2tva2Vrcy5vcmcwHhcNMTAwMTI3MDAyMTI1WhcNMjAwMTI1MDAy
MTI1WjBLMQswCQYDVQQGEwJERTEXMBUGA1UEChMOc2Nob2tva2Vrcy5vcmcxIzAh
BgkqhkiG9w0BCQEWFGhhbm5vQHNjaG9rb2tla3Mub3JnMIICIjANBgkqhkiG9w0B
AQEFAAOCAg8AMIICCgKCAgEApJ4ODPwEooMW35dQPlBqdvcfkEvjhcsA7jmJfFqN
e/1T34zT44X9+KnMBSG2InacbD7eyFgjfaENFsZ87YkEBDIFZ/SHotLJZORQ8PUj
YoxPG4mjKN+yL2WthNcYbRyJreTbbDroNMuw6tkTSxeSXyYFQrKMCUfErVbZa/d5
RvfFVk+Au9dVUFhed/Stn5cv+a0ffvpyA7ygihm1kMFICbvPeI0846tmC2Ph7rM5
pYQyNBDOVpULODTk5Wu6jiiJJygvJWCZ1FdpsdBs5aKWHWdRhX++quGuflTTjH5d
qaIka4op9H7XksYphTDXmV+qHnva5jbPogwutDQcVsGBQcJaLmQqhsQK13bf4khE
iWJvfBLfHn8OOpY25ZwwuigJIwifNCxQeeT1FrLmyuYNhz2phPpzx065kqSUSR+A
Iw8DPE6e65UqMDKqZnID3dQeiQaFrHEV+Ibo0U/tD0YSBw5p33TMh0Es33IBWMac
m7x4hIFWdhl8W522u6qOrTswY3s8vB7blNWqMc9n7oWH8ybFf7EgKeDVtEN9AyBE
0WotXIEZWI+WvDbU1ACJXau9sQhYP/eerg7Zwr3iGUy4IQ5oUJibnjtcE+z8zmDN
pE6YcMCLJyLjXiQ3iHG9mNXzw7wPnslTbEEEukrfSlHGgW8Dm+VrNyW0JUM1bntx
vbMCAwEAAaOBrTCBqjAdBgNVHQ4EFgQUCedv7pDTuXtCxm4HTw9hUtrTvsowewYD
VR0jBHQwcoAUCedv7pDTuXtCxm4HTw9hUtrTvsqhT6RNMEsxCzAJBgNVBAYTAkRF
MRcwFQYDVQQKEw5zY2hva29rZWtzLm9yZzEjMCEGCSqGSIb3DQEJARYUaGFubm9A
c2Nob2tva2Vrcy5vcmeCCQC55Z3+bs05CDAMBgNVHRMEBTADAQH/MA0GCSqGSIb3
DQEBCwUAA4ICAQBHKAxA7WA/MEFjet03K8ouzEOr6Jrk2fZOuRhoDZ+9gr4FtaJB
P3Hh5D00kuSOvDnwsvCohxeNd1KTMAwVmVoH+NZkHERn3UXniUENlp18koI1ehlr
CZbXbzzE9Te9BelliSFA63q0cq0yJN1x9GyabU34XkAouCAmOqfSpKNZWZHGBHPF
bbYnZrHEMcsye6vKeTOcg1GqUHGrQM2WK0QaOwnCQv2RblI9VN+SeRoUJ44qTXdW
TwIYStsIPesacNcAQTStnHgKqIPx4zCwdx5xo8zONbXJfocqwyFqiAofvb9dN1nW
g1noVBcXB+oRBZW5CjFw87U88itq39i9+BWl835DWLBW2pVmx1QTLGv0RNgs/xVx
mWnjH4nNHvrjn6pRmqHZTk/SS0Hkl2qtDsynVxIl8EiMTfWSU3DBTuD2J/RSzuOE
eKtAbaoXkXE31jCl4FEZLITIZd8UkXacb9rN304tAK92L76JOAV+xOZxFRipmvx4
+A9qQXgLhtP4VaDajb44V/kCKPSA0Vm3apehke9Wl8dDtagfos1e6MxSu3EVLXRF
SP2U777V77pdMSd0f/7cerKn5FjrxW1v1FaP1oIGniMk4qQNTgA/jvvhjybsPlVA
jsfnhWGbh1voJa0RQcMiRMsxpw2P1KNOEu37W2eq/vFghVztZJQUmb5iNw==
-----END CERTIFICATE-----
import asyncore
import unittest
import select
import os
import socket
import threading
import sys
import time
import errno
from test import test_support
from test.test_support import TESTFN, run_unittest, unlink
from StringIO import StringIO
HOST = test_support.HOST
class dummysocket:
def __init__(self):
self.closed = False
def close(self):
self.closed = True
def fileno(self):
return 42
class dummychannel:
def __init__(self):
self.socket = dummysocket()
def close(self):
self.socket.close()
class exitingdummy:
def __init__(self):
pass
def handle_read_event(self):
raise asyncore.ExitNow()
handle_write_event = handle_read_event
handle_close = handle_read_event
handle_expt_event = handle_read_event
class crashingdummy:
def __init__(self):
self.error_handled = False
def handle_read_event(self):
raise Exception()
handle_write_event = handle_read_event
handle_close = handle_read_event
handle_expt_event = handle_read_event
def handle_error(self):
self.error_handled = True
# used when testing senders; just collects what it gets until newline is sent
def capture_server(evt, buf, serv):
try:
serv.listen(5)
conn, addr = serv.accept()
except socket.timeout:
pass
else:
n = 200
while n > 0:
r, w, e = select.select([conn], [], [])
if r:
data = conn.recv(10)
# keep everything except for the newline terminator
buf.write(data.replace('\n', ''))
if '\n' in data:
break
n -= 1
time.sleep(0.01)
conn.close()
finally:
serv.close()
evt.set()
class HelperFunctionTests(unittest.TestCase):
def test_readwriteexc(self):
# Check exception handling behavior of read, write and _exception
# check that ExitNow exceptions in the object handler method
# bubble all the way up through asyncore read/write/_exception calls
tr1 = exitingdummy()
self.assertRaises(asyncore.ExitNow, asyncore.read, tr1)
self.assertRaises(asyncore.ExitNow, asyncore.write, tr1)
self.assertRaises(asyncore.ExitNow, asyncore._exception, tr1)
# check that an exception other than ExitNow in the object handler
# method causes the handle_error method to get called
tr2 = crashingdummy()
asyncore.read(tr2)
self.assertEqual(tr2.error_handled, True)
tr2 = crashingdummy()
asyncore.write(tr2)
self.assertEqual(tr2.error_handled, True)
tr2 = crashingdummy()
asyncore._exception(tr2)
self.assertEqual(tr2.error_handled, True)
# asyncore.readwrite uses constants in the select module that
# are not present in Windows systems (see this thread:
# http://mail.python.org/pipermail/python-list/2001-October/109973.html)
# These constants should be present as long as poll is available
if hasattr(select, 'poll'):
def test_readwrite(self):
# Check that correct methods are called by readwrite()
attributes = ('read', 'expt', 'write', 'closed', 'error_handled')
expected = (
(select.POLLIN, 'read'),
(select.POLLPRI, 'expt'),
(select.POLLOUT, 'write'),
(select.POLLERR, 'closed'),
(select.POLLHUP, 'closed'),
(select.POLLNVAL, 'closed'),
)
class testobj:
def __init__(self):
self.read = False
self.write = False
self.closed = False
self.expt = False
self.error_handled = False
def handle_read_event(self):
self.read = True
def handle_write_event(self):
self.write = True
def handle_close(self):
self.closed = True
def handle_expt_event(self):
self.expt = True
def handle_error(self):
self.error_handled = True
for flag, expectedattr in expected:
tobj = testobj()
self.assertEqual(getattr(tobj, expectedattr), False)
asyncore.readwrite(tobj, flag)
# Only the attribute modified by the routine we expect to be
# called should be True.
for attr in attributes:
self.assertEqual(getattr(tobj, attr), attr==expectedattr)
# check that ExitNow exceptions in the object handler method
# bubble all the way up through the asyncore readwrite call
tr1 = exitingdummy()
self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, flag)
# check that an exception other than ExitNow in the object handler
# method causes the handle_error method to get called
tr2 = crashingdummy()
self.assertEqual(tr2.error_handled, False)
asyncore.readwrite(tr2, flag)
self.assertEqual(tr2.error_handled, True)
def test_closeall(self):
self.closeall_check(False)
def test_closeall_default(self):
self.closeall_check(True)
def closeall_check(self, usedefault):
# Check that close_all() closes everything in a given map
l = []
testmap = {}
for i in range(10):
c = dummychannel()
l.append(c)
self.assertEqual(c.socket.closed, False)
testmap[i] = c
if usedefault:
socketmap = asyncore.socket_map
try:
asyncore.socket_map = testmap
asyncore.close_all()
finally:
testmap, asyncore.socket_map = asyncore.socket_map, socketmap
else:
asyncore.close_all(testmap)
self.assertEqual(len(testmap), 0)
for c in l:
self.assertEqual(c.socket.closed, True)
def test_compact_traceback(self):
try:
raise Exception("I don't like spam!")
except:
real_t, real_v, real_tb = sys.exc_info()
r = asyncore.compact_traceback()
else:
self.fail("Expected exception")
(f, function, line), t, v, info = r
self.assertEqual(os.path.split(f)[-1], 'test_asyncore.py')
self.assertEqual(function, 'test_compact_traceback')
self.assertEqual(t, real_t)
self.assertEqual(v, real_v)
self.assertEqual(info, '[%s|%s|%s]' % (f, function, line))
class DispatcherTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
asyncore.close_all()
def test_basic(self):
d = asyncore.dispatcher()
self.assertEqual(d.readable(), True)
self.assertEqual(d.writable(), True)
def test_repr(self):
d = asyncore.dispatcher()
self.assertEqual(repr(d), '<asyncore.dispatcher at %#x>' % id(d))
def test_log(self):
d = asyncore.dispatcher()
# capture output of dispatcher.log() (to stderr)
fp = StringIO()
stderr = sys.stderr
l1 = "Lovely spam! Wonderful spam!"
l2 = "I don't like spam!"
try:
sys.stderr = fp
d.log(l1)
d.log(l2)
finally:
sys.stderr = stderr
lines = fp.getvalue().splitlines()
self.assertEquals(lines, ['log: %s' % l1, 'log: %s' % l2])
def test_log_info(self):
d = asyncore.dispatcher()
# capture output of dispatcher.log_info() (to stdout via print)
fp = StringIO()
stdout = sys.stdout
l1 = "Have you got anything without spam?"
l2 = "Why can't she have egg bacon spam and sausage?"
l3 = "THAT'S got spam in it!"
try:
sys.stdout = fp
d.log_info(l1, 'EGGS')
d.log_info(l2)
d.log_info(l3, 'SPAM')
finally:
sys.stdout = stdout
lines = fp.getvalue().splitlines()
expected = ['EGGS: %s' % l1, 'info: %s' % l2, 'SPAM: %s' % l3]
self.assertEquals(lines, expected)
def test_unhandled(self):
d = asyncore.dispatcher()
d.ignore_log_types = ()
# capture output of dispatcher.log_info() (to stdout via print)
fp = StringIO()
stdout = sys.stdout
try:
sys.stdout = fp
d.handle_expt()
d.handle_read()
d.handle_write()
d.handle_connect()
d.handle_accept()
finally:
sys.stdout = stdout
lines = fp.getvalue().splitlines()
expected = ['warning: unhandled incoming priority event',
'warning: unhandled read event',
'warning: unhandled write event',
'warning: unhandled connect event',
'warning: unhandled accept event']
self.assertEquals(lines, expected)
def test_issue_8594(self):
d = asyncore.dispatcher(socket.socket())
# make sure the error message no longer refers to the socket
# object but the dispatcher instance instead
try:
d.foo
except AttributeError, err:
self.assertTrue('dispatcher instance' in str(err))
else:
self.fail("exception not raised")
# test cheap inheritance with the underlying socket
self.assertEqual(d.family, socket.AF_INET)
def test_strerror(self):
# refers to bug #8573
err = asyncore._strerror(errno.EPERM)
if hasattr(os, 'strerror'):
self.assertEqual(err, os.strerror(errno.EPERM))
err = asyncore._strerror(-1)
self.assertTrue("unknown error" in err.lower())
class dispatcherwithsend_noread(asyncore.dispatcher_with_send):
def readable(self):
return False
def handle_connect(self):
pass
class DispatcherWithSendTests(unittest.TestCase):
usepoll = False
def setUp(self):
pass
def tearDown(self):
asyncore.close_all()
def test_send(self):
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(3)
self.port = test_support.bind_port(self.sock)
cap = StringIO()
args = (self.evt, cap, self.sock)
threading.Thread(target=capture_server, args=args).start()
# wait a little longer for the server to initialize (it sometimes
# refuses connections on slow machines without this wait)
time.sleep(0.2)
data = "Suppose there isn't a 16-ton weight?"
d = dispatcherwithsend_noread()
d.create_socket(socket.AF_INET, socket.SOCK_STREAM)
d.connect((HOST, self.port))
# give time for socket to connect
time.sleep(0.1)
d.send(data)
d.send(data)
d.send('\n')
n = 1000
while d.out_buffer and n > 0:
asyncore.poll()
n -= 1
self.evt.wait()
self.assertEqual(cap.getvalue(), data*2)
class DispatcherWithSendTests_UsePoll(DispatcherWithSendTests):
usepoll = True
if hasattr(asyncore, 'file_wrapper'):
class FileWrapperTest(unittest.TestCase):
def setUp(self):
self.d = "It's not dead, it's sleeping!"
file(TESTFN, 'w').write(self.d)
def tearDown(self):
unlink(TESTFN)
def test_recv(self):
fd = os.open(TESTFN, os.O_RDONLY)
w = asyncore.file_wrapper(fd)
os.close(fd)
self.assertNotEqual(w.fd, fd)
self.assertNotEqual(w.fileno(), fd)
self.assertEqual(w.recv(13), "It's not dead")
self.assertEqual(w.read(6), ", it's")
w.close()
self.assertRaises(OSError, w.read, 1)
def test_send(self):
d1 = "Come again?"
d2 = "I want to buy some cheese."
fd = os.open(TESTFN, os.O_WRONLY | os.O_APPEND)
w = asyncore.file_wrapper(fd)
os.close(fd)
w.write(d1)
w.send(d2)
w.close()
self.assertEqual(file(TESTFN).read(), self.d + d1 + d2)
def test_dispatcher(self):
fd = os.open(TESTFN, os.O_RDONLY)
data = []
class FileDispatcher(asyncore.file_dispatcher):
def handle_read(self):
data.append(self.recv(29))
s = FileDispatcher(fd)
os.close(fd)
asyncore.loop(timeout=0.01, use_poll=True, count=2)
self.assertEqual("".join(data), self.d)
def test_main():
tests = [HelperFunctionTests, DispatcherTests, DispatcherWithSendTests,
DispatcherWithSendTests_UsePoll]
if hasattr(asyncore, 'file_wrapper'):
tests.append(FileWrapperTest)
run_unittest(*tests)
if __name__ == "__main__":
test_main()
"""Test script for ftplib module."""
# Modified by Giampaolo Rodola' to test FTP class and IPv6 environment
import ftplib
import threading
import asyncore
import asynchat
import socket
import StringIO
from unittest import TestCase
from test import test_support
from test.test_support import HOST
# the dummy data returned by server over the data channel when
# RETR, LIST and NLST commands are issued
RETR_DATA = 'abcde12345\r\n' * 1000
LIST_DATA = 'foo\r\nbar\r\n'
NLST_DATA = 'foo\r\nbar\r\n'
class DummyDTPHandler(asynchat.async_chat):
def __init__(self, conn, baseclass):
asynchat.async_chat.__init__(self, conn)
self.baseclass = baseclass
self.baseclass.last_received_data = ''
def handle_read(self):
self.baseclass.last_received_data += self.recv(1024)
def handle_close(self):
self.baseclass.push('226 transfer complete')
self.close()
class DummyFTPHandler(asynchat.async_chat):
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
self.set_terminator("\r\n")
self.in_buffer = []
self.dtp = None
self.last_received_cmd = None
self.last_received_data = ''
self.next_response = ''
self.push('220 welcome')
def collect_incoming_data(self, data):
self.in_buffer.append(data)
def found_terminator(self):
line = ''.join(self.in_buffer)
self.in_buffer = []
if self.next_response:
self.push(self.next_response)
self.next_response = ''
cmd = line.split(' ')[0].lower()
self.last_received_cmd = cmd
space = line.find(' ')
if space != -1:
arg = line[space + 1:]
else:
arg = ""
if hasattr(self, 'cmd_' + cmd):
method = getattr(self, 'cmd_' + cmd)
method(arg)
else:
self.push('550 command "%s" not understood.' %cmd)
def handle_error(self):
raise
def push(self, data):
asynchat.async_chat.push(self, data + '\r\n')
def cmd_port(self, arg):
addr = map(int, arg.split(','))
ip = '%d.%d.%d.%d' %tuple(addr[:4])
port = (addr[4] * 256) + addr[5]
s = socket.create_connection((ip, port), timeout=2)
self.dtp = DummyDTPHandler(s, baseclass=self)
self.push('200 active data connection established')
def cmd_pasv(self, arg):
sock = socket.socket()
sock.bind((self.socket.getsockname()[0], 0))
sock.listen(5)
sock.settimeout(2)
ip, port = sock.getsockname()[:2]
ip = ip.replace('.', ',')
p1, p2 = divmod(port, 256)
self.push('227 entering passive mode (%s,%d,%d)' %(ip, p1, p2))
conn, addr = sock.accept()
self.dtp = DummyDTPHandler(conn, baseclass=self)
def cmd_eprt(self, arg):
af, ip, port = arg.split(arg[0])[1:-1]
port = int(port)
s = socket.create_connection((ip, port), timeout=2)
self.dtp = DummyDTPHandler(s, baseclass=self)
self.push('200 active data connection established')
def cmd_epsv(self, arg):
sock = socket.socket(socket.AF_INET6)
sock.bind((self.socket.getsockname()[0], 0))
sock.listen(5)
sock.settimeout(2)
port = sock.getsockname()[1]
self.push('229 entering extended passive mode (|||%d|)' %port)
conn, addr = sock.accept()
self.dtp = DummyDTPHandler(conn, baseclass=self)
def cmd_echo(self, arg):
# sends back the received string (used by the test suite)
self.push(arg)
def cmd_user(self, arg):
self.push('331 username ok')
def cmd_pass(self, arg):
self.push('230 password ok')
def cmd_acct(self, arg):
self.push('230 acct ok')
def cmd_rnfr(self, arg):
self.push('350 rnfr ok')
def cmd_rnto(self, arg):
self.push('250 rnto ok')
def cmd_dele(self, arg):
self.push('250 dele ok')
def cmd_cwd(self, arg):
self.push('250 cwd ok')
def cmd_size(self, arg):
self.push('250 1000')
def cmd_mkd(self, arg):
self.push('257 "%s"' %arg)
def cmd_rmd(self, arg):
self.push('250 rmd ok')
def cmd_pwd(self, arg):
self.push('257 "pwd ok"')
def cmd_type(self, arg):
self.push('200 type ok')
def cmd_quit(self, arg):
self.push('221 quit ok')
self.close()
def cmd_stor(self, arg):
self.push('125 stor ok')
def cmd_retr(self, arg):
self.push('125 retr ok')
self.dtp.push(RETR_DATA)
self.dtp.close_when_done()
def cmd_list(self, arg):
self.push('125 list ok')
self.dtp.push(LIST_DATA)
self.dtp.close_when_done()
def cmd_nlst(self, arg):
self.push('125 nlst ok')
self.dtp.push(NLST_DATA)
self.dtp.close_when_done()
class DummyFTPServer(asyncore.dispatcher, threading.Thread):
handler = DummyFTPHandler
def __init__(self, address, af=socket.AF_INET):
threading.Thread.__init__(self)
asyncore.dispatcher.__init__(self)
self.create_socket(af, socket.SOCK_STREAM)
self.bind(address)
self.listen(5)
self.active = False
self.active_lock = threading.Lock()
self.host, self.port = self.socket.getsockname()[:2]
def start(self):
assert not self.active
self.__flag = threading.Event()
threading.Thread.start(self)
self.__flag.wait()
def run(self):
self.active = True
self.__flag.set()
while self.active and asyncore.socket_map:
self.active_lock.acquire()
asyncore.loop(timeout=0.1, count=1)
self.active_lock.release()
asyncore.close_all(ignore_all=True)
def stop(self):
assert self.active
self.active = False
self.join()
def handle_accept(self):
conn, addr = self.accept()
self.handler = self.handler(conn)
self.close()
def handle_connect(self):
self.close()
handle_read = handle_connect
def writable(self):
return 0
def handle_error(self):
raise
class TestFTPClass(TestCase):
def setUp(self):
self.server = DummyFTPServer((HOST, 0))
self.server.start()
self.client = ftplib.FTP(timeout=2)
self.client.connect(self.server.host, self.server.port)
def tearDown(self):
self.client.close()
self.server.stop()
def test_getwelcome(self):
self.assertEqual(self.client.getwelcome(), '220 welcome')
def test_sanitize(self):
self.assertEqual(self.client.sanitize('foo'), repr('foo'))
self.assertEqual(self.client.sanitize('pass 12345'), repr('pass *****'))
self.assertEqual(self.client.sanitize('PASS 12345'), repr('PASS *****'))
def test_exceptions(self):
self.assertRaises(ftplib.error_temp, self.client.sendcmd, 'echo 400')
self.assertRaises(ftplib.error_temp, self.client.sendcmd, 'echo 499')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'echo 500')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'echo 599')
self.assertRaises(ftplib.error_proto, self.client.sendcmd, 'echo 999')
def test_all_errors(self):
exceptions = (ftplib.error_reply, ftplib.error_temp, ftplib.error_perm,
ftplib.error_proto, ftplib.Error, IOError, EOFError)
for x in exceptions:
try:
raise x('exception not included in all_errors set')
except ftplib.all_errors:
pass
def test_set_pasv(self):
# passive mode is supposed to be enabled by default
self.assertTrue(self.client.passiveserver)
self.client.set_pasv(True)
self.assertTrue(self.client.passiveserver)
self.client.set_pasv(False)
self.assertFalse(self.client.passiveserver)
def test_voidcmd(self):
self.client.voidcmd('echo 200')
self.client.voidcmd('echo 299')
self.assertRaises(ftplib.error_reply, self.client.voidcmd, 'echo 199')
self.assertRaises(ftplib.error_reply, self.client.voidcmd, 'echo 300')
def test_login(self):
self.client.login()
def test_acct(self):
self.client.acct('passwd')
def test_rename(self):
self.client.rename('a', 'b')
self.server.handler.next_response = '200'
self.assertRaises(ftplib.error_reply, self.client.rename, 'a', 'b')
def test_delete(self):
self.client.delete('foo')
self.server.handler.next_response = '199'
self.assertRaises(ftplib.error_reply, self.client.delete, 'foo')
def test_size(self):
self.client.size('foo')
def test_mkd(self):
dir = self.client.mkd('/foo')
self.assertEqual(dir, '/foo')
def test_rmd(self):
self.client.rmd('foo')
def test_pwd(self):
dir = self.client.pwd()
self.assertEqual(dir, 'pwd ok')
def test_quit(self):
self.assertEqual(self.client.quit(), '221 quit ok')
# Ensure the connection gets closed; sock attribute should be None
self.assertEqual(self.client.sock, None)
def test_retrbinary(self):
received = []
self.client.retrbinary('retr', received.append)
self.assertEqual(''.join(received), RETR_DATA)
def test_retrlines(self):
received = []
self.client.retrlines('retr', received.append)
self.assertEqual(''.join(received), RETR_DATA.replace('\r\n', ''))
def test_storbinary(self):
f = StringIO.StringIO(RETR_DATA)
self.client.storbinary('stor', f)
self.assertEqual(self.server.handler.last_received_data, RETR_DATA)
# test new callback arg
flag = []
f.seek(0)
self.client.storbinary('stor', f, callback=lambda x: flag.append(None))
self.assertTrue(flag)
def test_storlines(self):
f = StringIO.StringIO(RETR_DATA.replace('\r\n', '\n'))
self.client.storlines('stor', f)
self.assertEqual(self.server.handler.last_received_data, RETR_DATA)
# test new callback arg
flag = []
f.seek(0)
self.client.storlines('stor foo', f, callback=lambda x: flag.append(None))
self.assertTrue(flag)
def test_nlst(self):
self.client.nlst()
self.assertEqual(self.client.nlst(), NLST_DATA.split('\r\n')[:-1])
def test_dir(self):
l = []
self.client.dir(lambda x: l.append(x))
self.assertEqual(''.join(l), LIST_DATA.replace('\r\n', ''))
def test_makeport(self):
self.client.makeport()
# IPv4 is in use, just make sure send_eprt has not been used
self.assertEqual(self.server.handler.last_received_cmd, 'port')
def test_makepasv(self):
host, port = self.client.makepasv()
conn = socket.create_connection((host, port), 2)
conn.close()
# IPv4 is in use, just make sure send_epsv has not been used
self.assertEqual(self.server.handler.last_received_cmd, 'pasv')
class TestIPv6Environment(TestCase):
def setUp(self):
self.server = DummyFTPServer((HOST, 0), af=socket.AF_INET6)
self.server.start()
self.client = ftplib.FTP()
self.client.connect(self.server.host, self.server.port)
def tearDown(self):
self.client.close()
self.server.stop()
def test_af(self):
self.assertEqual(self.client.af, socket.AF_INET6)
def test_makeport(self):
self.client.makeport()
self.assertEqual(self.server.handler.last_received_cmd, 'eprt')
def test_makepasv(self):
host, port = self.client.makepasv()
conn = socket.create_connection((host, port), 2)
conn.close()
self.assertEqual(self.server.handler.last_received_cmd, 'epsv')
def test_transfer(self):
def retr():
received = []
self.client.retrbinary('retr', received.append)
self.assertEqual(''.join(received), RETR_DATA)
self.client.set_pasv(True)
retr()
self.client.set_pasv(False)
retr()
class TestTimeouts(TestCase):
def setUp(self):
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(3)
self.port = test_support.bind_port(self.sock)
threading.Thread(target=self.server, args=(self.evt,self.sock)).start()
# Wait for the server to be ready.
self.evt.wait()
self.evt.clear()
ftplib.FTP.port = self.port
def tearDown(self):
self.evt.wait()
def server(self, evt, serv):
# This method sets the evt 3 times:
# 1) when the connection is ready to be accepted.
# 2) when it is safe for the caller to close the connection
# 3) when we have closed the socket
serv.listen(5)
# (1) Signal the caller that we are ready to accept the connection.
evt.set()
try:
conn, addr = serv.accept()
except socket.timeout:
pass
else:
conn.send("1 Hola mundo\n")
# (2) Signal the caller that it is safe to close the socket.
evt.set()
conn.close()
finally:
serv.close()
# (3) Signal the caller that we are done.
evt.set()
def testTimeoutDefault(self):
# default -- use global socket timeout
self.assert_(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
ftp = ftplib.FTP("localhost")
finally:
socket.setdefaulttimeout(None)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def testTimeoutNone(self):
# no timeout -- do not use global socket timeout
self.assert_(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
ftp = ftplib.FTP("localhost", timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertTrue(ftp.sock.gettimeout() is None)
self.evt.wait()
ftp.close()
def testTimeoutValue(self):
# a value
ftp = ftplib.FTP(HOST, timeout=30)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def testTimeoutConnect(self):
ftp = ftplib.FTP()
ftp.connect(HOST, timeout=30)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def testTimeoutDifferentOrder(self):
ftp = ftplib.FTP(timeout=30)
ftp.connect(HOST)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def testTimeoutDirectAccess(self):
ftp = ftplib.FTP()
ftp.timeout = 30
ftp.connect(HOST)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def test_main():
tests = [TestFTPClass, TestTimeouts]
if socket.has_ipv6:
try:
DummyFTPServer((HOST, 0), af=socket.AF_INET6)
except socket.error:
pass
else:
tests.append(TestIPv6Environment)
thread_info = test_support.threading_setup()
try:
test_support.run_unittest(*tests)
finally:
test_support.threading_cleanup(*thread_info)
if __name__ == '__main__':
test_main()
import array
import httplib
import StringIO
import socket
from unittest import TestCase
from test import test_support
HOST = test_support.HOST
class FakeSocket:
def __init__(self, text, fileclass=StringIO.StringIO):
self.text = text
self.fileclass = fileclass
self.data = ''
def sendall(self, data):
self.data += ''.join(data)
def makefile(self, mode, bufsize=None):
if mode != 'r' and mode != 'rb':
raise httplib.UnimplementedFileMode()
return self.fileclass(self.text)
class NoEOFStringIO(StringIO.StringIO):
"""Like StringIO, but raises AssertionError on EOF.
This is used below to test that httplib doesn't try to read
more from the underlying file than it should.
"""
def read(self, n=-1):
data = StringIO.StringIO.read(self, n)
if data == '':
raise AssertionError('caller tried to read past EOF')
return data
def readline(self, length=None):
data = StringIO.StringIO.readline(self, length)
if data == '':
raise AssertionError('caller tried to read past EOF')
return data
class HeaderTests(TestCase):
def test_auto_headers(self):
# Some headers are added automatically, but should not be added by
# .request() if they are explicitly set.
import httplib
class HeaderCountingBuffer(list):
def __init__(self):
self.count = {}
def append(self, item):
kv = item.split(':')
if len(kv) > 1:
# item is a 'Key: Value' header string
lcKey = kv[0].lower()
self.count.setdefault(lcKey, 0)
self.count[lcKey] += 1
list.append(self, item)
for explicit_header in True, False:
for header in 'Content-length', 'Host', 'Accept-encoding':
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket('blahblahblah')
conn._buffer = HeaderCountingBuffer()
body = 'spamspamspam'
headers = {}
if explicit_header:
headers[header] = str(len(body))
conn.request('POST', '/', body, headers)
self.assertEqual(conn._buffer.count[header.lower()], 1)
class BasicTest(TestCase):
def test_status_lines(self):
# Test HTTP status lines
body = "HTTP/1.1 200 Ok\r\n\r\nText"
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.read(), 'Text')
self.assertTrue(resp.isclosed())
body = "HTTP/1.1 400.100 Not Ok\r\n\r\nText"
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock)
self.assertRaises(httplib.BadStatusLine, resp.begin)
def test_partial_reads(self):
# if we have a length, the system knows when to close itself;
# same behaviour as when we read the whole thing with read()
body = "HTTP/1.1 200 Ok\r\nContent-Length: 4\r\n\r\nText"
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.read(2), 'Te')
self.assertFalse(resp.isclosed())
self.assertEqual(resp.read(2), 'xt')
self.assertTrue(resp.isclosed())
def test_host_port(self):
# Check invalid host_port
for hp in ("www.python.org:abc", "www.python.org:"):
self.assertRaises(httplib.InvalidURL, httplib.HTTP, hp)
for hp, h, p in (("[fe80::207:e9ff:fe9b]:8000", "fe80::207:e9ff:fe9b",
8000),
("www.python.org:80", "www.python.org", 80),
("www.python.org", "www.python.org", 80),
("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 80)):
http = httplib.HTTP(hp)
c = http._conn
if h != c.host:
self.fail("Host incorrectly parsed: %s != %s" % (h, c.host))
if p != c.port:
self.fail("Port incorrectly parsed: %s != %s" % (p, c.host))
def test_response_headers(self):
# test response with multiple message headers with the same field name.
text = ('HTTP/1.1 200 OK\r\n'
'Set-Cookie: Customer="WILE_E_COYOTE";'
' Version="1"; Path="/acme"\r\n'
'Set-Cookie: Part_Number="Rocket_Launcher_0001"; Version="1";'
' Path="/acme"\r\n'
'\r\n'
'No body\r\n')
hdr = ('Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"'
', '
'Part_Number="Rocket_Launcher_0001"; Version="1"; Path="/acme"')
s = FakeSocket(text)
r = httplib.HTTPResponse(s)
r.begin()
cookies = r.getheader("Set-Cookie")
if cookies != hdr:
self.fail("multiple headers not combined properly")
def test_read_head(self):
# Test that the library doesn't attempt to read any data
# from a HEAD request. (Tickles SF bug #622042.)
sock = FakeSocket(
'HTTP/1.1 200 OK\r\n'
'Content-Length: 14432\r\n'
'\r\n',
NoEOFStringIO)
resp = httplib.HTTPResponse(sock, method="HEAD")
resp.begin()
if resp.read() != "":
self.fail("Did not expect response from HEAD request")
def test_send_file(self):
expected = 'GET /foo HTTP/1.1\r\nHost: example.com\r\n' \
'Accept-Encoding: identity\r\nContent-Length:'
body = open(__file__, 'rb')
conn = httplib.HTTPConnection('example.com')
sock = FakeSocket(body)
conn.sock = sock
conn.request('GET', '/foo', body)
self.assertTrue(sock.data.startswith(expected))
def test_send(self):
expected = 'this is a test this is only a test'
conn = httplib.HTTPConnection('example.com')
sock = FakeSocket(None)
conn.sock = sock
conn.send(expected)
self.assertEquals(expected, sock.data)
sock.data = ''
conn.send(array.array('c', expected))
self.assertEquals(expected, sock.data)
sock.data = ''
conn.send(StringIO.StringIO(expected))
self.assertEquals(expected, sock.data)
def test_chunked(self):
chunked_start = (
'HTTP/1.1 200 OK\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'a\r\n'
'hello worl\r\n'
'1\r\n'
'd\r\n'
)
sock = FakeSocket(chunked_start + '0\r\n')
resp = httplib.HTTPResponse(sock, method="GET")
resp.begin()
self.assertEquals(resp.read(), 'hello world')
resp.close()
for x in ('', 'foo\r\n'):
sock = FakeSocket(chunked_start + x)
resp = httplib.HTTPResponse(sock, method="GET")
resp.begin()
try:
resp.read()
except httplib.IncompleteRead, i:
self.assertEquals(i.partial, 'hello world')
self.assertEqual(repr(i),'IncompleteRead(11 bytes read)')
self.assertEqual(str(i),'IncompleteRead(11 bytes read)')
else:
self.fail('IncompleteRead expected')
finally:
resp.close()
def test_chunked_head(self):
chunked_start = (
'HTTP/1.1 200 OK\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'a\r\n'
'hello world\r\n'
'1\r\n'
'd\r\n'
)
sock = FakeSocket(chunked_start + '0\r\n')
resp = httplib.HTTPResponse(sock, method="HEAD")
resp.begin()
self.assertEquals(resp.read(), '')
self.assertEquals(resp.status, 200)
self.assertEquals(resp.reason, 'OK')
self.assertTrue(resp.isclosed())
def test_negative_content_length(self):
sock = FakeSocket('HTTP/1.1 200 OK\r\n'
'Content-Length: -1\r\n\r\nHello\r\n')
resp = httplib.HTTPResponse(sock, method="GET")
resp.begin()
self.assertEquals(resp.read(), 'Hello\r\n')
resp.close()
def test_incomplete_read(self):
sock = FakeSocket('HTTP/1.1 200 OK\r\nContent-Length: 10\r\n\r\nHello\r\n')
resp = httplib.HTTPResponse(sock, method="GET")
resp.begin()
try:
resp.read()
except httplib.IncompleteRead as i:
self.assertEquals(i.partial, 'Hello\r\n')
self.assertEqual(repr(i),
"IncompleteRead(7 bytes read, 3 more expected)")
self.assertEqual(str(i),
"IncompleteRead(7 bytes read, 3 more expected)")
else:
self.fail('IncompleteRead expected')
finally:
resp.close()
class OfflineTest(TestCase):
def test_responses(self):
self.assertEquals(httplib.responses[httplib.NOT_FOUND], "Not Found")
class TimeoutTest(TestCase):
PORT = None
def setUp(self):
self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
TimeoutTest.PORT = test_support.bind_port(self.serv)
self.serv.listen(5)
def tearDown(self):
self.serv.close()
self.serv = None
def testTimeoutAttribute(self):
'''This will prove that the timeout gets through
HTTPConnection and into the socket.
'''
# default -- use global socket timeout
self.assert_(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
httpConn = httplib.HTTPConnection(HOST, TimeoutTest.PORT)
httpConn.connect()
finally:
socket.setdefaulttimeout(None)
self.assertEqual(httpConn.sock.gettimeout(), 30)
httpConn.close()
# no timeout -- do not use global socket default
self.assert_(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
httpConn = httplib.HTTPConnection(HOST, TimeoutTest.PORT,
timeout=None)
httpConn.connect()
finally:
socket.setdefaulttimeout(None)
self.assertEqual(httpConn.sock.gettimeout(), None)
httpConn.close()
# a value
httpConn = httplib.HTTPConnection(HOST, TimeoutTest.PORT, timeout=30)
httpConn.connect()
self.assertEqual(httpConn.sock.gettimeout(), 30)
httpConn.close()
class HTTPSTimeoutTest(TestCase):
# XXX Here should be tests for HTTPS, there isn't any right now!
def test_attributes(self):
# simple test to check it's storing it
if hasattr(httplib, 'HTTPSConnection'):
h = httplib.HTTPSConnection(HOST, TimeoutTest.PORT, timeout=30)
self.assertEqual(h.timeout, 30)
def test_main(verbose=None):
test_support.run_unittest(HeaderTests, OfflineTest, BasicTest, TimeoutTest,
HTTPSTimeoutTest)
if __name__ == '__main__':
test_main()
"""Unittests for the various HTTPServer modules.
Written by Cody A.W. Somerville <cody-somerville@ubuntu.com>,
Josip Dzolonga, and Michael Otteneder for the 2007/08 GHOP contest.
"""
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
from CGIHTTPServer import CGIHTTPRequestHandler
import os
import sys
import base64
import shutil
import urllib
import httplib
import tempfile
import threading
import unittest
from test import test_support
class NoLogRequestHandler:
def log_message(self, *args):
# don't write log messages to stderr
pass
class TestServerThread(threading.Thread):
def __init__(self, test_object, request_handler):
threading.Thread.__init__(self)
self.request_handler = request_handler
self.test_object = test_object
def run(self):
self.server = HTTPServer(('', 0), self.request_handler)
self.test_object.PORT = self.server.socket.getsockname()[1]
self.test_object.server_started.set()
self.test_object = None
try:
self.server.serve_forever(0.05)
finally:
self.server.server_close()
def stop(self):
self.server.shutdown()
class BaseTestCase(unittest.TestCase):
def setUp(self):
self.server_started = threading.Event()
self.thread = TestServerThread(self, self.request_handler)
self.thread.start()
self.server_started.wait()
def tearDown(self):
self.thread.stop()
def request(self, uri, method='GET', body=None, headers={}):
self.connection = httplib.HTTPConnection('localhost', self.PORT)
self.connection.request(method, uri, body, headers)
return self.connection.getresponse()
class BaseHTTPServerTestCase(BaseTestCase):
class request_handler(NoLogRequestHandler, BaseHTTPRequestHandler):
protocol_version = 'HTTP/1.1'
default_request_version = 'HTTP/1.1'
def do_TEST(self):
self.send_response(204)
self.send_header('Content-Type', 'text/html')
self.send_header('Connection', 'close')
self.end_headers()
def do_KEEP(self):
self.send_response(204)
self.send_header('Content-Type', 'text/html')
self.send_header('Connection', 'keep-alive')
self.end_headers()
def do_KEYERROR(self):
self.send_error(999)
def do_CUSTOM(self):
self.send_response(999)
self.send_header('Content-Type', 'text/html')
self.send_header('Connection', 'close')
self.end_headers()
def setUp(self):
BaseTestCase.setUp(self)
self.con = httplib.HTTPConnection('localhost', self.PORT)
self.con.connect()
def test_command(self):
self.con.request('GET', '/')
res = self.con.getresponse()
self.assertEquals(res.status, 501)
def test_request_line_trimming(self):
self.con._http_vsn_str = 'HTTP/1.1\n'
self.con.putrequest('GET', '/')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 501)
def test_version_bogus(self):
self.con._http_vsn_str = 'FUBAR'
self.con.putrequest('GET', '/')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 400)
def test_version_digits(self):
self.con._http_vsn_str = 'HTTP/9.9.9'
self.con.putrequest('GET', '/')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 400)
def test_version_none_get(self):
self.con._http_vsn_str = ''
self.con.putrequest('GET', '/')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 501)
def test_version_none(self):
self.con._http_vsn_str = ''
self.con.putrequest('PUT', '/')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 400)
def test_version_invalid(self):
self.con._http_vsn = 99
self.con._http_vsn_str = 'HTTP/9.9'
self.con.putrequest('GET', '/')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 505)
def test_send_blank(self):
self.con._http_vsn_str = ''
self.con.putrequest('', '')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 400)
def test_header_close(self):
self.con.putrequest('GET', '/')
self.con.putheader('Connection', 'close')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 501)
def test_head_keep_alive(self):
self.con._http_vsn_str = 'HTTP/1.1'
self.con.putrequest('GET', '/')
self.con.putheader('Connection', 'keep-alive')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 501)
def test_handler(self):
self.con.request('TEST', '/')
res = self.con.getresponse()
self.assertEquals(res.status, 204)
def test_return_header_keep_alive(self):
self.con.request('KEEP', '/')
res = self.con.getresponse()
self.assertEquals(res.getheader('Connection'), 'keep-alive')
self.con.request('TEST', '/')
def test_internal_key_error(self):
self.con.request('KEYERROR', '/')
res = self.con.getresponse()
self.assertEquals(res.status, 999)
def test_return_custom_status(self):
self.con.request('CUSTOM', '/')
res = self.con.getresponse()
self.assertEquals(res.status, 999)
class SimpleHTTPServerTestCase(BaseTestCase):
class request_handler(NoLogRequestHandler, SimpleHTTPRequestHandler):
pass
def setUp(self):
BaseTestCase.setUp(self)
self.cwd = os.getcwd()
basetempdir = tempfile.gettempdir()
os.chdir(basetempdir)
self.data = 'We are the knights who say Ni!'
self.tempdir = tempfile.mkdtemp(dir=basetempdir)
self.tempdir_name = os.path.basename(self.tempdir)
temp = open(os.path.join(self.tempdir, 'test'), 'wb')
temp.write(self.data)
temp.close()
def tearDown(self):
try:
os.chdir(self.cwd)
try:
shutil.rmtree(self.tempdir)
except:
pass
finally:
BaseTestCase.tearDown(self)
def check_status_and_reason(self, response, status, data=None):
body = response.read()
self.assert_(response)
self.assertEquals(response.status, status)
self.assert_(response.reason != None)
if data:
self.assertEqual(data, body)
def test_get(self):
# constructs the path relative to the root directory of the HTTPServer
response = self.request(self.tempdir_name + '/test')
self.check_status_and_reason(response, 200, data=self.data)
response = self.request(self.tempdir_name + '/')
self.check_status_and_reason(response, 200)
response = self.request(self.tempdir_name)
self.check_status_and_reason(response, 301)
response = self.request('/ThisDoesNotExist')
self.check_status_and_reason(response, 404)
response = self.request('/' + 'ThisDoesNotExist' + '/')
self.check_status_and_reason(response, 404)
f = open(os.path.join(self.tempdir_name, 'index.html'), 'w')
response = self.request('/' + self.tempdir_name + '/')
self.check_status_and_reason(response, 200)
if os.name == 'posix':
# chmod won't work as expected on Windows platforms
os.chmod(self.tempdir, 0)
response = self.request(self.tempdir_name + '/')
self.check_status_and_reason(response, 404)
os.chmod(self.tempdir, 0755)
def test_head(self):
response = self.request(
self.tempdir_name + '/test', method='HEAD')
self.check_status_and_reason(response, 200)
self.assertEqual(response.getheader('content-length'),
str(len(self.data)))
self.assertEqual(response.getheader('content-type'),
'application/octet-stream')
def test_invalid_requests(self):
response = self.request('/', method='FOO')
self.check_status_and_reason(response, 501)
# requests must be case sensitive, so this should fail too
response = self.request('/', method='get')
self.check_status_and_reason(response, 501)
response = self.request('/', method='GETs')
self.check_status_and_reason(response, 501)
cgi_file1 = """\
#!%s
print "Content-type: text/html"
print
print "Hello World"
"""
cgi_file2 = """\
#!%s
import cgi
print "Content-type: text/html"
print
form = cgi.FieldStorage()
print "%%s, %%s, %%s" %% (form.getfirst("spam"), form.getfirst("eggs"),\
form.getfirst("bacon"))
"""
class CGIHTTPServerTestCase(BaseTestCase):
class request_handler(NoLogRequestHandler, CGIHTTPRequestHandler):
pass
def setUp(self):
BaseTestCase.setUp(self)
self.parent_dir = tempfile.mkdtemp()
self.cgi_dir = os.path.join(self.parent_dir, 'cgi-bin')
os.mkdir(self.cgi_dir)
# The shebang line should be pure ASCII: use symlink if possible.
# See issue #7668.
if hasattr(os, 'symlink'):
self.pythonexe = os.path.join(self.parent_dir, 'python')
os.symlink(sys.executable, self.pythonexe)
else:
self.pythonexe = sys.executable
self.file1_path = os.path.join(self.cgi_dir, 'file1.py')
with open(self.file1_path, 'w') as file1:
file1.write(cgi_file1 % self.pythonexe)
os.chmod(self.file1_path, 0777)
self.file2_path = os.path.join(self.cgi_dir, 'file2.py')
with open(self.file2_path, 'w') as file2:
file2.write(cgi_file2 % self.pythonexe)
os.chmod(self.file2_path, 0777)
self.cwd = os.getcwd()
os.chdir(self.parent_dir)
def tearDown(self):
try:
os.chdir(self.cwd)
if self.pythonexe != sys.executable:
os.remove(self.pythonexe)
os.remove(self.file1_path)
os.remove(self.file2_path)
os.rmdir(self.cgi_dir)
os.rmdir(self.parent_dir)
finally:
BaseTestCase.tearDown(self)
def test_headers_and_content(self):
res = self.request('/cgi-bin/file1.py')
self.assertEquals(('Hello World\n', 'text/html', 200), \
(res.read(), res.getheader('Content-type'), res.status))
def test_post(self):
params = urllib.urlencode({'spam' : 1, 'eggs' : 'python', 'bacon' : 123456})
headers = {'Content-type' : 'application/x-www-form-urlencoded'}
res = self.request('/cgi-bin/file2.py', 'POST', params, headers)
self.assertEquals(res.read(), '1, python, 123456\n')
def test_invaliduri(self):
res = self.request('/cgi-bin/invalid')
res.read()
self.assertEquals(res.status, 404)
def test_authorization(self):
headers = {'Authorization' : 'Basic %s' % \
base64.b64encode('username:pass')}
res = self.request('/cgi-bin/file1.py', 'GET', headers=headers)
self.assertEquals(('Hello World\n', 'text/html', 200), \
(res.read(), res.getheader('Content-type'), res.status))
def test_main(verbose=None):
cwd = os.getcwd()
env = os.environ.copy()
try:
test_support.run_unittest(BaseHTTPServerTestCase,
SimpleHTTPServerTestCase,
CGIHTTPServerTestCase
)
finally:
test_support.reap_children()
os.environ.clear()
os.environ.update(env)
os.chdir(cwd)
if __name__ == '__main__':
test_main()
# Some simple queue module tests, plus some failure conditions
# to ensure the Queue locks remain stable.
import Queue
import sys
import threading
import time
import unittest
from test import test_support
QUEUE_SIZE = 5
# A thread to run a function that unclogs a blocked Queue.
class _TriggerThread(threading.Thread):
def __init__(self, fn, args):
self.fn = fn
self.args = args
self.startedEvent = threading.Event()
threading.Thread.__init__(self)
def run(self):
# The sleep isn't necessary, but is intended to give the blocking
# function in the main thread a chance at actually blocking before
# we unclog it. But if the sleep is longer than the timeout-based
# tests wait in their blocking functions, those tests will fail.
# So we give them much longer timeout values compared to the
# sleep here (I aimed at 10 seconds for blocking functions --
# they should never actually wait that long - they should make
# progress as soon as we call self.fn()).
time.sleep(0.1)
self.startedEvent.set()
self.fn(*self.args)
# Execute a function that blocks, and in a separate thread, a function that
# triggers the release. Returns the result of the blocking function. Caution:
# block_func must guarantee to block until trigger_func is called, and
# trigger_func must guarantee to change queue state so that block_func can make
# enough progress to return. In particular, a block_func that just raises an
# exception regardless of whether trigger_func is called will lead to
# timing-dependent sporadic failures; one such failure remained rare and
# undiagnosed for years. Now block_func must be unexceptional. If block_func
# is supposed to raise an exception, call do_exceptional_blocking_test()
# instead.
class BlockingTestMixin:
def do_blocking_test(self, block_func, block_args, trigger_func, trigger_args):
self.t = _TriggerThread(trigger_func, trigger_args)
self.t.start()
self.result = block_func(*block_args)
# If block_func returned before our thread made the call, we failed!
if not self.t.startedEvent.is_set():
self.fail("blocking function '%r' appeared not to block" %
block_func)
self.t.join(10) # make sure the thread terminates
if self.t.is_alive():
self.fail("trigger function '%r' appeared to not return" %
trigger_func)
return self.result
# Call this instead if block_func is supposed to raise an exception.
def do_exceptional_blocking_test(self,block_func, block_args, trigger_func,
trigger_args, expected_exception_class):
self.t = _TriggerThread(trigger_func, trigger_args)
self.t.start()
try:
try:
block_func(*block_args)
except expected_exception_class:
raise
else:
self.fail("expected exception of kind %r" %
expected_exception_class)
finally:
self.t.join(10) # make sure the thread terminates
if self.t.is_alive():
self.fail("trigger function '%r' appeared to not return" %
trigger_func)
if not self.t.startedEvent.is_set():
self.fail("trigger thread ended but event never set")
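# Shared queue tests; concrete subclasses below set type2test to the queue class
# under test (Queue, LifoQueue or PriorityQueue).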
class BaseQueueTest(unittest.TestCase, BlockingTestMixin):
def setUp(self):
self.cum = 0
self.cumlock = threading.Lock()
def simple_queue_test(self, q):
if not q.empty():
raise RuntimeError, "Call this function with an empty queue"
# I guess we better check things actually queue correctly a little :)
q.put(111)
q.put(333)
q.put(222)
target_order = dict(Queue = [111, 333, 222],
LifoQueue = [222, 333, 111],
PriorityQueue = [111, 222, 333])
actual_order = [q.get(), q.get(), q.get()]
self.assertEquals(actual_order, target_order[q.__class__.__name__],
"Didn't seem to queue the correct data!")
for i in range(QUEUE_SIZE-1):
q.put(i)
self.assert_(not q.empty(), "Queue should not be empty")
self.assert_(not q.full(), "Queue should not be full")
last = 2 * QUEUE_SIZE
full = 3 * 2 * QUEUE_SIZE
q.put(last)
self.assert_(q.full(), "Queue should be full")
try:
q.put(full, block=0)
self.fail("Didn't raise Queue.Full on a non-blocking put to a full queue")
except Queue.Full:
pass
try:
q.put(full, timeout=0.01)
self.fail("Didn't appear to time-out with a full queue")
except Queue.Full:
pass
# Test a blocking put
self.do_blocking_test(q.put, (full,), q.get, ())
self.do_blocking_test(q.put, (full, True, 10), q.get, ())
# Empty it
for i in range(QUEUE_SIZE):
q.get()
self.assert_(q.empty(), "Queue should be empty")
try:
q.get(block=0)
self.fail("Didn't raise Queue.Empty on a non-blocking get from an empty queue")
except Queue.Empty:
pass
try:
q.get(timeout=0.01)
self.fail("Didn't appear to time-out with an empty queue")
except Queue.Empty:
pass
# Test a blocking get
self.do_blocking_test(q.get, (), q.put, ('empty',))
self.do_blocking_test(q.get, (True, 10), q.put, ('empty',))
def worker(self, q):
while True:
x = q.get()
if x is None:
q.task_done()
return
with self.cumlock:
self.cum += x
q.task_done()
def queue_join_test(self, q):
self.cum = 0
for i in (0,1):
threading.Thread(target=self.worker, args=(q,)).start()
for i in xrange(100):
q.put(i)
q.join()
self.assertEquals(self.cum, sum(range(100)),
"q.join() did not block until all tasks were done")
for i in (0,1):
q.put(None) # instruct the threads to close
q.join() # verify that you can join twice
def test_queue_task_done(self):
# Test to make sure a queue task completed successfully.
q = self.type2test()
try:
q.task_done()
except ValueError:
pass
else:
self.fail("Did not detect task count going negative")
def test_queue_join(self):
# Test that a queue join()s successfully, and before anything else
# (done twice for insurance).
q = self.type2test()
self.queue_join_test(q)
self.queue_join_test(q)
try:
q.task_done()
except ValueError:
pass
else:
self.fail("Did not detect task count going negative")
def test_simple_queue(self):
# Do it a couple of times on the same queue.
# Done twice to make sure works with same instance reused.
q = self.type2test(QUEUE_SIZE)
self.simple_queue_test(q)
self.simple_queue_test(q)
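# Concrete test cases: the shared tests run once per queue class.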
class QueueTest(BaseQueueTest):
type2test = Queue.Queue
class LifoQueueTest(BaseQueueTest):
type2test = Queue.LifoQueue
class PriorityQueueTest(BaseQueueTest):
type2test = Queue.PriorityQueue
# A Queue subclass that can provoke failure at a moment's notice :)
class FailingQueueException(Exception):
pass
class FailingQueue(Queue.Queue):
def __init__(self, *args):
self.fail_next_put = False
self.fail_next_get = False
Queue.Queue.__init__(self, *args)
def _put(self, item):
if self.fail_next_put:
self.fail_next_put = False
raise FailingQueueException, "You Lose"
return Queue.Queue._put(self, item)
def _get(self):
if self.fail_next_get:
self.fail_next_get = False
raise FailingQueueException, "You Lose"
return Queue.Queue._get(self)
class FailingQueueTest(unittest.TestCase, BlockingTestMixin):
def failing_queue_test(self, q):
if not q.empty():
raise RuntimeError, "Call this function with an empty queue"
for i in range(QUEUE_SIZE-1):
q.put(i)
# Test a failing non-blocking put.
q.fail_next_put = True
try:
q.put("oops", block=0)
self.fail("The queue didn't fail when it should have")
except FailingQueueException:
pass
q.fail_next_put = True
try:
q.put("oops", timeout=0.1)
self.fail("The queue didn't fail when it should have")
except FailingQueueException:
pass
q.put("last")
self.assert_(q.full(), "Queue should be full")
# Test a failing blocking put
q.fail_next_put = True
try:
self.do_blocking_test(q.put, ("full",), q.get, ())
self.fail("The queue didn't fail when it should have")
except FailingQueueException:
pass
# Check the Queue isn't damaged.
# put failed, but get succeeded - re-add
q.put("last")
# Test a failing timeout put
q.fail_next_put = True
try:
self.do_exceptional_blocking_test(q.put, ("full", True, 10), q.get, (),
FailingQueueException)
self.fail("The queue didn't fail when it should have")
except FailingQueueException:
pass
# Check the Queue isn't damaged.
# put failed, but get succeeded - re-add
q.put("last")
self.assert_(q.full(), "Queue should be full")
q.get()
self.assert_(not q.full(), "Queue should not be full")
q.put("last")
self.assert_(q.full(), "Queue should be full")
# Test a blocking put
self.do_blocking_test(q.put, ("full",), q.get, ())
# Empty it
for i in range(QUEUE_SIZE):
q.get()
self.assert_(q.empty(), "Queue should be empty")
q.put("first")
q.fail_next_get = True
try:
q.get()
self.fail("The queue didn't fail when it should have")
except FailingQueueException:
pass
self.assert_(not q.empty(), "Queue should not be empty")
q.fail_next_get = True
try:
q.get(timeout=0.1)
self.fail("The queue didn't fail when it should have")
except FailingQueueException:
pass
self.assert_(not q.empty(), "Queue should not be empty")
q.get()
self.assert_(q.empty(), "Queue should be empty")
q.fail_next_get = True
try:
self.do_exceptional_blocking_test(q.get, (), q.put, ('empty',),
FailingQueueException)
self.fail("The queue didn't fail when it should have")
except FailingQueueException:
pass
# put succeeded, but get failed.
self.assert_(not q.empty(), "Queue should not be empty")
q.get()
self.assert_(q.empty(), "Queue should be empty")
def test_failing_queue(self):
# Test to make sure a queue is functioning correctly.
# Done twice to the same instance.
q = FailingQueue(QUEUE_SIZE)
self.failing_queue_test(q)
self.failing_queue_test(q)
def test_main():
test_support.run_unittest(QueueTest, LifoQueueTest, PriorityQueueTest,
FailingQueueTest)
if __name__ == "__main__":
test_main()
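# Tests for select.select(): argument validation, identity of the returned lists,
# and polling a pipe opened with os.popen().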
from test import test_support
import unittest
import select
import os
import sys
class SelectTestCase(unittest.TestCase):
class Nope:
pass
class Almost:
def fileno(self):
return 'fileno'
def test_error_conditions(self):
self.assertRaises(TypeError, select.select, 1, 2, 3)
self.assertRaises(TypeError, select.select, [self.Nope()], [], [])
self.assertRaises(TypeError, select.select, [self.Almost()], [], [])
self.assertRaises(TypeError, select.select, [], [], [], "not a number")
def test_returned_list_identity(self):
if sys.platform[:3] in ('win', 'mac', 'os2'):
if test_support.verbose:
print "can't easily test on this system"
return
# See issue #8329
r, w, x = select.select([], [], [], 1)
self.assertFalse(r is w)
self.assertFalse(r is x)
self.assertFalse(w is x)
def test_select(self):
if sys.platform[:3] in ('win', 'mac', 'os2'):
if test_support.verbose:
print "can't easily test on this system"
return
cmd = 'for i in 0 1 2 3 4 5 6 7 8 9; do echo testing...; sleep 0.1; done'
p = os.popen(cmd, 'r')
for tout in (0, 0.1, 0.2, 0.4, 0.8, 1.6) + (None,)*10:
if test_support.verbose:
print 'timeout =', tout
rfd, wfd, xfd = select.select([p], [], [], tout)
if (rfd, wfd, xfd) == ([], [], []):
continue
if (rfd, wfd, xfd) == ([p], [], []):
line = p.readline()
if test_support.verbose:
print repr(line)
if not line:
if test_support.verbose:
print 'EOF'
break
continue
self.fail('Unexpected return values from select(): %s, %s, %s' % (rfd, wfd, xfd))
p.close()
def test_main():
test_support.run_unittest(SelectTestCase)
test_support.reap_children()
if __name__ == "__main__":
test_main()
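# Tests for the signal module: inter-process delivery, handler registration,
# wakeup fds, siginterrupt(), and interval timers.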
import unittest
from test import test_support
from contextlib import closing, nested
import gc
import pickle
import select
import signal
import subprocess
import traceback
import sys, os, time, errno
if sys.platform[:3] in ('win', 'os2') or sys.platform == 'riscos':
raise test_support.TestSkipped("Can't test signal on %s" % \
sys.platform)
class HandlerBCalled(Exception):
pass
def exit_subprocess():
"""Use os._exit(0) to exit the current subprocess.
Otherwise, the test catches the SystemExit and continues executing
in parallel with the original test, so you wind up with an
exponential number of tests running concurrently.
"""
os._exit(0)
def ignoring_eintr(__func, *args, **kwargs):
try:
return __func(*args, **kwargs)
except EnvironmentError as e:
if e.errno != errno.EINTR:
raise
return None
class InterProcessSignalTests(unittest.TestCase):
MAX_DURATION = 20 # Entire test should last at most 20 sec.
def setUp(self):
self.using_gc = gc.isenabled()
gc.disable()
def tearDown(self):
if self.using_gc:
gc.enable()
def format_frame(self, frame, limit=None):
return ''.join(traceback.format_stack(frame, limit=limit))
def handlerA(self, signum, frame):
self.a_called = True
if test_support.verbose:
print "handlerA invoked from signal %s at:\n%s" % (
signum, self.format_frame(frame, limit=1))
def handlerB(self, signum, frame):
self.b_called = True
if test_support.verbose:
print "handlerB invoked from signal %s at:\n%s" % (
signum, self.format_frame(frame, limit=1))
raise HandlerBCalled(signum, self.format_frame(frame))
def wait(self, child):
"""Wait for child to finish, ignoring EINTR."""
while True:
try:
child.wait()
return
except OSError as e:
if e.errno != errno.EINTR:
raise
def run_test(self):
# Install handlers. This function runs in a sub-process, so we
# don't worry about re-setting the default handlers.
signal.signal(signal.SIGHUP, self.handlerA)
signal.signal(signal.SIGUSR1, self.handlerB)
signal.signal(signal.SIGUSR2, signal.SIG_IGN)
signal.signal(signal.SIGALRM, signal.default_int_handler)
# Variables the signals will modify:
self.a_called = False
self.b_called = False
# Let the sub-processes know who to send signals to.
pid = os.getpid()
if test_support.verbose:
print "test runner's pid is", pid
child = ignoring_eintr(subprocess.Popen, ['kill', '-HUP', str(pid)])
if child:
self.wait(child)
if not self.a_called:
time.sleep(1) # Give the signal time to be delivered.
self.assertTrue(self.a_called)
self.assertFalse(self.b_called)
self.a_called = False
# Make sure the signal isn't delivered while the previous
# Popen object is being destroyed, because __del__ swallows
# exceptions.
del child
try:
child = subprocess.Popen(['kill', '-USR1', str(pid)])
# This wait should be interrupted by the signal's exception.
self.wait(child)
time.sleep(1) # Give the signal time to be delivered.
self.fail('HandlerBCalled exception not thrown')
except HandlerBCalled:
self.assertTrue(self.b_called)
self.assertFalse(self.a_called)
if test_support.verbose:
print "HandlerBCalled exception caught"
child = ignoring_eintr(subprocess.Popen, ['kill', '-USR2', str(pid)])
if child:
self.wait(child) # Nothing should happen.
try:
signal.alarm(1)
# The race condition in pause doesn't matter in this case,
# since alarm is going to raise a KeyboardInterrupt, which
# will skip the call.
signal.pause()
# But if another signal arrives before the alarm, pause
# may return early.
time.sleep(1)
except KeyboardInterrupt:
if test_support.verbose:
print "KeyboardInterrupt (the alarm() went off)"
except:
self.fail("Some other exception woke us from pause: %s" %
traceback.format_exc())
else:
self.fail("pause returned of its own accord, and the signal"
" didn't arrive after another second.")
def test_main(self):
# Issue 3864, unknown if this affects earlier versions of freebsd also
if sys.platform=='freebsd6':
if test_support.verbose:
sys.stderr.write('skipping -- inter process signals not '
'reliable (do not mix well with threading) on freebsd6\n')
return
# This function spawns a child process to insulate the main
# test-running process from all the signals. It then
# communicates with that child process over a pipe and
# re-raises information about any exceptions the child
# throws. The real work happens in self.run_test().
os_done_r, os_done_w = os.pipe()
with nested(closing(os.fdopen(os_done_r)),
closing(os.fdopen(os_done_w, 'w'))) as (done_r, done_w):
child = os.fork()
if child == 0:
# In the child process; run the test and report results
# through the pipe.
try:
done_r.close()
# Have to close done_w again here because
# exit_subprocess() will skip the enclosing with block.
with closing(done_w):
try:
self.run_test()
except:
pickle.dump(traceback.format_exc(), done_w)
else:
pickle.dump(None, done_w)
except:
print 'Uh oh, raised from pickle.'
traceback.print_exc()
finally:
exit_subprocess()
done_w.close()
# Block for up to MAX_DURATION seconds for the test to finish.
r, w, x = select.select([done_r], [], [], self.MAX_DURATION)
if done_r in r:
tb = pickle.load(done_r)
if tb:
self.fail(tb)
else:
os.kill(child, signal.SIGKILL)
self.fail('Test deadlocked after %d seconds.' %
self.MAX_DURATION)
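# Argument validation and handler bookkeeping for signal.signal()/getsignal();
# no signals are actually delivered here.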
class BasicSignalTests(unittest.TestCase):
def trivial_signal_handler(self, *args):
pass
def test_out_of_range_signal_number_raises_error(self):
self.assertRaises(ValueError, signal.getsignal, 4242)
self.assertRaises(ValueError, signal.signal, 4242,
self.trivial_signal_handler)
def test_setting_signal_handler_to_none_raises_error(self):
self.assertRaises(TypeError, signal.signal,
signal.SIGUSR1, None)
def test_getsignal(self):
hup = signal.signal(signal.SIGHUP, self.trivial_signal_handler)
self.assertEquals(signal.getsignal(signal.SIGHUP),
self.trivial_signal_handler)
signal.signal(signal.SIGHUP, hup)
self.assertEquals(signal.getsignal(signal.SIGHUP), hup)
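# With a wakeup fd registered via signal.set_wakeup_fd(), a signal arriving before
# or during select() must make the call return (or fail with EINTR) well before the
# full timeout.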
class WakeupSignalTests(unittest.TestCase):
TIMEOUT_FULL = 10
TIMEOUT_HALF = 5
def test_wakeup_fd_early(self):
import select
signal.alarm(1)
before_time = time.time()
# We attempt to get a signal during the sleep,
# before select is called
time.sleep(self.TIMEOUT_FULL)
mid_time = time.time()
self.assert_(mid_time - before_time < self.TIMEOUT_HALF)
select.select([self.read], [], [], self.TIMEOUT_FULL)
after_time = time.time()
self.assert_(after_time - mid_time < self.TIMEOUT_HALF)
def test_wakeup_fd_during(self):
import select
signal.alarm(1)
before_time = time.time()
# We attempt to get a signal during the select call
self.assertRaises(select.error, select.select,
[self.read], [], [], self.TIMEOUT_FULL)
after_time = time.time()
self.assert_(after_time - before_time < self.TIMEOUT_HALF)
def setUp(self):
import fcntl
self.alrm = signal.signal(signal.SIGALRM, lambda x,y:None)
self.read, self.write = os.pipe()
flags = fcntl.fcntl(self.write, fcntl.F_GETFL, 0)
flags = flags | os.O_NONBLOCK
fcntl.fcntl(self.write, fcntl.F_SETFL, flags)
self.old_wakeup = signal.set_wakeup_fd(self.write)
def tearDown(self):
signal.set_wakeup_fd(self.old_wakeup)
os.close(self.read)
os.close(self.write)
signal.signal(signal.SIGALRM, self.alrm)
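# signal.siginterrupt() behaviour: a forked child signals the parent while the
# parent blocks in os.read() on a pipe, and the test checks whether the read is
# interrupted.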
class SiginterruptTest(unittest.TestCase):
signum = signal.SIGUSR1
def setUp(self):
"""Install a no-op signal handler that can be set to allow
interrupts or not, and arrange for the original signal handler to be
re-installed when the test is finished.
"""
self._cleanups = []
oldhandler = signal.signal(self.signum, lambda x,y: None)
self.addCleanup(signal.signal, self.signum, oldhandler)
def tearDown(self):
"""Run any cleanup functions which have been registered.
"""
for (f, a) in self._cleanups:
f(*a)
def addCleanup(self, f, *a):
"""Register a function to be called at the end of the test method
run.
"""
self._cleanups.append((f, a))
def readpipe_interrupted(self):
"""Perform a read during which a signal will arrive. Return True if the
read is interrupted by the signal and raises an exception. Return False
if it returns normally.
"""
# Create a pipe that can be used for the read. Also clean it up
# when the test is over, since nothing else will (but see below for
# the write end).
r, w = os.pipe()
self.addCleanup(os.close, r)
# Create another process which can send a signal to this one to try
# to interrupt the read.
ppid = os.getpid()
pid = os.fork()
if pid == 0:
# Child code: sleep to give the parent enough time to enter the
# read() call (there's a race here, but it's really tricky to
# eliminate it); then signal the parent process. Also, sleep
# again to make it likely that the signal is delivered to the
# parent process before the child exits. If the child exits
# first, the write end of the pipe will be closed and the test
# is invalid.
try:
time.sleep(0.2)
os.kill(ppid, self.signum)
time.sleep(0.2)
finally:
# No matter what, just exit as fast as possible now.
exit_subprocess()
else:
# Parent code.
# Make sure the child is eventually reaped, else it'll be a
# zombie for the rest of the test suite run.
self.addCleanup(os.waitpid, pid, 0)
# Close the write end of the pipe. The child has a copy, so
# it's not really closed until the child exits. We need it to
# close when the child exits so that in the non-interrupt case
# the read eventually completes, otherwise we could just close
# it *after* the test.
os.close(w)
# Try the read and report whether it is interrupted or not to
# the caller.
try:
d = os.read(r, 1)
return False
except OSError, err:
if err.errno != errno.EINTR:
raise
return True
def test_without_siginterrupt(self):
"""If a signal handler is installed and siginterrupt is not called
at all, when that signal arrives, it interrupts a syscall that's in
progress.
"""
i = self.readpipe_interrupted()
self.assertTrue(i)
# Arrival of the signal shouldn't have changed anything.
i = self.readpipe_interrupted()
self.assertTrue(i)
def test_siginterrupt_on(self):
"""If a signal handler is installed and siginterrupt is called with
a true value for the second argument, when that signal arrives, it
interrupts a syscall that's in progress.
"""
signal.siginterrupt(self.signum, 1)
i = self.readpipe_interrupted()
self.assertTrue(i)
# Arrival of the signal shouldn't have changed anything.
i = self.readpipe_interrupted()
self.assertTrue(i)
def test_siginterrupt_off(self):
"""If a signal handler is installed and siginterrupt is called with
a false value for the second argument, when that signal arrives, it
does not interrupt a syscall that's in progress.
"""
signal.siginterrupt(self.signum, 0)
i = self.readpipe_interrupted()
self.assertFalse(i)
# Arrival of the signal shouldn't have changed anything.
i = self.readpipe_interrupted()
self.assertFalse(i)
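# signal.setitimer()/getitimer() with ITIMER_REAL, ITIMER_VIRTUAL and ITIMER_PROF.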
class ItimerTest(unittest.TestCase):
def setUp(self):
self.hndl_called = False
self.hndl_count = 0
self.itimer = None
self.old_alarm = signal.signal(signal.SIGALRM, self.sig_alrm)
def tearDown(self):
signal.signal(signal.SIGALRM, self.old_alarm)
if self.itimer is not None: # test_itimer_exc doesn't change this attr
# just ensure that itimer is stopped
signal.setitimer(self.itimer, 0)
def sig_alrm(self, *args):
self.hndl_called = True
if test_support.verbose:
print("SIGALRM handler invoked", args)
def sig_vtalrm(self, *args):
self.hndl_called = True
if self.hndl_count > 3:
# it shouldn't be here, because it should have been disabled.
raise signal.ItimerError("setitimer didn't disable ITIMER_VIRTUAL "
"timer.")
elif self.hndl_count == 3:
# disable ITIMER_VIRTUAL, this function shouldn't be called anymore
signal.setitimer(signal.ITIMER_VIRTUAL, 0)
if test_support.verbose:
print("last SIGVTALRM handler call")
self.hndl_count += 1
if test_support.verbose:
print("SIGVTALRM handler invoked", args)
def sig_prof(self, *args):
self.hndl_called = True
signal.setitimer(signal.ITIMER_PROF, 0)
if test_support.verbose:
print("SIGPROF handler invoked", args)
def test_itimer_exc(self):
# XXX I'm assuming -1 is an invalid itimer, but maybe some platform
# defines it ?
self.assertRaises(signal.ItimerError, signal.setitimer, -1, 0)
# Negative times are treated as zero on some platforms.
if 0:
self.assertRaises(signal.ItimerError,
signal.setitimer, signal.ITIMER_REAL, -1)
def test_itimer_real(self):
self.itimer = signal.ITIMER_REAL
signal.setitimer(self.itimer, 1.0)
if test_support.verbose:
print("\ncall pause()...")
signal.pause()
self.assertEqual(self.hndl_called, True)
def test_itimer_virtual(self):
# Issue 3864, unknown if this affects earlier versions of freebsd also
if sys.platform=='freebsd6':
if test_support.verbose:
sys.stderr.write('skipping -- itimer not reliable (does not '
'mix well with threading) on freebsd6\n')
return
self.itimer = signal.ITIMER_VIRTUAL
signal.signal(signal.SIGVTALRM, self.sig_vtalrm)
signal.setitimer(self.itimer, 0.3, 0.2)
start_time = time.time()
while time.time() - start_time < 60.0:
# use up some virtual time by doing real work
_ = pow(12345, 67890, 10000019)
if signal.getitimer(self.itimer) == (0.0, 0.0):
break # sig_vtalrm handler stopped this itimer
else: # Issue 8424
sys.stderr.write("test_itimer_virtual: timeout: likely cause: "
"machine too slow or load too high.\n")
return
# virtual itimer should be (0.0, 0.0) now
self.assertEquals(signal.getitimer(self.itimer), (0.0, 0.0))
# and the handler should have been called
self.assertEquals(self.hndl_called, True)
def test_itimer_prof(self):
# Issue 3864, unknown if this affects earlier versions of freebsd also
if sys.platform=='freebsd6':
if test_support.verbose:
sys.stderr.write('skipping -- itimer not reliable (does not '
'mix well with threading) on freebsd6\n')
return
self.itimer = signal.ITIMER_PROF
signal.signal(signal.SIGPROF, self.sig_prof)
signal.setitimer(self.itimer, 0.2, 0.2)
start_time = time.time()
while time.time() - start_time < 60.0:
# do some work
_ = pow(12345, 67890, 10000019)
if signal.getitimer(self.itimer) == (0.0, 0.0):
break # sig_prof handler stopped this itimer
else: # Issue 8424
sys.stdout.write("test_itimer_prof: timeout: likely cause: "
"machine too slow or load too high.\n")
return
# profiling itimer should be (0.0, 0.0) now
self.assertEquals(signal.getitimer(self.itimer), (0.0, 0.0))
# and the handler should have been called
self.assertEqual(self.hndl_called, True)
def test_main():
test_support.run_unittest(BasicSignalTests, InterProcessSignalTests,
WakeupSignalTests, SiginterruptTest, ItimerTest)
if __name__ == "__main__":
test_main()
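# Tests for smtplib: a minimal one-shot server thread, smtpd.DebuggingServer, and
# a simulated ESMTP server with configurable extensions.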
import asyncore
import email.utils
import socket
import threading
import smtpd
import smtplib
import StringIO
import sys
import time
import select
from unittest import TestCase
from test import test_support
HOST = test_support.HOST
def server(evt, buf, serv):
serv.listen(5)
evt.set()
try:
conn, addr = serv.accept()
except socket.timeout:
pass
else:
n = 500
while buf and n > 0:
r, w, e = select.select([], [conn], [])
if w:
sent = conn.send(buf)
buf = buf[sent:]
n -= 1
conn.close()
finally:
serv.close()
evt.set()
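# Connection basics against the one-shot server thread above: constructor forms,
# local_hostname, and timeout handling.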
class GeneralTests(TestCase):
def setUp(self):
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(15)
self.port = test_support.bind_port(self.sock)
servargs = (self.evt, "220 Hola mundo\n", self.sock)
threading.Thread(target=server, args=servargs).start()
self.evt.wait()
self.evt.clear()
def tearDown(self):
self.evt.wait()
def testBasic1(self):
# connects
smtp = smtplib.SMTP(HOST, self.port)
smtp.close()
def testBasic2(self):
# connects, include port in host name
smtp = smtplib.SMTP("%s:%s" % (HOST, self.port))
smtp.close()
def testLocalHostName(self):
# check that supplied local_hostname is used
smtp = smtplib.SMTP(HOST, self.port, local_hostname="testhost")
self.assertEqual(smtp.local_hostname, "testhost")
smtp.close()
def testTimeoutDefault(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
smtp = smtplib.SMTP(HOST, self.port)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(smtp.sock.gettimeout(), 30)
smtp.close()
def testTimeoutNone(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
smtp = smtplib.SMTP(HOST, self.port, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertTrue(smtp.sock.gettimeout() is None)
smtp.close()
def testTimeoutValue(self):
smtp = smtplib.SMTP(HOST, self.port, timeout=30)
self.assertEqual(smtp.sock.gettimeout(), 30)
smtp.close()
# Test server thread using the specified SMTP server class
def debugging_server(serv, serv_evt, client_evt):
serv_evt.set()
try:
if hasattr(select, 'poll'):
poll_fun = asyncore.poll2
else:
poll_fun = asyncore.poll
n = 1000
while asyncore.socket_map and n > 0:
poll_fun(0.01, asyncore.socket_map)
# when the client conversation is finished, it will
# set client_evt, and it's then ok to kill the server
if client_evt.is_set():
serv.close()
break
n -= 1
except socket.timeout:
pass
finally:
if not client_evt.is_set():
# allow some time for the client to read the result
time.sleep(0.5)
serv.close()
asyncore.close_all()
serv_evt.set()
MSG_BEGIN = '---------- MESSAGE FOLLOWS ----------\n'
MSG_END = '------------ END MESSAGE ------------\n'
# NOTE: Some SMTP objects in the tests below are created with a non-default
# local_hostname argument to the constructor, since (on some systems) the FQDN
# lookup caused by the default local_hostname sometimes takes so long that the
# test server times out, causing the test to fail.
# Test behavior of smtpd.DebuggingServer
class DebuggingServerTests(TestCase):
def setUp(self):
# temporarily replace sys.stdout to capture DebuggingServer output
self.old_stdout = sys.stdout
self.output = StringIO.StringIO()
sys.stdout = self.output
self.serv_evt = threading.Event()
self.client_evt = threading.Event()
# Pick a random unused port by passing 0 for the port number
self.serv = smtpd.DebuggingServer((HOST, 0), ('nowhere', -1))
# Keep a note of what port was assigned
self.port = self.serv.socket.getsockname()[1]
serv_args = (self.serv, self.serv_evt, self.client_evt)
threading.Thread(target=debugging_server, args=serv_args).start()
# wait until server thread has assigned a port number
self.serv_evt.wait()
self.serv_evt.clear()
def tearDown(self):
# indicate that the client is finished
self.client_evt.set()
# wait for the server thread to terminate
self.serv_evt.wait()
# restore sys.stdout
sys.stdout = self.old_stdout
def testBasic(self):
# connect
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.quit()
def testNOOP(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
expected = (250, 'Ok')
self.assertEqual(smtp.noop(), expected)
smtp.quit()
def testRSET(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
expected = (250, 'Ok')
self.assertEqual(smtp.rset(), expected)
smtp.quit()
def testNotImplemented(self):
# EHLO isn't implemented in DebuggingServer
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
expected = (502, 'Error: command "EHLO" not implemented')
self.assertEqual(smtp.ehlo(), expected)
smtp.quit()
def testVRFY(self):
# VRFY isn't implemented in DebuggingServer
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
expected = (502, 'Error: command "VRFY" not implemented')
self.assertEqual(smtp.vrfy('nobody@nowhere.com'), expected)
self.assertEqual(smtp.verify('nobody@nowhere.com'), expected)
smtp.quit()
def testSecondHELO(self):
# check that a second HELO returns a message that it's a duplicate
# (this behavior is specific to smtpd.SMTPChannel)
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.helo()
expected = (503, 'Duplicate HELO/EHLO')
self.assertEqual(smtp.helo(), expected)
smtp.quit()
def testHELP(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
self.assertEqual(smtp.help(), 'Error: command "HELP" not implemented')
smtp.quit()
def testSend(self):
# connect and send mail
m = 'A test message'
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.sendmail('John', 'Sally', m)
# XXX(nnorwitz): this test is flaky and dies with a bad file descriptor
# in asyncore. This sleep might help, but should really be fixed
# properly by using an Event variable.
time.sleep(0.01)
smtp.quit()
self.client_evt.set()
self.serv_evt.wait()
self.output.flush()
mexpect = '%s%s\n%s' % (MSG_BEGIN, m, MSG_END)
self.assertEqual(self.output.getvalue(), mexpect)
class NonConnectingTests(TestCase):
def testNotConnected(self):
# Test various operations on an unconnected SMTP object that
# should raise exceptions (at present the attempt in SMTP.send
# to reference the nonexistent 'sock' attribute of the SMTP object
# causes an AttributeError)
smtp = smtplib.SMTP()
self.assertRaises(smtplib.SMTPServerDisconnected, smtp.ehlo)
self.assertRaises(smtplib.SMTPServerDisconnected,
smtp.send, 'test msg')
def testNonnumericPort(self):
# check that non-numeric port raises socket.error
self.assertRaises(socket.error, smtplib.SMTP,
"localhost", "bogus")
self.assertRaises(socket.error, smtplib.SMTP,
"localhost:bogus")
# test response of client to a non-successful HELO message
class BadHELOServerTests(TestCase):
def setUp(self):
self.old_stdout = sys.stdout
self.output = StringIO.StringIO()
sys.stdout = self.output
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(15)
self.port = test_support.bind_port(self.sock)
servargs = (self.evt, "199 no hello for you!\n", self.sock)
threading.Thread(target=server, args=servargs).start()
self.evt.wait()
self.evt.clear()
def tearDown(self):
self.evt.wait()
sys.stdout = self.old_stdout
def testFailingHELO(self):
self.assertRaises(smtplib.SMTPConnectError, smtplib.SMTP,
HOST, self.port, 'localhost', 3)
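# Canned users, lists and AUTH credentials for the simulated ESMTP server below.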
sim_users = {'Mr.A@somewhere.com':'John A',
'Ms.B@somewhere.com':'Sally B',
'Mrs.C@somewhereesle.com':'Ruth C',
}
sim_auth = ('Mr.A@somewhere.com', 'somepassword')
sim_cram_md5_challenge = ('PENCeUxFREJoU0NnbmhNWitOMjNGNn'
'dAZWx3b29kLmlubm9zb2Z0LmNvbT4=')
sim_auth_credentials = {
'login': 'TXIuQUBzb21ld2hlcmUuY29t',
'plain': 'AE1yLkFAc29tZXdoZXJlLmNvbQBzb21lcGFzc3dvcmQ=',
'cram-md5': ('TXIUQUBZB21LD2HLCMUUY29TIDG4OWQ0MJ'
'KWZGQ4ODNMNDA4NTGXMDRLZWMYZJDMODG1'),
}
sim_auth_login_password = 'C29TZXBHC3N3B3JK'
sim_lists = {'list-1':['Mr.A@somewhere.com','Mrs.C@somewhereesle.com'],
'list-2':['Ms.B@somewhere.com',],
}
# Simulated SMTP channel & server
class SimSMTPChannel(smtpd.SMTPChannel):
def __init__(self, extra_features, *args, **kw):
self._extrafeatures = ''.join(
[ "250-{0}\r\n".format(x) for x in extra_features ])
smtpd.SMTPChannel.__init__(self, *args, **kw)
def smtp_EHLO(self, arg):
resp = ('250-testhost\r\n'
'250-EXPN\r\n'
'250-SIZE 20000000\r\n'
'250-STARTTLS\r\n'
'250-DELIVERBY\r\n')
resp = resp + self._extrafeatures + '250 HELP'
self.push(resp)
def smtp_VRFY(self, arg):
raw_addr = email.utils.parseaddr(arg)[1]
quoted_addr = smtplib.quoteaddr(arg)
if raw_addr in sim_users:
self.push('250 %s %s' % (sim_users[raw_addr], quoted_addr))
else:
self.push('550 No such user: %s' % arg)
def smtp_EXPN(self, arg):
list_name = email.utils.parseaddr(arg)[1].lower()
if list_name in sim_lists:
user_list = sim_lists[list_name]
for n, user_email in enumerate(user_list):
quoted_addr = smtplib.quoteaddr(user_email)
if n < len(user_list) - 1:
self.push('250-%s %s' % (sim_users[user_email], quoted_addr))
else:
self.push('250 %s %s' % (sim_users[user_email], quoted_addr))
else:
self.push('550 No access for you!')
def smtp_AUTH(self, arg):
if arg.strip().lower()=='cram-md5':
self.push('334 {0}'.format(sim_cram_md5_challenge))
return
mech, auth = arg.split()
mech = mech.lower()
if mech not in sim_auth_credentials:
self.push('504 auth type unimplemented')
return
if mech == 'plain' and auth==sim_auth_credentials['plain']:
self.push('235 plain auth ok')
elif mech=='login' and auth==sim_auth_credentials['login']:
self.push('334 Password:')
else:
self.push('550 No access for you!')
class SimSMTPServer(smtpd.SMTPServer):
def __init__(self, *args, **kw):
self._extra_features = []
smtpd.SMTPServer.__init__(self, *args, **kw)
def handle_accept(self):
conn, addr = self.accept()
self._SMTPchannel = SimSMTPChannel(self._extra_features,
self, conn, addr)
def process_message(self, peer, mailfrom, rcpttos, data):
pass
def add_feature(self, feature):
self._extra_features.append(feature)
# Test various SMTP & ESMTP commands/behaviors that require a simulated server
# (i.e., something with more features than DebuggingServer)
class SMTPSimTests(TestCase):
def setUp(self):
self.serv_evt = threading.Event()
self.client_evt = threading.Event()
# Pick a random unused port by passing 0 for the port number
self.serv = SimSMTPServer((HOST, 0), ('nowhere', -1))
# Keep a note of what port was assigned
self.port = self.serv.socket.getsockname()[1]
serv_args = (self.serv, self.serv_evt, self.client_evt)
threading.Thread(target=debugging_server, args=serv_args).start()
# wait until server thread has assigned a port number
self.serv_evt.wait()
self.serv_evt.clear()
def tearDown(self):
# indicate that the client is finished
self.client_evt.set()
# wait for the server thread to terminate
self.serv_evt.wait()
def testBasic(self):
# smoke test
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
smtp.quit()
def testEHLO(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
# no features should be present before the EHLO
self.assertEqual(smtp.esmtp_features, {})
# features expected from the test server
expected_features = {'expn':'',
'size': '20000000',
'starttls': '',
'deliverby': '',
'help': '',
}
smtp.ehlo()
self.assertEqual(smtp.esmtp_features, expected_features)
for k in expected_features:
self.assertTrue(smtp.has_extn(k))
self.assertFalse(smtp.has_extn('unsupported-feature'))
smtp.quit()
def testVRFY(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
for email, name in sim_users.items():
expected_known = (250, '%s %s' % (name, smtplib.quoteaddr(email)))
self.assertEqual(smtp.vrfy(email), expected_known)
u = 'nobody@nowhere.com'
expected_unknown = (550, 'No such user: %s' % smtplib.quoteaddr(u))
self.assertEqual(smtp.vrfy(u), expected_unknown)
smtp.quit()
def testEXPN(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
for listname, members in sim_lists.items():
users = []
for m in members:
users.append('%s %s' % (sim_users[m], smtplib.quoteaddr(m)))
expected_known = (250, '\n'.join(users))
self.assertEqual(smtp.expn(listname), expected_known)
u = 'PSU-Members-List'
expected_unknown = (550, 'No access for you!')
self.assertEqual(smtp.expn(u), expected_unknown)
smtp.quit()
def testAUTH_PLAIN(self):
self.serv.add_feature("AUTH PLAIN")
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
expected_auth_ok = (235, b'plain auth ok')
self.assertEqual(smtp.login(sim_auth[0], sim_auth[1]), expected_auth_ok)
# SimSMTPChannel doesn't fully support LOGIN or CRAM-MD5 auth because they
# require a synchronous read to obtain the credentials...so instead smtpd
# sees the credential sent by smtplib's login method as an unknown command,
# which results in smtplib raising an auth error. Fortunately the error
# message contains the encoded credential, so we can partially check that it
# was generated correctly (partially, because the 'word' is uppercased in
# the error message).
def testAUTH_LOGIN(self):
self.serv.add_feature("AUTH LOGIN")
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
try: smtp.login(sim_auth[0], sim_auth[1])
except smtplib.SMTPAuthenticationError as err:
if sim_auth_login_password not in str(err):
self.fail("expected encoded password not found in error message")
def testAUTH_CRAM_MD5(self):
self.serv.add_feature("AUTH CRAM-MD5")
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
try: smtp.login(sim_auth[0], sim_auth[1])
except smtplib.SMTPAuthenticationError as err:
if sim_auth_credentials['cram-md5'] not in str(err):
self.fail("expected encoded credentials not found in error message")
#TODO: add tests for correct AUTH method fallback now that the
#test infrastructure can support it.
def test_main(verbose=None):
test_support.run_unittest(GeneralTests, DebuggingServerTests,
NonConnectingTests,
BadHELOServerTests, SMTPSimTests)
if __name__ == '__main__':
test_main()
#!/usr/bin/env python
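# Tests for the socket module: threaded TCP/UDP client/server fixtures plus
# module-level API checks (name resolution, byte-order helpers, inet_* conversions).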
import unittest
from test import test_support
import errno
import socket
import select
import thread, threading
import time
import traceback
import Queue
import sys
import os
import array
from weakref import proxy
import signal
HOST = test_support.HOST
MSG = 'Michael Gilfix was here\n'
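# Fixtures that bind a listening TCP server socket or a UDP socket to an
# ephemeral port for the tests below.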
class SocketTCPTest(unittest.TestCase):
def setUp(self):
self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.port = test_support.bind_port(self.serv)
self.serv.listen(1)
def tearDown(self):
self.serv.close()
self.serv = None
class SocketUDPTest(unittest.TestCase):
def setUp(self):
self.serv = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.port = test_support.bind_port(self.serv)
def tearDown(self):
self.serv.close()
self.serv = None
class ThreadableTest:
"""Threadable Test class
The ThreadableTest class makes it easy to create a threaded
client/server pair from an existing unit test. To create a
new threaded class from an existing unit test, use multiple
inheritance:
class NewClass (OldClass, ThreadableTest):
pass
This class defines two new fixture functions with obvious
purposes for overriding:
clientSetUp ()
clientTearDown ()
Any new test functions within the class must then define
tests in pairs, where the test name is preceded by a
'_' to indicate the client portion of the test. Ex:
def testFoo(self):
# Server portion
def _testFoo(self):
# Client portion
Any exceptions raised by the clients during their tests
are caught and transferred to the main thread to alert
the testing framework.
Note, the server setup function cannot call any blocking
functions that rely on the client thread during setup,
unless serverExplicitReady() is called just before
the blocking call (such as in setting up a client/server
connection and performing the accept() in setUp()).
"""
def __init__(self):
# Swap the true setup function
self.__setUp = self.setUp
self.__tearDown = self.tearDown
self.setUp = self._setUp
self.tearDown = self._tearDown
def serverExplicitReady(self):
"""This method allows the server to explicitly indicate that
it wants the client thread to proceed. This is useful if the
server is about to execute a blocking routine that is
dependent upon the client thread during its setup routine."""
self.server_ready.set()
def _setUp(self):
self.server_ready = threading.Event()
self.client_ready = threading.Event()
self.done = threading.Event()
self.queue = Queue.Queue(1)
# Do some munging to start the client test.
methodname = self.id()
i = methodname.rfind('.')
methodname = methodname[i+1:]
test_method = getattr(self, '_' + methodname)
self.client_thread = thread.start_new_thread(
self.clientRun, (test_method,))
self.__setUp()
if not self.server_ready.is_set():
self.server_ready.set()
self.client_ready.wait()
def _tearDown(self):
self.__tearDown()
self.done.wait()
if not self.queue.empty():
msg = self.queue.get()
self.fail(msg)
def clientRun(self, test_func):
self.server_ready.wait()
self.client_ready.set()
self.clientSetUp()
with test_support._check_py3k_warnings():
if not callable(test_func):
raise TypeError("test_func must be a callable function.")
try:
test_func()
except Exception, strerror:
self.queue.put(strerror)
self.clientTearDown()
def clientSetUp(self):
raise NotImplementedError("clientSetUp must be implemented.")
def clientTearDown(self):
self.done.set()
thread.exit()
class ThreadedTCPSocketTest(SocketTCPTest, ThreadableTest):
def __init__(self, methodName='runTest'):
SocketTCPTest.__init__(self, methodName=methodName)
ThreadableTest.__init__(self)
def clientSetUp(self):
self.cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def clientTearDown(self):
self.cli.close()
self.cli = None
ThreadableTest.clientTearDown(self)
class ThreadedUDPSocketTest(SocketUDPTest, ThreadableTest):
def __init__(self, methodName='runTest'):
SocketUDPTest.__init__(self, methodName=methodName)
ThreadableTest.__init__(self)
def clientSetUp(self):
self.cli = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
class SocketConnectedTest(ThreadedTCPSocketTest):
def __init__(self, methodName='runTest'):
ThreadedTCPSocketTest.__init__(self, methodName=methodName)
def setUp(self):
ThreadedTCPSocketTest.setUp(self)
# Indicate explicitly we're ready for the client thread to
# proceed and then perform the blocking call to accept
self.serverExplicitReady()
conn, addr = self.serv.accept()
self.cli_conn = conn
def tearDown(self):
self.cli_conn.close()
self.cli_conn = None
ThreadedTCPSocketTest.tearDown(self)
def clientSetUp(self):
ThreadedTCPSocketTest.clientSetUp(self)
self.cli.connect((HOST, self.port))
self.serv_conn = self.cli
def clientTearDown(self):
self.serv_conn.close()
self.serv_conn = None
ThreadedTCPSocketTest.clientTearDown(self)
class SocketPairTest(unittest.TestCase, ThreadableTest):
def __init__(self, methodName='runTest'):
unittest.TestCase.__init__(self, methodName=methodName)
ThreadableTest.__init__(self)
def setUp(self):
self.serv, self.cli = socket.socketpair()
def tearDown(self):
self.serv.close()
self.serv = None
def clientSetUp(self):
pass
def clientTearDown(self):
self.cli.close()
self.cli = None
ThreadableTest.clientTearDown(self)
#######################################################################
## Begin Tests
class GeneralModuleTests(unittest.TestCase):
def test_weakref(self):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
p = proxy(s)
self.assertEqual(p.fileno(), s.fileno())
s.close()
s = None
try:
p.fileno()
except ReferenceError:
pass
else:
self.fail('Socket proxy still exists')
def testSocketError(self):
# Testing socket module exceptions
def raise_error(*args, **kwargs):
raise socket.error
def raise_herror(*args, **kwargs):
raise socket.herror
def raise_gaierror(*args, **kwargs):
raise socket.gaierror
self.failUnlessRaises(socket.error, raise_error,
"Error raising socket exception.")
self.failUnlessRaises(socket.error, raise_herror,
"Error raising socket exception.")
self.failUnlessRaises(socket.error, raise_gaierror,
"Error raising socket exception.")
def testCrucialConstants(self):
# Testing for mission critical constants
socket.AF_INET
socket.SOCK_STREAM
socket.SOCK_DGRAM
socket.SOCK_RAW
socket.SOCK_RDM
socket.SOCK_SEQPACKET
socket.SOL_SOCKET
socket.SO_REUSEADDR
def testHostnameRes(self):
# Testing hostname resolution mechanisms
hostname = socket.gethostname()
try:
ip = socket.gethostbyname(hostname)
except socket.error:
# Probably name lookup wasn't set up right; skip this test
return
self.assert_(ip.find('.') >= 0, "Error resolving host to ip.")
try:
hname, aliases, ipaddrs = socket.gethostbyaddr(ip)
except socket.error:
# Probably a similar problem as above; skip this test
return
all_host_names = [hostname, hname] + aliases
fqhn = socket.getfqdn(ip)
if not fqhn in all_host_names:
self.fail("Error testing host resolution mechanisms. (fqdn: %s, all: %s)" % (fqhn, repr(all_host_names)))
def testRefCountGetNameInfo(self):
# Testing reference count for getnameinfo
if hasattr(sys, "getrefcount"):
try:
# On some versions, this loses a reference
orig = sys.getrefcount(__name__)
socket.getnameinfo(__name__,0)
except TypeError:
self.assertEqual(sys.getrefcount(__name__), orig,
"socket.getnameinfo loses a reference")
def testInterpreterCrash(self):
# Making sure getnameinfo doesn't crash the interpreter
try:
# On some versions, this crashes the interpreter.
socket.getnameinfo(('x', 0, 0, 0), 0)
except socket.error:
pass
def testNtoH(self):
# This just checks that htons etc. are their own inverse,
# when looking at the lower 16 or 32 bits.
sizes = {socket.htonl: 32, socket.ntohl: 32,
socket.htons: 16, socket.ntohs: 16}
for func, size in sizes.items():
mask = (1L<<size) - 1
for i in (0, 1, 0xffff, ~0xffff, 2, 0x01234567, 0x76543210):
self.assertEqual(i & mask, func(func(i&mask)) & mask)
swapped = func(mask)
self.assertEqual(swapped & mask, mask)
self.assertRaises(OverflowError, func, 1L<<34)
def testNtoHErrors(self):
good_values = [ 1, 2, 3, 1L, 2L, 3L ]
bad_values = [ -1, -2, -3, -1L, -2L, -3L ]
for k in good_values:
socket.ntohl(k)
socket.ntohs(k)
socket.htonl(k)
socket.htons(k)
for k in bad_values:
self.assertRaises(OverflowError, socket.ntohl, k)
self.assertRaises(OverflowError, socket.ntohs, k)
self.assertRaises(OverflowError, socket.htonl, k)
self.assertRaises(OverflowError, socket.htons, k)
def testGetServBy(self):
eq = self.assertEqual
# Find one service that exists, then check all the related interfaces.
# I've ordered this by protocols that have both a tcp and udp
# protocol, at least for modern Linuxes.
if sys.platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
'freebsd7', 'freebsd8', 'darwin'):
# avoid the 'echo' service on this platform, as there is an
# assumption breaking non-standard port/protocol entry
services = ('daytime', 'qotd', 'domain')
else:
services = ('echo', 'daytime', 'domain')
for service in services:
try:
port = socket.getservbyname(service, 'tcp')
break
except socket.error:
pass
else:
raise socket.error
# Try same call with optional protocol omitted
port2 = socket.getservbyname(service)
eq(port, port2)
# Try udp, but don't barf if it doesn't exist
try:
udpport = socket.getservbyname(service, 'udp')
except socket.error:
udpport = None
else:
eq(udpport, port)
# Now make sure the lookup by port returns the same service name
eq(socket.getservbyport(port2), service)
eq(socket.getservbyport(port, 'tcp'), service)
if udpport is not None:
eq(socket.getservbyport(udpport, 'udp'), service)
def testDefaultTimeout(self):
# Testing default timeout
# The default timeout should initially be None
self.assertEqual(socket.getdefaulttimeout(), None)
s = socket.socket()
self.assertEqual(s.gettimeout(), None)
s.close()
# Set the default timeout to 10, and see if it propagates
socket.setdefaulttimeout(10)
self.assertEqual(socket.getdefaulttimeout(), 10)
s = socket.socket()
self.assertEqual(s.gettimeout(), 10)
s.close()
# Reset the default timeout to None, and see if it propagates
socket.setdefaulttimeout(None)
self.assertEqual(socket.getdefaulttimeout(), None)
s = socket.socket()
self.assertEqual(s.gettimeout(), None)
s.close()
# Check that setting it to an invalid value raises ValueError
self.assertRaises(ValueError, socket.setdefaulttimeout, -1)
# Check that setting it to an invalid type raises TypeError
self.assertRaises(TypeError, socket.setdefaulttimeout, "spam")
def testIPv4_inet_aton_fourbytes(self):
if not hasattr(socket, 'inet_aton'):
return # No inet_aton, nothing to check
# Test that issue1008086 and issue767150 are fixed.
# It must return 4 bytes.
self.assertEquals('\x00'*4, socket.inet_aton('0.0.0.0'))
self.assertEquals('\xff'*4, socket.inet_aton('255.255.255.255'))
def testIPv4toString(self):
if not hasattr(socket, 'inet_pton'):
return # No inet_pton() on this platform
from socket import inet_aton as f, inet_pton, AF_INET
g = lambda a: inet_pton(AF_INET, a)
self.assertEquals('\x00\x00\x00\x00', f('0.0.0.0'))
self.assertEquals('\xff\x00\xff\x00', f('255.0.255.0'))
self.assertEquals('\xaa\xaa\xaa\xaa', f('170.170.170.170'))
self.assertEquals('\x01\x02\x03\x04', f('1.2.3.4'))
self.assertEquals('\xff\xff\xff\xff', f('255.255.255.255'))
self.assertEquals('\x00\x00\x00\x00', g('0.0.0.0'))
self.assertEquals('\xff\x00\xff\x00', g('255.0.255.0'))
self.assertEquals('\xaa\xaa\xaa\xaa', g('170.170.170.170'))
self.assertEquals('\xff\xff\xff\xff', g('255.255.255.255'))
def testIPv6toString(self):
if not hasattr(socket, 'inet_pton'):
return # No inet_pton() on this platform
try:
from socket import inet_pton, AF_INET6, has_ipv6
if not has_ipv6:
return
except ImportError:
return
f = lambda a: inet_pton(AF_INET6, a)
self.assertEquals('\x00' * 16, f('::'))
self.assertEquals('\x00' * 16, f('0::0'))
self.assertEquals('\x00\x01' + '\x00' * 14, f('1::'))
self.assertEquals(
'\x45\xef\x76\xcb\x00\x1a\x56\xef\xaf\xeb\x0b\xac\x19\x24\xae\xae',
f('45ef:76cb:1a:56ef:afeb:bac:1924:aeae')
)
def testStringToIPv4(self):
if not hasattr(socket, 'inet_ntop'):
return # No inet_ntop() on this platform
from socket import inet_ntoa as f, inet_ntop, AF_INET
g = lambda a: inet_ntop(AF_INET, a)
self.assertEquals('1.0.1.0', f('\x01\x00\x01\x00'))
self.assertEquals('170.85.170.85', f('\xaa\x55\xaa\x55'))
self.assertEquals('255.255.255.255', f('\xff\xff\xff\xff'))
self.assertEquals('1.2.3.4', f('\x01\x02\x03\x04'))
self.assertEquals('1.0.1.0', g('\x01\x00\x01\x00'))
self.assertEquals('170.85.170.85', g('\xaa\x55\xaa\x55'))
self.assertEquals('255.255.255.255', g('\xff\xff\xff\xff'))
def testStringToIPv6(self):
if not hasattr(socket, 'inet_ntop'):
return # No inet_ntop() on this platform
try:
from socket import inet_ntop, AF_INET6, has_ipv6
if not has_ipv6:
return
except ImportError:
return
f = lambda a: inet_ntop(AF_INET6, a)
self.assertEquals('::', f('\x00' * 16))
self.assertEquals('::1', f('\x00' * 15 + '\x01'))
self.assertEquals(
'aef:b01:506:1001:ffff:9997:55:170',
f('\x0a\xef\x0b\x01\x05\x06\x10\x01\xff\xff\x99\x97\x00\x55\x01\x70')
)
# XXX The following don't test module-level functionality...
def testSockName(self):
# Testing getsockname(). Use a temporary socket to elicit an unused
# ephemeral port that we can use later in the test.
tempsock = socket.socket()
tempsock.bind(("0.0.0.0", 0))
(host, port) = tempsock.getsockname()
tempsock.close()
del tempsock
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(("0.0.0.0", port))
name = sock.getsockname()
# XXX(nnorwitz): http://tinyurl.com/os5jz seems to indicate
# it reasonable to get the host's addr in addition to 0.0.0.0.
# At least for eCos. This is required for the S/390 to pass.
try:
my_ip_addr = socket.gethostbyname(socket.gethostname())
except socket.error:
# Probably name lookup wasn't set up right; skip this test
return
self.assert_(name[0] in ("0.0.0.0", my_ip_addr), '%s invalid' % name[0])
self.assertEqual(name[1], port)
def testGetSockOpt(self):
# Testing getsockopt()
# We know a socket should start without reuse==0
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
reuse = sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR)
self.failIf(reuse != 0, "initial mode is reuse")
def testSetSockOpt(self):
# Testing setsockopt()
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
reuse = sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR)
self.failIf(reuse == 0, "failed to set reuse mode")
def testSendAfterClose(self):
# testing send() after close() with timeout
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(1)
sock.close()
self.assertRaises(socket.error, sock.send, "spam")
def testNewAttributes(self):
# testing .family, .type and .protocol
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.assertEqual(sock.family, socket.AF_INET)
self.assertEqual(sock.type, socket.SOCK_STREAM)
self.assertEqual(sock.proto, 0)
sock.close()
def test_sock_ioctl(self):
if os.name != "nt":
return
self.assert_(hasattr(socket.socket, 'ioctl'))
self.assert_(hasattr(socket, 'SIO_RCVALL'))
self.assert_(hasattr(socket, 'RCVALL_ON'))
self.assert_(hasattr(socket, 'RCVALL_OFF'))
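# Connected TCP tests: each server-side testX method has a matching client-side
# _testX counterpart that runs in the client thread.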
class BasicTCPTest(SocketConnectedTest):
def __init__(self, methodName='runTest'):
SocketConnectedTest.__init__(self, methodName=methodName)
def testRecv(self):
# Testing large receive over TCP
msg = self.cli_conn.recv(1024)
self.assertEqual(msg, MSG)
def _testRecv(self):
self.serv_conn.send(MSG)
def testOverFlowRecv(self):
# Testing receive in chunks over TCP
seg1 = self.cli_conn.recv(len(MSG) - 3)
seg2 = self.cli_conn.recv(1024)
msg = seg1 + seg2
self.assertEqual(msg, MSG)
def _testOverFlowRecv(self):
self.serv_conn.send(MSG)
def testRecvFrom(self):
# Testing large recvfrom() over TCP
msg, addr = self.cli_conn.recvfrom(1024)
self.assertEqual(msg, MSG)
def _testRecvFrom(self):
self.serv_conn.send(MSG)
def testOverFlowRecvFrom(self):
# Testing recvfrom() in chunks over TCP
seg1, addr = self.cli_conn.recvfrom(len(MSG)-3)
seg2, addr = self.cli_conn.recvfrom(1024)
msg = seg1 + seg2
self.assertEqual(msg, MSG)
def _testOverFlowRecvFrom(self):
self.serv_conn.send(MSG)
def testSendAll(self):
# Testing sendall() with a 2048 byte string over TCP
msg = ''
while 1:
read = self.cli_conn.recv(1024)
if not read:
break
msg += read
self.assertEqual(msg, 'f' * 2048)
def _testSendAll(self):
big_chunk = 'f' * 2048
self.serv_conn.sendall(big_chunk)
def testFromFd(self):
# Testing fromfd()
if not hasattr(socket, "fromfd"):
return # On Windows, this doesn't exist
fd = self.cli_conn.fileno()
sock = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM)
msg = sock.recv(1024)
self.assertEqual(msg, MSG)
def _testFromFd(self):
self.serv_conn.send(MSG)
def testShutdown(self):
# Testing shutdown()
msg = self.cli_conn.recv(1024)
self.assertEqual(msg, MSG)
# wait for _testShutdown to finish: on OS X, when the server
# closes the connection the client also becomes disconnected,
# and the client's shutdown call will fail. (Issue #4397.)
self.done.wait()
def _testShutdown(self):
self.serv_conn.send(MSG)
self.serv_conn.shutdown(2)
class BasicUDPTest(ThreadedUDPSocketTest):
def __init__(self, methodName='runTest'):
ThreadedUDPSocketTest.__init__(self, methodName=methodName)
def testSendtoAndRecv(self):
        # Testing sendto() and recv() over UDP
msg = self.serv.recv(len(MSG))
self.assertEqual(msg, MSG)
def _testSendtoAndRecv(self):
self.cli.sendto(MSG, 0, (HOST, self.port))
def testRecvFrom(self):
# Testing recvfrom() over UDP
msg, addr = self.serv.recvfrom(len(MSG))
self.assertEqual(msg, MSG)
def _testRecvFrom(self):
self.cli.sendto(MSG, 0, (HOST, self.port))
def testRecvFromNegative(self):
# Negative lengths passed to recvfrom should give ValueError.
self.assertRaises(ValueError, self.serv.recvfrom, -1)
def _testRecvFromNegative(self):
self.cli.sendto(MSG, 0, (HOST, self.port))
class TCPCloserTest(ThreadedTCPSocketTest):
def testClose(self):
conn, addr = self.serv.accept()
conn.close()
sd = self.cli
read, write, err = select.select([sd], [], [], 1.0)
self.assertEqual(read, [sd])
self.assertEqual(sd.recv(1), '')
def _testClose(self):
self.cli.connect((HOST, self.port))
time.sleep(1.0)
class BasicSocketPairTest(SocketPairTest):
def __init__(self, methodName='runTest'):
SocketPairTest.__init__(self, methodName=methodName)
def testRecv(self):
msg = self.serv.recv(1024)
self.assertEqual(msg, MSG)
def _testRecv(self):
self.cli.send(MSG)
def testSend(self):
self.serv.send(MSG)
def _testSend(self):
msg = self.cli.recv(1024)
self.assertEqual(msg, MSG)
class NonBlockingTCPTests(ThreadedTCPSocketTest):
def __init__(self, methodName='runTest'):
ThreadedTCPSocketTest.__init__(self, methodName=methodName)
def testSetBlocking(self):
# Testing whether set blocking works
self.serv.setblocking(0)
start = time.time()
try:
self.serv.accept()
except socket.error:
pass
end = time.time()
self.assert_((end - start) < 1.0, "Error setting non-blocking mode.")
def _testSetBlocking(self):
pass
def testAccept(self):
# Testing non-blocking accept
self.serv.setblocking(0)
try:
conn, addr = self.serv.accept()
except socket.error:
pass
else:
self.fail("Error trying to do non-blocking accept.")
read, write, err = select.select([self.serv], [], [])
if self.serv in read:
conn, addr = self.serv.accept()
else:
self.fail("Error trying to do accept after select.")
def _testAccept(self):
time.sleep(0.1)
self.cli.connect((HOST, self.port))
def testConnect(self):
# Testing non-blocking connect
conn, addr = self.serv.accept()
def _testConnect(self):
self.cli.settimeout(10)
self.cli.connect((HOST, self.port))
def testRecv(self):
# Testing non-blocking recv
conn, addr = self.serv.accept()
conn.setblocking(0)
try:
msg = conn.recv(len(MSG))
except socket.error:
pass
else:
self.fail("Error trying to do non-blocking recv.")
read, write, err = select.select([conn], [], [])
if conn in read:
msg = conn.recv(len(MSG))
self.assertEqual(msg, MSG)
else:
self.fail("Error during select call to non-blocking socket.")
def _testRecv(self):
self.cli.connect((HOST, self.port))
time.sleep(0.1)
self.cli.send(MSG)
class FileObjectClassTestCase(SocketConnectedTest):
bufsize = -1 # Use default buffer size
def __init__(self, methodName='runTest'):
SocketConnectedTest.__init__(self, methodName=methodName)
def setUp(self):
SocketConnectedTest.setUp(self)
self.serv_file = self.cli_conn.makefile('rb', self.bufsize)
def tearDown(self):
self.serv_file.close()
self.assert_(self.serv_file.closed)
self.serv_file = None
SocketConnectedTest.tearDown(self)
def clientSetUp(self):
SocketConnectedTest.clientSetUp(self)
self.cli_file = self.serv_conn.makefile('wb')
def clientTearDown(self):
self.cli_file.close()
self.assert_(self.cli_file.closed)
self.cli_file = None
SocketConnectedTest.clientTearDown(self)
def testSmallRead(self):
# Performing small file read test
first_seg = self.serv_file.read(len(MSG)-3)
second_seg = self.serv_file.read(3)
msg = first_seg + second_seg
self.assertEqual(msg, MSG)
def _testSmallRead(self):
self.cli_file.write(MSG)
self.cli_file.flush()
def testFullRead(self):
# read until EOF
msg = self.serv_file.read()
self.assertEqual(msg, MSG)
def _testFullRead(self):
self.cli_file.write(MSG)
self.cli_file.close()
def testUnbufferedRead(self):
# Performing unbuffered file read test
buf = ''
while 1:
char = self.serv_file.read(1)
if not char:
break
buf += char
self.assertEqual(buf, MSG)
def _testUnbufferedRead(self):
self.cli_file.write(MSG)
self.cli_file.flush()
def testReadline(self):
# Performing file readline test
line = self.serv_file.readline()
self.assertEqual(line, MSG)
def _testReadline(self):
self.cli_file.write(MSG)
self.cli_file.flush()
def testReadlineAfterRead(self):
a_baloo_is = self.serv_file.read(len("A baloo is"))
self.assertEqual("A baloo is", a_baloo_is)
_a_bear = self.serv_file.read(len(" a bear"))
self.assertEqual(" a bear", _a_bear)
line = self.serv_file.readline()
self.assertEqual("\n", line)
line = self.serv_file.readline()
self.assertEqual("A BALOO IS A BEAR.\n", line)
line = self.serv_file.readline()
self.assertEqual(MSG, line)
def _testReadlineAfterRead(self):
self.cli_file.write("A baloo is a bear\n")
self.cli_file.write("A BALOO IS A BEAR.\n")
self.cli_file.write(MSG)
self.cli_file.flush()
def testReadlineAfterReadNoNewline(self):
end_of_ = self.serv_file.read(len("End Of "))
self.assertEqual("End Of ", end_of_)
line = self.serv_file.readline()
self.assertEqual("Line", line)
def _testReadlineAfterReadNoNewline(self):
self.cli_file.write("End Of Line")
def testClosedAttr(self):
self.assert_(not self.serv_file.closed)
def _testClosedAttr(self):
self.assert_(not self.cli_file.closed)
class FileObjectInterruptedTestCase(unittest.TestCase):
"""Test that the file object correctly handles EINTR internally."""
class MockSocket(object):
def __init__(self, recv_funcs=()):
# A generator that returns callables that we'll call for each
# call to recv().
self._recv_step = iter(recv_funcs)
def recv(self, size):
return self._recv_step.next()()
@staticmethod
def _raise_eintr():
raise socket.error(errno.EINTR)
def _test_readline(self, size=-1, **kwargs):
mock_sock = self.MockSocket(recv_funcs=[
lambda : "This is the first line\nAnd the sec",
self._raise_eintr,
lambda : "ond line is here\n",
lambda : "",
])
fo = socket._fileobject(mock_sock, **kwargs)
self.assertEquals(fo.readline(size), "This is the first line\n")
self.assertEquals(fo.readline(size), "And the second line is here\n")
def _test_read(self, size=-1, **kwargs):
mock_sock = self.MockSocket(recv_funcs=[
lambda : "This is the first line\nAnd the sec",
self._raise_eintr,
lambda : "ond line is here\n",
lambda : "",
])
fo = socket._fileobject(mock_sock, **kwargs)
self.assertEquals(fo.read(size), "This is the first line\n"
"And the second line is here\n")
def test_default(self):
self._test_readline()
self._test_readline(size=100)
self._test_read()
self._test_read(size=100)
def test_with_1k_buffer(self):
self._test_readline(bufsize=1024)
self._test_readline(size=100, bufsize=1024)
self._test_read(bufsize=1024)
self._test_read(size=100, bufsize=1024)
def _test_readline_no_buffer(self, size=-1):
mock_sock = self.MockSocket(recv_funcs=[
lambda : "aa",
lambda : "\n",
lambda : "BB",
self._raise_eintr,
lambda : "bb",
lambda : "",
])
fo = socket._fileobject(mock_sock, bufsize=0)
self.assertEquals(fo.readline(size), "aa\n")
self.assertEquals(fo.readline(size), "BBbb")
def test_no_buffer(self):
self._test_readline_no_buffer()
self._test_readline_no_buffer(size=4)
self._test_read(bufsize=0)
self._test_read(size=100, bufsize=0)
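# A minimal, self-contained sketch of the behaviour verified above (it is not
# used by the test suite, and the mock socket name is made up): in Python 2.7,
# socket._fileobject retries a recv() that fails with EINTR instead of
# surfacing the error to the caller.
def _eintr_retry_sketch():
    class OneEintrSocket(object):
        def __init__(self):
            self.calls = 0
        def recv(self, size):
            self.calls += 1
            if self.calls == 1:
                raise socket.error(errno.EINTR)  # interrupted once...
            if self.calls == 2:
                return "data arrives after the retry\n"
            return ""  # EOF
    fo = socket._fileobject(OneEintrSocket())
    # readline() swallows the EINTR and returns the data from the retried recv()
    assert fo.readline() == "data arrives after the retry\n"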
class UnbufferedFileObjectClassTestCase(FileObjectClassTestCase):
"""Repeat the tests from FileObjectClassTestCase with bufsize==0.
In this case (and in this case only), it should be possible to
create a file object, read a line from it, create another file
object, read another line from it, without loss of data in the
first file object's buffer. Note that httplib relies on this
when reading multiple requests from the same socket."""
bufsize = 0 # Use unbuffered mode
def testUnbufferedReadline(self):
# Read a line, create a new file object, read another line with it
line = self.serv_file.readline() # first line
self.assertEqual(line, "A. " + MSG) # first line
self.serv_file = self.cli_conn.makefile('rb', 0)
line = self.serv_file.readline() # second line
self.assertEqual(line, "B. " + MSG) # second line
def _testUnbufferedReadline(self):
self.cli_file.write("A. " + MSG)
self.cli_file.write("B. " + MSG)
self.cli_file.flush()
class LineBufferedFileObjectClassTestCase(FileObjectClassTestCase):
bufsize = 1 # Default-buffered for reading; line-buffered for writing
class SmallBufferedFileObjectClassTestCase(FileObjectClassTestCase):
bufsize = 2 # Exercise the buffering code
class NetworkConnectionTest(object):
"""Prove network connection."""
def clientSetUp(self):
# We're inherited below by BasicTCPTest2, which also inherits
# BasicTCPTest, which defines self.port referenced below.
self.cli = socket.create_connection((HOST, self.port))
self.serv_conn = self.cli
class BasicTCPTest2(NetworkConnectionTest, BasicTCPTest):
"""Tests that NetworkConnection does not break existing TCP functionality.
"""
class NetworkConnectionNoServer(unittest.TestCase):
def testWithoutServer(self):
port = test_support.find_unused_port()
self.failUnlessRaises(
socket.error,
lambda: socket.create_connection((HOST, port))
)
class NetworkConnectionAttributesTest(SocketTCPTest, ThreadableTest):
def __init__(self, methodName='runTest'):
SocketTCPTest.__init__(self, methodName=methodName)
ThreadableTest.__init__(self)
def clientSetUp(self):
pass
def clientTearDown(self):
self.cli.close()
self.cli = None
ThreadableTest.clientTearDown(self)
def _justAccept(self):
conn, addr = self.serv.accept()
testFamily = _justAccept
def _testFamily(self):
self.cli = socket.create_connection((HOST, self.port), timeout=30)
self.assertEqual(self.cli.family, 2)
testTimeoutDefault = _justAccept
def _testTimeoutDefault(self):
# passing no explicit timeout uses socket's global default
self.assert_(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(42)
try:
self.cli = socket.create_connection((HOST, self.port))
finally:
socket.setdefaulttimeout(None)
self.assertEquals(self.cli.gettimeout(), 42)
testTimeoutNone = _justAccept
def _testTimeoutNone(self):
# None timeout means the same as sock.settimeout(None)
self.assert_(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
self.cli = socket.create_connection((HOST, self.port), timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(self.cli.gettimeout(), None)
testTimeoutValueNamed = _justAccept
def _testTimeoutValueNamed(self):
self.cli = socket.create_connection((HOST, self.port), timeout=30)
self.assertEqual(self.cli.gettimeout(), 30)
testTimeoutValueNonamed = _justAccept
def _testTimeoutValueNonamed(self):
self.cli = socket.create_connection((HOST, self.port), 30)
self.assertEqual(self.cli.gettimeout(), 30)
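# Summary of the create_connection() timeout handling exercised above:
#   - no timeout argument   -> the new socket uses socket.getdefaulttimeout()
#   - timeout=None          -> a blocking socket, regardless of the global default
#   - timeout=<number>      -> that value, passed either by name or by position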
class NetworkConnectionBehaviourTest(SocketTCPTest, ThreadableTest):
def __init__(self, methodName='runTest'):
SocketTCPTest.__init__(self, methodName=methodName)
ThreadableTest.__init__(self)
def clientSetUp(self):
pass
def clientTearDown(self):
self.cli.close()
self.cli = None
ThreadableTest.clientTearDown(self)
def testInsideTimeout(self):
conn, addr = self.serv.accept()
time.sleep(3)
conn.send("done!")
testOutsideTimeout = testInsideTimeout
def _testInsideTimeout(self):
self.cli = sock = socket.create_connection((HOST, self.port))
data = sock.recv(5)
self.assertEqual(data, "done!")
def _testOutsideTimeout(self):
self.cli = sock = socket.create_connection((HOST, self.port), timeout=1)
self.failUnlessRaises(socket.timeout, lambda: sock.recv(5))
class TestIssue9543(SocketTCPTest, NetworkConnectionTest):
"""
This test exercises the code in the _fileobject.flush() method when the
whole write buffer hasn't been flushed.
See http://bugs.python.org/issue9543
"""
# XXX: this is just a sanity check, proper tests for flush() should still
# be added
def setUp(self):
SocketTCPTest.setUp(self)
NetworkConnectionTest.clientSetUp(self)
def test_issue9543(self):
self.cli.close()
file_a = self.serv.makefile('w')
file_a.write('x')
# flush() will try to send data to self.cli and raise an error because
# it's closed
self.assertRaises(socket.error, file_a.flush)
# close() will raise an error too, because it calls flush() before
# closing the file
self.assertRaises(socket.error, file_a.close)
self.assertTrue(file_a.closed)
class Urllib2FileobjectTest(unittest.TestCase):
# urllib2.HTTPHandler has "borrowed" socket._fileobject, and requires that
# it close the socket if the close c'tor argument is true
def testClose(self):
class MockSocket:
closed = False
def flush(self): pass
def close(self): self.closed = True
# must not close unless we request it: the original use of _fileobject
# by module socket requires that the underlying socket not be closed until
# the _socketobject that created the _fileobject is closed
s = MockSocket()
f = socket._fileobject(s)
f.close()
self.assert_(not s.closed)
s = MockSocket()
f = socket._fileobject(s, close=True)
f.close()
self.assert_(s.closed)
class TCPTimeoutTest(SocketTCPTest):
def testTCPTimeout(self):
def raise_timeout(*args, **kwargs):
self.serv.settimeout(1.0)
self.serv.accept()
self.failUnlessRaises(socket.timeout, raise_timeout,
"Error generating a timeout exception (TCP)")
def testTimeoutZero(self):
ok = False
try:
self.serv.settimeout(0.0)
foo = self.serv.accept()
except socket.timeout:
self.fail("caught timeout instead of error (TCP)")
except socket.error:
ok = True
except:
self.fail("caught unexpected exception (TCP)")
if not ok:
self.fail("accept() returned success when we did not expect it")
def testInterruptedTimeout(self):
# XXX I don't know how to do this test on MSWindows or any other
        # platform that doesn't support signal.alarm() or os.kill(), though
# the bug should have existed on all platforms.
if not hasattr(signal, "alarm"):
return # can only test on *nix
self.serv.settimeout(5.0) # must be longer than alarm
class Alarm(Exception):
pass
def alarm_handler(signal, frame):
raise Alarm
old_alarm = signal.signal(signal.SIGALRM, alarm_handler)
try:
signal.alarm(2) # POSIX allows alarm to be up to 1 second early
try:
foo = self.serv.accept()
except socket.timeout:
self.fail("caught timeout instead of Alarm")
except Alarm:
pass
except:
self.fail("caught other exception instead of Alarm:"
" %s(%s):\n%s" %
(sys.exc_info()[:2] + (traceback.format_exc(),)))
else:
self.fail("nothing caught")
finally:
signal.alarm(0) # shut off alarm
except Alarm:
self.fail("got Alarm in wrong place")
finally:
# no alarm can be pending. Safe to restore old handler.
signal.signal(signal.SIGALRM, old_alarm)
class UDPTimeoutTest(SocketTCPTest):
def testUDPTimeout(self):
def raise_timeout(*args, **kwargs):
self.serv.settimeout(1.0)
self.serv.recv(1024)
self.failUnlessRaises(socket.timeout, raise_timeout,
"Error generating a timeout exception (UDP)")
def testTimeoutZero(self):
ok = False
try:
self.serv.settimeout(0.0)
foo = self.serv.recv(1024)
except socket.timeout:
self.fail("caught timeout instead of error (UDP)")
except socket.error:
ok = True
except:
self.fail("caught unexpected exception (UDP)")
if not ok:
self.fail("recv() returned success when we did not expect it")
class TestExceptions(unittest.TestCase):
def testExceptionTree(self):
self.assert_(issubclass(socket.error, Exception))
self.assert_(issubclass(socket.herror, socket.error))
self.assert_(issubclass(socket.gaierror, socket.error))
self.assert_(issubclass(socket.timeout, socket.error))
class TestLinuxAbstractNamespace(unittest.TestCase):
UNIX_PATH_MAX = 108
def testLinuxAbstractNamespace(self):
address = "\x00python-test-hello\x00\xff"
s1 = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s1.bind(address)
s1.listen(1)
s2 = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s2.connect(s1.getsockname())
s1.accept()
self.assertEqual(s1.getsockname(), address)
self.assertEqual(s2.getpeername(), address)
def testMaxName(self):
address = "\x00" + "h" * (self.UNIX_PATH_MAX - 1)
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s.bind(address)
self.assertEqual(s.getsockname(), address)
def testNameOverflow(self):
address = "\x00" + "h" * self.UNIX_PATH_MAX
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.assertRaises(socket.error, s.bind, address)
class BufferIOTest(SocketConnectedTest):
"""
Test the buffer versions of socket.recv() and socket.send().
"""
def __init__(self, methodName='runTest'):
SocketConnectedTest.__init__(self, methodName=methodName)
def testRecvInto(self):
buf = array.array('c', ' '*1024)
nbytes = self.cli_conn.recv_into(buf)
self.assertEqual(nbytes, len(MSG))
msg = buf.tostring()[:len(MSG)]
self.assertEqual(msg, MSG)
def _testRecvInto(self):
with test_support._check_py3k_warnings():
buf = buffer(MSG)
self.serv_conn.send(buf)
def testRecvFromInto(self):
buf = array.array('c', ' '*1024)
nbytes, addr = self.cli_conn.recvfrom_into(buf)
self.assertEqual(nbytes, len(MSG))
msg = buf.tostring()[:len(MSG)]
self.assertEqual(msg, MSG)
def _testRecvFromInto(self):
with test_support._check_py3k_warnings():
buf = buffer(MSG)
self.serv_conn.send(buf)
TIPC_STYPE = 2000
TIPC_LOWER = 200
TIPC_UPPER = 210
def isTipcAvailable():
"""Check if the TIPC module is loaded
The TIPC module is not loaded automatically on Ubuntu and probably
other Linux distros.
"""
if not hasattr(socket, "AF_TIPC"):
return False
if not os.path.isfile("/proc/modules"):
return False
with open("/proc/modules") as f:
for line in f:
if line.startswith("tipc "):
return True
if test_support.verbose:
print "TIPC module is not loaded, please 'sudo modprobe tipc'"
return False
class TIPCTest (unittest.TestCase):
def testRDM(self):
srv = socket.socket(socket.AF_TIPC, socket.SOCK_RDM)
cli = socket.socket(socket.AF_TIPC, socket.SOCK_RDM)
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
srvaddr = (socket.TIPC_ADDR_NAMESEQ, TIPC_STYPE,
TIPC_LOWER, TIPC_UPPER)
srv.bind(srvaddr)
sendaddr = (socket.TIPC_ADDR_NAME, TIPC_STYPE,
TIPC_LOWER + (TIPC_UPPER - TIPC_LOWER) / 2, 0)
cli.sendto(MSG, sendaddr)
msg, recvaddr = srv.recvfrom(1024)
self.assertEqual(cli.getsockname(), recvaddr)
self.assertEqual(msg, MSG)
class TIPCThreadableTest (unittest.TestCase, ThreadableTest):
def __init__(self, methodName = 'runTest'):
unittest.TestCase.__init__(self, methodName = methodName)
ThreadableTest.__init__(self)
def setUp(self):
self.srv = socket.socket(socket.AF_TIPC, socket.SOCK_STREAM)
self.srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
srvaddr = (socket.TIPC_ADDR_NAMESEQ, TIPC_STYPE,
TIPC_LOWER, TIPC_UPPER)
self.srv.bind(srvaddr)
self.srv.listen(5)
self.serverExplicitReady()
self.conn, self.connaddr = self.srv.accept()
def clientSetUp(self):
        # There is a hittable race between serverExplicitReady() and the
# accept() call; sleep a little while to avoid it, otherwise
# we could get an exception
time.sleep(0.1)
self.cli = socket.socket(socket.AF_TIPC, socket.SOCK_STREAM)
addr = (socket.TIPC_ADDR_NAME, TIPC_STYPE,
TIPC_LOWER + (TIPC_UPPER - TIPC_LOWER) / 2, 0)
self.cli.connect(addr)
self.cliaddr = self.cli.getsockname()
def testStream(self):
msg = self.conn.recv(1024)
self.assertEqual(msg, MSG)
self.assertEqual(self.cliaddr, self.connaddr)
def _testStream(self):
self.cli.send(MSG)
self.cli.close()
def test_main():
tests = [GeneralModuleTests, BasicTCPTest, TCPCloserTest, TCPTimeoutTest,
TestExceptions, BufferIOTest, BasicTCPTest2]
if sys.platform != 'mac':
tests.extend([ BasicUDPTest, UDPTimeoutTest ])
tests.extend([
NonBlockingTCPTests,
FileObjectClassTestCase,
FileObjectInterruptedTestCase,
UnbufferedFileObjectClassTestCase,
LineBufferedFileObjectClassTestCase,
SmallBufferedFileObjectClassTestCase,
Urllib2FileobjectTest,
NetworkConnectionNoServer,
NetworkConnectionAttributesTest,
NetworkConnectionBehaviourTest,
TestIssue9543,
])
if hasattr(socket, "socketpair"):
tests.append(BasicSocketPairTest)
if sys.platform == 'linux2':
tests.append(TestLinuxAbstractNamespace)
if isTipcAvailable():
tests.append(TIPCTest)
tests.append(TIPCThreadableTest)
thread_info = test_support.threading_setup()
test_support.run_unittest(*tests)
test_support.threading_cleanup(*thread_info)
if __name__ == "__main__":
test_main()
"""
Test suite for SocketServer.py.
"""
import contextlib
import errno
import imp
import os
import select
import signal
import socket
import tempfile
import threading
import time
import unittest
import SocketServer
import test.test_support
from test.test_support import reap_children, verbose, TestSkipped
from test.test_support import TESTFN as TEST_FILE
test.test_support.requires("network")
TEST_STR = "hello world\n"
HOST = test.test_support.HOST
HAVE_UNIX_SOCKETS = hasattr(socket, "AF_UNIX")
HAVE_FORKING = hasattr(os, "fork") and os.name != "os2"
def signal_alarm(n):
"""Call signal.alarm when it exists (i.e. not on Windows)."""
if hasattr(signal, 'alarm'):
signal.alarm(n)
def receive(sock, n, timeout=20):
r, w, x = select.select([sock], [], [], timeout)
if sock in r:
return sock.recv(n)
else:
raise RuntimeError, "timed out on %r" % (sock,)
if HAVE_UNIX_SOCKETS:
class ForkingUnixStreamServer(SocketServer.ForkingMixIn,
SocketServer.UnixStreamServer):
pass
class ForkingUnixDatagramServer(SocketServer.ForkingMixIn,
SocketServer.UnixDatagramServer):
pass
@contextlib.contextmanager
def simple_subprocess(testcase):
pid = os.fork()
if pid == 0:
# Don't throw an exception; it would be caught by the test harness.
os._exit(72)
yield None
pid2, status = os.waitpid(pid, 0)
testcase.assertEquals(pid2, pid)
testcase.assertEquals(72 << 8, status)
class SocketServerTest(unittest.TestCase):
"""Test all socket servers."""
def setUp(self):
signal_alarm(20) # Kill deadlocks after 20 seconds.
self.port_seed = 0
self.test_files = []
def tearDown(self):
signal_alarm(0) # Didn't deadlock.
reap_children()
for fn in self.test_files:
try:
os.remove(fn)
except os.error:
pass
self.test_files[:] = []
def pickaddr(self, proto):
if proto == socket.AF_INET:
return (HOST, 0)
else:
# XXX: We need a way to tell AF_UNIX to pick its own name
# like AF_INET provides port==0.
dir = None
if os.name == 'os2':
dir = '\socket'
fn = tempfile.mktemp(prefix='unix_socket.', dir=dir)
if os.name == 'os2':
# AF_UNIX socket names on OS/2 require a specific prefix
# which can't include a drive letter and must also use
# backslashes as directory separators
if fn[1] == ':':
fn = fn[2:]
if fn[0] in (os.sep, os.altsep):
fn = fn[1:]
if os.sep == '/':
fn = fn.replace(os.sep, os.altsep)
else:
fn = fn.replace(os.altsep, os.sep)
self.test_files.append(fn)
return fn
def make_server(self, addr, svrcls, hdlrbase):
class MyServer(svrcls):
def handle_error(self, request, client_address):
self.close_request(request)
self.server_close()
raise
class MyHandler(hdlrbase):
def handle(self):
line = self.rfile.readline()
self.wfile.write(line)
if verbose: print "creating server"
server = MyServer(addr, MyHandler)
self.assertEquals(server.server_address, server.socket.getsockname())
return server
def run_server(self, svrcls, hdlrbase, testfunc):
server = self.make_server(self.pickaddr(svrcls.address_family),
svrcls, hdlrbase)
# We had the OS pick a port, so pull the real address out of
# the server.
addr = server.server_address
if verbose:
print "server created"
print "ADDR =", addr
print "CLASS =", svrcls
t = threading.Thread(
name='%s serving' % svrcls,
target=server.serve_forever,
# Short poll interval to make the test finish quickly.
# Time between requests is short enough that we won't wake
# up spuriously too many times.
kwargs={'poll_interval':0.01})
t.daemon = True # In case this function raises.
t.start()
if verbose: print "server running"
for i in range(3):
if verbose: print "test client", i
testfunc(svrcls.address_family, addr)
if verbose: print "waiting for server"
server.shutdown()
t.join()
if verbose: print "done"
def stream_examine(self, proto, addr):
s = socket.socket(proto, socket.SOCK_STREAM)
s.connect(addr)
s.sendall(TEST_STR)
buf = data = receive(s, 100)
while data and '\n' not in buf:
data = receive(s, 100)
buf += data
self.assertEquals(buf, TEST_STR)
s.close()
def dgram_examine(self, proto, addr):
s = socket.socket(proto, socket.SOCK_DGRAM)
s.sendto(TEST_STR, addr)
buf = data = receive(s, 100)
while data and '\n' not in buf:
data = receive(s, 100)
buf += data
self.assertEquals(buf, TEST_STR)
s.close()
def test_TCPServer(self):
self.run_server(SocketServer.TCPServer,
SocketServer.StreamRequestHandler,
self.stream_examine)
def test_ThreadingTCPServer(self):
self.run_server(SocketServer.ThreadingTCPServer,
SocketServer.StreamRequestHandler,
self.stream_examine)
if HAVE_FORKING:
def test_ForkingTCPServer(self):
with simple_subprocess(self):
self.run_server(SocketServer.ForkingTCPServer,
SocketServer.StreamRequestHandler,
self.stream_examine)
if HAVE_UNIX_SOCKETS:
def test_UnixStreamServer(self):
self.run_server(SocketServer.UnixStreamServer,
SocketServer.StreamRequestHandler,
self.stream_examine)
def test_ThreadingUnixStreamServer(self):
self.run_server(SocketServer.ThreadingUnixStreamServer,
SocketServer.StreamRequestHandler,
self.stream_examine)
if HAVE_FORKING:
def test_ForkingUnixStreamServer(self):
with simple_subprocess(self):
self.run_server(ForkingUnixStreamServer,
SocketServer.StreamRequestHandler,
self.stream_examine)
def test_UDPServer(self):
self.run_server(SocketServer.UDPServer,
SocketServer.DatagramRequestHandler,
self.dgram_examine)
def test_ThreadingUDPServer(self):
self.run_server(SocketServer.ThreadingUDPServer,
SocketServer.DatagramRequestHandler,
self.dgram_examine)
if HAVE_FORKING:
def test_ForkingUDPServer(self):
with simple_subprocess(self):
self.run_server(SocketServer.ForkingUDPServer,
SocketServer.DatagramRequestHandler,
self.dgram_examine)
# Alas, on Linux (at least) recvfrom() doesn't return a meaningful
# client address so this cannot work:
# if HAVE_UNIX_SOCKETS:
# def test_UnixDatagramServer(self):
# self.run_server(SocketServer.UnixDatagramServer,
# SocketServer.DatagramRequestHandler,
# self.dgram_examine)
#
# def test_ThreadingUnixDatagramServer(self):
# self.run_server(SocketServer.ThreadingUnixDatagramServer,
# SocketServer.DatagramRequestHandler,
# self.dgram_examine)
#
# if HAVE_FORKING:
# def test_ForkingUnixDatagramServer(self):
# self.run_server(SocketServer.ForkingUnixDatagramServer,
# SocketServer.DatagramRequestHandler,
# self.dgram_examine)
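    # (Likely explanation, for reference only: an unbound AF_UNIX datagram
    # client has no address, so the server's recvfrom() reports an empty
    # sender address and the handler has nowhere to send its reply.)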
def test_shutdown(self):
        # Issue #2302: shutdown() should always succeed in making another
        # thread leave serve_forever().
class MyServer(SocketServer.TCPServer):
pass
class MyHandler(SocketServer.StreamRequestHandler):
pass
threads = []
for i in range(20):
s = MyServer((HOST, 0), MyHandler)
t = threading.Thread(
name='MyServer serving',
target=s.serve_forever,
kwargs={'poll_interval':0.01})
t.daemon = True # In case this function raises.
threads.append((t, s))
for t, s in threads:
t.start()
s.shutdown()
for t, s in threads:
t.join()
def test_main():
if imp.lock_held():
# If the import lock is held, the threads will hang
raise TestSkipped("can't run when import lock is held")
test.test_support.run_unittest(SocketServerTest)
if __name__ == "__main__":
test_main()
signal_alarm(3) # Shutdown shouldn't take more than 3 seconds.
import unittest
from test import test_support
import subprocess
import sys
import signal
import os
import errno
import tempfile
import time
import re
mswindows = (sys.platform == "win32")
#
# Depends on the following external programs: Python
#
if mswindows:
SETBINARY = ('import msvcrt; msvcrt.setmode(sys.stdout.fileno(), '
'os.O_BINARY);')
else:
SETBINARY = ''
# In a debug build, stuff like "[6580 refs]" is printed to stderr at
# shutdown time. That frustrates tests trying to check stderr produced
# from a spawned Python process.
def remove_stderr_debug_decorations(stderr):
return re.sub(r"\[\d+ refs\]\r?\n?$", "", stderr)
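# Illustrative examples for the helper above (the refcount value is made up):
#   remove_stderr_debug_decorations("warning\n[6580 refs]\n") -> "warning\n"
#   remove_stderr_debug_decorations("plain stderr output")    -> "plain stderr output"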
class ProcessTestCase(unittest.TestCase):
def setUp(self):
# Try to minimize the number of children we have so this test
# doesn't crash on some buildbots (Alphas in particular).
if hasattr(test_support, "reap_children"):
test_support.reap_children()
def tearDown(self):
# Try to minimize the number of children we have so this test
# doesn't crash on some buildbots (Alphas in particular).
if hasattr(test_support, "reap_children"):
test_support.reap_children()
def mkstemp(self):
"""wrapper for mkstemp, calling mktemp if mkstemp is not available"""
if hasattr(tempfile, "mkstemp"):
return tempfile.mkstemp()
else:
fname = tempfile.mktemp()
return os.open(fname, os.O_RDWR|os.O_CREAT), fname
#
# Generic tests
#
def test_call_seq(self):
# call() function with sequence argument
rc = subprocess.call([sys.executable, "-c",
"import sys; sys.exit(47)"])
self.assertEqual(rc, 47)
def test_check_call_zero(self):
# check_call() function with zero return code
rc = subprocess.check_call([sys.executable, "-c",
"import sys; sys.exit(0)"])
self.assertEqual(rc, 0)
def test_check_call_nonzero(self):
# check_call() function with non-zero return code
try:
subprocess.check_call([sys.executable, "-c",
"import sys; sys.exit(47)"])
except subprocess.CalledProcessError, e:
self.assertEqual(e.returncode, 47)
else:
self.fail("Expected CalledProcessError")
def test_call_kwargs(self):
# call() function with keyword args
newenv = os.environ.copy()
newenv["FRUIT"] = "banana"
rc = subprocess.call([sys.executable, "-c",
'import sys, os;' \
'sys.exit(os.getenv("FRUIT")=="banana")'],
env=newenv)
self.assertEqual(rc, 1)
def test_stdin_none(self):
# .stdin is None when not redirected
p = subprocess.Popen([sys.executable, "-c", 'print "banana"'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
self.assertEqual(p.stdin, None)
def test_stdout_none(self):
# .stdout is None when not redirected
p = subprocess.Popen([sys.executable, "-c",
'print " this bit of output is from a '
'test of stdout in a different '
'process ..."'],
stdin=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
self.assertEqual(p.stdout, None)
def test_stderr_none(self):
# .stderr is None when not redirected
p = subprocess.Popen([sys.executable, "-c", 'print "banana"'],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
p.wait()
self.assertEqual(p.stderr, None)
def test_executable(self):
p = subprocess.Popen(["somethingyoudonthave",
"-c", "import sys; sys.exit(47)"],
executable=sys.executable)
p.wait()
self.assertEqual(p.returncode, 47)
def test_stdin_pipe(self):
# stdin redirection
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.exit(sys.stdin.read() == "pear")'],
stdin=subprocess.PIPE)
p.stdin.write("pear")
p.stdin.close()
p.wait()
self.assertEqual(p.returncode, 1)
def test_stdin_filedes(self):
# stdin is set to open file descriptor
tf = tempfile.TemporaryFile()
d = tf.fileno()
os.write(d, "pear")
os.lseek(d, 0, 0)
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.exit(sys.stdin.read() == "pear")'],
stdin=d)
p.wait()
self.assertEqual(p.returncode, 1)
def test_stdin_fileobj(self):
# stdin is set to open file object
tf = tempfile.TemporaryFile()
tf.write("pear")
tf.seek(0)
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.exit(sys.stdin.read() == "pear")'],
stdin=tf)
p.wait()
self.assertEqual(p.returncode, 1)
def test_stdout_pipe(self):
# stdout redirection
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stdout.write("orange")'],
stdout=subprocess.PIPE)
self.assertEqual(p.stdout.read(), "orange")
def test_stdout_filedes(self):
# stdout is set to open file descriptor
tf = tempfile.TemporaryFile()
d = tf.fileno()
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stdout.write("orange")'],
stdout=d)
p.wait()
os.lseek(d, 0, 0)
self.assertEqual(os.read(d, 1024), "orange")
def test_stdout_fileobj(self):
# stdout is set to open file object
tf = tempfile.TemporaryFile()
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stdout.write("orange")'],
stdout=tf)
p.wait()
tf.seek(0)
self.assertEqual(tf.read(), "orange")
def test_stderr_pipe(self):
# stderr redirection
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stderr.write("strawberry")'],
stderr=subprocess.PIPE)
self.assertEqual(remove_stderr_debug_decorations(p.stderr.read()),
"strawberry")
def test_stderr_filedes(self):
# stderr is set to open file descriptor
tf = tempfile.TemporaryFile()
d = tf.fileno()
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stderr.write("strawberry")'],
stderr=d)
p.wait()
os.lseek(d, 0, 0)
self.assertEqual(remove_stderr_debug_decorations(os.read(d, 1024)),
"strawberry")
def test_stderr_fileobj(self):
# stderr is set to open file object
tf = tempfile.TemporaryFile()
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stderr.write("strawberry")'],
stderr=tf)
p.wait()
tf.seek(0)
self.assertEqual(remove_stderr_debug_decorations(tf.read()),
"strawberry")
def test_stdout_stderr_pipe(self):
# capture stdout and stderr to the same pipe
p = subprocess.Popen([sys.executable, "-c",
'import sys;' \
'sys.stdout.write("apple");' \
'sys.stdout.flush();' \
'sys.stderr.write("orange")'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
output = p.stdout.read()
stripped = remove_stderr_debug_decorations(output)
self.assertEqual(stripped, "appleorange")
def test_stdout_stderr_file(self):
# capture stdout and stderr to the same open file
tf = tempfile.TemporaryFile()
p = subprocess.Popen([sys.executable, "-c",
'import sys;' \
'sys.stdout.write("apple");' \
'sys.stdout.flush();' \
'sys.stderr.write("orange")'],
stdout=tf,
stderr=tf)
p.wait()
tf.seek(0)
output = tf.read()
stripped = remove_stderr_debug_decorations(output)
self.assertEqual(stripped, "appleorange")
def test_stdout_filedes_of_stdout(self):
# stdout is set to 1 (#1531862).
cmd = r"import sys, os; sys.exit(os.write(sys.stdout.fileno(), '.\n'))"
rc = subprocess.call([sys.executable, "-c", cmd], stdout=1)
self.assertEquals(rc, 2)
def test_cwd(self):
tmpdir = tempfile.gettempdir()
# We cannot use os.path.realpath to canonicalize the path,
# since it doesn't expand Tru64 {memb} strings. See bug 1063571.
cwd = os.getcwd()
os.chdir(tmpdir)
tmpdir = os.getcwd()
os.chdir(cwd)
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;' \
'sys.stdout.write(os.getcwd())'],
stdout=subprocess.PIPE,
cwd=tmpdir)
normcase = os.path.normcase
self.assertEqual(normcase(p.stdout.read()), normcase(tmpdir))
def test_env(self):
newenv = os.environ.copy()
newenv["FRUIT"] = "orange"
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;' \
'sys.stdout.write(os.getenv("FRUIT"))'],
stdout=subprocess.PIPE,
env=newenv)
self.assertEqual(p.stdout.read(), "orange")
def test_communicate_stdin(self):
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.exit(sys.stdin.read() == "pear")'],
stdin=subprocess.PIPE)
p.communicate("pear")
self.assertEqual(p.returncode, 1)
def test_communicate_stdout(self):
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stdout.write("pineapple")'],
stdout=subprocess.PIPE)
(stdout, stderr) = p.communicate()
self.assertEqual(stdout, "pineapple")
self.assertEqual(stderr, None)
def test_communicate_stderr(self):
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stderr.write("pineapple")'],
stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate()
self.assertEqual(stdout, None)
self.assertEqual(remove_stderr_debug_decorations(stderr), "pineapple")
def test_communicate(self):
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;'
'sys.stderr.write("pineapple");'
'sys.stdout.write(sys.stdin.read())'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate("banana")
self.assertEqual(stdout, "banana")
self.assertEqual(remove_stderr_debug_decorations(stderr),
"pineapple")
# This test is Linux specific for simplicity to at least have
# some coverage. It is not a platform specific bug.
if os.path.isdir('/proc/%d/fd' % os.getpid()):
# Test for the fd leak reported in http://bugs.python.org/issue2791.
def test_communicate_pipe_fd_leak(self):
fd_directory = '/proc/%d/fd' % os.getpid()
num_fds_before_popen = len(os.listdir(fd_directory))
p = subprocess.Popen([sys.executable, '-c', 'print()'],
stdout=subprocess.PIPE)
p.communicate()
num_fds_after_communicate = len(os.listdir(fd_directory))
del p
num_fds_after_destruction = len(os.listdir(fd_directory))
self.assertEqual(num_fds_before_popen, num_fds_after_destruction)
self.assertEqual(num_fds_before_popen, num_fds_after_communicate)
def test_communicate_returns(self):
# communicate() should return None if no redirection is active
p = subprocess.Popen([sys.executable, "-c",
"import sys; sys.exit(47)"])
(stdout, stderr) = p.communicate()
self.assertEqual(stdout, None)
self.assertEqual(stderr, None)
def test_communicate_pipe_buf(self):
# communicate() with writes larger than pipe_buf
# This test will probably deadlock rather than fail, if
# communicate() does not work properly.
x, y = os.pipe()
if mswindows:
pipe_buf = 512
else:
pipe_buf = os.fpathconf(x, "PC_PIPE_BUF")
os.close(x)
os.close(y)
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;'
'sys.stdout.write(sys.stdin.read(47));' \
'sys.stderr.write("xyz"*%d);' \
'sys.stdout.write(sys.stdin.read())' % pipe_buf],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
string_to_write = "abc"*pipe_buf
(stdout, stderr) = p.communicate(string_to_write)
self.assertEqual(stdout, string_to_write)
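        # Informal note on why a naive approach could deadlock here: the parent
        # hands the child 3 * pipe_buf bytes on stdin while the child echoes them
        # to stdout and also writes to stderr; writing everything before reading
        # would let the output pipes fill up and block both processes on write().
        # communicate() avoids that by servicing all pipes concurrently
        # (threads on Windows, select/poll on POSIX).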
def test_writes_before_communicate(self):
# stdin.write before communicate()
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;' \
'sys.stdout.write(sys.stdin.read())'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
p.stdin.write("banana")
(stdout, stderr) = p.communicate("split")
self.assertEqual(stdout, "bananasplit")
self.assertEqual(remove_stderr_debug_decorations(stderr), "")
def test_universal_newlines(self):
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;' + SETBINARY +
'sys.stdout.write("line1\\n");'
'sys.stdout.flush();'
'sys.stdout.write("line2\\r");'
'sys.stdout.flush();'
'sys.stdout.write("line3\\r\\n");'
'sys.stdout.flush();'
'sys.stdout.write("line4\\r");'
'sys.stdout.flush();'
'sys.stdout.write("\\nline5");'
'sys.stdout.flush();'
'sys.stdout.write("\\nline6");'],
stdout=subprocess.PIPE,
universal_newlines=1)
stdout = p.stdout.read()
if hasattr(file, 'newlines'):
# Interpreter with universal newline support
self.assertEqual(stdout,
"line1\nline2\nline3\nline4\nline5\nline6")
else:
# Interpreter without universal newline support
self.assertEqual(stdout,
"line1\nline2\rline3\r\nline4\r\nline5\nline6")
def test_universal_newlines_communicate(self):
# universal newlines through communicate()
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;' + SETBINARY +
'sys.stdout.write("line1\\n");'
'sys.stdout.flush();'
'sys.stdout.write("line2\\r");'
'sys.stdout.flush();'
'sys.stdout.write("line3\\r\\n");'
'sys.stdout.flush();'
'sys.stdout.write("line4\\r");'
'sys.stdout.flush();'
'sys.stdout.write("\\nline5");'
'sys.stdout.flush();'
'sys.stdout.write("\\nline6");'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
universal_newlines=1)
(stdout, stderr) = p.communicate()
if hasattr(file, 'newlines'):
# Interpreter with universal newline support
self.assertEqual(stdout,
"line1\nline2\nline3\nline4\nline5\nline6")
else:
# Interpreter without universal newline support
self.assertEqual(stdout, "line1\nline2\rline3\r\nline4\r\nline5\nline6")
def test_no_leaking(self):
# Make sure we leak no resources
if not hasattr(test_support, "is_resource_enabled") \
or test_support.is_resource_enabled("subprocess") and not mswindows:
max_handles = 1026 # too much for most UNIX systems
else:
max_handles = 65
for i in range(max_handles):
p = subprocess.Popen([sys.executable, "-c",
"import sys;sys.stdout.write(sys.stdin.read())"],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
data = p.communicate("lime")[0]
self.assertEqual(data, "lime")
def test_list2cmdline(self):
self.assertEqual(subprocess.list2cmdline(['a b c', 'd', 'e']),
'"a b c" d e')
self.assertEqual(subprocess.list2cmdline(['ab"c', '\\', 'd']),
'ab\\"c \\ d')
self.assertEqual(subprocess.list2cmdline(['ab"c', ' \\', 'd']),
'ab\\"c " \\\\" d')
self.assertEqual(subprocess.list2cmdline(['a\\\\\\b', 'de fg', 'h']),
'a\\\\\\b "de fg" h')
self.assertEqual(subprocess.list2cmdline(['a\\"b', 'c', 'd']),
'a\\\\\\"b c d')
self.assertEqual(subprocess.list2cmdline(['a\\\\b c', 'd', 'e']),
'"a\\\\b c" d e')
self.assertEqual(subprocess.list2cmdline(['a\\\\b\\ c', 'd', 'e']),
'"a\\\\b\\ c" d e')
self.assertEqual(subprocess.list2cmdline(['ab', '']),
'ab ""')
def test_poll(self):
p = subprocess.Popen([sys.executable,
"-c", "import time; time.sleep(1)"])
count = 0
while p.poll() is None:
time.sleep(0.1)
count += 1
# We expect that the poll loop probably went around about 10 times,
# but, based on system scheduling we can't control, it's possible
# poll() never returned None. It "should be" very rare that it
# didn't go around at least twice.
self.assert_(count >= 2)
# Subsequent invocations should just return the returncode
self.assertEqual(p.poll(), 0)
def test_wait(self):
p = subprocess.Popen([sys.executable,
"-c", "import time; time.sleep(2)"])
self.assertEqual(p.wait(), 0)
# Subsequent invocations should just return the returncode
self.assertEqual(p.wait(), 0)
def test_invalid_bufsize(self):
# an invalid type of the bufsize argument should raise
# TypeError.
try:
subprocess.Popen([sys.executable, "-c", "pass"], "orange")
except TypeError:
pass
else:
self.fail("Expected TypeError")
def test_leaking_fds_on_error(self):
# see bug #5179: Popen leaks file descriptors to PIPEs if
# the child fails to execute; this will eventually exhaust
# the maximum number of open fds. 1024 seems a very common
# value for that limit, but Windows has 2048, so we loop
# 1024 times (each call leaked two fds).
for i in range(1024):
try:
subprocess.Popen(['nonexisting_i_hope'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
# Windows raises IOError
except (IOError, OSError), err:
if err.errno != 2: # ignore "no such file"
raise
#
# POSIX tests
#
if not mswindows:
def test_exceptions(self):
            # caught & re-raised exceptions
try:
p = subprocess.Popen([sys.executable, "-c", ""],
cwd="/this/path/does/not/exist")
except OSError, e:
# The attribute child_traceback should contain "os.chdir"
# somewhere.
self.assertNotEqual(e.child_traceback.find("os.chdir"), -1)
else:
self.fail("Expected OSError")
def _suppress_core_files(self):
"""Try to prevent core files from being created.
Returns previous ulimit if successful, else None.
"""
if sys.platform == 'darwin':
# Check if the 'Crash Reporter' on OSX was configured
# in 'Developer' mode and warn that it will get triggered
# when it is.
#
# This assumes that this context manager is used in tests
# that might trigger the next manager.
value = subprocess.Popen(['/usr/bin/defaults', 'read',
'com.apple.CrashReporter', 'DialogType'],
stdout=subprocess.PIPE).communicate()[0]
if value.strip() == b'developer':
print "this tests triggers the Crash Reporter, that is intentional"
sys.stdout.flush()
try:
import resource
old_limit = resource.getrlimit(resource.RLIMIT_CORE)
resource.setrlimit(resource.RLIMIT_CORE, (0,0))
return old_limit
except (ImportError, ValueError, resource.error):
return None
def _unsuppress_core_files(self, old_limit):
"""Return core file behavior to default."""
if old_limit is None:
return
try:
import resource
resource.setrlimit(resource.RLIMIT_CORE, old_limit)
except (ImportError, ValueError, resource.error):
return
def test_run_abort(self):
# returncode handles signal termination
old_limit = self._suppress_core_files()
try:
p = subprocess.Popen([sys.executable,
"-c", "import os; os.abort()"])
finally:
self._unsuppress_core_files(old_limit)
p.wait()
self.assertEqual(-p.returncode, signal.SIGABRT)
def test_preexec(self):
# preexec function
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;' \
'sys.stdout.write(os.getenv("FRUIT"))'],
stdout=subprocess.PIPE,
preexec_fn=lambda: os.putenv("FRUIT", "apple"))
self.assertEqual(p.stdout.read(), "apple")
def test_args_string(self):
# args is a string
f, fname = self.mkstemp()
os.write(f, "#!/bin/sh\n")
os.write(f, "exec '%s' -c 'import sys; sys.exit(47)'\n" %
sys.executable)
os.close(f)
os.chmod(fname, 0700)
p = subprocess.Popen(fname)
p.wait()
os.remove(fname)
self.assertEqual(p.returncode, 47)
def test_invalid_args(self):
# invalid arguments should raise ValueError
self.assertRaises(ValueError, subprocess.call,
[sys.executable,
"-c", "import sys; sys.exit(47)"],
startupinfo=47)
self.assertRaises(ValueError, subprocess.call,
[sys.executable,
"-c", "import sys; sys.exit(47)"],
creationflags=47)
def test_shell_sequence(self):
# Run command through the shell (sequence)
newenv = os.environ.copy()
newenv["FRUIT"] = "apple"
p = subprocess.Popen(["echo $FRUIT"], shell=1,
stdout=subprocess.PIPE,
env=newenv)
self.assertEqual(p.stdout.read().strip(), "apple")
def test_shell_string(self):
# Run command through the shell (string)
newenv = os.environ.copy()
newenv["FRUIT"] = "apple"
p = subprocess.Popen("echo $FRUIT", shell=1,
stdout=subprocess.PIPE,
env=newenv)
self.assertEqual(p.stdout.read().strip(), "apple")
def test_call_string(self):
# call() function with string argument on UNIX
f, fname = self.mkstemp()
os.write(f, "#!/bin/sh\n")
os.write(f, "exec '%s' -c 'import sys; sys.exit(47)'\n" %
sys.executable)
os.close(f)
os.chmod(fname, 0700)
rc = subprocess.call(fname)
os.remove(fname)
self.assertEqual(rc, 47)
def test_specific_shell(self):
# Issue #9265: Incorrect name passed as arg[0].
shells = []
for prefix in ['/bin', '/usr/bin/', '/usr/local/bin']:
for name in ['bash', 'ksh']:
sh = os.path.join(prefix, name)
if os.path.isfile(sh):
shells.append(sh)
if not shells: # Will probably work for any shell but csh.
return # skip test
sh = '/bin/sh'
if os.path.isfile(sh) and not os.path.islink(sh):
# Test will fail if /bin/sh is a symlink to csh.
shells.append(sh)
for sh in shells:
p = subprocess.Popen("echo $0", executable=sh, shell=True,
stdout=subprocess.PIPE)
self.assertEqual(p.stdout.read().strip(), sh)
def DISABLED_test_send_signal(self):
p = subprocess.Popen([sys.executable,
"-c", "input()"])
self.assert_(p.poll() is None, p.poll())
p.send_signal(signal.SIGINT)
self.assertNotEqual(p.wait(), 0)
def DISABLED_test_kill(self):
p = subprocess.Popen([sys.executable,
"-c", "input()"])
self.assert_(p.poll() is None, p.poll())
p.kill()
self.assertEqual(p.wait(), -signal.SIGKILL)
def DISABLED_test_terminate(self):
p = subprocess.Popen([sys.executable,
"-c", "input()"])
self.assert_(p.poll() is None, p.poll())
p.terminate()
self.assertEqual(p.wait(), -signal.SIGTERM)
#
# Windows tests
#
if mswindows:
def test_startupinfo(self):
# startupinfo argument
            # We use hardcoded constants because we do not want to
# depend on win32all.
STARTF_USESHOWWINDOW = 1
SW_MAXIMIZE = 3
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags = STARTF_USESHOWWINDOW
startupinfo.wShowWindow = SW_MAXIMIZE
# Since Python is a console process, it won't be affected
# by wShowWindow, but the argument should be silently
# ignored
subprocess.call([sys.executable, "-c", "import sys; sys.exit(0)"],
startupinfo=startupinfo)
def test_creationflags(self):
# creationflags argument
CREATE_NEW_CONSOLE = 16
sys.stderr.write(" a DOS box should flash briefly ...\n")
subprocess.call(sys.executable +
' -c "import time; time.sleep(0.25)"',
creationflags=CREATE_NEW_CONSOLE)
def test_invalid_args(self):
# invalid arguments should raise ValueError
self.assertRaises(ValueError, subprocess.call,
[sys.executable,
"-c", "import sys; sys.exit(47)"],
preexec_fn=lambda: 1)
self.assertRaises(ValueError, subprocess.call,
[sys.executable,
"-c", "import sys; sys.exit(47)"],
stdout=subprocess.PIPE,
close_fds=True)
def test_close_fds(self):
# close file descriptors
rc = subprocess.call([sys.executable, "-c",
"import sys; sys.exit(47)"],
close_fds=True)
self.assertEqual(rc, 47)
def test_shell_sequence(self):
# Run command through the shell (sequence)
newenv = os.environ.copy()
newenv["FRUIT"] = "physalis"
p = subprocess.Popen(["set"], shell=1,
stdout=subprocess.PIPE,
env=newenv)
self.assertNotEqual(p.stdout.read().find("physalis"), -1)
def test_shell_string(self):
# Run command through the shell (string)
newenv = os.environ.copy()
newenv["FRUIT"] = "physalis"
p = subprocess.Popen("set", shell=1,
stdout=subprocess.PIPE,
env=newenv)
self.assertNotEqual(p.stdout.read().find("physalis"), -1)
def test_call_string(self):
# call() function with string argument on Windows
rc = subprocess.call(sys.executable +
' -c "import sys; sys.exit(47)"')
self.assertEqual(rc, 47)
def DISABLED_test_send_signal(self):
p = subprocess.Popen([sys.executable,
"-c", "input()"])
self.assert_(p.poll() is None, p.poll())
p.send_signal(signal.SIGTERM)
self.assertNotEqual(p.wait(), 0)
def DISABLED_test_kill(self):
p = subprocess.Popen([sys.executable,
"-c", "input()"])
self.assert_(p.poll() is None, p.poll())
p.kill()
self.assertNotEqual(p.wait(), 0)
def DISABLED_test_terminate(self):
p = subprocess.Popen([sys.executable,
"-c", "input()"])
self.assert_(p.poll() is None, p.poll())
p.terminate()
self.assertNotEqual(p.wait(), 0)
class HelperFunctionTests(unittest.TestCase):
def _test_eintr_retry_call(self):
record_calls = []
def fake_os_func(*args):
record_calls.append(args)
if len(record_calls) == 2:
raise OSError(errno.EINTR, "fake interrupted system call")
return tuple(reversed(args))
self.assertEqual((999, 256),
subprocess._eintr_retry_call(fake_os_func, 256, 999))
self.assertEqual([(256, 999)], record_calls)
# This time there will be an EINTR so it will loop once.
self.assertEqual((666,),
subprocess._eintr_retry_call(fake_os_func, 666))
self.assertEqual([(256, 999), (666,), (666,)], record_calls)
if not mswindows:
test_eintr_retry_call = _test_eintr_retry_call
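# For reference, a rough sketch of the kind of wrapper exercised above; the
# real subprocess._eintr_retry_call may differ in detail (for example in which
# exception types it inspects):
def _eintr_retry_call_sketch(func, *args):
    while True:
        try:
            return func(*args)
        except (OSError, IOError), e:
            if e.errno == errno.EINTR:
                continue  # interrupted system call: try again
            raise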
def test_main():
test_support.run_unittest(ProcessTestCase,
HelperFunctionTests)
if hasattr(test_support, "reap_children"):
test_support.reap_children()
if __name__ == "__main__":
test_main()
import socket
import threading
import telnetlib
import time
from unittest import TestCase
from test import test_support
HOST = test_support.HOST
def server(evt, serv):
serv.listen(5)
evt.set()
try:
conn, addr = serv.accept()
except socket.timeout:
pass
finally:
serv.close()
evt.set()
class GeneralTests(TestCase):
def setUp(self):
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(3)
self.port = test_support.bind_port(self.sock)
threading.Thread(target=server, args=(self.evt,self.sock)).start()
self.evt.wait()
self.evt.clear()
time.sleep(.1)
def tearDown(self):
self.evt.wait()
def testBasic(self):
# connects
telnet = telnetlib.Telnet(HOST, self.port)
telnet.sock.close()
def testTimeoutDefault(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
telnet = telnetlib.Telnet("localhost", self.port)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(telnet.sock.gettimeout(), 30)
telnet.sock.close()
def testTimeoutNone(self):
        # timeout=None means a blocking socket, even when a global default timeout is set
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
telnet = telnetlib.Telnet(HOST, self.port, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertTrue(telnet.sock.gettimeout() is None)
telnet.sock.close()
def testTimeoutValue(self):
telnet = telnetlib.Telnet("localhost", self.port, timeout=30)
self.assertEqual(telnet.sock.gettimeout(), 30)
telnet.sock.close()
def testTimeoutOpen(self):
telnet = telnetlib.Telnet()
telnet.open("localhost", self.port, timeout=30)
self.assertEqual(telnet.sock.gettimeout(), 30)
telnet.sock.close()
def test_main(verbose=None):
test_support.run_unittest(GeneralTests)
if __name__ == '__main__':
test_main()
import os
import unittest
import random
from test import test_support
import thread
import time
import sys
import lock_tests
NUMTASKS = 10
NUMTRIPS = 3
_print_mutex = thread.allocate_lock()
def verbose_print(arg):
"""Helper function for printing out debugging output."""
if test_support.verbose:
with _print_mutex:
print arg
class BasicThreadTest(unittest.TestCase):
def setUp(self):
self.done_mutex = thread.allocate_lock()
self.done_mutex.acquire()
self.running_mutex = thread.allocate_lock()
self.random_mutex = thread.allocate_lock()
self.created = 0
self.running = 0
self.next_ident = 0
class ThreadRunningTests(BasicThreadTest):
def newtask(self):
with self.running_mutex:
self.next_ident += 1
verbose_print("creating task %s" % self.next_ident)
thread.start_new_thread(self.task, (self.next_ident,))
self.created += 1
self.running += 1
def task(self, ident):
with self.random_mutex:
delay = random.random() / 10000.0
verbose_print("task %s will run for %sus" % (ident, round(delay*1e6)))
time.sleep(delay)
verbose_print("task %s done" % ident)
with self.running_mutex:
self.running -= 1
if self.created == NUMTASKS and self.running == 0:
self.done_mutex.release()
def test_starting_threads(self):
# Basic test for thread creation.
for i in range(NUMTASKS):
self.newtask()
verbose_print("waiting for tasks to complete...")
self.done_mutex.acquire()
verbose_print("all tasks done")
def test_stack_size(self):
# Various stack size tests.
        self.assertEquals(thread.stack_size(), 0, "initial stack size is not 0")
thread.stack_size(0)
self.assertEquals(thread.stack_size(), 0, "stack_size not reset to default")
if os.name not in ("nt", "os2", "posix"):
return
tss_supported = True
try:
thread.stack_size(4096)
except ValueError:
verbose_print("caught expected ValueError setting "
"stack_size(4096)")
except thread.error:
tss_supported = False
verbose_print("platform does not support changing thread stack "
"size")
if tss_supported:
fail_msg = "stack_size(%d) failed - should succeed"
for tss in (262144, 0x100000, 0):
thread.stack_size(tss)
self.assertEquals(thread.stack_size(), tss, fail_msg % tss)
verbose_print("successfully set stack_size(%d)" % tss)
for tss in (262144, 0x100000):
verbose_print("trying stack_size = (%d)" % tss)
self.next_ident = 0
self.created = 0
for i in range(NUMTASKS):
self.newtask()
verbose_print("waiting for all tasks to complete")
self.done_mutex.acquire()
verbose_print("all tasks done")
thread.stack_size(0)
class Barrier:
def __init__(self, num_threads):
self.num_threads = num_threads
self.waiting = 0
self.checkin_mutex = thread.allocate_lock()
self.checkout_mutex = thread.allocate_lock()
self.checkout_mutex.acquire()
def enter(self):
self.checkin_mutex.acquire()
self.waiting = self.waiting + 1
if self.waiting == self.num_threads:
self.waiting = self.num_threads - 1
self.checkout_mutex.release()
return
self.checkin_mutex.release()
self.checkout_mutex.acquire()
self.waiting = self.waiting - 1
if self.waiting == 0:
self.checkin_mutex.release()
return
self.checkout_mutex.release()
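# A small usage sketch for the Barrier above (illustrative only; the helper is
# made up and is not called by the tests below): every worker calls enter()
# once per round, and none of them gets past it until all have arrived.
def _barrier_usage_sketch(num_threads=3, rounds=2):
    bar = Barrier(num_threads)
    done = thread.allocate_lock()
    done.acquire()
    state_lock = thread.allocate_lock()
    remaining = [num_threads]
    def worker():
        for _ in range(rounds):
            bar.enter()  # blocks until all num_threads workers are here
        with state_lock:
            remaining[0] -= 1
            if remaining[0] == 0:
                done.release()  # the last worker signals completion
    for _ in range(num_threads):
        thread.start_new_thread(worker, ())
    done.acquire()  # wait until every worker has finished all rounds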
class BarrierTest(BasicThreadTest):
def test_barrier(self):
self.bar = Barrier(NUMTASKS)
self.running = NUMTASKS
for i in range(NUMTASKS):
thread.start_new_thread(self.task2, (i,))
verbose_print("waiting for tasks to end")
self.done_mutex.acquire()
verbose_print("tasks done")
def task2(self, ident):
for i in range(NUMTRIPS):
if ident == 0:
# give it a good chance to enter the next
# barrier before the others are all out
# of the current one
delay = 0
else:
with self.random_mutex:
delay = random.random() / 10000.0
verbose_print("task %s will run for %sus" %
(ident, round(delay * 1e6)))
time.sleep(delay)
verbose_print("task %s entering %s" % (ident, i))
self.bar.enter()
verbose_print("task %s leaving barrier" % ident)
with self.running_mutex:
self.running -= 1
# Must release mutex before releasing done, else the main thread can
# exit and set mutex to None as part of global teardown; then
# mutex.release() raises AttributeError.
finished = self.running == 0
if finished:
self.done_mutex.release()
class LockTests(lock_tests.LockTests):
locktype = thread.allocate_lock
class TestForkInThread(unittest.TestCase):
def setUp(self):
self.read_fd, self.write_fd = os.pipe()
def _test_forkinthread(self):
def thread1():
try:
pid = os.fork() # fork in a thread
except RuntimeError:
sys.exit(0) # exit the child
if pid == 0: # child
os.close(self.read_fd)
os.write(self.write_fd, "OK")
sys.exit(0)
else: # parent
os.close(self.write_fd)
thread.start_new_thread(thread1, ())
self.assertEqual(os.read(self.read_fd, 2), "OK",
"Unable to fork() in thread")
if not sys.platform.startswith('win'):
test_forkinthread = _test_forkinthread
def tearDown(self):
try:
os.close(self.read_fd)
except OSError:
pass
try:
os.close(self.write_fd)
except OSError:
pass
def test_main():
test_support.run_unittest(ThreadRunningTests, BarrierTest, LockTests,
TestForkInThread)
if __name__ == "__main__":
test_main()
# Very rudimentary test of threading module
import test.test_support
from test.test_support import verbose
import random
import re
import sys
import threading
import thread
import time
import unittest
import weakref
import lock_tests
# A trivial mutable counter.
class Counter(object):
def __init__(self):
self.value = 0
def inc(self):
self.value += 1
def dec(self):
self.value -= 1
def get(self):
return self.value
class TestThread(threading.Thread):
def __init__(self, name, testcase, sema, mutex, nrunning):
threading.Thread.__init__(self, name=name)
self.testcase = testcase
self.sema = sema
self.mutex = mutex
self.nrunning = nrunning
def run(self):
delay = random.random() / 10000.0
if verbose:
print 'task %s will run for %.1f usec' % (
self.name, delay * 1e6)
with self.sema:
with self.mutex:
self.nrunning.inc()
if verbose:
print self.nrunning.get(), 'tasks are running'
self.testcase.assert_(self.nrunning.get() <= 3)
time.sleep(delay)
if verbose:
print 'task', self.name, 'done'
with self.mutex:
self.nrunning.dec()
self.testcase.assert_(self.nrunning.get() >= 0)
if verbose:
print '%s is finished. %d tasks are running' % (
self.name, self.nrunning.get())
class ThreadTests(unittest.TestCase):
# Create a bunch of threads, let each do some work, wait until all are
# done.
def test_various_ops(self):
# This takes about n/3 seconds to run (about n/3 clumps of tasks,
# times about 1 second per clump).
NUMTASKS = 10
# no more than 3 of the 10 can run at once
sema = threading.BoundedSemaphore(value=3)
mutex = threading.RLock()
numrunning = Counter()
threads = []
for i in range(NUMTASKS):
t = TestThread("<thread %d>"%i, self, sema, mutex, numrunning)
threads.append(t)
self.failUnlessEqual(t.ident, None)
self.assert_(re.match('<TestThread\(.*, initial\)>', repr(t)))
t.start()
if verbose:
print 'waiting for all tasks to complete'
for t in threads:
t.join(NUMTASKS)
self.assert_(not t.is_alive())
self.failIfEqual(t.ident, 0)
self.assertFalse(t.ident is None)
self.assert_(re.match('<TestThread\(.*, \w+ -?\d+\)>', repr(t)))
if verbose:
print 'all tasks done'
self.assertEqual(numrunning.get(), 0)
def test_ident_of_no_threading_threads(self):
# The ident still must work for the main thread and dummy threads.
self.assertFalse(threading.currentThread().ident is None)
def f():
ident.append(threading.currentThread().ident)
done.set()
done = threading.Event()
ident = []
thread.start_new_thread(f, ())
done.wait()
self.assertFalse(ident[0] is None)
# Kill the "immortal" _DummyThread
del threading._active[ident[0]]
# run with a small(ish) thread stack size (256kB)
def test_various_ops_small_stack(self):
if verbose:
print 'with 256kB thread stack size...'
try:
threading.stack_size(262144)
except thread.error:
if verbose:
print 'platform does not support changing thread stack size'
return
self.test_various_ops()
threading.stack_size(0)
# run with a large thread stack size (1MB)
def test_various_ops_large_stack(self):
if verbose:
print 'with 1MB thread stack size...'
try:
threading.stack_size(0x100000)
except thread.error:
if verbose:
print 'platform does not support changing thread stack size'
return
self.test_various_ops()
threading.stack_size(0)
def test_foreign_thread(self):
# Check that a "foreign" thread can use the threading module.
def f(mutex):
# Calling current_thread() forces an entry for the foreign
# thread to get made in the threading._active map.
threading.current_thread()
mutex.release()
mutex = threading.Lock()
mutex.acquire()
tid = thread.start_new_thread(f, (mutex,))
# Wait for the thread to finish.
mutex.acquire()
self.assert_(tid in threading._active)
self.assert_(isinstance(threading._active[tid],
threading._DummyThread))
del threading._active[tid]
# PyThreadState_SetAsyncExc() is a CPython-only gimmick, not (currently)
# exposed at the Python level. This test relies on ctypes to get at it.
def test_PyThreadState_SetAsyncExc(self):
try:
import ctypes
except ImportError:
if verbose:
print "test_PyThreadState_SetAsyncExc can't import ctypes"
return # can't do anything
set_async_exc = ctypes.pythonapi.PyThreadState_SetAsyncExc
class AsyncExc(Exception):
pass
exception = ctypes.py_object(AsyncExc)
# `worker_started` is set by the thread when it's inside a try/except
# block waiting to catch the asynchronously set AsyncExc exception.
# `worker_saw_exception` is set by the thread upon catching that
# exception.
worker_started = threading.Event()
worker_saw_exception = threading.Event()
class Worker(threading.Thread):
def run(self):
self.id = thread.get_ident()
self.finished = False
try:
while True:
worker_started.set()
time.sleep(0.1)
except AsyncExc:
self.finished = True
worker_saw_exception.set()
t = Worker()
t.daemon = True # so if this fails, we don't hang Python at shutdown
t.start()
if verbose:
print " started worker thread"
# Try a thread id that doesn't make sense.
if verbose:
print " trying nonsensical thread id"
result = set_async_exc(ctypes.c_long(-1), exception)
self.assertEqual(result, 0) # no thread states modified
# Now raise an exception in the worker thread.
if verbose:
print " waiting for worker thread to get started"
worker_started.wait()
if verbose:
print " verifying worker hasn't exited"
self.assert_(not t.finished)
if verbose:
print " attempting to raise asynch exception in worker"
result = set_async_exc(ctypes.c_long(t.id), exception)
self.assertEqual(result, 1) # one thread state modified
if verbose:
print " waiting for worker to say it caught the exception"
worker_saw_exception.wait(timeout=10)
self.assert_(t.finished)
if verbose:
print " all OK -- joining worker"
if t.finished:
t.join()
# else the thread is still running, and we have no way to kill it
def test_limbo_cleanup(self):
# Issue 7481: Failure to start thread should cleanup the limbo map.
def fail_new_thread(*args):
raise thread.error()
_start_new_thread = threading._start_new_thread
threading._start_new_thread = fail_new_thread
try:
t = threading.Thread(target=lambda: None)
self.assertRaises(thread.error, t.start)
self.assertFalse(
t in threading._limbo,
"Failed to cleanup _limbo map on failure of Thread.start().")
finally:
threading._start_new_thread = _start_new_thread
def test_finalize_runnning_thread(self):
# Issue 1402: the PyGILState_Ensure / _Release functions may be called
# very late on python exit: on deallocation of a running thread for
# example.
try:
import ctypes
except ImportError:
if verbose:
print("test_finalize_with_runnning_thread can't import ctypes")
return # can't do anything
import subprocess
rc = subprocess.call([sys.executable, "-c", """if 1:
import ctypes, sys, time, thread
# This lock is used as a simple event variable.
ready = thread.allocate_lock()
ready.acquire()
# Module globals are cleared before __del__ is run
# So we save the functions in class dict
class C:
ensure = ctypes.pythonapi.PyGILState_Ensure
release = ctypes.pythonapi.PyGILState_Release
def __del__(self):
state = self.ensure()
self.release(state)
def waitingThread():
x = C()
ready.release()
time.sleep(100)
thread.start_new_thread(waitingThread, ())
ready.acquire() # Be sure the other thread is waiting.
sys.exit(42)
"""])
self.assertEqual(rc, 42)
def test_finalize_with_trace(self):
# Issue1733757
# Avoid a deadlock when sys.settrace steps into threading._shutdown
import subprocess
rc = subprocess.call([sys.executable, "-c", """if 1:
import sys, threading
# A deadlock-killer, to prevent the
# test suite from hanging forever
def killer():
import os, time
time.sleep(2)
print 'program blocked; aborting'
os._exit(2)
t = threading.Thread(target=killer)
t.daemon = True
t.start()
# This is the trace function
def func(frame, event, arg):
threading.current_thread()
return func
sys.settrace(func)
"""])
self.failIf(rc == 2, "interpreted was blocked")
self.failUnless(rc == 0, "Unexpected error")
def test_join_nondaemon_on_shutdown(self):
# Issue 1722344
# Raising SystemExit skipped threading._shutdown
import subprocess
p = subprocess.Popen([sys.executable, "-c", """if 1:
import threading
from time import sleep
def child():
sleep(1)
# As a non-daemon thread we SHOULD wake up and nothing
# should be torn down yet
print "Woke up, sleep function is:", sleep
threading.Thread(target=child).start()
raise SystemExit
"""],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
self.assertEqual(stdout.strip(),
"Woke up, sleep function is: <built-in function sleep>")
stderr = re.sub(r"^\[\d+ refs\]", "", stderr, re.MULTILINE).strip()
self.assertEqual(stderr, "")
def test_enumerate_after_join(self):
# Try hard to trigger #1703448: a thread is still returned in
# threading.enumerate() after it has been join()ed.
enum = threading.enumerate
old_interval = sys.getcheckinterval()
try:
for i in xrange(1, 100):
# Try a couple times at each thread-switching interval
# to get more interleavings.
sys.setcheckinterval(i // 5)
t = threading.Thread(target=lambda: None)
t.start()
t.join()
l = enum()
self.assertFalse(t in l,
"#1703448 triggered after %d trials: %s" % (i, l))
finally:
sys.setcheckinterval(old_interval)
def test_no_refcycle_through_target(self):
class RunSelfFunction(object):
def __init__(self, should_raise):
# The links in this refcycle from Thread back to self
# should be cleaned up when the thread completes.
self.should_raise = should_raise
self.thread = threading.Thread(target=self._run,
args=(self,),
kwargs={'yet_another':self})
self.thread.start()
def _run(self, other_ref, yet_another):
if self.should_raise:
raise SystemExit
cyclic_object = RunSelfFunction(should_raise=False)
weak_cyclic_object = weakref.ref(cyclic_object)
cyclic_object.thread.join()
del cyclic_object
self.assertEquals(None, weak_cyclic_object(),
msg=('%d references still around' %
sys.getrefcount(weak_cyclic_object())))
raising_cyclic_object = RunSelfFunction(should_raise=True)
weak_raising_cyclic_object = weakref.ref(raising_cyclic_object)
raising_cyclic_object.thread.join()
del raising_cyclic_object
self.assertEquals(None, weak_raising_cyclic_object(),
msg=('%d references still around' %
sys.getrefcount(weak_raising_cyclic_object())))
class ThreadJoinOnShutdown(unittest.TestCase):
def _run_and_join(self, script):
script = """if 1:
import sys, os, time, threading
# a thread, which waits for the main program to terminate
def joiningfunc(mainthread):
mainthread.join()
print 'end of thread'
\n""" + script
import subprocess
p = subprocess.Popen([sys.executable, "-c", script], stdout=subprocess.PIPE)
rc = p.wait()
data = p.stdout.read().replace('\r', '')
self.assertEqual(data, "end of main\nend of thread\n")
self.failIf(rc == 2, "interpreter was blocked")
self.failUnless(rc == 0, "Unexpected error")
def test_1_join_on_shutdown(self):
# The usual case: on exit, wait for a non-daemon thread
script = """if 1:
import os
t = threading.Thread(target=joiningfunc,
args=(threading.current_thread(),))
t.start()
time.sleep(0.1)
print 'end of main'
"""
self._run_and_join(script)
def test_2_join_in_forked_process(self):
# Like the test above, but from a forked interpreter
import os
if not hasattr(os, 'fork'):
return
script = """if 1:
childpid = os.fork()
if childpid != 0:
os.waitpid(childpid, 0)
sys.exit(0)
t = threading.Thread(target=joiningfunc,
args=(threading.current_thread(),))
t.start()
print 'end of main'
"""
self._run_and_join(script)
def test_3_join_in_forked_from_thread(self):
# Like the test above, but fork() was called from a worker thread
# In the forked process, the main Thread object must be marked as stopped.
import os
if not hasattr(os, 'fork'):
return
# Skip platforms with known problems forking from a worker thread.
# See http://bugs.python.org/issue3863.
if sys.platform in ('freebsd4', 'freebsd5', 'freebsd6', 'os2emx'):
print >>sys.stderr, ('Skipping test_3_join_in_forked_from_thread'
' due to known OS bugs on'), sys.platform
return
script = """if 1:
main_thread = threading.current_thread()
def worker():
childpid = os.fork()
if childpid != 0:
os.waitpid(childpid, 0)
sys.exit(0)
t = threading.Thread(target=joiningfunc,
args=(main_thread,))
print 'end of main'
t.start()
t.join() # Should not block: main_thread is already stopped
w = threading.Thread(target=worker)
w.start()
"""
self._run_and_join(script)
class ThreadingExceptionTests(unittest.TestCase):
# A RuntimeError should be raised if Thread.start() is called
# multiple times.
def test_start_thread_again(self):
thread = threading.Thread()
thread.start()
self.assertRaises(RuntimeError, thread.start)
def test_joining_current_thread(self):
current_thread = threading.current_thread()
self.assertRaises(RuntimeError, current_thread.join)
def test_joining_inactive_thread(self):
thread = threading.Thread()
self.assertRaises(RuntimeError, thread.join)
def test_daemonize_active_thread(self):
thread = threading.Thread()
thread.start()
self.assertRaises(RuntimeError, setattr, thread, "daemon", True)
class LockTests(lock_tests.LockTests):
locktype = staticmethod(threading.Lock)
class RLockTests(lock_tests.RLockTests):
locktype = staticmethod(threading.RLock)
class EventTests(lock_tests.EventTests):
eventtype = staticmethod(threading.Event)
class ConditionAsRLockTests(lock_tests.RLockTests):
# A Condition uses an RLock by default and exports its API.
locktype = staticmethod(threading.Condition)
class ConditionTests(lock_tests.ConditionTests):
condtype = staticmethod(threading.Condition)
class SemaphoreTests(lock_tests.SemaphoreTests):
semtype = staticmethod(threading.Semaphore)
class BoundedSemaphoreTests(lock_tests.BoundedSemaphoreTests):
semtype = staticmethod(threading.BoundedSemaphore)
def test_main():
test.test_support.run_unittest(LockTests, RLockTests, EventTests,
ConditionAsRLockTests, ConditionTests,
SemaphoreTests, BoundedSemaphoreTests,
ThreadTests,
ThreadJoinOnShutdown,
ThreadingExceptionTests,
)
if __name__ == "__main__":
test_main()
import unittest
from doctest import DocTestSuite
from test import test_support
import threading
import weakref
import gc
class Weak(object):
pass
def target(local, weaklist):
weak = Weak()
local.weak = weak
weaklist.append(weakref.ref(weak))
class ThreadingLocalTest(unittest.TestCase):
def test_local_refs(self):
self._local_refs(20)
self._local_refs(50)
self._local_refs(100)
def _local_refs(self, n):
local = threading.local()
weaklist = []
for i in range(n):
t = threading.Thread(target=target, args=(local, weaklist))
t.start()
t.join()
del t
gc.collect()
self.assertEqual(len(weaklist), n)
# XXX threading.local keeps the local of the last stopped thread alive.
deadlist = [weak for weak in weaklist if weak() is None]
self.assertEqual(len(deadlist), n-1)
# Assignment to the same thread local frees it sometimes (!)
local.someothervar = None
gc.collect()
deadlist = [weak for weak in weaklist if weak() is None]
self.assert_(len(deadlist) in (n-1, n), (n, len(deadlist)))
def test_derived(self):
# Issue 3088: if there is a thread switch inside the __init__
# of a threading.local derived class, the per-thread dictionary
# is created but not correctly set on the object.
# The first member set may be bogus.
import time
class Local(threading.local):
def __init__(self):
time.sleep(0.01)
local = Local()
def f(i):
local.x = i
# Simply check that the variable is correctly set
self.assertEqual(local.x, i)
threads= []
for i in range(10):
t = threading.Thread(target=f, args=(i,))
t.start()
threads.append(t)
for t in threads:
t.join()
def test_derived_cycle_dealloc(self):
# http://bugs.python.org/issue6990
class Local(threading.local):
pass
locals = None
passed = [False]
e1 = threading.Event()
e2 = threading.Event()
def f():
# 1) Involve Local in a cycle
cycle = [Local()]
cycle.append(cycle)
cycle[0].foo = 'bar'
# 2) GC the cycle (triggers threadmodule.c::local_clear
# before local_dealloc)
del cycle
gc.collect()
e1.set()
e2.wait()
# 4) New Locals should be empty
passed[0] = all(not hasattr(local, 'foo') for local in locals)
t = threading.Thread(target=f)
t.start()
e1.wait()
# 3) New Locals should recycle the original's address. Creating
# them in the thread overwrites the thread state and avoids the
# bug
locals = [Local() for i in range(10)]
e2.set()
t.join()
self.assertTrue(passed[0])
def test_main():
suite = unittest.TestSuite()
suite.addTest(DocTestSuite('_threading_local'))
suite.addTest(unittest.makeSuite(ThreadingLocalTest))
try:
from thread import _local
except ImportError:
pass
else:
import _threading_local
local_orig = _threading_local.local
def setUp(test):
_threading_local.local = _local
def tearDown(test):
_threading_local.local = local_orig
suite.addTest(DocTestSuite('_threading_local',
setUp=setUp, tearDown=tearDown)
)
test_support.run_unittest(suite)
if __name__ == '__main__':
test_main()
"""Unit tests for socket timeout feature."""
import unittest
from test import test_support
# This requires the 'network' resource as given on the regrtest command line.
skip_expected = not test_support.is_resource_enabled('network')
import time
import socket
class CreationTestCase(unittest.TestCase):
"""Test case for socket.gettimeout() and socket.settimeout()"""
def setUp(self):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def tearDown(self):
self.sock.close()
def testObjectCreation(self):
# Test Socket creation
self.assertEqual(self.sock.gettimeout(), None,
"timeout not disabled by default")
def testFloatReturnValue(self):
# Test return value of gettimeout()
self.sock.settimeout(7.345)
self.assertEqual(self.sock.gettimeout(), 7.345)
self.sock.settimeout(3)
self.assertEqual(self.sock.gettimeout(), 3)
self.sock.settimeout(None)
self.assertEqual(self.sock.gettimeout(), None)
def testReturnType(self):
# Test return type of gettimeout()
self.sock.settimeout(1)
self.assertEqual(type(self.sock.gettimeout()), type(1.0))
self.sock.settimeout(3.9)
self.assertEqual(type(self.sock.gettimeout()), type(1.0))
def testTypeCheck(self):
# Test type checking by settimeout()
self.sock.settimeout(0)
self.sock.settimeout(0L)
self.sock.settimeout(0.0)
self.sock.settimeout(None)
self.assertRaises(TypeError, self.sock.settimeout, "")
self.assertRaises(TypeError, self.sock.settimeout, u"")
self.assertRaises(TypeError, self.sock.settimeout, ())
self.assertRaises(TypeError, self.sock.settimeout, [])
self.assertRaises(TypeError, self.sock.settimeout, {})
self.assertRaises(TypeError, self.sock.settimeout, 0j)
def testRangeCheck(self):
# Test range checking by settimeout()
self.assertRaises(ValueError, self.sock.settimeout, -1)
self.assertRaises(ValueError, self.sock.settimeout, -1L)
self.assertRaises(ValueError, self.sock.settimeout, -1.0)
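# Note for the two tests below: setblocking(1) is equivalent to
# settimeout(None) (blocking mode, no timeout) and setblocking(0) is
# equivalent to settimeout(0.0) (non-blocking), so whichever call was made
# last determines what gettimeout() reports.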
def testTimeoutThenBlocking(self):
# Test settimeout() followed by setblocking()
self.sock.settimeout(10)
self.sock.setblocking(1)
self.assertEqual(self.sock.gettimeout(), None)
self.sock.setblocking(0)
self.assertEqual(self.sock.gettimeout(), 0.0)
self.sock.settimeout(10)
self.sock.setblocking(0)
self.assertEqual(self.sock.gettimeout(), 0.0)
self.sock.setblocking(1)
self.assertEqual(self.sock.gettimeout(), None)
def testBlockingThenTimeout(self):
# Test setblocking() followed by settimeout()
self.sock.setblocking(0)
self.sock.settimeout(1)
self.assertEqual(self.sock.gettimeout(), 1)
self.sock.setblocking(1)
self.sock.settimeout(1)
self.assertEqual(self.sock.gettimeout(), 1)
class TimeoutTestCase(unittest.TestCase):
"""Test case for socket.socket() timeout functions"""
# There are a number of tests here trying to make sure that an operation
# doesn't take too much longer than expected. But competing machine
# activity makes it inevitable that such tests will fail at times.
# When fuzz was at 1.0, I (tim) routinely saw bogus failures on Win2K
# and Win98SE. Boosting it to 2.0 helped a lot, but isn't a real
# solution.
fuzz = 2.0
def setUp(self):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.addr_remote = ('www.python.org.', 80)
self.localhost = '127.0.0.1'
def tearDown(self):
self.sock.close()
def testConnectTimeout(self):
# Choose a private address that is unlikely to exist to prevent
# failures due to the connect succeeding before the timeout.
# Use a dotted IP address to avoid including the DNS lookup time
# with the connect time. This avoids failing the assertion that
# the timeout occurred fast enough.
addr = ('10.0.0.0', 12345)
# Test connect() timeout
_timeout = 0.001
self.sock.settimeout(_timeout)
_t1 = time.time()
self.failUnlessRaises(socket.error, self.sock.connect, addr)
_t2 = time.time()
_delta = abs(_t1 - _t2)
self.assert_(_delta < _timeout + self.fuzz,
"timeout (%g) is more than %g seconds more than expected (%g)"
%(_delta, self.fuzz, _timeout))
def testRecvTimeout(self):
# Test recv() timeout
_timeout = 0.02
self.sock.connect(self.addr_remote)
self.sock.settimeout(_timeout)
_t1 = time.time()
self.failUnlessRaises(socket.error, self.sock.recv, 1024)
_t2 = time.time()
_delta = abs(_t1 - _t2)
self.assert_(_delta < _timeout + self.fuzz,
"timeout (%g) is %g seconds more than expected (%g)"
%(_delta, self.fuzz, _timeout))
def testAcceptTimeout(self):
# Test accept() timeout
_timeout = 2
self.sock.settimeout(_timeout)
# Prevent "Address already in use" socket exceptions
test_support.bind_port(self.sock, self.localhost)
self.sock.listen(5)
_t1 = time.time()
self.failUnlessRaises(socket.error, self.sock.accept)
_t2 = time.time()
_delta = abs(_t1 - _t2)
self.assert_(_delta < _timeout + self.fuzz,
"timeout (%g) is %g seconds more than expected (%g)"
%(_delta, self.fuzz, _timeout))
def testRecvfromTimeout(self):
# Test recvfrom() timeout
_timeout = 2
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.sock.settimeout(_timeout)
# Prevent "Address already in use" socket exceptions
test_support.bind_port(self.sock, self.localhost)
_t1 = time.time()
self.failUnlessRaises(socket.error, self.sock.recvfrom, 8192)
_t2 = time.time()
_delta = abs(_t1 - _t2)
self.assert_(_delta < _timeout + self.fuzz,
"timeout (%g) is %g seconds more than expected (%g)"
%(_delta, self.fuzz, _timeout))
def testSend(self):
# Test send() timeout
# couldn't figure out how to test it
pass
def testSendto(self):
# Test sendto() timeout
# couldn't figure out how to test it
pass
def testSendall(self):
# Test sendall() timeout
# couldn't figure out how to test it
pass
def test_main():
test_support.requires('network')
test_support.run_unittest(CreationTestCase, TimeoutTestCase)
if __name__ == "__main__":
test_main()
"""Regresssion tests for urllib"""
import urllib
import httplib
import unittest
from test import test_support
import os
import mimetools
import tempfile
import StringIO
def hexescape(char):
"""Escape char as RFC 2396 specifies"""
hex_repr = hex(ord(char))[2:].upper()
if len(hex_repr) == 1:
hex_repr = "0%s" % hex_repr
return "%" + hex_repr
class urlopen_FileTests(unittest.TestCase):
"""Test urlopen() opening a temporary file.
Try to test as much functionality as possible so as to cut down on reliance
on connecting to the Net for testing.
"""
def setUp(self):
"""Setup of a temp file to use for testing"""
self.text = "test_urllib: %s\n" % self.__class__.__name__
FILE = file(test_support.TESTFN, 'wb')
try:
FILE.write(self.text)
finally:
FILE.close()
self.pathname = test_support.TESTFN
self.returned_obj = urllib.urlopen("file:%s" % self.pathname)
def tearDown(self):
"""Shut down the open object"""
self.returned_obj.close()
os.remove(test_support.TESTFN)
def test_interface(self):
# Make sure object returned by urlopen() has the specified methods
for attr in ("read", "readline", "readlines", "fileno",
"close", "info", "geturl", "getcode", "__iter__"):
self.assert_(hasattr(self.returned_obj, attr),
"object returned by urlopen() lacks %s attribute" %
attr)
def test_read(self):
self.assertEqual(self.text, self.returned_obj.read())
def test_readline(self):
self.assertEqual(self.text, self.returned_obj.readline())
self.assertEqual('', self.returned_obj.readline(),
"calling readline() after exhausting the file did not"
" return an empty string")
def test_readlines(self):
lines_list = self.returned_obj.readlines()
self.assertEqual(len(lines_list), 1,
"readlines() returned the wrong number of lines")
self.assertEqual(lines_list[0], self.text,
"readlines() returned improper text")
def test_fileno(self):
file_num = self.returned_obj.fileno()
self.assert_(isinstance(file_num, int),
"fileno() did not return an int")
self.assertEqual(os.read(file_num, len(self.text)), self.text,
"Reading on the file descriptor returned by fileno() "
"did not return the expected text")
def test_close(self):
# Test close() by calling it here and then having it be called again
# by the tearDown() method for the test
self.returned_obj.close()
def test_info(self):
self.assert_(isinstance(self.returned_obj.info(), mimetools.Message))
def test_geturl(self):
self.assertEqual(self.returned_obj.geturl(), self.pathname)
def test_getcode(self):
self.assertEqual(self.returned_obj.getcode(), None)
def test_iter(self):
# Test iterator
# No need to count the number of iterations; the comparison would fail
# the instant the iterator returned anything beyond the first line
for line in self.returned_obj.__iter__():
self.assertEqual(line, self.text)
class ProxyTests(unittest.TestCase):
def setUp(self):
# Records changes to env vars
self.env = test_support.EnvironmentVarGuard()
# Delete all proxy related env vars
for k in os.environ.keys():
if 'proxy' in k.lower():
self.env.unset(k)
def tearDown(self):
# Restore all proxy related env vars
self.env.__exit__()
del self.env
def test_getproxies_environment_keep_no_proxies(self):
self.env.set('NO_PROXY', 'localhost')
proxies = urllib.getproxies_environment()
# getproxies_environment() uses lowercased, truncated (no '_proxy') keys
self.assertEquals('localhost', proxies['no'])
class urlopen_HttpTests(unittest.TestCase):
"""Test urlopen() opening a fake http connection."""
def fakehttp(self, fakedata):
class FakeSocket(StringIO.StringIO):
def sendall(self, str): pass
def makefile(self, mode, name): return self
def read(self, amt=None):
if self.closed: return ''
return StringIO.StringIO.read(self, amt)
def readline(self, length=None):
if self.closed: return ''
return StringIO.StringIO.readline(self, length)
class FakeHTTPConnection(httplib.HTTPConnection):
def connect(self):
self.sock = FakeSocket(fakedata)
assert httplib.HTTP._connection_class == httplib.HTTPConnection
httplib.HTTP._connection_class = FakeHTTPConnection
def unfakehttp(self):
httplib.HTTP._connection_class = httplib.HTTPConnection
def test_read(self):
self.fakehttp('Hello!')
try:
fp = urllib.urlopen("http://python.org/")
self.assertEqual(fp.readline(), 'Hello!')
self.assertEqual(fp.readline(), '')
self.assertEqual(fp.geturl(), 'http://python.org/')
self.assertEqual(fp.getcode(), 200)
finally:
self.unfakehttp()
def test_read_bogus(self):
# urlopen() should raise IOError for many error codes.
self.fakehttp('''HTTP/1.1 401 Authentication Required
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
Connection: close
Content-Type: text/html; charset=iso-8859-1
''')
try:
self.assertRaises(IOError, urllib.urlopen, "http://python.org/")
finally:
self.unfakehttp()
def test_invalid_redirect(self):
# urlopen() should raise IOError for many error codes.
self.fakehttp("""HTTP/1.1 302 Found
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
Location: file:README
Connection: close
Content-Type: text/html; charset=iso-8859-1
""")
try:
self.assertRaises(IOError, urllib.urlopen, "http://python.org/")
finally:
self.unfakehttp()
def test_empty_socket(self):
# urlopen() raises IOError if the underlying socket does not send any
# data. (#1680230)
self.fakehttp('')
try:
self.assertRaises(IOError, urllib.urlopen, 'http://something')
finally:
self.unfakehttp()
class urlretrieve_FileTests(unittest.TestCase):
"""Test urllib.urlretrieve() on local files"""
def setUp(self):
# Create a list of temporary files. Each item in the list is a file
# name (absolute path or relative to the current working directory).
# All files in this list will be deleted in the tearDown method. Note,
# this only helps to make sure temporary files get deleted, but it
# does nothing about trying to close files that may still be open. It
# is the responsibility of the developer to properly close files even
# when exceptional conditions occur.
self.tempFiles = []
# Create a temporary file.
self.registerFileForCleanUp(test_support.TESTFN)
self.text = 'testing urllib.urlretrieve'
try:
FILE = file(test_support.TESTFN, 'wb')
FILE.write(self.text)
FILE.close()
finally:
try: FILE.close()
except: pass
def tearDown(self):
# Delete the temporary files.
for each in self.tempFiles:
try: os.remove(each)
except: pass
def constructLocalFileUrl(self, filePath):
return "file://%s" % urllib.pathname2url(os.path.abspath(filePath))
def createNewTempFile(self, data=""):
"""Creates a new temporary file containing the specified data,
registers the file for deletion during the test fixture tear down, and
returns the absolute path of the file."""
newFd, newFilePath = tempfile.mkstemp()
try:
self.registerFileForCleanUp(newFilePath)
newFile = os.fdopen(newFd, "wb")
newFile.write(data)
newFile.close()
finally:
try: newFile.close()
except: pass
return newFilePath
def registerFileForCleanUp(self, fileName):
self.tempFiles.append(fileName)
def test_basic(self):
# Make sure that a local file just gets its own location returned and
# a headers value is returned.
result = urllib.urlretrieve("file:%s" % test_support.TESTFN)
self.assertEqual(result[0], test_support.TESTFN)
self.assert_(isinstance(result[1], mimetools.Message),
"did not get a mimetools.Message instance as second "
"returned value")
def test_copy(self):
# Test that setting the filename argument works.
second_temp = "%s.2" % test_support.TESTFN
self.registerFileForCleanUp(second_temp)
result = urllib.urlretrieve(self.constructLocalFileUrl(
test_support.TESTFN), second_temp)
self.assertEqual(second_temp, result[0])
self.assert_(os.path.exists(second_temp), "copy of the file was not "
"made")
FILE = file(second_temp, 'rb')
try:
text = FILE.read()
FILE.close()
finally:
try: FILE.close()
except: pass
self.assertEqual(self.text, text)
def test_reporthook(self):
# Make sure that the reporthook works.
def hooktester(count, block_size, total_size, count_holder=[0]):
self.assert_(isinstance(count, int))
self.assert_(isinstance(block_size, int))
self.assert_(isinstance(total_size, int))
self.assertEqual(count, count_holder[0])
count_holder[0] = count_holder[0] + 1
second_temp = "%s.2" % test_support.TESTFN
self.registerFileForCleanUp(second_temp)
urllib.urlretrieve(self.constructLocalFileUrl(test_support.TESTFN),
second_temp, hooktester)
def test_reporthook_0_bytes(self):
# Test on zero length file. Should call reporthook only 1 time.
report = []
def hooktester(count, block_size, total_size, _report=report):
_report.append((count, block_size, total_size))
srcFileName = self.createNewTempFile()
urllib.urlretrieve(self.constructLocalFileUrl(srcFileName),
test_support.TESTFN, hooktester)
self.assertEqual(len(report), 1)
self.assertEqual(report[0][2], 0)
def test_reporthook_5_bytes(self):
# Test on 5 byte file. Should call reporthook only 2 times (once when
# the "network connection" is established and once when the block is
# read). Since the block size is 8192 bytes, only one block read is
# required to read the entire file.
report = []
def hooktester(count, block_size, total_size, _report=report):
_report.append((count, block_size, total_size))
srcFileName = self.createNewTempFile("x" * 5)
urllib.urlretrieve(self.constructLocalFileUrl(srcFileName),
test_support.TESTFN, hooktester)
self.assertEqual(len(report), 2)
self.assertEqual(report[0][1], 8192)
self.assertEqual(report[0][2], 5)
def test_reporthook_8193_bytes(self):
# Test on 8193 byte file. Should call reporthook only 3 times (once
# when the "network connection" is established, once for the next 8192
# bytes, and once for the last byte).
report = []
def hooktester(count, block_size, total_size, _report=report):
_report.append((count, block_size, total_size))
srcFileName = self.createNewTempFile("x" * 8193)
urllib.urlretrieve(self.constructLocalFileUrl(srcFileName),
test_support.TESTFN, hooktester)
self.assertEqual(len(report), 3)
self.assertEqual(report[0][1], 8192)
self.assertEqual(report[0][2], 8193)
class QuotingTests(unittest.TestCase):
"""Tests for urllib.quote() and urllib.quote_plus()
According to RFC 2396 ("Uniform Resource Identifiers), to escape a
character you write it as '%' + <2 character US-ASCII hex value>. The Python
code of ``'%' + hex(ord(<character>))[2:]`` escapes a character properly.
Case does not matter on the hex letters.
The various character sets specified are:
Reserved characters : ";/?:@&=+$,"
Have special meaning in URIs and must be escaped if not being used for
their special meaning
Data characters : letters, digits, and "-_.!~*'()"
Unreserved and do not need to be escaped; can be, though, if desired
Control characters : 0x00 - 0x1F, 0x7F
Have no use in URIs so must be escaped
space : 0x20
Must be escaped
Delimiters : '<>#%"'
Must be escaped
Unwise : "{}|\^[]`"
Must be escaped
"""
def test_never_quote(self):
# Make sure quote() does not quote letters, digits, and "_,.-"
do_not_quote = '' .join(["ABCDEFGHIJKLMNOPQRSTUVWXYZ",
"abcdefghijklmnopqrstuvwxyz",
"0123456789",
"_.-"])
result = urllib.quote(do_not_quote)
self.assertEqual(do_not_quote, result,
"using quote(): %s != %s" % (do_not_quote, result))
result = urllib.quote_plus(do_not_quote)
self.assertEqual(do_not_quote, result,
"using quote_plus(): %s != %s" % (do_not_quote, result))
def test_default_safe(self):
# Test '/' is default value for 'safe' parameter
self.assertEqual(urllib.quote.func_defaults[0], '/')
def test_safe(self):
# Test setting 'safe' parameter does what it should do
quote_by_default = "<>"
result = urllib.quote(quote_by_default, safe=quote_by_default)
self.assertEqual(quote_by_default, result,
"using quote(): %s != %s" % (quote_by_default, result))
result = urllib.quote_plus(quote_by_default, safe=quote_by_default)
self.assertEqual(quote_by_default, result,
"using quote_plus(): %s != %s" %
(quote_by_default, result))
def test_default_quoting(self):
# Make sure all characters that should be quoted are by default sans
# space (separate test for that).
should_quote = [chr(num) for num in range(32)] # For 0x00 - 0x1F
should_quote.append('<>#%"{}|\^[]`')
should_quote.append(chr(127)) # For 0x7F
should_quote = ''.join(should_quote)
for char in should_quote:
result = urllib.quote(char)
self.assertEqual(hexescape(char), result,
"using quote(): %s should be escaped to %s, not %s" %
(char, hexescape(char), result))
result = urllib.quote_plus(char)
self.assertEqual(hexescape(char), result,
"using quote_plus(): "
"%s should be escapes to %s, not %s" %
(char, hexescape(char), result))
del should_quote
partial_quote = "ab[]cd"
expected = "ab%5B%5Dcd"
result = urllib.quote(partial_quote)
self.assertEqual(expected, result,
"using quote(): %s != %s" % (expected, result))
result = urllib.quote_plus(partial_quote)
self.assertEqual(expected, result,
"using quote_plus(): %s != %s" % (expected, result))
def test_quoting_space(self):
# Make sure quote() and quote_plus() handle spaces as specified in
# their unique way
result = urllib.quote(' ')
self.assertEqual(result, hexescape(' '),
"using quote(): %s != %s" % (result, hexescape(' ')))
result = urllib.quote_plus(' ')
self.assertEqual(result, '+',
"using quote_plus(): %s != +" % result)
given = "a b cd e f"
expect = given.replace(' ', hexescape(' '))
result = urllib.quote(given)
self.assertEqual(expect, result,
"using quote(): %s != %s" % (expect, result))
expect = given.replace(' ', '+')
result = urllib.quote_plus(given)
self.assertEqual(expect, result,
"using quote_plus(): %s != %s" % (expect, result))
def test_quoting_plus(self):
self.assertEqual(urllib.quote_plus('alpha+beta gamma'),
'alpha%2Bbeta+gamma')
self.assertEqual(urllib.quote_plus('alpha+beta gamma', '+'),
'alpha+beta+gamma')
class UnquotingTests(unittest.TestCase):
"""Tests for unquote() and unquote_plus()
See the docstring for QuotingTests for details on quoting and such.
"""
def test_unquoting(self):
# Make sure unquoting of all ASCII values works
escape_list = []
for num in range(128):
given = hexescape(chr(num))
expect = chr(num)
result = urllib.unquote(given)
self.assertEqual(expect, result,
"using unquote(): %s != %s" % (expect, result))
result = urllib.unquote_plus(given)
self.assertEqual(expect, result,
"using unquote_plus(): %s != %s" %
(expect, result))
escape_list.append(given)
escape_string = ''.join(escape_list)
del escape_list
result = urllib.unquote(escape_string)
self.assertEqual(result.count('%'), 1,
"using quote(): not all characters escaped; %s" %
result)
result = urllib.unquote(escape_string)
self.assertEqual(result.count('%'), 1,
"using unquote(): not all characters escaped: "
"%s" % result)
def test_unquoting_badpercent(self):
# Test unquoting on bad percent-escapes
given = '%xab'
expect = given
result = urllib.unquote(given)
self.assertEqual(expect, result, "using unquote(): %r != %r"
% (expect, result))
given = '%x'
expect = given
result = urllib.unquote(given)
self.assertEqual(expect, result, "using unquote(): %r != %r"
% (expect, result))
given = '%'
expect = given
result = urllib.unquote(given)
self.assertEqual(expect, result, "using unquote(): %r != %r"
% (expect, result))
def test_unquoting_mixed_case(self):
# Test unquoting on mixed-case hex digits in the percent-escapes
given = '%Ab%eA'
expect = '\xab\xea'
result = urllib.unquote(given)
self.assertEqual(expect, result, "using unquote(): %r != %r"
% (expect, result))
def test_unquoting_parts(self):
# Make sure unquoting works when non-quoted characters are
# interspersed
given = 'ab%sd' % hexescape('c')
expect = "abcd"
result = urllib.unquote(given)
self.assertEqual(expect, result,
"using quote(): %s != %s" % (expect, result))
result = urllib.unquote_plus(given)
self.assertEqual(expect, result,
"using unquote_plus(): %s != %s" % (expect, result))
def test_unquoting_plus(self):
# Test difference between unquote() and unquote_plus()
given = "are+there+spaces..."
expect = given
result = urllib.unquote(given)
self.assertEqual(expect, result,
"using unquote(): %s != %s" % (expect, result))
expect = given.replace('+', ' ')
result = urllib.unquote_plus(given)
self.assertEqual(expect, result,
"using unquote_plus(): %s != %s" % (expect, result))
def test_unquote_with_unicode(self):
r = urllib.unquote(u'br%C3%BCckner_sapporo_20050930.doc')
self.assertEqual(r, u'br\xc3\xbcckner_sapporo_20050930.doc')
class urlencode_Tests(unittest.TestCase):
"""Tests for urlencode()"""
def help_inputtype(self, given, test_type):
"""Helper method for testing different input types.
'given' must lead to only the pairs:
* 1st, 1
* 2nd, 2
* 3rd, 3
The test cannot assume anything about order, since the docs make no
guarantee and the input may be a dictionary.
"""
expect_somewhere = ["1st=1", "2nd=2", "3rd=3"]
result = urllib.urlencode(given)
for expected in expect_somewhere:
self.assert_(expected in result,
"testing %s: %s not found in %s" %
(test_type, expected, result))
self.assertEqual(result.count('&'), 2,
"testing %s: expected 2 '&'s; got %s" %
(test_type, result.count('&')))
amp_location = result.index('&')
on_amp_left = result[amp_location - 1]
on_amp_right = result[amp_location + 1]
self.assert_(on_amp_left.isdigit() and on_amp_right.isdigit(),
"testing %s: '&' not located in proper place in %s" %
(test_type, result))
self.assertEqual(len(result), (5 * 3) + 2, #5 chars per thing and amps
"testing %s: "
"unexpected number of characters: %s != %s" %
(test_type, len(result), (5 * 3) + 2))
def test_using_mapping(self):
# Test passing in a mapping object as an argument.
self.help_inputtype({"1st":'1', "2nd":'2', "3rd":'3'},
"using dict as input type")
def test_using_sequence(self):
# Test passing in a sequence of two-item sequences as an argument.
self.help_inputtype([('1st', '1'), ('2nd', '2'), ('3rd', '3')],
"using sequence of two-item tuples as input")
def test_quoting(self):
# Make sure keys and values are quoted using quote_plus()
given = {"&":"="}
expect = "%s=%s" % (hexescape('&'), hexescape('='))
result = urllib.urlencode(given)
self.assertEqual(expect, result)
given = {"key name":"A bunch of pluses"}
expect = "key+name=A+bunch+of+pluses"
result = urllib.urlencode(given)
self.assertEqual(expect, result)
def test_doseq(self):
# Test that passing True for 'doseq' parameter works correctly
given = {'sequence':['1', '2', '3']}
expect = "sequence=%s" % urllib.quote_plus(str(['1', '2', '3']))
result = urllib.urlencode(given)
self.assertEqual(expect, result)
result = urllib.urlencode(given, True)
for value in given["sequence"]:
expect = "sequence=%s" % value
self.assert_(expect in result,
"%s not found in %s" % (expect, result))
self.assertEqual(result.count('&'), 2,
"Expected 2 '&'s, got %s" % result.count('&'))
class Pathname_Tests(unittest.TestCase):
"""Test pathname2url() and url2pathname()"""
def test_basic(self):
# Make sure simple tests pass
expected_path = os.path.join("parts", "of", "a", "path")
expected_url = "parts/of/a/path"
result = urllib.pathname2url(expected_path)
self.assertEqual(expected_url, result,
"pathname2url() failed; %s != %s" %
(result, expected_url))
result = urllib.url2pathname(expected_url)
self.assertEqual(expected_path, result,
"url2pathame() failed; %s != %s" %
(result, expected_path))
def test_quoting(self):
# Test that automatic quoting and unquoting work for pathname2url() and
# url2pathname() respectively
given = os.path.join("needs", "quot=ing", "here")
expect = "needs/%s/here" % urllib.quote("quot=ing")
result = urllib.pathname2url(given)
self.assertEqual(expect, result,
"pathname2url() failed; %s != %s" %
(expect, result))
expect = given
result = urllib.url2pathname(result)
self.assertEqual(expect, result,
"url2pathname() failed; %s != %s" %
(expect, result))
given = os.path.join("make sure", "using_quote")
expect = "%s/using_quote" % urllib.quote("make sure")
result = urllib.pathname2url(given)
self.assertEqual(expect, result,
"pathname2url() failed; %s != %s" %
(expect, result))
given = "make+sure/using_unquote"
expect = os.path.join("make+sure", "using_unquote")
result = urllib.url2pathname(given)
self.assertEqual(expect, result,
"url2pathname() failed; %s != %s" %
(expect, result))
class URLopener_Tests(unittest.TestCase):
"""Testcase to test the open method of URLopener class."""
def test_quoted_open(self):
class DummyURLopener(urllib.URLopener):
def open_spam(self, url):
return url
self.assertEqual(DummyURLopener().open(
'spam://example/ /'),'//example/%20/')
# test the safe characters are not quoted by urlopen
self.assertEqual(DummyURLopener().open(
"spam://c:|windows%/:=&?~#+!$,;'@()*[]|/path/"),
"//c:|windows%/:=&?~#+!$,;'@()*[]|/path/")
# Just commented them out.
# Can't really tell why they keep failing on Windows and SPARC.
# Everywhere else they work ok, but on those machines they sometimes
# fail in one of the tests, sometimes in another. I have a Linux box,
# and the tests pass there.
# If anybody has one of the problematic environments, please help!
# . Facundo
#
# def server(evt):
# import socket, time
# serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# serv.settimeout(3)
# serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# serv.bind(("", 9093))
# serv.listen(5)
# try:
# conn, addr = serv.accept()
# conn.send("1 Hola mundo\n")
# cantdata = 0
# while cantdata < 13:
# data = conn.recv(13-cantdata)
# cantdata += len(data)
# time.sleep(.3)
# conn.send("2 No more lines\n")
# conn.close()
# except socket.timeout:
# pass
# finally:
# serv.close()
# evt.set()
#
# class FTPWrapperTests(unittest.TestCase):
#
# def setUp(self):
# import ftplib, time, threading
# ftplib.FTP.port = 9093
# self.evt = threading.Event()
# threading.Thread(target=server, args=(self.evt,)).start()
# time.sleep(.1)
#
# def tearDown(self):
# self.evt.wait()
#
# def testBasic(self):
# # connects
# ftp = urllib.ftpwrapper("myuser", "mypass", "localhost", 9093, [])
# ftp.close()
#
# def testTimeoutNone(self):
# # global default timeout is ignored
# import socket
# self.assert_(socket.getdefaulttimeout() is None)
# socket.setdefaulttimeout(30)
# try:
# ftp = urllib.ftpwrapper("myuser", "mypass", "localhost", 9093, [])
# finally:
# socket.setdefaulttimeout(None)
# self.assertEqual(ftp.ftp.sock.gettimeout(), 30)
# ftp.close()
#
# def testTimeoutDefault(self):
# # global default timeout is used
# import socket
# self.assert_(socket.getdefaulttimeout() is None)
# socket.setdefaulttimeout(30)
# try:
# ftp = urllib.ftpwrapper("myuser", "mypass", "localhost", 9093, [])
# finally:
# socket.setdefaulttimeout(None)
# self.assertEqual(ftp.ftp.sock.gettimeout(), 30)
# ftp.close()
#
# def testTimeoutValue(self):
# ftp = urllib.ftpwrapper("myuser", "mypass", "localhost", 9093, [],
# timeout=30)
# self.assertEqual(ftp.ftp.sock.gettimeout(), 30)
# ftp.close()
def test_main():
import warnings
with warnings.catch_warnings():
warnings.filterwarnings('ignore', ".*urllib\.urlopen.*Python 3.0",
DeprecationWarning)
test_support.run_unittest(
urlopen_FileTests,
urlopen_HttpTests,
urlretrieve_FileTests,
ProxyTests,
QuotingTests,
UnquotingTests,
urlencode_Tests,
Pathname_Tests,
URLopener_Tests,
#FTPWrapperTests,
)
if __name__ == '__main__':
test_main()
import unittest
from test import test_support
import os
import socket
import StringIO
import urllib2
from urllib2 import Request, OpenerDirector
# XXX
# Request
# CacheFTPHandler (hard to write)
# parse_keqv_list, parse_http_list, HTTPDigestAuthHandler
class TrivialTests(unittest.TestCase):
def test_trivial(self):
# A couple trivial tests
self.assertRaises(ValueError, urllib2.urlopen, 'bogus url')
# XXX Name hacking to get this to work on Windows.
fname = os.path.abspath(urllib2.__file__).replace('\\', '/')
# And more hacking to get it to work on MacOS. This assumes
# urllib.pathname2url works, unfortunately...
if os.name == 'mac':
fname = '/' + fname.replace(':', '/')
elif os.name == 'riscos':
import string
fname = os.expand(fname)
fname = fname.translate(string.maketrans("/.", "./"))
if os.name == 'nt':
file_url = "file:///%s" % fname
else:
file_url = "file://%s" % fname
f = urllib2.urlopen(file_url)
buf = f.read()
f.close()
def test_parse_http_list(self):
tests = [('a,b,c', ['a', 'b', 'c']),
('path"o,l"og"i"cal, example', ['path"o,l"og"i"cal', 'example']),
('a, b, "c", "d", "e,f", g, h', ['a', 'b', '"c"', '"d"', '"e,f"', 'g', 'h']),
('a="b\\"c", d="e\\,f", g="h\\\\i"', ['a="b"c"', 'd="e,f"', 'g="h\\i"'])]
for string, list in tests:
self.assertEquals(urllib2.parse_http_list(string), list)
def test_request_headers_dict():
"""
The Request.headers dictionary is not a documented interface. It should
stay that way, because the complete set of headers is only accessible
through the .get_header(), .has_header(), .header_items() interface.
However, .headers pre-dates those methods, and so real code will be using
the dictionary.
The introduction in 2.4 of those methods was a mistake for the same reason:
code that previously saw all (urllib2 user)-provided headers in .headers
now sees only a subset (and the function interface is ugly and incomplete).
A better change would have been to replace .headers dict with a dict
subclass (or UserDict.DictMixin instance?) that preserved the .headers
interface and also provided access to the "unredirected" headers. It's
probably too late to fix that, though.
Check .capitalize() case normalization:
>>> url = "http://example.com"
>>> Request(url, headers={"Spam-eggs": "blah"}).headers["Spam-eggs"]
'blah'
>>> Request(url, headers={"spam-EggS": "blah"}).headers["Spam-eggs"]
'blah'
Currently, Request(url, "Spam-eggs").headers["Spam-Eggs"] raises KeyError,
but that could be changed in future.
"""
def test_request_headers_methods():
"""
Note the case normalization of header names here, to .capitalize()-case.
This should be preserved for backwards-compatibility. (In the HTTP case,
normalization to .title()-case is done by urllib2 before sending headers to
httplib).
>>> url = "http://example.com"
>>> r = Request(url, headers={"Spam-eggs": "blah"})
>>> r.has_header("Spam-eggs")
True
>>> r.header_items()
[('Spam-eggs', 'blah')]
>>> r.add_header("Foo-Bar", "baz")
>>> items = r.header_items()
>>> items.sort()
>>> items
[('Foo-bar', 'baz'), ('Spam-eggs', 'blah')]
Note that e.g. r.has_header("spam-EggS") is currently False, and
r.get_header("spam-EggS") returns None, but that could be changed in
future.
>>> r.has_header("Not-there")
False
>>> print r.get_header("Not-there")
None
>>> r.get_header("Not-there", "default")
'default'
"""
def test_password_manager(self):
"""
>>> mgr = urllib2.HTTPPasswordMgr()
>>> add = mgr.add_password
>>> add("Some Realm", "http://example.com/", "joe", "password")
>>> add("Some Realm", "http://example.com/ni", "ni", "ni")
>>> add("c", "http://example.com/foo", "foo", "ni")
>>> add("c", "http://example.com/bar", "bar", "nini")
>>> add("b", "http://example.com/", "first", "blah")
>>> add("b", "http://example.com/", "second", "spam")
>>> add("a", "http://example.com", "1", "a")
>>> add("Some Realm", "http://c.example.com:3128", "3", "c")
>>> add("Some Realm", "d.example.com", "4", "d")
>>> add("Some Realm", "e.example.com:3128", "5", "e")
>>> mgr.find_user_password("Some Realm", "example.com")
('joe', 'password')
>>> mgr.find_user_password("Some Realm", "http://example.com")
('joe', 'password')
>>> mgr.find_user_password("Some Realm", "http://example.com/")
('joe', 'password')
>>> mgr.find_user_password("Some Realm", "http://example.com/spam")
('joe', 'password')
>>> mgr.find_user_password("Some Realm", "http://example.com/spam/spam")
('joe', 'password')
>>> mgr.find_user_password("c", "http://example.com/foo")
('foo', 'ni')
>>> mgr.find_user_password("c", "http://example.com/bar")
('bar', 'nini')
Actually, this is really undefined ATM
## Currently, we use the highest-level path where more than one match:
## >>> mgr.find_user_password("Some Realm", "http://example.com/ni")
## ('joe', 'password')
Use latest add_password() in case of conflict:
>>> mgr.find_user_password("b", "http://example.com/")
('second', 'spam')
No special relationship between a.example.com and example.com:
>>> mgr.find_user_password("a", "http://example.com/")
('1', 'a')
>>> mgr.find_user_password("a", "http://a.example.com/")
(None, None)
Ports:
>>> mgr.find_user_password("Some Realm", "c.example.com")
(None, None)
>>> mgr.find_user_password("Some Realm", "c.example.com:3128")
('3', 'c')
>>> mgr.find_user_password("Some Realm", "http://c.example.com:3128")
('3', 'c')
>>> mgr.find_user_password("Some Realm", "d.example.com")
('4', 'd')
>>> mgr.find_user_password("Some Realm", "e.example.com:3128")
('5', 'e')
"""
pass
def test_password_manager_default_port(self):
"""
>>> mgr = urllib2.HTTPPasswordMgr()
>>> add = mgr.add_password
The point to note here is that we can't guess the default port if there's
no scheme. This applies to both add_password and find_user_password.
>>> add("f", "http://g.example.com:80", "10", "j")
>>> add("g", "http://h.example.com", "11", "k")
>>> add("h", "i.example.com:80", "12", "l")
>>> add("i", "j.example.com", "13", "m")
>>> mgr.find_user_password("f", "g.example.com:100")
(None, None)
>>> mgr.find_user_password("f", "g.example.com:80")
('10', 'j')
>>> mgr.find_user_password("f", "g.example.com")
(None, None)
>>> mgr.find_user_password("f", "http://g.example.com:100")
(None, None)
>>> mgr.find_user_password("f", "http://g.example.com:80")
('10', 'j')
>>> mgr.find_user_password("f", "http://g.example.com")
('10', 'j')
>>> mgr.find_user_password("g", "h.example.com")
('11', 'k')
>>> mgr.find_user_password("g", "h.example.com:80")
('11', 'k')
>>> mgr.find_user_password("g", "http://h.example.com:80")
('11', 'k')
>>> mgr.find_user_password("h", "i.example.com")
(None, None)
>>> mgr.find_user_password("h", "i.example.com:80")
('12', 'l')
>>> mgr.find_user_password("h", "http://i.example.com:80")
('12', 'l')
>>> mgr.find_user_password("i", "j.example.com")
('13', 'm')
>>> mgr.find_user_password("i", "j.example.com:80")
(None, None)
>>> mgr.find_user_password("i", "http://j.example.com")
('13', 'm')
>>> mgr.find_user_password("i", "http://j.example.com:80")
(None, None)
"""
class MockOpener:
addheaders = []
def open(self, req, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.req, self.data, self.timeout = req, data, timeout
def error(self, proto, *args):
self.proto, self.args = proto, args
class MockFile:
def read(self, count=None): pass
def readline(self, count=None): pass
def close(self): pass
class MockHeaders(dict):
def getheaders(self, name):
return self.values()
class MockResponse(StringIO.StringIO):
def __init__(self, code, msg, headers, data, url=None):
StringIO.StringIO.__init__(self, data)
self.code, self.msg, self.headers, self.url = code, msg, headers, url
def info(self):
return self.headers
def geturl(self):
return self.url
class MockCookieJar:
def add_cookie_header(self, request):
self.ach_req = request
def extract_cookies(self, response, request):
self.ec_req, self.ec_r = request, response
class FakeMethod:
def __init__(self, meth_name, action, handle):
self.meth_name = meth_name
self.handle = handle
self.action = action
def __call__(self, *args):
return self.handle(self.meth_name, self.action, *args)
class MockHTTPResponse:
def __init__(self, fp, msg, status, reason):
self.fp = fp
self.msg = msg
self.status = status
self.reason = reason
def read(self):
return ''
class MockHTTPClass:
def __init__(self):
self.req_headers = []
self.data = None
self.raise_on_endheaders = False
self._tunnel_headers = {}
def __call__(self, host, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.host = host
self.timeout = timeout
return self
def set_debuglevel(self, level):
self.level = level
def _set_tunnel(self, host, port=None, headers=None):
self._tunnel_host = host
self._tunnel_port = port
if headers:
self._tunnel_headers = headers
else:
self._tunnel_headers.clear()
def request(self, method, url, body=None, headers=None):
self.method = method
self.selector = url
if headers is not None:
self.req_headers += headers.items()
self.req_headers.sort()
if body:
self.data = body
if self.raise_on_endheaders:
import socket
raise socket.error()
def getresponse(self):
return MockHTTPResponse(MockFile(), {}, 200, "OK")
class MockHandler:
# useful for testing handler machinery
# see add_ordered_mock_handlers() docstring
handler_order = 500
def __init__(self, methods):
self._define_methods(methods)
def _define_methods(self, methods):
for spec in methods:
if len(spec) == 2: name, action = spec
else: name, action = spec, None
meth = FakeMethod(name, action, self.handle)
setattr(self.__class__, name, meth)
def handle(self, fn_name, action, *args, **kwds):
self.parent.calls.append((self, fn_name, args, kwds))
if action is None:
return None
elif action == "return self":
return self
elif action == "return response":
res = MockResponse(200, "OK", {}, "")
return res
elif action == "return request":
return Request("http://blah/")
elif action.startswith("error"):
code = action[action.rfind(" ")+1:]
try:
code = int(code)
except ValueError:
pass
res = MockResponse(200, "OK", {}, "")
return self.parent.error("http", args[0], res, code, "", {})
elif action == "raise":
raise urllib2.URLError("blah")
assert False
def close(self): pass
def add_parent(self, parent):
self.parent = parent
self.parent.calls = []
def __lt__(self, other):
if not hasattr(other, "handler_order"):
# No handler_order, leave in original order. Yuck.
return True
return self.handler_order < other.handler_order
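# MockHandler above builds its methods dynamically: _define_methods() attaches
# a FakeMethod per spec entry, and every call is recorded in parent.calls so
# tests can assert on the dispatch order chosen by OpenerDirector. The helper
# below wires several such handlers into an opener in a deterministic order.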
def add_ordered_mock_handlers(opener, meth_spec):
"""Create MockHandlers and add them to an OpenerDirector.
meth_spec: list of lists of tuples and strings defining methods to define
on handlers. eg:
[["http_error", "ftp_open"], ["http_open"]]
defines methods .http_error() and .ftp_open() on one handler, and
.http_open() on another. These methods just record their arguments and
return None. Using a tuple instead of a string causes the method to
perform some action (see MockHandler.handle()), eg:
[["http_error"], [("http_open", "return request")]]
defines .http_error() on one handler (which simply returns None), and
.http_open() on another handler, which returns a Request object.
"""
handlers = []
count = 0
for meths in meth_spec:
class MockHandlerSubclass(MockHandler): pass
h = MockHandlerSubclass(meths)
h.handler_order += count
h.add_parent(opener)
count = count + 1
handlers.append(h)
opener.add_handler(h)
return handlers
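# Small usage sketch (hypothetical helper, assuming the meth_spec format from
# the docstring above): the first handler defines http_open()/ftp_open() that
# merely record their calls, the second defines an http_open() that returns a
# canned response, so it ends up handling the request.
def _add_ordered_mock_handlers_sketch():
    o = OpenerDirector()
    meth_spec = [
        ["http_open", "ftp_open"],
        [("http_open", "return response")],
    ]
    handlers = add_ordered_mock_handlers(o, meth_spec)
    r = o.open("http://example.com/")      # answered by the second handler
    return handlers, r, o.calls            # o.calls records every dispatched call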
def build_test_opener(*handler_instances):
opener = OpenerDirector()
for h in handler_instances:
opener.add_handler(h)
return opener
class MockHTTPHandler(urllib2.BaseHandler):
# useful for testing redirections and auth
# sends supplied headers and code as first response
# sends 200 OK as second response
def __init__(self, code, headers):
self.code = code
self.headers = headers
self.reset()
def reset(self):
self._count = 0
self.requests = []
def http_open(self, req):
import mimetools, httplib, copy
from StringIO import StringIO
self.requests.append(copy.deepcopy(req))
if self._count == 0:
self._count = self._count + 1
name = httplib.responses[self.code]
msg = mimetools.Message(StringIO(self.headers))
return self.parent.error(
"http", req, MockFile(), self.code, name, msg)
else:
self.req = req
msg = mimetools.Message(StringIO("\r\n\r\n"))
return MockResponse(200, "OK", msg, "", req.get_full_url())
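# Hypothetical sketch of the two-step behaviour described above: the first
# open() gets the supplied status and headers (here a 302 with a Location
# header), the second gets a plain 200 OK, so a redirect handler in the chain
# ends up following the redirect. The helper name and URLs are illustrative.
def _mock_http_handler_sketch():
    hh = MockHTTPHandler(302, "Location: http://example.com/elsewhere\r\n\r\n")
    opener = build_test_opener(hh, urllib2.HTTPDefaultErrorHandler(),
                               urllib2.HTTPRedirectHandler())
    response = opener.open("http://example.com/")
    # hh.requests holds copies of both the original and the redirected request
    return response.geturl(), [req.get_full_url() for req in hh.requests]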
class MockHTTPSHandler(urllib2.AbstractHTTPHandler):
# Useful for testing the Proxy-Authorization request by verifying the
# properties of httpconn
def __init__(self):
urllib2.AbstractHTTPHandler.__init__(self)
self.httpconn = MockHTTPClass()
def https_open(self, req):
return self.do_open(self.httpconn, req)
class MockPasswordManager:
def add_password(self, realm, uri, user, password):
self.realm = realm
self.url = uri
self.user = user
self.password = password
def find_user_password(self, realm, authuri):
self.target_realm = realm
self.target_url = authuri
return self.user, self.password
class OpenerDirectorTests(unittest.TestCase):
def test_add_non_handler(self):
class NonHandler(object):
pass
self.assertRaises(TypeError,
OpenerDirector().add_handler, NonHandler())
def test_badly_named_methods(self):
# test work-around for three methods that accidentally follow the
# naming conventions for handler methods
# (*_open() / *_request() / *_response())
# These used to call the accidentally-named methods, causing a
# TypeError in real code; here, returning self from these mock
# methods would either cause no exception, or AttributeError.
from urllib2 import URLError
o = OpenerDirector()
meth_spec = [
[("do_open", "return self"), ("proxy_open", "return self")],
[("redirect_request", "return self")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
o.add_handler(urllib2.UnknownHandler())
for scheme in "do", "proxy", "redirect":
self.assertRaises(URLError, o.open, scheme+"://example.com/")
def test_handled(self):
# handler returning non-None means no more handlers will be called
o = OpenerDirector()
meth_spec = [
["http_open", "ftp_open", "http_error_302"],
["ftp_open"],
[("http_open", "return self")],
[("http_open", "return self")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
r = o.open(req)
# Second .http_open() gets called, third doesn't, since second returned
# non-None. Handlers without .http_open() never get any methods called
# on them.
# In fact, second mock handler defining .http_open() returns self
# (instead of response), which becomes the OpenerDirector's return
# value.
self.assertEqual(r, handlers[2])
calls = [(handlers[0], "http_open"), (handlers[2], "http_open")]
for expected, got in zip(calls, o.calls):
handler, name, args, kwds = got
self.assertEqual((handler, name), expected)
self.assertEqual(args, (req,))
def test_handler_order(self):
o = OpenerDirector()
handlers = []
for meths, handler_order in [
([("http_open", "return self")], 500),
(["http_open"], 0),
]:
class MockHandlerSubclass(MockHandler): pass
h = MockHandlerSubclass(meths)
h.handler_order = handler_order
handlers.append(h)
o.add_handler(h)
r = o.open("http://example.com/")
# handlers called in reverse order, thanks to their sort order
self.assertEqual(o.calls[0][0], handlers[1])
self.assertEqual(o.calls[1][0], handlers[0])
def test_raise(self):
# raising URLError stops processing of request
o = OpenerDirector()
meth_spec = [
[("http_open", "raise")],
[("http_open", "return self")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
self.assertRaises(urllib2.URLError, o.open, req)
self.assertEqual(o.calls, [(handlers[0], "http_open", (req,), {})])
## def test_error(self):
## # XXX this doesn't actually seem to be used in standard library,
## # but should really be tested anyway...
def test_http_error(self):
# XXX http_error_default
# http errors are a special case
o = OpenerDirector()
meth_spec = [
[("http_open", "error 302")],
[("http_error_400", "raise"), "http_open"],
[("http_error_302", "return response"), "http_error_303",
"http_error"],
[("http_error_302")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
class Unknown:
def __eq__(self, other): return True
req = Request("http://example.com/")
r = o.open(req)
assert len(o.calls) == 2
calls = [(handlers[0], "http_open", (req,)),
(handlers[2], "http_error_302",
(req, Unknown(), 302, "", {}))]
for expected, got in zip(calls, o.calls):
handler, method_name, args = expected
self.assertEqual((handler, method_name), got[:2])
self.assertEqual(args, got[2])
def test_processors(self):
# *_request / *_response methods get called appropriately
o = OpenerDirector()
meth_spec = [
[("http_request", "return request"),
("http_response", "return response")],
[("http_request", "return request"),
("http_response", "return response")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
r = o.open(req)
# processor methods are called on *all* handlers that define them,
# not just the first handler that handles the request
calls = [
(handlers[0], "http_request"), (handlers[1], "http_request"),
(handlers[0], "http_response"), (handlers[1], "http_response")]
for i, (handler, name, args, kwds) in enumerate(o.calls):
if i < 2:
# *_request
self.assertEqual((handler, name), calls[i])
self.assertEqual(len(args), 1)
self.assert_(isinstance(args[0], Request))
else:
# *_response
self.assertEqual((handler, name), calls[i])
self.assertEqual(len(args), 2)
self.assert_(isinstance(args[0], Request))
# response from opener.open is None, because there's no
# handler that defines http_open to handle it
self.assert_(args[1] is None or
isinstance(args[1], MockResponse))
def sanepathname2url(path):
import urllib
urlpath = urllib.pathname2url(path)
if os.name == "nt" and urlpath.startswith("///"):
urlpath = urlpath[2:]
# XXX don't ask me about the mac...
return urlpath
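# Hypothetical example of the helper above, mirroring what test_file() below
# does with it: turn a local filename into a file:// URL that FileHandler
# will accept.
def _sanepathname2url_sketch():
    path = os.path.abspath(test_support.TESTFN)
    return "file://localhost" + sanepathname2url(path)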
class HandlerTests(unittest.TestCase):
def test_ftp(self):
class MockFTPWrapper:
def __init__(self, data): self.data = data
def retrfile(self, filename, filetype):
self.filename, self.filetype = filename, filetype
return StringIO.StringIO(self.data), len(self.data)
class NullFTPHandler(urllib2.FTPHandler):
def __init__(self, data): self.data = data
def connect_ftp(self, user, passwd, host, port, dirs,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.user, self.passwd = user, passwd
self.host, self.port = host, port
self.dirs = dirs
self.ftpwrapper = MockFTPWrapper(self.data)
return self.ftpwrapper
import ftplib
data = "rheum rhaponicum"
h = NullFTPHandler(data)
o = h.parent = MockOpener()
for url, host, port, type_, dirs, filename, mimetype in [
("ftp://localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://localhost:80/foo/bar/",
"localhost", 80, "D",
["foo", "bar"], "", None),
("ftp://localhost/baz.gif;type=a",
"localhost", ftplib.FTP_PORT, "A",
[], "baz.gif", None), # XXX really this should guess image/gif
]:
req = Request(url)
req.timeout = None
r = h.ftp_open(req)
# ftp authentication not yet implemented by FTPHandler
self.assert_(h.user == h.passwd == "")
self.assertEqual(h.host, socket.gethostbyname(host))
self.assertEqual(h.port, port)
self.assertEqual(h.dirs, dirs)
self.assertEqual(h.ftpwrapper.filename, filename)
self.assertEqual(h.ftpwrapper.filetype, type_)
headers = r.info()
self.assertEqual(headers.get("Content-type"), mimetype)
self.assertEqual(int(headers["Content-length"]), len(data))
def test_file(self):
import rfc822, socket
h = urllib2.FileHandler()
o = h.parent = MockOpener()
TESTFN = test_support.TESTFN
urlpath = sanepathname2url(os.path.abspath(TESTFN))
towrite = "hello, world\n"
urls = [
"file://localhost%s" % urlpath,
"file://%s" % urlpath,
"file://%s%s" % (socket.gethostbyname('localhost'), urlpath),
]
try:
localaddr = socket.gethostbyname(socket.gethostname())
except socket.gaierror:
localaddr = ''
if localaddr:
urls.append("file://%s%s" % (localaddr, urlpath))
for url in urls:
f = open(TESTFN, "wb")
try:
try:
f.write(towrite)
finally:
f.close()
r = h.file_open(Request(url))
try:
data = r.read()
headers = r.info()
respurl = r.geturl()
finally:
r.close()
stats = os.stat(TESTFN)
modified = rfc822.formatdate(stats.st_mtime)
finally:
os.remove(TESTFN)
self.assertEqual(data, towrite)
self.assertEqual(headers["Content-type"], "text/plain")
self.assertEqual(headers["Content-length"], "13")
self.assertEqual(headers["Last-modified"], modified)
self.assertEqual(respurl, url)
for url in [
"file://localhost:80%s" % urlpath,
"file:///file_does_not_exist.txt",
"file://%s:80%s/%s" % (socket.gethostbyname('localhost'),
os.getcwd(), TESTFN),
"file://somerandomhost.ontheinternet.com%s/%s" %
(os.getcwd(), TESTFN),
]:
try:
f = open(TESTFN, "wb")
try:
f.write(towrite)
finally:
f.close()
self.assertRaises(urllib2.URLError,
h.file_open, Request(url))
finally:
os.remove(TESTFN)
h = urllib2.FileHandler()
o = h.parent = MockOpener()
# XXXX why does // mean ftp (and /// mean not ftp!), and where
# is file: scheme specified? I think this is really a bug, and
# what was intended was to distinguish between URLs like:
# file:/blah.txt (a file)
# file://localhost/blah.txt (a file)
# file:///blah.txt (a file)
# file://ftp.example.com/blah.txt (an ftp URL)
for url, ftp in [
("file://ftp.example.com//foo.txt", True),
("file://ftp.example.com///foo.txt", False),
# XXXX bug: fails with OSError, should be URLError
("file://ftp.example.com/foo.txt", False),
("file://somehost//foo/something.txt", True),
("file://localhost//foo/something.txt", False),
]:
req = Request(url)
try:
h.file_open(req)
# XXXX remove OSError when bug fixed
except (urllib2.URLError, OSError):
self.assert_(not ftp)
else:
self.assert_(o.req is req)
self.assertEqual(req.type, "ftp")
self.assertEqual(req.type == "ftp", ftp)
def test_http(self):
h = urllib2.AbstractHTTPHandler()
o = h.parent = MockOpener()
url = "http://example.com/"
for method, data in [("GET", None), ("POST", "blah")]:
req = Request(url, data, {"Foo": "bar"})
req.timeout = None
req.add_unredirected_header("Spam", "eggs")
http = MockHTTPClass()
r = h.do_open(http, req)
# result attributes
r.read; r.readline # wrapped MockFile methods
r.info; r.geturl # addinfourl methods
r.code, r.msg == 200, "OK" # added from MockHTTPClass.getreply()
hdrs = r.info()
hdrs.get; hdrs.has_key # r.info() gives dict from .getreply()
self.assertEqual(r.geturl(), url)
self.assertEqual(http.host, "example.com")
self.assertEqual(http.level, 0)
self.assertEqual(http.method, method)
self.assertEqual(http.selector, "/")
self.assertEqual(http.req_headers,
[("Connection", "close"),
("Foo", "bar"), ("Spam", "eggs")])
self.assertEqual(http.data, data)
# check socket.error converted to URLError
http.raise_on_endheaders = True
self.assertRaises(urllib2.URLError, h.do_open, http, req)
# check adding of standard headers
o.addheaders = [("Spam", "eggs")]
for data in "", None: # POST, GET
req = Request("http://example.com/", data)
r = MockResponse(200, "OK", {}, "")
newreq = h.do_request_(req)
if data is None: # GET
self.assert_("Content-length" not in req.unredirected_hdrs)
self.assert_("Content-type" not in req.unredirected_hdrs)
else: # POST
self.assertEqual(req.unredirected_hdrs["Content-length"], "0")
self.assertEqual(req.unredirected_hdrs["Content-type"],
"application/x-www-form-urlencoded")
# XXX the details of Host could be better tested
self.assertEqual(req.unredirected_hdrs["Host"], "example.com")
self.assertEqual(req.unredirected_hdrs["Spam"], "eggs")
# don't clobber existing headers
req.add_unredirected_header("Content-length", "foo")
req.add_unredirected_header("Content-type", "bar")
req.add_unredirected_header("Host", "baz")
req.add_unredirected_header("Spam", "foo")
newreq = h.do_request_(req)
self.assertEqual(req.unredirected_hdrs["Content-length"], "foo")
self.assertEqual(req.unredirected_hdrs["Content-type"], "bar")
self.assertEqual(req.unredirected_hdrs["Host"], "baz")
self.assertEqual(req.unredirected_hdrs["Spam"], "foo")
def test_http_doubleslash(self):
# Checks that the presence of an unnecessary double slash in a url doesn't break anything
# Previously, a double slash directly after the host could cause incorrect parsing of the url
h = urllib2.AbstractHTTPHandler()
o = h.parent = MockOpener()
data = ""
ds_urls = [
"http://example.com/foo/bar/baz.html",
"http://example.com//foo/bar/baz.html",
"http://example.com/foo//bar/baz.html",
"http://example.com/foo/bar//baz.html",
]
for ds_url in ds_urls:
ds_req = Request(ds_url, data)
# Check whether host is determined correctly if there is no proxy
np_ds_req = h.do_request_(ds_req)
self.assertEqual(np_ds_req.unredirected_hdrs["Host"],"example.com")
# Check whether host is determined correctly if there is a proxy
ds_req.set_proxy("someproxy:3128",None)
p_ds_req = h.do_request_(ds_req)
self.assertEqual(p_ds_req.unredirected_hdrs["Host"],"example.com")
def test_errors(self):
h = urllib2.HTTPErrorProcessor()
o = h.parent = MockOpener()
url = "http://example.com/"
req = Request(url)
# all 2xx are passed through
r = MockResponse(200, "OK", {}, "", url)
newr = h.http_response(req, r)
self.assert_(r is newr)
self.assert_(not hasattr(o, "proto")) # o.error not called
r = MockResponse(202, "Accepted", {}, "", url)
newr = h.http_response(req, r)
self.assert_(r is newr)
self.assert_(not hasattr(o, "proto")) # o.error not called
r = MockResponse(206, "Partial content", {}, "", url)
newr = h.http_response(req, r)
self.assert_(r is newr)
self.assert_(not hasattr(o, "proto")) # o.error not called
# anything else calls o.error (and MockOpener returns None, here)
r = MockResponse(502, "Bad gateway", {}, "", url)
self.assert_(h.http_response(req, r) is None)
self.assertEqual(o.proto, "http") # o.error called
self.assertEqual(o.args, (req, r, 502, "Bad gateway", {}))
def test_cookies(self):
cj = MockCookieJar()
h = urllib2.HTTPCookieProcessor(cj)
o = h.parent = MockOpener()
req = Request("http://example.com/")
r = MockResponse(200, "OK", {}, "")
newreq = h.http_request(req)
self.assert_(cj.ach_req is req is newreq)
self.assertEquals(req.get_origin_req_host(), "example.com")
self.assert_(not req.is_unverifiable())
newr = h.http_response(req, r)
self.assert_(cj.ec_req is req)
self.assert_(cj.ec_r is r is newr)
def test_redirect(self):
from_url = "http://example.com/a.html"
to_url = "http://example.com/b.html"
h = urllib2.HTTPRedirectHandler()
o = h.parent = MockOpener()
# ordinary redirect behaviour
for code in 301, 302, 303, 307:
for data in None, "blah\nblah\n":
method = getattr(h, "http_error_%s" % code)
req = Request(from_url, data)
req.add_header("Nonsense", "viking=withhold")
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
if data is not None:
req.add_header("Content-Length", str(len(data)))
req.add_unredirected_header("Spam", "spam")
try:
method(req, MockFile(), code, "Blah",
MockHeaders({"location": to_url}))
except urllib2.HTTPError:
# 307 in response to POST requires user OK
self.assert_(code == 307 and data is not None)
self.assertEqual(o.req.get_full_url(), to_url)
try:
self.assertEqual(o.req.get_method(), "GET")
except AttributeError:
self.assert_(not o.req.has_data())
# now it's a GET, there should not be headers regarding content
# (possibly dragged from before being a POST)
headers = [x.lower() for x in o.req.headers]
self.assertTrue("content-length" not in headers)
self.assertTrue("content-type" not in headers)
self.assertEqual(o.req.headers["Nonsense"],
"viking=withhold")
self.assert_("Spam" not in o.req.headers)
self.assert_("Spam" not in o.req.unredirected_hdrs)
# loop detection
req = Request(from_url)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
def redirect(h, req, url=to_url):
h.http_error_302(req, MockFile(), 302, "Blah",
MockHeaders({"location": url}))
# Note that the *original* request shares the same record of
# redirections with the sub-requests caused by the redirections.
# detect infinite loop redirect of a URL to itself
req = Request(from_url, origin_req_host="example.com")
count = 0
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
try:
while 1:
redirect(h, req, "http://example.com/")
count = count + 1
except urllib2.HTTPError:
# don't stop until max_repeats, because cookies may introduce state
self.assertEqual(count, urllib2.HTTPRedirectHandler.max_repeats)
# detect endless non-repeating chain of redirects
req = Request(from_url, origin_req_host="example.com")
count = 0
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
try:
while 1:
redirect(h, req, "http://example.com/%d" % count)
count = count + 1
except urllib2.HTTPError:
self.assertEqual(count,
urllib2.HTTPRedirectHandler.max_redirections)
def test_invalid_redirect(self):
from_url = "http://example.com/a.html"
valid_schemes = ['http', 'https', 'ftp']
invalid_schemes = ['file', 'imap', 'ldap']
schemeless_url = "example.com/b.html"
h = urllib2.HTTPRedirectHandler()
o = h.parent = MockOpener()
req = Request(from_url)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
for scheme in invalid_schemes:
invalid_url = scheme + '://' + schemeless_url
self.assertRaises(urllib2.HTTPError, h.http_error_302,
req, MockFile(), 302, "Security Loophole",
MockHeaders({"location": invalid_url}))
for scheme in valid_schemes:
valid_url = scheme + '://' + schemeless_url
h.http_error_302(req, MockFile(), 302, "That's fine",
MockHeaders({"location": valid_url}))
self.assertEqual(o.req.get_full_url(), valid_url)
def test_cookie_redirect(self):
# cookies shouldn't leak into redirected requests
from cookielib import CookieJar
from test_cookielib import interact_netscape
cj = CookieJar()
interact_netscape(cj, "http://www.example.com/", "spam=eggs")
hh = MockHTTPHandler(302, "Location: http://www.cracker.com/\r\n\r\n")
hdeh = urllib2.HTTPDefaultErrorHandler()
hrh = urllib2.HTTPRedirectHandler()
cp = urllib2.HTTPCookieProcessor(cj)
o = build_test_opener(hh, hdeh, hrh, cp)
o.open("http://www.example.com/")
self.assert_(not hh.req.has_header("Cookie"))
def test_proxy(self):
o = OpenerDirector()
ph = urllib2.ProxyHandler(dict(http="proxy.example.com:3128"))
o.add_handler(ph)
meth_spec = [
[("http_open", "return response")]
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://acme.example.com/")
self.assertEqual(req.get_host(), "acme.example.com")
r = o.open(req)
self.assertEqual(req.get_host(), "proxy.example.com:3128")
self.assertEqual([(handlers[0], "http_open")],
[tup[0:2] for tup in o.calls])
def test_proxy_no_proxy(self):
os.environ['no_proxy'] = 'python.org'
o = OpenerDirector()
ph = urllib2.ProxyHandler(dict(http="proxy.example.com"))
o.add_handler(ph)
req = Request("http://www.perl.org/")
self.assertEqual(req.get_host(), "www.perl.org")
r = o.open(req)
self.assertEqual(req.get_host(), "proxy.example.com")
req = Request("http://www.python.org")
self.assertEqual(req.get_host(), "www.python.org")
r = o.open(req)
self.assertEqual(req.get_host(), "www.python.org")
del os.environ['no_proxy']
def test_proxy_https(self):
o = OpenerDirector()
ph = urllib2.ProxyHandler(dict(https='proxy.example.com:3128'))
o.add_handler(ph)
meth_spec = [
[("https_open","return response")]
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("https://www.example.com/")
self.assertEqual(req.get_host(), "www.example.com")
r = o.open(req)
self.assertEqual(req.get_host(), "proxy.example.com:3128")
self.assertEqual([(handlers[0], "https_open")],
[tup[0:2] for tup in o.calls])
def test_proxy_https_proxy_authorization(self):
o = OpenerDirector()
ph = urllib2.ProxyHandler(dict(https='proxy.example.com:3128'))
o.add_handler(ph)
https_handler = MockHTTPSHandler()
o.add_handler(https_handler)
req = Request("https://www.example.com/")
req.add_header("Proxy-Authorization","FooBar")
req.add_header("User-Agent","Grail")
self.assertEqual(req.get_host(), "www.example.com")
self.assertTrue(req._tunnel_host is None)
r = o.open(req)
# Verify that Proxy-Authorization gets tunneled to the request:
# the connection's req_headers do not include the Proxy-Authorization
# header, but the request itself does.
self.assertFalse(("Proxy-Authorization","FooBar") in
https_handler.httpconn.req_headers)
self.assertTrue(("User-Agent","Grail") in
https_handler.httpconn.req_headers)
self.assertFalse(req._tunnel_host is None)
self.assertEqual(req.get_host(), "proxy.example.com:3128")
self.assertEqual(req.get_header("Proxy-authorization"),"FooBar")
def test_basic_auth(self, quote_char='"'):
opener = OpenerDirector()
password_manager = MockPasswordManager()
auth_handler = urllib2.HTTPBasicAuthHandler(password_manager)
realm = "ACME Widget Store"
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: Basic realm=%s%s%s\r\n\r\n' %
(quote_char, realm, quote_char) )
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
self._test_basic_auth(opener, auth_handler, "Authorization",
realm, http_handler, password_manager,
"http://acme.example.com/protected",
"http://acme.example.com/protected",
)
def test_basic_auth_with_single_quoted_realm(self):
self.test_basic_auth(quote_char="'")
def test_proxy_basic_auth(self):
opener = OpenerDirector()
ph = urllib2.ProxyHandler(dict(http="proxy.example.com:3128"))
opener.add_handler(ph)
password_manager = MockPasswordManager()
auth_handler = urllib2.ProxyBasicAuthHandler(password_manager)
realm = "ACME Networks"
http_handler = MockHTTPHandler(
407, 'Proxy-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
self._test_basic_auth(opener, auth_handler, "Proxy-authorization",
realm, http_handler, password_manager,
"http://acme.example.com:3128/protected",
"proxy.example.com:3128",
)
def test_basic_and_digest_auth_handlers(self):
# HTTPDigestAuthHandler threw an exception if it couldn't handle a 40*
# response (http://python.org/sf/1479302), where it should instead
# return None to allow another handler (especially
# HTTPBasicAuthHandler) to handle the response.
# Also (http://python.org/sf/14797027, RFC 2617 section 1.2), we must
# try digest first (since it's the strongest auth scheme), so we record
# order of calls here to check digest comes first:
class RecordingOpenerDirector(OpenerDirector):
def __init__(self):
OpenerDirector.__init__(self)
self.recorded = []
def record(self, info):
self.recorded.append(info)
class TestDigestAuthHandler(urllib2.HTTPDigestAuthHandler):
def http_error_401(self, *args, **kwds):
self.parent.record("digest")
urllib2.HTTPDigestAuthHandler.http_error_401(self,
*args, **kwds)
class TestBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
def http_error_401(self, *args, **kwds):
self.parent.record("basic")
urllib2.HTTPBasicAuthHandler.http_error_401(self,
*args, **kwds)
opener = RecordingOpenerDirector()
password_manager = MockPasswordManager()
digest_handler = TestDigestAuthHandler(password_manager)
basic_handler = TestBasicAuthHandler(password_manager)
realm = "ACME Networks"
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(basic_handler)
opener.add_handler(digest_handler)
opener.add_handler(http_handler)
# check basic auth isn't blocked by digest handler failing
self._test_basic_auth(opener, basic_handler, "Authorization",
realm, http_handler, password_manager,
"http://acme.example.com/protected",
"http://acme.example.com/protected",
)
# check digest was tried before basic (twice, because
# _test_basic_auth called .open() twice)
self.assertEqual(opener.recorded, ["digest", "basic"]*2)
def _test_basic_auth(self, opener, auth_handler, auth_header,
realm, http_handler, password_manager,
request_url, protected_url):
import base64
user, password = "wile", "coyote"
# .add_password() fed through to password manager
auth_handler.add_password(realm, request_url, user, password)
self.assertEqual(realm, password_manager.realm)
self.assertEqual(request_url, password_manager.url)
self.assertEqual(user, password_manager.user)
self.assertEqual(password, password_manager.password)
r = opener.open(request_url)
# should have asked the password manager for the username/password
self.assertEqual(password_manager.target_realm, realm)
self.assertEqual(password_manager.target_url, protected_url)
# expect one request without authorization, then one with
self.assertEqual(len(http_handler.requests), 2)
self.assertFalse(http_handler.requests[0].has_header(auth_header))
userpass = '%s:%s' % (user, password)
auth_hdr_value = 'Basic '+base64.encodestring(userpass).strip()
self.assertEqual(http_handler.requests[1].get_header(auth_header),
auth_hdr_value)
self.assertEqual(http_handler.requests[1].unredirected_hdrs[auth_header],
auth_hdr_value)
# if the password manager can't find a password, the handler won't
# handle the HTTP auth error
password_manager.user = password_manager.password = None
http_handler.reset()
r = opener.open(request_url)
self.assertEqual(len(http_handler.requests), 1)
self.assertFalse(http_handler.requests[0].has_header(auth_header))
class MiscTests(unittest.TestCase):
def test_build_opener(self):
class MyHTTPHandler(urllib2.HTTPHandler): pass
class FooHandler(urllib2.BaseHandler):
def foo_open(self): pass
class BarHandler(urllib2.BaseHandler):
def bar_open(self): pass
build_opener = urllib2.build_opener
o = build_opener(FooHandler, BarHandler)
self.opener_has_handler(o, FooHandler)
self.opener_has_handler(o, BarHandler)
# can take a mix of classes and instances
o = build_opener(FooHandler, BarHandler())
self.opener_has_handler(o, FooHandler)
self.opener_has_handler(o, BarHandler)
# subclasses of default handlers override default handlers
o = build_opener(MyHTTPHandler)
self.opener_has_handler(o, MyHTTPHandler)
# a particular case of overriding: default handlers can be passed
# in explicitly
o = build_opener()
self.opener_has_handler(o, urllib2.HTTPHandler)
o = build_opener(urllib2.HTTPHandler)
self.opener_has_handler(o, urllib2.HTTPHandler)
o = build_opener(urllib2.HTTPHandler())
self.opener_has_handler(o, urllib2.HTTPHandler)
# Issue2670: multiple handlers sharing the same base class
class MyOtherHTTPHandler(urllib2.HTTPHandler): pass
o = build_opener(MyHTTPHandler, MyOtherHTTPHandler)
self.opener_has_handler(o, MyHTTPHandler)
self.opener_has_handler(o, MyOtherHTTPHandler)
def opener_has_handler(self, opener, handler_class):
for h in opener.handlers:
if h.__class__ == handler_class:
break
else:
self.assert_(False)
class RequestTests(unittest.TestCase):
def setUp(self):
self.get = urllib2.Request("http://www.python.org/~jeremy/")
self.post = urllib2.Request("http://www.python.org/~jeremy/",
"data",
headers={"X-Test": "test"})
def test_method(self):
self.assertEqual("POST", self.post.get_method())
self.assertEqual("GET", self.get.get_method())
def test_add_data(self):
self.assert_(not self.get.has_data())
self.assertEqual("GET", self.get.get_method())
self.get.add_data("spam")
self.assert_(self.get.has_data())
self.assertEqual("POST", self.get.get_method())
def test_get_full_url(self):
self.assertEqual("http://www.python.org/~jeremy/",
self.get.get_full_url())
def test_selector(self):
self.assertEqual("/~jeremy/", self.get.get_selector())
req = urllib2.Request("http://www.python.org/")
self.assertEqual("/", req.get_selector())
def test_get_type(self):
self.assertEqual("http", self.get.get_type())
def test_get_host(self):
self.assertEqual("www.python.org", self.get.get_host())
def test_get_host_unquote(self):
req = urllib2.Request("http://www.%70ython.org/")
self.assertEqual("www.python.org", req.get_host())
def test_proxy(self):
self.assert_(not self.get.has_proxy())
self.get.set_proxy("www.perl.org", "http")
self.assert_(self.get.has_proxy())
self.assertEqual("www.python.org", self.get.get_origin_req_host())
self.assertEqual("www.perl.org", self.get.get_host())
def test_main(verbose=None):
import test_urllib2
test_support.run_doctest(test_urllib2, verbose)
test_support.run_doctest(urllib2, verbose)
tests = (TrivialTests,
OpenerDirectorTests,
HandlerTests,
MiscTests,
RequestTests)
test_support.run_unittest(*tests)
if __name__ == "__main__":
test_main(verbose=True)
#!/usr/bin/env python
import threading
import urlparse
import urllib2
import BaseHTTPServer
import unittest
import hashlib
from test import test_support
mimetools = test_support.import_module('mimetools', deprecated=True)
# Loopback http server infrastructure
class LoopbackHttpServer(BaseHTTPServer.HTTPServer):
"""HTTP server w/ a few modifications that make it useful for
loopback testing purposes.
"""
def __init__(self, server_address, RequestHandlerClass):
BaseHTTPServer.HTTPServer.__init__(self,
server_address,
RequestHandlerClass)
# Set the timeout of our listening socket really low so
# that we can stop the server easily.
self.socket.settimeout(0.1)
def get_request(self):
"""BaseHTTPServer method, overridden."""
request, client_address = self.socket.accept()
# It's a loopback connection, so setting the timeout
# really low shouldn't affect anything, but should make
# deadlocks less likely to occur.
request.settimeout(1.0)
return (request, client_address)
class LoopbackHttpServerThread(threading.Thread):
"""Stoppable thread that runs a loopback http server."""
def __init__(self, request_handler):
threading.Thread.__init__(self)
self._stop = False
self.ready = threading.Event()
request_handler.protocol_version = "HTTP/1.0"
self.httpd = LoopbackHttpServer(('127.0.0.1', 0),
request_handler)
#print "Serving HTTP on %s port %s" % (self.httpd.server_name,
# self.httpd.server_port)
self.port = self.httpd.server_port
def stop(self):
"""Stops the webserver if it's currently running."""
# Set the stop flag.
self._stop = True
self.join()
def run(self):
self.ready.set()
while not self._stop:
self.httpd.handle_request()
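# Hypothetical usage sketch of the server thread above (the real tests do the
# same dance in setUp/tearDown): start it, wait until it is ready, talk to it
# over loopback, then stop it.
def _loopback_server_sketch(request_handler):
    server = LoopbackHttpServerThread(request_handler)
    server.start()
    server.ready.wait()
    try:
        return urllib2.urlopen("http://127.0.0.1:%d/" % server.port).read()
    finally:
        server.stop()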
# Authentication infrastructure
class DigestAuthHandler:
"""Handler for performing digest authentication."""
def __init__(self):
self._request_num = 0
self._nonces = []
self._users = {}
self._realm_name = "Test Realm"
self._qop = "auth"
def set_qop(self, qop):
self._qop = qop
def set_users(self, users):
assert isinstance(users, dict)
self._users = users
def set_realm(self, realm):
self._realm_name = realm
def _generate_nonce(self):
self._request_num += 1
nonce = hashlib.md5(str(self._request_num)).hexdigest()
self._nonces.append(nonce)
return nonce
def _create_auth_dict(self, auth_str):
first_space_index = auth_str.find(" ")
auth_str = auth_str[first_space_index+1:]
parts = auth_str.split(",")
auth_dict = {}
for part in parts:
name, value = part.split("=")
name = name.strip()
if value[0] == '"' and value[-1] == '"':
value = value[1:-1]
else:
value = value.strip()
auth_dict[name] = value
return auth_dict
def _validate_auth(self, auth_dict, password, method, uri):
final_dict = {}
final_dict.update(auth_dict)
final_dict["password"] = password
final_dict["method"] = method
final_dict["uri"] = uri
HA1_str = "%(username)s:%(realm)s:%(password)s" % final_dict
HA1 = hashlib.md5(HA1_str).hexdigest()
HA2_str = "%(method)s:%(uri)s" % final_dict
HA2 = hashlib.md5(HA2_str).hexdigest()
final_dict["HA1"] = HA1
final_dict["HA2"] = HA2
response_str = "%(HA1)s:%(nonce)s:%(nc)s:" \
"%(cnonce)s:%(qop)s:%(HA2)s" % final_dict
response = hashlib.md5(response_str).hexdigest()
return response == auth_dict["response"]
def _return_auth_challenge(self, request_handler):
request_handler.send_response(407, "Proxy Authentication Required")
request_handler.send_header("Content-Type", "text/html")
request_handler.send_header(
'Proxy-Authenticate', 'Digest realm="%s", '
'qop="%s",'
'nonce="%s", ' % \
(self._realm_name, self._qop, self._generate_nonce()))
# XXX: Not sure if we're supposed to add this next header or
# not.
#request_handler.send_header('Connection', 'close')
request_handler.end_headers()
request_handler.wfile.write("Proxy Authentication Required.")
return False
def handle_request(self, request_handler):
"""Performs digest authentication on the given HTTP request
handler. Returns True if authentication was successful, False
otherwise.
If no users have been set, then digest auth is effectively
disabled and this method will always return True.
"""
if len(self._users) == 0:
return True
if 'Proxy-Authorization' not in request_handler.headers:
return self._return_auth_challenge(request_handler)
else:
auth_dict = self._create_auth_dict(
request_handler.headers['Proxy-Authorization']
)
if auth_dict["username"] in self._users:
password = self._users[ auth_dict["username"] ]
else:
return self._return_auth_challenge(request_handler)
if not auth_dict.get("nonce") in self._nonces:
return self._return_auth_challenge(request_handler)
else:
self._nonces.remove(auth_dict["nonce"])
auth_validated = False
# MSIE uses short_path in its validation, but Python's
# urllib2 uses the full path, so we're going to see if
# either of them works here.
for path in [request_handler.path, request_handler.short_path]:
if self._validate_auth(auth_dict,
password,
request_handler.command,
path):
auth_validated = True
if not auth_validated:
return self._return_auth_challenge(request_handler)
return True
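# Hypothetical sketch (not used by the tests) spelling out the RFC 2617
# arithmetic that _validate_auth() checks:
#   HA1 = MD5(user:realm:password), HA2 = MD5(method:uri),
#   response = MD5(HA1:nonce:nc:cnonce:qop:HA2).
def _expected_digest_response(username, realm, password, method, uri,
                              nonce, nc, cnonce, qop):
    ha1 = hashlib.md5("%s:%s:%s" % (username, realm, password)).hexdigest()
    ha2 = hashlib.md5("%s:%s" % (method, uri)).hexdigest()
    return hashlib.md5("%s:%s:%s:%s:%s:%s" %
                       (ha1, nonce, nc, cnonce, qop, ha2)).hexdigest()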
# Proxy test infrastructure
class FakeProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
"""This is a 'fake proxy' that makes it look like the entire
internet has gone down due to a sudden zombie invasion. Its main
utility is in providing us with authentication support for
testing.
"""
digest_auth_handler = DigestAuthHandler()
def log_message(self, format, *args):
# Uncomment the next line for debugging.
#sys.stderr.write(format % args)
pass
def do_GET(self):
(scm, netloc, path, params, query, fragment) = urlparse.urlparse(
self.path, 'http')
self.short_path = path
if self.digest_auth_handler.handle_request(self):
self.send_response(200, "OK")
self.send_header("Content-Type", "text/html")
self.end_headers()
self.wfile.write("You've reached %s!<BR>" % self.path)
self.wfile.write("Our apologies, but our server is down due to "
"a sudden zombie invasion.")
# Test cases
class ProxyAuthTests(unittest.TestCase):
URL = "http://localhost"
USER = "tester"
PASSWD = "test123"
REALM = "TestRealm"
def setUp(self):
FakeProxyHandler.digest_auth_handler.set_users({
self.USER : self.PASSWD
})
FakeProxyHandler.digest_auth_handler.set_realm(self.REALM)
self.server = LoopbackHttpServerThread(FakeProxyHandler)
self.server.start()
self.server.ready.wait()
proxy_url = "http://127.0.0.1:%d" % self.server.port
handler = urllib2.ProxyHandler({"http" : proxy_url})
self._digest_auth_handler = urllib2.ProxyDigestAuthHandler()
self.opener = urllib2.build_opener(handler, self._digest_auth_handler)
def tearDown(self):
self.server.stop()
def test_proxy_with_bad_password_raises_httperror(self):
self._digest_auth_handler.add_password(self.REALM, self.URL,
self.USER, self.PASSWD+"bad")
FakeProxyHandler.digest_auth_handler.set_qop("auth")
self.assertRaises(urllib2.HTTPError,
self.opener.open,
self.URL)
def test_proxy_with_no_password_raises_httperror(self):
FakeProxyHandler.digest_auth_handler.set_qop("auth")
self.assertRaises(urllib2.HTTPError,
self.opener.open,
self.URL)
def test_proxy_qop_auth_works(self):
self._digest_auth_handler.add_password(self.REALM, self.URL,
self.USER, self.PASSWD)
FakeProxyHandler.digest_auth_handler.set_qop("auth")
result = self.opener.open(self.URL)
while result.read():
pass
result.close()
def test_proxy_qop_auth_int_works_or_throws_urlerror(self):
self._digest_auth_handler.add_password(self.REALM, self.URL,
self.USER, self.PASSWD)
FakeProxyHandler.digest_auth_handler.set_qop("auth-int")
try:
result = self.opener.open(self.URL)
except urllib2.URLError:
# It's okay if we don't support auth-int, but we certainly
# shouldn't receive any kind of exception here other than
# a URLError.
result = None
if result:
while result.read():
pass
result.close()
def GetRequestHandler(responses):
class FakeHTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
server_version = "TestHTTP/"
requests = []
headers_received = []
port = 80
def do_GET(self):
body = self.send_head()
if body:
self.wfile.write(body)
def do_POST(self):
content_length = self.headers['Content-Length']
post_data = self.rfile.read(int(content_length))
self.do_GET()
self.requests.append(post_data)
def send_head(self):
FakeHTTPRequestHandler.headers_received = self.headers
self.requests.append(self.path)
response_code, headers, body = responses.pop(0)
self.send_response(response_code)
for (header, value) in headers:
self.send_header(header, value % self.port)
if body:
self.send_header('Content-type', 'text/plain')
self.end_headers()
return body
self.end_headers()
def log_message(self, *args):
pass
return FakeHTTPRequestHandler
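# Hypothetical sketch combining the factory above with the loopback server:
# each request pops the next canned (status, headers, body) tuple, and the
# paths seen are recorded on the handler class.
def _canned_server_sketch():
    handler = GetRequestHandler([(200, [], "hello")])
    server = LoopbackHttpServerThread(handler)
    server.start()
    server.ready.wait()
    handler.port = server.port
    try:
        # afterwards handler.requests == ['/ping'] and the body read is "hello"
        return urllib2.urlopen("http://localhost:%s/ping" % server.port).read()
    finally:
        server.stop()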
class TestUrlopen(unittest.TestCase):
"""Tests urllib2.urlopen using the network.
These tests are not exhaustive; testing with files is assumed to cover most
of the basic interface features. There are no tests exercising the optional
'data' and 'proxies' arguments, and no tests for transparent redirection
have been written.
"""
def start_server(self, responses):
handler = GetRequestHandler(responses)
self.server = LoopbackHttpServerThread(handler)
self.server.start()
self.server.ready.wait()
port = self.server.port
handler.port = port
return handler
def test_redirection(self):
expected_response = 'We got here...'
responses = [
(302, [('Location', 'http://localhost:%s/somewhere_else')], ''),
(200, [], expected_response)
]
handler = self.start_server(responses)
try:
f = urllib2.urlopen('http://localhost:%s/' % handler.port)
data = f.read()
f.close()
self.assertEquals(data, expected_response)
self.assertEquals(handler.requests, ['/', '/somewhere_else'])
finally:
self.server.stop()
def test_404(self):
expected_response = 'Bad bad bad...'
handler = self.start_server([(404, [], expected_response)])
try:
try:
urllib2.urlopen('http://localhost:%s/weeble' % handler.port)
except urllib2.URLError, f:
pass
else:
self.fail('404 should raise URLError')
data = f.read()
f.close()
self.assertEquals(data, expected_response)
self.assertEquals(handler.requests, ['/weeble'])
finally:
self.server.stop()
def test_200(self):
expected_response = 'pycon 2008...'
handler = self.start_server([(200, [], expected_response)])
try:
f = urllib2.urlopen('http://localhost:%s/bizarre' % handler.port)
data = f.read()
f.close()
self.assertEquals(data, expected_response)
self.assertEquals(handler.requests, ['/bizarre'])
finally:
self.server.stop()
def test_200_with_parameters(self):
expected_response = 'pycon 2008...'
handler = self.start_server([(200, [], expected_response)])
try:
f = urllib2.urlopen('http://localhost:%s/bizarre' % handler.port, 'get=with_feeling')
data = f.read()
f.close()
self.assertEquals(data, expected_response)
self.assertEquals(handler.requests, ['/bizarre', 'get=with_feeling'])
finally:
self.server.stop()
def test_sending_headers(self):
handler = self.start_server([(200, [], "we don't care")])
try:
req = urllib2.Request("http://localhost:%s/" % handler.port,
headers={'Range': 'bytes=20-39'})
urllib2.urlopen(req)
self.assertEqual(handler.headers_received['Range'], 'bytes=20-39')
finally:
self.server.stop()
def test_basic(self):
handler = self.start_server([(200, [], "we don't care")])
try:
open_url = urllib2.urlopen("http://localhost:%s" % handler.port)
for attr in ("read", "close", "info", "geturl"):
self.assert_(hasattr(open_url, attr), "object returned from "
"urlopen lacks the %s attribute" % attr)
try:
self.assert_(open_url.read(), "calling 'read' failed")
finally:
open_url.close()
finally:
self.server.stop()
def test_info(self):
handler = self.start_server([(200, [], "we don't care")])
try:
open_url = urllib2.urlopen("http://localhost:%s" % handler.port)
info_obj = open_url.info()
self.assert_(isinstance(info_obj, mimetools.Message),
"object returned by 'info' is not an instance of "
"mimetools.Message")
self.assertEqual(info_obj.getsubtype(), "plain")
finally:
self.server.stop()
def test_geturl(self):
# Make sure same URL as opened is returned by geturl.
handler = self.start_server([(200, [], "we don't care")])
try:
open_url = urllib2.urlopen("http://localhost:%s" % handler.port)
url = open_url.geturl()
self.assertEqual(url, "http://localhost:%s" % handler.port)
finally:
self.server.stop()
def test_bad_address(self):
# Make sure proper exception is raised when connecting to a bogus
# address.
self.assertRaises(IOError,
# Given that both VeriSign and various ISPs have in
# the past or are presently hijacking various invalid
# domain name requests in an attempt to boost traffic
# to their own sites, finding a domain name to use
# for this test is difficult. RFC2606 leads one to
# believe that '.invalid' should work, but experience
# seemed to indicate otherwise. Single character
# TLDs are likely to remain invalid, so this seems to
# be the best choice. The trailing '.' prevents a
# related problem: The normal DNS resolver appends
# the domain names from the search path if there is
# no '.' at the end, and if one of those domains
# implements a '*' rule a result is returned.
# However, none of this will prevent the test from
# failing if the ISP hijacks all invalid domain
# requests. The real solution would be to be able to
# parameterize the framework with a mock resolver.
urllib2.urlopen, "http://sadflkjsasf.i.nvali.d./")
def test_main():
# We will NOT depend on the network resource flag
# (Lib/test/regrtest.py -u network) since all tests here are only
# localhost. However, if this is a bad rationale, then uncomment
# the next line.
#test_support.requires("network")
test_support.run_unittest(ProxyAuthTests)
test_support.run_unittest(TestUrlopen)
if __name__ == "__main__":
test_main()
#!/usr/bin/env python
import unittest
from test import test_support
from test_urllib2 import sanepathname2url
import socket
import urllib2
import sys
import os
TIMEOUT = 60 # seconds
def _retry_thrice(func, exc, *args, **kwargs):
for i in range(3):
try:
return func(*args, **kwargs)
except exc, last_exc:
continue
except:
raise
raise last_exc
def _wrap_with_retry_thrice(func, exc):
def wrapped(*args, **kwargs):
return _retry_thrice(func, exc, *args, **kwargs)
return wrapped
# Connecting to remote hosts is flaky. Make it more robust by retrying
# the connection several times.
_urlopen_with_retry = _wrap_with_retry_thrice(urllib2.urlopen, urllib2.URLError)
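# Hypothetical illustration of the retry helper above: a callable that fails
# twice with the retried exception type succeeds on the third attempt; only
# after three failures is the last exception re-raised.
def _retry_thrice_sketch():
    attempts = []
    def flaky():
        attempts.append(1)
        if len(attempts) < 3:
            raise urllib2.URLError("transient failure")
        return "ok"
    return _wrap_with_retry_thrice(flaky, urllib2.URLError)()  # -> "ok"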
class AuthTests(unittest.TestCase):
"""Tests urllib2 authentication features."""
## Disabled at the moment since there is no page under python.org which
## could be used for HTTP authentication.
#
# def test_basic_auth(self):
# import httplib
#
# test_url = "http://www.python.org/test/test_urllib2/basic_auth"
# test_hostport = "www.python.org"
# test_realm = 'Test Realm'
# test_user = 'test.test_urllib2net'
# test_password = 'blah'
#
# # failure
# try:
# _urlopen_with_retry(test_url)
# except urllib2.HTTPError, exc:
# self.assertEqual(exc.code, 401)
# else:
# self.fail("urlopen() should have failed with 401")
#
# # success
# auth_handler = urllib2.HTTPBasicAuthHandler()
# auth_handler.add_password(test_realm, test_hostport,
# test_user, test_password)
# opener = urllib2.build_opener(auth_handler)
# f = opener.open('http://localhost/')
# response = _urlopen_with_retry("http://www.python.org/")
#
# # The 'userinfo' URL component is deprecated by RFC 3986 for security
# # reasons, let's not implement it! (it's already implemented for proxy
# # specification strings (that is, URLs or authorities specifying a
# # proxy), so we must keep that)
# self.assertRaises(httplib.InvalidURL,
# urllib2.urlopen, "http://evil:thing@example.com")
class CloseSocketTest(unittest.TestCase):
def test_close(self):
import socket, httplib, gc
# calling .close() on urllib2's response objects should close the
# underlying socket
# delve deep into response to fetch socket._socketobject
response = _urlopen_with_retry("http://www.python.org/")
abused_fileobject = response.fp
#self.assert_(abused_fileobject.__class__ is socket._fileobject) JAM: gevent: disable
httpresponse = abused_fileobject._sock
self.assert_(httpresponse.__class__ is httplib.HTTPResponse)
fileobject = httpresponse.fp
#self.assert_(fileobject.__class__ is socket._fileobject) JAM: gevent: disable
self.assert_(not fileobject.closed)
response.close()
self.assert_(fileobject.closed)
class OtherNetworkTests(unittest.TestCase):
def setUp(self):
if 0: # for debugging
import logging
logger = logging.getLogger("test_urllib2net")
logger.addHandler(logging.StreamHandler())
# XXX The rest of these tests aren't very good -- they don't check much.
# They do sometimes catch some major disasters, though.
def test_ftp(self):
urls = [
'ftp://ftp.kernel.org/pub/linux/kernel/README',
'ftp://ftp.kernel.org/pub/linux/kernel/non-existent-file',
#'ftp://ftp.kernel.org/pub/leenox/kernel/test',
'ftp://gatekeeper.research.compaq.com/pub/DEC/SRC'
'/research-reports/00README-Legal-Rules-Regs',
]
self._test_urls(urls, self._extra_handlers())
def test_file(self):
TESTFN = test_support.TESTFN
f = open(TESTFN, 'w')
try:
f.write('hi there\n')
f.close()
urls = [
'file:'+sanepathname2url(os.path.abspath(TESTFN)),
('file:///nonsensename/etc/passwd', None, urllib2.URLError),
]
self._test_urls(urls, self._extra_handlers(), retry=True)
finally:
os.remove(TESTFN)
# XXX Following test depends on machine configurations that are internal
# to CNRI. Need to set up a public server with the right authentication
# configuration for test purposes.
## def test_cnri(self):
## if socket.gethostname() == 'bitdiddle':
## localhost = 'bitdiddle.cnri.reston.va.us'
## elif socket.gethostname() == 'bitdiddle.concentric.net':
## localhost = 'localhost'
## else:
## localhost = None
## if localhost is not None:
## urls = [
## 'file://%s/etc/passwd' % localhost,
## 'http://%s/simple/' % localhost,
## 'http://%s/digest/' % localhost,
## 'http://%s/not/found.h' % localhost,
## ]
## bauth = HTTPBasicAuthHandler()
## bauth.add_password('basic_test_realm', localhost, 'jhylton',
## 'password')
## dauth = HTTPDigestAuthHandler()
## dauth.add_password('digest_test_realm', localhost, 'jhylton',
## 'password')
## self._test_urls(urls, self._extra_handlers()+[bauth, dauth])
def _test_urls(self, urls, handlers, retry=True):
import socket
import time
import logging
debug = logging.getLogger("test_urllib2").debug
urlopen = urllib2.build_opener(*handlers).open
if retry:
urlopen = _wrap_with_retry_thrice(urlopen, urllib2.URLError)
for url in urls:
if isinstance(url, tuple):
url, req, expected_err = url
else:
req = expected_err = None
debug(url)
try:
f = urlopen(url, req, TIMEOUT)
except EnvironmentError, err:
debug(err)
if expected_err:
msg = ("Didn't get expected error(s) %s for %s %s, got %s: %s" %
(expected_err, url, req, type(err), err))
self.assert_(isinstance(err, expected_err), msg)
except urllib2.URLError as err:
if isinstance(err[0], socket.timeout):
print >>sys.stderr, "<timeout: %s>" % url
continue
else:
raise
else:
try:
with test_support.transient_internet():
buf = f.read()
debug("read %d bytes" % len(buf))
except socket.timeout:
print >>sys.stderr, "<timeout: %s>" % url
f.close()
debug("******** next url coming up...")
time.sleep(0.1)
def _extra_handlers(self):
handlers = []
cfh = urllib2.CacheFTPHandler()
cfh.setTimeout(1)
handlers.append(cfh)
return handlers
class TimeoutTest(unittest.TestCase):
def test_http_basic(self):
self.assertTrue(socket.getdefaulttimeout() is None)
u = _urlopen_with_retry("http://www.python.org")
self.assertTrue(u.fp._sock.fp._sock.gettimeout() is None)
def test_http_default_timeout(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry("http://www.python.org")
finally:
socket.setdefaulttimeout(None)
self.assertEqual(u.fp._sock.fp._sock.gettimeout(), 60)
def test_http_no_timeout(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry("http://www.python.org", timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertTrue(u.fp._sock.fp._sock.gettimeout() is None)
def test_http_timeout(self):
u = _urlopen_with_retry("http://www.python.org", timeout=120)
self.assertEqual(u.fp._sock.fp._sock.gettimeout(), 120)
FTP_HOST = "ftp://ftp.mirror.nl/pub/mirror/gnu/"
def test_ftp_basic(self):
self.assertTrue(socket.getdefaulttimeout() is None)
u = _urlopen_with_retry(self.FTP_HOST)
self.assertTrue(u.fp.fp._sock.gettimeout() is None)
def test_ftp_default_timeout(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry(self.FTP_HOST)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(u.fp.fp._sock.gettimeout(), 60)
def test_ftp_no_timeout(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry(self.FTP_HOST, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertTrue(u.fp.fp._sock.gettimeout() is None)
def test_ftp_timeout(self):
u = _urlopen_with_retry(self.FTP_HOST, timeout=60)
self.assertEqual(u.fp.fp._sock.gettimeout(), 60)
def test_main():
test_support.requires("network")
test_support.run_unittest(AuthTests,
OtherNetworkTests,
CloseSocketTest,
TimeoutTest,
)
if __name__ == "__main__":
test_main()
from __future__ import nested_scopes # Backward compat for 2.1
from unittest import TestCase
from wsgiref.util import setup_testing_defaults
from wsgiref.headers import Headers
from wsgiref.handlers import BaseHandler, BaseCGIHandler
from wsgiref import util
from wsgiref.validate import validator
from wsgiref.simple_server import WSGIServer, WSGIRequestHandler, demo_app
from wsgiref.simple_server import make_server
from StringIO import StringIO
from SocketServer import BaseServer
import os
import re
import sys
from test import test_support
class MockServer(WSGIServer):
"""Non-socket HTTP server"""
def __init__(self, server_address, RequestHandlerClass):
BaseServer.__init__(self, server_address, RequestHandlerClass)
self.server_bind()
def server_bind(self):
host, port = self.server_address
self.server_name = host
self.server_port = port
self.setup_environ()
class MockHandler(WSGIRequestHandler):
"""Non-socket HTTP handler"""
def setup(self):
self.connection = self.request
self.rfile, self.wfile = self.connection
def finish(self):
pass
def hello_app(environ,start_response):
start_response("200 OK", [
('Content-Type','text/plain'),
('Date','Mon, 05 Jun 2006 18:49:54 GMT')
])
return ["Hello, world!"]
def run_amock(app=hello_app, data="GET / HTTP/1.0\n\n"):
server = make_server("", 80, app, MockServer, MockHandler)
inp, out, err, olderr = StringIO(data), StringIO(), StringIO(), sys.stderr
sys.stderr = err
try:
server.finish_request((inp,out), ("127.0.0.1",8888))
finally:
sys.stderr = olderr
return out.getvalue(), err.getvalue()
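# Hypothetical usage sketch of run_amock(): the whole WSGI request/response
# cycle happens in memory over StringIO pairs, so the raw HTTP bytes can be
# inspected directly.
def _run_amock_sketch():
    out, err = run_amock()                      # serves hello_app by default
    assert out.startswith("HTTP/1.0 200 OK\r\n")
    assert out.endswith("Hello, world!")
    return out, err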
def compare_generic_iter(make_it,match):
"""Utility to compare a generic 2.1/2.2+ iterator with an iterable
If running under Python 2.2+, this tests the iterator using iter()/next(),
as well as __getitem__. 'make_it' must be a function returning a fresh
iterator to be tested (since this may test the iterator twice)."""
it = make_it()
n = 0
for item in match:
if not it[n]==item: raise AssertionError
n+=1
try:
it[n]
except IndexError:
pass
else:
raise AssertionError("Too many items from __getitem__",it)
try:
iter, StopIteration
except NameError:
pass
else:
# Only test iter mode under 2.2+
it = make_it()
if not iter(it) is it: raise AssertionError
for item in match:
if not it.next()==item: raise AssertionError
try:
it.next()
except StopIteration:
pass
else:
raise AssertionError("Too many items from .next()",it)
class IntegrationTests(TestCase):
def check_hello(self, out, has_length=True):
self.assertEqual(out,
"HTTP/1.0 200 OK\r\n"
"Server: WSGIServer/0.1 Python/"+sys.version.split()[0]+"\r\n"
"Content-Type: text/plain\r\n"
"Date: Mon, 05 Jun 2006 18:49:54 GMT\r\n" +
(has_length and "Content-Length: 13\r\n" or "") +
"\r\n"
"Hello, world!"
)
def test_plain_hello(self):
out, err = run_amock()
self.check_hello(out)
def test_validated_hello(self):
out, err = run_amock(validator(hello_app))
# the middleware doesn't support len(), so content-length isn't there
self.check_hello(out, has_length=False)
def test_simple_validation_error(self):
def bad_app(environ,start_response):
start_response("200 OK", ('Content-Type','text/plain'))
return ["Hello, world!"]
out, err = run_amock(validator(bad_app))
self.failUnless(out.endswith(
"A server error occurred. Please contact the administrator."
))
self.assertEqual(
err.splitlines()[-2],
"AssertionError: Headers (('Content-Type', 'text/plain')) must"
" be of type list: <type 'tuple'>"
)
class UtilityTests(TestCase):
def checkShift(self,sn_in,pi_in,part,sn_out,pi_out):
env = {'SCRIPT_NAME':sn_in,'PATH_INFO':pi_in}
util.setup_testing_defaults(env)
self.assertEqual(util.shift_path_info(env),part)
self.assertEqual(env['PATH_INFO'],pi_out)
self.assertEqual(env['SCRIPT_NAME'],sn_out)
return env
def checkDefault(self, key, value, alt=None):
# Check defaulting when empty
env = {}
util.setup_testing_defaults(env)
if isinstance(value,StringIO):
self.failUnless(isinstance(env[key],StringIO))
else:
self.assertEqual(env[key],value)
# Check existing value
env = {key:alt}
util.setup_testing_defaults(env)
self.failUnless(env[key] is alt)
def checkCrossDefault(self,key,value,**kw):
util.setup_testing_defaults(kw)
self.assertEqual(kw[key],value)
def checkAppURI(self,uri,**kw):
util.setup_testing_defaults(kw)
self.assertEqual(util.application_uri(kw),uri)
def checkReqURI(self,uri,query=1,**kw):
util.setup_testing_defaults(kw)
self.assertEqual(util.request_uri(kw,query),uri)
def checkFW(self,text,size,match):
def make_it(text=text,size=size):
return util.FileWrapper(StringIO(text),size)
compare_generic_iter(make_it,match)
it = make_it()
self.failIf(it.filelike.closed)
for item in it:
pass
self.failIf(it.filelike.closed)
it.close()
self.failUnless(it.filelike.closed)
def testSimpleShifts(self):
self.checkShift('','/', '', '/', '')
self.checkShift('','/x', 'x', '/x', '')
self.checkShift('/','', None, '/', '')
self.checkShift('/a','/x/y', 'x', '/a/x', '/y')
self.checkShift('/a','/x/', 'x', '/a/x', '/')
def testNormalizedShifts(self):
self.checkShift('/a/b', '/../y', '..', '/a', '/y')
self.checkShift('', '/../y', '..', '', '/y')
self.checkShift('/a/b', '//y', 'y', '/a/b/y', '')
self.checkShift('/a/b', '//y/', 'y', '/a/b/y', '/')
self.checkShift('/a/b', '/./y', 'y', '/a/b/y', '')
self.checkShift('/a/b', '/./y/', 'y', '/a/b/y', '/')
self.checkShift('/a/b', '///./..//y/.//', '..', '/a', '/y/')
self.checkShift('/a/b', '///', '', '/a/b/', '')
self.checkShift('/a/b', '/.//', '', '/a/b/', '')
self.checkShift('/a/b', '/x//', 'x', '/a/b/x', '/')
self.checkShift('/a/b', '/.', None, '/a/b', '')
def testDefaults(self):
for key, value in [
('SERVER_NAME','127.0.0.1'),
('SERVER_PORT', '80'),
('SERVER_PROTOCOL','HTTP/1.0'),
('HTTP_HOST','127.0.0.1'),
('REQUEST_METHOD','GET'),
('SCRIPT_NAME',''),
('PATH_INFO','/'),
('wsgi.version', (1,0)),
('wsgi.run_once', 0),
('wsgi.multithread', 0),
('wsgi.multiprocess', 0),
('wsgi.input', StringIO("")),
('wsgi.errors', StringIO()),
('wsgi.url_scheme','http'),
]:
self.checkDefault(key,value)
def testCrossDefaults(self):
self.checkCrossDefault('HTTP_HOST',"foo.bar",SERVER_NAME="foo.bar")
self.checkCrossDefault('wsgi.url_scheme',"https",HTTPS="on")
self.checkCrossDefault('wsgi.url_scheme',"https",HTTPS="1")
self.checkCrossDefault('wsgi.url_scheme',"https",HTTPS="yes")
self.checkCrossDefault('wsgi.url_scheme',"http",HTTPS="foo")
self.checkCrossDefault('SERVER_PORT',"80",HTTPS="foo")
self.checkCrossDefault('SERVER_PORT',"443",HTTPS="on")
def testGuessScheme(self):
self.assertEqual(util.guess_scheme({}), "http")
self.assertEqual(util.guess_scheme({'HTTPS':"foo"}), "http")
self.assertEqual(util.guess_scheme({'HTTPS':"on"}), "https")
self.assertEqual(util.guess_scheme({'HTTPS':"yes"}), "https")
self.assertEqual(util.guess_scheme({'HTTPS':"1"}), "https")
def testAppURIs(self):
self.checkAppURI("http://127.0.0.1/")
self.checkAppURI("http://127.0.0.1/spam", SCRIPT_NAME="/spam")
self.checkAppURI("http://spam.example.com:2071/",
HTTP_HOST="spam.example.com:2071", SERVER_PORT="2071")
self.checkAppURI("http://spam.example.com/",
SERVER_NAME="spam.example.com")
self.checkAppURI("http://127.0.0.1/",
HTTP_HOST="127.0.0.1", SERVER_NAME="spam.example.com")
self.checkAppURI("https://127.0.0.1/", HTTPS="on")
self.checkAppURI("http://127.0.0.1:8000/", SERVER_PORT="8000",
HTTP_HOST=None)
def testReqURIs(self):
self.checkReqURI("http://127.0.0.1/")
self.checkReqURI("http://127.0.0.1/spam", SCRIPT_NAME="/spam")
self.checkReqURI("http://127.0.0.1/spammity/spam",
SCRIPT_NAME="/spammity", PATH_INFO="/spam")
self.checkReqURI("http://127.0.0.1/spammity/spam?say=ni",
SCRIPT_NAME="/spammity", PATH_INFO="/spam",QUERY_STRING="say=ni")
self.checkReqURI("http://127.0.0.1/spammity/spam", 0,
SCRIPT_NAME="/spammity", PATH_INFO="/spam",QUERY_STRING="say=ni")
def testFileWrapper(self):
self.checkFW("xyz"*50, 120, ["xyz"*40,"xyz"*10])
def testHopByHop(self):
for hop in (
"Connection Keep-Alive Proxy-Authenticate Proxy-Authorization "
"TE Trailers Transfer-Encoding Upgrade"
).split():
for alt in hop, hop.title(), hop.upper(), hop.lower():
self.failUnless(util.is_hop_by_hop(alt))
# Not comprehensive, just a few random header names
for hop in (
"Accept Cache-Control Date Pragma Trailer Via Warning"
).split():
for alt in hop, hop.title(), hop.upper(), hop.lower():
self.failIf(util.is_hop_by_hop(alt))
class HeaderTests(TestCase):
def testMappingInterface(self):
test = [('x','y')]
self.assertEqual(len(Headers([])),0)
self.assertEqual(len(Headers(test[:])),1)
self.assertEqual(Headers(test[:]).keys(), ['x'])
self.assertEqual(Headers(test[:]).values(), ['y'])
self.assertEqual(Headers(test[:]).items(), test)
self.failIf(Headers(test).items() is test) # must be copy!
h=Headers([])
del h['foo'] # should not raise an error
h['Foo'] = 'bar'
for m in h.has_key, h.__contains__, h.get, h.get_all, h.__getitem__:
self.failUnless(m('foo'))
self.failUnless(m('Foo'))
self.failUnless(m('FOO'))
self.failIf(m('bar'))
self.assertEqual(h['foo'],'bar')
h['foo'] = 'baz'
self.assertEqual(h['FOO'],'baz')
self.assertEqual(h.get_all('foo'),['baz'])
self.assertEqual(h.get("foo","whee"), "baz")
self.assertEqual(h.get("zoo","whee"), "whee")
self.assertEqual(h.setdefault("foo","whee"), "baz")
self.assertEqual(h.setdefault("zoo","whee"), "whee")
self.assertEqual(h["foo"],"baz")
self.assertEqual(h["zoo"],"whee")
def testRequireList(self):
self.assertRaises(TypeError, Headers, "foo")
def testExtras(self):
h = Headers([])
self.assertEqual(str(h),'\r\n')
h.add_header('foo','bar',baz="spam")
self.assertEqual(h['foo'], 'bar; baz="spam"')
self.assertEqual(str(h),'foo: bar; baz="spam"\r\n\r\n')
h.add_header('Foo','bar',cheese=None)
self.assertEqual(h.get_all('foo'),
['bar; baz="spam"', 'bar; cheese'])
self.assertEqual(str(h),
'foo: bar; baz="spam"\r\n'
'Foo: bar; cheese\r\n'
'\r\n'
)
class ErrorHandler(BaseCGIHandler):
"""Simple handler subclass for testing BaseHandler"""
# BaseHandler records the OS environment at import time, but envvars
# might have been changed later by other tests, which trips up
# HandlerTests.testEnviron().
os_environ = dict(os.environ.items())
def __init__(self,**kw):
setup_testing_defaults(kw)
BaseCGIHandler.__init__(
self, StringIO(''), StringIO(), StringIO(), kw,
multithread=True, multiprocess=True
)
class TestHandler(ErrorHandler):
"""Simple handler subclass for testing BaseHandler, w/error passthru"""
def handle_error(self):
raise # for testing, we want to see what's happening
class HandlerTests(TestCase):
def checkEnvironAttrs(self, handler):
env = handler.environ
for attr in [
'version','multithread','multiprocess','run_once','file_wrapper'
]:
if attr=='file_wrapper' and handler.wsgi_file_wrapper is None:
continue
self.assertEqual(getattr(handler,'wsgi_'+attr),env['wsgi.'+attr])
def checkOSEnviron(self,handler):
empty = {}; setup_testing_defaults(empty)
env = handler.environ
from os import environ
for k,v in environ.items():
if k not in empty:
self.assertEqual(env[k],v)
for k,v in empty.items():
self.assertTrue(k in env)
def testEnviron(self):
h = TestHandler(X="Y")
h.setup_environ()
self.checkEnvironAttrs(h)
self.checkOSEnviron(h)
self.assertEqual(h.environ["X"],"Y")
def testCGIEnviron(self):
h = BaseCGIHandler(None,None,None,{})
h.setup_environ()
for key in 'wsgi.url_scheme', 'wsgi.input', 'wsgi.errors':
self.assertTrue(key in h.environ)
def testScheme(self):
h=TestHandler(HTTPS="on"); h.setup_environ()
self.assertEqual(h.environ['wsgi.url_scheme'],'https')
h=TestHandler(); h.setup_environ()
self.assertEqual(h.environ['wsgi.url_scheme'],'http')
def testAbstractMethods(self):
h = BaseHandler()
for name in [
'_flush','get_stdin','get_stderr','add_cgi_vars'
]:
self.assertRaises(NotImplementedError, getattr(h,name))
self.assertRaises(NotImplementedError, h._write, "test")
def testContentLength(self):
# Demo one reason iteration is better than write()... ;)
def trivial_app1(e,s):
s('200 OK',[])
return [e['wsgi.url_scheme']]
def trivial_app2(e,s):
s('200 OK',[])(e['wsgi.url_scheme'])
return []
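# With a returned iterable whose total size is known up front, the handler can emit a
# Content-Length header; data pushed through write() gets no such header.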
h = TestHandler()
h.run(trivial_app1)
self.assertEqual(h.stdout.getvalue(),
"Status: 200 OK\r\n"
"Content-Length: 4\r\n"
"\r\n"
"http")
h = TestHandler()
h.run(trivial_app2)
self.assertEqual(h.stdout.getvalue(),
"Status: 200 OK\r\n"
"\r\n"
"http")
def testBasicErrorOutput(self):
def non_error_app(e,s):
s('200 OK',[])
return []
def error_app(e,s):
raise AssertionError("This should be caught by handler")
h = ErrorHandler()
h.run(non_error_app)
self.assertEqual(h.stdout.getvalue(),
"Status: 200 OK\r\n"
"Content-Length: 0\r\n"
"\r\n")
self.assertEqual(h.stderr.getvalue(),"")
h = ErrorHandler()
h.run(error_app)
self.assertEqual(h.stdout.getvalue(),
"Status: %s\r\n"
"Content-Type: text/plain\r\n"
"Content-Length: %d\r\n"
"\r\n%s" % (h.error_status,len(h.error_body),h.error_body))
self.assertTrue("AssertionError" in h.stderr.getvalue(),
"AssertionError not in stderr")
def testErrorAfterOutput(self):
MSG = "Some output has been sent"
def error_app(e,s):
s("200 OK",[])(MSG)
raise AssertionError("This should be caught by handler")
h = ErrorHandler()
h.run(error_app)
self.assertEqual(h.stdout.getvalue(),
"Status: 200 OK\r\n"
"\r\n"+MSG)
self.assertTrue("AssertionError" in h.stderr.getvalue(),
"AssertionError not in stderr")
def testHeaderFormats(self):
def non_error_app(e,s):
s('200 OK',[])
return []
stdpat = (
r"HTTP/%s 200 OK\r\n"
r"Date: \w{3}, [ 0123]\d \w{3} \d{4} \d\d:\d\d:\d\d GMT\r\n"
r"%s" r"Content-Length: 0\r\n" r"\r\n"
)
shortpat = (
"Status: 200 OK\r\n" "Content-Length: 0\r\n" "\r\n"
)
for ssw in "FooBar/1.0", None:
sw = ssw and "Server: %s\r\n" % ssw or ""
for version in "1.0", "1.1":
for proto in "HTTP/0.9", "HTTP/1.0", "HTTP/1.1":
h = TestHandler(SERVER_PROTOCOL=proto)
h.origin_server = False
h.http_version = version
h.server_software = ssw
h.run(non_error_app)
self.assertEqual(shortpat,h.stdout.getvalue())
h = TestHandler(SERVER_PROTOCOL=proto)
h.origin_server = True
h.http_version = version
h.server_software = ssw
h.run(non_error_app)
if proto=="HTTP/0.9":
self.assertEqual(h.stdout.getvalue(),"")
else:
self.failUnless(
re.match(stdpat%(version,sw), h.stdout.getvalue()),
(stdpat%(version,sw), h.stdout.getvalue())
)
# This epilogue is needed for compatibility with the Python 2.5 regrtest module
def test_main():
test_support.run_unittest(__name__)
if __name__ == "__main__":
test_main()
# the above lines intentionally left blank
# Certificate for projects.developer.nokia.com:443 (see issue 13034)
-----BEGIN CERTIFICATE-----
MIIFLDCCBBSgAwIBAgIQLubqdkCgdc7lAF9NfHlUmjANBgkqhkiG9w0BAQUFADCB
vDELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTswOQYDVQQLEzJUZXJtcyBvZiB1c2Ug
YXQgaHR0cHM6Ly93d3cudmVyaXNpZ24uY29tL3JwYSAoYykxMDE2MDQGA1UEAxMt
VmVyaVNpZ24gQ2xhc3MgMyBJbnRlcm5hdGlvbmFsIFNlcnZlciBDQSAtIEczMB4X
DTExMDkyMTAwMDAwMFoXDTEyMDkyMDIzNTk1OVowcTELMAkGA1UEBhMCRkkxDjAM
BgNVBAgTBUVzcG9vMQ4wDAYDVQQHFAVFc3BvbzEOMAwGA1UEChQFTm9raWExCzAJ
BgNVBAsUAkJJMSUwIwYDVQQDFBxwcm9qZWN0cy5kZXZlbG9wZXIubm9raWEuY29t
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCr92w1bpHYSYxUEx8N/8Iddda2
lYi+aXNtQfV/l2Fw9Ykv3Ipw4nLeGTj18FFlAZgMdPRlgrzF/NNXGw/9l3/qKdow
CypkQf8lLaxb9Ze1E/KKmkRJa48QTOqvo6GqKuTI6HCeGlG1RxDb8YSKcQWLiytn
yj3Wp4MgRQO266xmMQIDAQABo4IB9jCCAfIwQQYDVR0RBDowOIIccHJvamVjdHMu
ZGV2ZWxvcGVyLm5va2lhLmNvbYIYcHJvamVjdHMuZm9ydW0ubm9raWEuY29tMAkG
A1UdEwQCMAAwCwYDVR0PBAQDAgWgMEEGA1UdHwQ6MDgwNqA0oDKGMGh0dHA6Ly9T
VlJJbnRsLUczLWNybC52ZXJpc2lnbi5jb20vU1ZSSW50bEczLmNybDBEBgNVHSAE
PTA7MDkGC2CGSAGG+EUBBxcDMCowKAYIKwYBBQUHAgEWHGh0dHBzOi8vd3d3LnZl
cmlzaWduLmNvbS9ycGEwKAYDVR0lBCEwHwYJYIZIAYb4QgQBBggrBgEFBQcDAQYI
KwYBBQUHAwIwcgYIKwYBBQUHAQEEZjBkMCQGCCsGAQUFBzABhhhodHRwOi8vb2Nz
cC52ZXJpc2lnbi5jb20wPAYIKwYBBQUHMAKGMGh0dHA6Ly9TVlJJbnRsLUczLWFp
YS52ZXJpc2lnbi5jb20vU1ZSSW50bEczLmNlcjBuBggrBgEFBQcBDARiMGChXqBc
MFowWDBWFglpbWFnZS9naWYwITAfMAcGBSsOAwIaBBRLa7kolgYMu9BSOJsprEsH
iyEFGDAmFiRodHRwOi8vbG9nby52ZXJpc2lnbi5jb20vdnNsb2dvMS5naWYwDQYJ
KoZIhvcNAQEFBQADggEBACQuPyIJqXwUyFRWw9x5yDXgMW4zYFopQYOw/ItRY522
O5BsySTh56BWS6mQB07XVfxmYUGAvRQDA5QHpmY8jIlNwSmN3s8RKo+fAtiNRlcL
x/mWSfuMs3D/S6ev3D6+dpEMZtjrhOdctsarMKp8n/hPbwhAbg5hVjpkW5n8vz2y
0KxvvkA1AxpLwpVv7OlK17ttzIHw8bp9HTlHBU5s8bKz4a565V/a5HI0CSEv/+0y
ko4/ghTnZc1CkmUngKKeFMSah/mT/xAh8XnE2l1AazFa8UKuYki1e+ArHaGZc4ix
UYOtiRphwfuYQhRZ7qX9q2MMkCMI65XNK/SaFrAbbG0=
-----END CERTIFICATE-----
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 0 (0x0)
Signature Algorithm: sha1WithRSAEncryption
Issuer: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev@python.org
Validity
Not Before: Aug 7 13:11:52 2013 GMT
Not After : Aug 7 13:12:52 2013 GMT
Subject: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev@python.org
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
Public-Key: (2048 bit)
Modulus:
00:b5:ea:ed:c9:fb:46:7d:6f:3b:76:80:dd:3a:f3:
03:94:0b:a7:a6:db:ec:1d:df:ff:23:74:08:9d:97:
16:3f:a3:a4:7b:3e:1b:0e:96:59:25:03:a7:26:e2:
88:a9:cf:79:cd:f7:04:56:b0:ab:79:32:6e:59:c1:
32:30:54:eb:58:a8:cb:91:f0:42:a5:64:27:cb:d4:
56:31:88:52:ad:cf:bd:7f:f0:06:64:1f:cc:27:b8:
a3:8b:8c:f3:d8:29:1f:25:0b:f5:46:06:1b:ca:02:
45:ad:7b:76:0a:9c:bf:bb:b9:ae:0d:16:ab:60:75:
ae:06:3e:9c:7c:31:dc:92:2f:29:1a:e0:4b:0c:91:
90:6c:e9:37:c5:90:d7:2a:d7:97:15:a3:80:8f:5d:
7b:49:8f:54:30:d4:97:2c:1c:5b:37:b5:ab:69:30:
68:43:d3:33:78:4b:02:60:f5:3c:44:80:a1:8f:e7:
f0:0f:d1:5e:87:9e:46:cf:62:fc:f9:bf:0c:65:12:
f1:93:c8:35:79:3f:c8:ec:ec:47:f5:ef:be:44:d5:
ae:82:1e:2d:9a:9f:98:5a:67:65:e1:74:70:7c:cb:
d3:c2:ce:0e:45:49:27:dc:e3:2d:d4:fb:48:0e:2f:
9e:77:b8:14:46:c0:c4:36:ca:02:ae:6a:91:8c:da:
2f:85
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints: critical
CA:FALSE
X509v3 Subject Key Identifier:
88:5A:55:C0:52:FF:61:CD:52:A3:35:0F:EA:5A:9C:24:38:22:F7:5C
X509v3 Key Usage:
Digital Signature, Non Repudiation, Key Encipherment
X509v3 Subject Alternative Name:
*************************************************************
WARNING: The values for DNS, email and URI are WRONG. OpenSSL
doesn't print the text after a NULL byte.
*************************************************************
DNS:altnull.python.org, email:null@python.org, URI:http://null.python.org, IP Address:192.0.2.1, IP Address:2001:DB8:0:0:0:0:0:1
Signature Algorithm: sha1WithRSAEncryption
ac:4f:45:ef:7d:49:a8:21:70:8e:88:59:3e:d4:36:42:70:f5:
a3:bd:8b:d7:a8:d0:58:f6:31:4a:b1:a4:a6:dd:6f:d9:e8:44:
3c:b6:0a:71:d6:7f:b1:08:61:9d:60:ce:75:cf:77:0c:d2:37:
86:02:8d:5e:5d:f9:0f:71:b4:16:a8:c1:3d:23:1c:f1:11:b3:
56:6e:ca:d0:8d:34:94:e6:87:2a:99:f2:ae:ae:cc:c2:e8:86:
de:08:a8:7f:c5:05:fa:6f:81:a7:82:e6:d0:53:9d:34:f4:ac:
3e:40:fe:89:57:7a:29:a4:91:7e:0b:c6:51:31:e5:10:2f:a4:
60:76:cd:95:51:1a:be:8b:a1:b0:fd:ad:52:bd:d7:1b:87:60:
d2:31:c7:17:c4:18:4f:2d:08:25:a3:a7:4f:b7:92:ca:e2:f5:
25:f1:54:75:81:9d:b3:3d:61:a2:f7:da:ed:e1:c6:6f:2c:60:
1f:d8:6f:c5:92:05:ab:c9:09:62:49:a9:14:ad:55:11:cc:d6:
4a:19:94:99:97:37:1d:81:5f:8b:cf:a3:a8:96:44:51:08:3d:
0b:05:65:12:eb:b6:70:80:88:48:72:4f:c6:c2:da:cf:cd:8e:
5b:ba:97:2f:60:b4:96:56:49:5e:3a:43:76:63:04:be:2a:f6:
c1:ca:a9:94
-----BEGIN CERTIFICATE-----
MIIE2DCCA8CgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBxTELMAkGA1UEBhMCVVMx
DzANBgNVBAgMBk9yZWdvbjESMBAGA1UEBwwJQmVhdmVydG9uMSMwIQYDVQQKDBpQ
eXRob24gU29mdHdhcmUgRm91bmRhdGlvbjEgMB4GA1UECwwXUHl0aG9uIENvcmUg
RGV2ZWxvcG1lbnQxJDAiBgNVBAMMG251bGwucHl0aG9uLm9yZwBleGFtcGxlLm9y
ZzEkMCIGCSqGSIb3DQEJARYVcHl0aG9uLWRldkBweXRob24ub3JnMB4XDTEzMDgw
NzEzMTE1MloXDTEzMDgwNzEzMTI1MlowgcUxCzAJBgNVBAYTAlVTMQ8wDQYDVQQI
DAZPcmVnb24xEjAQBgNVBAcMCUJlYXZlcnRvbjEjMCEGA1UECgwaUHl0aG9uIFNv
ZnR3YXJlIEZvdW5kYXRpb24xIDAeBgNVBAsMF1B5dGhvbiBDb3JlIERldmVsb3Bt
ZW50MSQwIgYDVQQDDBtudWxsLnB5dGhvbi5vcmcAZXhhbXBsZS5vcmcxJDAiBgkq
hkiG9w0BCQEWFXB5dGhvbi1kZXZAcHl0aG9uLm9yZzCCASIwDQYJKoZIhvcNAQEB
BQADggEPADCCAQoCggEBALXq7cn7Rn1vO3aA3TrzA5QLp6bb7B3f/yN0CJ2XFj+j
pHs+Gw6WWSUDpybiiKnPec33BFawq3kyblnBMjBU61ioy5HwQqVkJ8vUVjGIUq3P
vX/wBmQfzCe4o4uM89gpHyUL9UYGG8oCRa17dgqcv7u5rg0Wq2B1rgY+nHwx3JIv
KRrgSwyRkGzpN8WQ1yrXlxWjgI9de0mPVDDUlywcWze1q2kwaEPTM3hLAmD1PESA
oY/n8A/RXoeeRs9i/Pm/DGUS8ZPINXk/yOzsR/XvvkTVroIeLZqfmFpnZeF0cHzL
08LODkVJJ9zjLdT7SA4vnne4FEbAxDbKAq5qkYzaL4UCAwEAAaOB0DCBzTAMBgNV
HRMBAf8EAjAAMB0GA1UdDgQWBBSIWlXAUv9hzVKjNQ/qWpwkOCL3XDALBgNVHQ8E
BAMCBeAwgZAGA1UdEQSBiDCBhYIeYWx0bnVsbC5weXRob24ub3JnAGV4YW1wbGUu
Y29tgSBudWxsQHB5dGhvbi5vcmcAdXNlckBleGFtcGxlLm9yZ4YpaHR0cDovL251
bGwucHl0aG9uLm9yZwBodHRwOi8vZXhhbXBsZS5vcmeHBMAAAgGHECABDbgAAAAA
AAAAAAAAAAEwDQYJKoZIhvcNAQEFBQADggEBAKxPRe99SaghcI6IWT7UNkJw9aO9
i9eo0Fj2MUqxpKbdb9noRDy2CnHWf7EIYZ1gznXPdwzSN4YCjV5d+Q9xtBaowT0j
HPERs1ZuytCNNJTmhyqZ8q6uzMLoht4IqH/FBfpvgaeC5tBTnTT0rD5A/olXeimk
kX4LxlEx5RAvpGB2zZVRGr6LobD9rVK91xuHYNIxxxfEGE8tCCWjp0+3ksri9SXx
VHWBnbM9YaL32u3hxm8sYB/Yb8WSBavJCWJJqRStVRHM1koZlJmXNx2BX4vPo6iW
RFEIPQsFZRLrtnCAiEhyT8bC2s/Njlu6ly9gtJZWSV46Q3ZjBL4q9sHKqZQ=
-----END CERTIFICATE-----
# Certificate chain for https://sha256.tbs-internet.com
0 s:/C=FR/postalCode=14000/ST=Calvados/L=CAEN/street=22 rue de Bretagne/O=TBS INTERNET/OU=0002 440443810/OU=Certificats TBS X509/CN=ecom.tbs-x509.com
i:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA business
-----BEGIN CERTIFICATE-----
MIIGTjCCBTagAwIBAgIQOh3d9dNDPq1cSdJmEiMpqDANBgkqhkiG9w0BAQUFADCB
yTELMAkGA1UEBhMCRlIxETAPBgNVBAgTCENhbHZhZG9zMQ0wCwYDVQQHEwRDYWVu
MRUwEwYDVQQKEwxUQlMgSU5URVJORVQxSDBGBgNVBAsTP1Rlcm1zIGFuZCBDb25k
aXRpb25zOiBodHRwOi8vd3d3LnRicy1pbnRlcm5ldC5jb20vQ0EvcmVwb3NpdG9y
eTEYMBYGA1UECxMPVEJTIElOVEVSTkVUIENBMR0wGwYDVQQDExRUQlMgWDUwOSBD
QSBidXNpbmVzczAeFw0xMTAxMjUwMDAwMDBaFw0xMzAyMDUyMzU5NTlaMIHHMQsw
CQYDVQQGEwJGUjEOMAwGA1UEERMFMTQwMDAxETAPBgNVBAgTCENhbHZhZG9zMQ0w
CwYDVQQHEwRDQUVOMRswGQYDVQQJExIyMiBydWUgZGUgQnJldGFnbmUxFTATBgNV
BAoTDFRCUyBJTlRFUk5FVDEXMBUGA1UECxMOMDAwMiA0NDA0NDM4MTAxHTAbBgNV
BAsTFENlcnRpZmljYXRzIFRCUyBYNTA5MRowGAYDVQQDExFlY29tLnRicy14NTA5
LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKRrlHUnJ++1lpcg
jtYco7cdmRe+EEfTmwPfCdfV3G1QfsTSvY6FfMpm/83pqHfT+4ANwr18wD9ZrAEN
G16mf9VdCGK12+TP7DmqeZyGIqlFFoahQnmb8EarvE43/1UeQ2CV9XmzwZvpqeli
LfXsFonawrY3H6ZnMwS64St61Z+9gdyuZ/RbsoZBbT5KUjDEG844QRU4OT1IGeEI
eY5NM5RNIh6ZNhVtqeeCxMS7afONkHQrOco73RdSTRck/Hj96Ofl3MHNHryr+AMK
DGFk1kLCZGpPdXtkxXvaDeQoiYDlil26CWc+YK6xyDPMdsWvoG14ZLyCpzMXA7/7
4YAQRH0CAwEAAaOCAjAwggIsMB8GA1UdIwQYMBaAFBoJBMz5CY+7HqDO1KQUf0vV
I1jNMB0GA1UdDgQWBBQgOU8HsWzbmD4WZP5Wtdw7jca2WDAOBgNVHQ8BAf8EBAMC
BaAwDAYDVR0TAQH/BAIwADAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIw
TAYDVR0gBEUwQzBBBgsrBgEEAYDlNwIBATAyMDAGCCsGAQUFBwIBFiRodHRwczov
L3d3dy50YnMtaW50ZXJuZXQuY29tL0NBL0NQUzEwdwYDVR0fBHAwbjA3oDWgM4Yx
aHR0cDovL2NybC50YnMtaW50ZXJuZXQuY29tL1RCU1g1MDlDQWJ1c2luZXNzLmNy
bDAzoDGgL4YtaHR0cDovL2NybC50YnMteDUwOS5jb20vVEJTWDUwOUNBYnVzaW5l
c3MuY3JsMIGwBggrBgEFBQcBAQSBozCBoDA9BggrBgEFBQcwAoYxaHR0cDovL2Ny
dC50YnMtaW50ZXJuZXQuY29tL1RCU1g1MDlDQWJ1c2luZXNzLmNydDA5BggrBgEF
BQcwAoYtaHR0cDovL2NydC50YnMteDUwOS5jb20vVEJTWDUwOUNBYnVzaW5lc3Mu
Y3J0MCQGCCsGAQUFBzABhhhodHRwOi8vb2NzcC50YnMteDUwOS5jb20wMwYDVR0R
BCwwKoIRZWNvbS50YnMteDUwOS5jb22CFXd3dy5lY29tLnRicy14NTA5LmNvbTAN
BgkqhkiG9w0BAQUFAAOCAQEArT4NHfbY87bGAw8lPV4DmHlmuDuVp/y7ltO3Ynse
3Rz8RxW2AzuO0Oy2F0Cu4yWKtMyEyMXyHqWtae7ElRbdTu5w5GwVBLJHClCzC8S9
SpgMMQTx3Rgn8vjkHuU9VZQlulZyiPK7yunjc7c310S9FRZ7XxOwf8Nnx4WnB+No
WrfApzhhQl31w+RyrNxZe58hCfDDHmevRvwLjQ785ZoQXJDj2j3qAD4aI2yB8lB5
oaE1jlCJzC7Kmz/Y9jzfmv/zAs1LQTm9ktevv4BTUFaGjv9jxnQ1xnS862ZiouLW
zZYIlYPf4F6JjXGiIQgQRglILUfq3ftJd9/ok9W9ZF8h8w==
-----END CERTIFICATE-----
1 s:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA business
i:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root
-----BEGIN CERTIFICATE-----
MIIFPzCCBCegAwIBAgIQDlBz/++iRSmLDeVRHT/hADANBgkqhkiG9w0BAQUFADBv
MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFk
ZFRydXN0IEV4dGVybmFsIFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBF
eHRlcm5hbCBDQSBSb290MB4XDTA1MTIwMTAwMDAwMFoXDTE5MDcwOTE4MTkyMlow
gckxCzAJBgNVBAYTAkZSMREwDwYDVQQIEwhDYWx2YWRvczENMAsGA1UEBxMEQ2Fl
bjEVMBMGA1UEChMMVEJTIElOVEVSTkVUMUgwRgYDVQQLEz9UZXJtcyBhbmQgQ29u
ZGl0aW9uczogaHR0cDovL3d3dy50YnMtaW50ZXJuZXQuY29tL0NBL3JlcG9zaXRv
cnkxGDAWBgNVBAsTD1RCUyBJTlRFUk5FVCBDQTEdMBsGA1UEAxMUVEJTIFg1MDkg
Q0EgYnVzaW5lc3MwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDB1PAU
qudCcz3tmyGcf+u6EkZqonKKHrV4gZYbvVkIRojmmlhfi/jwvpHvo8bqSt/9Rj5S
jhCDW0pcbI+IPPtD1Jy+CHNSfnMqVDy6CKQ3p5maTzCMG6ZT+XjnvcND5v+FtaiB
xk1iCX6uvt0jeUtdZvYbyytsSDE6c3Y5//wRxOF8tM1JxibwO3pyER26jbbN2gQz
m/EkdGjLdJ4svPk23WDAvQ6G0/z2LcAaJB+XLfqRwfQpHQvfKa1uTi8PivC8qtip
rmNQMMPMjxSK2azX8cKjjTDJiUKaCb4VHlJDWKEsCFRpgJAoAuX8f7Yfs1M4esGo
sWb3PGspK3O22uIlAgMBAAGjggF6MIIBdjAdBgNVHQ4EFgQUGgkEzPkJj7seoM7U
pBR/S9UjWM0wDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQAwGAYD
VR0gBBEwDzANBgsrBgEEAYDlNwIBATB7BgNVHR8EdDByMDigNqA0hjJodHRwOi8v
Y3JsLmNvbW9kb2NhLmNvbS9BZGRUcnVzdEV4dGVybmFsQ0FSb290LmNybDA2oDSg
MoYwaHR0cDovL2NybC5jb21vZG8ubmV0L0FkZFRydXN0RXh0ZXJuYWxDQVJvb3Qu
Y3JsMIGGBggrBgEFBQcBAQR6MHgwOwYIKwYBBQUHMAKGL2h0dHA6Ly9jcnQuY29t
b2RvY2EuY29tL0FkZFRydXN0VVROU2VydmVyQ0EuY3J0MDkGCCsGAQUFBzAChi1o
dHRwOi8vY3J0LmNvbW9kby5uZXQvQWRkVHJ1c3RVVE5TZXJ2ZXJDQS5jcnQwEQYJ
YIZIAYb4QgEBBAQDAgIEMA0GCSqGSIb3DQEBBQUAA4IBAQA7mqrMgk/MrE6QnbNA
h4nRCn2ti4bg4w2C3lB6bSvRPnYwuNw9Jb8vuKkNFzRDxNJXqVDZdfFW5CVQJuyd
nfAx83+wk+spzvFaE1KhFYfN9G9pQfXUfvDRoIcJgPEKUXL1wRiOG+IjU3VVI8pg
IgqHkr7ylln5i5zCiFAPuIJmYUSFg/gxH5xkCNcjJqqrHrHatJr6Qrrke93joupw
oU1njfAcZtYp6fbiK6u2b1pJqwkVBE8RsfLnPhRj+SFbpvjv8Od7o/ieJhFIYQNU
k2jX2u8qZnAiNw93LZW9lpYjtuvMXq8QQppENNja5b53q7UwI+lU7ZGjZ7quuESp
J6/5
-----END CERTIFICATE-----
2 s:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root
i:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN-USERFirst-Hardware
-----BEGIN CERTIFICATE-----
MIIETzCCAzegAwIBAgIQHM5EYpUZep1jUvnyI6m2mDANBgkqhkiG9w0BAQUFADCB
lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
SGFyZHdhcmUwHhcNMDUwNjA3MDgwOTEwWhcNMTkwNzA5MTgxOTIyWjBvMQswCQYD
VQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0
IEV4dGVybmFsIFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5h
bCBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt/caM+by
AAQtOeBOW+0fvGwPzbX6I7bO3psRM5ekKUx9k5+9SryT7QMa44/P5W1QWtaXKZRa
gLBJetsulf24yr83OC0ePpFBrXBWx/BPP+gynnTKyJBU6cZfD3idmkA8Dqxhql4U
j56HoWpQ3NeaTq8Fs6ZxlJxxs1BgCscTnTgHhgKo6ahpJhiQq0ywTyOrOk+E2N/O
n+Fpb7vXQtdrROTHre5tQV9yWnEIN7N5ZaRZoJQ39wAvDcKSctrQOHLbFKhFxF0q
fbe01sTurM0TRLfJK91DACX6YblpalgjEbenM49WdVn1zSnXRrcKK2W200JvFbK4
e/vv6V1T1TRaJwIDAQABo4G9MIG6MB8GA1UdIwQYMBaAFKFyXyYbKJhDlV0HN9WF
lp1L0sNFMB0GA1UdDgQWBBStvZh6NLQm9/rEJlTvA73gJMtUGjAOBgNVHQ8BAf8E
BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zARBglghkgBhvhCAQEEBAMCAQIwRAYDVR0f
BD0wOzA5oDegNYYzaHR0cDovL2NybC51c2VydHJ1c3QuY29tL1VUTi1VU0VSRmly
c3QtSGFyZHdhcmUuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQByQhANOs4kClrwF8BW
onvUOGCSjRK52zYZgDXYNjDtmr5rJ6NyPFDNn+JxkLpjYetIFMTbSRe679Bt8m7a
gIAoQYFQtxMuyLnJegB2aEbQiIxh/tC21UcFF7ktdnDoTlA6w3pLuvunaI84Of3o
2YBrhzkTbCfaYk5JRlTpudW9DkUkHBsyx3nknPKnplkIGaK0jgn8E0n+SFabYaHk
I9LroYT/+JtLefh9lgBdAgVv0UPbzoGfuDsrk/Zh+UrgbLFpHoVnElhzbkh64Z0X
OGaJunQc68cCZu5HTn/aK7fBGMcVflRCXLVEQpU9PIAdGA8Ynvg684t8GMaKsRl1
jIGZ
-----END CERTIFICATE-----
3 s:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN-USERFirst-Hardware
i:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN-USERFirst-Hardware
-----BEGIN CERTIFICATE-----
MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB
lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG
A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe
MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v
d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh
cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn
0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ
M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a
MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd
oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI
DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy
oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD
VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0
dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy
bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF
BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM
//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli
CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE
CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t
3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS
KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==
-----END CERTIFICATE-----
......@@ -6,31 +6,26 @@ from test import test_support
import asyncore
import socket
import select
import errno
import subprocess
import time
import gc
import os
import errno
import pprint
import urllib, urlparse
import shutil
import traceback
import weakref
import functools
import platform
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
# Optionally test SSL support, if we have it in the tested platform
skip_expected = False
try:
import ssl
except ImportError:
skip_expected = True
ssl = test_support.import_module("ssl")
HOST = test_support.HOST
CERTFILE = None
SVN_PYTHON_ORG_ROOT_CERT = None
NULLBYTECERT = None
def handle_error(prefix):
exc_format = ' '.join(traceback.format_exception(*sys.exc_info()))
......@@ -57,8 +52,37 @@ class BasicTests(unittest.TestCase):
else:
raise
# Issue #9415: Ubuntu hijacks their OpenSSL and forcefully disables SSLv2
def skip_if_broken_ubuntu_ssl(func):
if hasattr(ssl, 'PROTOCOL_SSLv2'):
# We need to access the lower-level wrapper in order to create an
# implicit SSL context without trying to connect or listen.
try:
import _ssl
except ImportError:
# The returned function won't get executed, just ignore the error
pass
@functools.wraps(func)
def f(*args, **kwargs):
try:
s = socket.socket(socket.AF_INET)
_ssl.sslwrap(s._sock, 0, None, None,
ssl.CERT_NONE, ssl.PROTOCOL_SSLv2, None, None)
except ssl.SSLError as e:
if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and
platform.linux_distribution() == ('debian', 'squeeze/sid', '')
and 'Invalid SSL protocol variant specified' in str(e)):
raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour")
return func(*args, **kwargs)
return f
else:
return func
class BasicSocketTests(unittest.TestCase):
def test_constants(self):
ssl.PROTOCOL_SSLv2
#ssl.PROTOCOL_SSLv2
ssl.PROTOCOL_SSLv23
ssl.PROTOCOL_SSLv3
ssl.PROTOCOL_TLSv1
......@@ -72,12 +96,8 @@ class BasicTests(unittest.TestCase):
sys.stdout.write("\n RAND_status is %d (%s)\n"
% (v, (v and "sufficient randomness") or
"insufficient randomness"))
try:
ssl.RAND_egd(1)
except TypeError:
pass
else:
print "didn't raise TypeError"
self.assertRaises(TypeError, ssl.RAND_egd, 1)
self.assertRaises(TypeError, ssl.RAND_egd, 'foo', 1)
ssl.RAND_add("this is a random string", 75.0)
def test_parse_cert(self):
......@@ -87,6 +107,51 @@ class BasicTests(unittest.TestCase):
p = ssl._ssl._test_decode_cert(CERTFILE, False)
if test_support.verbose:
sys.stdout.write("\n" + pprint.pformat(p) + "\n")
self.assertEqual(p['subject'],
((('countryName', 'XY'),),
(('localityName', 'Castle Anthrax'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'localhost'),))
)
self.assertEqual(p['subjectAltName'], (('DNS', 'localhost'),))
# Issue #13034: the subjectAltName in some certificates
# (notably projects.developer.nokia.com:443) wasn't parsed
p = ssl._ssl._test_decode_cert(NOKIACERT)
if test_support.verbose:
sys.stdout.write("\n" + pprint.pformat(p) + "\n")
self.assertEqual(p['subjectAltName'],
(('DNS', 'projects.developer.nokia.com'),
('DNS', 'projects.forum.nokia.com'))
)
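# CVE-2013-4238: a certificate field containing a NUL byte (here
# 'null.python.org\x00example.org') must be reported in full, not silently truncated at the NUL.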
def test_parse_cert_CVE_2013_4238(self):
p = ssl._ssl._test_decode_cert(NULLBYTECERT)
if test_support.verbose:
sys.stdout.write("\n" + pprint.pformat(p) + "\n")
subject = ((('countryName', 'US'),),
(('stateOrProvinceName', 'Oregon'),),
(('localityName', 'Beaverton'),),
(('organizationName', 'Python Software Foundation'),),
(('organizationalUnitName', 'Python Core Development'),),
(('commonName', 'null.python.org\x00example.org'),),
(('emailAddress', 'python-dev@python.org'),))
self.assertEqual(p['subject'], subject)
self.assertEqual(p['issuer'], subject)
if ssl.OPENSSL_VERSION_INFO >= (0, 9, 8):
san = (('DNS', 'altnull.python.org\x00example.com'),
('email', 'null@python.org\x00user@example.org'),
('URI', 'http://null.python.org\x00http://example.org'),
('IP Address', '192.0.2.1'),
('IP Address', '2001:DB8:0:0:0:0:0:1\n'))
else:
# OpenSSL 0.9.7 doesn't support IPv6 addresses in subjectAltName
san = (('DNS', 'altnull.python.org\x00example.com'),
('email', 'null@python.org\x00user@example.org'),
('URI', 'http://null.python.org\x00http://example.org'),
('IP Address', '192.0.2.1'),
('IP Address', '<invalid>'))
self.assertEqual(p['subjectAltName'], san)
def test_DER_to_PEM(self):
with open(SVN_PYTHON_ORG_ROOT_CERT, 'r') as f:
......@@ -100,6 +165,51 @@ class BasicTests(unittest.TestCase):
if not p2.endswith('\n' + ssl.PEM_FOOTER + '\n'):
self.fail("DER-to-PEM didn't include correct footer:\n%r\n" % p2)
def test_openssl_version(self):
n = ssl.OPENSSL_VERSION_NUMBER
t = ssl.OPENSSL_VERSION_INFO
s = ssl.OPENSSL_VERSION
self.assertIsInstance(n, (int, long))
self.assertIsInstance(t, tuple)
self.assertIsInstance(s, str)
# Some sanity checks follow
# >= 0.9
self.assertGreaterEqual(n, 0x900000)
# < 2.0
self.assertLess(n, 0x20000000)
major, minor, fix, patch, status = t
self.assertGreaterEqual(major, 0)
self.assertLess(major, 2)
self.assertGreaterEqual(minor, 0)
self.assertLess(minor, 256)
self.assertGreaterEqual(fix, 0)
self.assertLess(fix, 256)
self.assertGreaterEqual(patch, 0)
self.assertLessEqual(patch, 26)
self.assertGreaterEqual(status, 0)
self.assertLessEqual(status, 15)
# Version string as returned by OpenSSL, the format might change
self.assertTrue(s.startswith("OpenSSL {:d}.{:d}.{:d}".format(major, minor, fix)),
(s, t))
@test_support.requires_resource('network')
def test_ciphers(self):
remote = ("svn.python.org", 443)
with test_support.transient_internet(remote[0]):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_NONE, ciphers="ALL")
s.connect(remote)
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_NONE, ciphers="DEFAULT")
s.connect(remote)
# Error checking occurs when connecting, because the SSL context
# isn't created before.
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_NONE, ciphers="^$:,;?*'dorothyx")
with self.assertRaisesRegexp(ssl.SSLError, "No cipher can be selected"):
s.connect(remote)
@test_support.cpython_only
def test_refcycle(self):
# Issue #7943: an SSL object doesn't create reference cycles with
# itself.
......@@ -109,13 +219,31 @@ class BasicTests(unittest.TestCase):
del ss
self.assertEqual(wr(), None)
def test_wrapped_unconnected(self):
# The _delegate_methods in socket.py are correctly delegated to by an
# unconnected SSLSocket, so they will raise a socket.error rather than
# something unexpected like TypeError.
s = socket.socket(socket.AF_INET)
ss = ssl.wrap_socket(s)
self.assertRaises(socket.error, ss.recv, 1)
self.assertRaises(socket.error, ss.recv_into, bytearray(b'x'))
self.assertRaises(socket.error, ss.recvfrom, 1)
self.assertRaises(socket.error, ss.recvfrom_into, bytearray(b'x'), 1)
self.assertRaises(socket.error, ss.send, b'x')
self.assertRaises(socket.error, ss.sendto, b'x', ('0.0.0.0', 0))
def test_unsupported_dtls(self):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.addCleanup(s.close)
with self.assertRaises(NotImplementedError) as cx:
ssl.wrap_socket(s, cert_reqs=ssl.CERT_NONE)
self.assertEqual(str(cx.exception), "only stream sockets are supported")
class NetworkedTests(unittest.TestCase):
def test_connect(self):
if True:
print "svn.python.org cert changed; see https://bugs.python.org/issue25940"
return
with test_support.transient_internet("svn.python.org"):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_NONE)
s.connect(("svn.python.org", 443))
......@@ -143,14 +271,84 @@ class NetworkedTests(unittest.TestCase):
finally:
s.close()
def test_connect_ex(self):
# Issue #11326: check connect_ex() implementation
with test_support.transient_internet("svn.python.org"):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=SVN_PYTHON_ORG_ROOT_CERT)
try:
self.assertEqual(0, s.connect_ex(("svn.python.org", 443)))
self.assertTrue(s.getpeercert())
finally:
s.close()
def test_non_blocking_connect_ex(self):
# Issue #11326: non-blocking connect_ex() should allow handshake
# to proceed after the socket gets ready.
with test_support.transient_internet("svn.python.org"):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=SVN_PYTHON_ORG_ROOT_CERT,
do_handshake_on_connect=False)
try:
s.setblocking(False)
rc = s.connect_ex(('svn.python.org', 443))
# EWOULDBLOCK under Windows, EINPROGRESS elsewhere
self.assertIn(rc, (0, errno.EINPROGRESS, errno.EWOULDBLOCK))
# Wait for connect to finish
select.select([], [s], [], 5.0)
# Non-blocking handshake
while True:
try:
s.do_handshake()
break
except ssl.SSLError as err:
if err.args[0] == ssl.SSL_ERROR_WANT_READ:
select.select([s], [], [], 5.0)
elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE:
select.select([], [s], [], 5.0)
else:
raise
# SSL established
self.assertTrue(s.getpeercert())
finally:
s.close()
def test_timeout_connect_ex(self):
# Issue #12065: on a timeout, connect_ex() should return the original
# errno (mimicking the behaviour of non-SSL sockets).
with test_support.transient_internet("svn.python.org"):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=SVN_PYTHON_ORG_ROOT_CERT,
do_handshake_on_connect=False)
try:
s.settimeout(0.0000001)
rc = s.connect_ex(('svn.python.org', 443))
if rc == 0:
self.skipTest("svn.python.org responded too quickly")
self.assertIn(rc, (errno.EAGAIN, errno.EWOULDBLOCK))
finally:
s.close()
def test_connect_ex_error(self):
with test_support.transient_internet("svn.python.org"):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=SVN_PYTHON_ORG_ROOT_CERT)
try:
self.assertEqual(errno.ECONNREFUSED,
s.connect_ex(("svn.python.org", 444)))
finally:
s.close()
@unittest.skipIf(os.name == "nt", "Can't use a socket as a file under Windows")
def test_makefile_close(self):
# Issue #5238: creating a file-like object with makefile() shouldn't
# delay closing the underlying "real socket" (here tested with its
# file descriptor, hence skipping the test under Windows).
if os.name == "nt":
if test_support.verbose:
print "Skipped: can't use a socket as a file under Windows"
return
with test_support.transient_internet("svn.python.org"):
ss = ssl.wrap_socket(socket.socket(socket.AF_INET))
ss.connect(("svn.python.org", 443))
fd = ss.fileno()
......@@ -161,14 +359,12 @@ class NetworkedTests(unittest.TestCase):
# Closing the SSL socket should close the fd too
ss.close()
gc.collect()
try:
with self.assertRaises(OSError) as e:
os.read(fd, 0)
except OSError, e:
self.assertEqual(e.errno, errno.EBADF)
else:
self.fail("OSError wasn't raised")
self.assertEqual(e.exception.errno, errno.EBADF)
def test_non_blocking_handshake(self):
with test_support.transient_internet("svn.python.org"):
s = socket.socket(socket.AF_INET)
s.connect(("svn.python.org", 443))
s.setblocking(False)
......@@ -193,42 +389,44 @@ class NetworkedTests(unittest.TestCase):
sys.stdout.write("\nNeeded %d calls to do_handshake() to establish session.\n" % count)
def test_get_server_certificate(self):
if True:
print "svn.python.org cert changed; see https://bugs.python.org/issue25940"
return
pem = ssl.get_server_certificate(("svn.python.org", 443))
with test_support.transient_internet("svn.python.org"):
pem = ssl.get_server_certificate(("svn.python.org", 443),
ssl.PROTOCOL_SSLv23)
if not pem:
self.fail("No server certificate on svn.python.org:443!")
try:
pem = ssl.get_server_certificate(("svn.python.org", 443), ca_certs=CERTFILE)
pem = ssl.get_server_certificate(("svn.python.org", 443),
ssl.PROTOCOL_SSLv23,
ca_certs=CERTFILE)
except ssl.SSLError:
#should fail
pass
else:
self.fail("Got server certificate %s for svn.python.org!" % pem)
pem = ssl.get_server_certificate(("svn.python.org", 443), ca_certs=SVN_PYTHON_ORG_ROOT_CERT)
pem = ssl.get_server_certificate(("svn.python.org", 443),
ssl.PROTOCOL_SSLv23,
ca_certs=SVN_PYTHON_ORG_ROOT_CERT)
if not pem:
self.fail("No server certificate on svn.python.org:443!")
if test_support.verbose:
sys.stdout.write("\nVerified certificate for svn.python.org:443 is\n%s\n" % pem)
# Test disabled: OPENSSL_VERSION* not available in Python 2.6
def test_algorithms(self):
if test_support.verbose:
sys.stdout.write("test_algorithms disabled, "
"as it fails on some old OpenSSL versions")
return
# Issue #8484: all algorithms should be available when verifying a
# certificate.
# NOTE: https://sha256.tbs-internet.com is another possible test host
remote = ("sha2.hboeck.de", 443)
# SHA256 was added in OpenSSL 0.9.8
if ssl.OPENSSL_VERSION_INFO < (0, 9, 8, 0, 15):
self.skipTest("SHA256 not available on %r" % ssl.OPENSSL_VERSION)
self.skipTest("remote host needs SNI, only available on Python 3.2+")
# NOTE: https://sha2.hboeck.de is another possible test host
remote = ("sha256.tbs-internet.com", 443)
sha256_cert = os.path.join(os.path.dirname(__file__), "sha256.pem")
with test_support.transient_internet("sha256.tbs-internet.com"):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=sha256_cert,)
with test_support.transient_internet():
try:
s.connect(remote)
if test_support.verbose:
......@@ -282,11 +480,13 @@ else:
certfile=self.server.certificate,
ssl_version=self.server.protocol,
ca_certs=self.server.cacerts,
cert_reqs=self.server.certreqs)
except ssl.SSLError:
cert_reqs=self.server.certreqs,
ciphers=self.server.ciphers)
except ssl.SSLError as e:
# XXX Various errors can have happened here, for example
# a mismatching protocol version, an invalid certificate,
# or a low-level bug. This should be made more discriminating.
self.server.conn_errors.append(e)
if self.server.chatty:
handle_error("\n server: bad connection attempt from " +
str(self.sock.getpeername()) + ":\n")
......@@ -368,7 +568,7 @@ else:
def __init__(self, certificate, ssl_version=None,
certreqs=None, cacerts=None,
chatty=True, connectionchatty=False, starttls_server=False,
wrap_accepting_socket=False):
wrap_accepting_socket=False, ciphers=None):
if ssl_version is None:
ssl_version = ssl.PROTOCOL_TLSv1
......@@ -378,6 +578,7 @@ else:
self.protocol = ssl_version
self.certreqs = certreqs
self.cacerts = cacerts
self.ciphers = ciphers
self.chatty = chatty
self.connectionchatty = connectionchatty
self.starttls_server = starttls_server
......@@ -388,14 +589,25 @@ else:
certfile=self.certificate,
cert_reqs = self.certreqs,
ca_certs = self.cacerts,
ssl_version = self.protocol)
ssl_version = self.protocol,
ciphers = self.ciphers)
if test_support.verbose and self.chatty:
sys.stdout.write(' server: wrapped server socket as %s\n' % str(self.sock))
self.port = test_support.bind_port(self.sock)
self.active = False
self.conn_errors = []
threading.Thread.__init__(self)
self.daemon = True
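# Context-manager support: "with server:" starts the thread, waits for it to signal
# readiness, and stops/joins it on exit, replacing the manual Event/start/stop/join
# pattern removed from the individual tests in this change.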
def __enter__(self):
self.start(threading.Event())
self.flag.wait()
return self
def __exit__(self, *args):
self.stop()
self.join()
def start(self, flag=None):
self.flag = flag
threading.Thread.start(self)
......@@ -415,6 +627,7 @@ else:
+ str(connaddr) + '\n')
handler = self.ConnectionHandler(self, newconn)
handler.start()
handler.join()
except socket.timeout:
pass
except KeyboardInterrupt:
......@@ -502,6 +715,21 @@ else:
def __str__(self):
return "<%s %s>" % (self.__class__.__name__, self.server)
def __enter__(self):
self.start(threading.Event())
self.flag.wait()
return self
def __exit__(self, *args):
if test_support.verbose:
sys.stdout.write(" cleanup: stopping server.\n")
self.stop()
if test_support.verbose:
sys.stdout.write(" cleanup: joining server thread.\n")
self.join()
if test_support.verbose:
sys.stdout.write(" cleanup: successfully joined.\n")
def start(self, flag=None):
self.flag = flag
threading.Thread.start(self)
......@@ -616,12 +844,7 @@ else:
server = ThreadedEchoServer(CERTFILE,
certreqs=ssl.CERT_REQUIRED,
cacerts=CERTFILE, chatty=False)
flag = threading.Event()
server.start(flag)
# wait for it to start
flag.wait()
# try to connect
try:
with server:
try:
s = ssl.wrap_socket(socket.socket(),
certfile=certfile,
......@@ -634,14 +857,11 @@ else:
if test_support.verbose:
sys.stdout.write("\nsocket.error is %s\n" % x[1])
else:
self.fail("Use of invalid cert should have failed!")
finally:
server.stop()
server.join()
raise AssertionError("Use of invalid cert should have failed!")
def server_params_test(certfile, protocol, certreqs, cacertsfile,
client_certfile, client_protocol=None, indata="FOO\n",
chatty=True, connectionchatty=False,
ciphers=None, chatty=True, connectionchatty=False,
wrap_accepting_socket=False):
"""
Launch a server, connect a client to it and try various reads
......@@ -651,34 +871,33 @@ else:
certreqs=certreqs,
ssl_version=protocol,
cacerts=cacertsfile,
ciphers=ciphers,
chatty=chatty,
connectionchatty=connectionchatty,
wrap_accepting_socket=wrap_accepting_socket)
flag = threading.Event()
server.start(flag)
# wait for it to start
flag.wait()
with server:
# try to connect
if client_protocol is None:
client_protocol = protocol
try:
s = ssl.wrap_socket(socket.socket(),
certfile=client_certfile,
ca_certs=cacertsfile,
ciphers=ciphers,
cert_reqs=certreqs,
ssl_version=client_protocol)
s.connect((HOST, server.port))
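# Exercise write() with several buffer types: str, bytearray and memoryview should all
# round-trip through the SSL connection (the echo server returns them lower-cased).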
for arg in [indata, bytearray(indata), memoryview(indata)]:
if connectionchatty:
if test_support.verbose:
sys.stdout.write(
" client: sending %s...\n" % (repr(indata)))
s.write(indata)
" client: sending %s...\n" % (repr(arg)))
s.write(arg)
outdata = s.read()
if connectionchatty:
if test_support.verbose:
sys.stdout.write(" client: read %s\n" % repr(outdata))
if outdata != indata.lower():
self.fail(
raise AssertionError(
"bad data <<%s>> (%d) received; expected <<%s>> (%d)\n"
% (outdata[:min(len(outdata),20)], len(outdata),
indata[:min(len(indata),20)].lower(), len(indata)))
......@@ -687,9 +906,6 @@ else:
if test_support.verbose:
sys.stdout.write(" client: closing connection.\n")
s.close()
finally:
server.stop()
server.join()
def try_protocol_combo(server_protocol,
client_protocol,
......@@ -709,9 +925,12 @@ else:
ssl.get_protocol_name(server_protocol),
certtype))
try:
# NOTE: we must enable "ALL" ciphers, otherwise an SSLv23 client
# will send an SSLv3 hello (rather than SSLv2) starting from
# OpenSSL 1.0.0 (see issue #8322).
server_params_test(CERTFILE, server_protocol, certsreqs,
CERTFILE, CERTFILE, client_protocol,
chatty=False)
ciphers="ALL", chatty=False)
# Protocol mismatch can result in either an SSLError, or a
# "Connection reset by peer" error.
except ssl.SSLError:
......@@ -756,6 +975,8 @@ else:
c = socket.socket()
c.connect((HOST, port))
listener_gone.wait()
# XXX why is it necessary?
test_support.gc_collect()
try:
ssl_sock = ssl.wrap_socket(c)
except IOError:
......@@ -770,6 +991,7 @@ else:
finally:
t.join()
@skip_if_broken_ubuntu_ssl
def test_echo(self):
"""Basic test of an SSL client connecting to a server"""
if test_support.verbose:
......@@ -787,12 +1009,7 @@ else:
ssl_version=ssl.PROTOCOL_SSLv23,
cacerts=CERTFILE,
chatty=False)
flag = threading.Event()
server.start(flag)
# wait for it to start
flag.wait()
# try to connect
try:
with server:
s = ssl.wrap_socket(socket.socket(),
certfile=CERTFILE,
ca_certs=CERTFILE,
......@@ -814,9 +1031,6 @@ else:
"Missing or invalid 'organizationName' field in certificate subject; "
"should be 'Python Software Foundation'.")
s.close()
finally:
server.stop()
server.join()
def test_empty_cert(self):
"""Connecting with an empty cert file"""
......@@ -835,33 +1049,25 @@ else:
bad_cert_test(os.path.join(os.path.dirname(__file__) or os.curdir,
"badkey.pem"))
@skip_if_broken_ubuntu_ssl
def test_protocol_sslv2(self):
"""Connecting to an SSLv2 server with various client options"""
if test_support.verbose:
sys.stdout.write("\ntest_protocol_sslv2 disabled, "
"as it fails on OpenSSL 1.0.0+")
return
sys.stdout.write("\n")
if not hasattr(ssl, 'PROTOCOL_SSLv2'):
self.skipTest("PROTOCOL_SSLv2 needed")
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_REQUIRED)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, True)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_TLSv1, False)
@skip_if_broken_ubuntu_ssl
def test_protocol_sslv23(self):
"""Connecting to an SSLv23 server with various client options"""
if test_support.verbose:
sys.stdout.write("\ntest_protocol_sslv23 disabled, "
"as it fails on OpenSSL 1.0.0+")
return
try:
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv2, True)
except (ssl.SSLError, socket.error), x:
# this fails on some older versions of OpenSSL (0.9.7l, for instance)
if test_support.verbose:
sys.stdout.write(
" SSL2 client to SSL23 server test unexpectedly failed:\n %s\n"
% str(x))
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, True)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, True)
......@@ -874,31 +1080,29 @@ else:
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_REQUIRED)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, True, ssl.CERT_REQUIRED)
@skip_if_broken_ubuntu_ssl
def test_protocol_sslv3(self):
"""Connecting to an SSLv3 server with various client options"""
if test_support.verbose:
sys.stdout.write("\ntest_protocol_sslv3 disabled, "
"as it fails on OpenSSL 1.0.0+")
return
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_REQUIRED)
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv2, False)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv23, False)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLSv1, False)
@skip_if_broken_ubuntu_ssl
def test_protocol_tlsv1(self):
"""Connecting to a TLSv1 server with various client options"""
if test_support.verbose:
sys.stdout.write("\ntest_protocol_tlsv1 disabled, "
"as it fails on OpenSSL 1.0.0+")
return
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_REQUIRED)
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv23, False)
def test_starttls(self):
"""Switching from clear text to encrypted and back again."""
......@@ -909,13 +1113,8 @@ else:
starttls_server=True,
chatty=True,
connectionchatty=True)
flag = threading.Event()
server.start(flag)
# wait for it to start
flag.wait()
# try to connect
wrapped = False
try:
with server:
s = socket.socket()
s.setblocking(1)
s.connect((HOST, server.port))
......@@ -960,9 +1159,6 @@ else:
else:
s.send("over\n")
s.close()
finally:
server.stop()
server.join()
def test_socketserver(self):
"""Using a SocketServer to create and manage SSL connections."""
......@@ -981,7 +1177,7 @@ else:
# now fetch the same data from the HTTPS server
url = 'https://127.0.0.1:%d/%s' % (
server.port, os.path.split(CERTFILE)[1])
with test_support._check_py3k_warnings():
with test_support.check_py3k_warnings():
f = urllib.urlopen(url)
dlen = f.info().getheader("content-length")
if dlen and (int(dlen) > 0):
......@@ -1012,12 +1208,7 @@ else:
if test_support.verbose:
sys.stdout.write("\n")
server = AsyncoreEchoServer(CERTFILE)
flag = threading.Event()
server.start(flag)
# wait for it to start
flag.wait()
# try to connect
try:
with server:
s = ssl.wrap_socket(socket.socket())
s.connect(('127.0.0.1', server.port))
if test_support.verbose:
......@@ -1036,10 +1227,6 @@ else:
if test_support.verbose:
sys.stdout.write(" client: closing connection.\n")
s.close()
finally:
server.stop()
# wait for server thread to end
server.join()
def test_recv_send(self):
"""Test recv(), send() and friends."""
......@@ -1052,11 +1239,7 @@ else:
cacerts=CERTFILE,
chatty=True,
connectionchatty=False)
flag = threading.Event()
server.start(flag)
# wait for it to start
flag.wait()
# try to connect
with server:
s = ssl.wrap_socket(socket.socket(),
server_side=False,
certfile=CERTFILE,
......@@ -1064,7 +1247,6 @@ else:
cert_reqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1)
s.connect((HOST, server.port))
try:
# helper methods for standardising recv* method signatures
def _recv_into():
b = bytearray("\0"*100)
......@@ -1097,7 +1279,7 @@ else:
outdata = s.read()
outdata = outdata.decode('ASCII', 'strict')
if outdata != indata.lower():
raise support.TestFailed(
self.fail(
"While sending with <<%s>> bad data "
"<<%r>> (%d) received; "
"expected <<%r>> (%d)\n" % (
......@@ -1107,12 +1289,12 @@ else:
)
except ValueError as e:
if expect_success:
raise support.TestFailed(
self.fail(
"Failed to send with method <<%s>>; "
"expected to succeed.\n" % (meth_name,)
)
if not str(e).startswith(meth_name):
raise support.TestFailed(
self.fail(
"Method <<%s>> failed with unexpected "
"exception message: %s\n" % (
meth_name, e
......@@ -1126,7 +1308,7 @@ else:
outdata = recv_meth(*args)
outdata = outdata.decode('ASCII', 'strict')
if outdata != indata.lower():
raise support.TestFailed(
self.fail(
"While receiving with <<%s>> bad data "
"<<%r>> (%d) received; "
"expected <<%r>> (%d)\n" % (
......@@ -1136,12 +1318,12 @@ else:
)
except ValueError as e:
if expect_success:
raise support.TestFailed(
self.fail(
"Failed to receive with method <<%s>>; "
"expected to succeed.\n" % (meth_name,)
)
if not str(e).startswith(meth_name):
raise support.TestFailed(
self.fail(
"Method <<%s>> failed with unexpected "
"exception message: %s\n" % (
meth_name, e
......@@ -1152,9 +1334,6 @@ else:
s.write("over\n".encode("ASCII", "strict"))
s.close()
finally:
server.stop()
server.join()
def test_handshake_timeout(self):
# Issue #5103: SSL handshake must respect the socket timeout
......@@ -1185,12 +1364,8 @@ else:
c.settimeout(0.2)
c.connect((host, port))
# Will attempt handshake and time out
try:
ssl.wrap_socket(c)
except ssl.SSLError, e:
self.assertTrue("timed out" in str(e), str(e))
else:
self.fail("SSLError wasn't raised")
self.assertRaisesRegexp(ssl.SSLError, "timed out",
ssl.wrap_socket, c)
finally:
c.close()
try:
......@@ -1198,12 +1373,8 @@ else:
c.settimeout(0.2)
c = ssl.wrap_socket(c)
# Will attempt handshake and time out
try:
c.connect((host, port))
except ssl.SSLError, e:
self.assertTrue("timed out" in str(e), str(e))
else:
self.fail("SSLError wasn't raised")
self.assertRaisesRegexp(ssl.SSLError, "timed out",
c.connect, (host, port))
finally:
c.close()
finally:
......@@ -1211,23 +1382,45 @@ else:
t.join()
server.close()
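# A client restricted to weak DES ciphers should fail the handshake against the server's
# default cipher list, and the server is expected to record a "no shared cipher" error.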
def test_default_ciphers(self):
with ThreadedEchoServer(CERTFILE,
ssl_version=ssl.PROTOCOL_SSLv23,
chatty=False) as server:
sock = socket.socket()
try:
# Force a set of weak ciphers on our client socket
try:
s = ssl.wrap_socket(sock,
ssl_version=ssl.PROTOCOL_SSLv23,
ciphers="DES")
except ssl.SSLError:
self.skipTest("no DES cipher available")
with self.assertRaises((OSError, ssl.SSLError)):
s.connect((HOST, server.port))
finally:
sock.close()
self.assertIn("no shared cipher", str(server.conn_errors[0]))
def test_main(verbose=False):
if skip_expected:
raise test_support.TestSkipped("No SSL support")
global CERTFILE, SVN_PYTHON_ORG_ROOT_CERT
def test_main(verbose=False):
global CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, NOKIACERT, NULLBYTECERT
CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir,
"keycert.pem")
SVN_PYTHON_ORG_ROOT_CERT = os.path.join(
os.path.dirname(__file__) or os.curdir,
"https_svn_python_org_root.pem")
NOKIACERT = os.path.join(os.path.dirname(__file__) or os.curdir,
"nokia.pem")
NULLBYTECERT = os.path.join(os.path.dirname(__file__) or os.curdir,
"nullbytecert.pem")
if (not os.path.exists(CERTFILE) or
not os.path.exists(SVN_PYTHON_ORG_ROOT_CERT)):
not os.path.exists(SVN_PYTHON_ORG_ROOT_CERT) or
not os.path.exists(NOKIACERT) or
not os.path.exists(NULLBYTECERT)):
raise test_support.TestFailed("Can't read certificate files!")
tests = [BasicTests]
tests = [BasicTests, BasicSocketTests]
if test_support.is_resource_enabled('network'):
tests.append(NetworkedTests)
......
......@@ -307,7 +307,7 @@ class EventTests(BaseTestCase):
def test_reset_internal_locks(self):
evt = self.eventtype()
if not hasattr(evt, '_Event__cond'):
if not hasattr(evt, '_Event__cond') or sys.version_info[:3] <= (2, 7, 8):
self.skipTest("gevent: internal impl difference")
old_lock = evt._Event__cond._Condition__lock
evt._reset_internal_locks()
......
......@@ -730,6 +730,7 @@ class HTTPSTest(TestCase):
resp = h.getresponse()
self.assertIn('nginx', resp.getheader('server'))
if hasattr(test_support, 'system_must_validate_cert'): # gevent: run on 2.7.8
@test_support.system_must_validate_cert
def test_networked_trusted_by_default_cert(self):
# Default settings: requires a valid cert from a trusted CA
......
......@@ -294,7 +294,8 @@ class BadHELOServerTests(unittest.TestCase):
@unittest.skipUnless(threading, 'Threading required for this test.')
class TooLongLineTests(unittest.TestCase):
respdata = '250 OK' + ('.' * smtplib._MAXLINE * 2) + '\n'
# gevent: run on 2.7.8
respdata = '250 OK' + ('.' * getattr(smtplib, '_MAXLINE', 1) * 2) + '\n'
def setUp(self):
self.old_stdout = sys.stdout
......
......@@ -991,7 +991,7 @@ class ContextTests(unittest.TestCase):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.set_default_verify_paths()
@unittest.skipUnless(ssl.HAS_ECDH, "ECDH disabled on this OpenSSL build")
@unittest.skipUnless(ssl.HAS_ECDH, "ECDH disabled on this OpenSSL build") # gevent run on 2.7.8
def test_set_ecdh_curve(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.set_ecdh_curve("prime256v1")
......
......@@ -175,16 +175,81 @@ if 'thread' in os.getenv('GEVENT_FILE', ''):
# Fails with "OSError: 9 invalid file descriptor"; expect GC/lifetime issues
]
if sys.version_info[:2] == (2, 6):
if sys.version_info[:3] <= (2, 7, 8):
disabled_tests += [
# SSLv2 May or may not be available depending on the build
'test_ssl.BasicTests.test_constants',
'test_ssl.ThreadedTests.test_protocol_sslv23',
'test_ssl.ThreadedTests.test_protocol_sslv3',
'test_ssl.ThreadedTests.test_protocol_tlsv1',
]
# Our 2.7 tests are from 2.7.11 so all the new SSLContext stuff
# has to go.
disabled_tests += [
'test_ftplib.TestTLS_FTPClass.test_check_hostname',
'test_ftplib.TestTLS_FTPClass.test_context',
'test_urllib2.TrivialTests.test_cafile_and_context',
'test_urllib2_localnet.TestUrlopen.test_https',
'test_urllib2_localnet.TestUrlopen.test_https_sni',
'test_urllib2_localnet.TestUrlopen.test_https_with_cadefault',
'test_urllib2_localnet.TestUrlopen.test_https_with_cafile',
'test_httplib.HTTPTest.testHTTPWithConnectHostPort',
'test_httplib.HTTPSTest.test_local_good_hostname',
'test_httplib.HTTPSTest.test_local_unknown_cert',
'test_httplib.HTTPSTest.test_networked_bad_cert',
'test_httplib.HTTPSTest.test_networked_good_cert',
'test_httplib.HTTPSTest.test_networked_noverification',
'test_httplib.HTTPSTest.test_networked',
]
# Except for test_ssl, which is from 2.7.8. But it has some certificate problems
# due to age
disabled_tests += [
'test_ssl.NetworkedTests.test_connect',
'test_ssl.NetworkedTests.test_connect_ex',
'test_ssl.NetworkedTests.test_get_server_certificate',
# XXX: Not sure
'test_ssl.BasicSocketTests.test_unsupported_dtls',
]
# These are also bugs fixed more recently
disabled_tests += [
'test_httpservers.CGIHTTPServerTestCase.test_nested_cgi_path_issue21323',
'test_httpservers.CGIHTTPServerTestCase.test_query_with_continuous_slashes',
'test_httpservers.CGIHTTPServerTestCase.test_query_with_multiple_question_mark',
'test_socket.GeneralModuleTests.test_weakref__sock',
'test_threading.ThreadingExceptionTests.test_print_exception_stderr_is_none_1',
'test_threading.ThreadingExceptionTests.test_print_exception_stderr_is_none_2',
'test_wsgiref.IntegrationTests.test_request_length',
'test_httplib.HeaderTests.test_content_length_0',
'test_httplib.HeaderTests.test_invalid_headers',
'test_httplib.HeaderTests.test_malformed_headers_coped_with',
'test_httplib.BasicTest.test_error_leak',
'test_httplib.BasicTest.test_too_many_headers',
'test_httplib.BasicTest.test_proxy_tunnel_without_status_line',
'test_httplib.TunnelTests.test_connect',
'test_smtplib.TooLongLineTests.testLineTooLong',
'test_smtplib.SMTPSimTests.test_quit_resets_greeting',
# features in test_support not available
'test_threading_local.ThreadLocalTests.test_derived',
'test_threading_local.PyThreadingLocalTests.test_derived',
'test_urllib.UtilityTests.test_toBytes',
'test_httplib.HTTPSTest.test_networked_trusted_by_default_cert',
]
if os.environ.get('TRAVIS') == 'true':
# somehow these fail with "Permission denied" on travis
disabled_tests += [
'test_httpservers.CGIHTTPServerTestCase.test_post',
......@@ -193,6 +258,7 @@ if sys.version_info[:2] == (2, 6):
'test_httpservers.SimpleHTTPServerTestCase.test_get'
]
if sys.platform == 'darwin':
disabled_tests += [
'test_subprocess.POSIXProcessTestCase.test_run_abort',
......
......@@ -8,8 +8,10 @@ import atexit
TIMEOUT = 120
directory = '%s.%s' % sys.version_info[:2]
full_directory = '%s.%s.%s' % sys.version_info[:3]
if hasattr(sys, 'pypy_version_info'):
directory += 'pypy'
full_directory += 'pypy'
version = '%s.%s.%s' % sys.version_info[:3]
......@@ -27,11 +29,16 @@ def TESTRUNNER(tests=None):
util.log('WARNING: The tests in %s/ are from version %s and your Python is %s', directory, preferred_version, version)
if not tests:
tests = sorted(glob.glob('%s/test_*.py' % directory))
tests = glob.glob('%s/test_*.py' % directory)
version_tests = glob.glob('%s/test_*.py' % full_directory)
tests = sorted(tests)
version_tests = sorted(version_tests)
PYTHONPATH = (os.getcwd() + os.pathsep + get_absolute_pythonpath()).rstrip(':')
tests = [os.path.basename(x) for x in tests]
version_tests = [os.path.basename(x) for x in version_tests]
options = {'cwd': directory,
'timeout': TIMEOUT,
'setenv': {'PYTHONPATH': PYTHONPATH}}
......@@ -40,6 +47,14 @@ def TESTRUNNER(tests=None):
atexit.register(os.system, 'rm -f */@test*')
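# Tests present in the exact-version directory (e.g. 2.7.8/) override the copies in the
# generic major.minor directory: the generic copy is skipped here and the version-specific
# one is run from its own directory below.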
for filename in tests:
if filename in version_tests:
util.log("Overriding %s from %s with file from %s", filename, directory, full_directory)
continue
yield [sys.executable, '-u', '-m', 'monkey_test', filename], options.copy()
yield [sys.executable, '-u', '-m', 'monkey_test', '--Event', filename], options.copy()
options['cwd'] = full_directory
for filename in version_tests:
yield [sys.executable, '-u', '-m', 'monkey_test', filename], options.copy()
yield [sys.executable, '-u', '-m', 'monkey_test', '--Event', filename], options.copy()
......
......@@ -81,8 +81,8 @@ install () {
for var in "$@"; do
case "${var}" in
2.6)
install 2.6.9 python2.6
2.7.8)
install 2.7.8 python2.7.8
;;
2.7)
install 2.7.11 python2.7
......
......@@ -35,6 +35,8 @@ if WIN:
if not os.environ.get('PYEXE'):
os.environ['PYEXE'] = os.environ['PYTHON_EXE']
if sys.version_info[:2] < (2, 7):
raise Exception("Please install gevent 1.1 for Python 2.6")
import distutils
import distutils.sysconfig # to get CFLAGS to pass into c-ares configure script
......@@ -496,7 +498,6 @@ def run_setup(ext_modules, run_make):
test_suite="greentest.testrunner",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
......
[tox]
envlist =
py26,py27,py33,py34,py35,py27-cffi,pypy,lint
py27,py33,py34,py35,py27-cffi,pypy,lint
[testenv]
deps =
......