Commit 9c13ea92 authored by Éric Araujo

Branch merge

parents 3650ae57 3a9d1332
......@@ -17,5 +17,4 @@ con.executemany("insert into person(firstname, lastname) values (?, ?)", persons
for row in con.execute("select firstname, lastname from person"):
print(row)
# Using a dummy WHERE clause so SQLite does not take the shortcut for whole-table deletes.
print("I just deleted", con.execute("delete from person where 1=1").rowcount, "rows")
print("I just deleted", con.execute("delete from person").rowcount, "rows")
......@@ -555,18 +555,17 @@ Cursor Objects
attribute, the database engine's own support for the determination of "rows
affected"/"rows selected" is quirky.
For ``DELETE`` statements, SQLite reports :attr:`rowcount` as 0 if you make a
``DELETE FROM table`` without any condition.
For :meth:`executemany` statements, the number of modifications is summed up
into :attr:`rowcount`.
As required by the Python DB API Spec, the :attr:`rowcount` attribute "is -1 in
case no ``executeXX()`` has been performed on the cursor or the rowcount of the
last operation is not determinable by the interface".
last operation is not determinable by the interface". This includes ``SELECT``
statements because we cannot determine the number of rows a query produced
until all rows were fetched.
This includes ``SELECT`` statements because we cannot determine the number of
rows a query produced until all rows were fetched.
With SQLite versions before 3.6.5, :attr:`rowcount` is set to 0 if
you make a ``DELETE FROM table`` without any condition.
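A minimal sketch of the :attr:`rowcount` behaviour described above (assumes an in-memory database and a throwaway ``person`` table, mirroring the doc example at the top of this diff):

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.execute("create table person(firstname, lastname)")

    # executemany() sums the modifications of all parameter sets into rowcount.
    cur = con.executemany("insert into person(firstname, lastname) values (?, ?)",
                          [("A", "B"), ("C", "D")])
    print(cur.rowcount)   # 2

    # A SELECT cannot know its row count until all rows are fetched, so rowcount stays -1.
    print(con.execute("select * from person").rowcount)   # -1

    # With SQLite >= 3.6.5 an unconditional DELETE reports the real count;
    # older versions report 0, hence the dummy WHERE clause removed above.
    print(con.execute("delete from person").rowcount)   # 2 (0 before SQLite 3.6.5)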
.. attribute:: Cursor.lastrowid
......
......@@ -19,28 +19,33 @@ work. One should use importlib as the public-facing version of this module.
# Bootstrap-related code ######################################################
# TODO: when not on any of these platforms, replace _case_ok() w/
# ``lambda x,y: True``.
CASE_OK_PLATFORMS = 'win', 'cygwin', 'darwin'
CASE_INSENSITIVE_PLATFORMS = 'win', 'cygwin', 'darwin'
def _case_ok(directory, check):
"""Check if the directory contains something matching 'check'
case-sensitively when running on Windows or OS X.
def _case_insensitive_ok(directory, check):
"""Check if the directory contains something matching 'check' exists in the
directory.
If running on Windows or OS X and PYTHONCASEOK is a defined environment
variable then no case-sensitive check is performed. No check is done to see
if what is being checked for exists, so if the platform is not Windows or
OS X then assume the case is fine.
If PYTHONCASEOK is a defined environment variable then skip the
case-sensitivity check.
"""
if (any(map(sys.platform.startswith, CASE_OK_PLATFORMS)) and
b'PYTHONCASEOK' not in _os.environ):
if b'PYTHONCASEOK' not in _os.environ:
if not directory:
directory = '.'
return check in _os.listdir(directory)
else:
return True
def _case_sensitive_ok(directory, check):
"""Under case-sensitive filesystems always assume the case matches.
Since other code does the file existence check, that subsumes a
case-sensitivity check.
"""
return True
_case_ok = None
# TODO: Expose from marshal
......@@ -137,26 +142,16 @@ def _path_absolute(path):
def _write_atomic(path, data):
"""Best-effort function to write data to a path atomically.
Be prepared to handle a FileExistsError if concurrent writing of the
temporary file is attempted."""
# Renaming should be atomic on most platforms (including Windows).
# Under Windows, the limitation is that we can't rename() to an existing
# path, while POSIX will overwrite it. But here we don't really care
# if there is a glimpse of time during which the final pyc file doesn't
# exist.
"""Function to write data to a path atomically."""
# id() is used to generate a pseudo-random filename.
path_tmp = '{}.{}'.format(path, id(path))
fd = _os.open(path_tmp, _os.O_EXCL | _os.O_CREAT | _os.O_WRONLY, 0o666)
try:
# We first write data to a temporary file, and then use os.replace() to
# perform an atomic rename.
with _io.FileIO(fd, 'wb') as file:
file.write(data)
try:
_os.rename(path_tmp, path)
except FileExistsError:
# Windows (if we had access to MoveFileEx, we could overwrite)
_os.unlink(path)
_os.rename(path_tmp, path)
_os.replace(path_tmp, path)
except OSError:
try:
_os.unlink(path_tmp)
......@@ -602,9 +597,8 @@ class _SourceFileLoader(_FileLoader, SourceLoader):
return
try:
_write_atomic(path, data)
except (PermissionError, FileExistsError):
# Don't worry if you can't write bytecode or someone is writing
# it at the same time.
except PermissionError:
# Don't worry if you can't write bytecode.
pass
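A hedged sketch of the write-to-temp-then-``os.replace()`` pattern the hunk above switches to, using the public ``os``/``io`` modules rather than the bootstrap's ``_os``/``_io`` aliases:

    import io
    import os

    def write_atomic(path, data):
        # id() gives a cheap pseudo-random temporary name, as in the hunk above.
        path_tmp = '{}.{}'.format(path, id(path))
        fd = os.open(path_tmp, os.O_EXCL | os.O_CREAT | os.O_WRONLY, 0o666)
        try:
            # Write to the temporary file first, then atomically swap it in;
            # os.replace() overwrites the destination on both POSIX and Windows.
            with io.FileIO(fd, 'wb') as file:
                file.write(data)
            os.replace(path_tmp, path)
        except OSError:
            try:
                os.unlink(path_tmp)
            except OSError:
                pass
            raise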
......@@ -713,10 +707,12 @@ class PathFinder:
the default hook, for which ImportError is raised.
"""
if path == '':
path = _os.getcwd()
try:
finder = sys.path_importer_cache[path]
except KeyError:
finder = cls._path_hooks(path if path != '' else _os.getcwd())
finder = cls._path_hooks(path)
sys.path_importer_cache[path] = finder
else:
if finder is None and default:
......@@ -861,19 +857,50 @@ class _ImportLockContext:
imp.release_lock()
_IMPLICIT_META_PATH = [BuiltinImporter, FrozenImporter, _DefaultPathFinder]
def _resolve_name(name, package, level):
"""Resolve a relative module name to an absolute one."""
dot = len(package)
for x in range(level, 1, -1):
try:
dot = package.rindex('.', 0, dot)
except ValueError:
raise ValueError("attempted relative import beyond "
"top-level package")
if name:
return "{0}.{1}".format(package[:dot], name)
else:
return package[:dot]
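For orientation, a few hypothetical inputs and the names the helper above resolves them to (``package`` is the caller's ``__package__``; ``level`` counts the leading dots of the relative import):

    # _resolve_name('mod', 'top.sub', 1) -> 'top.sub.mod'   (from . import ...)
    # _resolve_name('mod', 'top.sub', 2) -> 'top.mod'       (from .. import ...)
    # _resolve_name('',    'top.sub', 2) -> 'top'
    # Going past the top-level package raises
    # ValueError("attempted relative import beyond top-level package").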
def _find_module(name, path):
"""Find a module's loader."""
meta_path = sys.meta_path + _IMPLICIT_META_PATH
for finder in meta_path:
loader = finder.find_module(name, path)
if loader is not None:
# The parent import may have already imported this module.
if name not in sys.modules:
return loader
else:
return sys.modules[name].__loader__
else:
return None
_ERR_MSG = 'No module named {!r}'
def _gcd_import(name, package=None, level=0):
"""Import and return the module based on its name, the package the call is
being made from, and the level adjustment.
def _set___package__(module):
"""Set __package__ on a module."""
# What comes out of sys.modules may not be a module (e.g. an int),
# so guard the attribute access.
try:
module.__package__ = module.__name__
if not hasattr(module, '__path__'):
module.__package__ = module.__package__.rpartition('.')[0]
except AttributeError:
pass
This function represents the greatest common denominator of functionality
between import_module and __import__. This includes setting __package__ if
the loader did not.
"""
def _sanity_check(name, package, level):
"""Verify arguments are "sane"."""
if package:
if not hasattr(package, 'rindex'):
raise ValueError("__package__ not set to a string")
......@@ -883,18 +910,47 @@ def _gcd_import(name, package=None, level=0):
raise SystemError(msg.format(package))
if not name and level == 0:
raise ValueError("Empty module name")
def _find_search_path(name, import_):
"""Find the search path for a module.
import_ is expected to be a callable which takes the name of a module to
import. It is required to decouple the function from importlib.
"""
path = None
parent = name.rpartition('.')[0]
if parent:
if parent not in sys.modules:
import_(parent)
# Backwards-compatibility; be nicer to skip the dict lookup.
parent_module = sys.modules[parent]
try:
path = parent_module.__path__
except AttributeError:
msg = (_ERR_MSG + '; {} is not a package').format(name, parent)
raise ImportError(msg)
return parent, path
_IMPLICIT_META_PATH = [BuiltinImporter, FrozenImporter, _DefaultPathFinder]
_ERR_MSG = 'No module named {!r}'
def _gcd_import(name, package=None, level=0):
"""Import and return the module based on its name, the package the call is
being made from, and the level adjustment.
This function represents the greatest common denominator of functionality
between import_module and __import__. This includes setting __package__ if
the loader did not.
"""
_sanity_check(name, package, level)
if level > 0:
dot = len(package)
for x in range(level, 1, -1):
try:
dot = package.rindex('.', 0, dot)
except ValueError:
raise ValueError("attempted relative import beyond "
"top-level package")
if name:
name = "{0}.{1}".format(package[:dot], name)
else:
name = package[:dot]
name = _resolve_name(name, package, level)
with _ImportLockContext():
try:
module = sys.modules[name]
......@@ -905,70 +961,33 @@ def _gcd_import(name, package=None, level=0):
return module
except KeyError:
pass
parent = name.rpartition('.')[0]
path = None
if parent:
if parent not in sys.modules:
_gcd_import(parent)
# Backwards-compatibility; be nicer to skip the dict lookup.
parent_module = sys.modules[parent]
try:
path = parent_module.__path__
except AttributeError:
msg = (_ERR_MSG + '; {} is not a package').format(name, parent)
raise ImportError(msg)
meta_path = sys.meta_path + _IMPLICIT_META_PATH
for finder in meta_path:
loader = finder.find_module(name, path)
if loader is not None:
# The parent import may have already imported this module.
if name not in sys.modules:
loader.load_module(name)
break
else:
parent, path = _find_search_path(name, _gcd_import)
loader = _find_module(name, path)
if loader is None:
raise ImportError(_ERR_MSG.format(name))
elif name not in sys.modules:
# The parent import may have already imported this module.
loader.load_module(name)
# Backwards-compatibility; be nicer to skip the dict lookup.
module = sys.modules[name]
if parent:
# Set the module as an attribute on its parent.
parent_module = sys.modules[parent]
setattr(parent_module, name.rpartition('.')[2], module)
# Set __package__ if the loader did not.
if not hasattr(module, '__package__') or module.__package__ is None:
# Watch out for what comes out of sys.modules to not be a module,
# e.g. an int.
try:
module.__package__ = module.__name__
if not hasattr(module, '__path__'):
module.__package__ = module.__package__.rpartition('.')[0]
except AttributeError:
pass
_set___package__(module)
return module
def __import__(name, globals={}, locals={}, fromlist=[], level=0):
"""Import a module.
def _return_module(module, name, fromlist, level, import_):
"""Figure out what __import__ should return.
The 'globals' argument is used to infer where the import is occurring from
to handle relative imports. The 'locals' argument is ignored. The
'fromlist' argument specifies what should exist as attributes on the module
being imported (e.g. ``from module import <fromlist>``). The 'level'
argument represents the package location to import from in a relative
import (e.g. ``from ..pkg import mod`` would have a 'level' of 2).
The import_ parameter is a callable which takes the name of a module to
import. It is required to decouple the function from assuming importlib's
import implementation is desired.
"""
if not hasattr(name, 'rpartition'):
raise TypeError("module name must be str, not {}".format(type(name)))
if level == 0:
module = _gcd_import(name)
else:
# __package__ is not guaranteed to be defined or could be set to None
# to represent that its proper value is unknown
package = globals.get('__package__')
if package is None:
package = globals['__name__']
if '__path__' not in globals:
package = package.rpartition('.')[0]
module = _gcd_import(name, package, level)
# The hell that is fromlist ...
if not fromlist:
# Return up to the first dot in 'name'. This is complicated by the fact
......@@ -989,12 +1008,50 @@ def __import__(name, globals={}, locals={}, fromlist=[], level=0):
fromlist.extend(module.__all__)
for x in (y for y in fromlist if not hasattr(module,y)):
try:
_gcd_import('{0}.{1}'.format(module.__name__, x))
import_('{0}.{1}'.format(module.__name__, x))
except ImportError:
pass
return module
def _calc___package__(globals):
"""Calculate what __package__ should be.
__package__ is not guaranteed to be defined or could be set to None
to represent that its proper value is unknown.
"""
package = globals.get('__package__')
if package is None:
package = globals['__name__']
if '__path__' not in globals:
package = package.rpartition('.')[0]
return package
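A couple of hypothetical globals dicts and the package the helper above derives from them:

    # A plain module falls back to its parent package:
    # _calc___package__({'__name__': 'top.sub.mod'})              -> 'top.sub'
    # A package (it has __path__) is its own package:
    # _calc___package__({'__name__': 'top.sub', '__path__': []})  -> 'top.sub'
    # An explicit __package__ wins:
    # _calc___package__({'__package__': 'top', '__name__': 'x'})  -> 'top'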
def __import__(name, globals={}, locals={}, fromlist=[], level=0):
"""Import a module.
The 'globals' argument is used to infer where the import is occurring from
to handle relative imports. The 'locals' argument is ignored. The
'fromlist' argument specifies what should exist as attributes on the module
being imported (e.g. ``from module import <fromlist>``). The 'level'
argument represents the package location to import from in a relative
import (e.g. ``from ..pkg import mod`` would have a 'level' of 2).
"""
if not hasattr(name, 'rpartition'):
raise TypeError("module name must be str, not {}".format(type(name)))
if level == 0:
module = _gcd_import(name)
elif level < 0:
raise ValueError('level must be >= 0')
else:
package = _calc___package__(globals)
module = _gcd_import(name, package, level)
return _return_module(module, name, fromlist, level, _gcd_import)
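For context, roughly the calls the interpreter makes for from-imports, which is what exercises the ``fromlist`` and ``level`` handling above (the relative example uses hypothetical package names):

    # What ``from xml.etree import ElementTree`` boils down to:
    pkg = __import__('xml.etree', globals(), locals(), ['ElementTree'], 0)
    ElementTree = pkg.ElementTree   # non-empty fromlist returns 'xml.etree' itself

    # A relative form such as ``from ..pkg import mod`` (in a module whose
    # __package__ is 'top.sub') would instead be
    #     __import__('pkg', globals(), locals(), ['mod'], 2)
    # which resolves 'pkg' against 'top.sub' with level=2, i.e. to 'top.pkg'.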
def _setup(sys_module, imp_module):
"""Setup importlib by importing needed built-in modules and injecting them
into the global namespace.
......@@ -1003,7 +1060,7 @@ def _setup(sys_module, imp_module):
modules, those two modules must be explicitly passed in.
"""
global imp, sys
global _case_ok, imp, sys
imp = imp_module
sys = sys_module
......@@ -1037,6 +1094,11 @@ def _setup(sys_module, imp_module):
setattr(self_module, '_os', os_module)
setattr(self_module, 'path_sep', path_sep)
if sys_module.platform in CASE_INSENSITIVE_PLATFORMS:
_case_ok = _case_insensitive_ok
else:
_case_ok = _case_sensitive_ok
def _install(sys_module, imp_module):
"""Install importlib as the implementation of import.
......
......@@ -7,7 +7,6 @@ builtins.__import__ instead of importlib.__import__.
from importlib.test.import_ import util
import os.path
from test.support import run_unittest
import sys
import unittest
......@@ -15,10 +14,17 @@ def test_main():
start_dir = os.path.dirname(__file__)
top_dir = os.path.dirname(os.path.dirname(start_dir))
test_loader = unittest.TestLoader()
if '--builtin' in sys.argv:
util.using___import__ = True
run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir))
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='Execute the importlib test '
'suite')
parser.add_argument('-b', '--builtin', action='store_true', default=False,
help='use builtins.__import__() instead of importlib')
args = parser.parse_args()
if args.builtin:
util.using___import__ = True
test_main()
......@@ -12,6 +12,13 @@ class APITest(unittest.TestCase):
with self.assertRaises(TypeError):
util.import_(42)
def test_negative_level(self):
# Raise ValueError when a negative level is specified.
# PEP 328 did away with sys.modules None entries and the ambiguity of
# absolute/relative imports.
with self.assertRaises(ValueError):
util.import_('os', globals(), level=-1)
def test_main():
from test.support import run_unittest
......
......@@ -82,7 +82,7 @@ class FinderTests(unittest.TestCase):
with util.import_state(path=[path], path_hooks=[hook]):
loader = machinery.PathFinder.find_module(module)
self.assertIs(loader, importer)
self.assertIn('', sys.path_importer_cache)
self.assertIn(os.getcwd(), sys.path_importer_cache)
class DefaultPathFinderTests(unittest.TestCase):
......
......@@ -142,15 +142,21 @@ def _copy(master_fd, master_read=_read, stdin_read=_read):
Copies
pty master -> standard output (master_read)
standard input -> pty master (stdin_read)"""
while 1:
rfds, wfds, xfds = select(
[master_fd, STDIN_FILENO], [], [])
fds = [master_fd, STDIN_FILENO]
while True:
rfds, wfds, xfds = select(fds, [], [])
if master_fd in rfds:
data = master_read(master_fd)
os.write(STDOUT_FILENO, data)
if not data: # Reached EOF.
fds.remove(master_fd)
else:
os.write(STDOUT_FILENO, data)
if STDIN_FILENO in rfds:
data = stdin_read(STDIN_FILENO)
_writen(master_fd, data)
if not data:
fds.remove(STDIN_FILENO)
else:
_writen(master_fd, data)
def spawn(argv, master_read=_read, stdin_read=_read):
"""Create a spawned process."""
......
......@@ -8,7 +8,9 @@ import errno
import pty
import os
import sys
import select
import signal
import socket
import unittest
TEST_STRING_1 = b"I wish to buy a fish license.\n"
......@@ -194,9 +196,96 @@ class PtyTest(unittest.TestCase):
# pty.fork() passed.
class SmallPtyTests(unittest.TestCase):
"""These tests don't spawn children or hang."""
def setUp(self):
self.orig_stdin_fileno = pty.STDIN_FILENO
self.orig_stdout_fileno = pty.STDOUT_FILENO
self.orig_pty_select = pty.select
self.fds = [] # A list of file descriptors to close.
self.select_rfds_lengths = []
self.select_rfds_results = []
def tearDown(self):
pty.STDIN_FILENO = self.orig_stdin_fileno
pty.STDOUT_FILENO = self.orig_stdout_fileno
pty.select = self.orig_pty_select
for fd in self.fds:
try:
os.close(fd)
except:
pass
def _pipe(self):
pipe_fds = os.pipe()
self.fds.extend(pipe_fds)
return pipe_fds
def _mock_select(self, rfds, wfds, xfds):
# This will raise IndexError when no more expected calls exist.
self.assertEqual(self.select_rfds_lengths.pop(0), len(rfds))
return self.select_rfds_results.pop(0), [], []
def test__copy_to_each(self):
"""Test the normal data case on both master_fd and stdin."""
read_from_stdout_fd, mock_stdout_fd = self._pipe()
pty.STDOUT_FILENO = mock_stdout_fd
mock_stdin_fd, write_to_stdin_fd = self._pipe()
pty.STDIN_FILENO = mock_stdin_fd
socketpair = socket.socketpair()
masters = [s.fileno() for s in socketpair]
self.fds.extend(masters)
# Feed data. Smaller than PIPEBUF. These writes will not block.
os.write(masters[1], b'from master')
os.write(write_to_stdin_fd, b'from stdin')
# Expect two select calls, the last one will cause IndexError
pty.select = self._mock_select
self.select_rfds_lengths.append(2)
self.select_rfds_results.append([mock_stdin_fd, masters[0]])
self.select_rfds_lengths.append(2)
with self.assertRaises(IndexError):
pty._copy(masters[0])
# Test that the right data went to the right places.
rfds = select.select([read_from_stdout_fd, masters[1]], [], [], 0)[0]
self.assertEqual([read_from_stdout_fd, masters[1]], rfds)
self.assertEqual(os.read(read_from_stdout_fd, 20), b'from master')
self.assertEqual(os.read(masters[1], 20), b'from stdin')
def test__copy_eof_on_all(self):
"""Test the empty read EOF case on both master_fd and stdin."""
read_from_stdout_fd, mock_stdout_fd = self._pipe()
pty.STDOUT_FILENO = mock_stdout_fd
mock_stdin_fd, write_to_stdin_fd = self._pipe()
pty.STDIN_FILENO = mock_stdin_fd
socketpair = socket.socketpair()
masters = [s.fileno() for s in socketpair]
self.fds.extend(masters)
os.close(masters[1])
socketpair[1].close()
os.close(write_to_stdin_fd)
# Expect two select calls, the last one will cause IndexError
pty.select = self._mock_select
self.select_rfds_lengths.append(2)
self.select_rfds_results.append([mock_stdin_fd, masters[0]])
# We expect that both fds were removed from the fds list as they
# both encountered an EOF before the second select call.
self.select_rfds_lengths.append(0)
with self.assertRaises(IndexError):
pty._copy(masters[0])
def test_main(verbose=None):
try:
run_unittest(PtyTest)
run_unittest(SmallPtyTests, PtyTest)
finally:
reap_children()
......
......@@ -12,10 +12,10 @@ class TestCase(unittest.TestCase):
l = []
fun = lambda x: l.append(x)
scheduler = sched.scheduler(time.time, time.sleep)
for x in [0.05, 0.04, 0.03, 0.02, 0.01]:
for x in [0.5, 0.4, 0.3, 0.2, 0.1]:
z = scheduler.enter(x, 1, fun, (x,))
scheduler.run()
self.assertEqual(l, [0.01, 0.02, 0.03, 0.04, 0.05])
self.assertEqual(l, [0.1, 0.2, 0.3, 0.4, 0.5])
def test_enterabs(self):
l = []
......@@ -31,7 +31,7 @@ class TestCase(unittest.TestCase):
fun = lambda x: l.append(x)
scheduler = sched.scheduler(time.time, time.sleep)
for priority in [1, 2, 3, 4, 5]:
z = scheduler.enter(0.01, priority, fun, (priority,))
z = scheduler.enterabs(0.01, priority, fun, (priority,))
scheduler.run()
self.assertEqual(l, [1, 2, 3, 4, 5])
......@@ -39,11 +39,12 @@ class TestCase(unittest.TestCase):
l = []
fun = lambda x: l.append(x)
scheduler = sched.scheduler(time.time, time.sleep)
event1 = scheduler.enter(0.01, 1, fun, (0.01,))
event2 = scheduler.enter(0.02, 1, fun, (0.02,))
event3 = scheduler.enter(0.03, 1, fun, (0.03,))
event4 = scheduler.enter(0.04, 1, fun, (0.04,))
event5 = scheduler.enter(0.05, 1, fun, (0.05,))
now = time.time()
event1 = scheduler.enterabs(now + 0.01, 1, fun, (0.01,))
event2 = scheduler.enterabs(now + 0.02, 1, fun, (0.02,))
event3 = scheduler.enterabs(now + 0.03, 1, fun, (0.03,))
event4 = scheduler.enterabs(now + 0.04, 1, fun, (0.04,))
event5 = scheduler.enterabs(now + 0.05, 1, fun, (0.05,))
scheduler.cancel(event1)
scheduler.cancel(event5)
scheduler.run()
......@@ -64,11 +65,12 @@ class TestCase(unittest.TestCase):
l = []
fun = lambda x: l.append(x)
scheduler = sched.scheduler(time.time, time.sleep)
e5 = scheduler.enter(0.05, 1, fun)
e1 = scheduler.enter(0.01, 1, fun)
e2 = scheduler.enter(0.02, 1, fun)
e4 = scheduler.enter(0.04, 1, fun)
e3 = scheduler.enter(0.03, 1, fun)
now = time.time()
e5 = scheduler.enterabs(now + 0.05, 1, fun)
e1 = scheduler.enterabs(now + 0.01, 1, fun)
e2 = scheduler.enterabs(now + 0.02, 1, fun)
e4 = scheduler.enterabs(now + 0.04, 1, fun)
e3 = scheduler.enterabs(now + 0.03, 1, fun)
# queue property is supposed to return an ordered list of
# upcoming events
self.assertEqual(list(scheduler.queue), [e1, e2, e3, e4, e5])
......
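A minimal sketch of the distinction the test fixes above rely on: ``enter()`` takes a delay relative to now, ``enterabs()`` an absolute deadline in the scheduler's timefunc units:

    import sched
    import time

    s = sched.scheduler(time.time, time.sleep)
    s.enter(0.2, 1, print, ("0.2s after this call",))                 # relative delay
    s.enterabs(time.time() + 0.1, 1, print, ("absolute deadline",))   # absolute time
    s.run()   # fires the absolute-deadline event first, then the relative one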
......@@ -1352,7 +1352,6 @@ def xinclude():
r"""
Basic inclusion example (XInclude C.1)
>>> from xml.etree import ElementTree as ET
>>> from xml.etree import ElementInclude
>>> document = xinclude_loader("C1.xml")
......@@ -1882,12 +1881,7 @@ class CleanContext(object):
def __enter__(self):
from xml.etree import ElementPath
if hasattr(ET, '_namespace_map'):
self._nsmap = ET._namespace_map
else:
# when testing the cElementTree alias
from xml.etree.ElementTree import _namespace_map
self._nsmap = _namespace_map
self._nsmap = ET.register_namespace._namespace_map
# Copy the default namespace mapping
self._nsmap_copy = self._nsmap.copy()
# Copy the path cache (should be empty)
......@@ -1904,12 +1898,20 @@ class CleanContext(object):
self.checkwarnings.__exit__(*args)
class TestAcceleratorNotImported(unittest.TestCase):
# Test that the C accelerator was not imported for pyET
def test_correct_import_pyET(self):
self.assertEqual(pyET.Element.__module__, 'xml.etree.ElementTree')
def test_main(module=pyET):
from test import test_xml_etree
# The same doctests are used for both the Python and the C implementations
test_xml_etree.ET = module
support.run_unittest(TestAcceleratorNotImported)
# XXX the C module should give the same warnings as the Python module
with CleanContext(quiet=(module is not pyET)):
support.run_doctest(test_xml_etree, verbosity=True)
......
......@@ -5,7 +5,7 @@ from test.support import import_fresh_module
import unittest
cET = import_fresh_module('xml.etree.ElementTree', fresh=['_elementtree'])
cET_alias = import_fresh_module('xml.etree.cElementTree', fresh=['_elementtree'])
cET_alias = import_fresh_module('xml.etree.cElementTree', fresh=['_elementtree', 'xml.etree'])
# cElementTree specific tests
......@@ -46,6 +46,15 @@ class MiscTests(unittest.TestCase):
finally:
data = None
@unittest.skipUnless(cET, 'requires _elementtree')
class TestAcceleratorImported(unittest.TestCase):
# Test that the C accelerator was imported, as expected
def test_correct_import_cET(self):
self.assertEqual(cET.Element.__module__, '_elementtree')
def test_correct_import_cET_alias(self):
self.assertEqual(cET_alias.Element.__module__, '_elementtree')
def test_main():
from test import test_xml_etree, test_xml_etree_c
......@@ -53,7 +62,7 @@ def test_main():
# Run the tests specific to the C implementation
support.run_doctest(test_xml_etree_c, verbosity=True)
support.run_unittest(MiscTests)
support.run_unittest(MiscTests, TestAcceleratorImported)
# Run the same test suite as the Python module
test_xml_etree.test_main(module=cET)
......
......@@ -1086,6 +1086,8 @@ _namespace_map = {
# dublin core
"http://purl.org/dc/elements/1.1/": "dc",
}
# For tests and troubleshooting
register_namespace._namespace_map = _namespace_map
def _raise_serialization_error(text):
raise TypeError(
......
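A quick sketch of what the attribute added above enables: tests and debugging code can reach the well-known prefix map through the public ``register_namespace`` function instead of the private module global, as the CleanContext change earlier in this diff does (the registered URI below is hypothetical):

    from xml.etree import ElementTree as ET

    nsmap = ET.register_namespace._namespace_map
    print(nsmap["http://purl.org/dc/elements/1.1/"])   # 'dc' (predefined Dublin Core prefix)

    ET.register_namespace("ex", "http://example.org/ns")   # hypothetical URI
    print(nsmap["http://example.org/ns"])                  # 'ex'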
......@@ -858,7 +858,7 @@ altbininstall: $(BUILDPYTHON)
done
$(INSTALL_PROGRAM) $(BUILDPYTHON) $(DESTDIR)$(BINDIR)/python$(LDVERSION)$(EXE)
-if test "$(VERSION)" != "$(LDVERSION)"; then \
if test -f $(DESTDIR)$(BINDIR)/$(PYTHON)$(VERSION)$(EXE) -o -h $(DESTDIR)$(BINDIR)/$(PYTHON)$(VERSION)$(EXE); \
if test -f $(DESTDIR)$(BINDIR)/python$(VERSION)$(EXE) -o -h $(DESTDIR)$(BINDIR)/python$(VERSION)$(EXE); \
then rm -f $(DESTDIR)$(BINDIR)/python$(VERSION)$(EXE); \
fi; \
(cd $(DESTDIR)$(BINDIR); $(LN) python$(LDVERSION)$(EXE) python$(VERSION)$(EXE)); \
......@@ -879,11 +879,11 @@ altbininstall: $(BUILDPYTHON)
fi
bininstall: altbininstall
-if test -f $(DESTDIR)$(BINDIR)/$(PYTHON)3$(EXE) -o -h $(DESTDIR)$(BINDIR)/$(PYTHON)3$(EXE); \
then rm -f $(DESTDIR)$(BINDIR)/$(PYTHON)3$(EXE); \
-if test -f $(DESTDIR)$(BINDIR)/python3$(EXE) -o -h $(DESTDIR)$(BINDIR)/python3$(EXE); \
then rm -f $(DESTDIR)$(BINDIR)/python3$(EXE); \
else true; \
fi
(cd $(DESTDIR)$(BINDIR); $(LN) python$(VERSION)$(EXE) $(PYTHON)3$(EXE))
(cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)$(EXE) python3$(EXE))
-if test "$(VERSION)" != "$(LDVERSION)"; then \
rm -f $(DESTDIR)$(BINDIR)/python$(VERSION)-config; \
(cd $(DESTDIR)$(BINDIR); $(LN) -s python$(LDVERSION)-config python$(VERSION)-config); \
......
......@@ -466,6 +466,13 @@ Core and Builtins
Library
-------
- Issue #13961: Move importlib over to using os.replace() for atomic renaming.
- Do away with ambiguous level values (as suggested by PEP 328) in
importlib.__import__() by raising ValueError when level < 0.
- Issue #2489: pty.spawn could consume 100% cpu when it encountered an EOF.
- Issue #13014: Fix a possible reference leak in SSLSocket.getpeercert().
- Issue #13777: Add PF_SYSTEM sockets on OS X.
......@@ -2256,8 +2263,8 @@ C-API
Documentation
-------------
- Issue #13491: Fix many errors in sqlite3 documentation. Initial
patch by Johannes Vogel.
- Issues #13491 and #13995: Fix many errors in sqlite3 documentation.
Initial patch for #13491 by Johannes Vogel.
- Issue #13402: Document absoluteness of sys.executable.
......
......@@ -2547,7 +2547,9 @@ PyInit__ssl(void)
PyModule_AddIntConstant(m, "OP_CIPHER_SERVER_PREFERENCE",
SSL_OP_CIPHER_SERVER_PREFERENCE);
PyModule_AddIntConstant(m, "OP_SINGLE_DH_USE", SSL_OP_SINGLE_DH_USE);
#ifdef SSL_OP_SINGLE_ECDH_USE
PyModule_AddIntConstant(m, "OP_SINGLE_ECDH_USE", SSL_OP_SINGLE_ECDH_USE);
#endif
#ifdef SSL_OP_NO_COMPRESSION
PyModule_AddIntConstant(m, "OP_NO_COMPRESSION",
SSL_OP_NO_COMPRESSION);
......