Commit 6ee1a1ce authored by Johannes Reiff, committed by GitHub

Merge branch 'master' into pr-easyinstall

parents ec270f9e 73376585
[bumpversion]
-current_version = 42.0.2
+current_version = 45.1.0
commit = True
tag = True
......
[flake8]
exclude=
.tox
setuptools/_vendor,
pkg_resources/_vendor
ignore =
# W503 violates spec https://github.com/PyCQA/pycodestyle/issues/513
W503
# W504 has issues https://github.com/OCA/maintainer-quality-tools/issues/545
W504
setuptools/site-patch.py F821
setuptools/py*compat.py F811
name: >-
👷
Test suite
on:
push:
pull_request:
schedule:
- cron: 1 0 * * * # Run daily at 0:01 UTC
jobs:
tests:
name: >-
${{ matrix.python-version }}
/
${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
# max-parallel: 5
matrix:
python-version:
- 3.8
- pypy3
- 3.7
- 3.6
- 3.5
os:
- ubuntu-latest
- ubuntu-16.04
- macOS-latest
# - windows-2019
# - windows-2016
env:
NETWORK_REQUIRED: 1
TOX_PARALLEL_NO_SPINNER: 1
TOXENV: python
steps:
- uses: actions/checkout@master
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1.1.1
with:
python-version: ${{ matrix.python-version }}
- name: Log Python version
run: >-
python --version
- name: Log Python location
run: >-
which python
- name: Log Python env
run: >-
python -m sysconfig
- name: Pip cache
uses: actions/cache@v1
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('docs/requirements.txt') }}-${{ hashFiles('tests/requirements.txt') }}-${{ hashFiles('tox.ini') }}
restore-keys: |
${{ runner.os }}-pip-
${{ runner.os }}-
- name: Upgrade pip/setuptools/wheel
run: >-
python
-m pip install
--disable-pip-version-check
--upgrade
pip setuptools wheel
- name: Install tox
run: >-
python -m pip install --upgrade tox tox-venv
- name: Log installed dists
run: >-
python -m pip freeze --all
- name: Adjust TOXENV for PyPy
if: startsWith(matrix.python-version, 'pypy')
run: >-
echo "::set-env name=TOXENV::${{ matrix.python-version }}"
- name: Log env vars
run: >-
env
- name: Verify that there's no cached Python modules in sources
if: >-
! startsWith(matrix.os, 'windows-')
run: >-
! grep pyc setuptools.egg-info/SOURCES.txt
- name: 'Initialize tox envs: ${{ matrix.env.TOXENV }}'
run: >-
python -m
tox
--parallel auto
--notest
--skip-missing-interpreters false
- name: Test with tox
run: >-
python -m
tox
--parallel auto
--
--cov
@@ -6,13 +6,11 @@ jobs:
   include:
   - &latest_py2
     python: 2.7
+    env: TOXENV=py27
   - <<: *latest_py2
-    env: LANG=C
+    env: LANG=C TOXENV=py27
-  - python: pypy
-    env: DISABLE_COVERAGE=1  # Don't run coverage on pypy (too slow).
   - python: pypy3
-    env: DISABLE_COVERAGE=1
+    env: DISABLE_COVERAGE=1  # Don't run coverage on pypy (too slow).
-  - python: 3.4
   - python: 3.5
   - python: 3.6
   - python: 3.7
@@ -42,8 +40,6 @@ install:
   - pip freeze --all
   - env
-  # update egg_info based on setup.py in checkout
-  - python bootstrap.py
   - "! grep pyc setuptools.egg-info/SOURCES.txt"
 script:
......
v45.1.0
-------
* #1458: Add minimum sunset date and preamble to Python 2 warning.
* #1704: Set sys.argv[0] in setup script run by build_meta.__legacy__
* #1974: Add Python 3 Only Trove Classifier and remove universal wheel declaration for more complete transition from Python 2.
v45.0.0
-------
* #1458: Drop support for Python 2. Setuptools now requires Python 3.5 or later. Install setuptools using pip >=9 or pin to Setuptools <45 to maintain 2.7 support.
* #1959: Fix for Python 4: replace unsafe six.PY3 with six.PY2
v44.0.0
-------
* #1908: Drop support for Python 3.4.
v43.0.0
-------
* #1634: Include ``pyproject.toml`` in source distribution by default. Projects relying on the previous behavior where ``pyproject.toml`` was excluded by default should stop relying on that behavior or add ``exclude pyproject.toml`` to their MANIFEST.in file.
* #1927: Setuptools once again declares 'setuptools' in the ``build-system.requires`` and adds PEP 517 build support by declaring itself as the ``build-backend``. It additionally specifies ``build-system.backend-path`` to rely on itself for those builders that support it.
v42.0.2
-------
......
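The changelog entries above (#1927, #1704) describe setuptools declaring itself as its own PEP 517 build backend and the legacy backend now pointing sys.argv[0] at the setup script. As a rough, hedged sketch of what that backend interface looks like when driven in-process (paths are assumptions; real builds normally go through a frontend such as pip):

    # Minimal sketch, assuming it is run from a project root containing setup.py.
    import os
    import tempfile

    from setuptools.build_meta import __legacy__ as legacy_backend

    wheel_dir = tempfile.mkdtemp()
    # The legacy backend also sets sys.argv[0] to the setup script while it runs.
    wheel_name = legacy_backend.build_wheel(wheel_dir)
    print(os.path.join(wheel_dir, wheel_name))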
@@ -9,8 +9,8 @@ environment:
   matrix:
     - APPVEYOR_JOB_NAME: "python36-x64"
       PYTHON: "C:\\Python36-x64"
-    - APPVEYOR_JOB_NAME: "python27-x64"
-      PYTHON: "C:\\Python27-x64"
+    - APPVEYOR_JOB_NAME: "python37-x64"
+      PYTHON: "C:\\Python37-x64"

 install:
   # symlink python from a directory with a space
@@ -28,7 +28,6 @@ test_script:
   - python -m pip install --disable-pip-version-check --upgrade pip setuptools wheel
   - pip install --upgrade tox tox-venv virtualenv
   - pip freeze --all
-  - python bootstrap.py
   - tox -- --cov

 after_test:
......
# Create the project in Azure with:
# az devops project create --name $name --organization https://dev.azure.com/$org/ --visibility public
# then configure the pipelines (through web UI)
trigger:
branches:
include:
- '*'
tags:
include:
- '*'
pool:
vmimage: 'Ubuntu-18.04'
variables:
- group: Azure secrets
stages:
- stage: Test
jobs:
- job: 'Test'
strategy:
matrix:
Python36:
python.version: '3.6'
Python38:
python.version: '3.8'
maxParallel: 4
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '$(python.version)'
architecture: 'x64'
- script: python -m pip install tox
displayName: 'Install tox'
- script: |
tox -- --junit-xml=test-results.xml
displayName: 'run tests'
- task: PublishTestResults@2
inputs:
testResultsFiles: '**/test-results.xml'
testRunTitle: 'Python $(python.version)'
condition: succeededOrFailed()
- stage: Publish
dependsOn: Test
jobs:
- job: 'Publish'
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '3.8'
architecture: 'x64'
- script: python -m pip install tox
displayName: 'Install tox'
- script: |
tox -e release
env:
TWINE_PASSWORD: $(PyPI-token)
displayName: 'publish to PyPI'
condition: contains(variables['Build.SourceBranch'], 'tags')
@@ -25,6 +25,7 @@ minimal_egg_info = textwrap.dedent("""
     entry_points = setuptools.dist:check_entry_points

     [egg_info.writers]
+    PKG-INFO = setuptools.command.egg_info:write_pkg_info
     dependency_links.txt = setuptools.command.egg_info:overwrite_arg
     entry_points.txt = setuptools.command.egg_info:write_entries
     requires.txt = setuptools.command.egg_info:write_requirements
@@ -35,10 +36,11 @@ def ensure_egg_info():
     if os.path.exists('setuptools.egg-info'):
         return
     print("adding minimal entry_points")
-    build_egg_info()
+    add_minimal_info()
+    run_egg_info()

-def build_egg_info():
+def add_minimal_info():
     """
     Build a minimal egg-info, enough to invoke egg_info
     """
@@ -52,13 +54,6 @@ def run_egg_info():
     cmd = [sys.executable, 'setup.py', 'egg_info']
     print("Regenerating egg_info")
     subprocess.check_call(cmd)
-    print("...and again.")
-    subprocess.check_call(cmd)

-def main():
-    ensure_egg_info()
-    run_egg_info()

-__name__ == '__main__' and main()
+__name__ == '__main__' and ensure_egg_info()
Fixed defect in _imp, introduced in 41.6.0 when the 'tests' directory is not present.
Add flake8-2020 to check for misuse of sys.version or sys.version_info.
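As a hedged illustration of the kind of misuse flake8-2020 is being added to catch, string comparisons against sys.version go wrong once minor versions reach two digits, while sys.version_info tuples keep comparing correctly:

    import sys

    if sys.version[:3] >= '3.5':      # fragile: '3.10'[:3] == '3.1' sorts before '3.5'
        pass

    if sys.version_info >= (3, 5):    # preferred: tuple comparison stays correct
        pass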
# -*- coding: utf-8 -*-
#
# Setuptools documentation build configuration file, created by
# sphinx-quickstart on Fri Jul 17 14:22:37 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import subprocess import subprocess
import sys import sys
import os import os
...@@ -26,14 +6,12 @@ import os ...@@ -26,14 +6,12 @@ import os
# hack to run the bootstrap script so that jaraco.packaging.sphinx # hack to run the bootstrap script so that jaraco.packaging.sphinx
# can invoke setup.py # can invoke setup.py
'READTHEDOCS' in os.environ and subprocess.check_call( 'READTHEDOCS' in os.environ and subprocess.check_call(
[sys.executable, 'bootstrap.py'], [sys.executable, '-m', 'bootstrap'],
cwd=os.path.join(os.path.dirname(__file__), os.path.pardir), cwd=os.path.join(os.path.dirname(__file__), os.path.pardir),
) )
# -- General configuration ----------------------------------------------------- # -- General configuration --
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['jaraco.packaging.sphinx', 'rst.linker'] extensions = ['jaraco.packaging.sphinx', 'rst.linker']
# Add any paths that contain templates here, relative to this directory. # Add any paths that contain templates here, relative to this directory.
...@@ -45,7 +23,8 @@ source_suffix = '.txt' ...@@ -45,7 +23,8 @@ source_suffix = '.txt'
# The master toctree document. # The master toctree document.
master_doc = 'index' master_doc = 'index'
# A list of glob-style patterns that should be excluded when looking for source files. # A list of glob-style patterns that should be excluded
# when looking for source files.
exclude_patterns = ['requirements.txt'] exclude_patterns = ['requirements.txt']
# List of directories, relative to source directory, that shouldn't be searched # List of directories, relative to source directory, that shouldn't be searched
...@@ -55,7 +34,7 @@ exclude_trees = [] ...@@ -55,7 +34,7 @@ exclude_trees = []
# The name of the Pygments (syntax highlighting) style to use. # The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx' pygments_style = 'sphinx'
# -- Options for HTML output --------------------------------------------------- # -- Options for HTML output --
# The theme to use for HTML and HTML Help pages. Major themes that come with # The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'. # Sphinx are currently 'default' and 'sphinxdoc'.
...@@ -69,7 +48,10 @@ html_theme_path = ['_theme'] ...@@ -69,7 +48,10 @@ html_theme_path = ['_theme']
html_use_smartypants = True html_use_smartypants = True
# Custom sidebar templates, maps document names to template names. # Custom sidebar templates, maps document names to template names.
html_sidebars = {'index': ['relations.html', 'sourcelink.html', 'indexsidebar.html', 'searchbox.html']} html_sidebars = {
'index': [
'relations.html', 'sourcelink.html', 'indexsidebar.html',
'searchbox.html']}
# If false, no module index is generated. # If false, no module index is generated.
html_use_modindex = False html_use_modindex = False
...@@ -77,14 +59,15 @@ html_use_modindex = False ...@@ -77,14 +59,15 @@ html_use_modindex = False
# If false, no index is generated. # If false, no index is generated.
html_use_index = False html_use_index = False
# -- Options for LaTeX output -------------------------------------------------- # -- Options for LaTeX output --
# Grouping the document tree into LaTeX files. List of tuples # Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]). # (source start file, target name, title, author,
latex_documents = [ # documentclass [howto/manual]).
('index', 'Setuptools.tex', 'Setuptools Documentation', latex_documents = [(
'The fellowship of the packaging', 'manual'), 'index', 'Setuptools.tex', 'Setuptools Documentation',
] 'The fellowship of the packaging', 'manual',
)]
link_files = { link_files = {
'../CHANGES.rst': dict( '../CHANGES.rst': dict(
......
@@ -104,12 +104,8 @@ from the command line after pushing a new branch.
 Testing
 -------

-The primary tests are run using tox. To run the tests, first create the metadata
-needed to run the tests::
-
-    $ python bootstrap.py
-
-Then make sure you have tox installed, and invoke it::
+The primary tests are run using tox. Make sure you have tox installed,
+and invoke it::

     $ tox
......
@@ -41,7 +41,7 @@ Please see the `setuptools PyPI page <https://pypi.org/project/setuptools/>`_
 for download links and basic installation instructions for each of the
 supported platforms.

-You will need at least Python 3.4 or 2.7. An ``easy_install`` script will be
+You will need at least Python 3.5 or 2.7. An ``easy_install`` script will be
 installed in the normal location for Python scripts on your platform.

 Note that the instructions on the setuptools PyPI page assume that you are
......
@@ -83,13 +83,14 @@ __import__('pkg_resources.extern.packaging.version')
 __import__('pkg_resources.extern.packaging.specifiers')
 __import__('pkg_resources.extern.packaging.requirements')
 __import__('pkg_resources.extern.packaging.markers')
+__import__('pkg_resources.py2_warn')

 __metaclass__ = type

-if (3, 0) < sys.version_info < (3, 4):
-    raise RuntimeError("Python 3.4 or later is required")
+if (3, 0) < sys.version_info < (3, 5):
+    raise RuntimeError("Python 3.5 or later is required")

 if six.PY2:
     # Those builtin exceptions are only defined in Python 3
@@ -2328,7 +2329,8 @@ register_namespace_handler(object, null_ns_handler)
 def normalize_path(filename):
     """Normalize a file/dir name for comparison purposes"""
-    return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
+    return os.path.normcase(os.path.realpath(os.path.normpath(
+        _cygwin_patch(filename))))

 def _cygwin_patch(filename):  # pragma: nocover
@@ -3287,6 +3289,7 @@ def _initialize_master_working_set():
     list(map(working_set.add_entry, sys.path))
     globals().update(locals())

 class PkgResourcesDeprecationWarning(Warning):
     """
     Base class for warning about deprecations in ``pkg_resources``
......
import sys
import warnings
import textwrap
msg = textwrap.dedent("""
You are running Setuptools on Python 2, which is no longer
supported and
>>> SETUPTOOLS WILL STOP WORKING <<<
in a subsequent release (no sooner than 2020-04-20).
Please ensure you are installing
Setuptools using pip 9.x or later or pin to `setuptools<45`
in your environment.
If you have done those things and are still encountering
this message, please comment in
https://github.com/pypa/setuptools/issues/1458
about the steps that led to this unsupported combination.
""")
pre = "Setuptools will stop working on Python 2\n"
sys.version_info < (3,) and warnings.warn(pre + "*" * 60 + msg + "*" * 60)
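A hedged note on the idiom in the last line: the `and` short-circuit only calls warnings.warn() when the version check is true, so it behaves like this spelled-out form (message shortened here for illustration):

    import sys
    import warnings

    if sys.version_info < (3,):
        warnings.warn("Setuptools will stop working on Python 2")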
...@@ -17,7 +17,9 @@ try: ...@@ -17,7 +17,9 @@ try:
except ImportError: except ImportError:
import mock import mock
from pkg_resources import DistInfoDistribution, Distribution, EggInfoDistribution from pkg_resources import (
DistInfoDistribution, Distribution, EggInfoDistribution,
)
from setuptools.extern import six from setuptools.extern import six
from pkg_resources.extern.six.moves import map from pkg_resources.extern.six.moves import map
from pkg_resources.extern.six import text_type, string_types from pkg_resources.extern.six import text_type, string_types
...@@ -279,8 +281,8 @@ def make_distribution_no_version(tmpdir, basename): ...@@ -279,8 +281,8 @@ def make_distribution_no_version(tmpdir, basename):
('dist-info', 'METADATA', DistInfoDistribution), ('dist-info', 'METADATA', DistInfoDistribution),
], ],
) )
def test_distribution_version_missing(tmpdir, suffix, expected_filename, def test_distribution_version_missing(
expected_dist_type): tmpdir, suffix, expected_filename, expected_dist_type):
""" """
Test Distribution.version when the "Version" header is missing. Test Distribution.version when the "Version" header is missing.
""" """
......
...@@ -15,7 +15,7 @@ import pkg_resources ...@@ -15,7 +15,7 @@ import pkg_resources
from pkg_resources import ( from pkg_resources import (
parse_requirements, VersionConflict, parse_version, parse_requirements, VersionConflict, parse_version,
Distribution, EntryPoint, Requirement, safe_version, safe_name, Distribution, EntryPoint, Requirement, safe_version, safe_name,
WorkingSet, PkgResourcesDeprecationWarning) WorkingSet)
# from Python 3.6 docs. # from Python 3.6 docs.
...@@ -501,7 +501,6 @@ class TestEntryPoints: ...@@ -501,7 +501,6 @@ class TestEntryPoints:
ep.load(require=False) ep.load(require=False)
class TestRequirements: class TestRequirements:
def testBasics(self): def testBasics(self):
r = Requirement.parse("Twisted>=1.2") r = Requirement.parse("Twisted>=1.2")
......
 [build-system]
-requires = ["wheel"]
+requires = ["setuptools >= 40.8", "wheel"]
+build-backend = "setuptools.build_meta"
+backend-path = ["."]

 [tool.towncrier]
 package = "setuptools"
......
 [pytest]
-addopts=--doctest-modules --doctest-glob=pkg_resources/api_tests.txt -r sxX
-norecursedirs=dist build *.egg setuptools/extern pkg_resources/extern pkg_resources/tests/data tools .*
+addopts=--doctest-modules --flake8 --doctest-glob=pkg_resources/api_tests.txt -r sxX
+norecursedirs=dist build *.egg setuptools/extern pkg_resources/extern pkg_resources/tests/data tools .* setuptools/_vendor pkg_resources/_vendor
+flake8-ignore =
+    setuptools/site-patch.py F821
+    setuptools/py*compat.py F811
 doctest_optionflags=ELLIPSIS ALLOW_UNICODE
 filterwarnings =
     # https://github.com/pypa/setuptools/issues/1823
......
@@ -14,12 +14,9 @@ repository = https://upload.pypi.org/legacy/
 [sdist]
 formats = zip

-[bdist_wheel]
-universal = 1

 [metadata]
 name = setuptools
-version = 42.0.2
+version = 45.1.0
 description = Easily download, build, install, upgrade, and uninstall Python packages
 author = Python Packaging Authority
 author_email = distutils-sig@python.org
@@ -35,10 +32,8 @@ classifiers =
     Intended Audience :: Developers
     License :: OSI Approved :: MIT License
     Operating System :: OS Independent
-    Programming Language :: Python :: 2
-    Programming Language :: Python :: 2.7
     Programming Language :: Python :: 3
-    Programming Language :: Python :: 3.4
+    Programming Language :: Python :: 3 :: Only
     Programming Language :: Python :: 3.5
     Programming Language :: Python :: 3.6
     Programming Language :: Python :: 3.7
@@ -50,7 +45,7 @@ classifiers =
 [options]
 zip_safe = True
-python_requires = >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
+python_requires = >=3.5
 py_modules = easy_install
 packages = find:
......
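A hedged sketch of how the tightened python_requires bound above is evaluated by installers, using the third-party 'packaging' project (which pip and setuptools vendor); assumes packaging is importable:

    from packaging.specifiers import SpecifierSet

    requires_python = SpecifierSet('>=3.5')
    print('2.7' in requires_python)   # False: pip 9+ skips this release on Python 2
    print('3.8' in requires_python)   # True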
"""Extensions to the 'distutils' for large or complex distributions""" """Extensions to the 'distutils' for large or complex distributions"""
import os import os
import sys
import functools import functools
import distutils.core import distutils.core
import distutils.filelist import distutils.filelist
...@@ -31,7 +30,7 @@ __all__ = [ ...@@ -31,7 +30,7 @@ __all__ = [
] ]
if PY3: if PY3:
__all__.append('find_namespace_packages') __all__.append('find_namespace_packages')
__version__ = setuptools.version.__version__ __version__ = setuptools.version.__version__
...@@ -123,7 +122,7 @@ class PEP420PackageFinder(PackageFinder): ...@@ -123,7 +122,7 @@ class PEP420PackageFinder(PackageFinder):
find_packages = PackageFinder.find find_packages = PackageFinder.find
if PY3: if PY3:
find_namespace_packages = PEP420PackageFinder.find find_namespace_packages = PEP420PackageFinder.find
def _install_setup_requires(attrs): def _install_setup_requires(attrs):
...@@ -144,6 +143,7 @@ def setup(**attrs): ...@@ -144,6 +143,7 @@ def setup(**attrs):
_install_setup_requires(attrs) _install_setup_requires(attrs)
return distutils.core.setup(**attrs) return distutils.core.setup(**attrs)
setup.__doc__ = distutils.core.setup.__doc__ setup.__doc__ = distutils.core.setup.__doc__
...@@ -191,8 +191,8 @@ class Command(_Command): ...@@ -191,8 +191,8 @@ class Command(_Command):
ok = False ok = False
if not ok: if not ok:
raise DistutilsOptionError( raise DistutilsOptionError(
"'%s' must be a list of strings (got %r)" "'%s' must be a list of strings (got %r)"
% (option, val)) % (option, val))
def reinitialize_command(self, command, reinit_subcommands=0, **kw): def reinitialize_command(self, command, reinit_subcommands=0, **kw):
cmd = _Command.reinitialize_command(self, command, reinit_subcommands) cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
......
...@@ -17,9 +17,18 @@ C_BUILTIN = 6 ...@@ -17,9 +17,18 @@ C_BUILTIN = 6
PY_FROZEN = 7 PY_FROZEN = 7
def find_spec(module, paths):
finder = (
importlib.machinery.PathFinder().find_spec
if isinstance(paths, list) else
importlib.util.find_spec
)
return finder(module, paths)
def find_module(module, paths=None): def find_module(module, paths=None):
"""Just like 'imp.find_module()', but with package support""" """Just like 'imp.find_module()', but with package support"""
spec = importlib.util.find_spec(module, paths) spec = find_spec(module, paths)
if spec is None: if spec is None:
raise ImportError("Can't find %s" % module) raise ImportError("Can't find %s" % module)
if not spec.has_location and hasattr(spec, 'submodule_search_locations'): if not spec.has_location and hasattr(spec, 'submodule_search_locations'):
...@@ -60,14 +69,14 @@ def find_module(module, paths=None): ...@@ -60,14 +69,14 @@ def find_module(module, paths=None):
def get_frozen_object(module, paths=None): def get_frozen_object(module, paths=None):
spec = importlib.util.find_spec(module, paths) spec = find_spec(module, paths)
if not spec: if not spec:
raise ImportError("Can't find %s" % module) raise ImportError("Can't find %s" % module)
return spec.loader.get_code(module) return spec.loader.get_code(module)
def get_module(module, paths, info): def get_module(module, paths, info):
spec = importlib.util.find_spec(module, paths) spec = find_spec(module, paths)
if not spec: if not spec:
raise ImportError("Can't find %s" % module) raise ImportError("Can't find %s" % module)
return module_from_spec(spec) return module_from_spec(spec)
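A hedged illustration of the distinction the new find_spec() helper encodes: importlib.util.find_spec() takes a package (for resolving relative imports) as its second argument rather than a search path, so an explicit list of directories has to go through PathFinder instead. The module name below is arbitrary:

    import importlib.machinery
    import importlib.util

    spec_from_sys_path = importlib.util.find_spec('json')
    spec_from_finder = importlib.machinery.PathFinder().find_spec('json', None)
    # Both resolve the same module here; the difference matters once a real
    # list of search paths is passed in.
    print(spec_from_sys_path.origin == spec_from_finder.origin)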
...@@ -25,7 +25,8 @@ def default_filter(src, dst): ...@@ -25,7 +25,8 @@ def default_filter(src, dst):
return dst return dst
def unpack_archive(filename, extract_dir, progress_filter=default_filter, def unpack_archive(
filename, extract_dir, progress_filter=default_filter,
drivers=None): drivers=None):
"""Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
...@@ -148,7 +149,8 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): ...@@ -148,7 +149,8 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
# resolve any links and to extract the link targets as normal # resolve any links and to extract the link targets as normal
# files # files
while member is not None and (member.islnk() or member.issym()): while member is not None and (
member.islnk() or member.issym()):
linkpath = member.linkname linkpath = member.linkname
if member.issym(): if member.issym():
base = posixpath.dirname(member.name) base = posixpath.dirname(member.name)
......
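A hedged usage sketch for the unpack_archive() signature being re-wrapped above; the archive name and destination are placeholders and the archive is assumed to exist:

    from setuptools.archive_util import UnrecognizedFormat, unpack_archive

    try:
        unpack_archive('example-1.0.zip', 'build/unpacked')
    except UnrecognizedFormat as exc:
        print('cannot unpack:', exc)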
...@@ -48,6 +48,7 @@ __all__ = ['get_requires_for_build_sdist', ...@@ -48,6 +48,7 @@ __all__ = ['get_requires_for_build_sdist',
'__legacy__', '__legacy__',
'SetupRequirementsError'] 'SetupRequirementsError']
class SetupRequirementsError(BaseException): class SetupRequirementsError(BaseException):
def __init__(self, specifiers): def __init__(self, specifiers):
self.specifiers = specifiers self.specifiers = specifiers
...@@ -143,7 +144,8 @@ class _BuildMetaBackend(object): ...@@ -143,7 +144,8 @@ class _BuildMetaBackend(object):
def get_requires_for_build_wheel(self, config_settings=None): def get_requires_for_build_wheel(self, config_settings=None):
config_settings = self._fix_config(config_settings) config_settings = self._fix_config(config_settings)
return self._get_build_requires(config_settings, requirements=['wheel']) return self._get_build_requires(
config_settings, requirements=['wheel'])
def get_requires_for_build_sdist(self, config_settings=None): def get_requires_for_build_sdist(self, config_settings=None):
config_settings = self._fix_config(config_settings) config_settings = self._fix_config(config_settings)
...@@ -160,8 +162,10 @@ class _BuildMetaBackend(object): ...@@ -160,8 +162,10 @@ class _BuildMetaBackend(object):
dist_infos = [f for f in os.listdir(dist_info_directory) dist_infos = [f for f in os.listdir(dist_info_directory)
if f.endswith('.dist-info')] if f.endswith('.dist-info')]
if (len(dist_infos) == 0 and if (
len(_get_immediate_subdirectories(dist_info_directory)) == 1): len(dist_infos) == 0 and
len(_get_immediate_subdirectories(dist_info_directory)) == 1
):
dist_info_directory = os.path.join( dist_info_directory = os.path.join(
dist_info_directory, os.listdir(dist_info_directory)[0]) dist_info_directory, os.listdir(dist_info_directory)[0])
...@@ -193,7 +197,8 @@ class _BuildMetaBackend(object): ...@@ -193,7 +197,8 @@ class _BuildMetaBackend(object):
config_settings["--global-option"]) config_settings["--global-option"])
self.run_setup() self.run_setup()
result_basename = _file_with_extension(tmp_dist_dir, result_extension) result_basename = _file_with_extension(
tmp_dist_dir, result_extension)
result_path = os.path.join(result_directory, result_basename) result_path = os.path.join(result_directory, result_basename)
if os.path.exists(result_path): if os.path.exists(result_path):
# os.rename will fail overwriting on non-Unix. # os.rename will fail overwriting on non-Unix.
...@@ -202,7 +207,6 @@ class _BuildMetaBackend(object): ...@@ -202,7 +207,6 @@ class _BuildMetaBackend(object):
return result_basename return result_basename
def build_wheel(self, wheel_directory, config_settings=None, def build_wheel(self, wheel_directory, config_settings=None,
metadata_directory=None): metadata_directory=None):
return self._build_with_temp_dir(['bdist_wheel'], '.whl', return self._build_with_temp_dir(['bdist_wheel'], '.whl',
...@@ -217,9 +221,12 @@ class _BuildMetaBackend(object): ...@@ -217,9 +221,12 @@ class _BuildMetaBackend(object):
class _BuildMetaLegacyBackend(_BuildMetaBackend): class _BuildMetaLegacyBackend(_BuildMetaBackend):
"""Compatibility backend for setuptools """Compatibility backend for setuptools
This is a version of setuptools.build_meta that endeavors to maintain backwards This is a version of setuptools.build_meta that endeavors
compatibility with pre-PEP 517 modes of invocation. It exists as a temporary to maintain backwards
bridge between the old packaging mechanism and the new packaging mechanism, compatibility with pre-PEP 517 modes of invocation. It
exists as a temporary
bridge between the old packaging mechanism and the new
packaging mechanism,
and will eventually be removed. and will eventually be removed.
""" """
def run_setup(self, setup_script='setup.py'): def run_setup(self, setup_script='setup.py'):
...@@ -232,6 +239,12 @@ class _BuildMetaLegacyBackend(_BuildMetaBackend): ...@@ -232,6 +239,12 @@ class _BuildMetaLegacyBackend(_BuildMetaBackend):
if script_dir not in sys.path: if script_dir not in sys.path:
sys.path.insert(0, script_dir) sys.path.insert(0, script_dir)
# Some setup.py scripts (e.g. in pygame and numpy) use sys.argv[0] to
# get the directory of the source code. They expect it to refer to the
# setup.py script.
sys_argv_0 = sys.argv[0]
sys.argv[0] = setup_script
try: try:
super(_BuildMetaLegacyBackend, super(_BuildMetaLegacyBackend,
self).run_setup(setup_script=setup_script) self).run_setup(setup_script=setup_script)
...@@ -242,6 +255,8 @@ class _BuildMetaLegacyBackend(_BuildMetaBackend): ...@@ -242,6 +255,8 @@ class _BuildMetaLegacyBackend(_BuildMetaBackend):
# the original path so that the path manipulation does not persist # the original path so that the path manipulation does not persist
# within the hook after run_setup is called. # within the hook after run_setup is called.
sys.path[:] = sys_path sys.path[:] = sys_path
sys.argv[0] = sys_argv_0
# The primary backend # The primary backend
_BACKEND = _BuildMetaBackend() _BACKEND = _BuildMetaBackend()
......
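The new comment in run_setup() refers to setup scripts that locate their own source tree via sys.argv[0]. A hedged illustration of that pattern (the file read here is an arbitrary example):

    import os
    import sys

    here = os.path.dirname(os.path.abspath(sys.argv[0]))
    with open(os.path.join(here, 'README.rst')) as f:
        long_description = f.read()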
...@@ -25,9 +25,9 @@ class build_clib(orig.build_clib): ...@@ -25,9 +25,9 @@ class build_clib(orig.build_clib):
sources = build_info.get('sources') sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)): if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError( raise DistutilsSetupError(
"in 'libraries' option (library '%s'), " "in 'libraries' option (library '%s'), "
"'sources' must be present and must be " "'sources' must be present and must be "
"a list of source filenames" % lib_name) "a list of source filenames" % lib_name)
sources = list(sources) sources = list(sources)
log.info("building '%s' library", lib_name) log.info("building '%s' library", lib_name)
...@@ -38,9 +38,9 @@ class build_clib(orig.build_clib): ...@@ -38,9 +38,9 @@ class build_clib(orig.build_clib):
obj_deps = build_info.get('obj_deps', dict()) obj_deps = build_info.get('obj_deps', dict())
if not isinstance(obj_deps, dict): if not isinstance(obj_deps, dict):
raise DistutilsSetupError( raise DistutilsSetupError(
"in 'libraries' option (library '%s'), " "in 'libraries' option (library '%s'), "
"'obj_deps' must be a dictionary of " "'obj_deps' must be a dictionary of "
"type 'source: list'" % lib_name) "type 'source: list'" % lib_name)
dependencies = [] dependencies = []
# Get the global dependencies that are specified by the '' key. # Get the global dependencies that are specified by the '' key.
...@@ -48,9 +48,9 @@ class build_clib(orig.build_clib): ...@@ -48,9 +48,9 @@ class build_clib(orig.build_clib):
global_deps = obj_deps.get('', list()) global_deps = obj_deps.get('', list())
if not isinstance(global_deps, (list, tuple)): if not isinstance(global_deps, (list, tuple)):
raise DistutilsSetupError( raise DistutilsSetupError(
"in 'libraries' option (library '%s'), " "in 'libraries' option (library '%s'), "
"'obj_deps' must be a dictionary of " "'obj_deps' must be a dictionary of "
"type 'source: list'" % lib_name) "type 'source: list'" % lib_name)
# Build the list to be used by newer_pairwise_group # Build the list to be used by newer_pairwise_group
# each source will be auto-added to its dependencies. # each source will be auto-added to its dependencies.
...@@ -60,39 +60,42 @@ class build_clib(orig.build_clib): ...@@ -60,39 +60,42 @@ class build_clib(orig.build_clib):
extra_deps = obj_deps.get(source, list()) extra_deps = obj_deps.get(source, list())
if not isinstance(extra_deps, (list, tuple)): if not isinstance(extra_deps, (list, tuple)):
raise DistutilsSetupError( raise DistutilsSetupError(
"in 'libraries' option (library '%s'), " "in 'libraries' option (library '%s'), "
"'obj_deps' must be a dictionary of " "'obj_deps' must be a dictionary of "
"type 'source: list'" % lib_name) "type 'source: list'" % lib_name)
src_deps.extend(extra_deps) src_deps.extend(extra_deps)
dependencies.append(src_deps) dependencies.append(src_deps)
expected_objects = self.compiler.object_filenames( expected_objects = self.compiler.object_filenames(
sources, sources,
output_dir=self.build_temp output_dir=self.build_temp,
) )
if newer_pairwise_group(dependencies, expected_objects) != ([], []): if (
newer_pairwise_group(dependencies, expected_objects)
!= ([], [])
):
# First, compile the source code to object files in the library # First, compile the source code to object files in the library
# directory. (This should probably change to putting object # directory. (This should probably change to putting object
# files in a temporary build directory.) # files in a temporary build directory.)
macros = build_info.get('macros') macros = build_info.get('macros')
include_dirs = build_info.get('include_dirs') include_dirs = build_info.get('include_dirs')
cflags = build_info.get('cflags') cflags = build_info.get('cflags')
objects = self.compiler.compile( self.compiler.compile(
sources, sources,
output_dir=self.build_temp, output_dir=self.build_temp,
macros=macros, macros=macros,
include_dirs=include_dirs, include_dirs=include_dirs,
extra_postargs=cflags, extra_postargs=cflags,
debug=self.debug debug=self.debug
) )
# Now "link" the object files together into a static library. # Now "link" the object files together into a static library.
# (On Unix at least, this isn't really linking -- it just # (On Unix at least, this isn't really linking -- it just
# builds an archive. Whatever.) # builds an archive. Whatever.)
self.compiler.create_static_lib( self.compiler.create_static_lib(
expected_objects, expected_objects,
lib_name, lib_name,
output_dir=self.build_clib, output_dir=self.build_clib,
debug=self.debug debug=self.debug
) )
...@@ -14,7 +14,8 @@ from setuptools.extern import six ...@@ -14,7 +14,8 @@ from setuptools.extern import six
if six.PY2: if six.PY2:
import imp import imp
EXTENSION_SUFFIXES = [s for s, _, tp in imp.get_suffixes() if tp == imp.C_EXTENSION] EXTENSION_SUFFIXES = [
s for s, _, tp in imp.get_suffixes() if tp == imp.C_EXTENSION]
else: else:
from importlib.machinery import EXTENSION_SUFFIXES from importlib.machinery import EXTENSION_SUFFIXES
...@@ -29,7 +30,7 @@ except ImportError: ...@@ -29,7 +30,7 @@ except ImportError:
# make sure _config_vars is initialized # make sure _config_vars is initialized
get_config_var("LDSHARED") get_config_var("LDSHARED")
from distutils.sysconfig import _config_vars as _CONFIG_VARS from distutils.sysconfig import _config_vars as _CONFIG_VARS # noqa
def _customize_compiler_for_shlib(compiler): def _customize_compiler_for_shlib(compiler):
...@@ -65,7 +66,9 @@ elif os.name != 'nt': ...@@ -65,7 +66,9 @@ elif os.name != 'nt':
except ImportError: except ImportError:
pass pass
if_dl = lambda s: s if have_rtld else ''
def if_dl(s):
return s if have_rtld else ''
def get_abi3_suffix(): def get_abi3_suffix():
...@@ -113,7 +116,7 @@ class build_ext(_build_ext): ...@@ -113,7 +116,7 @@ class build_ext(_build_ext):
if fullname in self.ext_map: if fullname in self.ext_map:
ext = self.ext_map[fullname] ext = self.ext_map[fullname]
use_abi3 = ( use_abi3 = (
six.PY3 not six.PY2
and getattr(ext, 'py_limited_api') and getattr(ext, 'py_limited_api')
and get_abi3_suffix() and get_abi3_suffix()
) )
......
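The use_abi3 expression above looks at an extension's py_limited_api flag. A hedged sketch of a project opting into the stable ABI (names and sources are placeholders):

    from setuptools import Extension, setup

    setup(
        name='example',
        version='0.1',
        ext_modules=[
            Extension(
                'example._speedups',
                ['src/speedups.c'],
                py_limited_api=True,  # on CPython 3 this selects the abi3 suffix
            ),
        ],
    )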
...@@ -108,7 +108,7 @@ class develop(namespaces.DevelopInstaller, easy_install): ...@@ -108,7 +108,7 @@ class develop(namespaces.DevelopInstaller, easy_install):
return path_to_setup return path_to_setup
def install_for_development(self): def install_for_development(self):
if six.PY3 and getattr(self.distribution, 'use_2to3', False): if not six.PY2 and getattr(self.distribution, 'use_2to3', False):
# If we run 2to3 we can not do this inplace: # If we run 2to3 we can not do this inplace:
# Ensure metadata is up-to-date # Ensure metadata is up-to-date
......
...@@ -121,7 +121,8 @@ else: ...@@ -121,7 +121,8 @@ else:
return False return False
_one_liner = lambda text: textwrap.dedent(text).strip().replace('\n', '; ') def _one_liner(text):
return textwrap.dedent(text).strip().replace('\n', '; ')
class easy_install(Command): class easy_install(Command):
...@@ -414,8 +415,8 @@ class easy_install(Command): ...@@ -414,8 +415,8 @@ class easy_install(Command):
if show_deprecation: if show_deprecation:
self.announce( self.announce(
"WARNING: The easy_install command is deprecated " "WARNING: The easy_install command is deprecated "
"and will be removed in a future version." "and will be removed in a future version.",
, log.WARN, log.WARN,
) )
if self.verbose != self.distribution.verbose: if self.verbose != self.distribution.verbose:
log.set_verbosity(self.verbose) log.set_verbosity(self.verbose)
...@@ -508,13 +509,13 @@ class easy_install(Command): ...@@ -508,13 +509,13 @@ class easy_install(Command):
the distutils default setting) was: the distutils default setting) was:
%s %s
""").lstrip() """).lstrip() # noqa
__not_exists_id = textwrap.dedent(""" __not_exists_id = textwrap.dedent("""
This directory does not currently exist. Please create it and try again, or This directory does not currently exist. Please create it and try again, or
choose a different installation directory (using the -d or --install-dir choose a different installation directory (using the -d or --install-dir
option). option).
""").lstrip() """).lstrip() # noqa
__access_msg = textwrap.dedent(""" __access_msg = textwrap.dedent("""
Perhaps your account does not have write access to this directory? If the Perhaps your account does not have write access to this directory? If the
...@@ -530,7 +531,7 @@ class easy_install(Command): ...@@ -530,7 +531,7 @@ class easy_install(Command):
https://setuptools.readthedocs.io/en/latest/easy_install.html https://setuptools.readthedocs.io/en/latest/easy_install.html
Please make the appropriate changes for your system and try again. Please make the appropriate changes for your system and try again.
""").lstrip() """).lstrip() # noqa
def cant_write_to_target(self): def cant_write_to_target(self):
msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,) msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,)
...@@ -1094,13 +1095,13 @@ class easy_install(Command): ...@@ -1094,13 +1095,13 @@ class easy_install(Command):
pkg_resources.require("%(name)s") # latest installed version pkg_resources.require("%(name)s") # latest installed version
pkg_resources.require("%(name)s==%(version)s") # this exact version pkg_resources.require("%(name)s==%(version)s") # this exact version
pkg_resources.require("%(name)s>=%(version)s") # this version or higher pkg_resources.require("%(name)s>=%(version)s") # this version or higher
""").lstrip() """).lstrip() # noqa
__id_warning = textwrap.dedent(""" __id_warning = textwrap.dedent("""
Note also that the installation directory must be on sys.path at runtime for Note also that the installation directory must be on sys.path at runtime for
this to work. (e.g. by being the application's script directory, by being on this to work. (e.g. by being the application's script directory, by being on
PYTHONPATH, or by being added to sys.path by your code.) PYTHONPATH, or by being added to sys.path by your code.)
""") """) # noqa
def installation_report(self, req, dist, what="Installed"): def installation_report(self, req, dist, what="Installed"):
"""Helpful installation message for display to package users""" """Helpful installation message for display to package users"""
...@@ -1125,7 +1126,7 @@ class easy_install(Command): ...@@ -1125,7 +1126,7 @@ class easy_install(Command):
%(python)s setup.py develop %(python)s setup.py develop
See the setuptools documentation for the "develop" command for more info. See the setuptools documentation for the "develop" command for more info.
""").lstrip() """).lstrip() # noqa
def report_editable(self, spec, setup_script): def report_editable(self, spec, setup_script):
dirname = os.path.dirname(setup_script) dirname = os.path.dirname(setup_script)
...@@ -1308,7 +1309,8 @@ class easy_install(Command): ...@@ -1308,7 +1309,8 @@ class easy_install(Command):
https://setuptools.readthedocs.io/en/latest/easy_install.html#custom-installation-locations https://setuptools.readthedocs.io/en/latest/easy_install.html#custom-installation-locations
Please make the appropriate changes for your system and try again.""").lstrip() Please make the appropriate changes for your system and try again.
""").strip()
def install_site_py(self): def install_site_py(self):
"""Make sure there's a site.py in the target dir, if needed""" """Make sure there's a site.py in the target dir, if needed"""
...@@ -1564,7 +1566,7 @@ def get_exe_prefixes(exe_filename): ...@@ -1564,7 +1566,7 @@ def get_exe_prefixes(exe_filename):
continue continue
if parts[0].upper() in ('PURELIB', 'PLATLIB'): if parts[0].upper() in ('PURELIB', 'PLATLIB'):
contents = z.read(name) contents = z.read(name)
if six.PY3: if not six.PY2:
contents = contents.decode() contents = contents.decode()
for pth in yield_lines(contents): for pth in yield_lines(contents):
pth = pth.strip().replace('\\', '/') pth = pth.strip().replace('\\', '/')
...@@ -2090,7 +2092,8 @@ class ScriptWriter: ...@@ -2090,7 +2092,8 @@ class ScriptWriter:
@classmethod @classmethod
def get_script_header(cls, script_text, executable=None, wininst=False): def get_script_header(cls, script_text, executable=None, wininst=False):
# for backward compatibility # for backward compatibility
warnings.warn("Use get_header", EasyInstallDeprecationWarning, stacklevel=2) warnings.warn(
"Use get_header", EasyInstallDeprecationWarning, stacklevel=2)
if wininst: if wininst:
executable = "python.exe" executable = "python.exe"
return cls.get_header(script_text, executable) return cls.get_header(script_text, executable)
...@@ -2339,5 +2342,8 @@ def _patch_usage(): ...@@ -2339,5 +2342,8 @@ def _patch_usage():
finally: finally:
distutils.core.gen_usage = saved distutils.core.gen_usage = saved
class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning): class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning):
"""Class for warning about deprecations in EasyInstall in SetupTools. Not ignored by default, unlike DeprecationWarning.""" """
Warning for EasyInstall deprecations, bypassing suppression.
"""
...@@ -33,6 +33,7 @@ from setuptools.glob import glob ...@@ -33,6 +33,7 @@ from setuptools.glob import glob
from setuptools.extern import packaging from setuptools.extern import packaging
from setuptools import SetuptoolsDeprecationWarning from setuptools import SetuptoolsDeprecationWarning
def translate_pattern(glob): def translate_pattern(glob):
""" """
Translate a file path glob like '*.txt' in to a regular expression. Translate a file path glob like '*.txt' in to a regular expression.
...@@ -113,7 +114,7 @@ def translate_pattern(glob): ...@@ -113,7 +114,7 @@ def translate_pattern(glob):
pat += sep pat += sep
pat += r'\Z' pat += r'\Z'
return re.compile(pat, flags=re.MULTILINE|re.DOTALL) return re.compile(pat, flags=re.MULTILINE | re.DOTALL)
class InfoCommon: class InfoCommon:
...@@ -266,7 +267,7 @@ class egg_info(InfoCommon, Command): ...@@ -266,7 +267,7 @@ class egg_info(InfoCommon, Command):
to the file. to the file.
""" """
log.info("writing %s to %s", what, filename) log.info("writing %s to %s", what, filename)
if six.PY3: if not six.PY2:
data = data.encode("utf-8") data = data.encode("utf-8")
if not self.dry_run: if not self.dry_run:
f = open(filename, 'wb') f = open(filename, 'wb')
...@@ -637,7 +638,9 @@ def warn_depends_obsolete(cmd, basename, filename): ...@@ -637,7 +638,9 @@ def warn_depends_obsolete(cmd, basename, filename):
def _write_requirements(stream, reqs): def _write_requirements(stream, reqs):
lines = yield_lines(reqs or ()) lines = yield_lines(reqs or ())
append_cr = lambda line: line + '\n'
def append_cr(line):
return line + '\n'
lines = map(append_cr, lines) lines = map(append_cr, lines)
stream.writelines(lines) stream.writelines(lines)
...@@ -703,7 +706,8 @@ def get_pkg_info_revision(): ...@@ -703,7 +706,8 @@ def get_pkg_info_revision():
Get a -r### off of PKG-INFO Version in case this is an sdist of Get a -r### off of PKG-INFO Version in case this is an sdist of
a subversion revision. a subversion revision.
""" """
warnings.warn("get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning) warnings.warn(
"get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning)
if os.path.exists('PKG-INFO'): if os.path.exists('PKG-INFO'):
with io.open('PKG-INFO') as f: with io.open('PKG-INFO') as f:
for line in f: for line in f:
...@@ -714,4 +718,4 @@ def get_pkg_info_revision(): ...@@ -714,4 +718,4 @@ def get_pkg_info_revision():
class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning): class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
"""Class for warning about deprecations in eggInfo in setupTools. Not ignored by default, unlike DeprecationWarning.""" """Deprecated behavior warning for EggInfo, bypassing suppression."""
...@@ -77,7 +77,8 @@ class install_lib(orig.install_lib): ...@@ -77,7 +77,8 @@ class install_lib(orig.install_lib):
if not hasattr(sys, 'implementation'): if not hasattr(sys, 'implementation'):
return return
base = os.path.join('__pycache__', '__init__.' + sys.implementation.cache_tag) base = os.path.join(
'__pycache__', '__init__.' + sys.implementation.cache_tag)
yield base + '.pyc' yield base + '.pyc'
yield base + '.pyo' yield base + '.pyo'
yield base + '.opt-1.pyc' yield base + '.opt-1.pyc'
......
...@@ -132,5 +132,5 @@ class sdist_add_defaults: ...@@ -132,5 +132,5 @@ class sdist_add_defaults:
if hasattr(sdist.sdist, '_add_defaults_standards'): if hasattr(sdist.sdist, '_add_defaults_standards'):
# disable the functionality already available upstream # disable the functionality already available upstream
class sdist_add_defaults: class sdist_add_defaults: # noqa
pass pass
...@@ -121,19 +121,40 @@ class sdist(sdist_add_defaults, orig.sdist): ...@@ -121,19 +121,40 @@ class sdist(sdist_add_defaults, orig.sdist):
if has_leaky_handle: if has_leaky_handle:
read_template = __read_template_hack read_template = __read_template_hack
def _add_defaults_optional(self):
if six.PY2:
sdist_add_defaults._add_defaults_optional(self)
else:
super()._add_defaults_optional()
if os.path.isfile('pyproject.toml'):
self.filelist.append('pyproject.toml')
def _add_defaults_python(self): def _add_defaults_python(self):
"""getting python files""" """getting python files"""
if self.distribution.has_pure_modules(): if self.distribution.has_pure_modules():
build_py = self.get_finalized_command('build_py') build_py = self.get_finalized_command('build_py')
self.filelist.extend(build_py.get_source_files()) self.filelist.extend(build_py.get_source_files())
# This functionality is incompatible with include_package_data, and self._add_data_files(self._safe_data_files(build_py))
# will in fact create an infinite recursion if include_package_data
# is True. Use of include_package_data will imply that def _safe_data_files(self, build_py):
# distutils-style automatic handling of package_data is disabled """
if not self.distribution.include_package_data: Extracting data_files from build_py is known to cause
for _, src_dir, _, filenames in build_py.data_files: infinite recursion errors when `include_package_data`
self.filelist.extend([os.path.join(src_dir, filename) is enabled, so suppress it in that case.
for filename in filenames]) """
if self.distribution.include_package_data:
return ()
return build_py.data_files
def _add_data_files(self, data_files):
"""
Add data files as found in build_py.data_files.
"""
self.filelist.extend(
os.path.join(src_dir, name)
for _, src_dir, _, filenames in data_files
for name in filenames
)
def _add_defaults_data_files(self): def _add_defaults_data_files(self):
try: try:
...@@ -186,7 +207,7 @@ class sdist(sdist_add_defaults, orig.sdist): ...@@ -186,7 +207,7 @@ class sdist(sdist_add_defaults, orig.sdist):
manifest = open(self.manifest, 'rb') manifest = open(self.manifest, 'rb')
for line in manifest: for line in manifest:
# The manifest must contain UTF-8. See #303. # The manifest must contain UTF-8. See #303.
if six.PY3: if not six.PY2:
try: try:
line = line.decode('UTF-8') line = line.decode('UTF-8')
except UnicodeDecodeError: except UnicodeDecodeError:
......
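A hedged example of the configuration the new _safe_data_files() docstring refers to: with include_package_data enabled, package data is driven by MANIFEST.in / revision control rather than build_py.data_files, which is exactly what the hook suppresses to avoid recursion. Project name and layout are placeholders:

    from setuptools import find_packages, setup

    setup(
        name='example',
        version='0.1',
        packages=find_packages(),
        include_package_data=True,
    )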
...@@ -129,7 +129,8 @@ class test(Command): ...@@ -129,7 +129,8 @@ class test(Command):
@contextlib.contextmanager @contextlib.contextmanager
def project_on_sys_path(self, include_dists=[]): def project_on_sys_path(self, include_dists=[]):
with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False) with_2to3 = not six.PY2 and getattr(
self.distribution, 'use_2to3', False)
if with_2to3: if with_2to3:
# If we run 2to3 we can not do this inplace: # If we run 2to3 we can not do this inplace:
...@@ -240,7 +241,7 @@ class test(Command): ...@@ -240,7 +241,7 @@ class test(Command):
# Purge modules under test from sys.modules. The test loader will # Purge modules under test from sys.modules. The test loader will
# re-import them from the build location. Required when 2to3 is used # re-import them from the build location. Required when 2to3 is used
# with namespace packages. # with namespace packages.
if six.PY3 and getattr(self.distribution, 'use_2to3', False): if not six.PY2 and getattr(self.distribution, 'use_2to3', False):
module = self.test_suite.split('.')[0] module = self.test_suite.split('.')[0]
if module in _namespace_packages: if module in _namespace_packages:
del_modules = [] del_modules = []
......
...@@ -24,7 +24,7 @@ from .upload import upload ...@@ -24,7 +24,7 @@ from .upload import upload
def _encode(s): def _encode(s):
errors = 'surrogateescape' if six.PY3 else 'strict' errors = 'strict' if six.PY2 else 'surrogateescape'
return s.encode('utf-8', errors) return s.encode('utf-8', errors)
...@@ -127,8 +127,8 @@ class upload_docs(upload): ...@@ -127,8 +127,8 @@ class upload_docs(upload):
""" """
Build up the MIME payload for the POST data Build up the MIME payload for the POST data
""" """
boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = b'\n--' + boundary sep_boundary = b'\n--' + boundary.encode('ascii')
end_boundary = sep_boundary + b'--' end_boundary = sep_boundary + b'--'
end_items = end_boundary, b"\n", end_items = end_boundary, b"\n",
builder = functools.partial( builder = functools.partial(
...@@ -138,7 +138,7 @@ class upload_docs(upload): ...@@ -138,7 +138,7 @@ class upload_docs(upload):
part_groups = map(builder, data.items()) part_groups = map(builder, data.items())
parts = itertools.chain.from_iterable(part_groups) parts = itertools.chain.from_iterable(part_groups)
body_items = itertools.chain(parts, end_items) body_items = itertools.chain(parts, end_items)
content_type = 'multipart/form-data; boundary=%s' % boundary.decode('ascii') content_type = 'multipart/form-data; boundary=%s' % boundary
return b''.join(body_items), content_type return b''.join(body_items), content_type
def upload_file(self, filename): def upload_file(self, filename):
...@@ -153,7 +153,7 @@ class upload_docs(upload): ...@@ -153,7 +153,7 @@ class upload_docs(upload):
# set up the authentication # set up the authentication
credentials = _encode(self.username + ':' + self.password) credentials = _encode(self.username + ':' + self.password)
credentials = standard_b64encode(credentials) credentials = standard_b64encode(credentials)
if six.PY3: if not six.PY2:
credentials = credentials.decode('ascii') credentials = credentials.decode('ascii')
auth = "Basic " + credentials auth = "Basic " + credentials
......
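A hedged restatement of the boundary handling settled on above: keep the boundary as text for the Content-Type header and encode it only where it is spliced into the byte payload:

    boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
    sep_boundary = b'\n--' + boundary.encode('ascii')              # bytes body
    content_type = 'multipart/form-data; boundary=%s' % boundary   # header stays str
    print(content_type)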
from distutils.dep_util import newer_group from distutils.dep_util import newer_group
# yes, this is was almost entirely copy-pasted from # yes, this is was almost entirely copy-pasted from
# 'newer_pairwise()', this is just another convenience # 'newer_pairwise()', this is just another convenience
# function. # function.
...@@ -10,7 +11,8 @@ def newer_pairwise_group(sources_groups, targets): ...@@ -10,7 +11,8 @@ def newer_pairwise_group(sources_groups, targets):
of 'newer_group()'. of 'newer_group()'.
""" """
if len(sources_groups) != len(targets): if len(sources_groups) != len(targets):
raise ValueError("'sources_group' and 'targets' must be the same length") raise ValueError(
"'sources_group' and 'targets' must be the same length")
# build a pair of lists (sources_groups, targets) where source is newer # build a pair of lists (sources_groups, targets) where source is newer
n_sources = [] n_sources = []
......
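A hedged usage sketch for newer_pairwise_group(), which pairs each group of sources with its corresponding target in the spirit of newer_group(); the paths are placeholders, and because the targets do not exist both pairs are reported as stale:

    from setuptools.dep_util import newer_pairwise_group

    sources_groups = [['lib/a.c', 'lib/a.h'], ['lib/b.c']]
    targets = ['build/a.o', 'build/b.o']
    print(newer_pairwise_group(sources_groups, targets))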
...@@ -162,7 +162,7 @@ def write_pkg_file(self, file): ...@@ -162,7 +162,7 @@ def write_pkg_file(self, file):
if self.download_url: if self.download_url:
write_field('Download-URL', self.download_url) write_field('Download-URL', self.download_url)
for project_url in self.project_urls.items(): for project_url in self.project_urls.items():
write_field('Project-URL', '%s, %s' % project_url) write_field('Project-URL', '%s, %s' % project_url)
long_desc = rfc822_escape(self.get_long_description()) long_desc = rfc822_escape(self.get_long_description())
write_field('Description', long_desc) write_field('Description', long_desc)
...@@ -571,7 +571,7 @@ class Distribution(_Distribution): ...@@ -571,7 +571,7 @@ class Distribution(_Distribution):
from setuptools.extern.six.moves.configparser import ConfigParser from setuptools.extern.six.moves.configparser import ConfigParser
# Ignore install directory options if we have a venv # Ignore install directory options if we have a venv
if six.PY3 and sys.prefix != sys.base_prefix: if not six.PY2 and sys.prefix != sys.base_prefix:
ignore_options = [ ignore_options = [
'install-base', 'install-platbase', 'install-lib', 'install-base', 'install-platbase', 'install-lib',
'install-platlib', 'install-purelib', 'install-headers', 'install-platlib', 'install-purelib', 'install-headers',
...@@ -593,7 +593,7 @@ class Distribution(_Distribution): ...@@ -593,7 +593,7 @@ class Distribution(_Distribution):
with io.open(filename, encoding='utf-8') as reader: with io.open(filename, encoding='utf-8') as reader:
if DEBUG: if DEBUG:
self.announce(" reading {filename}".format(**locals())) self.announce(" reading {filename}".format(**locals()))
(parser.read_file if six.PY3 else parser.readfp)(reader) (parser.readfp if six.PY2 else parser.read_file)(reader)
for section in parser.sections(): for section in parser.sections():
options = parser.options(section) options = parser.options(section)
opt_dict = self.get_option_dict(section) opt_dict = self.get_option_dict(section)
...@@ -636,7 +636,7 @@ class Distribution(_Distribution): ...@@ -636,7 +636,7 @@ class Distribution(_Distribution):
Ref #1653 Ref #1653
""" """
if six.PY3: if not six.PY2:
return val return val
try: try:
return val.encode() return val.encode()
......
...@@ -64,8 +64,8 @@ def fetch_build_egg(dist, req): ...@@ -64,8 +64,8 @@ def fetch_build_egg(dist, req):
dist.announce( dist.announce(
'WARNING: The pip package is not available, falling back ' 'WARNING: The pip package is not available, falling back '
'to EasyInstall for handling setup_requires/test_requires; ' 'to EasyInstall for handling setup_requires/test_requires; '
'this is deprecated and will be removed in a future version.' 'this is deprecated and will be removed in a future version.',
, log.WARN log.WARN
) )
return _legacy_fetch_build_egg(dist, req) return _legacy_fetch_build_egg(dist, req)
# Warn if wheel is not. # Warn if wheel is not.
......
...@@ -544,7 +544,7 @@ class SystemInfo: ...@@ -544,7 +544,7 @@ class SystemInfo:
# Except for VS15+, VC version is aligned with VS version # Except for VS15+, VC version is aligned with VS version
self.vs_ver = self.vc_ver = ( self.vs_ver = self.vc_ver = (
vc_ver or self._find_latest_available_vs_ver()) vc_ver or self._find_latest_available_vs_ver())
def _find_latest_available_vs_ver(self): def _find_latest_available_vs_ver(self):
""" """
...@@ -1225,7 +1225,7 @@ class EnvironmentInfo: ...@@ -1225,7 +1225,7 @@ class EnvironmentInfo:
arch_subdir = self.pi.target_dir(x64=True) arch_subdir = self.pi.target_dir(x64=True)
lib = join(self.si.WindowsSdkDir, 'lib') lib = join(self.si.WindowsSdkDir, 'lib')
libver = self._sdk_subdir libver = self._sdk_subdir
return [join(lib, '%sum%s' % (libver , arch_subdir))] return [join(lib, '%sum%s' % (libver, arch_subdir))]
@property @property
def OSIncludes(self): def OSIncludes(self):
...@@ -1274,13 +1274,16 @@ class EnvironmentInfo: ...@@ -1274,13 +1274,16 @@ class EnvironmentInfo:
libpath += [ libpath += [
ref, ref,
join(self.si.WindowsSdkDir, 'UnionMetadata'), join(self.si.WindowsSdkDir, 'UnionMetadata'),
join(ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'), join(
ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'),
join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'), join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'),
join(ref,'Windows.Networking.Connectivity.WwanContract', join(
'1.0.0.0'), ref, 'Windows.Networking.Connectivity.WwanContract',
join(self.si.WindowsSdkDir, 'ExtensionSDKs', 'Microsoft.VCLibs', '1.0.0.0'),
'%0.1f' % self.vs_ver, 'References', 'CommonConfiguration', join(
'neutral'), self.si.WindowsSdkDir, 'ExtensionSDKs', 'Microsoft.VCLibs',
'%0.1f' % self.vs_ver, 'References', 'CommonConfiguration',
'neutral'),
] ]
return libpath return libpath
......
...@@ -47,13 +47,17 @@ class Installer: ...@@ -47,13 +47,17 @@ class Installer:
"p = os.path.join(%(root)s, *%(pth)r)", "p = os.path.join(%(root)s, *%(pth)r)",
"importlib = has_mfs and __import__('importlib.util')", "importlib = has_mfs and __import__('importlib.util')",
"has_mfs and __import__('importlib.machinery')", "has_mfs and __import__('importlib.machinery')",
"m = has_mfs and " (
"m = has_mfs and "
"sys.modules.setdefault(%(pkg)r, " "sys.modules.setdefault(%(pkg)r, "
"importlib.util.module_from_spec(" "importlib.util.module_from_spec("
"importlib.machinery.PathFinder.find_spec(%(pkg)r, " "importlib.machinery.PathFinder.find_spec(%(pkg)r, "
"[os.path.dirname(p)])))", "[os.path.dirname(p)])))"
"m = m or " ),
"sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))", (
"m = m or "
"sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))"
),
"mp = (m or []) and m.__dict__.setdefault('__path__',[])", "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
"(p not in mp) and mp.append(p)", "(p not in mp) and mp.append(p)",
) )
......
...@@ -46,7 +46,8 @@ __all__ = [ ...@@ -46,7 +46,8 @@ __all__ = [
_SOCKET_TIMEOUT = 15 _SOCKET_TIMEOUT = 15
_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}" _tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}"
user_agent = _tmpl.format(py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools) user_agent = _tmpl.format(
py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools)
def parse_requirement_arg(spec): def parse_requirement_arg(spec):
...@@ -1092,7 +1093,8 @@ def open_with_auth(url, opener=urllib.request.urlopen): ...@@ -1092,7 +1093,8 @@ def open_with_auth(url, opener=urllib.request.urlopen):
# copy of urllib.parse._splituser from Python 3.8 # copy of urllib.parse._splituser from Python 3.8
def _splituser(host): def _splituser(host):
"""splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" """splituser('user[:passwd]@host[:port]')
--> 'user[:passwd]', 'host[:port]'."""
user, delim, host = host.rpartition('@') user, delim, host = host.rpartition('@')
return (user if delim else None), host return (user if delim else None), host
......
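The _splituser() helper above, copied from Python 3.8's urllib.parse, strips any credentials off the authority part of a URL. A self-contained copy shows its behaviour on hypothetical inputs:

def _splituser(host):
    """splituser('user[:passwd]@host[:port]')
    --> 'user[:passwd]', 'host[:port]'."""
    user, delim, host = host.rpartition('@')
    return (user if delim else None), host

print(_splituser('alice:s3cret@pypi.example.org:443'))  # ('alice:s3cret', 'pypi.example.org:443')
print(_splituser('pypi.example.org'))                   # (None, 'pypi.example.org')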
...@@ -16,7 +16,7 @@ def get_all_headers(message, key): ...@@ -16,7 +16,7 @@ def get_all_headers(message, key):
if six.PY2: if six.PY2:
def get_all_headers(message, key): def get_all_headers(message, key): # noqa
return message.getheaders(key) return message.getheaders(key)
......
...@@ -13,6 +13,8 @@ from setuptools.extern import six ...@@ -13,6 +13,8 @@ from setuptools.extern import six
from setuptools.extern.six.moves import builtins, map from setuptools.extern.six.moves import builtins, map
import pkg_resources.py31compat import pkg_resources.py31compat
from distutils.errors import DistutilsError
from pkg_resources import working_set
if sys.platform.startswith('java'): if sys.platform.startswith('java'):
import org.python.modules.posix.PosixModule as _os import org.python.modules.posix.PosixModule as _os
...@@ -23,8 +25,6 @@ try: ...@@ -23,8 +25,6 @@ try:
except NameError: except NameError:
_file = None _file = None
_open = open _open = open
from distutils.errors import DistutilsError
from pkg_resources import working_set
__all__ = [ __all__ = [
...@@ -374,7 +374,7 @@ class AbstractSandbox: ...@@ -374,7 +374,7 @@ class AbstractSandbox:
if hasattr(os, 'devnull'): if hasattr(os, 'devnull'):
_EXCEPTIONS = [os.devnull,] _EXCEPTIONS = [os.devnull]
else: else:
_EXCEPTIONS = [] _EXCEPTIONS = []
...@@ -466,7 +466,8 @@ class DirectorySandbox(AbstractSandbox): ...@@ -466,7 +466,8 @@ class DirectorySandbox(AbstractSandbox):
WRITE_FLAGS = functools.reduce( WRITE_FLAGS = functools.reduce(
operator.or_, [getattr(_os, a, 0) for a in operator.or_, [
getattr(_os, a, 0) for a in
"O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
) )
......
...@@ -38,22 +38,24 @@ def __boot(): ...@@ -38,22 +38,24 @@ def __boot():
else: else:
raise ImportError("Couldn't find the real 'site' module") raise ImportError("Couldn't find the real 'site' module")
known_paths = dict([(makepath(item)[1], 1) for item in sys.path]) # 2.2 comp # 2.2 comp
known_paths = dict([(
makepath(item)[1], 1) for item in sys.path]) # noqa
oldpos = getattr(sys, '__egginsert', 0) # save old insertion position oldpos = getattr(sys, '__egginsert', 0) # save old insertion position
sys.__egginsert = 0 # and reset the current one sys.__egginsert = 0 # and reset the current one
for item in PYTHONPATH: for item in PYTHONPATH:
addsitedir(item) addsitedir(item) # noqa
sys.__egginsert += oldpos # restore effective old position sys.__egginsert += oldpos # restore effective old position
d, nd = makepath(stdpath[0]) d, nd = makepath(stdpath[0]) # noqa
insert_at = None insert_at = None
new_path = [] new_path = []
for item in sys.path: for item in sys.path:
p, np = makepath(item) p, np = makepath(item) # noqa
if np == nd and insert_at is None: if np == nd and insert_at is None:
# We've hit the first 'system' path entry, so added entries go here # We've hit the first 'system' path entry, so added entries go here
......
...@@ -35,7 +35,8 @@ try: ...@@ -35,7 +35,8 @@ try:
except AttributeError: except AttributeError:
HTTPSHandler = HTTPSConnection = object HTTPSHandler = HTTPSConnection = object
is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection) is_available = ssl is not None and object not in (
HTTPSHandler, HTTPSConnection)
try: try:
...@@ -85,8 +86,10 @@ if not match_hostname: ...@@ -85,8 +86,10 @@ if not match_hostname:
return dn.lower() == hostname.lower() return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1. # RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which # The client SHOULD NOT attempt to match a
# the wildcard character comprises a label other than the left-most label. # presented identifier in which the wildcard
# character comprises a label other than the
# left-most label.
if leftmost == '*': if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless # When '*' is a fragment by itself, it matches a non-empty dotless
# fragment. # fragment.
...@@ -137,15 +140,16 @@ if not match_hostname: ...@@ -137,15 +140,16 @@ if not match_hostname:
return return
dnsnames.append(value) dnsnames.append(value)
if len(dnsnames) > 1: if len(dnsnames) > 1:
raise CertificateError("hostname %r " raise CertificateError(
"doesn't match either of %s" "hostname %r doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames)))) % (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1: elif len(dnsnames) == 1:
raise CertificateError("hostname %r " raise CertificateError(
"doesn't match %r" "hostname %r doesn't match %r"
% (hostname, dnsnames[0])) % (hostname, dnsnames[0]))
else: else:
raise CertificateError("no appropriate commonName or " raise CertificateError(
"no appropriate commonName or "
"subjectAltName fields were found") "subjectAltName fields were found")
...@@ -158,7 +162,8 @@ class VerifyingHTTPSHandler(HTTPSHandler): ...@@ -158,7 +162,8 @@ class VerifyingHTTPSHandler(HTTPSHandler):
def https_open(self, req): def https_open(self, req):
return self.do_open( return self.do_open(
lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw),
req
) )
......
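The re-wrapped comment above restates the rule from RFC 6125, section 6.4.3: a wildcard is honoured only when it is the left-most label of the presented identifier. The standard library's ssl.match_hostname (still available on the Python 3.5-3.8 interpreters targeted here, though deprecated and removed in later releases) applies the same rule, which makes the behaviour easy to observe:

import ssl

# Hypothetical certificate with a wildcard commonName and no subjectAltName.
cert = {'subject': ((('commonName', '*.example.com'),),)}

ssl.match_hostname(cert, 'www.example.com')  # matches: '*' is the left-most label
try:
    ssl.match_hostname(cert, 'www.sub.example.com')  # wildcard must not span dots
except ssl.CertificateError as exc:
    print(exc)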
...@@ -6,7 +6,7 @@ from setuptools.extern.six import PY2, PY3 ...@@ -6,7 +6,7 @@ from setuptools.extern.six import PY2, PY3
__all__ = [ __all__ = [
'fail_on_ascii', 'py2_only', 'py3_only' 'fail_on_ascii', 'py2_only', 'py3_only'
] ]
......
mock mock
pytest-flake8; python_version!="3.4" pytest-flake8
pytest-flake8<=1.0.0; python_version=="3.4" flake8-2020; python_version>="3.6"
virtualenv>=13.0.0 virtualenv>=13.0.0
pytest-virtualenv>=1.2.7 pytest-virtualenv>=1.2.7
pytest>=3.7 pytest>=3.7
......
...@@ -8,8 +8,7 @@ from setuptools.dist import Distribution ...@@ -8,8 +8,7 @@ from setuptools.dist import Distribution
class TestBuildCLib: class TestBuildCLib:
@mock.patch( @mock.patch(
'setuptools.command.build_clib.newer_pairwise_group' 'setuptools.command.build_clib.newer_pairwise_group')
)
def test_build_libraries(self, mock_newer): def test_build_libraries(self, mock_newer):
dist = Distribution() dist = Distribution()
cmd = build_clib(dist) cmd = build_clib(dist)
......
...@@ -23,6 +23,7 @@ class BuildBackendBase: ...@@ -23,6 +23,7 @@ class BuildBackendBase:
self.env = env self.env = env
self.backend_name = backend_name self.backend_name = backend_name
class BuildBackend(BuildBackendBase): class BuildBackend(BuildBackendBase):
"""PEP 517 Build Backend""" """PEP 517 Build Backend"""
...@@ -262,6 +263,27 @@ class TestBuildMetaBackend: ...@@ -262,6 +263,27 @@ class TestBuildMetaBackend:
assert os.path.isfile( assert os.path.isfile(
os.path.join(os.path.abspath("out_sdist"), sdist_name)) os.path.join(os.path.abspath("out_sdist"), sdist_name))
def test_build_sdist_pyproject_toml_exists(self, tmpdir_cwd):
files = {
'setup.py': DALS("""
__import__('setuptools').setup(
name='foo',
version='0.0.0',
py_modules=['hello']
)"""),
'hello.py': '',
'pyproject.toml': DALS("""
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta
"""),
}
build_files(files)
build_backend = self.get_build_backend()
targz_path = build_backend.build_sdist("temp")
with tarfile.open(os.path.join("temp", targz_path)) as tar:
assert any('pyproject.toml' in name for name in tar.getnames())
def test_build_sdist_setup_py_exists(self, tmpdir_cwd): def test_build_sdist_setup_py_exists(self, tmpdir_cwd):
# If build_sdist is called from a script other than setup.py, # If build_sdist is called from a script other than setup.py,
# ensure setup.py is included # ensure setup.py is included
...@@ -385,6 +407,28 @@ class TestBuildMetaBackend: ...@@ -385,6 +407,28 @@ class TestBuildMetaBackend:
assert expected == sorted(actual) assert expected == sorted(actual)
_sys_argv_0_passthrough = {
'setup.py': DALS("""
import os
import sys
__import__('setuptools').setup(
name='foo',
version='0.0.0',
)
sys_argv = os.path.abspath(sys.argv[0])
file_path = os.path.abspath('setup.py')
assert sys_argv == file_path
""")
}
def test_sys_argv_passthrough(self, tmpdir_cwd):
build_files(self._sys_argv_0_passthrough)
build_backend = self.get_build_backend()
with pytest.raises(AssertionError):
build_backend.build_sdist("temp")
class TestBuildMetaLegacyBackend(TestBuildMetaBackend): class TestBuildMetaLegacyBackend(TestBuildMetaBackend):
backend_name = 'setuptools.build_meta:__legacy__' backend_name = 'setuptools.build_meta:__legacy__'
...@@ -396,3 +440,9 @@ class TestBuildMetaLegacyBackend(TestBuildMetaBackend): ...@@ -396,3 +440,9 @@ class TestBuildMetaLegacyBackend(TestBuildMetaBackend):
build_backend = self.get_build_backend() build_backend = self.get_build_backend()
build_backend.build_sdist("temp") build_backend.build_sdist("temp")
def test_sys_argv_passthrough(self, tmpdir_cwd):
build_files(self._sys_argv_0_passthrough)
build_backend = self.get_build_backend()
build_backend.build_sdist("temp")
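The new tests above drive the PEP 517 hooks directly. _sys_argv_0_passthrough checks that sys.argv[0] is rewritten to the setup script only by the legacy backend (the #1704 change): the plain backend leaves argv untouched, so the assertion inside the generated setup.py fails, while TestBuildMetaLegacyBackend overrides the test and expects the build to succeed. Outside the test harness the same hooks can be invoked like this (a minimal sketch with a hypothetical project directory):

import os

from setuptools import build_meta

os.chdir('/path/to/project')  # hypothetical source tree containing setup.py
print(build_meta.get_requires_for_build_sdist())
print(build_meta.build_sdist('dist'))  # returns the file name of the created archive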
...@@ -695,7 +695,7 @@ class TestOptions: ...@@ -695,7 +695,7 @@ class TestOptions:
) )
with get_dist(tmpdir) as dist: with get_dist(tmpdir) as dist:
assert set(dist.packages) == set( assert set(dist.packages) == set(
['fake_package', 'fake_package.sub_two']) ['fake_package', 'fake_package.sub_two'])
@py2_only @py2_only
def test_find_namespace_directive_fails_on_py2(self, tmpdir): def test_find_namespace_directive_fails_on_py2(self, tmpdir):
...@@ -748,7 +748,7 @@ class TestOptions: ...@@ -748,7 +748,7 @@ class TestOptions:
) )
with get_dist(tmpdir) as dist: with get_dist(tmpdir) as dist:
assert set(dist.packages) == { assert set(dist.packages) == {
'fake_package', 'fake_package.sub_two' 'fake_package', 'fake_package.sub_two'
} }
def test_extras_require(self, tmpdir): def test_extras_require(self, tmpdir):
...@@ -881,7 +881,7 @@ class TestExternalSetters: ...@@ -881,7 +881,7 @@ class TestExternalSetters:
return None return None
@patch.object(_Distribution, '__init__', autospec=True) @patch.object(_Distribution, '__init__', autospec=True)
def test_external_setters(self, mock_parent_init, tmpdir): def test_external_setters(self, mock_parent_init, tmpdir):
mock_parent_init.side_effect = self._fake_distribution_init mock_parent_init.side_effect = self._fake_distribution_init
dist = Distribution(attrs={ dist = Distribution(attrs={
......
...@@ -95,7 +95,7 @@ class TestDevelop: ...@@ -95,7 +95,7 @@ class TestDevelop:
with io.open(fn) as init_file: with io.open(fn) as init_file:
init = init_file.read().strip() init = init_file.read().strip()
expected = 'print("foo")' if six.PY3 else 'print "foo"' expected = 'print "foo"' if six.PY2 else 'print("foo")'
assert init == expected assert init == expected
def test_console_scripts(self, tmpdir): def test_console_scripts(self, tmpdir):
...@@ -161,7 +161,7 @@ class TestNamespaces: ...@@ -161,7 +161,7 @@ class TestNamespaces:
reason="https://github.com/pypa/setuptools/issues/851", reason="https://github.com/pypa/setuptools/issues/851",
) )
@pytest.mark.skipif( @pytest.mark.skipif(
platform.python_implementation() == 'PyPy' and six.PY3, platform.python_implementation() == 'PyPy' and not six.PY2,
reason="https://github.com/pypa/setuptools/issues/1202", reason="https://github.com/pypa/setuptools/issues/1202",
) )
def test_namespace_package_importable(self, tmpdir): def test_namespace_package_importable(self, tmpdir):
......
...@@ -61,7 +61,8 @@ def test_dist_fetch_build_egg(tmpdir): ...@@ -61,7 +61,8 @@ def test_dist_fetch_build_egg(tmpdir):
dist.fetch_build_egg(r) dist.fetch_build_egg(r)
for r in reqs for r in reqs
] ]
assert [dist.key for dist in resolved_dists if dist] == reqs # noqa below because on Python 2 it causes flakes
assert [dist.key for dist in resolved_dists if dist] == reqs # noqa
def test_dist__get_unpatched_deprecated(): def test_dist__get_unpatched_deprecated():
...@@ -284,7 +285,7 @@ def test_provides_extras_deterministic_order(): ...@@ -284,7 +285,7 @@ def test_provides_extras_deterministic_order():
dist = Distribution(attrs) dist = Distribution(attrs)
assert dist.metadata.provides_extras == ['b', 'a'] assert dist.metadata.provides_extras == ['b', 'a']
CHECK_PACKAGE_DATA_TESTS = ( CHECK_PACKAGE_DATA_TESTS = (
# Valid. # Valid.
({ ({
...@@ -309,7 +310,8 @@ CHECK_PACKAGE_DATA_TESTS = ( ...@@ -309,7 +310,8 @@ CHECK_PACKAGE_DATA_TESTS = (
({ ({
'hello': str('*.msg'), 'hello': str('*.msg'),
}, ( }, (
"\"values of 'package_data' dict\" must be a list of strings (got '*.msg')" "\"values of 'package_data' dict\" "
"must be a list of strings (got '*.msg')"
)), )),
# Invalid value type (generators are single use) # Invalid value type (generators are single use)
({ ({
...@@ -321,10 +323,12 @@ CHECK_PACKAGE_DATA_TESTS = ( ...@@ -321,10 +323,12 @@ CHECK_PACKAGE_DATA_TESTS = (
) )
@pytest.mark.parametrize('package_data, expected_message', CHECK_PACKAGE_DATA_TESTS) @pytest.mark.parametrize(
'package_data, expected_message', CHECK_PACKAGE_DATA_TESTS)
def test_check_package_data(package_data, expected_message): def test_check_package_data(package_data, expected_message):
if expected_message is None: if expected_message is None:
assert check_package_data(None, 'package_data', package_data) is None assert check_package_data(None, 'package_data', package_data) is None
else: else:
with pytest.raises(DistutilsSetupError, match=re.escape(expected_message)): with pytest.raises(
DistutilsSetupError, match=re.escape(expected_message)):
check_package_data(None, str('package_data'), package_data) check_package_data(None, str('package_data'), package_data)
...@@ -629,7 +629,7 @@ class TestSetupRequires: ...@@ -629,7 +629,7 @@ class TestSetupRequires:
test_pkg = create_setup_requires_package( test_pkg = create_setup_requires_package(
temp_dir, setup_attrs=dict(version='attr: foobar.version'), temp_dir, setup_attrs=dict(version='attr: foobar.version'),
make_package=make_dependency_sdist, make_package=make_dependency_sdist,
use_setup_cfg=use_setup_cfg+('version',), use_setup_cfg=use_setup_cfg + ('version',),
) )
test_setup_py = os.path.join(test_pkg, 'setup.py') test_setup_py = os.path.join(test_pkg, 'setup.py')
with contexts.quiet() as (stdout, stderr): with contexts.quiet() as (stdout, stderr):
...@@ -671,8 +671,10 @@ class TestSetupRequires: ...@@ -671,8 +671,10 @@ class TestSetupRequires:
dep_url = path_to_url(dep_sdist, authority='localhost') dep_url = path_to_url(dep_sdist, authority='localhost')
test_pkg = create_setup_requires_package( test_pkg = create_setup_requires_package(
temp_dir, temp_dir,
'python-xlib', '0.19', # Ignored (overridden by setup_attrs). # Ignored (overridden by setup_attrs)
setup_attrs=dict(setup_requires='dependency @ %s' % dep_url)) 'python-xlib', '0.19',
setup_attrs=dict(
setup_requires='dependency @ %s' % dep_url))
test_setup_py = os.path.join(test_pkg, 'setup.py') test_setup_py = os.path.join(test_pkg, 'setup.py')
run_setup(test_setup_py, [str('--version')]) run_setup(test_setup_py, [str('--version')])
assert len(mock_index.requests) == 0 assert len(mock_index.requests) == 0
...@@ -710,11 +712,14 @@ class TestSetupRequires: ...@@ -710,11 +712,14 @@ class TestSetupRequires:
dep_1_0_sdist = 'dep-1.0.tar.gz' dep_1_0_sdist = 'dep-1.0.tar.gz'
dep_1_0_url = path_to_url(str(tmpdir / dep_1_0_sdist)) dep_1_0_url = path_to_url(str(tmpdir / dep_1_0_sdist))
dep_1_0_python_requires = '>=2.7' dep_1_0_python_requires = '>=2.7'
make_python_requires_sdist(str(tmpdir / dep_1_0_sdist), 'dep', '1.0', dep_1_0_python_requires) make_python_requires_sdist(
str(tmpdir / dep_1_0_sdist), 'dep', '1.0', dep_1_0_python_requires)
dep_2_0_sdist = 'dep-2.0.tar.gz' dep_2_0_sdist = 'dep-2.0.tar.gz'
dep_2_0_url = path_to_url(str(tmpdir / dep_2_0_sdist)) dep_2_0_url = path_to_url(str(tmpdir / dep_2_0_sdist))
dep_2_0_python_requires = '!=' + '.'.join(map(str, sys.version_info[:2])) + '.*' dep_2_0_python_requires = '!=' + '.'.join(
make_python_requires_sdist(str(tmpdir / dep_2_0_sdist), 'dep', '2.0', dep_2_0_python_requires) map(str, sys.version_info[:2])) + '.*'
make_python_requires_sdist(
str(tmpdir / dep_2_0_sdist), 'dep', '2.0', dep_2_0_python_requires)
index = tmpdir / 'index.html' index = tmpdir / 'index.html'
index.write_text(DALS( index.write_text(DALS(
''' '''
...@@ -726,7 +731,7 @@ class TestSetupRequires: ...@@ -726,7 +731,7 @@ class TestSetupRequires:
<a href="{dep_2_0_url}" data-requires-python="{dep_2_0_python_requires}">{dep_2_0_sdist}</a><br/> <a href="{dep_2_0_url}" data-requires-python="{dep_2_0_python_requires}">{dep_2_0_sdist}</a><br/>
</body> </body>
</html> </html>
''').format( ''').format( # noqa
dep_1_0_url=dep_1_0_url, dep_1_0_url=dep_1_0_url,
dep_1_0_sdist=dep_1_0_sdist, dep_1_0_sdist=dep_1_0_sdist,
dep_1_0_python_requires=dep_1_0_python_requires, dep_1_0_python_requires=dep_1_0_python_requires,
...@@ -738,23 +743,29 @@ class TestSetupRequires: ...@@ -738,23 +743,29 @@ class TestSetupRequires:
with contexts.save_pkg_resources_state(): with contexts.save_pkg_resources_state():
test_pkg = create_setup_requires_package( test_pkg = create_setup_requires_package(
str(tmpdir), str(tmpdir),
'python-xlib', '0.19', # Ignored (overridden by setup_attrs). 'python-xlib', '0.19', # Ignored (overridden by setup_attrs).
setup_attrs=dict(setup_requires='dep', dependency_links=[index_url])) setup_attrs=dict(
setup_requires='dep', dependency_links=[index_url]))
test_setup_py = os.path.join(test_pkg, 'setup.py') test_setup_py = os.path.join(test_pkg, 'setup.py')
run_setup(test_setup_py, [str('--version')]) run_setup(test_setup_py, [str('--version')])
eggs = list(map(str, pkg_resources.find_distributions(os.path.join(test_pkg, '.eggs')))) eggs = list(map(str, pkg_resources.find_distributions(
os.path.join(test_pkg, '.eggs'))))
assert eggs == ['dep 1.0'] assert eggs == ['dep 1.0']
@pytest.mark.parametrize('use_legacy_installer,with_dependency_links_in_setup_py', @pytest.mark.parametrize(
itertools.product((False, True), (False, True))) 'use_legacy_installer,with_dependency_links_in_setup_py',
def test_setup_requires_with_find_links_in_setup_cfg(self, monkeypatch, itertools.product((False, True), (False, True)))
use_legacy_installer, def test_setup_requires_with_find_links_in_setup_cfg(
with_dependency_links_in_setup_py): self, monkeypatch, use_legacy_installer,
with_dependency_links_in_setup_py):
monkeypatch.setenv(str('PIP_RETRIES'), str('0')) monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
with contexts.save_pkg_resources_state(): with contexts.save_pkg_resources_state():
with contexts.tempdir() as temp_dir: with contexts.tempdir() as temp_dir:
make_trivial_sdist(os.path.join(temp_dir, 'python-xlib-42.tar.gz'), 'python-xlib', '42') make_trivial_sdist(
os.path.join(temp_dir, 'python-xlib-42.tar.gz'),
'python-xlib',
'42')
test_pkg = os.path.join(temp_dir, 'test_pkg') test_pkg = os.path.join(temp_dir, 'test_pkg')
test_setup_py = os.path.join(test_pkg, 'setup.py') test_setup_py = os.path.join(test_pkg, 'setup.py')
test_setup_cfg = os.path.join(test_pkg, 'setup.cfg') test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
...@@ -771,7 +782,7 @@ class TestSetupRequires: ...@@ -771,7 +782,7 @@ class TestSetupRequires:
installer.fetch_build_egg = installer._legacy_fetch_build_egg installer.fetch_build_egg = installer._legacy_fetch_build_egg
setup(setup_requires='python-xlib==42', setup(setup_requires='python-xlib==42',
dependency_links={dependency_links!r}) dependency_links={dependency_links!r})
''').format(use_legacy_installer=use_legacy_installer, ''').format(use_legacy_installer=use_legacy_installer, # noqa
dependency_links=dependency_links)) dependency_links=dependency_links))
with open(test_setup_cfg, 'w') as fp: with open(test_setup_cfg, 'w') as fp:
fp.write(DALS( fp.write(DALS(
...@@ -783,14 +794,17 @@ class TestSetupRequires: ...@@ -783,14 +794,17 @@ class TestSetupRequires:
find_links=temp_dir)) find_links=temp_dir))
run_setup(test_setup_py, [str('--version')]) run_setup(test_setup_py, [str('--version')])
def test_setup_requires_with_transitive_extra_dependency(self, monkeypatch): def test_setup_requires_with_transitive_extra_dependency(
self, monkeypatch):
# Use case: installing a package with a build dependency on # Use case: installing a package with a build dependency on
# an already installed `dep[extra]`, which in turn depends # an already installed `dep[extra]`, which in turn depends
# on `extra_dep` (which is not already installed). # on `extra_dep` (which is not already installed).
with contexts.save_pkg_resources_state(): with contexts.save_pkg_resources_state():
with contexts.tempdir() as temp_dir: with contexts.tempdir() as temp_dir:
# Create source distribution for `extra_dep`. # Create source distribution for `extra_dep`.
make_trivial_sdist(os.path.join(temp_dir, 'extra_dep-1.0.tar.gz'), 'extra_dep', '1.0') make_trivial_sdist(
os.path.join(temp_dir, 'extra_dep-1.0.tar.gz'),
'extra_dep', '1.0')
# Create source tree for `dep`. # Create source tree for `dep`.
dep_pkg = os.path.join(temp_dir, 'dep') dep_pkg = os.path.join(temp_dir, 'dep')
os.mkdir(dep_pkg) os.mkdir(dep_pkg)
...@@ -806,12 +820,12 @@ class TestSetupRequires: ...@@ -806,12 +820,12 @@ class TestSetupRequires:
'setup.cfg': '', 'setup.cfg': '',
}, prefix=dep_pkg) }, prefix=dep_pkg)
# "Install" dep. # "Install" dep.
run_setup(os.path.join(dep_pkg, 'setup.py'), [str('dist_info')]) run_setup(
os.path.join(dep_pkg, 'setup.py'), [str('dist_info')])
working_set.add_entry(dep_pkg) working_set.add_entry(dep_pkg)
# Create source tree for test package. # Create source tree for test package.
test_pkg = os.path.join(temp_dir, 'test_pkg') test_pkg = os.path.join(temp_dir, 'test_pkg')
test_setup_py = os.path.join(test_pkg, 'setup.py') test_setup_py = os.path.join(test_pkg, 'setup.py')
test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
os.mkdir(test_pkg) os.mkdir(test_pkg)
with open(test_setup_py, 'w') as fp: with open(test_setup_py, 'w') as fp:
fp.write(DALS( fp.write(DALS(
...@@ -881,16 +895,19 @@ def make_nspkg_sdist(dist_path, distname, version): ...@@ -881,16 +895,19 @@ def make_nspkg_sdist(dist_path, distname, version):
def make_python_requires_sdist(dist_path, distname, version, python_requires): def make_python_requires_sdist(dist_path, distname, version, python_requires):
make_sdist(dist_path, [ make_sdist(dist_path, [
('setup.py', DALS("""\ (
import setuptools 'setup.py',
setuptools.setup( DALS("""\
name={name!r}, import setuptools
version={version!r}, setuptools.setup(
python_requires={python_requires!r}, name={name!r},
) version={version!r},
""").format(name=distname, version=version, python_requires={python_requires!r},
python_requires=python_requires)), )
('setup.cfg', ''), """).format(
name=distname, version=version,
python_requires=python_requires)),
('setup.cfg', ''),
]) ])
...@@ -948,16 +965,16 @@ def create_setup_requires_package(path, distname='foobar', version='0.1', ...@@ -948,16 +965,16 @@ def create_setup_requires_package(path, distname='foobar', version='0.1',
value = ';'.join(value) value = ';'.join(value)
section.append('%s: %s' % (name, value)) section.append('%s: %s' % (name, value))
test_setup_cfg_contents = DALS( test_setup_cfg_contents = DALS(
""" """
[metadata] [metadata]
{metadata} {metadata}
[options] [options]
{options} {options}
""" """
).format( ).format(
options='\n'.join(options), options='\n'.join(options),
metadata='\n'.join(metadata), metadata='\n'.join(metadata),
) )
else: else:
test_setup_cfg_contents = '' test_setup_cfg_contents = ''
with open(os.path.join(test_pkg, 'setup.cfg'), 'w') as f: with open(os.path.join(test_pkg, 'setup.cfg'), 'w') as f:
......
...@@ -525,19 +525,19 @@ class TestEggInfo: ...@@ -525,19 +525,19 @@ class TestEggInfo:
license_file = LICENSE license_file = LICENSE
"""), """),
'LICENSE': "Test license" 'LICENSE': "Test license"
}, True), # with license }, True), # with license
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
license_file = INVALID_LICENSE license_file = INVALID_LICENSE
"""), """),
'LICENSE': "Test license" 'LICENSE': "Test license"
}, False), # with an invalid license }, False), # with an invalid license
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
"""), """),
'LICENSE': "Test license" 'LICENSE': "Test license"
}, False), # no license_file attribute }, False), # no license_file attribute
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
...@@ -545,7 +545,7 @@ class TestEggInfo: ...@@ -545,7 +545,7 @@ class TestEggInfo:
"""), """),
'MANIFEST.in': "exclude LICENSE", 'MANIFEST.in': "exclude LICENSE",
'LICENSE': "Test license" 'LICENSE': "Test license"
}, False) # license file is manually excluded }, False) # license file is manually excluded
]) ])
def test_setup_cfg_license_file( def test_setup_cfg_license_file(
self, tmpdir_cwd, env, files, license_in_sources): self, tmpdir_cwd, env, files, license_in_sources):
...@@ -565,7 +565,8 @@ class TestEggInfo: ...@@ -565,7 +565,8 @@ class TestEggInfo:
assert 'LICENSE' in sources_text assert 'LICENSE' in sources_text
else: else:
assert 'LICENSE' not in sources_text assert 'LICENSE' not in sources_text
assert 'INVALID_LICENSE' not in sources_text # for invalid license test # for invalid license test
assert 'INVALID_LICENSE' not in sources_text
@pytest.mark.parametrize("files, incl_licenses, excl_licenses", [ @pytest.mark.parametrize("files, incl_licenses, excl_licenses", [
({ ({
...@@ -577,7 +578,7 @@ class TestEggInfo: ...@@ -577,7 +578,7 @@ class TestEggInfo:
"""), """),
'LICENSE-ABC': "ABC license", 'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license" 'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-ABC', 'LICENSE-XYZ'], []), # with licenses }, ['LICENSE-ABC', 'LICENSE-XYZ'], []), # with licenses
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
...@@ -585,7 +586,7 @@ class TestEggInfo: ...@@ -585,7 +586,7 @@ class TestEggInfo:
"""), """),
'LICENSE-ABC': "ABC license", 'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license" 'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-ABC', 'LICENSE-XYZ'], []), # with commas }, ['LICENSE-ABC', 'LICENSE-XYZ'], []), # with commas
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
...@@ -594,7 +595,7 @@ class TestEggInfo: ...@@ -594,7 +595,7 @@ class TestEggInfo:
"""), """),
'LICENSE-ABC': "ABC license", 'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license" 'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-ABC'], ['LICENSE-XYZ']), # with one license }, ['LICENSE-ABC'], ['LICENSE-XYZ']), # with one license
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
...@@ -602,7 +603,7 @@ class TestEggInfo: ...@@ -602,7 +603,7 @@ class TestEggInfo:
"""), """),
'LICENSE-ABC': "ABC license", 'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license" 'LICENSE-XYZ': "XYZ license"
}, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # empty }, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # empty
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
...@@ -610,7 +611,7 @@ class TestEggInfo: ...@@ -610,7 +611,7 @@ class TestEggInfo:
"""), """),
'LICENSE-ABC': "ABC license", 'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license" 'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-XYZ'], ['LICENSE-ABC']), # on same line }, ['LICENSE-XYZ'], ['LICENSE-ABC']), # on same line
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
...@@ -619,12 +620,12 @@ class TestEggInfo: ...@@ -619,12 +620,12 @@ class TestEggInfo:
INVALID_LICENSE INVALID_LICENSE
"""), """),
'LICENSE-ABC': "Test license" 'LICENSE-ABC': "Test license"
}, ['LICENSE-ABC'], ['INVALID_LICENSE']), # with an invalid license }, ['LICENSE-ABC'], ['INVALID_LICENSE']), # with an invalid license
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
"""), """),
'LICENSE': "Test license" 'LICENSE': "Test license"
}, [], ['LICENSE']), # no license_files attribute }, [], ['LICENSE']), # no license_files attribute
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
...@@ -632,7 +633,7 @@ class TestEggInfo: ...@@ -632,7 +633,7 @@ class TestEggInfo:
"""), """),
'MANIFEST.in': "exclude LICENSE", 'MANIFEST.in': "exclude LICENSE",
'LICENSE': "Test license" 'LICENSE': "Test license"
}, [], ['LICENSE']), # license file is manually excluded }, [], ['LICENSE']), # license file is manually excluded
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
...@@ -643,7 +644,7 @@ class TestEggInfo: ...@@ -643,7 +644,7 @@ class TestEggInfo:
'MANIFEST.in': "exclude LICENSE-XYZ", 'MANIFEST.in': "exclude LICENSE-XYZ",
'LICENSE-ABC': "ABC license", 'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license" 'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-ABC'], ['LICENSE-XYZ']) # subset is manually excluded }, ['LICENSE-ABC'], ['LICENSE-XYZ']) # subset is manually excluded
]) ])
def test_setup_cfg_license_files( def test_setup_cfg_license_files(
self, tmpdir_cwd, env, files, incl_licenses, excl_licenses): self, tmpdir_cwd, env, files, incl_licenses, excl_licenses):
...@@ -674,7 +675,7 @@ class TestEggInfo: ...@@ -674,7 +675,7 @@ class TestEggInfo:
"""), """),
'LICENSE-ABC': "ABC license", 'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license" 'LICENSE-XYZ': "XYZ license"
}, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # both empty }, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # both empty
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
...@@ -684,7 +685,8 @@ class TestEggInfo: ...@@ -684,7 +685,8 @@ class TestEggInfo:
"""), """),
'LICENSE-ABC': "ABC license", 'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license" 'LICENSE-XYZ': "XYZ license"
}, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # license_file is still singular # license_file is still singular
}, [], ['LICENSE-ABC', 'LICENSE-XYZ']),
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
...@@ -696,7 +698,7 @@ class TestEggInfo: ...@@ -696,7 +698,7 @@ class TestEggInfo:
'LICENSE-ABC': "ABC license", 'LICENSE-ABC': "ABC license",
'LICENSE-PQR': "PQR license", 'LICENSE-PQR': "PQR license",
'LICENSE-XYZ': "XYZ license" 'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []), # combined }, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []), # combined
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
...@@ -709,7 +711,8 @@ class TestEggInfo: ...@@ -709,7 +711,8 @@ class TestEggInfo:
'LICENSE-ABC': "ABC license", 'LICENSE-ABC': "ABC license",
'LICENSE-PQR': "PQR license", 'LICENSE-PQR': "PQR license",
'LICENSE-XYZ': "XYZ license" 'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []), # duplicate license # duplicate license
}, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []),
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
...@@ -720,7 +723,8 @@ class TestEggInfo: ...@@ -720,7 +723,8 @@ class TestEggInfo:
'LICENSE-ABC': "ABC license", 'LICENSE-ABC': "ABC license",
'LICENSE-PQR': "PQR license", 'LICENSE-PQR': "PQR license",
'LICENSE-XYZ': "XYZ license" 'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-ABC', 'LICENSE-XYZ'], ['LICENSE-PQR']), # combined subset # combined subset
}, ['LICENSE-ABC', 'LICENSE-XYZ'], ['LICENSE-PQR']),
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
...@@ -730,7 +734,8 @@ class TestEggInfo: ...@@ -730,7 +734,8 @@ class TestEggInfo:
LICENSE-PQR LICENSE-PQR
"""), """),
'LICENSE-PQR': "Test license" 'LICENSE-PQR': "Test license"
}, ['LICENSE-PQR'], ['LICENSE-ABC', 'LICENSE-XYZ']), # with invalid licenses # with invalid licenses
}, ['LICENSE-PQR'], ['LICENSE-ABC', 'LICENSE-XYZ']),
({ ({
'setup.cfg': DALS(""" 'setup.cfg': DALS("""
[metadata] [metadata]
...@@ -743,7 +748,8 @@ class TestEggInfo: ...@@ -743,7 +748,8 @@ class TestEggInfo:
'LICENSE-ABC': "ABC license", 'LICENSE-ABC': "ABC license",
'LICENSE-PQR': "PQR license", 'LICENSE-PQR': "PQR license",
'LICENSE-XYZ': "XYZ license" 'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-XYZ'], ['LICENSE-ABC', 'LICENSE-PQR']) # manually excluded # manually excluded
}, ['LICENSE-XYZ'], ['LICENSE-ABC', 'LICENSE-PQR'])
]) ])
def test_setup_cfg_license_file_license_files( def test_setup_cfg_license_file_license_files(
self, tmpdir_cwd, env, files, incl_licenses, excl_licenses): self, tmpdir_cwd, env, files, incl_licenses, excl_licenses):
......
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
"""sdist tests""" """sdist tests"""
from __future__ import print_function, unicode_literals
import os import os
import shutil
import sys import sys
import tempfile import tempfile
import unicodedata import unicodedata
...@@ -50,7 +51,7 @@ def quiet(): ...@@ -50,7 +51,7 @@ def quiet():
# Convert to POSIX path # Convert to POSIX path
def posix(path): def posix(path):
if six.PY3 and not isinstance(path, str): if not six.PY2 and not isinstance(path, str):
return path.replace(os.sep.encode('ascii'), b'/') return path.replace(os.sep.encode('ascii'), b'/')
else: else:
return path.replace(os.sep, '/') return path.replace(os.sep, '/')
...@@ -89,30 +90,28 @@ fail_on_latin1_encoded_filenames = pytest.mark.xfail( ...@@ -89,30 +90,28 @@ fail_on_latin1_encoded_filenames = pytest.mark.xfail(
) )
def touch(path):
path.write_text('', encoding='utf-8')
class TestSdistTest: class TestSdistTest:
def setup_method(self, method): @pytest.fixture(autouse=True)
self.temp_dir = tempfile.mkdtemp() def source_dir(self, tmpdir):
with open(os.path.join(self.temp_dir, 'setup.py'), 'w') as f: (tmpdir / 'setup.py').write_text(SETUP_PY, encoding='utf-8')
f.write(SETUP_PY)
# Set up the rest of the test package # Set up the rest of the test package
test_pkg = os.path.join(self.temp_dir, 'sdist_test') test_pkg = tmpdir / 'sdist_test'
os.mkdir(test_pkg) test_pkg.mkdir()
data_folder = os.path.join(self.temp_dir, "d") data_folder = tmpdir / 'd'
os.mkdir(data_folder) data_folder.mkdir()
# *.rst was not included in package_data, so c.rst should not be # *.rst was not included in package_data, so c.rst should not be
# automatically added to the manifest when not under version control # automatically added to the manifest when not under version control
for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst', for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
os.path.join(data_folder, "e.dat")]: touch(test_pkg / fname)
# Just touch the files; their contents are irrelevant touch(data_folder / 'e.dat')
open(os.path.join(test_pkg, fname), 'w').close()
self.old_cwd = os.getcwd()
os.chdir(self.temp_dir)
def teardown_method(self, method): with tmpdir.as_cwd():
os.chdir(self.old_cwd) yield
shutil.rmtree(self.temp_dir)
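The hunk above replaces the setup_method/teardown_method pair with an autouse pytest fixture: the test tree is created inside pytest's tmpdir and tmpdir.as_cwd() keeps the working-directory change scoped to each test, so no manual cleanup is needed. The same pattern in a standalone form:

import pytest


class TestExample:
    @pytest.fixture(autouse=True)
    def source_dir(self, tmpdir):
        (tmpdir / 'data.txt').write_text(u'hello', encoding='utf-8')
        with tmpdir.as_cwd():
            yield  # each test method runs here, with tmpdir as the cwd

    def test_reads_relative_path(self):
        with open('data.txt') as f:
            assert f.read() == 'hello'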
def test_package_data_in_sdist(self): def test_package_data_in_sdist(self):
"""Regression test for pull request #4: ensures that files listed in """Regression test for pull request #4: ensures that files listed in
...@@ -175,14 +174,14 @@ class TestSdistTest: ...@@ -175,14 +174,14 @@ class TestSdistTest:
manifest = cmd.filelist.files manifest = cmd.filelist.files
assert 'setup.py' not in manifest assert 'setup.py' not in manifest
def test_defaults_case_sensitivity(self): def test_defaults_case_sensitivity(self, tmpdir):
""" """
Make sure default files (README.*, etc.) are added in a case-sensitive Make sure default files (README.*, etc.) are added in a case-sensitive
way to avoid problems with packages built on Windows. way to avoid problems with packages built on Windows.
""" """
open(os.path.join(self.temp_dir, 'readme.rst'), 'w').close() touch(tmpdir / 'readme.rst')
open(os.path.join(self.temp_dir, 'SETUP.cfg'), 'w').close() touch(tmpdir / 'SETUP.cfg')
dist = Distribution(SETUP_ATTRS) dist = Distribution(SETUP_ATTRS)
# the extension deliberately capitalized for this test # the extension deliberately capitalized for this test
...@@ -230,10 +229,6 @@ class TestSdistTest: ...@@ -230,10 +229,6 @@ class TestSdistTest:
u_contents = contents.decode('UTF-8') u_contents = contents.decode('UTF-8')
# The manifest should contain the UTF-8 filename # The manifest should contain the UTF-8 filename
if six.PY2:
fs_enc = sys.getfilesystemencoding()
filename = filename.decode(fs_enc)
assert posix(filename) in u_contents assert posix(filename) in u_contents
@py3_only @py3_only
...@@ -334,7 +329,7 @@ class TestSdistTest: ...@@ -334,7 +329,7 @@ class TestSdistTest:
cmd.read_manifest() cmd.read_manifest()
# The filelist should contain the UTF-8 filename # The filelist should contain the UTF-8 filename
if six.PY3: if not six.PY2:
filename = filename.decode('utf-8') filename = filename.decode('utf-8')
assert filename in cmd.filelist.files assert filename in cmd.filelist.files
...@@ -374,7 +369,7 @@ class TestSdistTest: ...@@ -374,7 +369,7 @@ class TestSdistTest:
@fail_on_latin1_encoded_filenames @fail_on_latin1_encoded_filenames
def test_sdist_with_utf8_encoded_filename(self): def test_sdist_with_utf8_encoded_filename(self):
# Test for #303. # Test for #303.
dist = Distribution(SETUP_ATTRS) dist = Distribution(self.make_strings(SETUP_ATTRS))
dist.script_name = 'setup.py' dist.script_name = 'setup.py'
cmd = sdist(dist) cmd = sdist(dist)
cmd.ensure_finalized() cmd.ensure_finalized()
...@@ -388,7 +383,7 @@ class TestSdistTest: ...@@ -388,7 +383,7 @@ class TestSdistTest:
if sys.platform == 'darwin': if sys.platform == 'darwin':
filename = decompose(filename) filename = decompose(filename)
if six.PY3: if not six.PY2:
fs_enc = sys.getfilesystemencoding() fs_enc = sys.getfilesystemencoding()
if sys.platform == 'win32': if sys.platform == 'win32':
...@@ -405,10 +400,19 @@ class TestSdistTest: ...@@ -405,10 +400,19 @@ class TestSdistTest:
else: else:
assert filename in cmd.filelist.files assert filename in cmd.filelist.files
@classmethod
def make_strings(cls, item):
if isinstance(item, dict):
return {
key: cls.make_strings(value) for key, value in item.items()}
if isinstance(item, list):
return list(map(cls.make_strings, item))
return str(item)
@fail_on_latin1_encoded_filenames @fail_on_latin1_encoded_filenames
def test_sdist_with_latin1_encoded_filename(self): def test_sdist_with_latin1_encoded_filename(self):
# Test for #303. # Test for #303.
dist = Distribution(SETUP_ATTRS) dist = Distribution(self.make_strings(SETUP_ATTRS))
dist.script_name = 'setup.py' dist.script_name = 'setup.py'
cmd = sdist(dist) cmd = sdist(dist)
cmd.ensure_finalized() cmd.ensure_finalized()
...@@ -421,7 +425,19 @@ class TestSdistTest: ...@@ -421,7 +425,19 @@ class TestSdistTest:
with quiet(): with quiet():
cmd.run() cmd.run()
if six.PY3: if six.PY2:
# Under Python 2 there seems to be no decoded string in the
# filelist. However, due to the decoding and re-encoding of the
# file name to build the UTF-8 manifest, the latin-1 name may be excluded
try:
# fs_enc should match how one expects the decoding to
# be performed for the manifest output.
fs_enc = sys.getfilesystemencoding()
filename.decode(fs_enc)
assert filename in cmd.filelist.files
except UnicodeDecodeError:
filename not in cmd.filelist.files
else:
# not all windows systems have a default FS encoding of cp1252 # not all windows systems have a default FS encoding of cp1252
if sys.platform == 'win32': if sys.platform == 'win32':
# Latin-1 is similar to Windows-1252 however # Latin-1 is similar to Windows-1252 however
...@@ -436,18 +452,36 @@ class TestSdistTest: ...@@ -436,18 +452,36 @@ class TestSdistTest:
# The Latin-1 filename should have been skipped # The Latin-1 filename should have been skipped
filename = filename.decode('latin-1') filename = filename.decode('latin-1')
filename not in cmd.filelist.files filename not in cmd.filelist.files
else:
# Under Python 2 there seems to be no decoded string in the def test_pyproject_toml_in_sdist(self, tmpdir):
# filelist. However, due to decode and encoding of the """
# file name to get utf-8 Manifest the latin1 maybe excluded Check if pyproject.toml is included in source distribution if present
try: """
# fs_enc should match how one is expect the decoding to touch(tmpdir / 'pyproject.toml')
# be proformed for the manifest output. dist = Distribution(SETUP_ATTRS)
fs_enc = sys.getfilesystemencoding() dist.script_name = 'setup.py'
filename.decode(fs_enc) cmd = sdist(dist)
assert filename in cmd.filelist.files cmd.ensure_finalized()
except UnicodeDecodeError: with quiet():
filename not in cmd.filelist.files cmd.run()
manifest = cmd.filelist.files
assert 'pyproject.toml' in manifest
def test_pyproject_toml_excluded(self, tmpdir):
"""
Check that pyproject.toml can be excluded even if present
"""
touch(tmpdir / 'pyproject.toml')
with open('MANIFEST.in', 'w') as mts:
print('exclude pyproject.toml', file=mts)
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
with quiet():
cmd.run()
manifest = cmd.filelist.files
assert 'pyproject.toml' not in manifest
def test_default_revctrl(): def test_default_revctrl():
......
...@@ -15,7 +15,7 @@ class TestEdit: ...@@ -15,7 +15,7 @@ class TestEdit:
def parse_config(filename): def parse_config(filename):
parser = configparser.ConfigParser() parser = configparser.ConfigParser()
with io.open(filename, encoding='utf-8') as reader: with io.open(filename, encoding='utf-8') as reader:
(parser.read_file if six.PY3 else parser.readfp)(reader) (parser.readfp if six.PY2 else parser.read_file)(reader)
return parser return parser
@staticmethod @staticmethod
......
...@@ -108,6 +108,11 @@ class TestDepends: ...@@ -108,6 +108,11 @@ class TestDepends:
assert not req.is_present() assert not req.is_present()
assert not req.is_current() assert not req.is_current()
@needs_bytecode
def test_require_present(self):
# In #1896, this test was failing for months with the only
# complaint coming from test runners (not end users).
# TODO: Evaluate if this code is needed at all.
req = Require('Tests', None, 'tests', homepage="http://example.com") req = Require('Tests', None, 'tests', homepage="http://example.com")
assert req.format is None assert req.format is None
assert req.attribute is None assert req.attribute is None
...@@ -223,10 +228,10 @@ class TestFeatures: ...@@ -223,10 +228,10 @@ class TestFeatures:
py_modules=['bar_et'], remove=['bar.ext'], py_modules=['bar_et'], remove=['bar.ext'],
), ),
'baz': Feature( 'baz': Feature(
"baz", optional=False, packages=['pkg.baz'], "baz", optional=False, packages=['pkg.baz'],
scripts=['scripts/baz_it'], scripts=['scripts/baz_it'],
libraries=[('libfoo', 'foo/foofoo.c')] libraries=[('libfoo', 'foo/foofoo.c')]
), ),
'dwim': Feature("DWIM", available=False, remove='bazish'), 'dwim': Feature("DWIM", available=False, remove='bazish'),
}, },
script_args=['--without-bar', 'install'], script_args=['--without-bar', 'install'],
......
...@@ -12,7 +12,7 @@ from setuptools.command.test import test ...@@ -12,7 +12,7 @@ from setuptools.command.test import test
from setuptools.dist import Distribution from setuptools.dist import Distribution
from .textwrap import DALS from .textwrap import DALS
from . import contexts
SETUP_PY = DALS(""" SETUP_PY = DALS("""
from setuptools import setup from setuptools import setup
......
...@@ -8,8 +8,6 @@ from pytest_fixture_config import yield_requires_config ...@@ -8,8 +8,6 @@ from pytest_fixture_config import yield_requires_config
import pytest_virtualenv import pytest_virtualenv
from setuptools.extern import six
from .textwrap import DALS from .textwrap import DALS
from .test_easy_install import make_nspkg_sdist from .test_easy_install import make_nspkg_sdist
...@@ -64,7 +62,7 @@ def _get_pip_versions(): ...@@ -64,7 +62,7 @@ def _get_pip_versions():
from urllib.request import urlopen from urllib.request import urlopen
from urllib.error import URLError from urllib.error import URLError
except ImportError: except ImportError:
from urllib2 import urlopen, URLError # Python 2.7 compat from urllib2 import urlopen, URLError # Python 2.7 compat
try: try:
urlopen('https://pypi.org', timeout=1) urlopen('https://pypi.org', timeout=1)
...@@ -77,12 +75,9 @@ def _get_pip_versions(): ...@@ -77,12 +75,9 @@ def _get_pip_versions():
'pip==10.0.1', 'pip==10.0.1',
'pip==18.1', 'pip==18.1',
'pip==19.0.1', 'pip==19.0.1',
'https://github.com/pypa/pip/archive/master.zip',
] ]
# Pip's master dropped support for 3.4.
if not six.PY34:
network_versions.append('https://github.com/pypa/pip/archive/master.zip')
versions = [None] + [ versions = [None] + [
pytest.param(v, **({} if network else {'marks': pytest.mark.skip})) pytest.param(v, **({} if network else {'marks': pytest.mark.skip}))
for v in network_versions for v in network_versions
...@@ -183,12 +178,16 @@ def _check_test_command_install_requirements(virtualenv, tmpdir): ...@@ -183,12 +178,16 @@ def _check_test_command_install_requirements(virtualenv, tmpdir):
)).format(tmpdir=tmpdir)) )).format(tmpdir=tmpdir))
assert tmpdir.join('success').check() assert tmpdir.join('success').check()
def test_test_command_install_requirements(virtualenv, tmpdir): def test_test_command_install_requirements(virtualenv, tmpdir):
# Ensure pip/wheel packages are installed. # Ensure pip/wheel packages are installed.
virtualenv.run("python -c \"__import__('pkg_resources').require(['pip', 'wheel'])\"") virtualenv.run(
"python -c \"__import__('pkg_resources').require(['pip', 'wheel'])\"")
_check_test_command_install_requirements(virtualenv, tmpdir) _check_test_command_install_requirements(virtualenv, tmpdir)
def test_test_command_install_requirements_when_using_easy_install(bare_virtualenv, tmpdir):
def test_test_command_install_requirements_when_using_easy_install(
bare_virtualenv, tmpdir):
_check_test_command_install_requirements(bare_virtualenv, tmpdir) _check_test_command_install_requirements(bare_virtualenv, tmpdir)
......
...@@ -125,11 +125,12 @@ def flatten_tree(tree): ...@@ -125,11 +125,12 @@ def flatten_tree(tree):
def format_install_tree(tree): def format_install_tree(tree):
return {x.format( return {
py_version=PY_MAJOR, x.format(
platform=get_platform(), py_version=PY_MAJOR,
shlib_ext=get_config_var('EXT_SUFFIX') or get_config_var('SO')) platform=get_platform(),
for x in tree} shlib_ext=get_config_var('EXT_SUFFIX') or get_config_var('SO'))
for x in tree}
def _check_wheel_install(filename, install_dir, install_tree_includes, def _check_wheel_install(filename, install_dir, install_tree_includes,
...@@ -455,7 +456,8 @@ WHEEL_INSTALL_TESTS = ( ...@@ -455,7 +456,8 @@ WHEEL_INSTALL_TESTS = (
id='empty_namespace_package', id='empty_namespace_package',
file_defs={ file_defs={
'foobar': { 'foobar': {
'__init__.py': "__import__('pkg_resources').declare_namespace(__name__)", '__init__.py':
"__import__('pkg_resources').declare_namespace(__name__)",
}, },
}, },
setup_kwargs=dict( setup_kwargs=dict(
...@@ -579,4 +581,5 @@ def test_wheel_is_compatible(monkeypatch): ...@@ -579,4 +581,5 @@ def test_wheel_is_compatible(monkeypatch):
for t in parse_tag('cp36-cp36m-manylinux1_x86_64'): for t in parse_tag('cp36-cp36m-manylinux1_x86_64'):
yield t yield t
monkeypatch.setattr('setuptools.wheel.sys_tags', sys_tags) monkeypatch.setattr('setuptools.wheel.sys_tags', sys_tags)
assert Wheel('onnxruntime-0.1.2-cp36-cp36m-manylinux1_x86_64.whl').is_compatible() assert Wheel(
'onnxruntime-0.1.2-cp36-cp36m-manylinux1_x86_64.whl').is_compatible()
...@@ -77,7 +77,8 @@ class Wheel: ...@@ -77,7 +77,8 @@ class Wheel:
def is_compatible(self): def is_compatible(self):
'''Is the wheel compatible with the current platform?''' '''Is the wheel compatible with the current platform?'''
supported_tags = set((t.interpreter, t.abi, t.platform) for t in sys_tags()) supported_tags = set(
(t.interpreter, t.abi, t.platform) for t in sys_tags())
return next((True for t in self.tags() if t in supported_tags), False) return next((True for t in self.tags() if t in supported_tags), False)
def egg_name(self): def egg_name(self):
......
import os import os
import shutil
import subprocess import subprocess
import sys import sys
from glob import glob
VIRTUAL_ENV = os.environ['VIRTUAL_ENV']
TOX_PIP_DIR = os.path.join(VIRTUAL_ENV, 'pip') def remove_setuptools():
"""
Remove setuptools from the current environment.
"""
print("Removing setuptools")
cmd = [sys.executable, '-m', 'pip', 'uninstall', '-y', 'setuptools']
# set cwd to something other than '.' to avoid detecting
# '.' as the installed package.
subprocess.check_call(cmd, cwd='.tox')
def bootstrap():
print("Running bootstrap")
cmd = [sys.executable, '-m', 'bootstrap']
subprocess.check_call(cmd)
def pip(args): def pip(args):
# First things first, get a recent (stable) version of pip. # Honor requires-python when installing test suite dependencies
if not os.path.exists(TOX_PIP_DIR): if any('-r' in arg for arg in args):
subprocess.check_call([sys.executable, '-m', 'pip', os.environ['PIP_IGNORE_REQUIRES_PYTHON'] = '0'
'--disable-pip-version-check',
'install', '-t', TOX_PIP_DIR, if '.' in args:
'pip']) remove_setuptools()
shutil.rmtree(glob(os.path.join(TOX_PIP_DIR, 'pip-*.dist-info'))[0]) bootstrap()
# And use that version.
pypath = os.environ.get('PYTHONPATH') cmd = [sys.executable, '-m', 'pip'] + args
pypath = pypath.split(os.pathsep) if pypath is not None else [] subprocess.check_call(cmd)
pypath.insert(0, TOX_PIP_DIR)
os.environ['PYTHONPATH'] = os.pathsep.join(pypath)
# Disable PEP 517 support when using editable installs.
for n, a in enumerate(args):
if not a.startswith('-'):
if a in 'install' and '-e' in args[n:]:
args.insert(n + 1, '--no-use-pep517')
break
# Fix call for setuptools editable install.
for n, a in enumerate(args):
if a == '.':
args[n] = os.getcwd()
subprocess.check_call([sys.executable, '-m', 'pip'] + args, cwd=TOX_PIP_DIR)
if __name__ == '__main__': if __name__ == '__main__':
......
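The rewritten tools/tox_pip.py above no longer maintains a private copy of pip: it shells straight out to python -m pip, re-enables requires-python checks when a requirements file is installed (PIP_IGNORE_REQUIRES_PYTHON=0), and, when tox installs the project itself ('.'), first uninstalls setuptools and regenerates metadata via python -m bootstrap. Assuming its elided __main__ block forwards sys.argv[1:] to pip(), the install_command in the tox.ini below amounts to a call like this hypothetical one:

import subprocess
import sys

subprocess.check_call([
    sys.executable, 'tools/tox_pip.py',
    'install', '-r', 'setuptools/tests/requirements.txt',
])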
# Note: Run "python bootstrap.py" before running Tox, to generate metadata.
#
# To run Tox against all supported Python interpreters, you can set: # To run Tox against all supported Python interpreters, you can set:
# #
# export TOXENV='py27,py3{4,5,6},pypy,pypy3' # export TOXENV='py3{5,6,7,8},pypy,pypy3'
[tox] [tox]
envlist=python envlist=python
minversion = 3.2
requires =
tox-pip-version >= 0.0.6
[helpers] [helpers]
# Wrapper for calls to pip that make sure the version being used is a # Custom pip behavior
# up-to-date, and to prevent the current working directory from being
# added to `sys.path`.
pip = python {toxinidir}/tools/tox_pip.py pip = python {toxinidir}/tools/tox_pip.py
[testenv] [testenv]
deps=-r{toxinidir}/tests/requirements.txt deps=-r{toxinidir}/setuptools/tests/requirements.txt
pip_version = pip
install_command = {[helpers]pip} install {opts} {packages} install_command = {[helpers]pip} install {opts} {packages}
list_dependencies_command = {[helpers]pip} freeze --all list_dependencies_command = {[helpers]pip} freeze --all
setenv=COVERAGE_FILE={toxworkdir}/.coverage.{envname} setenv =
COVERAGE_FILE={toxworkdir}/.coverage.{envname}
py{27,py2}: PIP_IGNORE_REQUIRES_PYTHON=true
# TODO: The passed environment variables came from copying other tox.ini files # TODO: The passed environment variables came from copying other tox.ini files
# These should probably be individually annotated to explain what needs them. # These should probably be individually annotated to explain what needs them.
passenv=APPDATA HOMEDRIVE HOMEPATH windir APPVEYOR APPVEYOR_* CI CODECOV_* TRAVIS TRAVIS_* NETWORK_REQUIRED passenv=APPDATA HOMEDRIVE HOMEPATH windir APPVEYOR APPVEYOR_* CI CODECOV_* TRAVIS TRAVIS_* NETWORK_REQUIRED
...@@ -45,7 +47,7 @@ commands=codecov -X gcov --file {toxworkdir}/coverage.xml ...@@ -45,7 +47,7 @@ commands=codecov -X gcov --file {toxworkdir}/coverage.xml
deps = -r{toxinidir}/docs/requirements.txt deps = -r{toxinidir}/docs/requirements.txt
skip_install=True skip_install=True
commands = commands =
python {toxinidir}/bootstrap.py python -m bootstrap
sphinx-build -W -b html -d {envtmpdir}/doctrees docs docs/build/html sphinx-build -W -b html -d {envtmpdir}/doctrees docs docs/build/html
sphinx-build -W -b man -d {envtmpdir}/doctrees docs docs/build/man sphinx-build -W -b man -d {envtmpdir}/doctrees docs docs/build/man
......