Commit 6ee1a1ce authored by Johannes Reiff, committed by GitHub

Merge branch 'master' into pr-easyinstall

parents ec270f9e 73376585
[bumpversion]
current_version = 42.0.2
current_version = 45.1.0
commit = True
tag = True
......
[flake8]
exclude=
.tox
setuptools/_vendor,
pkg_resources/_vendor
ignore =
# W503 violates spec https://github.com/PyCQA/pycodestyle/issues/513
W503
# W504 has issues https://github.com/OCA/maintainer-quality-tools/issues/545
W504
setuptools/site-patch.py F821
setuptools/py*compat.py F811
name: >-
👷
Test suite
on:
push:
pull_request:
schedule:
- cron: 1 0 * * * # Run daily at 0:01 UTC
jobs:
tests:
name: >-
${{ matrix.python-version }}
/
${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
# max-parallel: 5
matrix:
python-version:
- 3.8
- pypy3
- 3.7
- 3.6
- 3.5
os:
- ubuntu-latest
- ubuntu-16.04
- macOS-latest
# - windows-2019
# - windows-2016
env:
NETWORK_REQUIRED: 1
TOX_PARALLEL_NO_SPINNER: 1
TOXENV: python
steps:
- uses: actions/checkout@master
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1.1.1
with:
python-version: ${{ matrix.python-version }}
- name: Log Python version
run: >-
python --version
- name: Log Python location
run: >-
which python
- name: Log Python env
run: >-
python -m sysconfig
- name: Pip cache
uses: actions/cache@v1
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('docs/requirements.txt') }}-${{ hashFiles('tests/requirements.txt') }}-${{ hashFiles('tox.ini') }}
restore-keys: |
${{ runner.os }}-pip-
${{ runner.os }}-
- name: Upgrade pip/setuptools/wheel
run: >-
python
-m pip install
--disable-pip-version-check
--upgrade
pip setuptools wheel
- name: Install tox
run: >-
python -m pip install --upgrade tox tox-venv
- name: Log installed dists
run: >-
python -m pip freeze --all
- name: Adjust TOXENV for PyPy
if: startsWith(matrix.python-version, 'pypy')
run: >-
echo "::set-env name=TOXENV::${{ matrix.python-version }}"
- name: Log env vars
run: >-
env
- name: Verify that there are no cached Python modules in sources
if: >-
! startsWith(matrix.os, 'windows-')
run: >-
! grep pyc setuptools.egg-info/SOURCES.txt
- name: 'Initialize tox envs: ${{ matrix.env.TOXENV }}'
run: >-
python -m
tox
--parallel auto
--notest
--skip-missing-interpreters false
- name: Test with tox
run: >-
python -m
tox
--parallel auto
--
--cov
......@@ -6,13 +6,11 @@ jobs:
include:
- &latest_py2
python: 2.7
env: TOXENV=py27
- <<: *latest_py2
env: LANG=C
- python: pypy
env: DISABLE_COVERAGE=1 # Don't run coverage on pypy (too slow).
env: LANG=C TOXENV=py27
- python: pypy3
env: DISABLE_COVERAGE=1
- python: 3.4
env: DISABLE_COVERAGE=1 # Don't run coverage on pypy (too slow).
- python: 3.5
- python: 3.6
- python: 3.7
......@@ -42,8 +40,6 @@ install:
- pip freeze --all
- env
# update egg_info based on setup.py in checkout
- python bootstrap.py
- "! grep pyc setuptools.egg-info/SOURCES.txt"
script:
......
v45.1.0
-------
* #1458: Add minimum sunset date and preamble to Python 2 warning.
* #1704: Set sys.argv[0] in setup script run by build_meta.__legacy__
* #1974: Add Python 3 Only Trove Classifier and remove universal wheel declaration for more complete transition from Python 2.
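(For reference, a sketch of what the #1974 entry amounts to in ``setup.cfg``, mirroring the classifier hunk later in this commit, with the other options elided::

    classifiers =
        Programming Language :: Python :: 3 :: Only

together with removal of the old ``[bdist_wheel] universal = 1`` declaration.)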
v45.0.0
-------
* #1458: Drop support for Python 2. Setuptools now requires Python 3.5 or later. Install setuptools using pip >=9 or pin to Setuptools <45 to maintain 2.7 support.
* #1959: Fix for Python 4: replace unsafe six.PY3 with six.PY2
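(A minimal example of the pinning suggested in #1458 above, for environments that must stay on Python 2 and already have pip 9 or later::

    pip install "setuptools<45"

The quotes keep the shell from treating ``<`` as a redirect.)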
v44.0.0
-------
* #1908: Drop support for Python 3.4.
v43.0.0
-------
* #1634: Include ``pyproject.toml`` in source distribution by default. Projects relying on the previous behavior where ``pyproject.toml`` was excluded by default should stop relying on that behavior or add ``exclude pyproject.toml`` to their MANIFEST.in file.
* #1927: Setuptools once again declares 'setuptools' in the ``build-system.requires`` and adds PEP 517 build support by declaring itself as the ``build-backend``. It additionally specifies ``build-system.backend-path`` to rely on itself for those builders that support it.
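For projects that relied on the previous behavior, the opt-out mentioned in #1634 above is a single ``MANIFEST.in`` line::

    exclude pyproject.toml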
v42.0.2
-------
......
......@@ -9,8 +9,8 @@ environment:
matrix:
- APPVEYOR_JOB_NAME: "python36-x64"
PYTHON: "C:\\Python36-x64"
- APPVEYOR_JOB_NAME: "python27-x64"
PYTHON: "C:\\Python27-x64"
- APPVEYOR_JOB_NAME: "python37-x64"
PYTHON: "C:\\Python37-x64"
install:
# symlink python from a directory with a space
......@@ -28,7 +28,6 @@ test_script:
- python -m pip install --disable-pip-version-check --upgrade pip setuptools wheel
- pip install --upgrade tox tox-venv virtualenv
- pip freeze --all
- python bootstrap.py
- tox -- --cov
after_test:
......
# Create the project in Azure with:
# az devops project create --name $name --organization https://dev.azure.com/$org/ --visibility public
# then configure the pipelines (through web UI)
trigger:
branches:
include:
- '*'
tags:
include:
- '*'
pool:
vmimage: 'Ubuntu-18.04'
variables:
- group: Azure secrets
stages:
- stage: Test
jobs:
- job: 'Test'
strategy:
matrix:
Python36:
python.version: '3.6'
Python38:
python.version: '3.8'
maxParallel: 4
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '$(python.version)'
architecture: 'x64'
- script: python -m pip install tox
displayName: 'Install tox'
- script: |
tox -- --junit-xml=test-results.xml
displayName: 'run tests'
- task: PublishTestResults@2
inputs:
testResultsFiles: '**/test-results.xml'
testRunTitle: 'Python $(python.version)'
condition: succeededOrFailed()
- stage: Publish
dependsOn: Test
jobs:
- job: 'Publish'
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '3.8'
architecture: 'x64'
- script: python -m pip install tox
displayName: 'Install tox'
- script: |
tox -e release
env:
TWINE_PASSWORD: $(PyPI-token)
displayName: 'publish to PyPI'
condition: contains(variables['Build.SourceBranch'], 'tags')
......@@ -25,6 +25,7 @@ minimal_egg_info = textwrap.dedent("""
entry_points = setuptools.dist:check_entry_points
[egg_info.writers]
PKG-INFO = setuptools.command.egg_info:write_pkg_info
dependency_links.txt = setuptools.command.egg_info:overwrite_arg
entry_points.txt = setuptools.command.egg_info:write_entries
requires.txt = setuptools.command.egg_info:write_requirements
......@@ -35,10 +36,11 @@ def ensure_egg_info():
if os.path.exists('setuptools.egg-info'):
return
print("adding minimal entry_points")
build_egg_info()
add_minimal_info()
run_egg_info()
def build_egg_info():
def add_minimal_info():
"""
Build a minimal egg-info, enough to invoke egg_info
"""
......@@ -52,13 +54,6 @@ def run_egg_info():
cmd = [sys.executable, 'setup.py', 'egg_info']
print("Regenerating egg_info")
subprocess.check_call(cmd)
print("...and again.")
subprocess.check_call(cmd)
def main():
ensure_egg_info()
run_egg_info()
__name__ == '__main__' and main()
__name__ == '__main__' and ensure_egg_info()
Fixed defect in _imp, introduced in 41.6.0 when the 'tests' directory is not present.
Add flake8-2020 to check for misuse of sys.version or sys.version_info.
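To illustrate the kind of misuse that check targets, a hedged sketch (not part of this commit) of a version test that silently breaks on Python 3.10, because ``sys.version[:3]`` yields ``"3.1"`` there::

    import sys

    # flagged by flake8-2020: string-slicing sys.version misbehaves
    # once minor versions reach two digits
    if sys.version[:3] >= "3.6":
        pass

    # preferred: compare the version_info tuple instead
    if sys.version_info >= (3, 6):
        pass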
# -*- coding: utf-8 -*-
#
# Setuptools documentation build configuration file, created by
# sphinx-quickstart on Fri Jul 17 14:22:37 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import subprocess
import sys
import os
......@@ -26,14 +6,12 @@ import os
# hack to run the bootstrap script so that jaraco.packaging.sphinx
# can invoke setup.py
'READTHEDOCS' in os.environ and subprocess.check_call(
[sys.executable, 'bootstrap.py'],
[sys.executable, '-m', 'bootstrap'],
cwd=os.path.join(os.path.dirname(__file__), os.path.pardir),
)
# -- General configuration -----------------------------------------------------
# -- General configuration --
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['jaraco.packaging.sphinx', 'rst.linker']
# Add any paths that contain templates here, relative to this directory.
......@@ -45,7 +23,8 @@ source_suffix = '.txt'
# The master toctree document.
master_doc = 'index'
# A list of glob-style patterns that should be excluded when looking for source files.
# A list of glob-style patterns that should be excluded
# when looking for source files.
exclude_patterns = ['requirements.txt']
# List of directories, relative to source directory, that shouldn't be searched
......@@ -55,7 +34,7 @@ exclude_trees = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output ---------------------------------------------------
# -- Options for HTML output --
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
......@@ -69,7 +48,10 @@ html_theme_path = ['_theme']
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {'index': ['relations.html', 'sourcelink.html', 'indexsidebar.html', 'searchbox.html']}
html_sidebars = {
'index': [
'relations.html', 'sourcelink.html', 'indexsidebar.html',
'searchbox.html']}
# If false, no module index is generated.
html_use_modindex = False
......@@ -77,14 +59,15 @@ html_use_modindex = False
# If false, no index is generated.
html_use_index = False
# -- Options for LaTeX output --------------------------------------------------
# -- Options for LaTeX output --
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Setuptools.tex', 'Setuptools Documentation',
'The fellowship of the packaging', 'manual'),
]
# (source start file, target name, title, author,
# documentclass [howto/manual]).
latex_documents = [(
'index', 'Setuptools.tex', 'Setuptools Documentation',
'The fellowship of the packaging', 'manual',
)]
link_files = {
'../CHANGES.rst': dict(
......
......@@ -104,12 +104,8 @@ from the command line after pushing a new branch.
Testing
-------
The primary tests are run using tox. To run the tests, first create the metadata
needed to run the tests::
$ python bootstrap.py
Then make sure you have tox installed, and invoke it::
The primary tests are run using tox. Make sure you have tox installed,
and invoke it::
$ tox
......
......@@ -41,7 +41,7 @@ Please see the `setuptools PyPI page <https://pypi.org/project/setuptools/>`_
for download links and basic installation instructions for each of the
supported platforms.
You will need at least Python 3.4 or 2.7. An ``easy_install`` script will be
You will need at least Python 3.5 or 2.7. An ``easy_install`` script will be
installed in the normal location for Python scripts on your platform.
Note that the instructions on the setuptools PyPI page assume that you are
......
......@@ -62,7 +62,7 @@ Installing ``setuptools``
To install the latest version of setuptools, use::
pip install -U setuptools
pip install --upgrade setuptools
Refer to `Installing Packages`_ guide for more information.
......@@ -88,7 +88,7 @@ packages in the directory where the setup.py lives. See the `Command
Reference`_ section below to see what commands you can give to this setup
script. For example, to produce a source distribution, simply invoke::
python setup.py sdist
setup.py sdist
Of course, before you release your project to PyPI, you'll want to add a bit
more information to your setup script to help people find or learn about your
......@@ -100,17 +100,17 @@ dependencies, and perhaps some data files and scripts::
name="HelloWorld",
version="0.1",
packages=find_packages(),
scripts=['say_hello.py'],
scripts=["say_hello.py"],
# Project uses reStructuredText, so ensure that the docutils get
# installed or upgraded on the target machine
install_requires=['docutils>=0.3'],
install_requires=["docutils>=0.3"],
package_data={
# If any package contains *.txt or *.rst files, include them:
'': ['*.txt', '*.rst'],
# And include any *.msg files found in the 'hello' package, too:
'hello': ['*.msg'],
"": ["*.txt", "*.rst"],
# And include any *.msg files found in the "hello" package, too:
"hello": ["*.msg"],
},
# metadata to display on PyPI
......@@ -125,7 +125,7 @@ dependencies, and perhaps some data files and scripts::
"Source Code": "https://code.example.com/HelloWorld/",
},
classifiers=[
'License :: OSI Approved :: Python Software Foundation License'
"License :: OSI Approved :: Python Software Foundation License"
]
# could also include long_description, download_url, etc.
......@@ -207,11 +207,11 @@ but here are a few tips that will keep you out of trouble in the corner cases:
to compare different version numbers::
>>> from pkg_resources import parse_version
>>> parse_version('1.9.a.dev') == parse_version('1.9a0dev')
>>> parse_version("1.9.a.dev") == parse_version("1.9a0dev")
True
>>> parse_version('2.1-rc2') < parse_version('2.1')
>>> parse_version("2.1-rc2") < parse_version("2.1")
True
>>> parse_version('0.6a9dev-r41475') < parse_version('0.6a9')
>>> parse_version("0.6a9dev-r41475") < parse_version("0.6a9")
True
Once you've decided on a version numbering scheme for your project, you can
......@@ -371,7 +371,7 @@ unless you need the associated ``setuptools`` feature.
imported. This argument is only useful if the project will be installed as
a zipfile, and there is a need to have all of the listed resources be
extracted to the filesystem *as a unit*. Resources listed here
should be '/'-separated paths, relative to the source root, so to list a
should be "/"-separated paths, relative to the source root, so to list a
resource ``foo.png`` in package ``bar.baz``, you would include the string
``bar/baz/foo.png`` in this argument.
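A minimal sketch of how that might look in a setup script — assuming the keyword being described here is ``eager_resources`` and reusing the hypothetical package and file from the paragraph above::

    setup(
        # other arguments here...
        zip_safe=True,
        eager_resources=["bar/baz/foo.png"],
    )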
......@@ -413,7 +413,7 @@ the same
directory as the setup script. Some projects use a ``src`` or ``lib``
directory as the root of their source tree, and those projects would of course
use ``"src"`` or ``"lib"`` as the first argument to ``find_packages()``. (And
such projects also need something like ``package_dir={'':'src'}`` in their
such projects also need something like ``package_dir={"": "src"}`` in their
``setup()`` arguments, but that's just a normal distutils thing.)
Anyway, ``find_packages()`` walks the target directory, filtering by inclusion
......@@ -480,7 +480,7 @@ top-level package called ``tests``! One way to avoid this problem is to use the
setup(
name="namespace.mypackage",
version="0.1",
packages=find_namespace_packages(include=['namespace.*'])
packages=find_namespace_packages(include=["namespace.*"])
)
Another option is to use the "src" layout, where all package code is placed in
......@@ -500,8 +500,8 @@ With this layout, the package directory is specified as ``src``, as such::
setup(name="namespace.mypackage",
version="0.1",
package_dir={'': 'src'},
packages=find_namespace_packages(where='src'))
package_dir={"": "src"},
packages=find_namespace_packages(where="src"))
.. _PEP 420: https://www.python.org/dev/peps/pep-0420/
......@@ -526,12 +526,12 @@ script called ``baz``, you might do something like this::
setup(
# other arguments here...
entry_points={
'console_scripts': [
'foo = my_package.some_module:main_func',
'bar = other_module:some_func',
"console_scripts": [
"foo = my_package.some_module:main_func",
"bar = other_module:some_func",
],
'gui_scripts': [
'baz = my_package_gui:start_func',
"gui_scripts": [
"baz = my_package_gui:start_func",
]
}
)
......@@ -567,8 +567,8 @@ as the following::
setup(
# other arguments here...
entry_points={
'setuptools.installation': [
'eggsecutable = my_package.some_module:main_func',
"setuptools.installation": [
"eggsecutable = my_package.some_module:main_func",
]
}
)
......@@ -741,8 +741,8 @@ For example, let's say that Project A offers optional PDF and reST support::
name="Project-A",
...
extras_require={
'PDF': ["ReportLab>=1.2", "RXP"],
'reST': ["docutils>=0.3"],
"PDF": ["ReportLab>=1.2", "RXP"],
"reST": ["docutils>=0.3"],
}
)
......@@ -763,9 +763,9 @@ declare it like this, so that the "PDF" requirements are only resolved if the
name="Project-A",
...
entry_points={
'console_scripts': [
'rst2pdf = project_a.tools.pdfgen [PDF]',
'rst2html = project_a.tools.htmlgen',
"console_scripts": [
"rst2pdf = project_a.tools.pdfgen [PDF]",
"rst2html = project_a.tools.htmlgen",
# more script entry points ...
],
}
......@@ -801,8 +801,8 @@ setup to this::
name="Project-A",
...
extras_require={
'PDF': [],
'reST': ["docutils>=0.3"],
"PDF": [],
"reST": ["docutils>=0.3"],
}
)
......@@ -829,8 +829,8 @@ For example, here is a project that uses the ``enum`` module and ``pywin32``::
name="Project",
...
install_requires=[
'enum34;python_version<"3.4"',
'pywin32 >= 1.0;platform_system=="Windows"'
"enum34;python_version<'3.4'",
"pywin32 >= 1.0;platform_system=='Windows'"
]
)
......@@ -878,9 +878,9 @@ e.g.::
...
package_data={
# If any package contains *.txt or *.rst files, include them:
'': ['*.txt', '*.rst'],
# And include any *.msg files found in the 'hello' package, too:
'hello': ['*.msg'],
"": ["*.txt", "*.rst"],
# And include any *.msg files found in the "hello" package, too:
"hello": ["*.msg"],
}
)
......@@ -903,15 +903,15 @@ The setuptools setup file might look like this::
from setuptools import setup, find_packages
setup(
...
packages=find_packages('src'), # include all packages under src
package_dir={'':'src'}, # tell distutils packages are under src
packages=find_packages("src"), # include all packages under src
package_dir={"": "src"}, # tell distutils packages are under src
package_data={
# If any package contains *.txt files, include them:
'': ['*.txt'],
# And include any *.dat files found in the 'data' subdirectory
# of the 'mypkg' package, also:
'mypkg': ['data/*.dat'],
"": ["*.txt"],
# And include any *.dat files found in the "data" subdirectory
# of the "mypkg" package, also:
"mypkg": ["data/*.dat"],
}
)
......@@ -926,7 +926,7 @@ converts slashes to appropriate platform-specific separators at build time.
If datafiles are contained in a subdirectory of a package that isn't a package
itself (no ``__init__.py``), then the subdirectory names (or ``*``) are required
in the ``package_data`` argument (as shown above with ``'data/*.dat'``).
in the ``package_data`` argument (as shown above with ``"data/*.dat"``).
When building an ``sdist``, the datafiles are also drawn from the
``package_name.egg-info/SOURCES.txt`` file, so make sure that this is removed if
......@@ -951,18 +951,18 @@ to do things like this::
from setuptools import setup, find_packages
setup(
...
packages=find_packages('src'), # include all packages under src
package_dir={'':'src'}, # tell distutils packages are under src
packages=find_packages("src"), # include all packages under src
package_dir={"": "src"}, # tell distutils packages are under src
include_package_data=True, # include everything in source control
# ...but exclude README.txt from all packages
exclude_package_data={'': ['README.txt']},
exclude_package_data={"": ["README.txt"]},
)
The ``exclude_package_data`` option is a dictionary mapping package names to
lists of wildcard patterns, just like the ``package_data`` option. And, just
as with that option, a key of ``''`` will apply the given pattern(s) to all
as with that option, a key of ``""`` will apply the given pattern(s) to all
packages. However, any files that match these patterns will be *excluded*
from installation, even if they were listed in ``package_data`` or were
included as a result of using ``include_package_data``.
......@@ -1000,11 +1000,11 @@ and Python Eggs. It is strongly recommended that, if you are using data files,
you should use the :ref:`ResourceManager API` of ``pkg_resources`` to access
them. The ``pkg_resources`` module is distributed as part of setuptools, so if
you're using setuptools to distribute your package, there is no reason not to
use its resource management API. See also `Accessing Package Resources`_ for
use its resource management API. See also `Importlib Resources`_ for
a quick example of converting code that uses ``__file__`` to use
``pkg_resources`` instead.
.. _Accessing Package Resources: http://peak.telecommunity.com/DevCenter/PythonEggs#accessing-package-resources
.. _Importlib Resources: https://docs.python.org/3/library/importlib.html#module-importlib.resources
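As a hedged sketch of such a conversion (the resource name is illustrative, reusing the ``hello`` package and ``*.msg`` data files from the earlier examples)::

    # before: build a filesystem path relative to __file__
    # path = os.path.join(os.path.dirname(__file__), "greeting.msg")

    # after: let pkg_resources locate the data, even inside a zipped egg
    import pkg_resources

    data = pkg_resources.resource_string("hello", "greeting.msg")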
Non-Package Data Files
......@@ -1096,12 +1096,12 @@ for our hypothetical blogging tool::
setup(
# ...
entry_points={'blogtool.parsers': '.rst = some_module:SomeClass'}
entry_points={"blogtool.parsers": ".rst = some_module:SomeClass"}
)
setup(
# ...
entry_points={'blogtool.parsers': ['.rst = some_module:a_func']}
entry_points={"blogtool.parsers": [".rst = some_module:a_func"]}
)
setup(
......@@ -1199,7 +1199,7 @@ command; see the section on the `develop`_ command below for more details.
Note that you can also apply setuptools commands to non-setuptools projects,
using commands like this::
python -c "import setuptools; execfile('setup.py')" develop
python -c "import setuptools; with open('setup.py') as f: exec(compile(f.read(), 'setup.py', 'exec'))" develop
That is, you can simply list the normal setup commands and options following
the quoted part.
......@@ -1215,12 +1215,12 @@ Detailed instructions to distribute a setuptools project can be found at
Before you begin, make sure you have the latest versions of setuptools and wheel::
python3 -m pip install --user --upgrade setuptools wheel
pip install --upgrade setuptools wheel
To build a setuptools project, run this command from the same directory where
setup.py is located::
python3 setup.py sdist bdist_wheel
setup.py sdist bdist_wheel
This will generate distribution archives in the `dist` directory.
......@@ -1229,7 +1229,7 @@ https://test.pypi.org/account/register/. You will also need to verify your email
to be able to upload any packages.
You should install twine to be able to upload packages::
python3 -m pip install --user --upgrade setuptools wheel
pip install --upgrade twine
Now, to upload these archives, run::
......@@ -1237,7 +1237,7 @@ Now, to upload these archives, run::
To install your newly uploaded package ``example_pkg``, you can use pip::
python3 -m pip install --index-url https://test.pypi.org/simple/ example_pkg
pip install --index-url https://test.pypi.org/simple/ example_pkg
If you have issues at any point, please refer to `Packaging project tutorials`_
for clarification.
......@@ -1309,7 +1309,7 @@ participates in. For example, the ZopeInterface project might do this::
setup(
# ...
namespace_packages=['zope']
namespace_packages=["zope"]
)
because it contains a ``zope.interface`` package that lives in the ``zope``
......@@ -1327,7 +1327,7 @@ packages' ``__init__.py`` files (and the ``__init__.py`` of any parent
packages), in a normal Python package layout. These ``__init__.py`` files
*must* contain the line::
__import__('pkg_resources').declare_namespace(__name__)
__import__("pkg_resources").declare_namespace(__name__)
This code ensures that the namespace package machinery is operating and that
the current package is registered as a namespace package.
......@@ -1410,7 +1410,7 @@ pattern. So, you can use a command line like::
setup.py egg_info -rbDEV bdist_egg rotate -m.egg -k3
to build an egg whose version info includes 'DEV-rNNNN' (where NNNN is the
to build an egg whose version info includes "DEV-rNNNN" (where NNNN is the
most recent Subversion revision that affected the source tree), and then
delete any egg files from the distribution directory except for the three
that were built most recently.
......@@ -1469,7 +1469,7 @@ tagging the release, so the trunk will still produce development snapshots.
Alternately, if you are not branching for releases, you can override the
default version options on the command line, using something like::
python setup.py egg_info -Db "" sdist bdist_egg
setup.py egg_info -Db "" sdist bdist_egg
The first part of this command (``egg_info -Db ""``) will override the
configured tag information, before creating source and binary eggs. Thus, these
......@@ -1479,11 +1479,11 @@ build designation string.
Of course, if you will be doing this a lot, you may wish to create a personal
alias for this operation, e.g.::
python setup.py alias -u release egg_info -Db ""
setup.py alias -u release egg_info -Db ""
You can then use it like this::
python setup.py release sdist bdist_egg
setup.py release sdist bdist_egg
Or of course you can create more elaborate aliases that do all of the above.
See the sections below on the `egg_info`_ and `alias`_ commands for more ideas.
......@@ -1500,7 +1500,7 @@ To ensure Cython is available, include Cython in the build-requires section
of your pyproject.toml::
[build-system]
requires=[..., 'cython']
requires=[..., "cython"]
Built with pip 10 or later, that declaration is sufficient to include Cython
in the build. For broader compatibility, declare the dependency in your
......@@ -1800,7 +1800,7 @@ to support "daily builds" or "snapshot" releases. It is run automatically by
the ``sdist``, ``bdist_egg``, ``develop``, and ``test`` commands in order to
update the project's metadata, but you can also specify it explicitly in order
to temporarily change the project's version string while executing other
commands. (It also generates the``.egg-info/SOURCES.txt`` manifest file, which
commands. (It also generates the ``.egg-info/SOURCES.txt`` manifest file, which
is used when you are building source distributions.)
In addition to writing the core egg metadata defined by ``setuptools`` and
......@@ -1848,7 +1848,7 @@ binary distributions of your project, you should first make sure that you know
how the resulting version numbers will be interpreted by automated tools
like pip. See the section above on `Specifying Your Project's Version`_ for an
explanation of pre- and post-release tags, as well as tips on how to choose and
verify a versioning scheme for your your project.)
verify a versioning scheme for your project.)
For advanced uses, there is one other option that can be set, to change the
location of the project's ``.egg-info`` directory. Commands that need to find
......@@ -1873,12 +1873,12 @@ Other ``egg_info`` Options
Creating a dated "nightly build" snapshot egg::
python setup.py egg_info --tag-date --tag-build=DEV bdist_egg
setup.py egg_info --tag-date --tag-build=DEV bdist_egg
Creating a release with no version tags, even if some default tags are
specified in ``setup.cfg``::
python setup.py egg_info -RDb "" sdist bdist_egg
setup.py egg_info -RDb "" sdist bdist_egg
(Notice that ``egg_info`` must always appear on the command line *before* any
commands that you want the version changes to apply to.)
......@@ -2351,7 +2351,7 @@ parsing ``metadata`` and ``options`` sections into a dictionary.
from setuptools.config import read_configuration
conf_dict = read_configuration('/home/user/dev/package/setup.cfg')
conf_dict = read_configuration("/home/user/dev/package/setup.cfg")
By default, ``read_configuration()`` will read only the file provided
......@@ -2531,7 +2531,7 @@ a file. Here's what the writer utility looks like::
argname = os.path.splitext(basename)[0]
value = getattr(cmd.distribution, argname, None)
if value is not None:
value = '\n'.join(value) + '\n'
value = "\n".join(value) + "\n"
cmd.write_or_delete_file(argname, filename, value)
As you can see, ``egg_info.writers`` entry points must be a function taking
......
......@@ -83,13 +83,14 @@ __import__('pkg_resources.extern.packaging.version')
__import__('pkg_resources.extern.packaging.specifiers')
__import__('pkg_resources.extern.packaging.requirements')
__import__('pkg_resources.extern.packaging.markers')
__import__('pkg_resources.py2_warn')
__metaclass__ = type
if (3, 0) < sys.version_info < (3, 4):
raise RuntimeError("Python 3.4 or later is required")
if (3, 0) < sys.version_info < (3, 5):
raise RuntimeError("Python 3.5 or later is required")
if six.PY2:
# Those builtin exceptions are only defined in Python 3
......@@ -2328,7 +2329,8 @@ register_namespace_handler(object, null_ns_handler)
def normalize_path(filename):
"""Normalize a file/dir name for comparison purposes"""
return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
return os.path.normcase(os.path.realpath(os.path.normpath(
_cygwin_patch(filename))))
def _cygwin_patch(filename): # pragma: nocover
......@@ -3287,6 +3289,7 @@ def _initialize_master_working_set():
list(map(working_set.add_entry, sys.path))
globals().update(locals())
class PkgResourcesDeprecationWarning(Warning):
"""
Base class for warning about deprecations in ``pkg_resources``
......
import sys
import warnings
import textwrap
msg = textwrap.dedent("""
You are running Setuptools on Python 2, which is no longer
supported and
>>> SETUPTOOLS WILL STOP WORKING <<<
in a subsequent release (no sooner than 2020-04-20).
Please ensure you are installing
Setuptools using pip 9.x or later or pin to `setuptools<45`
in your environment.
If you have done those things and are still encountering
this message, please comment in
https://github.com/pypa/setuptools/issues/1458
about the steps that led to this unsupported combination.
""")
pre = "Setuptools will stop working on Python 2\n"
sys.version_info < (3,) and warnings.warn(pre + "*" * 60 + msg + "*" * 60)
......@@ -17,7 +17,9 @@ try:
except ImportError:
import mock
from pkg_resources import DistInfoDistribution, Distribution, EggInfoDistribution
from pkg_resources import (
DistInfoDistribution, Distribution, EggInfoDistribution,
)
from setuptools.extern import six
from pkg_resources.extern.six.moves import map
from pkg_resources.extern.six import text_type, string_types
......@@ -279,8 +281,8 @@ def make_distribution_no_version(tmpdir, basename):
('dist-info', 'METADATA', DistInfoDistribution),
],
)
def test_distribution_version_missing(tmpdir, suffix, expected_filename,
expected_dist_type):
def test_distribution_version_missing(
tmpdir, suffix, expected_filename, expected_dist_type):
"""
Test Distribution.version when the "Version" header is missing.
"""
......
......@@ -15,7 +15,7 @@ import pkg_resources
from pkg_resources import (
parse_requirements, VersionConflict, parse_version,
Distribution, EntryPoint, Requirement, safe_version, safe_name,
WorkingSet, PkgResourcesDeprecationWarning)
WorkingSet)
# from Python 3.6 docs.
......@@ -501,7 +501,6 @@ class TestEntryPoints:
ep.load(require=False)
class TestRequirements:
def testBasics(self):
r = Requirement.parse("Twisted>=1.2")
......
[build-system]
requires = ["wheel"]
requires = ["setuptools >= 40.8", "wheel"]
build-backend = "setuptools.build_meta"
backend-path = ["."]
[tool.towncrier]
package = "setuptools"
......
[pytest]
addopts=--doctest-modules --doctest-glob=pkg_resources/api_tests.txt -r sxX
norecursedirs=dist build *.egg setuptools/extern pkg_resources/extern pkg_resources/tests/data tools .*
flake8-ignore =
setuptools/site-patch.py F821
setuptools/py*compat.py F811
addopts=--doctest-modules --flake8 --doctest-glob=pkg_resources/api_tests.txt -r sxX
norecursedirs=dist build *.egg setuptools/extern pkg_resources/extern pkg_resources/tests/data tools .* setuptools/_vendor pkg_resources/_vendor
doctest_optionflags=ELLIPSIS ALLOW_UNICODE
filterwarnings =
# https://github.com/pypa/setuptools/issues/1823
......
......@@ -14,12 +14,9 @@ repository = https://upload.pypi.org/legacy/
[sdist]
formats = zip
[bdist_wheel]
universal = 1
[metadata]
name = setuptools
version = 42.0.2
version = 45.1.0
description = Easily download, build, install, upgrade, and uninstall Python packages
author = Python Packaging Authority
author_email = distutils-sig@python.org
......@@ -35,10 +32,8 @@ classifiers =
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.4
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
......@@ -50,7 +45,7 @@ classifiers =
[options]
zip_safe = True
python_requires = >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
python_requires = >=3.5
py_modules = easy_install
packages = find:
......
"""Extensions to the 'distutils' for large or complex distributions"""
import os
import sys
import functools
import distutils.core
import distutils.filelist
......@@ -31,7 +30,7 @@ __all__ = [
]
if PY3:
__all__.append('find_namespace_packages')
__all__.append('find_namespace_packages')
__version__ = setuptools.version.__version__
......@@ -123,7 +122,7 @@ class PEP420PackageFinder(PackageFinder):
find_packages = PackageFinder.find
if PY3:
find_namespace_packages = PEP420PackageFinder.find
find_namespace_packages = PEP420PackageFinder.find
def _install_setup_requires(attrs):
......@@ -144,6 +143,7 @@ def setup(**attrs):
_install_setup_requires(attrs)
return distutils.core.setup(**attrs)
setup.__doc__ = distutils.core.setup.__doc__
......@@ -191,8 +191,8 @@ class Command(_Command):
ok = False
if not ok:
raise DistutilsOptionError(
"'%s' must be a list of strings (got %r)"
% (option, val))
"'%s' must be a list of strings (got %r)"
% (option, val))
def reinitialize_command(self, command, reinit_subcommands=0, **kw):
cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
......
......@@ -17,9 +17,18 @@ C_BUILTIN = 6
PY_FROZEN = 7
def find_spec(module, paths):
finder = (
importlib.machinery.PathFinder().find_spec
if isinstance(paths, list) else
importlib.util.find_spec
)
return finder(module, paths)
def find_module(module, paths=None):
"""Just like 'imp.find_module()', but with package support"""
spec = importlib.util.find_spec(module, paths)
spec = find_spec(module, paths)
if spec is None:
raise ImportError("Can't find %s" % module)
if not spec.has_location and hasattr(spec, 'submodule_search_locations'):
......@@ -60,14 +69,14 @@ def find_module(module, paths=None):
def get_frozen_object(module, paths=None):
spec = importlib.util.find_spec(module, paths)
spec = find_spec(module, paths)
if not spec:
raise ImportError("Can't find %s" % module)
return spec.loader.get_code(module)
def get_module(module, paths, info):
spec = importlib.util.find_spec(module, paths)
spec = find_spec(module, paths)
if not spec:
raise ImportError("Can't find %s" % module)
return module_from_spec(spec)
......@@ -25,7 +25,8 @@ def default_filter(src, dst):
return dst
def unpack_archive(filename, extract_dir, progress_filter=default_filter,
def unpack_archive(
filename, extract_dir, progress_filter=default_filter,
drivers=None):
"""Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
......@@ -148,7 +149,8 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
# resolve any links and to extract the link targets as normal
# files
while member is not None and (member.islnk() or member.issym()):
while member is not None and (
member.islnk() or member.issym()):
linkpath = member.linkname
if member.issym():
base = posixpath.dirname(member.name)
......
......@@ -48,6 +48,7 @@ __all__ = ['get_requires_for_build_sdist',
'__legacy__',
'SetupRequirementsError']
class SetupRequirementsError(BaseException):
def __init__(self, specifiers):
self.specifiers = specifiers
......@@ -143,7 +144,8 @@ class _BuildMetaBackend(object):
def get_requires_for_build_wheel(self, config_settings=None):
config_settings = self._fix_config(config_settings)
return self._get_build_requires(config_settings, requirements=['wheel'])
return self._get_build_requires(
config_settings, requirements=['wheel'])
def get_requires_for_build_sdist(self, config_settings=None):
config_settings = self._fix_config(config_settings)
......@@ -160,8 +162,10 @@ class _BuildMetaBackend(object):
dist_infos = [f for f in os.listdir(dist_info_directory)
if f.endswith('.dist-info')]
if (len(dist_infos) == 0 and
len(_get_immediate_subdirectories(dist_info_directory)) == 1):
if (
len(dist_infos) == 0 and
len(_get_immediate_subdirectories(dist_info_directory)) == 1
):
dist_info_directory = os.path.join(
dist_info_directory, os.listdir(dist_info_directory)[0])
......@@ -193,7 +197,8 @@ class _BuildMetaBackend(object):
config_settings["--global-option"])
self.run_setup()
result_basename = _file_with_extension(tmp_dist_dir, result_extension)
result_basename = _file_with_extension(
tmp_dist_dir, result_extension)
result_path = os.path.join(result_directory, result_basename)
if os.path.exists(result_path):
# os.rename will fail overwriting on non-Unix.
......@@ -202,7 +207,6 @@ class _BuildMetaBackend(object):
return result_basename
def build_wheel(self, wheel_directory, config_settings=None,
metadata_directory=None):
return self._build_with_temp_dir(['bdist_wheel'], '.whl',
......@@ -217,9 +221,12 @@ class _BuildMetaBackend(object):
class _BuildMetaLegacyBackend(_BuildMetaBackend):
"""Compatibility backend for setuptools
This is a version of setuptools.build_meta that endeavors to maintain backwards
compatibility with pre-PEP 517 modes of invocation. It exists as a temporary
bridge between the old packaging mechanism and the new packaging mechanism,
This is a version of setuptools.build_meta that endeavors
to maintain backwards
compatibility with pre-PEP 517 modes of invocation. It
exists as a temporary
bridge between the old packaging mechanism and the new
packaging mechanism,
and will eventually be removed.
"""
def run_setup(self, setup_script='setup.py'):
......@@ -232,6 +239,12 @@ class _BuildMetaLegacyBackend(_BuildMetaBackend):
if script_dir not in sys.path:
sys.path.insert(0, script_dir)
# Some setup.py scripts (e.g. in pygame and numpy) use sys.argv[0] to
# get the directory of the source code. They expect it to refer to the
# setup.py script.
sys_argv_0 = sys.argv[0]
sys.argv[0] = setup_script
try:
super(_BuildMetaLegacyBackend,
self).run_setup(setup_script=setup_script)
......@@ -242,6 +255,8 @@ class _BuildMetaLegacyBackend(_BuildMetaBackend):
# the original path so that the path manipulation does not persist
# within the hook after run_setup is called.
sys.path[:] = sys_path
sys.argv[0] = sys_argv_0
# The primary backend
_BACKEND = _BuildMetaBackend()
......
......@@ -25,9 +25,9 @@ class build_clib(orig.build_clib):
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError(
"in 'libraries' option (library '%s'), "
"'sources' must be present and must be "
"a list of source filenames" % lib_name)
"in 'libraries' option (library '%s'), "
"'sources' must be present and must be "
"a list of source filenames" % lib_name)
sources = list(sources)
log.info("building '%s' library", lib_name)
......@@ -38,9 +38,9 @@ class build_clib(orig.build_clib):
obj_deps = build_info.get('obj_deps', dict())
if not isinstance(obj_deps, dict):
raise DistutilsSetupError(
"in 'libraries' option (library '%s'), "
"'obj_deps' must be a dictionary of "
"type 'source: list'" % lib_name)
"in 'libraries' option (library '%s'), "
"'obj_deps' must be a dictionary of "
"type 'source: list'" % lib_name)
dependencies = []
# Get the global dependencies that are specified by the '' key.
......@@ -48,9 +48,9 @@ class build_clib(orig.build_clib):
global_deps = obj_deps.get('', list())
if not isinstance(global_deps, (list, tuple)):
raise DistutilsSetupError(
"in 'libraries' option (library '%s'), "
"'obj_deps' must be a dictionary of "
"type 'source: list'" % lib_name)
"in 'libraries' option (library '%s'), "
"'obj_deps' must be a dictionary of "
"type 'source: list'" % lib_name)
# Build the list to be used by newer_pairwise_group
# each source will be auto-added to its dependencies.
......@@ -60,39 +60,42 @@ class build_clib(orig.build_clib):
extra_deps = obj_deps.get(source, list())
if not isinstance(extra_deps, (list, tuple)):
raise DistutilsSetupError(
"in 'libraries' option (library '%s'), "
"'obj_deps' must be a dictionary of "
"type 'source: list'" % lib_name)
"in 'libraries' option (library '%s'), "
"'obj_deps' must be a dictionary of "
"type 'source: list'" % lib_name)
src_deps.extend(extra_deps)
dependencies.append(src_deps)
expected_objects = self.compiler.object_filenames(
sources,
output_dir=self.build_temp
)
sources,
output_dir=self.build_temp,
)
if newer_pairwise_group(dependencies, expected_objects) != ([], []):
if (
newer_pairwise_group(dependencies, expected_objects)
!= ([], [])
):
# First, compile the source code to object files in the library
# directory. (This should probably change to putting object
# files in a temporary build directory.)
macros = build_info.get('macros')
include_dirs = build_info.get('include_dirs')
cflags = build_info.get('cflags')
objects = self.compiler.compile(
sources,
output_dir=self.build_temp,
macros=macros,
include_dirs=include_dirs,
extra_postargs=cflags,
debug=self.debug
)
self.compiler.compile(
sources,
output_dir=self.build_temp,
macros=macros,
include_dirs=include_dirs,
extra_postargs=cflags,
debug=self.debug
)
# Now "link" the object files together into a static library.
# (On Unix at least, this isn't really linking -- it just
# builds an archive. Whatever.)
self.compiler.create_static_lib(
expected_objects,
lib_name,
output_dir=self.build_clib,
debug=self.debug
)
expected_objects,
lib_name,
output_dir=self.build_clib,
debug=self.debug
)
......@@ -14,7 +14,8 @@ from setuptools.extern import six
if six.PY2:
import imp
EXTENSION_SUFFIXES = [s for s, _, tp in imp.get_suffixes() if tp == imp.C_EXTENSION]
EXTENSION_SUFFIXES = [
s for s, _, tp in imp.get_suffixes() if tp == imp.C_EXTENSION]
else:
from importlib.machinery import EXTENSION_SUFFIXES
......@@ -29,7 +30,7 @@ except ImportError:
# make sure _config_vars is initialized
get_config_var("LDSHARED")
from distutils.sysconfig import _config_vars as _CONFIG_VARS
from distutils.sysconfig import _config_vars as _CONFIG_VARS # noqa
def _customize_compiler_for_shlib(compiler):
......@@ -65,7 +66,9 @@ elif os.name != 'nt':
except ImportError:
pass
if_dl = lambda s: s if have_rtld else ''
def if_dl(s):
return s if have_rtld else ''
def get_abi3_suffix():
......@@ -113,7 +116,7 @@ class build_ext(_build_ext):
if fullname in self.ext_map:
ext = self.ext_map[fullname]
use_abi3 = (
six.PY3
not six.PY2
and getattr(ext, 'py_limited_api')
and get_abi3_suffix()
)
......
......@@ -108,7 +108,7 @@ class develop(namespaces.DevelopInstaller, easy_install):
return path_to_setup
def install_for_development(self):
if six.PY3 and getattr(self.distribution, 'use_2to3', False):
if not six.PY2 and getattr(self.distribution, 'use_2to3', False):
# If we run 2to3 we can not do this inplace:
# Ensure metadata is up-to-date
......
......@@ -121,7 +121,8 @@ else:
return False
_one_liner = lambda text: textwrap.dedent(text).strip().replace('\n', '; ')
def _one_liner(text):
return textwrap.dedent(text).strip().replace('\n', '; ')
class easy_install(Command):
......@@ -414,8 +415,8 @@ class easy_install(Command):
if show_deprecation:
self.announce(
"WARNING: The easy_install command is deprecated "
"and will be removed in a future version."
, log.WARN,
"and will be removed in a future version.",
log.WARN,
)
if self.verbose != self.distribution.verbose:
log.set_verbosity(self.verbose)
......@@ -508,13 +509,13 @@ class easy_install(Command):
the distutils default setting) was:
%s
""").lstrip()
""").lstrip() # noqa
__not_exists_id = textwrap.dedent("""
This directory does not currently exist. Please create it and try again, or
choose a different installation directory (using the -d or --install-dir
option).
""").lstrip()
""").lstrip() # noqa
__access_msg = textwrap.dedent("""
Perhaps your account does not have write access to this directory? If the
......@@ -530,7 +531,7 @@ class easy_install(Command):
https://setuptools.readthedocs.io/en/latest/easy_install.html
Please make the appropriate changes for your system and try again.
""").lstrip()
""").lstrip() # noqa
def cant_write_to_target(self):
msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,)
......@@ -1094,13 +1095,13 @@ class easy_install(Command):
pkg_resources.require("%(name)s") # latest installed version
pkg_resources.require("%(name)s==%(version)s") # this exact version
pkg_resources.require("%(name)s>=%(version)s") # this version or higher
""").lstrip()
""").lstrip() # noqa
__id_warning = textwrap.dedent("""
Note also that the installation directory must be on sys.path at runtime for
this to work. (e.g. by being the application's script directory, by being on
PYTHONPATH, or by being added to sys.path by your code.)
""")
""") # noqa
def installation_report(self, req, dist, what="Installed"):
"""Helpful installation message for display to package users"""
......@@ -1125,7 +1126,7 @@ class easy_install(Command):
%(python)s setup.py develop
See the setuptools documentation for the "develop" command for more info.
""").lstrip()
""").lstrip() # noqa
def report_editable(self, spec, setup_script):
dirname = os.path.dirname(setup_script)
......@@ -1308,7 +1309,8 @@ class easy_install(Command):
https://setuptools.readthedocs.io/en/latest/easy_install.html#custom-installation-locations
Please make the appropriate changes for your system and try again.""").lstrip()
Please make the appropriate changes for your system and try again.
""").strip()
def install_site_py(self):
"""Make sure there's a site.py in the target dir, if needed"""
......@@ -1564,7 +1566,7 @@ def get_exe_prefixes(exe_filename):
continue
if parts[0].upper() in ('PURELIB', 'PLATLIB'):
contents = z.read(name)
if six.PY3:
if not six.PY2:
contents = contents.decode()
for pth in yield_lines(contents):
pth = pth.strip().replace('\\', '/')
......@@ -2090,7 +2092,8 @@ class ScriptWriter:
@classmethod
def get_script_header(cls, script_text, executable=None, wininst=False):
# for backward compatibility
warnings.warn("Use get_header", EasyInstallDeprecationWarning, stacklevel=2)
warnings.warn(
"Use get_header", EasyInstallDeprecationWarning, stacklevel=2)
if wininst:
executable = "python.exe"
return cls.get_header(script_text, executable)
......@@ -2339,5 +2342,8 @@ def _patch_usage():
finally:
distutils.core.gen_usage = saved
class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning):
"""Class for warning about deprecations in EasyInstall in SetupTools. Not ignored by default, unlike DeprecationWarning."""
"""
Warning for EasyInstall deprecations, bypassing suppression.
"""
......@@ -33,6 +33,7 @@ from setuptools.glob import glob
from setuptools.extern import packaging
from setuptools import SetuptoolsDeprecationWarning
def translate_pattern(glob):
"""
Translate a file path glob like '*.txt' in to a regular expression.
......@@ -113,7 +114,7 @@ def translate_pattern(glob):
pat += sep
pat += r'\Z'
return re.compile(pat, flags=re.MULTILINE|re.DOTALL)
return re.compile(pat, flags=re.MULTILINE | re.DOTALL)
class InfoCommon:
......@@ -266,7 +267,7 @@ class egg_info(InfoCommon, Command):
to the file.
"""
log.info("writing %s to %s", what, filename)
if six.PY3:
if not six.PY2:
data = data.encode("utf-8")
if not self.dry_run:
f = open(filename, 'wb')
......@@ -637,7 +638,9 @@ def warn_depends_obsolete(cmd, basename, filename):
def _write_requirements(stream, reqs):
lines = yield_lines(reqs or ())
append_cr = lambda line: line + '\n'
def append_cr(line):
return line + '\n'
lines = map(append_cr, lines)
stream.writelines(lines)
......@@ -703,7 +706,8 @@ def get_pkg_info_revision():
Get a -r### off of PKG-INFO Version in case this is an sdist of
a subversion revision.
"""
warnings.warn("get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning)
warnings.warn(
"get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning)
if os.path.exists('PKG-INFO'):
with io.open('PKG-INFO') as f:
for line in f:
......@@ -714,4 +718,4 @@ def get_pkg_info_revision():
class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
"""Class for warning about deprecations in eggInfo in setupTools. Not ignored by default, unlike DeprecationWarning."""
"""Deprecated behavior warning for EggInfo, bypassing suppression."""
......@@ -77,7 +77,8 @@ class install_lib(orig.install_lib):
if not hasattr(sys, 'implementation'):
return
base = os.path.join('__pycache__', '__init__.' + sys.implementation.cache_tag)
base = os.path.join(
'__pycache__', '__init__.' + sys.implementation.cache_tag)
yield base + '.pyc'
yield base + '.pyo'
yield base + '.opt-1.pyc'
......
......@@ -132,5 +132,5 @@ class sdist_add_defaults:
if hasattr(sdist.sdist, '_add_defaults_standards'):
# disable the functionality already available upstream
class sdist_add_defaults:
class sdist_add_defaults: # noqa
pass
......@@ -121,19 +121,40 @@ class sdist(sdist_add_defaults, orig.sdist):
if has_leaky_handle:
read_template = __read_template_hack
def _add_defaults_optional(self):
if six.PY2:
sdist_add_defaults._add_defaults_optional(self)
else:
super()._add_defaults_optional()
if os.path.isfile('pyproject.toml'):
self.filelist.append('pyproject.toml')
def _add_defaults_python(self):
"""getting python files"""
if self.distribution.has_pure_modules():
build_py = self.get_finalized_command('build_py')
self.filelist.extend(build_py.get_source_files())
# This functionality is incompatible with include_package_data, and
# will in fact create an infinite recursion if include_package_data
# is True. Use of include_package_data will imply that
# distutils-style automatic handling of package_data is disabled
if not self.distribution.include_package_data:
for _, src_dir, _, filenames in build_py.data_files:
self.filelist.extend([os.path.join(src_dir, filename)
for filename in filenames])
self._add_data_files(self._safe_data_files(build_py))
def _safe_data_files(self, build_py):
"""
Extracting data_files from build_py is known to cause
infinite recursion errors when `include_package_data`
is enabled, so suppress it in that case.
"""
if self.distribution.include_package_data:
return ()
return build_py.data_files
def _add_data_files(self, data_files):
"""
Add data files as found in build_py.data_files.
"""
self.filelist.extend(
os.path.join(src_dir, name)
for _, src_dir, _, filenames in data_files
for name in filenames
)
def _add_defaults_data_files(self):
try:
......@@ -186,7 +207,7 @@ class sdist(sdist_add_defaults, orig.sdist):
manifest = open(self.manifest, 'rb')
for line in manifest:
# The manifest must contain UTF-8. See #303.
if six.PY3:
if not six.PY2:
try:
line = line.decode('UTF-8')
except UnicodeDecodeError:
......
......@@ -129,7 +129,8 @@ class test(Command):
@contextlib.contextmanager
def project_on_sys_path(self, include_dists=[]):
with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False)
with_2to3 = not six.PY2 and getattr(
self.distribution, 'use_2to3', False)
if with_2to3:
# If we run 2to3 we can not do this inplace:
......@@ -240,7 +241,7 @@ class test(Command):
# Purge modules under test from sys.modules. The test loader will
# re-import them from the build location. Required when 2to3 is used
# with namespace packages.
if six.PY3 and getattr(self.distribution, 'use_2to3', False):
if not six.PY2 and getattr(self.distribution, 'use_2to3', False):
module = self.test_suite.split('.')[0]
if module in _namespace_packages:
del_modules = []
......
......@@ -24,7 +24,7 @@ from .upload import upload
def _encode(s):
errors = 'surrogateescape' if six.PY3 else 'strict'
errors = 'strict' if six.PY2 else 'surrogateescape'
return s.encode('utf-8', errors)
......@@ -127,8 +127,8 @@ class upload_docs(upload):
"""
Build up the MIME payload for the POST data
"""
boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = b'\n--' + boundary
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = b'\n--' + boundary.encode('ascii')
end_boundary = sep_boundary + b'--'
end_items = end_boundary, b"\n",
builder = functools.partial(
......@@ -138,7 +138,7 @@ class upload_docs(upload):
part_groups = map(builder, data.items())
parts = itertools.chain.from_iterable(part_groups)
body_items = itertools.chain(parts, end_items)
content_type = 'multipart/form-data; boundary=%s' % boundary.decode('ascii')
content_type = 'multipart/form-data; boundary=%s' % boundary
return b''.join(body_items), content_type
def upload_file(self, filename):
......@@ -153,7 +153,7 @@ class upload_docs(upload):
# set up the authentication
credentials = _encode(self.username + ':' + self.password)
credentials = standard_b64encode(credentials)
if six.PY3:
if not six.PY2:
credentials = credentials.decode('ascii')
auth = "Basic " + credentials
......
from distutils.dep_util import newer_group
# yes, this was almost entirely copy-pasted from
# 'newer_pairwise()', this is just another convenience
# function.
......@@ -10,7 +11,8 @@ def newer_pairwise_group(sources_groups, targets):
of 'newer_group()'.
"""
if len(sources_groups) != len(targets):
raise ValueError("'sources_group' and 'targets' must be the same length")
raise ValueError(
"'sources_group' and 'targets' must be the same length")
# build a pair of lists (sources_groups, targets) where source is newer
n_sources = []
......
......@@ -162,7 +162,7 @@ def write_pkg_file(self, file):
if self.download_url:
write_field('Download-URL', self.download_url)
for project_url in self.project_urls.items():
write_field('Project-URL', '%s, %s' % project_url)
write_field('Project-URL', '%s, %s' % project_url)
long_desc = rfc822_escape(self.get_long_description())
write_field('Description', long_desc)
......@@ -571,7 +571,7 @@ class Distribution(_Distribution):
from setuptools.extern.six.moves.configparser import ConfigParser
# Ignore install directory options if we have a venv
if six.PY3 and sys.prefix != sys.base_prefix:
if not six.PY2 and sys.prefix != sys.base_prefix:
ignore_options = [
'install-base', 'install-platbase', 'install-lib',
'install-platlib', 'install-purelib', 'install-headers',
......@@ -593,7 +593,7 @@ class Distribution(_Distribution):
with io.open(filename, encoding='utf-8') as reader:
if DEBUG:
self.announce(" reading {filename}".format(**locals()))
(parser.read_file if six.PY3 else parser.readfp)(reader)
(parser.readfp if six.PY2 else parser.read_file)(reader)
for section in parser.sections():
options = parser.options(section)
opt_dict = self.get_option_dict(section)
......@@ -636,7 +636,7 @@ class Distribution(_Distribution):
Ref #1653
"""
if six.PY3:
if not six.PY2:
return val
try:
return val.encode()
......
......@@ -64,8 +64,8 @@ def fetch_build_egg(dist, req):
dist.announce(
'WARNING: The pip package is not available, falling back '
'to EasyInstall for handling setup_requires/test_requires; '
'this is deprecated and will be removed in a future version.'
, log.WARN
'this is deprecated and will be removed in a future version.',
log.WARN
)
return _legacy_fetch_build_egg(dist, req)
# Warn if wheel is not.
......
......@@ -544,7 +544,7 @@ class SystemInfo:
# Except for VS15+, VC version is aligned with VS version
self.vs_ver = self.vc_ver = (
vc_ver or self._find_latest_available_vs_ver())
vc_ver or self._find_latest_available_vs_ver())
def _find_latest_available_vs_ver(self):
"""
......@@ -1225,7 +1225,7 @@ class EnvironmentInfo:
arch_subdir = self.pi.target_dir(x64=True)
lib = join(self.si.WindowsSdkDir, 'lib')
libver = self._sdk_subdir
return [join(lib, '%sum%s' % (libver , arch_subdir))]
return [join(lib, '%sum%s' % (libver, arch_subdir))]
@property
def OSIncludes(self):
......@@ -1274,13 +1274,16 @@ class EnvironmentInfo:
libpath += [
ref,
join(self.si.WindowsSdkDir, 'UnionMetadata'),
join(ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'),
join(
ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'),
join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'),
join(ref,'Windows.Networking.Connectivity.WwanContract',
'1.0.0.0'),
join(self.si.WindowsSdkDir, 'ExtensionSDKs', 'Microsoft.VCLibs',
'%0.1f' % self.vs_ver, 'References', 'CommonConfiguration',
'neutral'),
join(
ref, 'Windows.Networking.Connectivity.WwanContract',
'1.0.0.0'),
join(
self.si.WindowsSdkDir, 'ExtensionSDKs', 'Microsoft.VCLibs',
'%0.1f' % self.vs_ver, 'References', 'CommonConfiguration',
'neutral'),
]
return libpath
......
......@@ -47,13 +47,17 @@ class Installer:
"p = os.path.join(%(root)s, *%(pth)r)",
"importlib = has_mfs and __import__('importlib.util')",
"has_mfs and __import__('importlib.machinery')",
"m = has_mfs and "
(
"m = has_mfs and "
"sys.modules.setdefault(%(pkg)r, "
"importlib.util.module_from_spec("
"importlib.machinery.PathFinder.find_spec(%(pkg)r, "
"[os.path.dirname(p)])))",
"m = m or "
"sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
"importlib.util.module_from_spec("
"importlib.machinery.PathFinder.find_spec(%(pkg)r, "
"[os.path.dirname(p)])))"
),
(
"m = m or "
"sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))"
),
"mp = (m or []) and m.__dict__.setdefault('__path__',[])",
"(p not in mp) and mp.append(p)",
)
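
For orientation, the statements above appear to be the template lines setuptools writes out for namespace-package stubs; a rough Python sketch of what they do at import time might look like the following, where register_namespace, pkg, and pkg_dir are illustrative names only, not part of setuptools:

import os
import sys
import types
import importlib.util
import importlib.machinery


def register_namespace(pkg, pkg_dir):
    # Look the package up next to pkg_dir's parent, as the generated lines do.
    spec = importlib.machinery.PathFinder.find_spec(
        pkg, [os.path.dirname(pkg_dir)])
    module = (importlib.util.module_from_spec(spec)
              if spec else types.ModuleType(pkg))
    module = sys.modules.setdefault(pkg, module)
    # Make sure pkg_dir participates in the package's __path__.
    paths = module.__dict__.setdefault('__path__', [])
    if pkg_dir not in paths:
        paths.append(pkg_dir)
    return module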
......
......@@ -46,7 +46,8 @@ __all__ = [
_SOCKET_TIMEOUT = 15
_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}"
user_agent = _tmpl.format(py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools)
user_agent = _tmpl.format(
py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools)
def parse_requirement_arg(spec):
......@@ -1092,7 +1093,8 @@ def open_with_auth(url, opener=urllib.request.urlopen):
# copy of urllib.parse._splituser from Python 3.8
def _splituser(host):
"""splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
"""splituser('user[:passwd]@host[:port]')
--> 'user[:passwd]', 'host[:port]'."""
user, delim, host = host.rpartition('@')
return (user if delim else None), host
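
To make the backported helper's behaviour concrete, a small illustrative check (the credential and host values below are made up):

assert _splituser('alice:s3cret@pypi.example.org:443') == (
    'alice:s3cret', 'pypi.example.org:443')
assert _splituser('pypi.example.org') == (None, 'pypi.example.org')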
......
......@@ -16,7 +16,7 @@ def get_all_headers(message, key):
if six.PY2:
def get_all_headers(message, key):
def get_all_headers(message, key): # noqa
return message.getheaders(key)
......
......@@ -13,6 +13,8 @@ from setuptools.extern import six
from setuptools.extern.six.moves import builtins, map
import pkg_resources.py31compat
from distutils.errors import DistutilsError
from pkg_resources import working_set
if sys.platform.startswith('java'):
import org.python.modules.posix.PosixModule as _os
......@@ -23,8 +25,6 @@ try:
except NameError:
_file = None
_open = open
from distutils.errors import DistutilsError
from pkg_resources import working_set
__all__ = [
......@@ -374,7 +374,7 @@ class AbstractSandbox:
if hasattr(os, 'devnull'):
_EXCEPTIONS = [os.devnull,]
_EXCEPTIONS = [os.devnull]
else:
_EXCEPTIONS = []
......@@ -466,7 +466,8 @@ class DirectorySandbox(AbstractSandbox):
WRITE_FLAGS = functools.reduce(
operator.or_, [getattr(_os, a, 0) for a in
operator.or_, [
getattr(_os, a, 0) for a in
"O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
)
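
For context, this reduction appears to build a bitmask of every write-capable open flag available on the current platform (flags that do not exist, such as O_TEMPORARY outside Windows, fall back to 0 and drop out of the mask); a self-contained sketch of the same idea, with opens_for_writing as a purely illustrative helper:

import functools
import operator
import os

flag_names = "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()
# OR together whichever flags exist on this platform; absent ones count as 0.
WRITE_FLAGS = functools.reduce(
    operator.or_, (getattr(os, name, 0) for name in flag_names))


def opens_for_writing(flags):
    # Does an os.open() call with these flags request any write-capable mode?
    return bool(flags & WRITE_FLAGS)


assert opens_for_writing(os.O_WRONLY | os.O_CREAT)
assert not opens_for_writing(os.O_RDONLY)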
......
......@@ -38,22 +38,24 @@ def __boot():
else:
raise ImportError("Couldn't find the real 'site' module")
known_paths = dict([(makepath(item)[1], 1) for item in sys.path]) # 2.2 comp
# 2.2 comp
known_paths = dict([(
makepath(item)[1], 1) for item in sys.path]) # noqa
oldpos = getattr(sys, '__egginsert', 0) # save old insertion position
sys.__egginsert = 0 # and reset the current one
for item in PYTHONPATH:
addsitedir(item)
addsitedir(item) # noqa
sys.__egginsert += oldpos # restore effective old position
d, nd = makepath(stdpath[0])
d, nd = makepath(stdpath[0]) # noqa
insert_at = None
new_path = []
for item in sys.path:
p, np = makepath(item)
p, np = makepath(item) # noqa
if np == nd and insert_at is None:
# We've hit the first 'system' path entry, so added entries go here
......
......@@ -35,7 +35,8 @@ try:
except AttributeError:
HTTPSHandler = HTTPSConnection = object
is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection)
is_available = ssl is not None and object not in (
HTTPSHandler, HTTPSConnection)
try:
......@@ -85,8 +86,10 @@ if not match_hostname:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
# The client SHOULD NOT attempt to match a
# presented identifier in which the wildcard
# character comprises a label other than the
# left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
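
As a quick illustration of the left-most-label rule restated in the comment above, a simplified sketch (this is not the vendored match_hostname implementation):

def leftmost_wildcard_match(pattern, hostname):
    # A lone '*' may only stand in for the left-most DNS label.
    pat_parts = pattern.lower().split('.')
    host_parts = hostname.lower().split('.')
    if pat_parts[0] != '*':
        return pattern.lower() == hostname.lower()
    return (len(pat_parts) == len(host_parts)
            and host_parts[0] != ''
            and pat_parts[1:] == host_parts[1:])


assert leftmost_wildcard_match('*.example.com', 'www.example.com')
assert not leftmost_wildcard_match('*.example.com', 'a.b.example.com')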
......@@ -137,15 +140,16 @@ if not match_hostname:
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
raise CertificateError(
"hostname %r doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
raise CertificateError(
"hostname %r doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
raise CertificateError(
"no appropriate commonName or "
"subjectAltName fields were found")
......@@ -158,7 +162,8 @@ class VerifyingHTTPSHandler(HTTPSHandler):
def https_open(self, req):
return self.do_open(
lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req
lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw),
req
)
......
......@@ -6,7 +6,7 @@ from setuptools.extern.six import PY2, PY3
__all__ = [
'fail_on_ascii', 'py2_only', 'py3_only'
'fail_on_ascii', 'py2_only', 'py3_only'
]
......
mock
pytest-flake8; python_version!="3.4"
pytest-flake8<=1.0.0; python_version=="3.4"
pytest-flake8
flake8-2020; python_version>="3.6"
virtualenv>=13.0.0
pytest-virtualenv>=1.2.7
pytest>=3.7
......
......@@ -8,8 +8,7 @@ from setuptools.dist import Distribution
class TestBuildCLib:
@mock.patch(
'setuptools.command.build_clib.newer_pairwise_group'
)
'setuptools.command.build_clib.newer_pairwise_group')
def test_build_libraries(self, mock_newer):
dist = Distribution()
cmd = build_clib(dist)
......
......@@ -23,6 +23,7 @@ class BuildBackendBase:
self.env = env
self.backend_name = backend_name
class BuildBackend(BuildBackendBase):
"""PEP 517 Build Backend"""
......@@ -262,6 +263,27 @@ class TestBuildMetaBackend:
assert os.path.isfile(
os.path.join(os.path.abspath("out_sdist"), sdist_name))
def test_build_sdist_pyproject_toml_exists(self, tmpdir_cwd):
files = {
'setup.py': DALS("""
__import__('setuptools').setup(
name='foo',
version='0.0.0',
py_modules=['hello']
)"""),
'hello.py': '',
'pyproject.toml': DALS("""
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta
"""),
}
build_files(files)
build_backend = self.get_build_backend()
targz_path = build_backend.build_sdist("temp")
with tarfile.open(os.path.join("temp", targz_path)) as tar:
assert any('pyproject.toml' in name for name in tar.getnames())
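
For orientation, the backend call exercised by this test corresponds roughly to invoking the PEP 517 hook directly from a project root laid out like the files dict above; the snippet below is an illustrative sketch rather than part of the test suite:

import os
import tarfile

from setuptools import build_meta

# Build an sdist into ./temp and confirm pyproject.toml ships with it.
sdist_name = build_meta.build_sdist("temp")
with tarfile.open(os.path.join("temp", sdist_name)) as tar:
    assert any('pyproject.toml' in name for name in tar.getnames())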
def test_build_sdist_setup_py_exists(self, tmpdir_cwd):
# If build_sdist is called from a script other than setup.py,
# ensure setup.py is included
......@@ -385,6 +407,28 @@ class TestBuildMetaBackend:
assert expected == sorted(actual)
_sys_argv_0_passthrough = {
'setup.py': DALS("""
import os
import sys
__import__('setuptools').setup(
name='foo',
version='0.0.0',
)
sys_argv = os.path.abspath(sys.argv[0])
file_path = os.path.abspath('setup.py')
assert sys_argv == file_path
""")
}
def test_sys_argv_passthrough(self, tmpdir_cwd):
build_files(self._sys_argv_0_passthrough)
build_backend = self.get_build_backend()
with pytest.raises(AssertionError):
build_backend.build_sdist("temp")
class TestBuildMetaLegacyBackend(TestBuildMetaBackend):
backend_name = 'setuptools.build_meta:__legacy__'
......@@ -396,3 +440,9 @@ class TestBuildMetaLegacyBackend(TestBuildMetaBackend):
build_backend = self.get_build_backend()
build_backend.build_sdist("temp")
def test_sys_argv_passthrough(self, tmpdir_cwd):
build_files(self._sys_argv_0_passthrough)
build_backend = self.get_build_backend()
build_backend.build_sdist("temp")
......@@ -695,7 +695,7 @@ class TestOptions:
)
with get_dist(tmpdir) as dist:
assert set(dist.packages) == set(
['fake_package', 'fake_package.sub_two'])
['fake_package', 'fake_package.sub_two'])
@py2_only
def test_find_namespace_directive_fails_on_py2(self, tmpdir):
......@@ -748,7 +748,7 @@ class TestOptions:
)
with get_dist(tmpdir) as dist:
assert set(dist.packages) == {
'fake_package', 'fake_package.sub_two'
'fake_package', 'fake_package.sub_two'
}
def test_extras_require(self, tmpdir):
......@@ -881,7 +881,7 @@ class TestExternalSetters:
return None
@patch.object(_Distribution, '__init__', autospec=True)
def test_external_setters(self, mock_parent_init, tmpdir):
def test_external_setters(self, mock_parent_init, tmpdir):
mock_parent_init.side_effect = self._fake_distribution_init
dist = Distribution(attrs={
......
......@@ -95,7 +95,7 @@ class TestDevelop:
with io.open(fn) as init_file:
init = init_file.read().strip()
expected = 'print("foo")' if six.PY3 else 'print "foo"'
expected = 'print "foo"' if six.PY2 else 'print("foo")'
assert init == expected
def test_console_scripts(self, tmpdir):
......@@ -161,7 +161,7 @@ class TestNamespaces:
reason="https://github.com/pypa/setuptools/issues/851",
)
@pytest.mark.skipif(
platform.python_implementation() == 'PyPy' and six.PY3,
platform.python_implementation() == 'PyPy' and not six.PY2,
reason="https://github.com/pypa/setuptools/issues/1202",
)
def test_namespace_package_importable(self, tmpdir):
......
......@@ -61,7 +61,8 @@ def test_dist_fetch_build_egg(tmpdir):
dist.fetch_build_egg(r)
for r in reqs
]
assert [dist.key for dist in resolved_dists if dist] == reqs
# noqa below because on Python 2 it causes flakes
assert [dist.key for dist in resolved_dists if dist] == reqs # noqa
def test_dist__get_unpatched_deprecated():
......@@ -284,7 +285,7 @@ def test_provides_extras_deterministic_order():
dist = Distribution(attrs)
assert dist.metadata.provides_extras == ['b', 'a']
CHECK_PACKAGE_DATA_TESTS = (
# Valid.
({
......@@ -309,7 +310,8 @@ CHECK_PACKAGE_DATA_TESTS = (
({
'hello': str('*.msg'),
}, (
"\"values of 'package_data' dict\" must be a list of strings (got '*.msg')"
"\"values of 'package_data' dict\" "
"must be a list of strings (got '*.msg')"
)),
# Invalid value type (generators are single use)
({
......@@ -321,10 +323,12 @@ CHECK_PACKAGE_DATA_TESTS = (
)
@pytest.mark.parametrize('package_data, expected_message', CHECK_PACKAGE_DATA_TESTS)
@pytest.mark.parametrize(
'package_data, expected_message', CHECK_PACKAGE_DATA_TESTS)
def test_check_package_data(package_data, expected_message):
if expected_message is None:
assert check_package_data(None, 'package_data', package_data) is None
else:
with pytest.raises(DistutilsSetupError, match=re.escape(expected_message)):
with pytest.raises(
DistutilsSetupError, match=re.escape(expected_message)):
check_package_data(None, str('package_data'), package_data)
......@@ -629,7 +629,7 @@ class TestSetupRequires:
test_pkg = create_setup_requires_package(
temp_dir, setup_attrs=dict(version='attr: foobar.version'),
make_package=make_dependency_sdist,
use_setup_cfg=use_setup_cfg+('version',),
use_setup_cfg=use_setup_cfg + ('version',),
)
test_setup_py = os.path.join(test_pkg, 'setup.py')
with contexts.quiet() as (stdout, stderr):
......@@ -671,8 +671,10 @@ class TestSetupRequires:
dep_url = path_to_url(dep_sdist, authority='localhost')
test_pkg = create_setup_requires_package(
temp_dir,
'python-xlib', '0.19', # Ignored (overriden by setup_attrs).
setup_attrs=dict(setup_requires='dependency @ %s' % dep_url))
# Ignored (overridden by setup_attrs)
'python-xlib', '0.19',
setup_attrs=dict(
setup_requires='dependency @ %s' % dep_url))
test_setup_py = os.path.join(test_pkg, 'setup.py')
run_setup(test_setup_py, [str('--version')])
assert len(mock_index.requests) == 0
......@@ -710,11 +712,14 @@ class TestSetupRequires:
dep_1_0_sdist = 'dep-1.0.tar.gz'
dep_1_0_url = path_to_url(str(tmpdir / dep_1_0_sdist))
dep_1_0_python_requires = '>=2.7'
make_python_requires_sdist(str(tmpdir / dep_1_0_sdist), 'dep', '1.0', dep_1_0_python_requires)
make_python_requires_sdist(
str(tmpdir / dep_1_0_sdist), 'dep', '1.0', dep_1_0_python_requires)
dep_2_0_sdist = 'dep-2.0.tar.gz'
dep_2_0_url = path_to_url(str(tmpdir / dep_2_0_sdist))
dep_2_0_python_requires = '!=' + '.'.join(map(str, sys.version_info[:2])) + '.*'
make_python_requires_sdist(str(tmpdir / dep_2_0_sdist), 'dep', '2.0', dep_2_0_python_requires)
dep_2_0_python_requires = '!=' + '.'.join(
map(str, sys.version_info[:2])) + '.*'
make_python_requires_sdist(
str(tmpdir / dep_2_0_sdist), 'dep', '2.0', dep_2_0_python_requires)
index = tmpdir / 'index.html'
index.write_text(DALS(
'''
......@@ -726,7 +731,7 @@ class TestSetupRequires:
<a href="{dep_2_0_url}" data-requires-python="{dep_2_0_python_requires}">{dep_2_0_sdist}</a><br/>
</body>
</html>
''').format(
''').format( # noqa
dep_1_0_url=dep_1_0_url,
dep_1_0_sdist=dep_1_0_sdist,
dep_1_0_python_requires=dep_1_0_python_requires,
......@@ -738,23 +743,29 @@ class TestSetupRequires:
with contexts.save_pkg_resources_state():
test_pkg = create_setup_requires_package(
str(tmpdir),
'python-xlib', '0.19', # Ignored (overriden by setup_attrs).
setup_attrs=dict(setup_requires='dep', dependency_links=[index_url]))
'python-xlib', '0.19', # Ignored (overridden by setup_attrs).
setup_attrs=dict(
setup_requires='dep', dependency_links=[index_url]))
test_setup_py = os.path.join(test_pkg, 'setup.py')
run_setup(test_setup_py, [str('--version')])
eggs = list(map(str, pkg_resources.find_distributions(os.path.join(test_pkg, '.eggs'))))
eggs = list(map(str, pkg_resources.find_distributions(
os.path.join(test_pkg, '.eggs'))))
assert eggs == ['dep 1.0']
@pytest.mark.parametrize('use_legacy_installer,with_dependency_links_in_setup_py',
itertools.product((False, True), (False, True)))
def test_setup_requires_with_find_links_in_setup_cfg(self, monkeypatch,
use_legacy_installer,
with_dependency_links_in_setup_py):
@pytest.mark.parametrize(
'use_legacy_installer,with_dependency_links_in_setup_py',
itertools.product((False, True), (False, True)))
def test_setup_requires_with_find_links_in_setup_cfg(
self, monkeypatch, use_legacy_installer,
with_dependency_links_in_setup_py):
monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
with contexts.save_pkg_resources_state():
with contexts.tempdir() as temp_dir:
make_trivial_sdist(os.path.join(temp_dir, 'python-xlib-42.tar.gz'), 'python-xlib', '42')
make_trivial_sdist(
os.path.join(temp_dir, 'python-xlib-42.tar.gz'),
'python-xlib',
'42')
test_pkg = os.path.join(temp_dir, 'test_pkg')
test_setup_py = os.path.join(test_pkg, 'setup.py')
test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
......@@ -771,7 +782,7 @@ class TestSetupRequires:
installer.fetch_build_egg = installer._legacy_fetch_build_egg
setup(setup_requires='python-xlib==42',
dependency_links={dependency_links!r})
''').format(use_legacy_installer=use_legacy_installer,
''').format(use_legacy_installer=use_legacy_installer, # noqa
dependency_links=dependency_links))
with open(test_setup_cfg, 'w') as fp:
fp.write(DALS(
......@@ -783,14 +794,17 @@ class TestSetupRequires:
find_links=temp_dir))
run_setup(test_setup_py, [str('--version')])
def test_setup_requires_with_transitive_extra_dependency(self, monkeypatch):
def test_setup_requires_with_transitive_extra_dependency(
self, monkeypatch):
# Use case: installing a package with a build dependency on
# an already installed `dep[extra]`, which in turn depends
# on `extra_dep` (which is not already installed).
with contexts.save_pkg_resources_state():
with contexts.tempdir() as temp_dir:
# Create source distribution for `extra_dep`.
make_trivial_sdist(os.path.join(temp_dir, 'extra_dep-1.0.tar.gz'), 'extra_dep', '1.0')
make_trivial_sdist(
os.path.join(temp_dir, 'extra_dep-1.0.tar.gz'),
'extra_dep', '1.0')
# Create source tree for `dep`.
dep_pkg = os.path.join(temp_dir, 'dep')
os.mkdir(dep_pkg)
......@@ -806,12 +820,12 @@ class TestSetupRequires:
'setup.cfg': '',
}, prefix=dep_pkg)
# "Install" dep.
run_setup(os.path.join(dep_pkg, 'setup.py'), [str('dist_info')])
run_setup(
os.path.join(dep_pkg, 'setup.py'), [str('dist_info')])
working_set.add_entry(dep_pkg)
# Create source tree for test package.
test_pkg = os.path.join(temp_dir, 'test_pkg')
test_setup_py = os.path.join(test_pkg, 'setup.py')
test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
os.mkdir(test_pkg)
with open(test_setup_py, 'w') as fp:
fp.write(DALS(
......@@ -881,16 +895,19 @@ def make_nspkg_sdist(dist_path, distname, version):
def make_python_requires_sdist(dist_path, distname, version, python_requires):
make_sdist(dist_path, [
('setup.py', DALS("""\
import setuptools
setuptools.setup(
name={name!r},
version={version!r},
python_requires={python_requires!r},
)
""").format(name=distname, version=version,
python_requires=python_requires)),
('setup.cfg', ''),
(
'setup.py',
DALS("""\
import setuptools
setuptools.setup(
name={name!r},
version={version!r},
python_requires={python_requires!r},
)
""").format(
name=distname, version=version,
python_requires=python_requires)),
('setup.cfg', ''),
])
......@@ -948,16 +965,16 @@ def create_setup_requires_package(path, distname='foobar', version='0.1',
value = ';'.join(value)
section.append('%s: %s' % (name, value))
test_setup_cfg_contents = DALS(
"""
[metadata]
{metadata}
[options]
{options}
"""
).format(
options='\n'.join(options),
metadata='\n'.join(metadata),
)
"""
[metadata]
{metadata}
[options]
{options}
"""
).format(
options='\n'.join(options),
metadata='\n'.join(metadata),
)
else:
test_setup_cfg_contents = ''
with open(os.path.join(test_pkg, 'setup.cfg'), 'w') as f:
......
......@@ -525,19 +525,19 @@ class TestEggInfo:
license_file = LICENSE
"""),
'LICENSE': "Test license"
}, True), # with license
}, True), # with license
({
'setup.cfg': DALS("""
[metadata]
license_file = INVALID_LICENSE
"""),
'LICENSE': "Test license"
}, False), # with an invalid license
}, False), # with an invalid license
({
'setup.cfg': DALS("""
"""),
'LICENSE': "Test license"
}, False), # no license_file attribute
}, False), # no license_file attribute
({
'setup.cfg': DALS("""
[metadata]
......@@ -545,7 +545,7 @@ class TestEggInfo:
"""),
'MANIFEST.in': "exclude LICENSE",
'LICENSE': "Test license"
}, False) # license file is manually excluded
}, False) # license file is manually excluded
])
def test_setup_cfg_license_file(
self, tmpdir_cwd, env, files, license_in_sources):
......@@ -565,7 +565,8 @@ class TestEggInfo:
assert 'LICENSE' in sources_text
else:
assert 'LICENSE' not in sources_text
assert 'INVALID_LICENSE' not in sources_text # for invalid license test
# for invalid license test
assert 'INVALID_LICENSE' not in sources_text
@pytest.mark.parametrize("files, incl_licenses, excl_licenses", [
({
......@@ -577,7 +578,7 @@ class TestEggInfo:
"""),
'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-ABC', 'LICENSE-XYZ'], []), # with licenses
}, ['LICENSE-ABC', 'LICENSE-XYZ'], []), # with licenses
({
'setup.cfg': DALS("""
[metadata]
......@@ -585,7 +586,7 @@ class TestEggInfo:
"""),
'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-ABC', 'LICENSE-XYZ'], []), # with commas
}, ['LICENSE-ABC', 'LICENSE-XYZ'], []), # with commas
({
'setup.cfg': DALS("""
[metadata]
......@@ -594,7 +595,7 @@ class TestEggInfo:
"""),
'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-ABC'], ['LICENSE-XYZ']), # with one license
}, ['LICENSE-ABC'], ['LICENSE-XYZ']), # with one license
({
'setup.cfg': DALS("""
[metadata]
......@@ -602,7 +603,7 @@ class TestEggInfo:
"""),
'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license"
}, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # empty
}, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # empty
({
'setup.cfg': DALS("""
[metadata]
......@@ -610,7 +611,7 @@ class TestEggInfo:
"""),
'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-XYZ'], ['LICENSE-ABC']), # on same line
}, ['LICENSE-XYZ'], ['LICENSE-ABC']), # on same line
({
'setup.cfg': DALS("""
[metadata]
......@@ -619,12 +620,12 @@ class TestEggInfo:
INVALID_LICENSE
"""),
'LICENSE-ABC': "Test license"
}, ['LICENSE-ABC'], ['INVALID_LICENSE']), # with an invalid license
}, ['LICENSE-ABC'], ['INVALID_LICENSE']), # with an invalid license
({
'setup.cfg': DALS("""
"""),
'LICENSE': "Test license"
}, [], ['LICENSE']), # no license_files attribute
}, [], ['LICENSE']), # no license_files attribute
({
'setup.cfg': DALS("""
[metadata]
......@@ -632,7 +633,7 @@ class TestEggInfo:
"""),
'MANIFEST.in': "exclude LICENSE",
'LICENSE': "Test license"
}, [], ['LICENSE']), # license file is manually excluded
}, [], ['LICENSE']), # license file is manually excluded
({
'setup.cfg': DALS("""
[metadata]
......@@ -643,7 +644,7 @@ class TestEggInfo:
'MANIFEST.in': "exclude LICENSE-XYZ",
'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-ABC'], ['LICENSE-XYZ']) # subset is manually excluded
}, ['LICENSE-ABC'], ['LICENSE-XYZ']) # subset is manually excluded
])
def test_setup_cfg_license_files(
self, tmpdir_cwd, env, files, incl_licenses, excl_licenses):
......@@ -674,7 +675,7 @@ class TestEggInfo:
"""),
'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license"
}, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # both empty
}, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # both empty
({
'setup.cfg': DALS("""
[metadata]
......@@ -684,7 +685,8 @@ class TestEggInfo:
"""),
'LICENSE-ABC': "ABC license",
'LICENSE-XYZ': "XYZ license"
}, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # license_file is still singular
# license_file is still singular
}, [], ['LICENSE-ABC', 'LICENSE-XYZ']),
({
'setup.cfg': DALS("""
[metadata]
......@@ -696,7 +698,7 @@ class TestEggInfo:
'LICENSE-ABC': "ABC license",
'LICENSE-PQR': "PQR license",
'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []), # combined
}, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []), # combined
({
'setup.cfg': DALS("""
[metadata]
......@@ -709,7 +711,8 @@ class TestEggInfo:
'LICENSE-ABC': "ABC license",
'LICENSE-PQR': "PQR license",
'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []), # duplicate license
# duplicate license
}, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []),
({
'setup.cfg': DALS("""
[metadata]
......@@ -720,7 +723,8 @@ class TestEggInfo:
'LICENSE-ABC': "ABC license",
'LICENSE-PQR': "PQR license",
'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-ABC', 'LICENSE-XYZ'], ['LICENSE-PQR']), # combined subset
# combined subset
}, ['LICENSE-ABC', 'LICENSE-XYZ'], ['LICENSE-PQR']),
({
'setup.cfg': DALS("""
[metadata]
......@@ -730,7 +734,8 @@ class TestEggInfo:
LICENSE-PQR
"""),
'LICENSE-PQR': "Test license"
}, ['LICENSE-PQR'], ['LICENSE-ABC', 'LICENSE-XYZ']), # with invalid licenses
# with invalid licenses
}, ['LICENSE-PQR'], ['LICENSE-ABC', 'LICENSE-XYZ']),
({
'setup.cfg': DALS("""
[metadata]
......@@ -743,7 +748,8 @@ class TestEggInfo:
'LICENSE-ABC': "ABC license",
'LICENSE-PQR': "PQR license",
'LICENSE-XYZ': "XYZ license"
}, ['LICENSE-XYZ'], ['LICENSE-ABC', 'LICENSE-PQR']) # manually excluded
# manually excluded
}, ['LICENSE-XYZ'], ['LICENSE-ABC', 'LICENSE-PQR'])
])
def test_setup_cfg_license_file_license_files(
self, tmpdir_cwd, env, files, incl_licenses, excl_licenses):
......
# -*- coding: utf-8 -*-
"""sdist tests"""
from __future__ import print_function, unicode_literals
import os
import shutil
import sys
import tempfile
import unicodedata
......@@ -50,7 +51,7 @@ def quiet():
# Convert to POSIX path
def posix(path):
if six.PY3 and not isinstance(path, str):
if not six.PY2 and not isinstance(path, str):
return path.replace(os.sep.encode('ascii'), b'/')
else:
return path.replace(os.sep, '/')
......@@ -89,30 +90,28 @@ fail_on_latin1_encoded_filenames = pytest.mark.xfail(
)
def touch(path):
path.write_text('', encoding='utf-8')
class TestSdistTest:
def setup_method(self, method):
self.temp_dir = tempfile.mkdtemp()
with open(os.path.join(self.temp_dir, 'setup.py'), 'w') as f:
f.write(SETUP_PY)
@pytest.fixture(autouse=True)
def source_dir(self, tmpdir):
(tmpdir / 'setup.py').write_text(SETUP_PY, encoding='utf-8')
# Set up the rest of the test package
test_pkg = os.path.join(self.temp_dir, 'sdist_test')
os.mkdir(test_pkg)
data_folder = os.path.join(self.temp_dir, "d")
os.mkdir(data_folder)
test_pkg = tmpdir / 'sdist_test'
test_pkg.mkdir()
data_folder = tmpdir / 'd'
data_folder.mkdir()
# *.rst was not included in package_data, so c.rst should not be
# automatically added to the manifest when not under version control
for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst',
os.path.join(data_folder, "e.dat")]:
# Just touch the files; their contents are irrelevant
open(os.path.join(test_pkg, fname), 'w').close()
self.old_cwd = os.getcwd()
os.chdir(self.temp_dir)
for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
touch(test_pkg / fname)
touch(data_folder / 'e.dat')
def teardown_method(self, method):
os.chdir(self.old_cwd)
shutil.rmtree(self.temp_dir)
with tmpdir.as_cwd():
yield
def test_package_data_in_sdist(self):
"""Regression test for pull request #4: ensures that files listed in
......@@ -175,14 +174,14 @@ class TestSdistTest:
manifest = cmd.filelist.files
assert 'setup.py' not in manifest
def test_defaults_case_sensitivity(self):
def test_defaults_case_sensitivity(self, tmpdir):
"""
Make sure default files (README.*, etc.) are added in a case-sensitive
way to avoid problems with packages built on Windows.
"""
open(os.path.join(self.temp_dir, 'readme.rst'), 'w').close()
open(os.path.join(self.temp_dir, 'SETUP.cfg'), 'w').close()
touch(tmpdir / 'readme.rst')
touch(tmpdir / 'SETUP.cfg')
dist = Distribution(SETUP_ATTRS)
# the extension deliberately capitalized for this test
......@@ -230,10 +229,6 @@ class TestSdistTest:
u_contents = contents.decode('UTF-8')
# The manifest should contain the UTF-8 filename
if six.PY2:
fs_enc = sys.getfilesystemencoding()
filename = filename.decode(fs_enc)
assert posix(filename) in u_contents
@py3_only
......@@ -334,7 +329,7 @@ class TestSdistTest:
cmd.read_manifest()
# The filelist should contain the UTF-8 filename
if six.PY3:
if not six.PY2:
filename = filename.decode('utf-8')
assert filename in cmd.filelist.files
......@@ -374,7 +369,7 @@ class TestSdistTest:
@fail_on_latin1_encoded_filenames
def test_sdist_with_utf8_encoded_filename(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
dist = Distribution(self.make_strings(SETUP_ATTRS))
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
......@@ -388,7 +383,7 @@ class TestSdistTest:
if sys.platform == 'darwin':
filename = decompose(filename)
if six.PY3:
if not six.PY2:
fs_enc = sys.getfilesystemencoding()
if sys.platform == 'win32':
......@@ -405,10 +400,19 @@ class TestSdistTest:
else:
assert filename in cmd.filelist.files
@classmethod
def make_strings(cls, item):
if isinstance(item, dict):
return {
key: cls.make_strings(value) for key, value in item.items()}
if isinstance(item, list):
return list(map(cls.make_strings, item))
return str(item)
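
The classmethod above appears to coerce every value to the native str type so the same attribute dict behaves consistently under Python 2 and 3; a hypothetical call, with example values:

attrs = TestSdistTest.make_strings(
    {'name': u'sdist_test', 'packages': [u'sdist_test']})
assert isinstance(attrs['name'], str)
assert all(isinstance(value, str) for value in attrs['packages'])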
@fail_on_latin1_encoded_filenames
def test_sdist_with_latin1_encoded_filename(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
dist = Distribution(self.make_strings(SETUP_ATTRS))
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
......@@ -421,7 +425,19 @@ class TestSdistTest:
with quiet():
cmd.run()
if six.PY3:
if six.PY2:
# Under Python 2 there seems to be no decoded string in the
# filelist. However, due to the decoding and encoding of the
# file name for the UTF-8 manifest, the latin-1 name may be excluded.
try:
# fs_enc should match how the decoding is expected to
# be performed for the manifest output.
fs_enc = sys.getfilesystemencoding()
filename.decode(fs_enc)
assert filename in cmd.filelist.files
except UnicodeDecodeError:
assert filename not in cmd.filelist.files
else:
# not all windows systems have a default FS encoding of cp1252
if sys.platform == 'win32':
# Latin-1 is similar to Windows-1252 however
......@@ -436,18 +452,36 @@ class TestSdistTest:
# The Latin-1 filename should have been skipped
filename = filename.decode('latin-1')
assert filename not in cmd.filelist.files
else:
# Under Python 2 there seems to be no decoded string in the
# filelist. However, due to decode and encoding of the
# file name to get utf-8 Manifest the latin1 maybe excluded
try:
# fs_enc should match how one is expect the decoding to
# be proformed for the manifest output.
fs_enc = sys.getfilesystemencoding()
filename.decode(fs_enc)
assert filename in cmd.filelist.files
except UnicodeDecodeError:
filename not in cmd.filelist.files
def test_pyproject_toml_in_sdist(self, tmpdir):
"""
Check that pyproject.toml is included in the source distribution when present
"""
touch(tmpdir / 'pyproject.toml')
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
with quiet():
cmd.run()
manifest = cmd.filelist.files
assert 'pyproject.toml' in manifest
def test_pyproject_toml_excluded(self, tmpdir):
"""
Check that pyproject.toml can be excluded even if present
"""
touch(tmpdir / 'pyproject.toml')
with open('MANIFEST.in', 'w') as mts:
print('exclude pyproject.toml', file=mts)
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
with quiet():
cmd.run()
manifest = cmd.filelist.files
assert 'pyproject.toml' not in manifest
def test_default_revctrl():
......
......@@ -15,7 +15,7 @@ class TestEdit:
def parse_config(filename):
parser = configparser.ConfigParser()
with io.open(filename, encoding='utf-8') as reader:
(parser.read_file if six.PY3 else parser.readfp)(reader)
(parser.readfp if six.PY2 else parser.read_file)(reader)
return parser
@staticmethod
......
......@@ -108,6 +108,11 @@ class TestDepends:
assert not req.is_present()
assert not req.is_current()
@needs_bytecode
def test_require_present(self):
# In #1896, this test was failing for months with the only
# complaint coming from test runners (not end users).
# TODO: Evaluate if this code is needed at all.
req = Require('Tests', None, 'tests', homepage="http://example.com")
assert req.format is None
assert req.attribute is None
......@@ -223,10 +228,10 @@ class TestFeatures:
py_modules=['bar_et'], remove=['bar.ext'],
),
'baz': Feature(
"baz", optional=False, packages=['pkg.baz'],
scripts=['scripts/baz_it'],
libraries=[('libfoo', 'foo/foofoo.c')]
),
"baz", optional=False, packages=['pkg.baz'],
scripts=['scripts/baz_it'],
libraries=[('libfoo', 'foo/foofoo.c')]
),
'dwim': Feature("DWIM", available=False, remove='bazish'),
},
script_args=['--without-bar', 'install'],
......
......@@ -12,7 +12,7 @@ from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
from . import contexts
SETUP_PY = DALS("""
from setuptools import setup
......
......@@ -8,8 +8,6 @@ from pytest_fixture_config import yield_requires_config
import pytest_virtualenv
from setuptools.extern import six
from .textwrap import DALS
from .test_easy_install import make_nspkg_sdist
......@@ -64,7 +62,7 @@ def _get_pip_versions():
from urllib.request import urlopen
from urllib.error import URLError
except ImportError:
from urllib2 import urlopen, URLError # Python 2.7 compat
from urllib2 import urlopen, URLError # Python 2.7 compat
try:
urlopen('https://pypi.org', timeout=1)
......@@ -77,12 +75,9 @@ def _get_pip_versions():
'pip==10.0.1',
'pip==18.1',
'pip==19.0.1',
'https://github.com/pypa/pip/archive/master.zip',
]
# Pip's master dropped support for 3.4.
if not six.PY34:
network_versions.append('https://github.com/pypa/pip/archive/master.zip')
versions = [None] + [
pytest.param(v, **({} if network else {'marks': pytest.mark.skip}))
for v in network_versions
......@@ -183,12 +178,16 @@ def _check_test_command_install_requirements(virtualenv, tmpdir):
)).format(tmpdir=tmpdir))
assert tmpdir.join('success').check()
def test_test_command_install_requirements(virtualenv, tmpdir):
# Ensure pip/wheel packages are installed.
virtualenv.run("python -c \"__import__('pkg_resources').require(['pip', 'wheel'])\"")
virtualenv.run(
"python -c \"__import__('pkg_resources').require(['pip', 'wheel'])\"")
_check_test_command_install_requirements(virtualenv, tmpdir)
def test_test_command_install_requirements_when_using_easy_install(bare_virtualenv, tmpdir):
def test_test_command_install_requirements_when_using_easy_install(
bare_virtualenv, tmpdir):
_check_test_command_install_requirements(bare_virtualenv, tmpdir)
......
......@@ -125,11 +125,12 @@ def flatten_tree(tree):
def format_install_tree(tree):
return {x.format(
py_version=PY_MAJOR,
platform=get_platform(),
shlib_ext=get_config_var('EXT_SUFFIX') or get_config_var('SO'))
for x in tree}
return {
x.format(
py_version=PY_MAJOR,
platform=get_platform(),
shlib_ext=get_config_var('EXT_SUFFIX') or get_config_var('SO'))
for x in tree}
def _check_wheel_install(filename, install_dir, install_tree_includes,
......@@ -455,7 +456,8 @@ WHEEL_INSTALL_TESTS = (
id='empty_namespace_package',
file_defs={
'foobar': {
'__init__.py': "__import__('pkg_resources').declare_namespace(__name__)",
'__init__.py':
"__import__('pkg_resources').declare_namespace(__name__)",
},
},
setup_kwargs=dict(
......@@ -579,4 +581,5 @@ def test_wheel_is_compatible(monkeypatch):
for t in parse_tag('cp36-cp36m-manylinux1_x86_64'):
yield t
monkeypatch.setattr('setuptools.wheel.sys_tags', sys_tags)
assert Wheel('onnxruntime-0.1.2-cp36-cp36m-manylinux1_x86_64.whl').is_compatible()
assert Wheel(
'onnxruntime-0.1.2-cp36-cp36m-manylinux1_x86_64.whl').is_compatible()
......@@ -77,7 +77,8 @@ class Wheel:
def is_compatible(self):
'''Is the wheel compatible with the current platform?'''
supported_tags = set((t.interpreter, t.abi, t.platform) for t in sys_tags())
supported_tags = set(
(t.interpreter, t.abi, t.platform) for t in sys_tags())
return next((True for t in self.tags() if t in supported_tags), False)
def egg_name(self):
......
import os
import shutil
import subprocess
import sys
from glob import glob
VIRTUAL_ENV = os.environ['VIRTUAL_ENV']
TOX_PIP_DIR = os.path.join(VIRTUAL_ENV, 'pip')
def remove_setuptools():
"""
Remove setuptools from the current environment.
"""
print("Removing setuptools")
cmd = [sys.executable, '-m', 'pip', 'uninstall', '-y', 'setuptools']
# set cwd to something other than '.' to avoid detecting
# '.' as the installed package.
subprocess.check_call(cmd, cwd='.tox')
def bootstrap():
print("Running bootstrap")
cmd = [sys.executable, '-m', 'bootstrap']
subprocess.check_call(cmd)
def pip(args):
# First things first, get a recent (stable) version of pip.
if not os.path.exists(TOX_PIP_DIR):
subprocess.check_call([sys.executable, '-m', 'pip',
'--disable-pip-version-check',
'install', '-t', TOX_PIP_DIR,
'pip'])
shutil.rmtree(glob(os.path.join(TOX_PIP_DIR, 'pip-*.dist-info'))[0])
# And use that version.
pypath = os.environ.get('PYTHONPATH')
pypath = pypath.split(os.pathsep) if pypath is not None else []
pypath.insert(0, TOX_PIP_DIR)
os.environ['PYTHONPATH'] = os.pathsep.join(pypath)
# Disable PEP 517 support when using editable installs.
for n, a in enumerate(args):
if not a.startswith('-'):
if a in 'install' and '-e' in args[n:]:
args.insert(n + 1, '--no-use-pep517')
break
# Fix call for setuptools editable install.
for n, a in enumerate(args):
if a == '.':
args[n] = os.getcwd()
subprocess.check_call([sys.executable, '-m', 'pip'] + args, cwd=TOX_PIP_DIR)
# Honor requires-python when installing test suite dependencies
if any('-r' in arg for arg in args):
os.environ['PIP_IGNORE_REQUIRES_PYTHON'] = '0'
if '.' in args:
remove_setuptools()
bootstrap()
cmd = [sys.executable, '-m', 'pip'] + args
subprocess.check_call(cmd)
if __name__ == '__main__':
......
# Note: Run "python bootstrap.py" before running Tox, to generate metadata.
#
# To run Tox against all supported Python interpreters, you can set:
#
# export TOXENV='py27,py3{4,5,6},pypy,pypy3'
# export TOXENV='py3{5,6,7,8},pypy,pypy3'
[tox]
envlist=python
minversion = 3.2
requires =
tox-pip-version >= 0.0.6
[helpers]
# Wrapper for calls to pip that make sure the version being used is a
# up-to-date, and to prevent the current working directory from being
# added to `sys.path`.
# Custom pip behavior
pip = python {toxinidir}/tools/tox_pip.py
[testenv]
deps=-r{toxinidir}/tests/requirements.txt
deps=-r{toxinidir}/setuptools/tests/requirements.txt
pip_version = pip
install_command = {[helpers]pip} install {opts} {packages}
list_dependencies_command = {[helpers]pip} freeze --all
setenv=COVERAGE_FILE={toxworkdir}/.coverage.{envname}
setenv =
COVERAGE_FILE={toxworkdir}/.coverage.{envname}
py{27,py2}: PIP_IGNORE_REQUIRES_PYTHON=true
# TODO: The passed environment variables came from copying other tox.ini files
# These should probably be individually annotated to explain what needs them.
passenv=APPDATA HOMEDRIVE HOMEPATH windir APPVEYOR APPVEYOR_* CI CODECOV_* TRAVIS TRAVIS_* NETWORK_REQUIRED
......@@ -45,7 +47,7 @@ commands=codecov -X gcov --file {toxworkdir}/coverage.xml
deps = -r{toxinidir}/docs/requirements.txt
skip_install=True
commands =
python {toxinidir}/bootstrap.py
python -m bootstrap
sphinx-build -W -b html -d {envtmpdir}/doctrees docs docs/build/html
sphinx-build -W -b man -d {envtmpdir}/doctrees docs docs/build/man
......