Commit 16ee10c4 authored by Jason R. Coombs

Merge with 10.2.1

--HG--
branch : feature/issue-229
parents 866ff739 41f2c5ec
...@@ -160,3 +160,24 @@ bc6655b4acf205dd9f25c702955645656077398a 6.0.1
01271e84e5125fcc4f0f368a6e21116a5722953c 6.0.2
7ea80190d494a766c6356fce85c844703964b6cc 6.1
df26609c2f614f5fc9110342e4003ee8bd95cf84 7.0
850a5c155c48b6ecfbb83b961586ea359b561522 8.0b1
7ea0e7498e4ddbf63b6929ee83c75a9207996b08 8.0
1af3a5f24f7dd4e51d117f701918052b7de65c99 8.1b1
d62bf4e407b3b9b5bedcc1396a9ba46f35571902 8.0.1
1c03d512e39d5cfd711ae3ed7e316769f427e43b 8.0.2
6c3467488123ce70b1dd009145a02f51fb78cdcc 8.0.3
2c467afffe9fe1e14618b576fac6b4f7c412a61e 8.0.4
3f87370b6863e5a4e831b394ef1a58e0e97a4336 8.1
995f6d9651312cd481ca1e5ddb271cbdd0474c57 8.2
efbe39dae0aba9a7db399f6442758ae94e315c93 8.2.1
cd14b2a72e51c7d13873ab6c2041f901b1a7a1cd 8.3
0eee586a153f068142c1a0df4bc2635ed2c1a1cc 9.0b1
921e60a0f9067311571fde9ccf2f35223159d9f6 8.4
0d7b9b63d06ab7f68bc8edd56cb2034e6395d7fc 9.0
fa069bf2411a150c9379d31a04d1c3836e2d3027 9.0.1
3ed27d68d3f41bb5daa2afecfa9180d5958fe9d3 9.1
0c4d18a747a6d39bff8e194a58af949a960d674a 10.0
4c41e2cdd70beb0da556d71f46a67734c14f2bc2 10.0.1
26b00011ec65b8f7b4f3d51078ec0a694701a45c 10.1
651d41db58849d4fc50e466f4dc458d448480c4e 10.2
1f5de53c079d577ead9d80265c9e006503b16457 10.2.1
...@@ -8,7 +8,8 @@ python:
- pypy
# command to run tests
script:
- python setup.py egg_info # update egg_info based on setup.py in checkout
- python setup.py test - python bootstrap.py
- python setup.py ptr
- python ez_setup.py --version 5.4.1 - python setup.py ptr --addopts='-rs'
- python ez_setup.py --version 10.2.1
...@@ -2,6 +2,147 @@
CHANGES
=======
------
10.2.1
------
* Issue #323: Fix regression in entry point name parsing.
----
10.2
----
* Deprecated use of EntryPoint.load(require=False).
* Substantial refactoring of all unit tests. Tests are now much leaner and
re-use a lot of fixtures and contexts for better clarity of purpose.
----
10.1
----
* Issue #320: Added a compatibility implementation of
``sdist._default_revctrl``
so that systems relying on that interface do not fail (namely, Ubuntu 12.04
and similar Debian releases).
------
10.0.1
------
* Issue #319: Fixed issue installing pure distutils packages.
----
10.0
----
* Issue #313: Removed built-in support for subversion. Projects wishing to
retain support for subversion will need to use a third party library. The
extant implementation is being ported to `setuptools_svn
<https://pypi.python.org/pypi/setuptools_svn>`_.
* Issue #315: Updated setuptools to hide its own loaded modules during
installation of another package. This change will enable setuptools to
upgrade (or downgrade) itself even when its own metadata and implementation
change.
---
9.1
---
* Prefer vendored packaging library `as recommended
<https://github.com/jaraco/setuptools/commit/170657b68f4b92e7e1bf82f5e19a831f5744af67#commitcomment-9109448>`_.
-----
9.0.1
-----
* Issue #312: Restored presence of pkg_resources API tests (doctest) to sdist.
---
9.0
---
* Issue #314: Disabled support for ``setup_requires`` metadata to avoid an issue
where Setuptools was unable to upgrade over earlier versions.
---
8.4
---
* Pull Request #106: Now write ``setup_requires`` metadata.
---
8.3
---
* Issue #311: Decoupled pkg_resources from setuptools once again.
``pkg_resources`` is now a package instead of a module.
-----
8.2.1
-----
* Issue #306: Suppress warnings about Version format except in select scenarios
(such as installation).
---
8.2
---
* Pull Request #85: Search egg-base when adding egg-info to manifest.
---
8.1
---
* Upgrade ``packaging`` to 14.5, giving preference to "rc" as designator for
release candidates over "c".
* PEP-440 warnings are now raised as their own class,
``pkg_resources.PEP440Warning``, instead of RuntimeWarning.
* Disabled warnings on empty versions.
-----
8.0.4
-----
* Upgrade ``packaging`` to 14.4, fixing an error where checking whether 2.0.5
is contained within >2.0dev gave a different result than for >2.0.dev, even
though normalization rules should have made them equal.
* Issue #296: Add warning when a version is parsed as legacy. This warning will
make it easier for developers to recognize deprecated version numbers.
-----
8.0.3
-----
* Issue #296: Restored support for ``__hash__`` on parse_version results.
-----
8.0.2
-----
* Issue #296: Restored support for ``__getitem__`` and sort operations on
parse_version result.
-----
8.0.1
-----
* Issue #296: Restored support for iteration over parse_version result, but
deprecated that usage with a warning. Fixes failure with buildout.
---
8.0
---
* Implement `PEP 440 <http://legacy.python.org/dev/peps/pep-0440/>`_ within
pkg_resources and setuptools. This change
deprecates some version numbers such that they will no longer be installable
without using the ``===`` escape hatch. See `the changes to test_resources
<https://bitbucket.org/pypa/setuptools/commits/dcd552da643c4448056de84c73d56da6d70769d5#chg-setuptools/tests/test_resources.py>`_
for specific examples of version numbers and specifiers that are no longer
supported. Setuptools now "vendors" the `packaging
<https://github.com/pypa/packaging>`_ library.
---
7.0
---
......
recursive-include setuptools *.py *.txt *.exe *.xml recursive-include setuptools *.py *.exe *.xml
recursive-include tests *.py *.c *.pyx *.txt recursive-include tests *.py *.c *.pyx
recursive-include setuptools/tests *.html entries* recursive-include setuptools/tests *.html
recursive-include setuptools/tests/svn_data *.zip
recursive-include docs *.py *.txt *.conf *.css *.css_t Makefile indexsidebar.html
recursive-include _markerlib *.py
recursive-include setuptools/_vendor *
recursive-include pkg_resources *.py *.txt
include *.py
include *.txt
include MANIFEST.in
......
empty:
exit 1
update-vendored:
rm -rf setuptools/_vendor/packaging
pip install -r setuptools/_vendor/vendored.txt -t setuptools/_vendor/
rm -rf setuptools/_vendor/*.{egg,dist}-info
...@@ -5,6 +5,8 @@ Installing and Using Setuptools
.. contents:: **Table of Contents**
`Change History <https://pythonhosted.org/setuptools/history.html>`_.
-------------------------
Installation Instructions
-------------------------
...@@ -83,6 +85,18 @@ Alternatively, Setuptools may be installed to a user-local path::
> wget https://bootstrap.pypa.io/ez_setup.py -O - | python - --user
Note that on some older systems (noted on Debian 6 and CentOS 5 installations),
`wget` may refuse to download `ez_setup.py`, complaining that the certificate common name `*.c.ssl.fastly.net`
does not match the host name `bootstrap.pypa.io`. In addition, the `ez_setup.py` script may then encounter similar problems using
`wget` internally to download `setuptools-x.y.zip`, complaining that the certificate common name of `www.python.org` does not match the
host name `pypi.python.org`. These are known issues related to a bug in older versions of `wget`
(see `Issue 59 <https://bitbucket.org/pypa/pypi/issue/59#comment-5881915>`_). If you happen to encounter them,
install Setuptools as follows::
> wget --no-check-certificate https://bootstrap.pypa.io/ez_setup.py
> python ez_setup.py --insecure
Unix including Mac OS X (curl)
==============================
......
pytest_plugins = 'setuptools.tests.fixtures'
...@@ -28,7 +28,7 @@ import setup as setup_script
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [] extensions = ['linkify']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
...@@ -44,7 +44,7 @@ master_doc = 'index'
# General information about the project.
project = 'Setuptools'
copyright = '2009-2013, The fellowship of the packaging' copyright = '2009-2014, The fellowship of the packaging'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
......
...@@ -45,7 +45,7 @@ ticket already exists for your issue. If not, create one. Try to think from
the perspective of the reader. Explain what behavior you expected, what you
got instead, and what factors might have contributed to the unexpected
behavior. In Bitbucket, surround a block of code or traceback with the triple
backtick "```" so that it is formatted nicely. backtick "\`\`\`" so that it is formatted nicely.
Filing a ticket provides a forum for justification, discussion, and
clarification. The ticket provides a record of the purpose for the change and
......
...@@ -286,6 +286,12 @@ that can be used to obtain ``Requirement`` objects describing the
project's core and optional dependencies.
``setup_requires.txt``
----------------------
Much like ``requires.txt``, except it represents the requirements
specified by the ``setup_requires`` parameter to the Distribution.
``dependency_links.txt``
------------------------
......
:tocdepth: 2
.. _changes:
History
*******
.. include:: ../CHANGES (links).txt
...@@ -16,6 +16,7 @@ Documentation content:
.. toctree::
:maxdepth: 2
history
roadmap
python3
using
......
...@@ -594,7 +594,7 @@ Requirements Parsing
requirement ::= project_name versionspec? extras?
versionspec ::= comparison version (',' comparison version)*
comparison ::= '<' | '<=' | '!=' | '==' | '>=' | '>' comparison ::= '<' | '<=' | '!=' | '==' | '>=' | '>' | '~=' | '==='
extras ::= '[' extralist? ']'
extralist ::= identifier (',' identifier)*
project_name ::= identifier
...@@ -646,13 +646,10 @@ Requirements Parsing
The ``Requirement`` object's version specifiers (``.specs``) are internally
sorted into ascending version order, and used to establish what ranges of
versions are acceptable. Adjacent redundant conditions are effectively
consolidated (e.g. ``">1, >2"`` produces the same results as ``">1"``, and consolidated (e.g. ``">1, >2"`` produces the same results as ``">2"``, and
``"<2,<3"`` produces the same results as``"<3"``). ``"!="`` versions are ``"<2,<3"`` produces the same results as``"<2"``). ``"!="`` versions are
excised from the ranges they fall within. The version being tested for
acceptability is then checked for membership in the resulting ranges.
(Note that providing conflicting conditions for the same version (e.g.
``"<2,>=2"`` or ``"==2,!=2"``) is meaningless and may therefore produce
bizarre results when compared with actual version number(s).)
``__eq__(other_requirement)``
A requirement compares equal to another requirement if they have
...@@ -681,10 +678,7 @@ Requirements Parsing
``specs``
A list of ``(op,version)`` tuples, sorted in ascending parsed-version
order. The `op` in each tuple is a comparison operator, represented as
a string. The `version` is the (unparsed) version number. The relative a string. The `version` is the (unparsed) version number.
order of tuples containing the same version numbers is undefined, since
having more than one operator for a given version is either redundant or
self-contradictory.
Entry Points
...@@ -967,7 +961,7 @@ version
``ValueError`` is raised.
parsed_version
The ``parsed_version`` is a tuple representing a "parsed" form of the The ``parsed_version`` is an object representing a "parsed" form of the
distribution's ``version``. ``dist.parsed_version`` is a shortcut for
calling ``parse_version(dist.version)``. It is used to compare or sort
distributions by version. (See the `Parsing Utilities`_ section below for
...@@ -1541,40 +1535,12 @@ Parsing Utilities
-----------------
``parse_version(version)``
Parse a project's version string, returning a value that can be used to Parsed a project's version string as defined by PEP 440. The returned
compare versions by chronological order. Semantically, the format is a value will be an object that represents the version. These objects may
rough cross between distutils' ``StrictVersion`` and ``LooseVersion`` be compared to each other and sorted. The sorting algorithm is as defined
classes; if you give it versions that would work with ``StrictVersion``, by PEP 440 with the addition that any version which is not a valid PEP 440
then they will compare the same way. Otherwise, comparisons are more like version will be considered less than any valid PEP 440 version and the
a "smarter" form of ``LooseVersion``. It is *possible* to create invalid versions will continue sorting using the original algorithm.
pathological version coding schemes that will fool this parser, but they
should be very rare in practice.
The returned value will be a tuple of strings. Numeric portions of the
version are padded to 8 digits so they will compare numerically, but
without relying on how numbers compare relative to strings. Dots are
dropped, but dashes are retained. Trailing zeros between alpha segments
or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
"2.4". Alphanumeric parts are lower-cased.
The algorithm assumes that strings like "-" and any alpha string that
alphabetically follows "final" represents a "patch level". So, "2.4-1"
is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
considered newer than "2.4-1", which in turn is newer than "2.4".
Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
come before "final" alphabetically) are assumed to be pre-release versions,
so that the version "2.4" is considered newer than "2.4a1". Any "-"
characters preceding a pre-release indicator are removed. (In versions of
setuptools prior to 0.6a9, "-" characters were not removed, leading to the
unintuitive result that "0.2-rc1" was considered a newer version than
"0.2".)
Finally, to handle miscellaneous cases, the strings "pre", "preview", and
"rc" are treated as if they were "c", i.e. as though they were release
candidates, and therefore are not as new as a version string that does not
contain them. And the string "dev" is treated as if it were an "@" sign;
that is, a version coming before even "a" or "alpha".
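A brief, illustrative sketch of the ordering rules described above::

    from pkg_resources import parse_version

    # PEP 440 ordering: pre-releases sort before the corresponding final release.
    assert parse_version("2.4a1") < parse_version("2.4")

    # Versions that are not valid PEP 440 sort below any valid PEP 440 version.
    assert parse_version("not-a-version") < parse_version("0.0")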
.. _yield_lines():
...@@ -1629,10 +1595,12 @@ Parsing Utilities
See ``to_filename()``.
``safe_version(version)``
Similar to ``safe_name()`` except that spaces in the input become dots, and This will return the normalized form of any PEP 440 version, if the version
dots are allowed to exist in the output. As with ``safe_name()``, if you string is not PEP 440 compatible, then it is similar to ``safe_name()``
are generating a filename from this you should replace any "-" characters except that spaces in the input become dots, and dots are allowed to exist
in the output with underscores. in the output. As with ``safe_name()``, if you are generating a filename
from this you should replace any "-" characters in the output with
underscores.
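A small usage sketch of the behavior described above::

    from pkg_resources import safe_version

    print(safe_version("2.4.RC1"))    # valid PEP 440, normalized: '2.4rc1'
    print(safe_version("0.5 alpha"))  # not PEP 440, legacy behavior: '0.5.alpha'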
``safe_extra(extra)``
Return a "safe" form of an extra's name, suitable for use in a requirement
......
...@@ -17,6 +17,43 @@ revision slated for release::
python -m jaraco.packaging.release
Bootstrap Bookmark
------------------
Setuptools has a bootstrap script (ez_setup.py) which is hosted in the
repository and must be updated with each release (to bump the default version).
The "published" version of the script is the one indicated by the ``bootstrap``
bookmark (Mercurial) or branch (Git).
Therefore, the latest bootstrap script can be retrieved by checking out the
repository at that bookmark. It's also possible to get the bootstrap script for
any particular release by grabbing the script from that tagged release.
The officially-published location of the bootstrap script is hosted on Python
infrastructure (#python-infra on freenode) at https://bootstrap.pypa.io and
is updated every fifteen minutes from the bootstrap script. Sometimes,
especially when the bootstrap script is rolled back, this
process doesn't work as expected and requires manual intervention.
Release Frequency
-----------------
Some have asked why Setuptools is released so frequently. Because Setuptools
uses a mechanical release process, it's very easy to make releases whenever the
code is stable (tests are passing). As a result, the philosophy is to release
early and often.
While some find the frequent releases somewhat surprising, they only empower
the user. Although releases are made frequently, users can choose the frequency
at which they use those releases. If instead Setuptools contributions were only
released in batches, the user would be constrained to only use Setuptools when
those official releases were made. With frequent releases, the user can govern
exactly how often he wishes to update.
Frequent releases also obviate the need for dev or beta releases in most
cases. Because releases are made early and often, bugs are discovered and
corrected quickly, in many cases before most users have encountered them.
Release Managers
----------------
......
...@@ -308,7 +308,7 @@ unless you need the associated ``setuptools`` feature.
(Note: projects listed in ``setup_requires`` will NOT be automatically
installed on the system where the setup script is being run. They are
simply downloaded to the setup directory if they're not locally available simply downloaded to the ./.eggs directory if they're not locally available
already. If you want them to be installed, as well as being available
when the setup script is run, you should add them to ``install_requires``
**and** ``setup_requires``.)
......
...@@ -8,3 +8,6 @@ it at the very beginning of `setup.py` like this::
from ez_setup import use_setuptools
use_setuptools()
More info on `ez_setup.py` can be found at `the project home page
<https://pypi.python.org/pypi/setuptools>`_.
...@@ -36,7 +36,7 @@ try:
except ImportError:
USER_SITE = None
DEFAULT_VERSION = "7.1" DEFAULT_VERSION = "10.2.2"
DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
def _python_cmd(*args):
......
"""
Sphinx plugin to add links to the changelog.
"""
import re
import os
link_patterns = [
r"(Issue )?#(?P<issue>\d+)",
r"Pull Request ?#(?P<pull_request>\d+)",
r"Distribute #(?P<distribute>\d+)",
r"Buildout #(?P<buildout>\d+)",
r"Old Setuptools #(?P<old_setuptools>\d+)",
r"Jython #(?P<jython>\d+)",
r"Python #(?P<python>\d+)",
]
issue_urls = dict(
pull_request='https://bitbucket.org'
'/pypa/setuptools/pull-request/{pull_request}',
issue='https://bitbucket.org/pypa/setuptools/issue/{issue}',
distribute='https://bitbucket.org/tarek/distribute/issue/{distribute}',
buildout='https://github.com/buildout/buildout/issues/{buildout}',
old_setuptools='http://bugs.python.org/setuptools/issue{old_setuptools}',
jython='http://bugs.jython.org/issue{jython}',
python='http://bugs.python.org/issue{python}',
)
def _linkify(source, dest):
pattern = '|'.join(link_patterns)
with open(source) as source:
out = re.sub(pattern, replacer, source.read())
with open(dest, 'w') as dest:
dest.write(out)
def replacer(match):
text = match.group(0)
match_dict = match.groupdict()
for key in match_dict:
if match_dict[key]:
url = issue_urls[key].format(**match_dict)
return "`{text} <{url}>`_".format(text=text, url=url)
def setup(app):
_linkify('CHANGES.txt', 'CHANGES (links).txt')
app.connect('build-finished', remove_file)
def remove_file(app, exception):
os.remove('CHANGES (links).txt')
...@@ -14,6 +14,8 @@ The package resource API is designed to work with normal filesystem packages,
method.
"""
from __future__ import absolute_import
import sys
import os
import io
...@@ -73,6 +75,136 @@ try:
except ImportError:
pass
try:
import pkg_resources._vendor.packaging.version
import pkg_resources._vendor.packaging.specifiers
packaging = pkg_resources._vendor.packaging
except ImportError:
# fallback to naturally-installed version; allows system packagers to
# omit vendored packages.
import packaging.version
import packaging.specifiers
class PEP440Warning(RuntimeWarning):
"""
Used when there is an issue with a version or specifier not complying with
PEP 440.
"""
class _SetuptoolsVersionMixin(object):
def __hash__(self):
return super(_SetuptoolsVersionMixin, self).__hash__()
def __lt__(self, other):
if isinstance(other, tuple):
return tuple(self) < other
else:
return super(_SetuptoolsVersionMixin, self).__lt__(other)
def __le__(self, other):
if isinstance(other, tuple):
return tuple(self) <= other
else:
return super(_SetuptoolsVersionMixin, self).__le__(other)
def __eq__(self, other):
if isinstance(other, tuple):
return tuple(self) == other
else:
return super(_SetuptoolsVersionMixin, self).__eq__(other)
def __ge__(self, other):
if isinstance(other, tuple):
return tuple(self) >= other
else:
return super(_SetuptoolsVersionMixin, self).__ge__(other)
def __gt__(self, other):
if isinstance(other, tuple):
return tuple(self) > other
else:
return super(_SetuptoolsVersionMixin, self).__gt__(other)
def __ne__(self, other):
if isinstance(other, tuple):
return tuple(self) != other
else:
return super(_SetuptoolsVersionMixin, self).__ne__(other)
def __getitem__(self, key):
return tuple(self)[key]
def __iter__(self):
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {
'pre': 'c',
'preview': 'c',
'-': 'final-',
'rc': 'c',
'dev': '@',
}.get
def _parse_version_parts(s):
for part in component_re.split(s):
part = replace(part, part)
if not part or part == '.':
continue
if part[:1] in '0123456789':
# pad for numeric comparison
yield part.zfill(8)
else:
yield '*'+part
# ensure that alpha/beta/candidate are before final
yield '*final'
def old_parse_version(s):
parts = []
for part in _parse_version_parts(s.lower()):
if part.startswith('*'):
# remove '-' before a prerelease tag
if part < '*final':
while parts and parts[-1] == '*final-':
parts.pop()
# remove trailing zeros from each series of numeric parts
while parts and parts[-1] == '00000000':
parts.pop()
parts.append(part)
return tuple(parts)
# Warn for use of this function
warnings.warn(
"You have iterated over the result of "
"pkg_resources.parse_version. This is a legacy behavior which is "
"inconsistent with the new version class introduced in setuptools "
"8.0. That class should be used directly instead of attempting to "
"iterate over the result.",
RuntimeWarning,
stacklevel=1,
)
for part in old_parse_version(str(self)):
yield part
class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version):
pass
class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin,
packaging.version.LegacyVersion):
pass
def parse_version(v):
try:
return SetuptoolsVersion(v)
except packaging.version.InvalidVersion:
return SetuptoolsLegacyVersion(v)
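A minimal usage sketch of the fallback above; the version strings are arbitrary examples::

    import warnings
    from pkg_resources import parse_version

    print(type(parse_version("1.2.3")).__name__)        # SetuptoolsVersion
    print(type(parse_version("1.2.3-blah")).__name__)   # SetuptoolsLegacyVersion

    # Comparisons against old-style tuples still work through the mixin, but
    # iterating the result (e.g. via tuple()) now emits a RuntimeWarning.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", RuntimeWarning)
        print(parse_version("1.2.3") == tuple(parse_version("1.2.3")))   # True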
_state_vars = {}
...@@ -153,6 +285,9 @@ __all__ = [
'ResolutionError', 'VersionConflict', 'DistributionNotFound',
'UnknownExtra', 'ExtractionError',
# Warnings
'PEP440Warning',
# Parsing functions and string utilities
'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
...@@ -1156,13 +1291,15 @@ def safe_name(name):
def safe_version(version):
"""Convert an arbitrary string to a standard version string
Spaces become dots, and all other non-alphanumeric characters become
dashes, with runs of multiple dashes condensed to a single dash.
""" """
version = version.replace(' ','.') Convert an arbitrary string to a standard version string
return re.sub('[^A-Za-z0-9.]+', '-', version) """
try:
# normalize the version
return str(packaging.version.Version(version))
except packaging.version.InvalidVersion:
version = version.replace(' ','.')
return re.sub('[^A-Za-z0-9.]+', '-', version)
def safe_extra(extra):
...@@ -2080,7 +2217,7 @@ CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match
# Distribution or extra
DISTRO = re.compile(r"\s*((\w|[-.])+)").match
# ver. info
VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match VERSION = re.compile(r"\s*(<=?|>=?|===?|!=|~=)\s*((\w|[-.*_!+])+)").match
# comma between items
COMMA = re.compile(r"\s*,").match
OBRACKET = re.compile(r"\s*\[").match
...@@ -2092,67 +2229,6 @@ EGG_NAME = re.compile(
re.VERBOSE | re.IGNORECASE
).match
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
def _parse_version_parts(s):
for part in component_re.split(s):
part = replace(part, part)
if not part or part=='.':
continue
if part[:1] in '0123456789':
# pad for numeric comparison
yield part.zfill(8)
else:
yield '*'+part
# ensure that alpha/beta/candidate are before final
yield '*final'
def parse_version(s):
"""Convert a version string to a chronologically-sortable key
This is a rough cross between distutils' StrictVersion and LooseVersion;
if you give it versions that would work with StrictVersion, then it behaves
the same; otherwise it acts like a slightly-smarter LooseVersion. It is
*possible* to create pathological version coding schemes that will fool
this parser, but they should be very rare in practice.
The returned value will be a tuple of strings. Numeric portions of the
version are padded to 8 digits so they will compare numerically, but
without relying on how numbers compare relative to strings. Dots are
dropped, but dashes are retained. Trailing zeros between alpha segments
or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
"2.4". Alphanumeric parts are lower-cased.
The algorithm assumes that strings like "-" and any alpha string that
alphabetically follows "final" represents a "patch level". So, "2.4-1"
is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
considered newer than "2.4-1", which in turn is newer than "2.4".
Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
come before "final" alphabetically) are assumed to be pre-release versions,
so that the version "2.4" is considered newer than "2.4a1".
Finally, to handle miscellaneous cases, the strings "pre", "preview", and
"rc" are treated as if they were "c", i.e. as though they were release
candidates, and therefore are not as new as a version string that does not
contain them, and "dev" is replaced with an '@' so that it sorts lower than
than any other pre-release tag.
"""
parts = []
for part in _parse_version_parts(s.lower()):
if part.startswith('*'):
# remove '-' before a prerelease tag
if part < '*final':
while parts and parts[-1] == '*final-':
parts.pop()
# remove trailing zeros from each series of numeric parts
while parts and parts[-1]=='00000000':
parts.pop()
parts.append(part)
return tuple(parts)
class EntryPoint(object):
"""Object representing an advertised importable object"""
...@@ -2180,14 +2256,20 @@ class EntryPoint(object):
def load(self, require=True, env=None, installer=None):
if require:
self.require(env, installer)
entry = __import__(self.module_name, globals(), globals(), else:
['__name__']) warnings.warn(
for attr in self.attrs: "`require` parameter is deprecated. Use "
try: "EntryPoint._load instead.",
entry = getattr(entry, attr) DeprecationWarning,
except AttributeError: )
raise ImportError("%r has no %r attribute" % (entry, attr)) return self._load()
return entry
def _load(self):
module = __import__(self.module_name, fromlist=['__name__'], level=0)
try:
return functools.reduce(getattr, self.attrs, module)
except AttributeError as exc:
raise ImportError(str(exc))
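An illustrative sketch of the new behavior; the entry point below targets the
standard library's ``json.dumps`` purely as a stand-in::

    import json
    import warnings
    from pkg_resources import EntryPoint

    ep = EntryPoint.parse("serialize = json:dumps")

    # _load() imports the module and resolves the attributes without requiring
    # the distribution's dependencies.
    print(ep._load() is json.dumps)   # True

    # load(require=False) still works but now emits a DeprecationWarning.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        ep.load(require=False)
    print(any(issubclass(w.category, DeprecationWarning) for w in caught))   # True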
def require(self, env=None, installer=None):
if self.extras and not self.dist:
...@@ -2196,6 +2278,15 @@ class EntryPoint(object):
items = working_set.resolve(reqs, env, installer)
list(map(working_set.add, items))
pattern = re.compile(
r'\s*'
r'(?P<name>[+\w. -]+?)\s*'
r'=\s*'
r'(?P<module>[\w.]+)\s*'
r'(:\s*(?P<attr>[\w.]+))?\s*'
r'(?P<extras>\[.*\])?\s*$'
)
@classmethod
def parse(cls, src, dist=None):
"""Parse a single entry point from string `src`
...@@ -2207,25 +2298,23 @@ class EntryPoint(object):
The entry name and module name are required, but the ``:attrs`` and
``[extras]`` parts are optional
"""
try: m = cls.pattern.match(src)
attrs = extras = () if not m:
name, value = src.split('=', 1)
if '[' in value:
value, extras = value.split('[', 1)
req = Requirement.parse("x[" + extras)
if req.specs:
raise ValueError
extras = req.extras
if ':' in value:
value, attrs = value.split(':', 1)
if not MODULE(attrs.rstrip()):
raise ValueError
attrs = attrs.rstrip().split('.')
except ValueError:
msg = "EntryPoint must be in 'name=module:attrs [extras]' format" msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
raise ValueError(msg, src) raise ValueError(msg, src)
else: res = m.groupdict()
return cls(name.strip(), value.strip(), attrs, extras, dist) extras = cls._parse_extras(res['extras'])
attrs = res['attr'].split('.') if res['attr'] else ()
return cls(res['name'], res['module'], attrs, extras, dist)
@classmethod
def _parse_extras(cls, extras_spec):
if not extras_spec:
return ()
req = Requirement.parse('x' + extras_spec)
if req.specs:
raise ValueError()
return req.extras
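For example (``mypkg`` and the ``fancy`` extra are made-up names), the
regex-based parser accepts the same ``name = module:attrs [extras]`` syntax as
before::

    from pkg_resources import EntryPoint

    ep = EntryPoint.parse("my-cmd = mypkg.cli:main [fancy]")
    print(ep.name, ep.module_name, ep.attrs)   # my-cmd mypkg.cli ('main',)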
@classmethod
def parse_group(cls, group, lines, dist=None):
...@@ -2305,7 +2394,7 @@ class Distribution(object):
@property
def hashcmp(self):
return (
getattr(self, 'parsed_version', ()), self.parsed_version,
self.precedence,
self.key,
_remove_md5_fragment(self.location),
...@@ -2351,11 +2440,29 @@ class Distribution(object):
@property
def parsed_version(self):
try: if not hasattr(self, "_parsed_version"):
return self._parsed_version self._parsed_version = parse_version(self.version)
except AttributeError: if isinstance(
self._parsed_version = pv = parse_version(self.version) self._parsed_version, packaging.version.LegacyVersion):
return pv # While an empty version is technically a legacy version and
# is not a valid PEP 440 version, it's also unlikely to
# actually come from someone and instead it is more likely that
# it comes from setuptools attempting to parse a filename and
# including it in the list. So for that we'll gate this warning
# on if the version is anything at all or not.
if self.version:
warnings.warn(
"'%s (%s)' is being parsed as a legacy, non PEP 440, "
"version. You may find odd behavior and sort order. "
"In particular it will be sorted as less than 0.0. It "
"is recommend to migrate to PEP 440 compatible "
"versions." % (
self.project_name, self.version,
),
PEP440Warning,
)
return self._parsed_version
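A sketch of when the warning fires; the ``Distribution`` is constructed
directly here purely for illustration::

    import warnings
    from pkg_resources import Distribution, PEP440Warning

    dist = Distribution(project_name="example", version="2014-snapshot")
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        dist.parsed_version   # non PEP 440 version -> PEP440Warning
    print(any(issubclass(w.category, PEP440Warning) for w in caught))   # True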
@property
def version(self):
...@@ -2460,7 +2567,12 @@ class Distribution(object):
def as_requirement(self):
"""Return a ``Requirement`` that matches this distribution exactly"""
return Requirement.parse('%s==%s' % (self.project_name, self.version)) if isinstance(self.parsed_version, packaging.version.Version):
spec = "%s==%s" % (self.project_name, self.parsed_version)
else:
spec = "%s===%s" % (self.project_name, self.parsed_version)
return Requirement.parse(spec)
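Illustrative only (the project name and versions are made up)::

    from pkg_resources import Distribution

    print(Distribution(project_name="example", version="1.0").as_requirement())
    # example==1.0
    print(Distribution(project_name="example", version="1.0-custom").as_requirement())
    # example===1.0-custom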
def load_entry_point(self, group, name):
"""Return the `name` entry point of `group` or raise ImportError"""
...@@ -2712,7 +2824,7 @@ def parse_requirements(strs):
line, p, specs = scan_list(VERSION, LINE_END, line, p, (1, 2),
"version spec")
specs = [(op, safe_version(val)) for op, val in specs] specs = [(op, val) for op, val in specs]
yield Requirement(project_name, specs, extras)
...@@ -2721,26 +2833,23 @@ class Requirement:
"""DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
self.unsafe_name, project_name = project_name, safe_name(project_name)
self.project_name, self.key = project_name, project_name.lower()
index = [ self.specifier = packaging.specifiers.SpecifierSet(
(parse_version(v), state_machine[op], op, v) ",".join(["".join([x, y]) for x, y in specs])
for op, v in specs )
] self.specs = specs
index.sort() self.extras = tuple(map(safe_extra, extras))
self.specs = [(op, ver) for parsed, trans, op, ver in index]
self.index, self.extras = index, tuple(map(safe_extra, extras))
self.hashCmp = (
self.key,
tuple((op, parsed) for parsed, trans, op, ver in index), self.specifier,
frozenset(self.extras),
)
self.__hash = hash(self.hashCmp)
def __str__(self):
specs = ','.join([''.join(s) for s in self.specs])
extras = ','.join(self.extras)
if extras:
extras = '[%s]' % extras
return '%s%s%s' % (self.project_name, extras, specs) return '%s%s%s' % (self.project_name, extras, self.specifier)
def __eq__(self, other):
return (
...@@ -2752,29 +2861,13 @@ class Requirement:
if isinstance(item, Distribution):
if item.key != self.key:
return False
# only get if we need it
if self.index: item = item.version
item = item.parsed_version
elif isinstance(item, string_types): # Allow prereleases always in order to match the previous behavior of
item = parse_version(item) # this method. In the future this should be smarter and follow PEP 440
last = None # more accurately.
# -1, 0, 1 return self.specifier.contains(item, prereleases=True)
compare = lambda a, b: (a > b) - (a < b)
for parsed, trans, op, ver in self.index:
# Indexing: 0, 1, -1
action = trans[compare(item, parsed)]
if action == 'F':
return False
elif action == 'T':
return True
elif action == '+':
last = True
elif action == '-' or last is None:
last = False
# no rules encountered
if last is None:
last = True
return last
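The membership check now delegates to the vendored specifier set; a short
sketch (the project name ``example`` is hypothetical)::

    from pkg_resources import Requirement

    req = Requirement.parse("example>=1.0")
    print("1.5" in req)     # True
    # Pre-releases are still accepted, matching the previous behavior.
    print("2.0b1" in req)   # True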
def __hash__(self):
return self.__hash
...@@ -2790,16 +2883,6 @@ class Requirement:
raise ValueError("Expected only one requirement", s)
raise ValueError("No requirements found", s)
state_machine = {
# =><
'<': '--T',
'<=': 'T-T',
'>': 'F+F',
'>=': 'T+F',
'==': 'T..',
'!=': 'F++',
}
def _get_mro(cls):
"""Get an mro for a type or classic class"""
...@@ -2868,6 +2951,13 @@ def _mkstemp(*args,**kw):
os.open = old_open
# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
# Set up global resource manager (deliberately not state-saved)
_manager = ResourceManager()
def _initialize(g):
......
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"
__version__ = "14.5"
__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2014 %s" % __author__
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from .__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import sys
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
# flake8: noqa
if PY3:
string_types = str,
else:
string_types = basestring,
def with_metaclass(meta, *bases):
"""
Create a base class with a metaclass.
"""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
class Infinity(object):
def __repr__(self):
return "Infinity"
def __hash__(self):
return hash(repr(self))
def __lt__(self, other):
return False
def __le__(self, other):
return False
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not isinstance(other, self.__class__)
def __gt__(self, other):
return True
def __ge__(self, other):
return True
def __neg__(self):
return NegativeInfinity
Infinity = Infinity()
class NegativeInfinity(object):
def __repr__(self):
return "-Infinity"
def __hash__(self):
return hash(repr(self))
def __lt__(self, other):
return True
def __le__(self, other):
return True
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not isinstance(other, self.__class__)
def __gt__(self, other):
return False
def __ge__(self, other):
return False
def __neg__(self):
return Infinity
NegativeInfinity = NegativeInfinity()
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import abc
import functools
import itertools
import re
from ._compat import string_types, with_metaclass
from .version import Version, LegacyVersion, parse
class InvalidSpecifier(ValueError):
"""
An invalid specifier was found, users should refer to PEP 440.
"""
class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
@abc.abstractmethod
def __str__(self):
"""
Returns the str representation of this Specifier like object. This
should be representative of the Specifier itself.
"""
@abc.abstractmethod
def __hash__(self):
"""
Returns a hash value for this Specifier like object.
"""
@abc.abstractmethod
def __eq__(self, other):
"""
Returns a boolean representing whether or not the two Specifier like
objects are equal.
"""
@abc.abstractmethod
def __ne__(self, other):
"""
Returns a boolean representing whether or not the two Specifier like
objects are not equal.
"""
@abc.abstractproperty
def prereleases(self):
"""
Returns whether or not pre-releases as a whole are allowed by this
specifier.
"""
@prereleases.setter
def prereleases(self, value):
"""
Sets whether or not pre-releases as a whole are allowed by this
specifier.
"""
@abc.abstractmethod
def contains(self, item, prereleases=None):
"""
Determines if the given item is contained within this specifier.
"""
@abc.abstractmethod
def filter(self, iterable, prereleases=None):
"""
Takes an iterable of items and filters them so that only items which
are contained within this specifier are allowed in it.
"""
class _IndividualSpecifier(BaseSpecifier):
_operators = {}
def __init__(self, spec="", prereleases=None):
match = self._regex.search(spec)
if not match:
raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
self._spec = (
match.group("operator").strip(),
match.group("version").strip(),
)
# Store whether or not this Specifier should accept prereleases
self._prereleases = prereleases
def __repr__(self):
pre = (
", prereleases={0!r}".format(self.prereleases)
if self._prereleases is not None
else ""
)
return "<{0}({1!r}{2})>".format(
self.__class__.__name__,
str(self),
pre,
)
def __str__(self):
return "{0}{1}".format(*self._spec)
def __hash__(self):
return hash(self._spec)
def __eq__(self, other):
if isinstance(other, string_types):
try:
other = self.__class__(other)
except InvalidSpecifier:
return NotImplemented
elif not isinstance(other, self.__class__):
return NotImplemented
return self._spec == other._spec
def __ne__(self, other):
if isinstance(other, string_types):
try:
other = self.__class__(other)
except InvalidSpecifier:
return NotImplemented
elif not isinstance(other, self.__class__):
return NotImplemented
return self._spec != other._spec
def _get_operator(self, op):
return getattr(self, "_compare_{0}".format(self._operators[op]))
def _coerce_version(self, version):
if not isinstance(version, (LegacyVersion, Version)):
version = parse(version)
return version
@property
def prereleases(self):
return self._prereleases
@prereleases.setter
def prereleases(self, value):
self._prereleases = value
def contains(self, item, prereleases=None):
# Determine if prereleases are to be allowed or not.
if prereleases is None:
prereleases = self.prereleases
# Normalize item to a Version or LegacyVersion; this allows us to have
# a shortcut for ``"2.0" in Specifier(">=2")``.
item = self._coerce_version(item)
# Determine if we should be supporting prereleases in this specifier
# or not; if we do not support prereleases, then we can short-circuit
# the logic if this version is a prerelease.
if item.is_prerelease and not prereleases:
return False
# Actually do the comparison to determine if this item is contained
# within this Specifier or not.
return self._get_operator(self._spec[0])(item, self._spec[1])
def filter(self, iterable, prereleases=None):
yielded = False
found_prereleases = []
kw = {"prereleases": prereleases if prereleases is not None else True}
# Attempt to iterate over all the values in the iterable and if any of
# them match, yield them.
for version in iterable:
parsed_version = self._coerce_version(version)
if self.contains(parsed_version, **kw):
# If our version is a prerelease, and we were not set to allow
# prereleases, then we'll store it for later in case nothing
# else matches this specifier.
if (parsed_version.is_prerelease
and not (prereleases or self.prereleases)):
found_prereleases.append(version)
# Either this is not a prerelease, or we should have been
# accepting prereleases from the beginning.
else:
yielded = True
yield version
# Now that we've iterated over everything, determine if we've yielded
# any values, and if we have not and we have any prereleases stored up
# then we will go ahead and yield the prereleases.
if not yielded and found_prereleases:
for version in found_prereleases:
yield version
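A usage sketch of ``contains`` and ``filter``; the import mirrors the
vendored/fallback arrangement used by pkg_resources above::

    try:
        from pkg_resources._vendor.packaging.specifiers import Specifier
    except ImportError:
        from packaging.specifiers import Specifier

    spec = Specifier(">=1.0")
    print(spec.contains("1.2"))      # True
    print(spec.contains("2.0a1"))    # False: pre-releases excluded by default
    print(list(spec.filter(["0.9", "1.1", "2.0a1"])))   # ['1.1']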
class LegacySpecifier(_IndividualSpecifier):
_regex = re.compile(
r"""
^
\s*
(?P<operator>(==|!=|<=|>=|<|>))
\s*
(?P<version>
[^\s]* # We just match everything, except for whitespace since this
# is a "legacy" specifier and the version string can be just
# about anything.
)
\s*
$
""",
re.VERBOSE | re.IGNORECASE,
)
_operators = {
"==": "equal",
"!=": "not_equal",
"<=": "less_than_equal",
">=": "greater_than_equal",
"<": "less_than",
">": "greater_than",
}
def _coerce_version(self, version):
if not isinstance(version, LegacyVersion):
version = LegacyVersion(str(version))
return version
def _compare_equal(self, prospective, spec):
return prospective == self._coerce_version(spec)
def _compare_not_equal(self, prospective, spec):
return prospective != self._coerce_version(spec)
def _compare_less_than_equal(self, prospective, spec):
return prospective <= self._coerce_version(spec)
def _compare_greater_than_equal(self, prospective, spec):
return prospective >= self._coerce_version(spec)
def _compare_less_than(self, prospective, spec):
return prospective < self._coerce_version(spec)
def _compare_greater_than(self, prospective, spec):
return prospective > self._coerce_version(spec)
def _require_version_compare(fn):
@functools.wraps(fn)
def wrapped(self, prospective, spec):
if not isinstance(prospective, Version):
return False
return fn(self, prospective, spec)
return wrapped
class Specifier(_IndividualSpecifier):
_regex = re.compile(
r"""
^
\s*
(?P<operator>(~=|==|!=|<=|>=|<|>|===))
(?P<version>
(?:
# The identity operators allow for an escape hatch that will
# do an exact string match of the version you wish to install.
# This will not be parsed by PEP 440 and we cannot determine
# any semantic meaning from it. This operator is discouraged
# but included entirely as an escape hatch.
(?<====) # Only match for the identity operator
\s*
[^\s]* # We just match everything, except for whitespace
# since we are only testing for strict identity.
)
|
(?:
# The (non)equality operators allow for wild card and local
# versions to be specified so we have to define these two
# operators separately to enable that.
(?<===|!=) # Only match for equals and not equals
\s*
v?
(?:[0-9]+!)? # epoch
[0-9]+(?:\.[0-9]+)* # release
(?: # pre release
[-_\.]?
(a|b|c|rc|alpha|beta|pre|preview)
[-_\.]?
[0-9]*
)?
(?: # post release
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
)?
# You cannot use a wild card and a dev or local version
# together so group them with a | and make them optional.
(?:
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
(?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
|
\.\* # Wild card syntax of .*
)?
)
|
(?:
# The compatible operator requires at least two digits in the
# release segment.
(?<=~=) # Only match for the compatible operator
\s*
v?
(?:[0-9]+!)? # epoch
[0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
(?: # pre release
[-_\.]?
(a|b|c|rc|alpha|beta|pre|preview)
[-_\.]?
[0-9]*
)?
(?: # post release
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
)?
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
)
|
(?:
# All other operators only allow a sub set of what the
# (non)equality operators do. Specifically they do not allow
# local versions to be specified nor do they allow the prefix
# matching wild cards.
(?<!==|!=|~=) # We have special cases for these
# operators so we want to make sure they
# don't match here.
\s*
v?
(?:[0-9]+!)? # epoch
[0-9]+(?:\.[0-9]+)* # release
(?: # pre release
[-_\.]?
(a|b|c|rc|alpha|beta|pre|preview)
[-_\.]?
[0-9]*
)?
(?: # post release
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
)?
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
)
)
\s*
$
""",
re.VERBOSE | re.IGNORECASE,
)
_operators = {
"~=": "compatible",
"==": "equal",
"!=": "not_equal",
"<=": "less_than_equal",
">=": "greater_than_equal",
"<": "less_than",
">": "greater_than",
"===": "arbitrary",
}
@_require_version_compare
def _compare_compatible(self, prospective, spec):
# Compatible releases have an equivalent combination of >= and ==. That
# is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
# implement this in terms of the other specifiers instead of
# implementing it ourselves. The only thing we need to do is construct
# the other specifiers.
# We want everything but the last item in the version, but we want to
# ignore post and dev releases and we want to treat the pre-release as
# its own separate segment.
prefix = ".".join(
list(
itertools.takewhile(
lambda x: (not x.startswith("post")
and not x.startswith("dev")),
_version_split(spec),
)
)[:-1]
)
# Add the prefix notation to the end of our string
prefix += ".*"
return (self._get_operator(">=")(prospective, spec)
and self._get_operator("==")(prospective, prefix))
@_require_version_compare
def _compare_equal(self, prospective, spec):
# We need special logic to handle prefix matching
if spec.endswith(".*"):
# Split the spec out by dots, and pretend that there is an implicit
# dot in between a release segment and a pre-release segment.
spec = _version_split(spec[:-2]) # Remove the trailing .*
# Split the prospective version out by dots, and pretend that there
# is an implicit dot in between a release segment and a pre-release
# segment.
prospective = _version_split(str(prospective))
# Shorten the prospective version to be the same length as the spec
# so that we can determine if the specifier is a prefix of the
# prospective version or not.
prospective = prospective[:len(spec)]
# Pad out our two sides with zeros so that they both equal the same
# length.
spec, prospective = _pad_version(spec, prospective)
else:
# Convert our spec string into a Version
spec = Version(spec)
# If the specifier does not have a local segment, then we want to
# act as if the prospective version also does not have a local
# segment.
if not spec.local:
prospective = Version(prospective.public)
return prospective == spec
@_require_version_compare
def _compare_not_equal(self, prospective, spec):
return not self._compare_equal(prospective, spec)
@_require_version_compare
def _compare_less_than_equal(self, prospective, spec):
return prospective <= Version(spec)
@_require_version_compare
def _compare_greater_than_equal(self, prospective, spec):
return prospective >= Version(spec)
@_require_version_compare
def _compare_less_than(self, prospective, spec):
# Less than are defined as exclusive operators, this implies that
# pre-releases do not match for the same series as the spec. This is
# implemented by making <V imply !=V.*.
spec = Version(spec)
return (prospective < spec
and self._get_operator("!=")(prospective, str(spec) + ".*"))
@_require_version_compare
def _compare_greater_than(self, prospective, spec):
# Greater than are defined as exclusive operators, this implies that
# pre-releases do not match for the same series as the spec. This is
# implemented by making >V imply !=V.*.
spec = Version(spec)
return (prospective > spec
and self._get_operator("!=")(prospective, str(spec) + ".*"))
def _compare_arbitrary(self, prospective, spec):
return str(prospective).lower() == str(spec).lower()
@property
def prereleases(self):
# If there is an explicit prereleases set for this, then we'll just
# blindly use that.
if self._prereleases is not None:
return self._prereleases
        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are, whether they include an explicit
        # prerelease.
operator, version = self._spec
if operator in ["==", ">=", "<=", "~="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove it before parsing.
if operator == "==" and version.endswith(".*"):
version = version[:-2]
            # Parse the version, and if it is a pre-release then this
            # specifier allows pre-releases.
if parse(version).is_prerelease:
return True
return False
@prereleases.setter
def prereleases(self, value):
self._prereleases = value
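# Illustrative sketch, not part of the original module: a few doctest-style
# examples of the behaviour the comparison methods above imply. Assumes the
# ``Specifier`` class defined in this file.
#
#   >>> Specifier("~=2.2").contains("2.3")      # ~=2.2 acts like >=2.2,==2.*
#   True
#   >>> Specifier("~=2.2").contains("3.0")
#   False
#   >>> Specifier("==1.4.*").contains("1.4.5")  # prefix matching
#   True
#   >>> Specifier("!=1.4.*").contains("1.4.5")
#   False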
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
def _version_split(version):
result = []
for item in version.split("."):
match = _prefix_regex.search(item)
if match:
result.extend(match.groups())
else:
result.append(item)
return result
def _pad_version(left, right):
left_split, right_split = [], []
# Get the release segment of our versions
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
# Get the rest of our versions
    left_split.append(left[len(left_split[0]):])
    right_split.append(right[len(right_split[0]):])
# Insert our padding
left_split.insert(
1,
["0"] * max(0, len(right_split[0]) - len(left_split[0])),
)
right_split.insert(
1,
["0"] * max(0, len(left_split[0]) - len(right_split[0])),
)
return (
list(itertools.chain(*left_split)),
list(itertools.chain(*right_split)),
)
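# Illustrative sketch, not part of the original module: the two helpers above
# back the ``.*`` prefix matching in ``_compare_equal``. Roughly:
#
#   >>> _version_split("1.4.2rc1")       # pre-release split into its own item
#   ['1', '4', '2', 'rc1']
#   >>> _pad_version(['1'], ['1', '2'])  # zero-pad the shorter release segment
#   (['1', '0'], ['1', '2'])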
class SpecifierSet(BaseSpecifier):
def __init__(self, specifiers="", prereleases=None):
        # Split on , to break each individual specifier into its own item, and
        # strip each item to remove leading/trailing whitespace.
specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
        # Parse each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
parsed = set()
for specifier in specifiers:
try:
parsed.add(Specifier(specifier))
except InvalidSpecifier:
parsed.add(LegacySpecifier(specifier))
# Turn our parsed specifiers into a frozen set and save them for later.
self._specs = frozenset(parsed)
# Store our prereleases value so we can use it later to determine if
# we accept prereleases or not.
self._prereleases = prereleases
def __repr__(self):
pre = (
", prereleases={0!r}".format(self.prereleases)
if self._prereleases is not None
else ""
)
return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
def __str__(self):
return ",".join(sorted(str(s) for s in self._specs))
def __hash__(self):
return hash(self._specs)
def __and__(self, other):
if isinstance(other, string_types):
other = SpecifierSet(other)
elif not isinstance(other, SpecifierSet):
return NotImplemented
specifier = SpecifierSet()
specifier._specs = frozenset(self._specs | other._specs)
if self._prereleases is None and other._prereleases is not None:
specifier._prereleases = other._prereleases
elif self._prereleases is not None and other._prereleases is None:
specifier._prereleases = self._prereleases
elif self._prereleases == other._prereleases:
specifier._prereleases = self._prereleases
else:
raise ValueError(
"Cannot combine SpecifierSets with True and False prerelease "
"overrides."
)
return specifier
def __eq__(self, other):
if isinstance(other, string_types):
other = SpecifierSet(other)
elif isinstance(other, _IndividualSpecifier):
other = SpecifierSet(str(other))
elif not isinstance(other, SpecifierSet):
return NotImplemented
return self._specs == other._specs
def __ne__(self, other):
if isinstance(other, string_types):
other = SpecifierSet(other)
elif isinstance(other, _IndividualSpecifier):
other = SpecifierSet(str(other))
elif not isinstance(other, SpecifierSet):
return NotImplemented
return self._specs != other._specs
@property
def prereleases(self):
# If we have been given an explicit prerelease modifier, then we'll
# pass that through here.
if self._prereleases is not None:
return self._prereleases
# Otherwise we'll see if any of the given specifiers accept
# prereleases, if any of them do we'll return True, otherwise False.
# Note: The use of any() here means that an empty set of specifiers
# will always return False, this is an explicit design decision.
return any(s.prereleases for s in self._specs)
@prereleases.setter
def prereleases(self, value):
self._prereleases = value
def contains(self, item, prereleases=None):
# Ensure that our item is a Version or LegacyVersion instance.
if not isinstance(item, (LegacyVersion, Version)):
item = parse(item)
# We can determine if we're going to allow pre-releases by looking to
# see if any of the underlying items supports them. If none of them do
# and this item is a pre-release then we do not allow it and we can
# short circuit that here.
# Note: This means that 1.0.dev1 would not be contained in something
# like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
if (not (self.prereleases or prereleases)) and item.is_prerelease:
return False
# Determine if we're forcing a prerelease or not, we bypass
# self.prereleases here and use self._prereleases because we want to
# only take into consideration actual *forced* values. The underlying
# specifiers will handle the other logic.
# The logic here is: If prereleases is anything but None, we'll just
        #                    go ahead and continue to use that. However if
# prereleases is None, then we'll use whatever the
# value of self._prereleases is as long as it is not
# None itself.
if prereleases is None and self._prereleases is not None:
prereleases = self._prereleases
# We simply dispatch to the underlying specs here to make sure that the
# given version is contained within all of them.
# Note: This use of all() here means that an empty set of specifiers
# will always return True, this is an explicit design decision.
return all(
s.contains(item, prereleases=prereleases)
for s in self._specs
)
def filter(self, iterable, prereleases=None):
# Determine if we're forcing a prerelease or not, we bypass
# self.prereleases here and use self._prereleases because we want to
# only take into consideration actual *forced* values. The underlying
# specifiers will handle the other logic.
# The logic here is: If prereleases is anything but None, we'll just
        #                    go ahead and continue to use that. However if
# prereleases is None, then we'll use whatever the
# value of self._prereleases is as long as it is not
# None itself.
if prereleases is None and self._prereleases is not None:
prereleases = self._prereleases
# If we have any specifiers, then we want to wrap our iterable in the
# filter method for each one, this will act as a logical AND amongst
# each specifier.
if self._specs:
for spec in self._specs:
iterable = spec.filter(iterable, prereleases=prereleases)
return iterable
# If we do not have any specifiers, then we need to have a rough filter
# which will filter out any pre-releases, unless there are no final
# releases, and which will filter out LegacyVersion in general.
else:
filtered = []
found_prereleases = []
for item in iterable:
                # Ensure that we have some kind of Version class for this item.
if not isinstance(item, (LegacyVersion, Version)):
parsed_version = parse(item)
else:
parsed_version = item
# Filter out any item which is parsed as a LegacyVersion
if isinstance(parsed_version, LegacyVersion):
continue
# Store any item which is a pre-release for later unless we've
# already found a final version or we are accepting prereleases
if parsed_version.is_prerelease and not prereleases:
if not filtered:
found_prereleases.append(item)
else:
filtered.append(item)
# If we've found no items except for pre-releases, then we'll go
# ahead and use the pre-releases
if not filtered and found_prereleases and prereleases is None:
return found_prereleases
return filtered
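# Illustrative sketch, not part of the original module: a SpecifierSet ANDs
# its individual specifiers together, so containment and filtering behave
# roughly like this:
#
#   >>> specs = SpecifierSet(">=1.0,!=1.3.4,<2.0")
#   >>> specs.contains("1.5")
#   True
#   >>> specs.contains("1.3.4")
#   False
#   >>> list(specs.filter(["1.0", "1.3.4", "1.9", "2.1"]))
#   ['1.0', '1.9']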
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import collections
import itertools
import re
from ._structures import Infinity
__all__ = [
"parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
]
_Version = collections.namedtuple(
"_Version",
["epoch", "release", "dev", "pre", "post", "local"],
)
def parse(version):
"""
Parse the given version string and return either a :class:`Version` object
or a :class:`LegacyVersion` object depending on if the given version is
a valid PEP 440 version or a legacy version.
"""
try:
return Version(version)
except InvalidVersion:
return LegacyVersion(version)
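# Illustrative sketch, not part of the original module: parse() falls back to
# LegacyVersion for anything PEP 440 cannot describe, for example:
#
#   >>> parse("1.0.post1")
#   <Version('1.0.post1')>
#   >>> parse("2.0-alpha-banana")
#   <LegacyVersion('2.0-alpha-banana')>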
class InvalidVersion(ValueError):
"""
An invalid version was found, users should refer to PEP 440.
"""
class _BaseVersion(object):
def __hash__(self):
return hash(self._key)
def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
return self._compare(other, lambda s, o: s <= o)
def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)
def __ge__(self, other):
return self._compare(other, lambda s, o: s >= o)
def __gt__(self, other):
return self._compare(other, lambda s, o: s > o)
def __ne__(self, other):
return self._compare(other, lambda s, o: s != o)
def _compare(self, other, method):
if not isinstance(other, _BaseVersion):
return NotImplemented
return method(self._key, other._key)
class LegacyVersion(_BaseVersion):
def __init__(self, version):
self._version = str(version)
self._key = _legacy_cmpkey(self._version)
def __str__(self):
return self._version
def __repr__(self):
return "<LegacyVersion({0})>".format(repr(str(self)))
@property
def public(self):
return self._version
@property
def local(self):
return None
@property
def is_prerelease(self):
return False
_legacy_version_component_re = re.compile(
r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)
_legacy_version_replacement_map = {
"pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
}
def _parse_version_parts(s):
for part in _legacy_version_component_re.split(s):
part = _legacy_version_replacement_map.get(part, part)
if not part or part == ".":
continue
if part[:1] in "0123456789":
# pad for numeric comparison
yield part.zfill(8)
else:
yield "*" + part
# ensure that alpha/beta/candidate are before final
yield "*final"
def _legacy_cmpkey(version):
    # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
    # greater than or equal to 0. This will effectively put the LegacyVersion,
    # which uses the de facto standard originally implemented by setuptools,
    # before all PEP 440 versions.
epoch = -1
    # This scheme is taken from pkg_resources.parse_version of setuptools prior
    # to its adoption of the packaging library.
parts = []
for part in _parse_version_parts(version.lower()):
if part.startswith("*"):
# remove "-" before a prerelease tag
if part < "*final":
while parts and parts[-1] == "*final-":
parts.pop()
# remove trailing zeros from each series of numeric parts
while parts and parts[-1] == "00000000":
parts.pop()
parts.append(part)
parts = tuple(parts)
return epoch, parts
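# Illustrative sketch, not part of the original module: because the legacy key
# hard-codes an epoch of -1, any LegacyVersion sorts before any PEP 440
# Version, for example:
#
#   >>> _legacy_cmpkey("1.0")
#   (-1, ('00000001', '*final'))
#   >>> LegacyVersion("2.0-rubbish") < Version("0.1")
#   True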
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
v?
(?:
(?:(?P<epoch>[0-9]+)!)? # epoch
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
(?P<pre> # pre-release
[-_\.]?
(?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
[-_\.]?
(?P<pre_n>[0-9]+)?
)?
(?P<post> # post release
(?:-(?P<post_n1>[0-9]+))
|
(?:
[-_\.]?
(?P<post_l>post|rev|r)
[-_\.]?
(?P<post_n2>[0-9]+)?
)
)?
(?P<dev> # dev release
[-_\.]?
(?P<dev_l>dev)
[-_\.]?
(?P<dev_n>[0-9]+)?
)?
)
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
"""
class Version(_BaseVersion):
_regex = re.compile(
r"^\s*" + VERSION_PATTERN + r"\s*$",
re.VERBOSE | re.IGNORECASE,
)
def __init__(self, version):
# Validate the version and parse it into pieces
match = self._regex.search(version)
if not match:
raise InvalidVersion("Invalid version: '{0}'".format(version))
# Store the parsed out pieces of the version
self._version = _Version(
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
release=tuple(int(i) for i in match.group("release").split(".")),
pre=_parse_letter_version(
match.group("pre_l"),
match.group("pre_n"),
),
post=_parse_letter_version(
match.group("post_l"),
match.group("post_n1") or match.group("post_n2"),
),
dev=_parse_letter_version(
match.group("dev_l"),
match.group("dev_n"),
),
local=_parse_local_version(match.group("local")),
)
# Generate a key which will be used for sorting
self._key = _cmpkey(
self._version.epoch,
self._version.release,
self._version.pre,
self._version.post,
self._version.dev,
self._version.local,
)
def __repr__(self):
return "<Version({0})>".format(repr(str(self)))
def __str__(self):
parts = []
# Epoch
if self._version.epoch != 0:
parts.append("{0}!".format(self._version.epoch))
# Release segment
parts.append(".".join(str(x) for x in self._version.release))
# Pre-release
if self._version.pre is not None:
parts.append("".join(str(x) for x in self._version.pre))
# Post-release
if self._version.post is not None:
parts.append(".post{0}".format(self._version.post[1]))
# Development release
if self._version.dev is not None:
parts.append(".dev{0}".format(self._version.dev[1]))
# Local version segment
if self._version.local is not None:
parts.append(
"+{0}".format(".".join(str(x) for x in self._version.local))
)
return "".join(parts)
@property
def public(self):
return str(self).split("+", 1)[0]
@property
def local(self):
version_string = str(self)
if "+" in version_string:
return version_string.split("+", 1)[1]
@property
def is_prerelease(self):
return bool(self._version.dev or self._version.pre)
def _parse_letter_version(letter, number):
if letter:
# We consider there to be an implicit 0 in a pre-release if there is
# not a numeral associated with it.
if number is None:
number = 0
# We normalize any letters to their lower case form
letter = letter.lower()
# We consider some words to be alternate spellings of other words and
# in those cases we want to normalize the spellings to our preferred
# spelling.
if letter == "alpha":
letter = "a"
elif letter == "beta":
letter = "b"
elif letter in ["c", "pre", "preview"]:
letter = "rc"
return letter, int(number)
if not letter and number:
# We assume if we are given a number, but we are not given a letter
# then this is using the implicit post release syntax (e.g. 1.0-1)
letter = "post"
return letter, int(number)
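# Illustrative sketch, not part of the original module: the normalization
# above maps alternate spellings onto their canonical forms, for example:
#
#   >>> _parse_letter_version("alpha", None)   # implicit 0
#   ('a', 0)
#   >>> _parse_letter_version("pre", "2")
#   ('rc', 2)
#   >>> _parse_letter_version(None, "1")       # implicit post release, as in 1.0-1
#   ('post', 1)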
_local_version_seperators = re.compile(r"[\._-]")
def _parse_local_version(local):
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
if local is not None:
return tuple(
part.lower() if not part.isdigit() else int(part)
for part in _local_version_seperators.split(local)
)
def _cmpkey(epoch, release, pre, post, dev, local):
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll reverse the list, drop all the now
    # leading zeros until we come to something non-zero, then re-reverse the
    # rest back into the correct order and make it a tuple to use as our
    # sorting key.
release = tuple(
reversed(list(
itertools.dropwhile(
lambda x: x == 0,
reversed(release),
)
))
)
# We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
# We'll do this by abusing the pre segment, but we _only_ want to do this
# if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None:
pre = -Infinity
# Versions without a pre-release (except as noted above) should sort after
# those with one.
elif pre is None:
pre = Infinity
# Versions without a post segment should sort before those with one.
if post is None:
post = -Infinity
# Versions without a development segment should sort after those with one.
if dev is None:
dev = Infinity
if local is None:
# Versions without a local segment should sort before those with one.
local = -Infinity
else:
# Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440.
# - Alpha numeric segments sort before numeric segments
# - Alpha numeric segments sort lexicographically
# - Numeric segments sort numerically
# - Shorter versions sort before longer versions when the prefixes
# match exactly
local = tuple(
(i, "") if isinstance(i, int) else (-Infinity, i)
for i in local
)
return epoch, release, pre, post, dev, local
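# Illustrative sketch, not part of the original module: the comparison key
# built above yields the PEP 440 ordering, for example:
#
#   >>> Version("1.0.dev1") < Version("1.0a1") < Version("1.0") < Version("1.0.post1")
#   True
#   >>> Version("1.0+local.1") > Version("1.0")   # local versions sort after
#   True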
...@@ -171,7 +171,7 @@ You can append a path entry to a working set using ``add_entry()``:: ...@@ -171,7 +171,7 @@ You can append a path entry to a working set using ``add_entry()``::
['http://example.com/something'] ['http://example.com/something']
>>> ws.add_entry(pkg_resources.__file__) >>> ws.add_entry(pkg_resources.__file__)
>>> ws.entries >>> ws.entries
['http://example.com/something', '...pkg_resources.py...'] ['http://example.com/something', '...pkg_resources...']
Multiple additions result in multiple entries, even if the entry is already in Multiple additions result in multiple entries, even if the entry is already in
the working set (because ``sys.path`` can contain the same entry more than the working set (because ``sys.path`` can contain the same entry more than
......
...@@ -3,7 +3,6 @@ Setuptools is released using 'jaraco.packaging.release'. To make a release, ...@@ -3,7 +3,6 @@ Setuptools is released using 'jaraco.packaging.release'. To make a release,
install jaraco.packaging and run 'python -m jaraco.packaging.release' install jaraco.packaging and run 'python -m jaraco.packaging.release'
""" """
import re
import os import os
import subprocess import subprocess
...@@ -14,12 +13,10 @@ pkg_resources.require('wheel') ...@@ -14,12 +13,10 @@ pkg_resources.require('wheel')
def before_upload(): def before_upload():
_linkify('CHANGES.txt', 'CHANGES (links).txt')
BootstrapBookmark.add() BootstrapBookmark.add()
def after_push(): def after_push():
os.remove('CHANGES (links).txt')
BootstrapBookmark.push() BootstrapBookmark.push()
files_with_versions = ( files_with_versions = (
...@@ -33,44 +30,6 @@ test_info = "Travis-CI tests: http://travis-ci.org/#!/jaraco/setuptools" ...@@ -33,44 +30,6 @@ test_info = "Travis-CI tests: http://travis-ci.org/#!/jaraco/setuptools"
os.environ["SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES"] = "1" os.environ["SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES"] = "1"
link_patterns = [
r"(Issue )?#(?P<issue>\d+)",
r"Pull Request ?#(?P<pull_request>\d+)",
r"Distribute #(?P<distribute>\d+)",
r"Buildout #(?P<buildout>\d+)",
r"Old Setuptools #(?P<old_setuptools>\d+)",
r"Jython #(?P<jython>\d+)",
r"Python #(?P<python>\d+)",
]
issue_urls = dict(
pull_request='https://bitbucket.org'
'/pypa/setuptools/pull-request/{pull_request}',
issue='https://bitbucket.org/pypa/setuptools/issue/{issue}',
distribute='https://bitbucket.org/tarek/distribute/issue/{distribute}',
buildout='https://github.com/buildout/buildout/issues/{buildout}',
old_setuptools='http://bugs.python.org/setuptools/issue{old_setuptools}',
jython='http://bugs.jython.org/issue{jython}',
python='http://bugs.python.org/issue{python}',
)
def _linkify(source, dest):
pattern = '|'.join(link_patterns)
with open(source) as source:
out = re.sub(pattern, replacer, source.read())
with open(dest, 'w') as dest:
dest.write(out)
def replacer(match):
text = match.group(0)
match_dict = match.groupdict()
for key in match_dict:
if match_dict[key]:
url = issue_urls[key].format(**match_dict)
return "`{text} <{url}>`_".format(text=text, url=url)
class BootstrapBookmark: class BootstrapBookmark:
name = 'bootstrap' name = 'bootstrap'
......
...@@ -21,5 +21,5 @@ formats = gztar zip ...@@ -21,5 +21,5 @@ formats = gztar zip
universal=1 universal=1
[pytest] [pytest]
addopts=--doctest-modules --ignore release.py --ignore setuptools/lib2to3_ex.py --ignore tests/manual_test.py --ignore tests/shlib_test addopts=--doctest-modules --ignore release.py --ignore setuptools/lib2to3_ex.py --ignore tests/manual_test.py --ignore tests/shlib_test --doctest-glob=pkg_resources/api_tests.txt
norecursedirs=dist build *.egg norecursedirs=dist build *.egg
...@@ -4,7 +4,6 @@ import io ...@@ -4,7 +4,6 @@ import io
import os import os
import sys import sys
import textwrap import textwrap
import contextlib
# Allow to run setup.py from another directory. # Allow to run setup.py from another directory.
os.chdir(os.path.dirname(os.path.abspath(__file__))) os.chdir(os.path.dirname(os.path.abspath(__file__)))
...@@ -27,7 +26,6 @@ with open(ver_path) as ver_file: ...@@ -27,7 +26,6 @@ with open(ver_path) as ver_file:
import setuptools import setuptools
from setuptools.command.build_py import build_py as _build_py from setuptools.command.build_py import build_py as _build_py
from setuptools.command.test import test as _test
scripts = [] scripts = []
...@@ -61,44 +59,11 @@ class build_py(_build_py): ...@@ -61,44 +59,11 @@ class build_py(_build_py):
outf, copied = self.copy_file(srcfile, target) outf, copied = self.copy_file(srcfile, target)
srcfile = os.path.abspath(srcfile) srcfile = os.path.abspath(srcfile)
class test(_test):
"""Specific test class to avoid rewriting the entry_points.txt"""
def run(self):
with self._save_entry_points():
_test.run(self)
@contextlib.contextmanager
def _save_entry_points(self):
entry_points = os.path.join('setuptools.egg-info', 'entry_points.txt')
if not os.path.exists(entry_points):
yield
return
# save the content
with open(entry_points, 'rb') as f:
ep_content = f.read()
# run the tests
try:
yield
finally:
# restore the file
with open(entry_points, 'wb') as f:
f.write(ep_content)
readme_file = io.open('README.txt', encoding='utf-8') readme_file = io.open('README.txt', encoding='utf-8')
# The release script adds hyperlinks to issues,
# but if the release script has not run, fall back to the source file
changes_names = 'CHANGES (links).txt', 'CHANGES.txt'
changes_fn = next(iter(filter(os.path.exists, changes_names)))
changes_file = io.open(changes_fn, encoding='utf-8')
with readme_file: with readme_file:
with changes_file: long_description = readme_file.read()
long_description = readme_file.read() + '\n' + changes_file.read()
package_data = { package_data = {
'setuptools': ['script (dev).tmpl', 'script.tmpl', 'site-patch.py']} 'setuptools': ['script (dev).tmpl', 'script.tmpl', 'site-patch.py']}
...@@ -123,16 +88,14 @@ setup_params = dict( ...@@ -123,16 +88,14 @@ setup_params = dict(
long_description=long_description, long_description=long_description,
keywords="CPAN PyPI distutils eggs package management", keywords="CPAN PyPI distutils eggs package management",
url="https://bitbucket.org/pypa/setuptools", url="https://bitbucket.org/pypa/setuptools",
test_suite='setuptools.tests',
src_root=src_root, src_root=src_root,
packages=setuptools.find_packages(), packages=setuptools.find_packages(),
package_data=package_data, package_data=package_data,
py_modules=['pkg_resources', 'easy_install'], py_modules=['easy_install'],
zip_safe=True, zip_safe=True,
cmdclass={'test': test},
entry_points={ entry_points={
"distutils.commands": [ "distutils.commands": [
"%(cmd)s = setuptools.command.%(cmd)s:%(cmd)s" % locals() "%(cmd)s = setuptools.command.%(cmd)s:%(cmd)s" % locals()
...@@ -172,9 +135,6 @@ setup_params = dict( ...@@ -172,9 +135,6 @@ setup_params = dict(
], ],
"console_scripts": console_scripts, "console_scripts": console_scripts,
"setuptools.file_finders":
["svn_cvs = setuptools.command.sdist:_default_revctrl"],
"setuptools.installation": "setuptools.installation":
['eggsecutable = setuptools.command.easy_install:bootstrap'], ['eggsecutable = setuptools.command.easy_install:bootstrap'],
}, },
...@@ -213,6 +173,7 @@ setup_params = dict( ...@@ -213,6 +173,7 @@ setup_params = dict(
tests_require=[ tests_require=[
'setuptools[ssl]', 'setuptools[ssl]',
'pytest', 'pytest',
'mock',
], ],
setup_requires=[ setup_requires=[
] + pytest_runner, ] + pytest_runner,
......
...@@ -3,7 +3,6 @@ ...@@ -3,7 +3,6 @@
__import__('setuptools.bootstrap').bootstrap.ensure_deps() __import__('setuptools.bootstrap').bootstrap.ensure_deps()
import os import os
import sys
import distutils.core import distutils.core
import distutils.filelist import distutils.filelist
from distutils.core import Command as _Command from distutils.core import Command as _Command
......
...@@ -64,20 +64,23 @@ def unpack_directory(filename, extract_dir, progress_filter=default_filter): ...@@ -64,20 +64,23 @@ def unpack_directory(filename, extract_dir, progress_filter=default_filter):
Raises ``UnrecognizedFormat`` if `filename` is not a directory Raises ``UnrecognizedFormat`` if `filename` is not a directory
""" """
if not os.path.isdir(filename): if not os.path.isdir(filename):
raise UnrecognizedFormat("%s is not a directory" % (filename,)) raise UnrecognizedFormat("%s is not a directory" % filename)
paths = {filename:('',extract_dir)} paths = {
filename: ('', extract_dir),
}
for base, dirs, files in os.walk(filename): for base, dirs, files in os.walk(filename):
src,dst = paths[base] src, dst = paths[base]
for d in dirs: for d in dirs:
paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d) paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)
for f in files: for f in files:
target = os.path.join(dst,f) target = os.path.join(dst, f)
target = progress_filter(src+f, target) target = progress_filter(src + f, target)
if not target: if not target:
continue # skip non-files # skip non-files
continue
ensure_directory(target) ensure_directory(target)
f = os.path.join(base,f) f = os.path.join(base, f)
shutil.copyfile(f, target) shutil.copyfile(f, target)
shutil.copystat(f, target) shutil.copystat(f, target)
...@@ -112,12 +115,8 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): ...@@ -112,12 +115,8 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
# file # file
ensure_directory(target) ensure_directory(target)
data = z.read(info.filename) data = z.read(info.filename)
f = open(target,'wb') with open(target, 'wb') as f:
try:
f.write(data) f.write(data)
finally:
f.close()
del data
unix_attributes = info.external_attr >> 16 unix_attributes = info.external_attr >> 16
if unix_attributes: if unix_attributes:
os.chmod(target, unix_attributes) os.chmod(target, unix_attributes)
...@@ -137,18 +136,21 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): ...@@ -137,18 +136,21 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
"%s is not a compressed or uncompressed tar file" % (filename,) "%s is not a compressed or uncompressed tar file" % (filename,)
) )
with contextlib.closing(tarobj): with contextlib.closing(tarobj):
tarobj.chown = lambda *args: None # don't do any chowning! # don't do any chowning!
tarobj.chown = lambda *args: None
for member in tarobj: for member in tarobj:
name = member.name name = member.name
# don't extract absolute paths or ones with .. in them # don't extract absolute paths or ones with .. in them
if not name.startswith('/') and '..' not in name.split('/'): if not name.startswith('/') and '..' not in name.split('/'):
prelim_dst = os.path.join(extract_dir, *name.split('/')) prelim_dst = os.path.join(extract_dir, *name.split('/'))
# resolve any links and to extract the link targets as normal files # resolve any links and to extract the link targets as normal
# files
while member is not None and (member.islnk() or member.issym()): while member is not None and (member.islnk() or member.issym()):
linkpath = member.linkname linkpath = member.linkname
if member.issym(): if member.issym():
linkpath = posixpath.join(posixpath.dirname(member.name), linkpath) base = posixpath.dirname(member.name)
linkpath = posixpath.join(base, linkpath)
linkpath = posixpath.normpath(linkpath) linkpath = posixpath.normpath(linkpath)
member = tarobj._getmember(linkpath) member = tarobj._getmember(linkpath)
...@@ -158,9 +160,11 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): ...@@ -158,9 +160,11 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
if final_dst.endswith(os.sep): if final_dst.endswith(os.sep):
final_dst = final_dst[:-1] final_dst = final_dst[:-1]
try: try:
tarobj._extract_member(member, final_dst) # XXX Ugh # XXX Ugh
tarobj._extract_member(member, final_dst)
except tarfile.ExtractError: except tarfile.ExtractError:
pass # chown/chmod/mkfifo/mknode/makedev failed # chown/chmod/mkfifo/mknode/makedev failed
pass
return True return True
extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
...@@ -6,6 +6,7 @@ from distutils.errors import DistutilsError ...@@ -6,6 +6,7 @@ from distutils.errors import DistutilsError
from distutils import log from distutils import log
import os import os
import sys import sys
import itertools
from setuptools.extension import Library from setuptools.extension import Library
...@@ -33,18 +34,13 @@ if sys.platform == "darwin": ...@@ -33,18 +34,13 @@ if sys.platform == "darwin":
use_stubs = True use_stubs = True
elif os.name != 'nt': elif os.name != 'nt':
try: try:
from dl import RTLD_NOW import dl
use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
have_rtld = True
use_stubs = True
except ImportError: except ImportError:
pass pass
def if_dl(s): if_dl = lambda s: s if have_rtld else ''
if have_rtld:
return s
return ''
class build_ext(_build_ext): class build_ext(_build_ext):
...@@ -123,10 +119,10 @@ class build_ext(_build_ext): ...@@ -123,10 +119,10 @@ class build_ext(_build_ext):
# XXX what to do with conflicts? # XXX what to do with conflicts?
self.ext_map[fullname.split('.')[-1]] = ext self.ext_map[fullname.split('.')[-1]] = ext
ltd = ext._links_to_dynamic = \ ltd = self.shlibs and self.links_to_dynamic(ext) or False
self.shlibs and self.links_to_dynamic(ext) or False ns = ltd and use_stubs and not isinstance(ext, Library)
ext._needs_stub = ltd and use_stubs and not isinstance(ext, ext._links_to_dynamic = ltd
Library) ext._needs_stub = ns
filename = ext._file_name = self.get_ext_filename(fullname) filename = ext._file_name = self.get_ext_filename(fullname)
libdir = os.path.dirname(os.path.join(self.build_lib, filename)) libdir = os.path.dirname(os.path.join(self.build_lib, filename))
if ltd and libdir not in ext.library_dirs: if ltd and libdir not in ext.library_dirs:
...@@ -186,9 +182,8 @@ class build_ext(_build_ext): ...@@ -186,9 +182,8 @@ class build_ext(_build_ext):
self.compiler = self.shlib_compiler self.compiler = self.shlib_compiler
_build_ext.build_extension(self, ext) _build_ext.build_extension(self, ext)
if ext._needs_stub: if ext._needs_stub:
self.write_stub( cmd = self.get_finalized_command('build_py').build_lib
self.get_finalized_command('build_py').build_lib, ext self.write_stub(cmd, ext)
)
finally: finally:
self.compiler = _compiler self.compiler = _compiler
...@@ -199,22 +194,27 @@ class build_ext(_build_ext): ...@@ -199,22 +194,27 @@ class build_ext(_build_ext):
# XXX static-compiled version # XXX static-compiled version
libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
pkg = '.'.join(ext._full_name.split('.')[:-1] + ['']) pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
for libname in ext.libraries: return any(pkg + libname in libnames for libname in ext.libraries)
if pkg + libname in libnames:
return True
return False
def get_outputs(self): def get_outputs(self):
outputs = _build_ext.get_outputs(self) return _build_ext.get_outputs(self) + self.__get_stubs_outputs()
optimize = self.get_finalized_command('build_py').optimize
for ext in self.extensions: def __get_stubs_outputs(self):
if ext._needs_stub: # assemble the base name for each extension that needs a stub
base = os.path.join(self.build_lib, *ext._full_name.split('.')) ns_ext_bases = (
outputs.append(base + '.py') os.path.join(self.build_lib, *ext._full_name.split('.'))
outputs.append(base + '.pyc') for ext in self.extensions
if optimize: if ext._needs_stub
outputs.append(base + '.pyo') )
return outputs # pair each base with the extension
pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
return list(base + fnext for base, fnext in pairs)
def __get_output_extensions(self):
yield '.py'
yield '.pyc'
if self.get_finalized_command('build_py').optimize:
yield '.pyo'
def write_stub(self, output_dir, ext, compile=False): def write_stub(self, output_dir, ext, compile=False):
log.info("writing stub loader for %s to %s", ext._full_name, log.info("writing stub loader for %s to %s", ext._full_name,
......
...@@ -34,6 +34,7 @@ import textwrap ...@@ -34,6 +34,7 @@ import textwrap
import warnings import warnings
import site import site
import struct import struct
import contextlib
import six import six
from six.moves import configparser from six.moves import configparser
...@@ -55,9 +56,14 @@ from pkg_resources import ( ...@@ -55,9 +56,14 @@ from pkg_resources import (
import pkg_resources import pkg_resources
# Turn on PEP440Warnings
warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
sys_executable = os.environ.get('__PYVENV_LAUNCHER__', sys_executable = os.environ.get('__PYVENV_LAUNCHER__',
os.path.normpath(sys.executable)) os.path.normpath(sys.executable))
__all__ = [ __all__ = [
'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
'main', 'get_exe_prefixes', 'main', 'get_exe_prefixes',
...@@ -1541,10 +1547,14 @@ class PthDistributions(Environment): ...@@ -1541,10 +1547,14 @@ class PthDistributions(Environment):
def add(self, dist): def add(self, dist):
"""Add `dist` to the distribution map""" """Add `dist` to the distribution map"""
if (dist.location not in self.paths and ( new_path = (
dist.location not in self.sitedirs or dist.location not in self.paths and (
dist.location == os.getcwd() # account for '.' being in PYTHONPATH dist.location not in self.sitedirs or
)): # account for '.' being in PYTHONPATH
dist.location == os.getcwd()
)
)
if new_path:
self.paths.append(dist.location) self.paths.append(dist.location)
self.dirty = True self.dirty = True
Environment.add(self, dist) Environment.add(self, dist)
...@@ -2112,39 +2122,42 @@ def bootstrap(): ...@@ -2112,39 +2122,42 @@ def bootstrap():
def main(argv=None, **kw): def main(argv=None, **kw):
from setuptools import setup from setuptools import setup
from setuptools.dist import Distribution from setuptools.dist import Distribution
import distutils.core
USAGE = """\
usage: %(script)s [options] requirement_or_url ...
or: %(script)s --help
"""
def gen_usage(script_name):
return USAGE % dict(
script=os.path.basename(script_name),
)
def with_ei_usage(f):
old_gen_usage = distutils.core.gen_usage
try:
distutils.core.gen_usage = gen_usage
return f()
finally:
distutils.core.gen_usage = old_gen_usage
class DistributionWithoutHelpCommands(Distribution): class DistributionWithoutHelpCommands(Distribution):
common_usage = "" common_usage = ""
def _show_help(self, *args, **kw): def _show_help(self, *args, **kw):
with_ei_usage(lambda: Distribution._show_help(self, *args, **kw)) with _patch_usage():
Distribution._show_help(self, *args, **kw)
if argv is None: if argv is None:
argv = sys.argv[1:] argv = sys.argv[1:]
with_ei_usage( with _patch_usage():
lambda: setup( setup(
script_args=['-q', 'easy_install', '-v'] + argv, script_args=['-q', 'easy_install', '-v'] + argv,
script_name=sys.argv[0] or 'easy_install', script_name=sys.argv[0] or 'easy_install',
distclass=DistributionWithoutHelpCommands, **kw distclass=DistributionWithoutHelpCommands, **kw
) )
)
@contextlib.contextmanager
def _patch_usage():
import distutils.core
USAGE = textwrap.dedent("""
usage: %(script)s [options] requirement_or_url ...
or: %(script)s --help
""").lstrip()
def gen_usage(script_name):
return USAGE % dict(
script=os.path.basename(script_name),
)
saved = distutils.core.gen_usage
distutils.core.gen_usage = gen_usage
try:
yield
finally:
distutils.core.gen_usage = saved
...@@ -6,21 +6,27 @@ from distutils.filelist import FileList as _FileList ...@@ -6,21 +6,27 @@ from distutils.filelist import FileList as _FileList
from distutils.util import convert_path from distutils.util import convert_path
from distutils import log from distutils import log
import distutils.errors import distutils.errors
import distutils.filelist
import os import os
import re import re
import sys import sys
import six import six
try:
from setuptools_svn import svn_utils
except ImportError:
pass
from setuptools import Command from setuptools import Command
from setuptools.command.sdist import sdist from setuptools.command.sdist import sdist
from setuptools import svn_utils
from setuptools.command.sdist import walk_revctrl from setuptools.command.sdist import walk_revctrl
from pkg_resources import ( from pkg_resources import (
parse_requirements, safe_name, parse_version, parse_requirements, safe_name, parse_version,
safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
import setuptools.unicode_utils as unicode_utils import setuptools.unicode_utils as unicode_utils
from pkg_resources import packaging
class egg_info(Command): class egg_info(Command):
description = "create a distribution's .egg-info directory" description = "create a distribution's .egg-info directory"
...@@ -69,10 +75,15 @@ class egg_info(Command): ...@@ -69,10 +75,15 @@ class egg_info(Command):
self.vtags = self.tags() self.vtags = self.tags()
self.egg_version = self.tagged_version() self.egg_version = self.tagged_version()
parsed_version = parse_version(self.egg_version)
try: try:
is_version = isinstance(parsed_version, packaging.version.Version)
spec = (
"%s==%s" if is_version else "%s===%s"
)
list( list(
parse_requirements('%s==%s' % (self.egg_name, parse_requirements(spec % (self.egg_name, self.egg_version))
self.egg_version))
) )
except ValueError: except ValueError:
raise distutils.errors.DistutilsOptionError( raise distutils.errors.DistutilsOptionError(
...@@ -184,6 +195,8 @@ class egg_info(Command): ...@@ -184,6 +195,8 @@ class egg_info(Command):
@staticmethod @staticmethod
def get_svn_revision(): def get_svn_revision():
if 'svn_utils' not in globals():
return "0"
return str(svn_utils.SvnInfo.load(os.curdir).get_revision()) return str(svn_utils.SvnInfo.load(os.curdir).get_revision())
def find_sources(self): def find_sources(self):
...@@ -313,8 +326,33 @@ class manifest_maker(sdist): ...@@ -313,8 +326,33 @@ class manifest_maker(sdist):
elif os.path.exists(self.manifest): elif os.path.exists(self.manifest):
self.read_manifest() self.read_manifest()
ei_cmd = self.get_finalized_command('egg_info') ei_cmd = self.get_finalized_command('egg_info')
self._add_egg_info(cmd=ei_cmd)
self.filelist.include_pattern("*", prefix=ei_cmd.egg_info) self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
def _add_egg_info(self, cmd):
"""
Add paths for egg-info files for an external egg-base.
The egg-info files are written to egg-base. If egg-base is
outside the current working directory, this method
        searches the egg-base directory for files to include
in the manifest. Uses distutils.filelist.findall (which is
really the version monkeypatched in by setuptools/__init__.py)
to perform the search.
Since findall records relative paths, prefix the returned
paths with cmd.egg_base, so add_default's include_pattern call
(which is looking for the absolute cmd.egg_info) will match
them.
"""
if cmd.egg_base == os.curdir:
# egg-info files were already added by something else
return
discovered = distutils.filelist.findall(cmd.egg_base)
resolved = (os.path.join(cmd.egg_base, path) for path in discovered)
self.filelist.allfiles.extend(resolved)
def prune_file_list(self): def prune_file_list(self):
build = self.get_finalized_command('build') build = self.get_finalized_command('build')
base_dir = self.distribution.get_fullname() base_dir = self.distribution.get_fullname()
...@@ -383,6 +421,12 @@ def write_requirements(cmd, basename, filename): ...@@ -383,6 +421,12 @@ def write_requirements(cmd, basename, filename):
cmd.write_or_delete_file("requirements", filename, data.getvalue()) cmd.write_or_delete_file("requirements", filename, data.getvalue())
def write_setup_requirements(cmd, basename, filename):
data = StringIO()
_write_requirements(data, cmd.distribution.setup_requires)
cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
def write_toplevel_names(cmd, basename, filename): def write_toplevel_names(cmd, basename, filename):
pkgs = dict.fromkeys( pkgs = dict.fromkeys(
[ [
......
from glob import glob from glob import glob
from distutils.util import convert_path
from distutils import log from distutils import log
import distutils.command.sdist as orig import distutils.command.sdist as orig
import os import os
import re
import sys import sys
import six import six
from setuptools import svn_utils
from setuptools.utils import cs_path_exists from setuptools.utils import cs_path_exists
import pkg_resources import pkg_resources
READMES = ('README', 'README.rst', 'README.txt') READMES = 'README', 'README.rst', 'README.txt'
_default_revctrl = list
def walk_revctrl(dirname=''): def walk_revctrl(dirname=''):
"""Find all files under revision control""" """Find all files under revision control"""
...@@ -23,60 +21,6 @@ def walk_revctrl(dirname=''): ...@@ -23,60 +21,6 @@ def walk_revctrl(dirname=''):
yield item yield item
# TODO will need test case
class re_finder(object):
"""
Finder that locates files based on entries in a file matched by a
regular expression.
"""
def __init__(self, path, pattern, postproc=lambda x: x):
self.pattern = pattern
self.postproc = postproc
self.entries_path = convert_path(path)
def _finder(self, dirname, filename):
f = open(filename, 'rU')
try:
data = f.read()
finally:
f.close()
for match in self.pattern.finditer(data):
path = match.group(1)
# postproc was formerly used when the svn finder
# was an re_finder for calling unescape
path = self.postproc(path)
yield svn_utils.joinpath(dirname, path)
def find(self, dirname=''):
path = svn_utils.joinpath(dirname, self.entries_path)
if not os.path.isfile(path):
# entries file doesn't exist
return
for path in self._finder(dirname, path):
if os.path.isfile(path):
yield path
elif os.path.isdir(path):
for item in self.find(path):
yield item
__call__ = find
def _default_revctrl(dirname=''):
'Primary svn_cvs entry point'
for finder in finders:
for item in finder(dirname):
yield item
finders = [
re_finder('CVS/Entries', re.compile(r"^\w?/([^/]+)/", re.M)),
svn_utils.svn_finder,
]
class sdist(orig.sdist): class sdist(orig.sdist):
"""Smart sdist that finds anything supported by revision control""" """Smart sdist that finds anything supported by revision control"""
......
...@@ -173,4 +173,4 @@ class test(Command): ...@@ -173,4 +173,4 @@ class test(Command):
if val is None: if val is None:
return return
parsed = EntryPoint.parse("x=" + val) parsed = EntryPoint.parse("x=" + val)
return parsed.load(require=False)() return parsed._load()()
...@@ -19,6 +19,9 @@ from setuptools.depends import Require ...@@ -19,6 +19,9 @@ from setuptools.depends import Require
from setuptools import windows_support from setuptools import windows_support
import pkg_resources import pkg_resources
packaging = pkg_resources.packaging
def _get_unpatched(cls): def _get_unpatched(cls):
"""Protect against re-patching the distutils if reloaded """Protect against re-patching the distutils if reloaded
...@@ -270,6 +273,27 @@ class Distribution(_Distribution): ...@@ -270,6 +273,27 @@ class Distribution(_Distribution):
# Some people apparently take "version number" too literally :) # Some people apparently take "version number" too literally :)
self.metadata.version = str(self.metadata.version) self.metadata.version = str(self.metadata.version)
if self.metadata.version is not None:
try:
ver = packaging.version.Version(self.metadata.version)
normalized_version = str(ver)
if self.metadata.version != normalized_version:
warnings.warn(
"The version specified requires normalization, "
"consider using '%s' instead of '%s'." % (
normalized_version,
self.metadata.version,
)
)
self.metadata.version = normalized_version
except (packaging.version.InvalidVersion, TypeError):
warnings.warn(
"The version specified (%r) is an invalid version, this "
"may not work as expected with newer versions of "
"setuptools, pip, and PyPI. Please see PEP 440 for more "
"details." % self.metadata.version
)
def parse_command_line(self): def parse_command_line(self):
"""Process features after parsing command line options""" """Process features after parsing command line options"""
result = _Distribution.parse_command_line(self) result = _Distribution.parse_command_line(self)
...@@ -411,7 +435,8 @@ class Distribution(_Distribution): ...@@ -411,7 +435,8 @@ class Distribution(_Distribution):
def print_commands(self): def print_commands(self):
for ep in pkg_resources.iter_entry_points('distutils.commands'): for ep in pkg_resources.iter_entry_points('distutils.commands'):
if ep.name not in self.cmdclass: if ep.name not in self.cmdclass:
cmdclass = ep.load(False) # don't require extras, we're not running # don't require extras as the commands won't be invoked
cmdclass = ep._load()
self.cmdclass[ep.name] = cmdclass self.cmdclass[ep.name] = cmdclass
return _Distribution.print_commands(self) return _Distribution.print_commands(self)
......
...@@ -5,7 +5,10 @@ import operator ...@@ -5,7 +5,10 @@ import operator
import functools import functools
import itertools import itertools
import re import re
import contextlib
import pickle
import six
from six.moves import builtins from six.moves import builtins
import pkg_resources import pkg_resources
...@@ -42,20 +45,150 @@ def _execfile(filename, globals, locals=None): ...@@ -42,20 +45,150 @@ def _execfile(filename, globals, locals=None):
code = compile(script, filename, 'exec') code = compile(script, filename, 'exec')
exec(code, globals, locals) exec(code, globals, locals)
@contextlib.contextmanager
def save_argv():
saved = sys.argv[:]
try:
yield saved
finally:
sys.argv[:] = saved
@contextlib.contextmanager
def save_path():
saved = sys.path[:]
try:
yield saved
finally:
sys.path[:] = saved
@contextlib.contextmanager
def override_temp(replacement):
"""
Monkey-patch tempfile.tempdir with replacement, ensuring it exists
"""
if not os.path.isdir(replacement):
os.makedirs(replacement)
saved = tempfile.tempdir
tempfile.tempdir = replacement
try:
yield
finally:
tempfile.tempdir = saved
@contextlib.contextmanager
def pushd(target):
saved = os.getcwd()
os.chdir(target)
try:
yield saved
finally:
os.chdir(saved)
@contextlib.contextmanager
def save_modules():
"""
Context in which imported modules are saved.
Translates exceptions internal to the context into the equivalent exception
outside the context.
"""
saved = sys.modules.copy()
try:
try:
yield saved
except:
# dump any exception
class_, exc, tb = sys.exc_info()
saved_cls = pickle.dumps(class_)
saved_exc = pickle.dumps(exc)
raise
finally:
sys.modules.update(saved)
# remove any modules imported since
del_modules = (
mod_name for mod_name in sys.modules
if mod_name not in saved
# exclude any encodings modules. See #285
and not mod_name.startswith('encodings.')
)
_clear_modules(del_modules)
except:
# reload and re-raise any exception, using restored modules
class_, exc, tb = sys.exc_info()
new_cls = pickle.loads(saved_cls)
new_exc = pickle.loads(saved_exc)
six.reraise(new_cls, new_exc, tb)
def _clear_modules(module_names):
for mod_name in list(module_names):
del sys.modules[mod_name]
@contextlib.contextmanager
def save_pkg_resources_state():
saved = pkg_resources.__getstate__()
try:
yield saved
finally:
pkg_resources.__setstate__(saved)
@contextlib.contextmanager
def setup_context(setup_dir):
temp_dir = os.path.join(setup_dir, 'temp')
with save_pkg_resources_state():
with save_modules():
hide_setuptools()
with save_path():
with save_argv():
with override_temp(temp_dir):
with pushd(setup_dir):
# ensure setuptools commands are available
__import__('setuptools')
yield
def _needs_hiding(mod_name):
"""
>>> _needs_hiding('setuptools')
True
>>> _needs_hiding('pkg_resources')
True
>>> _needs_hiding('setuptools_plugin')
False
>>> _needs_hiding('setuptools.__init__')
True
>>> _needs_hiding('distutils')
True
"""
pattern = re.compile('(setuptools|pkg_resources|distutils)(\.|$)')
return bool(pattern.match(mod_name))
def hide_setuptools():
"""
Remove references to setuptools' modules from sys.modules to allow the
invocation to import the most appropriate setuptools. This technique is
necessary to avoid issues such as #315 where setuptools upgrading itself
would fail to find a function declared in the metadata.
"""
modules = filter(_needs_hiding, sys.modules)
_clear_modules(modules)
def run_setup(setup_script, args): def run_setup(setup_script, args):
"""Run a distutils setup script, sandboxed in its directory""" """Run a distutils setup script, sandboxed in its directory"""
old_dir = os.getcwd()
save_argv = sys.argv[:]
save_path = sys.path[:]
setup_dir = os.path.abspath(os.path.dirname(setup_script)) setup_dir = os.path.abspath(os.path.dirname(setup_script))
temp_dir = os.path.join(setup_dir,'temp') with setup_context(setup_dir):
if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
save_tmp = tempfile.tempdir
save_modules = sys.modules.copy()
pr_state = pkg_resources.__getstate__()
try:
tempfile.tempdir = temp_dir
os.chdir(setup_dir)
try: try:
sys.argv[:] = [setup_script]+list(args) sys.argv[:] = [setup_script]+list(args)
sys.path.insert(0, setup_dir) sys.path.insert(0, setup_dir)
...@@ -71,21 +204,6 @@ def run_setup(setup_script, args): ...@@ -71,21 +204,6 @@ def run_setup(setup_script, args):
if v.args and v.args[0]: if v.args and v.args[0]:
raise raise
# Normal exit, just return # Normal exit, just return
finally:
pkg_resources.__setstate__(pr_state)
sys.modules.update(save_modules)
# remove any modules imported within the sandbox
del_modules = [
mod_name for mod_name in sys.modules
if mod_name not in save_modules
# exclude any encodings modules. See #285
and not mod_name.startswith('encodings.')
]
list(map(sys.modules.__delitem__, del_modules))
os.chdir(old_dir)
sys.path[:] = save_path
sys.argv[:] = save_argv
tempfile.tempdir = save_tmp
class AbstractSandbox: class AbstractSandbox:
......
from __future__ import absolute_import
import os
import re
import sys
from distutils import log
import xml.dom.pulldom
import shlex
import locale
import codecs
import unicodedata
import warnings
from setuptools.py31compat import TemporaryDirectory
from xml.sax.saxutils import unescape
import six
from six.moves import urllib
from subprocess import Popen as _Popen, PIPE as _PIPE
#NOTE: Use of the command line options requires SVN 1.3 or newer (December 2005)
# and SVN 1.3 hasn't been supported by the developers since mid 2008.
#subprocess is called several times with shell=(sys.platform=='win32')
#see the following for more information:
# http://bugs.python.org/issue8557
# http://stackoverflow.com/questions/5658622/
# python-subprocess-popen-environment-path
def _run_command(args, stdout=_PIPE, stderr=_PIPE, encoding=None, stream=0):
#regarding the shell argument, see: http://bugs.python.org/issue8557
try:
proc = _Popen(args, stdout=stdout, stderr=stderr,
shell=(sys.platform == 'win32'))
data = proc.communicate()[stream]
except OSError:
return 1, ''
    #decode the captured output stream
data = decode_as_string(data, encoding)
    #communicate calls wait()
return proc.returncode, data
def _get_entry_schedule(entry):
schedule = entry.getElementsByTagName('schedule')[0]
return "".join([t.nodeValue
for t in schedule.childNodes
if t.nodeType == t.TEXT_NODE])
def _get_target_property(target):
property_text = target.getElementsByTagName('property')[0]
return "".join([t.nodeValue
for t in property_text.childNodes
if t.nodeType == t.TEXT_NODE])
def _get_xml_data(decoded_str):
if six.PY2:
#old versions want an encoded string
data = decoded_str.encode('utf-8')
else:
data = decoded_str
return data
def joinpath(prefix, *suffix):
if not prefix or prefix == '.':
return os.path.join(*suffix)
return os.path.join(prefix, *suffix)
def determine_console_encoding():
try:
#try for the preferred encoding
encoding = locale.getpreferredencoding()
#see if the locale.getdefaultlocale returns null
        #some versions of python/platforms return US-ASCII
#when it cannot determine an encoding
if not encoding or encoding == "US-ASCII":
encoding = locale.getdefaultlocale()[1]
if encoding:
codecs.lookup(encoding) # make sure a lookup error is not made
except (locale.Error, LookupError):
encoding = None
is_osx = sys.platform == "darwin"
if not encoding:
return ["US-ASCII", "utf-8"][is_osx]
elif encoding.startswith("mac-") and is_osx:
        #certain versions of python on OSX would return mac-roman as the
        #default, a leftover from earlier mac versions.
return "utf-8"
else:
return encoding
_console_encoding = determine_console_encoding()
def decode_as_string(text, encoding=None):
"""
Decode the console or file output explicitly using getpreferredencoding.
    The text parameter should be an encoded string; if not, no decode occurs.
If no encoding is given, getpreferredencoding is used. If encoding is
specified, that is used instead. This would be needed for SVN --xml
output. Unicode is explicitly put in composed NFC form.
--xml should be UTF-8 (SVN Issue 2938) the discussion on the Subversion
DEV List from 2007 seems to indicate the same.
"""
#text should be a byte string
if encoding is None:
encoding = _console_encoding
if not isinstance(text, six.text_type):
text = text.decode(encoding)
text = unicodedata.normalize('NFC', text)
return text
def parse_dir_entries(decoded_str):
'''Parse the entries from a recursive info xml'''
doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str))
entries = list()
for event, node in doc:
if event == 'START_ELEMENT' and node.nodeName == 'entry':
doc.expandNode(node)
if not _get_entry_schedule(node).startswith('delete'):
entries.append((node.getAttribute('path'),
node.getAttribute('kind')))
return entries[1:] # do not want the root directory
def parse_externals_xml(decoded_str, prefix=''):
'''Parse a propget svn:externals xml'''
prefix = os.path.normpath(prefix)
prefix = os.path.normcase(prefix)
doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str))
externals = list()
for event, node in doc:
if event == 'START_ELEMENT' and node.nodeName == 'target':
doc.expandNode(node)
path = os.path.normpath(node.getAttribute('path'))
if os.path.normcase(path).startswith(prefix):
path = path[len(prefix)+1:]
data = _get_target_property(node)
#data should be decoded already
for external in parse_external_prop(data):
externals.append(joinpath(path, external))
return externals # do not want the root directory
def parse_external_prop(lines):
"""
Parse the value of a retrieved svn:externals entry.
    possible token setups (with quoting and backslash escaping in later versions):
URL[@#] EXT_FOLDERNAME
[-r#] URL EXT_FOLDERNAME
EXT_FOLDERNAME [-r#] URL
"""
externals = []
for line in lines.splitlines():
line = line.lstrip() # there might be a "\ "
if not line:
continue
if six.PY2:
            # shlex handles NULs just fine, but shlex in 2.7 tries to
            # encode as ascii automatically, so pre-encode to utf-8
line = line.encode('utf-8')
line = shlex.split(line)
if six.PY2:
line = [x.decode('utf-8') for x in line]
#EXT_FOLDERNAME is either the first or last depending on where
#the URL falls
if urllib.parse.urlsplit(line[-1])[0]:
external = line[0]
else:
external = line[-1]
external = decode_as_string(external, encoding="utf-8")
externals.append(os.path.normpath(external))
return externals
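# An illustrative sketch of parse_external_prop on the three documented
# token layouts (the _demo_* helper is not part of the original module;
# the URLs are placeholders).
def _demo_parse_external_prop():
    prop = (
        'third_party3 file:///tmp/repos/extra1\n'
        '-r3 file:///tmp/repos/extra1 third_party2\n'
        'file:///tmp/repos/extra1@r1 third_party\n'
    )
    # only the local folder names are returned, regardless of layout
    assert parse_external_prop(prop) == [
        'third_party3', 'third_party2', 'third_party']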
def parse_prop_file(filename, key):
found = False
f = open(filename, 'rt')
data = ''
try:
for line in iter(f.readline, ''): # can't use direct iter!
parts = line.split()
if len(parts) == 2:
kind, length = parts
data = f.read(int(length))
if kind == 'K' and data == key:
found = True
elif kind == 'V' and found:
break
finally:
f.close()
return data
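# An illustrative sketch of the length-prefixed K/V record format that
# parse_prop_file reads (the _demo_* helper is not part of the original
# module; the byte counts assume Unix-style newlines).
def _demo_parse_prop_file():
    import tempfile
    key = 'svn:externals'
    value = 'third_party file:///tmp/repos/extra1\n'
    record = 'K %d\n%s\nV %d\n%s\nEND\n' % (len(key), key, len(value), value)
    handle, name = tempfile.mkstemp()
    try:
        with os.fdopen(handle, 'w') as f:
            f.write(record)
        assert parse_prop_file(name, key) == value
    finally:
        os.remove(name)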
class SvnInfo(object):
'''
    Generic svn_info object.  Has little knowledge of how to extract
    information on its own; use cls.load to instantiate the subclass
    appropriate to the installed svn version.
Paths are not filesystem encoded.
'''
@staticmethod
def get_svn_version():
        # A temporary config directory is enough to check for a repository.
        # This is needed because svn always creates a .subversion directory
        # and some operating systems do not handle dot directories correctly.
        # Real queries in real svn repos will be concerned with its creation.
with TemporaryDirectory() as tempdir:
code, data = _run_command(['svn',
'--config-dir', tempdir,
'--version',
'--quiet'])
if code == 0 and data:
return data.strip()
else:
return ''
#svnversion return values (previous implementations return max revision)
# 4123:4168 mixed revision working copy
# 4168M modified working copy
# 4123S switched working copy
# 4123:4168MS mixed revision, modified, switched working copy
revision_re = re.compile(r'(?:([\-0-9]+):)?(\d+)([a-z]*)\s*$', re.I)
@classmethod
def load(cls, dirname=''):
normdir = os.path.normpath(dirname)
        # A temporary config directory is enough to check for a repository.
        # This is needed because svn always creates a .subversion directory
        # and some operating systems do not handle dot directories correctly.
        # Real queries in real svn repos will be concerned with its creation.
with TemporaryDirectory() as tempdir:
code, data = _run_command(['svn',
'--config-dir', tempdir,
'info', normdir])
        # Must check for some contents, as some tests use empty directories;
        # only the 'entries' file is needed here, and the info command above
        # must have worked.
svn_dir = os.path.join(normdir, '.svn')
is_svn_wd = (not code or
os.path.isfile(os.path.join(svn_dir, 'entries')))
svn_version = tuple(cls.get_svn_version().split('.'))
try:
base_svn_version = tuple(int(x) for x in svn_version[:2])
except ValueError:
base_svn_version = tuple()
if not is_svn_wd:
#return an instance of this NO-OP class
return SvnInfo(dirname)
if code or not base_svn_version or base_svn_version < (1, 3):
warnings.warn(("No SVN 1.3+ command found: falling back "
"on pre 1.7 .svn parsing"), DeprecationWarning)
return SvnFileInfo(dirname)
if base_svn_version < (1, 5):
return Svn13Info(dirname)
return Svn15Info(dirname)
def __init__(self, path=''):
self.path = path
self._entries = None
self._externals = None
def get_revision(self):
'Retrieve the directory revision information using svnversion'
code, data = _run_command(['svnversion', '-c', self.path])
if code:
log.warn("svnversion failed")
return 0
parsed = self.revision_re.match(data)
if parsed:
return int(parsed.group(2))
else:
return 0
@property
def entries(self):
if self._entries is None:
self._entries = self.get_entries()
return self._entries
@property
def externals(self):
if self._externals is None:
self._externals = self.get_externals()
return self._externals
def iter_externals(self):
'''
Iterate over the svn:external references in the repository path.
'''
for item in self.externals:
yield item
def iter_files(self):
'''
Iterate over the non-deleted file entries in the repository path
'''
for item, kind in self.entries:
if kind.lower() == 'file':
yield item
def iter_dirs(self, include_root=True):
'''
        Iterate over the non-deleted directory entries in the repository path
'''
if include_root:
yield self.path
for item, kind in self.entries:
if kind.lower() == 'dir':
yield item
def get_entries(self):
return []
def get_externals(self):
return []
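# An illustrative sketch of revision_re against the svnversion output forms
# documented above (the _demo_* helper is not part of the original module);
# get_revision() returns the second group as an int.
def _demo_revision_re():
    samples = {
        '4168': 4168,          # plain working copy
        '4123:4168': 4168,     # mixed revision working copy
        '4168M': 4168,         # modified working copy
        '4123:4168MS': 4168,   # mixed revision, modified, switched
    }
    for text, expected in samples.items():
        match = SvnInfo.revision_re.match(text)
        assert match and int(match.group(2)) == expected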
class Svn13Info(SvnInfo):
def get_entries(self):
code, data = _run_command(['svn', 'info', '-R', '--xml', self.path],
encoding="utf-8")
if code:
log.debug("svn info failed")
return []
return parse_dir_entries(data)
def get_externals(self):
        # Prior to 1.5, --xml was not supported for svn propget, and the -R
        # output format breaks the shlex-compatible semantics.
cmd = ['svn', 'propget', 'svn:externals']
result = []
for folder in self.iter_dirs():
code, lines = _run_command(cmd + [folder], encoding="utf-8")
if code != 0:
log.warn("svn propget failed")
return []
            # lines should be a str
for external in parse_external_prop(lines):
if folder:
external = os.path.join(folder, external)
result.append(os.path.normpath(external))
return result
class Svn15Info(Svn13Info):
def get_externals(self):
cmd = ['svn', 'propget', 'svn:externals', self.path, '-R', '--xml']
code, lines = _run_command(cmd, encoding="utf-8")
if code:
log.debug("svn propget failed")
return []
return parse_externals_xml(lines, prefix=os.path.abspath(self.path))
class SvnFileInfo(SvnInfo):
def __init__(self, path=''):
super(SvnFileInfo, self).__init__(path)
self._directories = None
self._revision = None
def _walk_svn(self, base):
entry_file = joinpath(base, '.svn', 'entries')
if os.path.isfile(entry_file):
entries = SVNEntriesFile.load(base)
yield (base, False, entries.parse_revision())
for path in entries.get_undeleted_records():
path = decode_as_string(path)
path = joinpath(base, path)
if os.path.isfile(path):
yield (path, True, None)
elif os.path.isdir(path):
for item in self._walk_svn(path):
yield item
def _build_entries(self):
entries = list()
rev = 0
for path, isfile, dir_rev in self._walk_svn(self.path):
if isfile:
entries.append((path, 'file'))
else:
entries.append((path, 'dir'))
rev = max(rev, dir_rev)
self._entries = entries
self._revision = rev
def get_entries(self):
if self._entries is None:
self._build_entries()
return self._entries
def get_revision(self):
if self._revision is None:
self._build_entries()
return self._revision
def get_externals(self):
prop_files = [['.svn', 'dir-prop-base'],
['.svn', 'dir-props']]
externals = []
for dirname in self.iter_dirs():
prop_file = None
for rel_parts in prop_files:
filename = joinpath(dirname, *rel_parts)
if os.path.isfile(filename):
prop_file = filename
if prop_file is not None:
ext_prop = parse_prop_file(prop_file, 'svn:externals')
#ext_prop should be utf-8 coming from svn:externals
ext_prop = decode_as_string(ext_prop, encoding="utf-8")
externals.extend(parse_external_prop(ext_prop))
return externals
def svn_finder(dirname=''):
    # externals and entries are combined behind this common interface
    # due to the lack of dir_props in svn 1.7
info = SvnInfo.load(dirname)
for path in info.iter_files():
yield path
for path in info.iter_externals():
sub_info = SvnInfo.load(path)
for sub_path in sub_info.iter_files():
yield sub_path
class SVNEntriesFile(object):
def __init__(self, data):
self.data = data
@classmethod
def load(class_, base):
filename = os.path.join(base, '.svn', 'entries')
f = open(filename)
try:
result = SVNEntriesFile.read(f)
finally:
f.close()
return result
@classmethod
def read(class_, fileobj):
data = fileobj.read()
is_xml = data.startswith('<?xml')
class_ = [SVNEntriesFileText, SVNEntriesFileXML][is_xml]
return class_(data)
def parse_revision(self):
all_revs = self.parse_revision_numbers() + [0]
return max(all_revs)
class SVNEntriesFileText(SVNEntriesFile):
known_svn_versions = {
'1.4.x': 8,
'1.5.x': 9,
'1.6.x': 10,
}
def __get_cached_sections(self):
return self.sections
def get_sections(self):
SECTION_DIVIDER = '\f\n'
sections = self.data.split(SECTION_DIVIDER)
        sections = list(map(str.splitlines, sections))
try:
# remove the SVN version number from the first line
svn_version = int(sections[0].pop(0))
            if svn_version not in self.known_svn_versions.values():
                log.warn("Unknown subversion version %d", svn_version)
except ValueError:
return
self.sections = sections
self.get_sections = self.__get_cached_sections
return self.sections
def is_valid(self):
return bool(self.get_sections())
def get_url(self):
return self.get_sections()[0][4]
def parse_revision_numbers(self):
revision_line_number = 9
rev_numbers = [
int(section[revision_line_number])
for section in self.get_sections()
if (len(section) > revision_line_number
and section[revision_line_number])
]
return rev_numbers
def get_undeleted_records(self):
undeleted = lambda s: s and s[0] and (len(s) < 6 or s[5] != 'delete')
result = [
section[0]
for section in self.get_sections()
if undeleted(section)
]
return result
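# An illustrative sketch of the pre-1.7 text 'entries' format this class
# parses (the _demo_* helper is not part of the original module): a format
# number, then '\f\n'-separated sections whose field 0 is the entry name,
# field 5 the schedule and field 9 the committed revision.
def _demo_entries_file_text():
    def section(name, schedule, rev):
        return '\n'.join(
            [name, 'file', '', '', '', schedule, '', '', '', rev])
    data = '\f\n'.join([
        '10\n\ndir',                            # format number + root entry
        section('kept.txt', '', '41001'),
        section('gone.txt', 'delete', '41002'),
    ])
    entries = SVNEntriesFileText(data)
    assert entries.parse_revision() == 41002
    assert entries.get_undeleted_records() == ['kept.txt']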
class SVNEntriesFileXML(SVNEntriesFile):
def is_valid(self):
return True
def get_url(self):
"Get repository URL"
urlre = re.compile('url="([^"]+)"')
return urlre.search(self.data).group(1)
def parse_revision_numbers(self):
revre = re.compile(r'committed-rev="(\d+)"')
return [
int(m.group(1))
for m in revre.finditer(self.data)
]
def get_undeleted_records(self):
entries_pattern = \
re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I)
results = [
unescape(match.group(1))
for match in entries_pattern.finditer(self.data)
]
return results
if __name__ == '__main__':
for name in svn_finder(sys.argv[1]):
print(name)
"""Tests for the 'setuptools' package""" """Tests for the 'setuptools' package"""
import sys import sys
import os import os
import unittest
import doctest
import distutils.core import distutils.core
import distutils.cmd import distutils.cmd
from distutils.errors import DistutilsOptionError, DistutilsPlatformError from distutils.errors import DistutilsOptionError, DistutilsPlatformError
...@@ -11,23 +9,13 @@ from distutils.core import Extension ...@@ -11,23 +9,13 @@ from distutils.core import Extension
from distutils.version import LooseVersion from distutils.version import LooseVersion
import six import six
import pytest
import setuptools.dist import setuptools.dist
import setuptools.depends as dep import setuptools.depends as dep
from setuptools import Feature from setuptools import Feature
from setuptools.depends import Require from setuptools.depends import Require
def additional_tests():
suite = unittest.TestSuite((
doctest.DocFileSuite(
os.path.join('tests', 'api_tests.txt'),
optionflags=doctest.ELLIPSIS, package='pkg_resources',
),
))
if sys.platform == 'win32':
suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt'))
return suite
def makeSetup(**args): def makeSetup(**args):
"""Return distribution from 'setup(**args)', without executing commands""" """Return distribution from 'setup(**args)', without executing commands"""
...@@ -42,7 +30,12 @@ def makeSetup(**args): ...@@ -42,7 +30,12 @@ def makeSetup(**args):
distutils.core._setup_stop_after = None distutils.core._setup_stop_after = None
class DependsTests(unittest.TestCase): needs_bytecode = pytest.mark.skipif(
not hasattr(dep, 'get_module_constant'),
reason="bytecode support not available",
)
class TestDepends:
def testExtractConst(self): def testExtractConst(self):
if not hasattr(dep, 'extract_constant'): if not hasattr(dep, 'extract_constant'):
...@@ -55,86 +48,77 @@ class DependsTests(unittest.TestCase): ...@@ -55,86 +48,77 @@ class DependsTests(unittest.TestCase):
y = z y = z
fc = six.get_function_code(f1) fc = six.get_function_code(f1)
# unrecognized name # unrecognized name
self.assertEqual(dep.extract_constant(fc,'q', -1), None) assert dep.extract_constant(fc,'q', -1) is None
# constant assigned # constant assigned
self.assertEqual(dep.extract_constant(fc,'x', -1), "test") assert dep.extract_constant(fc,'x', -1) == "test"
# expression assigned # expression assigned
self.assertEqual(dep.extract_constant(fc,'y', -1), -1) assert dep.extract_constant(fc,'y', -1) == -1
# recognized name, not assigned # recognized name, not assigned
self.assertEqual(dep.extract_constant(fc,'z', -1), None) assert dep.extract_constant(fc,'z', -1) is None
def testFindModule(self): def testFindModule(self):
self.assertRaises(ImportError, dep.find_module, 'no-such.-thing') with pytest.raises(ImportError):
self.assertRaises(ImportError, dep.find_module, 'setuptools.non-existent') dep.find_module('no-such.-thing')
with pytest.raises(ImportError):
dep.find_module('setuptools.non-existent')
f,p,i = dep.find_module('setuptools.tests') f,p,i = dep.find_module('setuptools.tests')
f.close() f.close()
@needs_bytecode
def testModuleExtract(self): def testModuleExtract(self):
if not hasattr(dep, 'get_module_constant'):
# skip on non-bytecode platforms
return
from email import __version__ from email import __version__
self.assertEqual( assert dep.get_module_constant('email','__version__') == __version__
dep.get_module_constant('email','__version__'), __version__ assert dep.get_module_constant('sys','version') == sys.version
) assert dep.get_module_constant('setuptools.tests','__doc__') == __doc__
self.assertEqual(
dep.get_module_constant('sys','version'), sys.version
)
self.assertEqual(
dep.get_module_constant('setuptools.tests','__doc__'),__doc__
)
@needs_bytecode
def testRequire(self): def testRequire(self):
if not hasattr(dep, 'extract_constant'):
# skip on non-bytecode platformsh
return
req = Require('Email','1.0.3','email') req = Require('Email','1.0.3','email')
self.assertEqual(req.name, 'Email') assert req.name == 'Email'
self.assertEqual(req.module, 'email') assert req.module == 'email'
self.assertEqual(req.requested_version, '1.0.3') assert req.requested_version == '1.0.3'
self.assertEqual(req.attribute, '__version__') assert req.attribute == '__version__'
self.assertEqual(req.full_name(), 'Email-1.0.3') assert req.full_name() == 'Email-1.0.3'
from email import __version__ from email import __version__
self.assertEqual(req.get_version(), __version__) assert req.get_version() == __version__
self.assertTrue(req.version_ok('1.0.9')) assert req.version_ok('1.0.9')
self.assertTrue(not req.version_ok('0.9.1')) assert not req.version_ok('0.9.1')
self.assertTrue(not req.version_ok('unknown')) assert not req.version_ok('unknown')
self.assertTrue(req.is_present()) assert req.is_present()
self.assertTrue(req.is_current()) assert req.is_current()
req = Require('Email 3000','03000','email',format=LooseVersion) req = Require('Email 3000','03000','email',format=LooseVersion)
self.assertTrue(req.is_present()) assert req.is_present()
self.assertTrue(not req.is_current()) assert not req.is_current()
self.assertTrue(not req.version_ok('unknown')) assert not req.version_ok('unknown')
req = Require('Do-what-I-mean','1.0','d-w-i-m') req = Require('Do-what-I-mean','1.0','d-w-i-m')
self.assertTrue(not req.is_present()) assert not req.is_present()
self.assertTrue(not req.is_current()) assert not req.is_current()
req = Require('Tests', None, 'tests', homepage="http://example.com") req = Require('Tests', None, 'tests', homepage="http://example.com")
self.assertEqual(req.format, None) assert req.format is None
self.assertEqual(req.attribute, None) assert req.attribute is None
self.assertEqual(req.requested_version, None) assert req.requested_version is None
self.assertEqual(req.full_name(), 'Tests') assert req.full_name() == 'Tests'
self.assertEqual(req.homepage, 'http://example.com') assert req.homepage == 'http://example.com'
paths = [os.path.dirname(p) for p in __path__] paths = [os.path.dirname(p) for p in __path__]
self.assertTrue(req.is_present(paths)) assert req.is_present(paths)
self.assertTrue(req.is_current(paths)) assert req.is_current(paths)
class DistroTests(unittest.TestCase): class TestDistro:
def setUp(self): def setup_method(self, method):
self.e1 = Extension('bar.ext',['bar.c']) self.e1 = Extension('bar.ext',['bar.c'])
self.e2 = Extension('c.y', ['y.c']) self.e2 = Extension('c.y', ['y.c'])
...@@ -146,21 +130,21 @@ class DistroTests(unittest.TestCase): ...@@ -146,21 +130,21 @@ class DistroTests(unittest.TestCase):
) )
def testDistroType(self): def testDistroType(self):
self.assertTrue(isinstance(self.dist,setuptools.dist.Distribution)) assert isinstance(self.dist,setuptools.dist.Distribution)
def testExcludePackage(self): def testExcludePackage(self):
self.dist.exclude_package('a') self.dist.exclude_package('a')
self.assertEqual(self.dist.packages, ['b','c']) assert self.dist.packages == ['b','c']
self.dist.exclude_package('b') self.dist.exclude_package('b')
self.assertEqual(self.dist.packages, ['c']) assert self.dist.packages == ['c']
self.assertEqual(self.dist.py_modules, ['x']) assert self.dist.py_modules == ['x']
self.assertEqual(self.dist.ext_modules, [self.e1, self.e2]) assert self.dist.ext_modules == [self.e1, self.e2]
self.dist.exclude_package('c') self.dist.exclude_package('c')
self.assertEqual(self.dist.packages, []) assert self.dist.packages == []
self.assertEqual(self.dist.py_modules, ['x']) assert self.dist.py_modules == ['x']
self.assertEqual(self.dist.ext_modules, [self.e1]) assert self.dist.ext_modules == [self.e1]
# test removals from unspecified options # test removals from unspecified options
makeSetup().exclude_package('x') makeSetup().exclude_package('x')
...@@ -168,21 +152,21 @@ class DistroTests(unittest.TestCase): ...@@ -168,21 +152,21 @@ class DistroTests(unittest.TestCase):
def testIncludeExclude(self): def testIncludeExclude(self):
# remove an extension # remove an extension
self.dist.exclude(ext_modules=[self.e1]) self.dist.exclude(ext_modules=[self.e1])
self.assertEqual(self.dist.ext_modules, [self.e2]) assert self.dist.ext_modules == [self.e2]
# add it back in # add it back in
self.dist.include(ext_modules=[self.e1]) self.dist.include(ext_modules=[self.e1])
self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) assert self.dist.ext_modules == [self.e2, self.e1]
# should not add duplicate # should not add duplicate
self.dist.include(ext_modules=[self.e1]) self.dist.include(ext_modules=[self.e1])
self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) assert self.dist.ext_modules == [self.e2, self.e1]
def testExcludePackages(self): def testExcludePackages(self):
self.dist.exclude(packages=['c','b','a']) self.dist.exclude(packages=['c','b','a'])
self.assertEqual(self.dist.packages, []) assert self.dist.packages == []
self.assertEqual(self.dist.py_modules, ['x']) assert self.dist.py_modules == ['x']
self.assertEqual(self.dist.ext_modules, [self.e1]) assert self.dist.ext_modules == [self.e1]
def testEmpty(self): def testEmpty(self):
dist = makeSetup() dist = makeSetup()
...@@ -191,49 +175,41 @@ class DistroTests(unittest.TestCase): ...@@ -191,49 +175,41 @@ class DistroTests(unittest.TestCase):
dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
def testContents(self): def testContents(self):
self.assertTrue(self.dist.has_contents_for('a')) assert self.dist.has_contents_for('a')
self.dist.exclude_package('a') self.dist.exclude_package('a')
self.assertTrue(not self.dist.has_contents_for('a')) assert not self.dist.has_contents_for('a')
self.assertTrue(self.dist.has_contents_for('b')) assert self.dist.has_contents_for('b')
self.dist.exclude_package('b') self.dist.exclude_package('b')
self.assertTrue(not self.dist.has_contents_for('b')) assert not self.dist.has_contents_for('b')
self.assertTrue(self.dist.has_contents_for('c')) assert self.dist.has_contents_for('c')
self.dist.exclude_package('c') self.dist.exclude_package('c')
self.assertTrue(not self.dist.has_contents_for('c')) assert not self.dist.has_contents_for('c')
def testInvalidIncludeExclude(self): def testInvalidIncludeExclude(self):
self.assertRaises(DistutilsSetupError, with pytest.raises(DistutilsSetupError):
self.dist.include, nonexistent_option='x' self.dist.include(nonexistent_option='x')
) with pytest.raises(DistutilsSetupError):
self.assertRaises(DistutilsSetupError, self.dist.exclude(nonexistent_option='x')
self.dist.exclude, nonexistent_option='x' with pytest.raises(DistutilsSetupError):
) self.dist.include(packages={'x':'y'})
self.assertRaises(DistutilsSetupError, with pytest.raises(DistutilsSetupError):
self.dist.include, packages={'x':'y'} self.dist.exclude(packages={'x':'y'})
) with pytest.raises(DistutilsSetupError):
self.assertRaises(DistutilsSetupError, self.dist.include(ext_modules={'x':'y'})
self.dist.exclude, packages={'x':'y'} with pytest.raises(DistutilsSetupError):
) self.dist.exclude(ext_modules={'x':'y'})
self.assertRaises(DistutilsSetupError,
self.dist.include, ext_modules={'x':'y'} with pytest.raises(DistutilsSetupError):
) self.dist.include(package_dir=['q'])
self.assertRaises(DistutilsSetupError, with pytest.raises(DistutilsSetupError):
self.dist.exclude, ext_modules={'x':'y'} self.dist.exclude(package_dir=['q'])
)
self.assertRaises(DistutilsSetupError, class TestFeatures:
self.dist.include, package_dir=['q']
) def setup_method(self, method):
self.assertRaises(DistutilsSetupError,
self.dist.exclude, package_dir=['q']
)
class FeatureTests(unittest.TestCase):
def setUp(self):
self.req = Require('Distutils','1.0.3','distutils') self.req = Require('Distutils','1.0.3','distutils')
self.dist = makeSetup( self.dist = makeSetup(
features={ features={
...@@ -255,80 +231,75 @@ class FeatureTests(unittest.TestCase): ...@@ -255,80 +231,75 @@ class FeatureTests(unittest.TestCase):
) )
def testDefaults(self): def testDefaults(self):
self.assertTrue(not assert not Feature(
Feature( "test",standard=True,remove='x',available=False
"test",standard=True,remove='x',available=False ).include_by_default()
).include_by_default() assert Feature("test",standard=True,remove='x').include_by_default()
)
self.assertTrue(
Feature("test",standard=True,remove='x').include_by_default()
)
# Feature must have either kwargs, removes, or require_features # Feature must have either kwargs, removes, or require_features
self.assertRaises(DistutilsSetupError, Feature, "test") with pytest.raises(DistutilsSetupError):
Feature("test")
def testAvailability(self): def testAvailability(self):
self.assertRaises( with pytest.raises(DistutilsPlatformError):
DistutilsPlatformError, self.dist.features['dwim'].include_in(self.dist)
self.dist.features['dwim'].include_in, self.dist
)
def testFeatureOptions(self): def testFeatureOptions(self):
dist = self.dist dist = self.dist
self.assertTrue( assert (
('with-dwim',None,'include DWIM') in dist.feature_options ('with-dwim',None,'include DWIM') in dist.feature_options
) )
self.assertTrue( assert (
('without-dwim',None,'exclude DWIM (default)') in dist.feature_options ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options
) )
self.assertTrue( assert (
('with-bar',None,'include bar (default)') in dist.feature_options ('with-bar',None,'include bar (default)') in dist.feature_options
) )
self.assertTrue( assert (
('without-bar',None,'exclude bar') in dist.feature_options ('without-bar',None,'exclude bar') in dist.feature_options
) )
self.assertEqual(dist.feature_negopt['without-foo'],'with-foo') assert dist.feature_negopt['without-foo'] == 'with-foo'
self.assertEqual(dist.feature_negopt['without-bar'],'with-bar') assert dist.feature_negopt['without-bar'] == 'with-bar'
self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim') assert dist.feature_negopt['without-dwim'] == 'with-dwim'
self.assertTrue(not 'without-baz' in dist.feature_negopt) assert (not 'without-baz' in dist.feature_negopt)
def testUseFeatures(self): def testUseFeatures(self):
dist = self.dist dist = self.dist
self.assertEqual(dist.with_foo,1) assert dist.with_foo == 1
self.assertEqual(dist.with_bar,0) assert dist.with_bar == 0
self.assertEqual(dist.with_baz,1) assert dist.with_baz == 1
self.assertTrue(not 'bar_et' in dist.py_modules) assert (not 'bar_et' in dist.py_modules)
self.assertTrue(not 'pkg.bar' in dist.packages) assert (not 'pkg.bar' in dist.packages)
self.assertTrue('pkg.baz' in dist.packages) assert ('pkg.baz' in dist.packages)
self.assertTrue('scripts/baz_it' in dist.scripts) assert ('scripts/baz_it' in dist.scripts)
self.assertTrue(('libfoo','foo/foofoo.c') in dist.libraries) assert (('libfoo','foo/foofoo.c') in dist.libraries)
self.assertEqual(dist.ext_modules,[]) assert dist.ext_modules == []
self.assertEqual(dist.require_features, [self.req]) assert dist.require_features == [self.req]
# If we ask for bar, it should fail because we explicitly disabled # If we ask for bar, it should fail because we explicitly disabled
# it on the command line # it on the command line
self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar') with pytest.raises(DistutilsOptionError):
dist.include_feature('bar')
def testFeatureWithInvalidRemove(self): def testFeatureWithInvalidRemove(self):
self.assertRaises( with pytest.raises(SystemExit):
SystemExit, makeSetup, features = {'x':Feature('x', remove='y')} makeSetup(features={'x':Feature('x', remove='y')})
)
class TestCommandTests(unittest.TestCase): class TestCommandTests:
def testTestIsCommand(self): def testTestIsCommand(self):
test_cmd = makeSetup().get_command_obj('test') test_cmd = makeSetup().get_command_obj('test')
self.assertTrue(isinstance(test_cmd, distutils.cmd.Command)) assert (isinstance(test_cmd, distutils.cmd.Command))
def testLongOptSuiteWNoDefault(self): def testLongOptSuiteWNoDefault(self):
ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite']) ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite'])
ts1 = ts1.get_command_obj('test') ts1 = ts1.get_command_obj('test')
ts1.ensure_finalized() ts1.ensure_finalized()
self.assertEqual(ts1.test_suite, 'foo.tests.suite') assert ts1.test_suite == 'foo.tests.suite'
def testDefaultSuite(self): def testDefaultSuite(self):
ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test') ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
ts2.ensure_finalized() ts2.ensure_finalized()
self.assertEqual(ts2.test_suite, 'bar.tests.suite') assert ts2.test_suite == 'bar.tests.suite'
def testDefaultWModuleOnCmdLine(self): def testDefaultWModuleOnCmdLine(self):
ts3 = makeSetup( ts3 = makeSetup(
...@@ -336,16 +307,17 @@ class TestCommandTests(unittest.TestCase): ...@@ -336,16 +307,17 @@ class TestCommandTests(unittest.TestCase):
script_args=['test','-m','foo.tests'] script_args=['test','-m','foo.tests']
).get_command_obj('test') ).get_command_obj('test')
ts3.ensure_finalized() ts3.ensure_finalized()
self.assertEqual(ts3.test_module, 'foo.tests') assert ts3.test_module == 'foo.tests'
self.assertEqual(ts3.test_suite, 'foo.tests.test_suite') assert ts3.test_suite == 'foo.tests.test_suite'
def testConflictingOptions(self): def testConflictingOptions(self):
ts4 = makeSetup( ts4 = makeSetup(
script_args=['test','-m','bar.tests', '-s','foo.tests.suite'] script_args=['test','-m','bar.tests', '-s','foo.tests.suite']
).get_command_obj('test') ).get_command_obj('test')
self.assertRaises(DistutilsOptionError, ts4.ensure_finalized) with pytest.raises(DistutilsOptionError):
ts4.ensure_finalized()
def testNoSuite(self): def testNoSuite(self):
ts5 = makeSetup().get_command_obj('test') ts5 = makeSetup().get_command_obj('test')
ts5.ensure_finalized() ts5.ensure_finalized()
self.assertEqual(ts5.test_suite, None) assert ts5.test_suite == None
import tempfile
import os
import shutil
import sys
import contextlib
import site
import six
@contextlib.contextmanager
def tempdir(cd=lambda dir:None, **kwargs):
temp_dir = tempfile.mkdtemp(**kwargs)
orig_dir = os.getcwd()
try:
cd(temp_dir)
yield temp_dir
finally:
cd(orig_dir)
shutil.rmtree(temp_dir)
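# Illustrative usage (this demo is not part of the original helpers): pass
# cd=os.chdir to run the block inside the scratch directory; the previous
# cwd is restored and the tree removed on exit.
def _demo_tempdir():
    with tempdir(cd=os.chdir) as temp_dir:
        assert os.path.realpath(os.getcwd()) == os.path.realpath(temp_dir)
        open('scratch.txt', 'w').close()
    assert not os.path.exists(temp_dir)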
@contextlib.contextmanager
def environment(**replacements):
"""
In a context, patch the environment with replacements. Pass None values
to clear the values.
"""
saved = dict(
        (key, os.environ[key])
for key in replacements
if key in os.environ
)
# remove values that are null
remove = (key for (key, value) in replacements.items() if value is None)
for key in list(remove):
os.environ.pop(key, None)
replacements.pop(key)
os.environ.update(replacements)
try:
yield saved
finally:
for key in replacements:
os.environ.pop(key, None)
os.environ.update(saved)
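# Illustrative usage (this demo is not part of the original helpers): a
# value patches the variable, None removes it, and everything is restored
# on exit.
def _demo_environment():
    os.environ['DEMO_KEEP'] = 'original'
    with environment(DEMO_KEEP='patched', DEMO_DROP=None):
        assert os.environ['DEMO_KEEP'] == 'patched'
        assert 'DEMO_DROP' not in os.environ
    assert os.environ['DEMO_KEEP'] == 'original'
    os.environ.pop('DEMO_KEEP', None)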
@contextlib.contextmanager
def argv(repl):
old_argv = sys.argv[:]
sys.argv[:] = repl
yield
sys.argv[:] = old_argv
@contextlib.contextmanager
def quiet():
"""
Redirect stdout/stderr to StringIO objects to prevent console output from
distutils commands.
"""
old_stdout = sys.stdout
old_stderr = sys.stderr
new_stdout = sys.stdout = six.StringIO()
new_stderr = sys.stderr = six.StringIO()
try:
yield new_stdout, new_stderr
finally:
new_stdout.seek(0)
new_stderr.seek(0)
sys.stdout = old_stdout
sys.stderr = old_stderr
@contextlib.contextmanager
def save_user_site_setting():
saved = site.ENABLE_USER_SITE
try:
yield saved
finally:
site.ENABLE_USER_SITE = saved
@contextlib.contextmanager
def suppress_exceptions(*excs):
try:
yield
except excs:
pass
10
dir
89001
http://svn.python.org/projects/sandbox/branches/setuptools-0.6
http://svn.python.org/projects
2013-06-03T17:26:03.052972Z
89000
phillip.eby
6015fed2-1504-0410-9fe1-9d1591cc4771
api_tests.txt
file
2013-06-19T13:20:47.948712Z
dec366372ca14fbeaeb26f492bcf5725
2013-05-15T22:04:59.389374Z
88997
phillip.eby
has-props
12312
setuptools.egg-info
dir
README.txt
file
2013-06-19T13:20:47.948712Z
26f0dd5d095522ba3ad999b6b6777b92
2011-05-31T20:10:56.416725Z
88846
phillip.eby
has-props
7615
easy_install.py
file
2013-06-19T13:20:47.948712Z
97b52fe7253bf4683f9f626f015eb72e
2006-09-20T20:48:18.716070Z
51935
phillip.eby
has-props
126
setuptools
dir
launcher.c
file
2013-06-19T13:20:47.924700Z
e5a8e77de9022688b80f77fc6d742fee
2009-10-19T21:03:29.785400Z
75544
phillip.eby
has-props
7476
ez_setup.py
file
2013-06-19T13:20:47.924700Z
17e8ec5e08faccfcb08b5f8d5167ca14
2011-01-20T18:50:00.815420Z
88124
phillip.eby
has-props
8350
version
file
2013-06-19T13:20:47.924700Z
e456da09e0c9e224a56302f8316b6dbf
2007-01-09T19:21:05.921317Z
53317
phillip.eby
has-props
1143
setup.py
file
2013-06-19T13:20:47.924700Z
d4e5b3c16bd61bfef6c0bb9377a3a3ea
2013-05-15T22:04:59.389374Z
88997
phillip.eby
has-props
5228
release.sh
file
2013-06-19T13:20:47.932704Z
b1fd4054a1c107ff0f27baacd97be94c
2009-10-28T17:12:45.227140Z
75925
phillip.eby
has-props
1044
pkg_resources.txt
file
2013-06-19T13:20:47.928702Z
f497e7c92a4de207cbd9ab1943f93388
2009-10-12T20:00:02.336146Z
75385
phillip.eby
has-props
94518
site.py
file
2013-06-19T13:20:47.932704Z
ebaac6fb6525f77ca950d22e6f8315df
2006-03-11T00:39:09.666740Z
42965
phillip.eby
has-props
2362
version.dat
file
2013-06-19T13:20:47.932704Z
8e14ecea32b9874cd7d29277494554c0
2009-10-28T17:12:45.227140Z
75925
phillip.eby
has-props
80
virtual-python.py
file
2013-06-19T13:20:47.932704Z
aa857add3b5563238f0a904187f5ded9
2005-10-17T02:26:39.000000Z
41262
pje
has-props
3898
setup.cfg
file
2013-06-19T13:20:47.932704Z
eda883e744fce83f8107ad8dc8303536
2006-09-21T22:26:48.050256Z
51965
phillip.eby
has-props
296
setuptools.txt
file
2013-06-19T13:20:47.940708Z
11926256f06046b196eaf814772504e7
2013-05-15T22:04:59.389374Z
88997
phillip.eby
has-props
149832
pkg_resources.py
file
2013-06-19T13:20:47.940708Z
b63a30f5f0f0225a788c2c0e3430b3cf
2013-05-15T22:04:59.389374Z
88997
phillip.eby
has-props
90397
tests
dir
wikiup.cfg
file
2013-06-19T13:20:47.944710Z
34ad845a5e0a0b46458557fa910bf429
2008-08-21T17:23:50.797633Z
65935
phillip.eby
has-props
136
EasyInstall.txt
file
2013-06-19T13:20:47.944710Z
e97387c517f70fc18a377e42d19d64d4
2013-05-15T22:04:59.389374Z
88997
phillip.eby
has-props
82495
import os import os
import zipfile
import sys import sys
import tempfile
import unittest
import shutil
import stat
import unicodedata import unicodedata
from subprocess import Popen as _Popen, PIPE as _PIPE from subprocess import Popen as _Popen, PIPE as _PIPE
def _remove_dir(target):
    # on Windows this seems to be a problem
for dir_path, dirs, files in os.walk(target):
os.chmod(dir_path, stat.S_IWRITE)
for filename in files:
os.chmod(os.path.join(dir_path, filename), stat.S_IWRITE)
shutil.rmtree(target)
class ZippedEnvironment(unittest.TestCase):
datafile = None
dataname = None
old_cwd = None
def setUp(self):
if self.datafile is None or self.dataname is None:
return
if not os.path.isfile(self.datafile):
self.old_cwd = None
return
self.old_cwd = os.getcwd()
self.temp_dir = tempfile.mkdtemp()
zip_file, source, target = [None, None, None]
try:
zip_file = zipfile.ZipFile(self.datafile)
for files in zip_file.namelist():
zip_file.extract(files, self.temp_dir)
finally:
if zip_file:
zip_file.close()
del zip_file
os.chdir(os.path.join(self.temp_dir, self.dataname))
def tearDown(self):
#Assume setUp was never completed
if self.dataname is None or self.datafile is None:
return
try:
if self.old_cwd:
os.chdir(self.old_cwd)
_remove_dir(self.temp_dir)
except OSError:
#sigh?
pass
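# Illustrative usage (hypothetical names, not part of the original module):
# a concrete test case points `datafile` at a zip fixture and `dataname` at
# the directory it contains; setUp extracts the zip into a temp dir and
# chdirs into it, and tearDown restores the cwd and removes the tree.
#
#     class DemoZippedCase(ZippedEnvironment):
#         datafile = os.path.join('tests', 'data', 'demo_example.zip')
#         dataname = 'demo_example'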
def _which_dirs(cmd): def _which_dirs(cmd):
result = set() result = set()
for path in os.environ.get('PATH', '').split(os.pathsep): for path in os.environ.get('PATH', '').split(os.pathsep):
......
import mock
import pytest
from . import contexts
@pytest.yield_fixture
def user_override():
"""
Override site.USER_BASE and site.USER_SITE with temporary directories in
a context.
"""
with contexts.tempdir() as user_base:
with mock.patch('site.USER_BASE', user_base):
with contexts.tempdir() as user_site:
with mock.patch('site.USER_SITE', user_site):
with contexts.save_user_site_setting():
yield
@pytest.yield_fixture
def tmpdir_cwd(tmpdir):
with tmpdir.as_cwd() as orig:
yield orig
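# Illustrative usage (hypothetical test, not part of the original module):
# a test requests these fixtures by argument name; user_override redirects
# site.USER_BASE / site.USER_SITE into temporary directories, and tmpdir_cwd
# runs the test with pytest's tmpdir as the working directory.
#
#     def test_user_install(user_override, tmpdir_cwd):
#         ...  # user site dirs and cwd are isolated here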
import unittest import sys
import tarfile
import contextlib
try: def _tarfile_open_ex(*args, **kwargs):
# provide skipIf for Python 2.4-2.6 """
skipIf = unittest.skipIf Extend result as a context manager.
except AttributeError: """
def skipIf(condition, reason): return contextlib.closing(tarfile.open(*args, **kwargs))
def skipper(func):
def skip(*args, **kwargs): tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
return
if condition:
return skip
return func
return skipper
"""Basic http server for tests to simulate PyPI or custom indexes """Basic http server for tests to simulate PyPI or custom indexes
""" """
import sys
import time import time
import threading import threading
from six.moves import BaseHTTPServer, SimpleHTTPServer, urllib from six.moves import BaseHTTPServer, SimpleHTTPServer
class IndexServer(BaseHTTPServer.HTTPServer): class IndexServer(BaseHTTPServer.HTTPServer):
"""Basic single-threaded http server simulating a package index """Basic single-threaded http server simulating a package index
...@@ -23,12 +23,8 @@ class IndexServer(BaseHTTPServer.HTTPServer): ...@@ -23,12 +23,8 @@ class IndexServer(BaseHTTPServer.HTTPServer):
RequestHandlerClass) RequestHandlerClass)
self._run = True self._run = True
def serve(self):
while self._run:
self.handle_request()
def start(self): def start(self):
self.thread = threading.Thread(target=self.serve) self.thread = threading.Thread(target=self.serve_forever)
self.thread.start() self.thread.start()
def stop(self): def stop(self):
...@@ -37,19 +33,7 @@ class IndexServer(BaseHTTPServer.HTTPServer): ...@@ -37,19 +33,7 @@ class IndexServer(BaseHTTPServer.HTTPServer):
# Let the server finish the last request and wait for a new one. # Let the server finish the last request and wait for a new one.
time.sleep(0.1) time.sleep(0.1)
# self.shutdown is not supported on python < 2.6, so just self.shutdown()
# set _run to false, and make a request, causing it to
# terminate.
self._run = False
url = 'http://127.0.0.1:%(server_port)s/' % vars(self)
try:
if sys.version_info >= (2, 6):
urllib.request.urlopen(url, timeout=5)
else:
urllib.request.urlopen(url)
except urllib.error.URLError:
# ignore any errors; all that's important is the request
pass
self.thread.join() self.thread.join()
self.socket.close() self.socket.close()
...@@ -78,6 +62,6 @@ class MockServer(BaseHTTPServer.HTTPServer, threading.Thread): ...@@ -78,6 +62,6 @@ class MockServer(BaseHTTPServer.HTTPServer, threading.Thread):
def run(self): def run(self):
self.serve_forever() self.serve_forever()
@property
def url(self): def url(self):
return 'http://localhost:%(server_port)s/' % vars(self) return 'http://localhost:%(server_port)s/' % vars(self)
url = property(url)
third_party3 file:///C:/development/svn_example/repos/svn13/extra1
third_party2 -r3 file:///C:/development/svn_example/repos/svn13/extra1
third_party -r1 file:///C:/development/svn_example/repos/svn13/extra1
<?xml version="1.0" encoding="utf-8"?>
<info>
<entry
kind="dir"
path="svn13_example"
revision="6">
<url>file:///C:/development/svn_example/repos/svn13/main</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn13/main</root>
<uuid>d2996769-47b0-9946-b618-da1aa3eceda3</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<prop-updated>2013-07-13T15:33:23.187500Z</prop-updated>
</wc-info>
<commit
revision="6">
<author>ptt</author>
<date>2013-07-13T15:33:28.359375Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn13_example\a file"
revision="6">
<url>file:///C:/development/svn_example/repos/svn13/main/a%20file</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn13/main</root>
<uuid>d2996769-47b0-9946-b618-da1aa3eceda3</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<text-updated>2013-07-13T15:33:21.109375Z</text-updated>
<checksum>a6166e5e98a5a503089cde9bc8031293</checksum>
</wc-info>
<commit
revision="3">
<author>ptt</author>
<date>2013-07-13T15:33:21.312500Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn13_example\to_delete"
revision="6">
<url>file:///C:/development/svn_example/repos/svn13/main/to_delete</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn13/main</root>
<uuid>d2996769-47b0-9946-b618-da1aa3eceda3</uuid>
</repository>
<wc-info>
<schedule>delete</schedule>
<text-updated>2013-07-13T15:33:28.140625Z</text-updated>
<checksum>d41d8cd98f00b204e9800998ecf8427e</checksum>
</wc-info>
<commit
revision="6">
<author>ptt</author>
<date>2013-07-13T15:33:28.359375Z</date>
</commit>
</entry>
<entry
kind="dir"
path="svn13_example\folder"
revision="6">
<url>file:///C:/development/svn_example/repos/svn13/main/folder</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn13/main</root>
<uuid>d2996769-47b0-9946-b618-da1aa3eceda3</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<prop-updated>2013-07-13T15:33:26.187500Z</prop-updated>
</wc-info>
<commit
revision="5">
<author>ptt</author>
<date>2013-07-13T15:33:26.312500Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn13_example\folder\quest.txt"
revision="6">
<url>file:///C:/development/svn_example/repos/svn13/main/folder/quest.txt</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn13/main</root>
<uuid>d2996769-47b0-9946-b618-da1aa3eceda3</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<text-updated>2013-07-13T15:33:20.109375Z</text-updated>
<checksum>795240c6a830c14f83961e57e07dad12</checksum>
</wc-info>
<commit
revision="2">
<author>ptt</author>
<date>2013-07-13T15:33:20.312500Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn13_example\folder\lalala.txt"
revision="6">
<url>file:///C:/development/svn_example/repos/svn13/main/folder/lalala.txt</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn13/main</root>
<uuid>d2996769-47b0-9946-b618-da1aa3eceda3</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<text-updated>2013-07-13T15:33:19.375000Z</text-updated>
<checksum>d41d8cd98f00b204e9800998ecf8427e</checksum>
</wc-info>
<commit
revision="1">
<author>ptt</author>
<date>2013-07-13T15:33:19.609375Z</date>
</commit>
</entry>
</info>
third_party3 file:///C:/development/svn_example/repos/svn13/extra1
third_party2 -r3 file:///C:/development/svn_example/repos/svn13/extra1
third_party -r1 file:///C:/development/svn_example/repos/svn13/extra1
<?xml version="1.0"?>
<info>
<entry
kind="dir"
path="svn14_example"
revision="6">
<url>file:///C:/development/svn_example/repos/svn14/main</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn14/main</root>
<uuid>c75942e5-8b7a-354d-b1cf-73dee23fa94f</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
</wc-info>
<commit
revision="6">
<author>ptt</author>
<date>2013-07-13T15:34:14.406250Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn14_example\a file"
revision="6">
<url>file:///C:/development/svn_example/repos/svn14/main/a%20file</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn14/main</root>
<uuid>c75942e5-8b7a-354d-b1cf-73dee23fa94f</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<text-updated>2013-07-13T15:34:08.109375Z</text-updated>
<checksum>a6166e5e98a5a503089cde9bc8031293</checksum>
</wc-info>
<commit
revision="3">
<author>ptt</author>
<date>2013-07-13T15:34:08.390625Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn14_example\to_delete"
revision="6">
<url>file:///C:/development/svn_example/repos/svn14/main/to_delete</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn14/main</root>
<uuid>c75942e5-8b7a-354d-b1cf-73dee23fa94f</uuid>
</repository>
<wc-info>
<schedule>delete</schedule>
<text-updated>2013-07-13T15:34:14.125000Z</text-updated>
<checksum>d41d8cd98f00b204e9800998ecf8427e</checksum>
</wc-info>
<commit
revision="6">
<author>ptt</author>
<date>2013-07-13T15:34:14.406250Z</date>
</commit>
</entry>
<entry
kind="dir"
path="svn14_example\folder"
revision="6">
<url>file:///C:/development/svn_example/repos/svn14/main/folder</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn14/main</root>
<uuid>c75942e5-8b7a-354d-b1cf-73dee23fa94f</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
</wc-info>
<commit
revision="5">
<author>ptt</author>
<date>2013-07-13T15:34:12.390625Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn14_example\folder\quest.txt"
revision="6">
<url>file:///C:/development/svn_example/repos/svn14/main/folder/quest.txt</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn14/main</root>
<uuid>c75942e5-8b7a-354d-b1cf-73dee23fa94f</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<text-updated>2013-07-13T15:34:07.109375Z</text-updated>
<checksum>795240c6a830c14f83961e57e07dad12</checksum>
</wc-info>
<commit
revision="2">
<author>ptt</author>
<date>2013-07-13T15:34:07.390625Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn14_example\folder\lalala.txt"
revision="6">
<url>file:///C:/development/svn_example/repos/svn14/main/folder/lalala.txt</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn14/main</root>
<uuid>c75942e5-8b7a-354d-b1cf-73dee23fa94f</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<text-updated>2013-07-13T15:34:06.250000Z</text-updated>
<checksum>d41d8cd98f00b204e9800998ecf8427e</checksum>
</wc-info>
<commit
revision="1">
<author>ptt</author>
<date>2013-07-13T15:34:06.531250Z</date>
</commit>
</entry>
</info>
third_party3 file:///C:/development/svn_example/repos/svn15/extra1
-r3 file:///C:/development/svn_example/repos/svn15/extra1 third_party2
file:///C:/development/svn_example/repos/svn15/extra1@r1 third_party
<?xml version="1.0"?>
<properties>
<target
path="C:/development/svn_example/svn15_example/folder">
<property
name="svn:externals">third_party3 file:///C:/development/svn_example/repos/svn15/extra2
-r3 file:///C:/development/svn_example/repos/svn15/extra2 third_party2
file:///C:/development/svn_example/repos/svn15/extra2@r1 third_party大介
</property>
</target>
<target
path="C:/development/svn_example/svn15_example">
<property
name="svn:externals">third_party3 file:///C:/development/svn_example/repos/svn15/extra1
-r3 file:///C:/development/svn_example/repos/svn15/extra1 third_party2
file:///C:/development/svn_example/repos/svn15/extra1@r1 third_party大介
</property>
</target>
</properties>
<?xml version="1.0"?>
<info>
<entry
kind="dir"
path="svn15_example"
revision="6">
<url>file:///C:/development/svn_example/repos/svn15/main</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn15/main</root>
<uuid>4eab6983-54fe-384b-a282-9306f52d948f</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<depth>infinity</depth>
</wc-info>
<commit
revision="6">
<author>ptt</author>
<date>2013-07-13T15:34:49.562500Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn15_example\a file"
revision="6">
<url>file:///C:/development/svn_example/repos/svn15/main/a%20file</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn15/main</root>
<uuid>4eab6983-54fe-384b-a282-9306f52d948f</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<depth>infinity</depth>
<text-updated>2013-07-13T15:34:43.109375Z</text-updated>
<checksum>a6166e5e98a5a503089cde9bc8031293</checksum>
</wc-info>
<commit
revision="3">
<author>ptt</author>
<date>2013-07-13T15:34:43.484375Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn15_example\to_delete"
revision="6">
<url>file:///C:/development/svn_example/repos/svn15/main/to_delete</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn15/main</root>
<uuid>4eab6983-54fe-384b-a282-9306f52d948f</uuid>
</repository>
<wc-info>
<schedule>delete</schedule>
<depth>infinity</depth>
<text-updated>2013-07-13T15:34:49.125000Z</text-updated>
<checksum>d41d8cd98f00b204e9800998ecf8427e</checksum>
</wc-info>
<commit
revision="6">
<author>ptt</author>
<date>2013-07-13T15:34:49.562500Z</date>
</commit>
</entry>
<entry
kind="dir"
path="svn15_example\folder"
revision="6">
<url>file:///C:/development/svn_example/repos/svn15/main/folder</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn15/main</root>
<uuid>4eab6983-54fe-384b-a282-9306f52d948f</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<depth>infinity</depth>
</wc-info>
<commit
revision="5">
<author>ptt</author>
<date>2013-07-13T15:34:47.515625Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn15_example\folder\quest.txt"
revision="6">
<url>file:///C:/development/svn_example/repos/svn15/main/folder/quest.txt</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn15/main</root>
<uuid>4eab6983-54fe-384b-a282-9306f52d948f</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<depth>infinity</depth>
<text-updated>2013-07-13T15:34:42.109375Z</text-updated>
<checksum>795240c6a830c14f83961e57e07dad12</checksum>
</wc-info>
<commit
revision="2">
<author>ptt</author>
<date>2013-07-13T15:34:42.484375Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn15_example\folder\lalala.txt"
revision="6">
<url>file:///C:/development/svn_example/repos/svn15/main/folder/lalala.txt</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn15/main</root>
<uuid>4eab6983-54fe-384b-a282-9306f52d948f</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<depth>infinity</depth>
<text-updated>2013-07-13T15:34:41.375000Z</text-updated>
<checksum>d41d8cd98f00b204e9800998ecf8427e</checksum>
</wc-info>
<commit
revision="1">
<author>ptt</author>
<date>2013-07-13T15:34:41.734375Z</date>
</commit>
</entry>
</info>
"third party3" file:///C:/development/svn_example/repos/svn16/extra1
'third party3b' file:///C:/development/svn_example/repos/svn16/extra1
-r3 file:///C:/development/svn_example/repos/svn16/extra1 third\ party2
file:///C:/development/svn_example/repos/svn16/extra1@r1 third_party
<?xml version="1.0"?>
<properties>
<target
path="C:/development/svn_example/svn16_example/folder">
<property
name="svn:externals">"third party3" file:///C:/development/svn_example/repos/svn16/extra2
-r3 file:///C:/development/svn_example/repos/svn16/extra2 third\ party2
file:///C:/development/svn_example/repos/svn16/extra2@r1 third_party大介
</property>
</target>
<target
path="C:/development/svn_example/svn16_example">
<property
name="svn:externals">"third party3" file:///C:/development/svn_example/repos/svn16/extra1
-r3 file:///C:/development/svn_example/repos/svn16/extra1 third\ party2
file:///C:/development/svn_example/repos/svn16/extra1@r1 third_party大介
</property>
</target>
</properties>
<?xml version="1.0"?>
<info>
<entry
kind="dir"
path="svn16_example"
revision="6">
<url>file:///C:/development/svn_example/repos/svn16/main</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn16/main</root>
<uuid>bd8d2cfc-1a74-de45-b166-262010c17c0a</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<depth>infinity</depth>
</wc-info>
<commit
revision="6">
<author>ptt</author>
<date>2013-07-13T15:35:17.390625Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn16_example\a file"
revision="6">
<url>file:///C:/development/svn_example/repos/svn16/main/a%20file</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn16/main</root>
<uuid>bd8d2cfc-1a74-de45-b166-262010c17c0a</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<depth>infinity</depth>
<text-updated>2013-07-13T15:35:14.578125Z</text-updated>
<checksum>a6166e5e98a5a503089cde9bc8031293</checksum>
</wc-info>
<commit
revision="3">
<author>ptt</author>
<date>2013-07-13T15:35:14.906250Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn16_example\to_delete"
revision="6">
<url>file:///C:/development/svn_example/repos/svn16/main/to_delete</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn16/main</root>
<uuid>bd8d2cfc-1a74-de45-b166-262010c17c0a</uuid>
</repository>
<wc-info>
<schedule>delete</schedule>
<depth>infinity</depth>
<text-updated>2013-07-13T15:35:17.046875Z</text-updated>
<checksum>d41d8cd98f00b204e9800998ecf8427e</checksum>
</wc-info>
<commit
revision="6">
<author>ptt</author>
<date>2013-07-13T15:35:17.390625Z</date>
</commit>
</entry>
<entry
kind="dir"
path="svn16_example\folder"
revision="6">
<url>file:///C:/development/svn_example/repos/svn16/main/folder</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn16/main</root>
<uuid>bd8d2cfc-1a74-de45-b166-262010c17c0a</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<depth>infinity</depth>
</wc-info>
<commit
revision="5">
<author>ptt</author>
<date>2013-07-13T15:35:16.406250Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn16_example\folder\quest.txt"
revision="6">
<url>file:///C:/development/svn_example/repos/svn16/main/folder/quest.txt</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn16/main</root>
<uuid>bd8d2cfc-1a74-de45-b166-262010c17c0a</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<depth>infinity</depth>
<text-updated>2013-07-13T15:35:14.078125Z</text-updated>
<checksum>795240c6a830c14f83961e57e07dad12</checksum>
</wc-info>
<commit
revision="2">
<author>ptt</author>
<date>2013-07-13T15:35:14.421875Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn16_example\folder\lalala.txt"
revision="6">
<url>file:///C:/development/svn_example/repos/svn16/main/folder/lalala.txt</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn16/main</root>
<uuid>bd8d2cfc-1a74-de45-b166-262010c17c0a</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<depth>infinity</depth>
<text-updated>2013-07-13T15:35:12.171875Z</text-updated>
<checksum>d41d8cd98f00b204e9800998ecf8427e</checksum>
</wc-info>
<commit
revision="1">
<author>ptt</author>
<date>2013-07-13T15:35:13.906250Z</date>
</commit>
</entry>
</info>
"third party3" file:///C:/development/svn_example/repos/svn17/extra1
'third party3b' file:///C:/development/svn_example/repos/svn17/extra1
-r3 file:///C:/development/svn_example/repos/svn17/extra1 third\ party2
file:///C:/development/svn_example/repos/svn17/extra1@r1 third_party
<?xml version="1.0" encoding="UTF-8"?>
<properties>
<target
path="C:/development/svn_example/svn17_example">
<property
name="svn:externals">"third party3" file:///C:/development/svn_example/repos/svn16/extra1
-r3 file:///C:/development/svn_example/repos/svn16/extra1 third\ party2
file:///C:/development/svn_example/repos/svn16/extra1@r1 third_party大介
</property>
</target>
<target
path="C:/development/svn_example/svn17_example/folder">
<property
name="svn:externals">"third party3" file:///C:/development/svn_example/repos/svn17/extra2
-r3 file:///C:/development/svn_example/repos/svn17/extra2 third\ party2
file:///C:/development/svn_example/repos/svn17/extra2@r1 third_party大介
</property>
</target>
</properties>
<?xml version="1.0" encoding="UTF-8"?>
<info>
<entry
kind="dir"
path="svn17_example"
revision="6">
<url>file:///C:/development/svn_example/repos/svn17/main</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn17/main</root>
<uuid>5ba45434-5197-164e-afab-81923f4744f5</uuid>
</repository>
<wc-info>
<wcroot-abspath>C:/development/svn_example/svn17_example</wcroot-abspath>
<schedule>normal</schedule>
<depth>infinity</depth>
</wc-info>
<commit
revision="6">
<author>ptt</author>
<date>2013-07-13T15:35:36.171875Z</date>
</commit>
</entry>
<entry
path="svn17_example\folder"
revision="6"
kind="dir">
<url>file:///C:/development/svn_example/repos/svn17/main/folder</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn17/main</root>
<uuid>5ba45434-5197-164e-afab-81923f4744f5</uuid>
</repository>
<wc-info>
<wcroot-abspath>C:/development/svn_example/svn17_example</wcroot-abspath>
<schedule>normal</schedule>
<depth>infinity</depth>
</wc-info>
<commit
revision="5">
<author>ptt</author>
<date>2013-07-13T15:35:34.859375Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn17_example\folder\quest.txt"
revision="6">
<url>file:///C:/development/svn_example/repos/svn17/main/folder/quest.txt</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn17/main</root>
<uuid>5ba45434-5197-164e-afab-81923f4744f5</uuid>
</repository>
<wc-info>
<wcroot-abspath>C:/development/svn_example/svn17_example</wcroot-abspath>
<schedule>normal</schedule>
<depth>infinity</depth>
<text-updated>2013-07-13T15:35:32.812500Z</text-updated>
<checksum>bc80eba9e7a10c0a571a4678c520bc9683f3bac2</checksum>
</wc-info>
<commit
revision="2">
<author>ptt</author>
<date>2013-07-13T15:35:33.109375Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn17_example\folder\lalala.txt"
revision="6">
<url>file:///C:/development/svn_example/repos/svn17/main/folder/lalala.txt</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn17/main</root>
<uuid>5ba45434-5197-164e-afab-81923f4744f5</uuid>
</repository>
<wc-info>
<wcroot-abspath>C:/development/svn_example/svn17_example</wcroot-abspath>
<schedule>normal</schedule>
<depth>infinity</depth>
<text-updated>2013-07-13T15:35:32.343750Z</text-updated>
<checksum>da39a3ee5e6b4b0d3255bfef95601890afd80709</checksum>
</wc-info>
<commit
revision="1">
<author>ptt</author>
<date>2013-07-13T15:35:32.687500Z</date>
</commit>
</entry>
<entry
path="svn17_example\a file"
revision="6"
kind="file">
<url>file:///C:/development/svn_example/repos/svn17/main/a%20file</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn17/main</root>
<uuid>5ba45434-5197-164e-afab-81923f4744f5</uuid>
</repository>
<wc-info>
<wcroot-abspath>C:/development/svn_example/svn17_example</wcroot-abspath>
<schedule>normal</schedule>
<depth>infinity</depth>
<text-updated>2013-07-13T15:35:33.187500Z</text-updated>
<checksum>43785ab4b1816b49f242990883292813cd4f486c</checksum>
</wc-info>
<commit
revision="3">
<author>ptt</author>
<date>2013-07-13T15:35:33.515625Z</date>
</commit>
</entry>
<entry
path="svn17_example\to_delete"
revision="6"
kind="file">
<url>file:///C:/development/svn_example/repos/svn17/main/to_delete</url>
<repository>
<root>file:///C:/development/svn_example/repos/svn17/main</root>
<uuid>5ba45434-5197-164e-afab-81923f4744f5</uuid>
</repository>
<wc-info>
<wcroot-abspath>C:/development/svn_example/svn17_example</wcroot-abspath>
<schedule>delete</schedule>
<depth>infinity</depth>
<checksum>da39a3ee5e6b4b0d3255bfef95601890afd80709</checksum>
</wc-info>
<commit
revision="6">
<author>ptt</author>
<date>2013-07-13T15:35:36.171875Z</date>
</commit>
</entry>
</info>
"third party3" file:///C:/development/svn_example/repos/svn18/extra1
'third party3b' file:///C:/development/svn_example/repos/svn18/extra1
-r3 file:///C:/development/svn_example/repos/svn18/extra1 third\ party2
file:///C:/development/svn_example/repos/svn18/extra1@r1 third_party
<?xml version="1.0" encoding="UTF-8"?>
<properties>
<target
path="C:/development/svn_example/svn18_example">
<property
name="svn:externals">"third party3" file:///C:/development/svn_example/repos/svn16/extra1
-r3 file:///C:/development/svn_example/repos/svn16/extra1 third\ party2
file:///C:/development/svn_example/repos/svn16/extra1@r1 third_party大介
</property>
</target>
<target
path="C:/development/svn_example/svn18_example/folder">
<property
name="svn:externals">"third party3" file:///C:/development/svn_example/repos/svn18/extra2
-r3 file:///C:/development/svn_example/repos/svn18/extra2 third\ party2
file:///C:/development/svn_example/repos/svn18/extra2@r1 third_party大介
</property>
</target>
</properties>
<?xml version="1.0" encoding="UTF-8"?>
<info>
<entry
path="svn18_example"
revision="6"
kind="dir">
<url>file:///C:/development/svn_example/repos/svn18/main</url>
<relative-url>^/</relative-url>
<repository>
<root>file:///C:/development/svn_example/repos/svn18/main</root>
<uuid>3c5e3929-c92b-7045-9ba9-5e65d3dd1ee9</uuid>
</repository>
<wc-info>
<wcroot-abspath>C:/development/svn_example/svn18_example</wcroot-abspath>
<schedule>normal</schedule>
<depth>infinity</depth>
</wc-info>
<commit
revision="6">
<author>ptt</author>
<date>2013-07-13T15:35:57.796875Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn18_example\a file"
revision="6">
<url>file:///C:/development/svn_example/repos/svn18/main/a%20file</url>
<relative-url>^/a%20file</relative-url>
<repository>
<root>file:///C:/development/svn_example/repos/svn18/main</root>
<uuid>3c5e3929-c92b-7045-9ba9-5e65d3dd1ee9</uuid>
</repository>
<wc-info>
<wcroot-abspath>C:/development/svn_example/svn18_example</wcroot-abspath>
<schedule>normal</schedule>
<depth>infinity</depth>
<text-updated>2013-07-13T15:35:54.906250Z</text-updated>
<checksum>43785ab4b1816b49f242990883292813cd4f486c</checksum>
</wc-info>
<commit
revision="3">
<author>ptt</author>
<date>2013-07-13T15:35:55.265625Z</date>
</commit>
</entry>
<entry
kind="file"
path="svn18_example\to_delete"
revision="6">
<url>file:///C:/development/svn_example/repos/svn18/main/to_delete</url>
<relative-url>^/to_delete</relative-url>
<repository>
<root>file:///C:/development/svn_example/repos/svn18/main</root>
<uuid>3c5e3929-c92b-7045-9ba9-5e65d3dd1ee9</uuid>
</repository>
<wc-info>
<wcroot-abspath>C:/development/svn_example/svn18_example</wcroot-abspath>
<schedule>delete</schedule>
<depth>infinity</depth>
<checksum>da39a3ee5e6b4b0d3255bfef95601890afd80709</checksum>
</wc-info>
<commit
revision="6">
<author>ptt</author>
<date>2013-07-13T15:35:57.796875Z</date>
</commit>
</entry>
<entry
kind="dir"
path="svn18_example\folder"
revision="6">
<url>file:///C:/development/svn_example/repos/svn18/main/folder</url>
<relative-url>^/folder</relative-url>
<repository>
<root>file:///C:/development/svn_example/repos/svn18/main</root>
<uuid>3c5e3929-c92b-7045-9ba9-5e65d3dd1ee9</uuid>
</repository>
<wc-info>
<wcroot-abspath>C:/development/svn_example/svn18_example</wcroot-abspath>
<schedule>normal</schedule>
<depth>infinity</depth>
</wc-info>
<commit
revision="5">
<author>ptt</author>
<date>2013-07-13T15:35:56.750000Z</date>
</commit>
</entry>
<entry
path="svn18_example\folder\quest.txt"
revision="6"
kind="file">
<url>file:///C:/development/svn_example/repos/svn18/main/folder/quest.txt</url>
<relative-url>^/folder/quest.txt</relative-url>
<repository>
<root>file:///C:/development/svn_example/repos/svn18/main</root>
<uuid>3c5e3929-c92b-7045-9ba9-5e65d3dd1ee9</uuid>
</repository>
<wc-info>
<wcroot-abspath>C:/development/svn_example/svn18_example</wcroot-abspath>
<schedule>normal</schedule>
<depth>infinity</depth>
<text-updated>2013-07-13T15:35:54.484375Z</text-updated>
<checksum>bc80eba9e7a10c0a571a4678c520bc9683f3bac2</checksum>
</wc-info>
<commit
revision="2">
<author>ptt</author>
<date>2013-07-13T15:35:54.843750Z</date>
</commit>
</entry>
<entry
path="svn18_example\folder\lalala.txt"
revision="6"
kind="file">
<url>file:///C:/development/svn_example/repos/svn18/main/folder/lalala.txt</url>
<relative-url>^/folder/lalala.txt</relative-url>
<repository>
<root>file:///C:/development/svn_example/repos/svn18/main</root>
<uuid>3c5e3929-c92b-7045-9ba9-5e65d3dd1ee9</uuid>
</repository>
<wc-info>
<wcroot-abspath>C:/development/svn_example/svn18_example</wcroot-abspath>
<schedule>normal</schedule>
<depth>infinity</depth>
<text-updated>2013-07-13T15:35:54.015625Z</text-updated>
<checksum>da39a3ee5e6b4b0d3255bfef95601890afd80709</checksum>
</wc-info>
<commit
revision="1">
<author>ptt</author>
<date>2013-07-13T15:35:54.375000Z</date>
</commit>
</entry>
</info>
...@@ -2,53 +2,32 @@ ...@@ -2,53 +2,32 @@
""" """
import os import os
import re import re
import shutil
import site
import sys
import tempfile
import unittest
import six import six
import pytest
from distutils.errors import DistutilsError
from setuptools.command.bdist_egg import bdist_egg
from setuptools.command import easy_install as easy_install_pkg
from setuptools.dist import Distribution from setuptools.dist import Distribution
from . import contexts
SETUP_PY = """\ SETUP_PY = """\
from setuptools import setup from setuptools import setup
setup(name='foo', py_modules=['hi']) setup(name='foo', py_modules=['hi'])
""" """
class TestDevelopTest(unittest.TestCase): @pytest.yield_fixture
def setup_context(tmpdir):
def setUp(self): with (tmpdir/'setup.py').open('w') as f:
self.dir = tempfile.mkdtemp()
self.old_cwd = os.getcwd()
os.chdir(self.dir)
f = open('setup.py', 'w')
f.write(SETUP_PY) f.write(SETUP_PY)
f.close() with (tmpdir/'hi.py').open('w') as f:
f = open('hi.py', 'w')
f.write('1\n') f.write('1\n')
f.close() with tmpdir.as_cwd():
if sys.version >= "2.6": yield tmpdir
self.old_base = site.USER_BASE
site.USER_BASE = tempfile.mkdtemp()
self.old_site = site.USER_SITE
site.USER_SITE = tempfile.mkdtemp()
def tearDown(self):
os.chdir(self.old_cwd)
shutil.rmtree(self.dir)
if sys.version >= "2.6":
shutil.rmtree(site.USER_BASE)
shutil.rmtree(site.USER_SITE)
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
def test_bdist_egg(self): class Test:
def test_bdist_egg(self, setup_context, user_override):
dist = Distribution(dict( dist = Distribution(dict(
script_name='setup.py', script_name='setup.py',
script_args=['bdist_egg'], script_args=['bdist_egg'],
...@@ -56,17 +35,10 @@ class TestDevelopTest(unittest.TestCase): ...@@ -56,17 +35,10 @@ class TestDevelopTest(unittest.TestCase):
py_modules=['hi'] py_modules=['hi']
)) ))
os.makedirs(os.path.join('build', 'src')) os.makedirs(os.path.join('build', 'src'))
old_stdout = sys.stdout with contexts.quiet():
sys.stdout = o = six.StringIO()
try:
dist.parse_command_line() dist.parse_command_line()
dist.run_commands() dist.run_commands()
finally:
sys.stdout = old_stdout
# let's see if we got our egg link at the right place # let's see if we got our egg link at the right place
[content] = os.listdir('dist') [content] = os.listdir('dist')
self.assertTrue(re.match('foo-0.0.0-py[23].\d.egg$', content)) assert re.match('foo-0.0.0-py[23].\d.egg$', content)
def test_suite():
return unittest.makeSuite(TestDevelopTest)
"""build_ext tests import distutils.command.build_ext as orig
"""
import unittest
from distutils.command.build_ext import build_ext as distutils_build_ext
from setuptools.command.build_ext import build_ext from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution from setuptools.dist import Distribution
class TestBuildExtTest(unittest.TestCase): class TestBuildExt:
def test_get_ext_filename(self): def test_get_ext_filename(self):
# setuptools needs to give back the same """
# result than distutils, even if the fullname Setuptools needs to give back the same
# is not in ext_map result as distutils, even if the fullname
is not in ext_map.
"""
dist = Distribution() dist = Distribution()
cmd = build_ext(dist) cmd = build_ext(dist)
cmd.ext_map['foo/bar'] = '' cmd.ext_map['foo/bar'] = ''
res = cmd.get_ext_filename('foo') res = cmd.get_ext_filename('foo')
wanted = distutils_build_ext.get_ext_filename(cmd, 'foo') wanted = orig.build_ext.get_ext_filename(cmd, 'foo')
assert res == wanted assert res == wanted
...@@ -5,9 +5,7 @@ import shutil ...@@ -5,9 +5,7 @@ import shutil
import site import site
import sys import sys
import tempfile import tempfile
import unittest
from distutils.errors import DistutilsError
from setuptools.command.develop import develop from setuptools.command.develop import develop
from setuptools.dist import Distribution from setuptools.dist import Distribution
...@@ -23,10 +21,10 @@ setup(name='foo', ...@@ -23,10 +21,10 @@ setup(name='foo',
INIT_PY = """print "foo" INIT_PY = """print "foo"
""" """
class TestDevelopTest(unittest.TestCase): class TestDevelopTest:
def setUp(self): def setup_method(self, method):
if sys.version < "2.6" or hasattr(sys, 'real_prefix'): if hasattr(sys, 'real_prefix'):
return return
# Directory structure # Directory structure
...@@ -50,8 +48,8 @@ class TestDevelopTest(unittest.TestCase): ...@@ -50,8 +48,8 @@ class TestDevelopTest(unittest.TestCase):
self.old_site = site.USER_SITE self.old_site = site.USER_SITE
site.USER_SITE = tempfile.mkdtemp() site.USER_SITE = tempfile.mkdtemp()
def tearDown(self): def teardown_method(self, method):
if sys.version < "2.6" or hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix): if hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
return return
os.chdir(self.old_cwd) os.chdir(self.old_cwd)
...@@ -62,7 +60,7 @@ class TestDevelopTest(unittest.TestCase): ...@@ -62,7 +60,7 @@ class TestDevelopTest(unittest.TestCase):
site.USER_SITE = self.old_site site.USER_SITE = self.old_site
def test_develop(self): def test_develop(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix'): if hasattr(sys, 'real_prefix'):
return return
dist = Distribution( dist = Distribution(
dict(name='foo', dict(name='foo',
...@@ -86,7 +84,7 @@ class TestDevelopTest(unittest.TestCase): ...@@ -86,7 +84,7 @@ class TestDevelopTest(unittest.TestCase):
# let's see if we got our egg link at the right place # let's see if we got our egg link at the right place
content = os.listdir(site.USER_SITE) content = os.listdir(site.USER_SITE)
content.sort() content.sort()
self.assertEqual(content, ['easy-install.pth', 'foo.egg-link']) assert content == ['easy-install.pth', 'foo.egg-link']
# Check that we are using the right code. # Check that we are using the right code.
egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt') egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt')
...@@ -100,23 +98,6 @@ class TestDevelopTest(unittest.TestCase): ...@@ -100,23 +98,6 @@ class TestDevelopTest(unittest.TestCase):
finally: finally:
init_file.close() init_file.close()
if sys.version < "3": if sys.version < "3":
self.assertEqual(init, 'print "foo"') assert init == 'print "foo"'
else: else:
self.assertEqual(init, 'print("foo")') assert init == 'print("foo")'
def notest_develop_with_setup_requires(self):
wanted = ("Could not find suitable distribution for "
"Requirement.parse('I-DONT-EXIST')")
old_dir = os.getcwd()
os.chdir(self.dir)
try:
try:
Distribution({'setup_requires': ['I_DONT_EXIST']})
except DistutilsError:
e = sys.exc_info()[1]
error = str(e)
if error == wanted:
pass
finally:
os.chdir(old_dir)
...@@ -3,28 +3,20 @@ ...@@ -3,28 +3,20 @@
import os import os
import shutil import shutil
import tempfile import tempfile
import unittest
import textwrap
try: import pytest
import ast
except:
pass
import pkg_resources import pkg_resources
from .textwrap import DALS
from setuptools.tests.py26compat import skipIf
def DALS(s): class TestDistInfo:
"dedent and left-strip"
return textwrap.dedent(s).lstrip()
class TestDistInfo(unittest.TestCase):
def test_distinfo(self): def test_distinfo(self):
dists = {} dists = dict(
for d in pkg_resources.find_distributions(self.tmpdir): (d.project_name, d)
dists[d.project_name] = d for d in pkg_resources.find_distributions(self.tmpdir)
)
assert len(dists) == 2, dists assert len(dists) == 2, dists
...@@ -34,50 +26,45 @@ class TestDistInfo(unittest.TestCase): ...@@ -34,50 +26,45 @@ class TestDistInfo(unittest.TestCase):
assert versioned.version == '2.718' # from filename assert versioned.version == '2.718' # from filename
assert unversioned.version == '0.3' # from METADATA assert unversioned.version == '0.3' # from METADATA
@skipIf('ast' not in globals(), @pytest.mark.importorskip('ast')
"ast is used to test conditional dependencies (Python >= 2.6)")
def test_conditional_dependencies(self): def test_conditional_dependencies(self):
requires = [pkg_resources.Requirement.parse('splort==4'), specs = 'splort==4', 'quux>=1.1'
pkg_resources.Requirement.parse('quux>=1.1')] requires = list(map(pkg_resources.Requirement.parse, specs))
for d in pkg_resources.find_distributions(self.tmpdir): for d in pkg_resources.find_distributions(self.tmpdir):
self.assertEqual(d.requires(), requires[:1]) assert d.requires() == requires[:1]
self.assertEqual(d.requires(extras=('baz',)), requires) assert d.requires(extras=('baz',)) == requires
self.assertEqual(d.extras, ['baz']) assert d.extras == ['baz']
metadata_template = DALS("""
Metadata-Version: 1.2
Name: {name}
{version}
Requires-Dist: splort (==4)
Provides-Extra: baz
Requires-Dist: quux (>=1.1); extra == 'baz'
""")
def setUp(self): def setup_method(self, method):
self.tmpdir = tempfile.mkdtemp() self.tmpdir = tempfile.mkdtemp()
versioned = os.path.join(self.tmpdir, dist_info_name = 'VersionedDistribution-2.718.dist-info'
'VersionedDistribution-2.718.dist-info') versioned = os.path.join(self.tmpdir, dist_info_name)
os.mkdir(versioned) os.mkdir(versioned)
metadata_file = open(os.path.join(versioned, 'METADATA'), 'w+') with open(os.path.join(versioned, 'METADATA'), 'w+') as metadata_file:
try: metadata = self.metadata_template.format(
metadata_file.write(DALS( name='VersionedDistribution',
""" version='',
Metadata-Version: 1.2 ).replace('\n\n', '\n')
Name: VersionedDistribution metadata_file.write(metadata)
Requires-Dist: splort (4) dist_info_name = 'UnversionedDistribution.dist-info'
Provides-Extra: baz unversioned = os.path.join(self.tmpdir, dist_info_name)
Requires-Dist: quux (>=1.1); extra == 'baz'
"""))
finally:
metadata_file.close()
unversioned = os.path.join(self.tmpdir,
'UnversionedDistribution.dist-info')
os.mkdir(unversioned) os.mkdir(unversioned)
metadata_file = open(os.path.join(unversioned, 'METADATA'), 'w+') with open(os.path.join(unversioned, 'METADATA'), 'w+') as metadata_file:
try: metadata = self.metadata_template.format(
metadata_file.write(DALS( name='UnversionedDistribution',
""" version='Version: 0.3',
Metadata-Version: 1.2 )
Name: UnversionedDistribution metadata_file.write(metadata)
Version: 0.3
Requires-Dist: splort (==4)
Provides-Extra: baz
Requires-Dist: quux (>=1.1); extra == 'baz'
"""))
finally:
metadata_file.close()
def tearDown(self): def teardown_method(self, method):
shutil.rmtree(self.tmpdir) shutil.rmtree(self.tmpdir)
"""Easy install Tests """Easy install Tests
""" """
from __future__ import absolute_import
import sys import sys
import os import os
import shutil import shutil
import tempfile import tempfile
import unittest
import site import site
import contextlib import contextlib
import textwrap
import tarfile import tarfile
import logging import logging
import distutils.core import itertools
import io import io
import six
from six.moves import urllib from six.moves import urllib
import pytest
import mock
from setuptools import sandbox
from setuptools.sandbox import run_setup, SandboxViolation from setuptools.sandbox import run_setup, SandboxViolation
from setuptools.command.easy_install import ( from setuptools.command.easy_install import (
easy_install, fix_jython_executable, get_script_args, nt_quote_arg) easy_install, fix_jython_executable, get_script_args, nt_quote_arg)
...@@ -26,7 +28,11 @@ from pkg_resources import working_set, VersionConflict ...@@ -26,7 +28,11 @@ from pkg_resources import working_set, VersionConflict
from pkg_resources import Distribution as PRDistribution from pkg_resources import Distribution as PRDistribution
import setuptools.tests.server import setuptools.tests.server
import pkg_resources import pkg_resources
from .py26compat import skipIf
from .py26compat import tarfile_open
from . import contexts
from .textwrap import DALS
class FakeDist(object): class FakeDist(object):
def get_entry_map(self, group): def get_entry_map(self, group):
...@@ -37,26 +43,26 @@ class FakeDist(object): ...@@ -37,26 +43,26 @@ class FakeDist(object):
def as_requirement(self): def as_requirement(self):
return 'spec' return 'spec'
WANTED = """\ WANTED = DALS("""
#!%s #!%s
# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name' # EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
__requires__ = 'spec' __requires__ = 'spec'
import sys import sys
from pkg_resources import load_entry_point from pkg_resources import load_entry_point
if __name__ == '__main__': if __name__ == '__main__':
sys.exit( sys.exit(
load_entry_point('spec', 'console_scripts', 'name')() load_entry_point('spec', 'console_scripts', 'name')()
) )
""" % nt_quote_arg(fix_jython_executable(sys.executable, "")) """) % nt_quote_arg(fix_jython_executable(sys.executable, ""))
SETUP_PY = """\ SETUP_PY = DALS("""
from setuptools import setup from setuptools import setup
setup(name='foo') setup(name='foo')
""" """)
class TestEasyInstallTest(unittest.TestCase): class TestEasyInstallTest:
def test_install_site_py(self): def test_install_site_py(self):
dist = Distribution() dist = Distribution()
...@@ -66,20 +72,17 @@ class TestEasyInstallTest(unittest.TestCase): ...@@ -66,20 +72,17 @@ class TestEasyInstallTest(unittest.TestCase):
try: try:
cmd.install_site_py() cmd.install_site_py()
sitepy = os.path.join(cmd.install_dir, 'site.py') sitepy = os.path.join(cmd.install_dir, 'site.py')
self.assertTrue(os.path.exists(sitepy)) assert os.path.exists(sitepy)
finally: finally:
shutil.rmtree(cmd.install_dir) shutil.rmtree(cmd.install_dir)
def test_get_script_args(self): def test_get_script_args(self):
dist = FakeDist() dist = FakeDist()
old_platform = sys.platform args = next(get_script_args(dist))
try: name, script = itertools.islice(args, 2)
name, script = [i for i in next(get_script_args(dist))][0:2]
finally:
sys.platform = old_platform
self.assertEqual(script, WANTED) assert script == WANTED
def test_no_find_links(self): def test_no_find_links(self):
# new option '--no-find-links', that blocks find-links added at # new option '--no-find-links', that blocks find-links added at
...@@ -92,7 +95,7 @@ class TestEasyInstallTest(unittest.TestCase): ...@@ -92,7 +95,7 @@ class TestEasyInstallTest(unittest.TestCase):
cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
cmd.args = ['ok'] cmd.args = ['ok']
cmd.ensure_finalized() cmd.ensure_finalized()
self.assertEqual(cmd.package_index.scanned_urls, {}) assert cmd.package_index.scanned_urls == {}
# let's try without it (default behavior) # let's try without it (default behavior)
cmd = easy_install(dist) cmd = easy_install(dist)
...@@ -102,61 +105,44 @@ class TestEasyInstallTest(unittest.TestCase): ...@@ -102,61 +105,44 @@ class TestEasyInstallTest(unittest.TestCase):
cmd.args = ['ok'] cmd.args = ['ok']
cmd.ensure_finalized() cmd.ensure_finalized()
keys = sorted(cmd.package_index.scanned_urls.keys()) keys = sorted(cmd.package_index.scanned_urls.keys())
self.assertEqual(keys, ['link1', 'link2']) assert keys == ['link1', 'link2']
class TestPTHFileWriter(unittest.TestCase): class TestPTHFileWriter:
def test_add_from_cwd_site_sets_dirty(self): def test_add_from_cwd_site_sets_dirty(self):
'''a pth file manager should set dirty '''a pth file manager should set dirty
if a distribution is in site but also the cwd if a distribution is in site but also the cwd
''' '''
pth = PthDistributions('does-not_exist', [os.getcwd()]) pth = PthDistributions('does-not_exist', [os.getcwd()])
self.assertTrue(not pth.dirty) assert not pth.dirty
pth.add(PRDistribution(os.getcwd())) pth.add(PRDistribution(os.getcwd()))
self.assertTrue(pth.dirty) assert pth.dirty
def test_add_from_site_is_ignored(self): def test_add_from_site_is_ignored(self):
if os.name != 'nt': location = '/test/location/does-not-have-to-exist'
location = '/test/location/does-not-have-to-exist' # PthDistributions expects all locations to be normalized
else: location = pkg_resources.normalize_path(location)
location = 'c:\\does_not_exist'
pth = PthDistributions('does-not_exist', [location, ]) pth = PthDistributions('does-not_exist', [location, ])
self.assertTrue(not pth.dirty) assert not pth.dirty
pth.add(PRDistribution(location)) pth.add(PRDistribution(location))
self.assertTrue(not pth.dirty) assert not pth.dirty
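PthDistributions compares canonicalized locations, which is why the rewritten test normalizes the path first. A minimal illustration of pkg_resources.normalize_path (the paths here are purely illustrative) is:

import pkg_resources

# normalize_path resolves symlinks and relative components and normalizes
# case, so equivalent spellings of the same location compare equal.
a = pkg_resources.normalize_path('/test/location/does-not-have-to-exist')
b = pkg_resources.normalize_path('/test/location/../location/does-not-have-to-exist')
assert a == b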
class TestUserInstallTest(unittest.TestCase):
def setUp(self): @pytest.yield_fixture
self.dir = tempfile.mkdtemp() def setup_context(tmpdir):
setup = os.path.join(self.dir, 'setup.py') with (tmpdir/'setup.py').open('w') as f:
f = open(setup, 'w')
f.write(SETUP_PY) f.write(SETUP_PY)
f.close() with tmpdir.as_cwd():
self.old_cwd = os.getcwd() yield tmpdir
os.chdir(self.dir)
self.old_enable_site = site.ENABLE_USER_SITE
self.old_file = easy_install_pkg.__file__
self.old_base = site.USER_BASE
site.USER_BASE = tempfile.mkdtemp()
self.old_site = site.USER_SITE
site.USER_SITE = tempfile.mkdtemp()
easy_install_pkg.__file__ = site.USER_SITE
def tearDown(self):
os.chdir(self.old_cwd)
shutil.rmtree(self.dir)
shutil.rmtree(site.USER_BASE) @pytest.mark.usefixtures("user_override")
shutil.rmtree(site.USER_SITE) @pytest.mark.usefixtures("setup_context")
site.USER_BASE = self.old_base class TestUserInstallTest:
site.USER_SITE = self.old_site
site.ENABLE_USER_SITE = self.old_enable_site
easy_install_pkg.__file__ = self.old_file
@mock.patch('setuptools.command.easy_install.__file__', None)
def test_user_install_implied(self): def test_user_install_implied(self):
easy_install_pkg.__file__ = site.USER_SITE
site.ENABLE_USER_SITE = True # disabled sometimes site.ENABLE_USER_SITE = True # disabled sometimes
#XXX: replace with something meaningful #XXX: replace with something meaningful
dist = Distribution() dist = Distribution()
...@@ -164,7 +150,7 @@ class TestUserInstallTest(unittest.TestCase): ...@@ -164,7 +150,7 @@ class TestUserInstallTest(unittest.TestCase):
cmd = easy_install(dist) cmd = easy_install(dist)
cmd.args = ['py'] cmd.args = ['py']
cmd.ensure_finalized() cmd.ensure_finalized()
self.assertTrue(cmd.user, 'user should be implied') assert cmd.user, 'user should be implied'
def test_multiproc_atexit(self): def test_multiproc_atexit(self):
try: try:
...@@ -185,7 +171,7 @@ class TestUserInstallTest(unittest.TestCase): ...@@ -185,7 +171,7 @@ class TestUserInstallTest(unittest.TestCase):
cmd = easy_install(dist) cmd = easy_install(dist)
cmd.args = ['py'] cmd.args = ['py']
cmd.initialize_options() cmd.initialize_options()
self.assertFalse(cmd.user, 'NOT user should be implied') assert not cmd.user, 'NOT user should be implied'
def test_local_index(self): def test_local_index(self):
# make sure the local index is used # make sure the local index is used
...@@ -194,11 +180,8 @@ class TestUserInstallTest(unittest.TestCase): ...@@ -194,11 +180,8 @@ class TestUserInstallTest(unittest.TestCase):
new_location = tempfile.mkdtemp() new_location = tempfile.mkdtemp()
target = tempfile.mkdtemp() target = tempfile.mkdtemp()
egg_file = os.path.join(new_location, 'foo-1.0.egg-info') egg_file = os.path.join(new_location, 'foo-1.0.egg-info')
f = open(egg_file, 'w') with open(egg_file, 'w') as f:
try:
f.write('Name: foo\n') f.write('Name: foo\n')
finally:
f.close()
sys.path.append(target) sys.path.append(target)
old_ppath = os.environ.get('PYTHONPATH') old_ppath = os.environ.get('PYTHONPATH')
...@@ -214,7 +197,7 @@ class TestUserInstallTest(unittest.TestCase): ...@@ -214,7 +197,7 @@ class TestUserInstallTest(unittest.TestCase):
res = cmd.easy_install('foo') res = cmd.easy_install('foo')
actual = os.path.normcase(os.path.realpath(res.location)) actual = os.path.normcase(os.path.realpath(res.location))
expected = os.path.normcase(os.path.realpath(new_location)) expected = os.path.normcase(os.path.realpath(new_location))
self.assertEqual(actual, expected) assert actual == expected
finally: finally:
sys.path.remove(target) sys.path.remove(target)
for basedir in [new_location, target, ]: for basedir in [new_location, target, ]:
...@@ -229,6 +212,25 @@ class TestUserInstallTest(unittest.TestCase): ...@@ -229,6 +212,25 @@ class TestUserInstallTest(unittest.TestCase):
else: else:
del os.environ['PYTHONPATH'] del os.environ['PYTHONPATH']
@contextlib.contextmanager
def user_install_setup_context(self, *args, **kwargs):
"""
Wrap sandbox.setup_context to patch easy_install in that context to
appear as user-installed.
"""
with self.orig_context(*args, **kwargs):
import setuptools.command.easy_install as ei
ei.__file__ = site.USER_SITE
yield
def patched_setup_context(self):
self.orig_context = sandbox.setup_context
return mock.patch(
'setuptools.sandbox.setup_context',
self.user_install_setup_context,
)
def test_setup_requires(self): def test_setup_requires(self):
"""Regression test for Distribute issue #318 """Regression test for Distribute issue #318
...@@ -237,12 +239,12 @@ class TestUserInstallTest(unittest.TestCase): ...@@ -237,12 +239,12 @@ class TestUserInstallTest(unittest.TestCase):
SandboxViolation. SandboxViolation.
""" """
test_pkg = create_setup_requires_package(self.dir) test_pkg = create_setup_requires_package(os.getcwd())
test_setup_py = os.path.join(test_pkg, 'setup.py') test_setup_py = os.path.join(test_pkg, 'setup.py')
try: try:
with quiet_context(): with contexts.quiet():
with reset_setup_stop_context(): with self.patched_setup_context():
run_setup(test_setup_py, ['install']) run_setup(test_setup_py, ['install'])
except SandboxViolation: except SandboxViolation:
self.fail('Installation caused SandboxViolation') self.fail('Installation caused SandboxViolation')
...@@ -252,7 +254,24 @@ class TestUserInstallTest(unittest.TestCase): ...@@ -252,7 +254,24 @@ class TestUserInstallTest(unittest.TestCase):
pass pass
class TestSetupRequires(unittest.TestCase): @pytest.yield_fixture
def distutils_package():
distutils_setup_py = SETUP_PY.replace(
'from setuptools import setup',
'from distutils.core import setup',
)
with contexts.tempdir(cd=os.chdir):
with open('setup.py', 'w') as f:
f.write(distutils_setup_py)
yield
class TestDistutilsPackage:
def test_bdist_egg_available_on_distutils_pkg(self, distutils_package):
run_setup('setup.py', ['bdist_egg'])
class TestSetupRequires:
def test_setup_requires_honors_fetch_params(self): def test_setup_requires_honors_fetch_params(self):
""" """
...@@ -269,25 +288,27 @@ class TestSetupRequires(unittest.TestCase): ...@@ -269,25 +288,27 @@ class TestSetupRequires(unittest.TestCase):
# Some platforms (Jython) don't find a port to which to bind, # Some platforms (Jython) don't find a port to which to bind,
# so skip this test for them. # so skip this test for them.
return return
with quiet_context(): with contexts.quiet():
# create an sdist that has a build-time dependency. # create an sdist that has a build-time dependency.
with TestSetupRequires.create_sdist() as dist_file: with TestSetupRequires.create_sdist() as dist_file:
with tempdir_context() as temp_install_dir: with contexts.tempdir() as temp_install_dir:
with environment_context(PYTHONPATH=temp_install_dir): with contexts.environment(PYTHONPATH=temp_install_dir):
ei_params = ['--index-url', p_index.url, ei_params = [
'--index-url', p_index.url,
'--allow-hosts', p_index_loc, '--allow-hosts', p_index_loc,
'--exclude-scripts', '--install-dir', temp_install_dir, '--exclude-scripts',
dist_file] '--install-dir', temp_install_dir,
with reset_setup_stop_context(): dist_file,
with argv_context(['easy_install']): ]
# attempt to install the dist. It should fail because with contexts.argv(['easy_install']):
# it doesn't exist. # attempt to install the dist. It should fail because
self.assertRaises(SystemExit, # it doesn't exist.
easy_install_pkg.main, ei_params) with pytest.raises(SystemExit):
easy_install_pkg.main(ei_params)
# there should have been two or three requests to the server # there should have been two or three requests to the server
# (three happens on Python 3.3a) # (three happens on Python 3.3a)
self.assertTrue(2 <= len(p_index.requests) <= 3) assert 2 <= len(p_index.requests) <= 3
self.assertEqual(p_index.requests[0].path, '/does-not-exist/') assert p_index.requests[0].path == '/does-not-exist/'
@staticmethod @staticmethod
@contextlib.contextmanager @contextlib.contextmanager
...@@ -296,18 +317,17 @@ class TestSetupRequires(unittest.TestCase): ...@@ -296,18 +317,17 @@ class TestSetupRequires(unittest.TestCase):
Return an sdist with a setup_requires dependency (of something that Return an sdist with a setup_requires dependency (of something that
doesn't exist) doesn't exist)
""" """
with tempdir_context() as dir: with contexts.tempdir() as dir:
dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz') dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz')
make_trivial_sdist( script = DALS("""
dist_path, import setuptools
textwrap.dedent(""" setuptools.setup(
import setuptools name="setuptools-test-fetcher",
setuptools.setup( version="1.0",
name="setuptools-test-fetcher", setup_requires = ['does-not-exist'],
version="1.0", )
setup_requires = ['does-not-exist'], """)
) make_trivial_sdist(dist_path, script)
""").lstrip())
yield dist_path yield dist_path
def test_setup_requires_overrides_version_conflict(self): def test_setup_requires_overrides_version_conflict(self):
...@@ -325,22 +345,21 @@ class TestSetupRequires(unittest.TestCase): ...@@ -325,22 +345,21 @@ class TestSetupRequires(unittest.TestCase):
working_set.add(fake_dist) working_set.add(fake_dist)
try: try:
with tempdir_context() as temp_dir: with contexts.tempdir() as temp_dir:
test_pkg = create_setup_requires_package(temp_dir) test_pkg = create_setup_requires_package(temp_dir)
test_setup_py = os.path.join(test_pkg, 'setup.py') test_setup_py = os.path.join(test_pkg, 'setup.py')
with quiet_context() as (stdout, stderr): with contexts.quiet() as (stdout, stderr):
with reset_setup_stop_context(): try:
try: # Don't even need to install the package, just
# Don't even need to install the package, just # running the setup.py at all is sufficient
# running the setup.py at all is sufficient run_setup(test_setup_py, ['--name'])
run_setup(test_setup_py, ['--name']) except VersionConflict:
except VersionConflict: self.fail('Installing setup.py requirements '
self.fail('Installing setup.py requirements ' 'caused a VersionConflict')
'caused a VersionConflict')
lines = stdout.readlines() lines = stdout.readlines()
self.assertTrue(len(lines) > 0) assert len(lines) > 0
self.assertTrue(lines[-1].strip(), 'test_pkg') assert lines[-1].strip(), 'test_pkg'
finally: finally:
pkg_resources.__setstate__(pr_state) pkg_resources.__setstate__(pr_state)
...@@ -361,17 +380,16 @@ def create_setup_requires_package(path): ...@@ -361,17 +380,16 @@ def create_setup_requires_package(path):
test_setup_py = os.path.join(test_pkg, 'setup.py') test_setup_py = os.path.join(test_pkg, 'setup.py')
os.mkdir(test_pkg) os.mkdir(test_pkg)
f = open(test_setup_py, 'w') with open(test_setup_py, 'w') as f:
f.write(textwrap.dedent("""\ f.write(DALS("""
import setuptools import setuptools
setuptools.setup(**%r) setuptools.setup(**%r)
""" % test_setup_attrs)) """ % test_setup_attrs))
f.close()
foobar_path = os.path.join(path, 'foobar-0.1.tar.gz') foobar_path = os.path.join(path, 'foobar-0.1.tar.gz')
make_trivial_sdist( make_trivial_sdist(
foobar_path, foobar_path,
textwrap.dedent("""\ DALS("""
import setuptools import setuptools
setuptools.setup( setuptools.setup(
name='foobar', name='foobar',
...@@ -390,71 +408,5 @@ def make_trivial_sdist(dist_path, setup_py): ...@@ -390,71 +408,5 @@ def make_trivial_sdist(dist_path, setup_py):
setup_py_file = tarfile.TarInfo(name='setup.py') setup_py_file = tarfile.TarInfo(name='setup.py')
setup_py_bytes = io.BytesIO(setup_py.encode('utf-8')) setup_py_bytes = io.BytesIO(setup_py.encode('utf-8'))
setup_py_file.size = len(setup_py_bytes.getvalue()) setup_py_file.size = len(setup_py_bytes.getvalue())
dist = tarfile.open(dist_path, 'w:gz') with tarfile_open(dist_path, 'w:gz') as dist:
try:
dist.addfile(setup_py_file, fileobj=setup_py_bytes) dist.addfile(setup_py_file, fileobj=setup_py_bytes)
finally:
dist.close()
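make_trivial_sdist, defined above, writes a single in-memory setup.py into a gzipped tarball without needing a source tree on disk. A hypothetical call (the file name and metadata are illustrative) looks like:

import textwrap

make_trivial_sdist(
    '/tmp/example-0.1.tar.gz',
    textwrap.dedent("""
        import setuptools
        setuptools.setup(name='example', version='0.1')
        """).lstrip(),
)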
@contextlib.contextmanager
def tempdir_context(cd=lambda dir:None):
temp_dir = tempfile.mkdtemp()
orig_dir = os.getcwd()
try:
cd(temp_dir)
yield temp_dir
finally:
cd(orig_dir)
shutil.rmtree(temp_dir)
@contextlib.contextmanager
def environment_context(**updates):
old_env = os.environ.copy()
os.environ.update(updates)
try:
yield
finally:
for key in updates:
del os.environ[key]
os.environ.update(old_env)
@contextlib.contextmanager
def argv_context(repl):
old_argv = sys.argv[:]
sys.argv[:] = repl
yield
sys.argv[:] = old_argv
@contextlib.contextmanager
def reset_setup_stop_context():
"""
When the setuptools tests are run using setup.py test, and then
one wants to invoke another setup() command (such as easy_install)
within those tests, it's necessary to reset the global variable
in distutils.core so that the setup() command will run naturally.
"""
setup_stop_after = distutils.core._setup_stop_after
distutils.core._setup_stop_after = None
yield
distutils.core._setup_stop_after = setup_stop_after
@contextlib.contextmanager
def quiet_context():
"""
Redirect stdout/stderr to StringIO objects to prevent console output from
distutils commands.
"""
old_stdout = sys.stdout
old_stderr = sys.stderr
new_stdout = sys.stdout = six.StringIO()
new_stderr = sys.stderr = six.StringIO()
try:
yield new_stdout, new_stderr
finally:
new_stdout.seek(0)
new_stderr.seek(0)
sys.stdout = old_stdout
sys.stderr = old_stderr
import os import os
import sys import stat
import tempfile
import shutil import pytest
import unittest
from . import environment
import pkg_resources from .textwrap import DALS
import warnings from . import contexts
from setuptools.command import egg_info
from setuptools import svn_utils
from setuptools.tests import environment, test_svn class TestEggInfo:
from setuptools.tests.py26compat import skipIf
setup_script = DALS("""
ENTRIES_V10 = pkg_resources.resource_string(__name__, 'entries-v10') from setuptools import setup
"An entries file generated with svn 1.6.17 against the legacy Setuptools repo"
setup(
name='foo',
class TestEggInfo(unittest.TestCase): py_modules=['hello'],
entry_points={'console_scripts': ['hi = hello.run']},
def setUp(self): zip_safe=False,
self.test_dir = tempfile.mkdtemp() )
os.mkdir(os.path.join(self.test_dir, '.svn')) """)
self.old_cwd = os.getcwd() def _create_project(self):
os.chdir(self.test_dir) with open('setup.py', 'w') as f:
f.write(self.setup_script)
def tearDown(self):
os.chdir(self.old_cwd) with open('hello.py', 'w') as f:
shutil.rmtree(self.test_dir) f.write(DALS("""
def run():
def _write_entries(self, entries): print('hello')
fn = os.path.join(self.test_dir, '.svn', 'entries') """))
entries_f = open(fn, 'wb')
entries_f.write(entries) @pytest.yield_fixture
entries_f.close() def env(self):
class Environment(str): pass
@skipIf(not test_svn._svn_check, "No SVN to test, in the first place")
def test_version_10_format(self): with contexts.tempdir(prefix='setuptools-test.') as env_dir:
""" env = Environment(env_dir)
""" os.chmod(env_dir, stat.S_IRWXU)
#keeping this set for 1.6 is a good check on the get_svn_revision subs = 'home', 'lib', 'scripts', 'data', 'egg-base'
#to ensure I return using svnversion what would had been returned env.paths = dict(
version_str = svn_utils.SvnInfo.get_svn_version() (dirname, os.path.join(env_dir, dirname))
version = [int(x) for x in version_str.split('.')[:2]] for dirname in subs
if version != [1, 6]: )
if hasattr(self, 'skipTest'): list(map(os.mkdir, env.paths.values()))
self.skipTest('') config = os.path.join(env.paths['home'], '.pydistutils.cfg')
else: with open(config, 'w') as f:
sys.stderr.write('\n Skipping due to SVN Version\n') f.write(DALS("""
return [egg_info]
egg-base = %(egg-base)s
self._write_entries(ENTRIES_V10) """ % env.paths
rev = egg_info.egg_info.get_svn_revision() ))
self.assertEqual(rev, '89000') yield env
def test_version_10_format_legacy_parser(self): def test_egg_base_installed_egg_info(self, tmpdir_cwd, env):
""" self._create_project()
"""
path_variable = None environ = os.environ.copy().update(
for env in os.environ: HOME=env.paths['home'],
if env.lower() == 'path': )
path_variable = env cmd = [
'install',
if path_variable: '--home', env.paths['home'],
old_path = os.environ[path_variable] '--install-lib', env.paths['lib'],
os.environ[path_variable] = '' '--install-scripts', env.paths['scripts'],
#catch_warnings not available until py26 '--install-data', env.paths['data'],
warning_filters = warnings.filters ]
warnings.filters = warning_filters[:] code, data = environment.run_setup_py(
try: cmd=cmd,
warnings.simplefilter("ignore", DeprecationWarning) pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
self._write_entries(ENTRIES_V10) data_stream=1,
rev = egg_info.egg_info.get_svn_revision() env=environ,
finally: )
#restore the warning filters
warnings.filters = warning_filters
#restore the os path
if path_variable:
os.environ[path_variable] = old_path
self.assertEqual(rev, '89000')
DUMMY_SOURCE_TXT = """CHANGES.txt
CONTRIBUTORS.txt
HISTORY.txt
LICENSE
MANIFEST.in
README.txt
setup.py
dummy/__init__.py
dummy/test.txt
dummy.egg-info/PKG-INFO
dummy.egg-info/SOURCES.txt
dummy.egg-info/dependency_links.txt
dummy.egg-info/top_level.txt"""
class TestSvnDummy(environment.ZippedEnvironment):
def setUp(self):
version = svn_utils.SvnInfo.get_svn_version()
if not version: # None or Empty
return None
self.base_version = tuple([int(x) for x in version.split('.')][:2])
if not self.base_version:
raise ValueError('No SVN tools installed')
elif self.base_version < (1, 3):
raise ValueError('Insufficient SVN Version %s' % version)
elif self.base_version >= (1, 9):
#trying the latest version
self.base_version = (1, 8)
self.dataname = "dummy%i%i" % self.base_version
self.datafile = os.path.join('setuptools', 'tests',
'svn_data', self.dataname + ".zip")
super(TestSvnDummy, self).setUp()
@skipIf(not test_svn._svn_check, "No SVN to test, in the first place")
def test_sources(self):
code, data = environment.run_setup_py(["sdist"],
pypath=self.old_cwd,
data_stream=1)
if code: if code:
raise AssertionError(data) raise AssertionError(data)
sources = os.path.join('dummy.egg-info', 'SOURCES.txt') actual = self._find_egg_info_files(env.paths['lib'])
infile = open(sources, 'r')
try: expected = [
read_contents = infile.read() 'PKG-INFO',
finally: 'SOURCES.txt',
infile.close() 'dependency_links.txt',
del infile 'entry_points.txt',
'not-zip-safe',
self.assertEqual(DUMMY_SOURCE_TXT, read_contents) 'top_level.txt',
]
return data assert sorted(actual) == expected
@skipIf(not test_svn._svn_check, "No SVN to test, in the first place") def _find_egg_info_files(self, root):
def test_svn_tags(self): results = (
code, data = environment.run_setup_py(["egg_info", filenames
"--tag-svn-revision"], for dirpath, dirnames, filenames in os.walk(root)
pypath=self.old_cwd, if os.path.basename(dirpath) == 'EGG-INFO'
data_stream=1) )
if code: # expect exactly one result
raise AssertionError(data) result, = results
return result
pkginfo = os.path.join('dummy.egg-info', 'PKG-INFO')
infile = open(pkginfo, 'r')
try:
read_contents = infile.readlines()
finally:
infile.close()
del infile
self.assertTrue("Version: 0.1.1-r1\n" in read_contents)
@skipIf(not test_svn._svn_check, "No SVN to test, in the first place")
def test_no_tags(self):
code, data = environment.run_setup_py(["egg_info"],
pypath=self.old_cwd,
data_stream=1)
if code:
raise AssertionError(data)
pkginfo = os.path.join('dummy.egg-info', 'PKG-INFO')
infile = open(pkginfo, 'r')
try:
read_contents = infile.readlines()
finally:
infile.close()
del infile
self.assertTrue("Version: 0.1.1\n" in read_contents)
class TestSvnDummyLegacy(environment.ZippedEnvironment):
def setUp(self):
self.base_version = (1, 6)
self.dataname = "dummy%i%i" % self.base_version
self.datafile = os.path.join('setuptools', 'tests',
'svn_data', self.dataname + ".zip")
super(TestSvnDummyLegacy, self).setUp()
def test_sources(self):
code, data = environment.run_setup_py(["sdist"],
pypath=self.old_cwd,
path="",
data_stream=1)
if code:
raise AssertionError(data)
sources = os.path.join('dummy.egg-info', 'SOURCES.txt')
infile = open(sources, 'r')
try:
read_contents = infile.read()
finally:
infile.close()
del infile
self.assertEqual(DUMMY_SOURCE_TXT, read_contents)
return data
def test_suite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
...@@ -3,12 +3,12 @@ import os ...@@ -3,12 +3,12 @@ import os
import sys import sys
import shutil import shutil
import tempfile import tempfile
import unittest
import platform import platform
import pytest
import setuptools import setuptools
from setuptools import find_packages from setuptools import find_packages
from setuptools.tests.py26compat import skipIf
find_420_packages = setuptools.PEP420PackageFinder.find find_420_packages = setuptools.PEP420PackageFinder.find
...@@ -33,13 +33,13 @@ def has_symlink(): ...@@ -33,13 +33,13 @@ def has_symlink():
) )
return can_symlink() and not bad_symlink return can_symlink() and not bad_symlink
class TestFindPackages(unittest.TestCase): class TestFindPackages:
def setUp(self): def setup_method(self, method):
self.dist_dir = tempfile.mkdtemp() self.dist_dir = tempfile.mkdtemp()
self._make_pkg_structure() self._make_pkg_structure()
def tearDown(self): def teardown_method(self, method):
shutil.rmtree(self.dist_dir) shutil.rmtree(self.dist_dir)
def _make_pkg_structure(self): def _make_pkg_structure(self):
...@@ -87,7 +87,7 @@ class TestFindPackages(unittest.TestCase): ...@@ -87,7 +87,7 @@ class TestFindPackages(unittest.TestCase):
def test_regular_package(self): def test_regular_package(self):
self._touch('__init__.py', self.pkg_dir) self._touch('__init__.py', self.pkg_dir)
packages = find_packages(self.dist_dir) packages = find_packages(self.dist_dir)
self.assertEqual(packages, ['pkg', 'pkg.subpkg']) assert packages == ['pkg', 'pkg.subpkg']
def test_exclude(self): def test_exclude(self):
self._touch('__init__.py', self.pkg_dir) self._touch('__init__.py', self.pkg_dir)
...@@ -102,7 +102,7 @@ class TestFindPackages(unittest.TestCase): ...@@ -102,7 +102,7 @@ class TestFindPackages(unittest.TestCase):
alt_dir = self._mkdir('other_pkg', self.dist_dir) alt_dir = self._mkdir('other_pkg', self.dist_dir)
self._touch('__init__.py', alt_dir) self._touch('__init__.py', alt_dir)
packages = find_packages(self.dist_dir, include=['other_pkg']) packages = find_packages(self.dist_dir, include=['other_pkg'])
self.assertEqual(packages, ['other_pkg']) assert packages == ['other_pkg']
def test_dir_with_dot_is_skipped(self): def test_dir_with_dot_is_skipped(self):
shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets')) shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
...@@ -110,7 +110,7 @@ class TestFindPackages(unittest.TestCase): ...@@ -110,7 +110,7 @@ class TestFindPackages(unittest.TestCase):
self._touch('__init__.py', data_dir) self._touch('__init__.py', data_dir)
self._touch('file.dat', data_dir) self._touch('file.dat', data_dir)
packages = find_packages(self.dist_dir) packages = find_packages(self.dist_dir)
self.assertTrue('pkg.some.data' not in packages) assert 'pkg.some.data' not in packages
def test_dir_with_packages_in_subdir_is_excluded(self): def test_dir_with_packages_in_subdir_is_excluded(self):
""" """
...@@ -121,9 +121,9 @@ class TestFindPackages(unittest.TestCase): ...@@ -121,9 +121,9 @@ class TestFindPackages(unittest.TestCase):
build_pkg_dir = self._mkdir('pkg', build_dir) build_pkg_dir = self._mkdir('pkg', build_dir)
self._touch('__init__.py', build_pkg_dir) self._touch('__init__.py', build_pkg_dir)
packages = find_packages(self.dist_dir) packages = find_packages(self.dist_dir)
self.assertTrue('build.pkg' not in packages) assert 'build.pkg' not in packages
@skipIf(not has_symlink(), 'Symlink support required') @pytest.mark.skipif(not has_symlink(), reason='Symlink support required')
def test_symlinked_packages_are_included(self): def test_symlinked_packages_are_included(self):
""" """
A symbolically-linked directory should be treated like any other A symbolically-linked directory should be treated like any other
...@@ -136,10 +136,10 @@ class TestFindPackages(unittest.TestCase): ...@@ -136,10 +136,10 @@ class TestFindPackages(unittest.TestCase):
os.symlink('pkg', linked_pkg) os.symlink('pkg', linked_pkg)
assert os.path.isdir(linked_pkg) assert os.path.isdir(linked_pkg)
packages = find_packages(self.dist_dir) packages = find_packages(self.dist_dir)
self.assertTrue('lpkg' in packages) assert 'lpkg' in packages
def _assert_packages(self, actual, expected): def _assert_packages(self, actual, expected):
self.assertEqual(set(actual), set(expected)) assert set(actual) == set(expected)
def test_pep420_ns_package(self): def test_pep420_ns_package(self):
packages = find_420_packages( packages = find_420_packages(
......
...@@ -27,7 +27,7 @@ def install_context(request, tmpdir, monkeypatch): ...@@ -27,7 +27,7 @@ def install_context(request, tmpdir, monkeypatch):
def fin(): def fin():
# undo the monkeypatch, particularly needed under # undo the monkeypatch, particularly needed under
# windows because of kept handle on cwd # windows because of kept handle on cwd
monkeypatch.undo() monkeypatch.undo()
new_cwd.remove() new_cwd.remove()
user_base.remove() user_base.remove()
user_site.remove() user_site.remove()
...@@ -71,7 +71,6 @@ def test_virtualenvwrapper(install_context): ...@@ -71,7 +71,6 @@ def test_virtualenvwrapper(install_context):
'virtualenvwrapper', 'hook_loader.py') 'virtualenvwrapper', 'hook_loader.py')
@pytest.mark.xfail
def test_pbr(install_context): def test_pbr(install_context):
_install_one('pbr', install_context, _install_one('pbr', install_context,
'pbr', 'core.py') 'pbr', 'core.py')
......
import os import os
import unittest
from setuptools.tests.py26compat import skipIf
try: import pytest
import ast
except ImportError:
pass
class TestMarkerlib(unittest.TestCase):
@skipIf('ast' not in globals(), class TestMarkerlib:
"ast not available (Python < 2.6?)")
@pytest.mark.importorskip('ast')
def test_markers(self): def test_markers(self):
from _markerlib import interpret, default_environment, compile from _markerlib import interpret, default_environment, compile
os_name = os.name os_name = os.name
self.assertTrue(interpret("")) assert interpret("")
self.assertTrue(interpret("os.name != 'buuuu'")) assert interpret("os.name != 'buuuu'")
self.assertTrue(interpret("os_name != 'buuuu'")) assert interpret("os_name != 'buuuu'")
self.assertTrue(interpret("python_version > '1.0'")) assert interpret("python_version > '1.0'")
self.assertTrue(interpret("python_version < '5.0'")) assert interpret("python_version < '5.0'")
self.assertTrue(interpret("python_version <= '5.0'")) assert interpret("python_version <= '5.0'")
self.assertTrue(interpret("python_version >= '1.0'")) assert interpret("python_version >= '1.0'")
self.assertTrue(interpret("'%s' in os.name" % os_name)) assert interpret("'%s' in os.name" % os_name)
self.assertTrue(interpret("'%s' in os_name" % os_name)) assert interpret("'%s' in os_name" % os_name)
self.assertTrue(interpret("'buuuu' not in os.name")) assert interpret("'buuuu' not in os.name")
self.assertFalse(interpret("os.name == 'buuuu'")) assert not interpret("os.name == 'buuuu'")
self.assertFalse(interpret("os_name == 'buuuu'")) assert not interpret("os_name == 'buuuu'")
self.assertFalse(interpret("python_version < '1.0'")) assert not interpret("python_version < '1.0'")
self.assertFalse(interpret("python_version > '5.0'")) assert not interpret("python_version > '5.0'")
self.assertFalse(interpret("python_version >= '5.0'")) assert not interpret("python_version >= '5.0'")
self.assertFalse(interpret("python_version <= '1.0'")) assert not interpret("python_version <= '1.0'")
self.assertFalse(interpret("'%s' not in os.name" % os_name)) assert not interpret("'%s' not in os.name" % os_name)
self.assertFalse(interpret("'buuuu' in os.name and python_version >= '5.0'")) assert not interpret("'buuuu' in os.name and python_version >= '5.0'")
self.assertFalse(interpret("'buuuu' in os_name and python_version >= '5.0'")) assert not interpret("'buuuu' in os_name and python_version >= '5.0'")
environment = default_environment() environment = default_environment()
environment['extra'] = 'test' environment['extra'] = 'test'
self.assertTrue(interpret("extra == 'test'", environment)) assert interpret("extra == 'test'", environment)
self.assertFalse(interpret("extra == 'doc'", environment)) assert not interpret("extra == 'doc'", environment)
def raises_nameError(): def raises_nameError():
try: try:
interpret("python.version == '42'") interpret("python.version == '42'")
...@@ -50,9 +45,9 @@ class TestMarkerlib(unittest.TestCase): ...@@ -50,9 +45,9 @@ class TestMarkerlib(unittest.TestCase):
pass pass
else: else:
raise Exception("Expected NameError") raise Exception("Expected NameError")
raises_nameError() raises_nameError()
def raises_syntaxError(): def raises_syntaxError():
try: try:
interpret("(x for x in (4,))") interpret("(x for x in (4,))")
...@@ -60,9 +55,9 @@ class TestMarkerlib(unittest.TestCase): ...@@ -60,9 +55,9 @@ class TestMarkerlib(unittest.TestCase):
pass pass
else: else:
raise Exception("Expected SyntaxError") raise Exception("Expected SyntaxError")
raises_syntaxError() raises_syntaxError()
statement = "python_version == '5'" statement = "python_version == '5'"
self.assertEqual(compile(statement).__doc__, statement) assert compile(statement).__doc__ == statement
"""msvc9compiler monkey patch test """
Tests for msvc9compiler.
This test ensures that importing setuptools is sufficient to replace
the standard find_vcvarsall function with our patched version that
finds the Visual C++ for Python package.
""" """
import os import os
import shutil
import sys
import tempfile
import unittest
import distutils.errors
import contextlib import contextlib
import distutils.errors
# importing only setuptools should apply the patch import pytest
__import__('setuptools') import mock
class MockReg:
"""Mock for distutils.msvc9compiler.Reg. We patch it
with an instance of this class that mocks out the
functions that access the registry.
"""
def __init__(self, hkey_local_machine={}, hkey_current_user={}):
self.hklm = hkey_local_machine
self.hkcu = hkey_current_user
def __enter__(self): from . import contexts
self.original_read_keys = distutils.msvc9compiler.Reg.read_keys
self.original_read_values = distutils.msvc9compiler.Reg.read_values
_winreg = getattr(distutils.msvc9compiler, '_winreg', None) # importing only setuptools should apply the patch
winreg = getattr(distutils.msvc9compiler, 'winreg', _winreg) __import__('setuptools')
hives = { pytest.importorskip("distutils.msvc9compiler")
winreg.HKEY_CURRENT_USER: self.hkcu,
winreg.HKEY_LOCAL_MACHINE: self.hklm,
}
def read_keys(cls, base, key):
"""Return list of registry keys."""
hive = hives.get(base, {})
return [k.rpartition('\\')[2]
for k in hive if k.startswith(key.lower())]
def read_values(cls, base, key): def mock_reg(hkcu=None, hklm=None):
"""Return dict of registry keys and values.""" """
hive = hives.get(base, {}) Return a mock for distutils.msvc9compiler.Reg, patched
return dict((k.rpartition('\\')[2], hive[k]) to mock out the functions that access the registry.
for k in hive if k.startswith(key.lower())) """
distutils.msvc9compiler.Reg.read_keys = classmethod(read_keys) _winreg = getattr(distutils.msvc9compiler, '_winreg', None)
distutils.msvc9compiler.Reg.read_values = classmethod(read_values) winreg = getattr(distutils.msvc9compiler, 'winreg', _winreg)
hives = {
winreg.HKEY_CURRENT_USER: hkcu or {},
winreg.HKEY_LOCAL_MACHINE: hklm or {},
}
@classmethod
def read_keys(cls, base, key):
"""Return list of registry keys."""
hive = hives.get(base, {})
return [
k.rpartition('\\')[2]
for k in hive if k.startswith(key.lower())
]
@classmethod
def read_values(cls, base, key):
"""Return dict of registry keys and values."""
hive = hives.get(base, {})
return dict(
(k.rpartition('\\')[2], hive[k])
for k in hive if k.startswith(key.lower())
)
return self return mock.patch.multiple(distutils.msvc9compiler.Reg,
read_keys=read_keys, read_values=read_values)
def __exit__(self, exc_type, exc_value, exc_tb):
distutils.msvc9compiler.Reg.read_keys = self.original_read_keys
distutils.msvc9compiler.Reg.read_values = self.original_read_values
@contextlib.contextmanager class TestModulePatch:
def patch_env(**replacements):
""" """
In a context, patch the environment with replacements. Pass None values Ensure that importing setuptools is sufficient to replace
to clear the values. the standard find_vcvarsall function with a version that
recognizes the "Visual C++ for Python" package.
""" """
saved = dict(
(key, os.environ[key])
for key in replacements
if key in os.environ
)
# remove values that are null
remove = (key for (key, value) in replacements.items() if value is None)
for key in list(remove):
os.environ.pop(key, None)
replacements.pop(key)
os.environ.update(replacements)
try:
yield saved
finally:
for key in replacements:
os.environ.pop(key, None)
os.environ.update(saved)
class TestMSVC9Compiler(unittest.TestCase):
def test_find_vcvarsall_patch(self):
if not hasattr(distutils, 'msvc9compiler'):
# skip
return
self.assertEqual(
"setuptools.msvc9_support",
distutils.msvc9compiler.find_vcvarsall.__module__,
"find_vcvarsall was not patched"
)
key_32 = r'software\microsoft\devdiv\vcforpython\9.0\installdir'
key_64 = r'software\wow6432node\microsoft\devdiv\vcforpython\9.0\installdir'
def test_patched(self):
"Test the module is actually patched"
mod_name = distutils.msvc9compiler.find_vcvarsall.__module__
assert mod_name == "setuptools.msvc9_support", "find_vcvarsall unpatched"
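Outside the test suite the same check can be done by hand; the sketch below is only valid on Windows, since importing distutils.msvc9compiler requires winreg:

import setuptools  # importing setuptools is what installs the patch
import distutils.msvc9compiler as msvc

# With the patch applied, find_vcvarsall comes from setuptools.msvc9_support
# instead of the stock distutils implementation.
print(msvc.find_vcvarsall.__module__)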
def test_no_registry_entries_means_nothing_found(self):
"""
No registry entries or environment variable should lead to an error
directing the user to download vcpython27.
"""
find_vcvarsall = distutils.msvc9compiler.find_vcvarsall find_vcvarsall = distutils.msvc9compiler.find_vcvarsall
query_vcvarsall = distutils.msvc9compiler.query_vcvarsall query_vcvarsall = distutils.msvc9compiler.query_vcvarsall
# No registry entries or environment variable means we should with contexts.environment(VS90COMNTOOLS=None):
# not find anything with mock_reg():
with patch_env(VS90COMNTOOLS=None): assert find_vcvarsall(9.0) is None
with MockReg():
self.assertIsNone(find_vcvarsall(9.0))
try: expected = distutils.errors.DistutilsPlatformError
with pytest.raises(expected) as exc:
query_vcvarsall(9.0) query_vcvarsall(9.0)
self.fail('Expected DistutilsPlatformError from query_vcvarsall()') assert 'aka.ms/vcpython27' in str(exc)
except distutils.errors.DistutilsPlatformError:
exc_message = str(sys.exc_info()[1]) @pytest.yield_fixture
self.assertIn('aka.ms/vcpython27', exc_message) def user_preferred_setting(self):
"""
key_32 = r'software\microsoft\devdiv\vcforpython\9.0\installdir' Set up environment with different install dirs for user vs. system
key_64 = r'software\wow6432node\microsoft\devdiv\vcforpython\9.0\installdir' and yield the user_install_dir for the expected result.
"""
# Make two mock files so we can tell whether HCKU entries are with self.mock_install_dir() as user_install_dir:
# preferred to HKLM entries. with self.mock_install_dir() as system_install_dir:
mock_installdir_1 = tempfile.mkdtemp() reg = mock_reg(
mock_vcvarsall_bat_1 = os.path.join(mock_installdir_1, 'vcvarsall.bat') hkcu={
open(mock_vcvarsall_bat_1, 'w').close() self.key_32: user_install_dir,
mock_installdir_2 = tempfile.mkdtemp() },
mock_vcvarsall_bat_2 = os.path.join(mock_installdir_2, 'vcvarsall.bat') hklm={
open(mock_vcvarsall_bat_2, 'w').close() self.key_32: system_install_dir,
try: self.key_64: system_install_dir,
# Ensure we get the current user's setting first },
with MockReg( )
hkey_current_user={key_32: mock_installdir_1}, with reg:
hkey_local_machine={ yield user_install_dir
key_32: mock_installdir_2,
key_64: mock_installdir_2, def test_prefer_current_user(self, user_preferred_setting):
} """
): Ensure user's settings are preferred.
self.assertEqual(mock_vcvarsall_bat_1, find_vcvarsall(9.0)) """
result = distutils.msvc9compiler.find_vcvarsall(9.0)
# Ensure we get the local machine value if it's there assert user_preferred_setting == result
with MockReg(hkey_local_machine={key_32: mock_installdir_2}):
self.assertEqual(mock_vcvarsall_bat_2, find_vcvarsall(9.0)) @pytest.yield_fixture
def local_machine_setting(self):
# Ensure we prefer the 64-bit local machine key """
# (*not* the Wow6432Node key) Set up environment with only the system environment configured.
with MockReg( """
hkey_local_machine={ with self.mock_install_dir() as system_install_dir:
# This *should* only exist on 32-bit machines reg = mock_reg(
key_32: mock_installdir_1, hklm={
# This *should* only exist on 64-bit machines self.key_32: system_install_dir,
key_64: mock_installdir_2, },
} )
): with reg:
self.assertEqual(mock_vcvarsall_bat_1, find_vcvarsall(9.0)) yield system_install_dir
finally:
shutil.rmtree(mock_installdir_1) def test_local_machine_recognized(self, local_machine_setting):
shutil.rmtree(mock_installdir_2) """
Ensure machine setting is honored if user settings are not present.
"""
result = distutils.msvc9compiler.find_vcvarsall(9.0)
assert local_machine_setting == result
@pytest.yield_fixture
def x64_preferred_setting(self):
"""
Set up environment with 64-bit and 32-bit system settings configured
and yield the 64-bit location.
"""
with self.mock_install_dir() as x32_dir:
with self.mock_install_dir() as x64_dir:
reg = mock_reg(
hklm={
# This *should* only exist on 32-bit machines
self.key_32: x32_dir,
# This *should* only exist on 64-bit machines
self.key_64: x64_dir,
},
)
with reg:
yield x64_dir
def test_ensure_64_bit_preferred(self, x64_preferred_setting):
"""
Ensure 64-bit system key is preferred.
"""
result = distutils.msvc9compiler.find_vcvarsall(9.0)
assert x64_preferred_setting == result
@staticmethod
@contextlib.contextmanager
def mock_install_dir():
"""
Make a mock install dir in a unique location so that tests can
distinguish which dir was detected in a given scenario.
"""
with contexts.tempdir() as result:
vcvarsall = os.path.join(result, 'vcvarsall.bat')
with open(vcvarsall, 'w'):
pass
            yield result
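# Illustrative sketch (not setuptools code): the fixtures above fake the
# registry lookup that the patched find_vcvarsall performs, consulting the
# per-user hive (HKCU) before the machine-wide hive (HKLM) for the
# "Visual C++ for Python" install dir.  A minimal standalone version of
# that lookup, assuming Windows and the stdlib winreg module (the function
# name below is hypothetical), could look like this:

try:
    import winreg  # Windows only; the tests above mock the registry instead
except ImportError:
    winreg = None

VC_FOR_PYTHON_KEY = r'Software\Microsoft\DevDiv\VCForPython\9.0'

def find_vc_for_python_installdir():
    """Return the VC-for-Python install dir from the registry, or None."""
    if winreg is None:
        return None
    for hive in (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE):
        try:
            with winreg.OpenKey(hive, VC_FOR_PYTHON_KEY) as key:
                installdir, _ = winreg.QueryValueEx(key, 'installdir')
                return installdir
        except OSError:
            continue
    return None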
"""Package Index Tests
"""
import sys import sys
import os
import unittest
import distutils.errors import distutils.errors
import six import six
...@@ -12,7 +8,8 @@ import pkg_resources ...@@ -12,7 +8,8 @@ import pkg_resources
import setuptools.package_index import setuptools.package_index
from setuptools.tests.server import IndexServer from setuptools.tests.server import IndexServer
class TestPackageIndex(unittest.TestCase):
class TestPackageIndex:
    def test_bad_url_bad_port(self):
        index = setuptools.package_index.PackageIndex()
...@@ -21,9 +18,9 @@ class TestPackageIndex(unittest.TestCase):
            v = index.open_url(url)
        except Exception:
            v = sys.exc_info()[1]
            assert url in str(v)
        else:
            assert isinstance(v, urllib.error.HTTPError)

    def test_bad_url_typo(self):
        # issue 16
...@@ -38,9 +35,9 @@ class TestPackageIndex(unittest.TestCase):
            v = index.open_url(url)
        except Exception:
            v = sys.exc_info()[1]
            assert url in str(v)
        else:
            assert isinstance(v, urllib.error.HTTPError)

    def test_bad_url_bad_status_line(self):
        index = setuptools.package_index.PackageIndex(
...@@ -56,7 +53,7 @@ class TestPackageIndex(unittest.TestCase):
            v = index.open_url(url)
        except Exception:
            v = sys.exc_info()[1]
            assert 'line' in str(v)
        else:
            raise AssertionError('Should have raised here!')
...@@ -97,7 +94,7 @@ class TestPackageIndex(unittest.TestCase):
            hosts=('www.example.com',)
        )
        url = 'file:///tmp/test_package_index'
        assert index.url_ok(url, True)
    def test_links_priority(self):
        """
...@@ -130,21 +127,30 @@ class TestPackageIndex(unittest.TestCase):
        server.stop()

        # the distribution has been found
        assert 'foobar' in pi
        # we have only one link, because links are compared without md5
        assert len(pi['foobar'])==1
        # the link should be from the index
        assert 'correct_md5' in pi['foobar'][0].location

    def test_parse_bdist_wininst(self):
        parse = setuptools.package_index.parse_bdist_wininst

        actual = parse('reportlab-2.5.win32-py2.4.exe')
        expected = 'reportlab-2.5', '2.4', 'win32'
        assert actual == expected

        actual = parse('reportlab-2.5.win32.exe')
        expected = 'reportlab-2.5', None, 'win32'
        assert actual == expected

        actual = parse('reportlab-2.5.win-amd64-py2.7.exe')
        expected = 'reportlab-2.5', '2.7', 'win-amd64'
        assert actual == expected

        actual = parse('reportlab-2.5.win-amd64.exe')
        expected = 'reportlab-2.5', None, 'win-amd64'
        assert actual == expected
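    # Illustrative sketch (not in the original suite): the same four cases
    # expressed with pytest.mark.parametrize, assuming pytest is imported in
    # this module.  The method name is hypothetical; the expected tuples are
    # taken from the assertions above.
    @pytest.mark.parametrize('filename, expected', [
        ('reportlab-2.5.win32-py2.4.exe', ('reportlab-2.5', '2.4', 'win32')),
        ('reportlab-2.5.win32.exe', ('reportlab-2.5', None, 'win32')),
        ('reportlab-2.5.win-amd64-py2.7.exe', ('reportlab-2.5', '2.7', 'win-amd64')),
        ('reportlab-2.5.win-amd64.exe', ('reportlab-2.5', None, 'win-amd64')),
    ])
    def test_parse_bdist_wininst_parametrized(self, filename, expected):
        assert setuptools.package_index.parse_bdist_wininst(filename) == expected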
    def test__vcs_split_rev_from_url(self):
        """
...@@ -152,55 +158,51 @@ class TestPackageIndex(unittest.TestCase):
        """
        vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url
        url, rev = vsrfu('https://example.com/bar@2995')
        assert url == 'https://example.com/bar'
        assert rev == '2995'

    def test_local_index(self, tmpdir):
        """
        local_open should be able to read an index from the file system.
        """
        index_file = tmpdir / 'index.html'
        with index_file.open('w') as f:
            f.write('<div>content</div>')
        url = 'file:' + urllib.request.pathname2url(str(tmpdir)) + '/'
        res = setuptools.package_index.local_open(url)
        assert 'content' in res.read()
class TestContentCheckers:
    def test_md5(self):
        checker = setuptools.package_index.HashChecker.from_url(
            'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
        checker.feed('You should probably not be using MD5'.encode('ascii'))
        assert checker.hash.hexdigest() == 'f12895fdffbd45007040d2e44df98478'
        assert checker.is_valid()

    def test_other_fragment(self):
        "Content checks should succeed silently if no hash is present"
        checker = setuptools.package_index.HashChecker.from_url(
            'http://foo/bar#something%20completely%20different')
        checker.feed('anything'.encode('ascii'))
        assert checker.is_valid()

    def test_blank_md5(self):
        "Content checks should succeed if a hash is empty"
        checker = setuptools.package_index.HashChecker.from_url(
            'http://foo/bar#md5=')
        checker.feed('anything'.encode('ascii'))
        assert checker.is_valid()

    def test_get_hash_name_md5(self):
        checker = setuptools.package_index.HashChecker.from_url(
            'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
        assert checker.hash_name == 'md5'

    def test_report(self):
        checker = setuptools.package_index.HashChecker.from_url(
            'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
        rep = checker.report(lambda x: x, 'My message about %s')
        assert rep == 'My message about md5'
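# Illustrative sketch (not part of the suite): what HashChecker verifies is
# simply that the digest of the downloaded bytes matches the hash named in
# the URL fragment.  The helper below is hypothetical; the digest literal is
# the one asserted in the tests above.
import hashlib

def fragment_matches(data, expected_md5):
    return hashlib.md5(data).hexdigest() == expected_md5

assert fragment_matches(
    'You should probably not be using MD5'.encode('ascii'),
    'f12895fdffbd45007040d2e44df98478',
)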
#!/usr/bin/python
# -*- coding: utf-8 -*-
# NOTE: the shebang and encoding lines are for TestScriptHeader; do not remove
import os
import sys
import tempfile
import shutil

import pytest

import pkg_resources
from pkg_resources import (parse_requirements, VersionConflict, parse_version,
    Distribution, EntryPoint, Requirement, safe_version, safe_name,
    WorkingSet)

packaging = pkg_resources.packaging

from setuptools.command.easy_install import (get_script_header, is_sh,
    nt_quote_arg)
import six

...@@ -46,36 +48,32 @@ class Metadata(pkg_resources.EmptyProvider):
dist_from_fn = pkg_resources.Distribution.from_filename


class TestDistro:
    def testCollection(self):
        # empty path should produce no distributions
        ad = pkg_resources.Environment([], platform=None, python=None)
        assert list(ad) == []
        assert ad['FooPkg'] == []
        ad.add(dist_from_fn("FooPkg-1.3_1.egg"))
        ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg"))
        ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg"))

        # Name is in there now
        assert ad['FooPkg']
        # But only 1 package
        assert list(ad) == ['foopkg']

        # Distributions sort by version
        assert [dist.version for dist in ad['FooPkg']] == ['1.4','1.3-1','1.2']

        # Removing a distribution leaves sequence alone
        ad.remove(ad['FooPkg'][1])
        assert [dist.version for dist in ad['FooPkg']] == ['1.4','1.2']

        # And inserting adds them in order
        ad.add(dist_from_fn("FooPkg-1.9.egg"))
        assert [dist.version for dist in ad['FooPkg']] == ['1.9','1.4','1.2']

        ws = WorkingSet([])
        foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg")
...@@ -83,31 +81,32 @@ class DistroTests(TestCase):
        req, = parse_requirements("FooPkg>=1.3")

        # Nominal case: no distros on path, should yield all applicable
        assert ad.best_match(req, ws).version == '1.9'
        # If a matching distro is already installed, should return only that
        ws.add(foo14)
        assert ad.best_match(req, ws).version == '1.4'

        # If the first matching distro is unsuitable, it's a version conflict
        ws = WorkingSet([])
        ws.add(foo12)
        ws.add(foo14)
        with pytest.raises(VersionConflict):
            ad.best_match(req, ws)

        # If more than one match on the path, the first one takes precedence
        ws = WorkingSet([])
        ws.add(foo14)
        ws.add(foo12)
        ws.add(foo14)
        assert ad.best_match(req, ws).version == '1.4'

    def checkFooPkg(self,d):
        assert d.project_name == "FooPkg"
        assert d.key == "foopkg"
        assert d.version == "1.3.post1"
        assert d.py_version == "2.4"
        assert d.platform == "win32"
        assert d.parsed_version == parse_version("1.3-1")

    def testDistroBasics(self):
        d = Distribution(
...@@ -117,13 +116,13 @@ class DistroTests(TestCase):
        self.checkFooPkg(d)

        d = Distribution("/some/path")
        assert d.py_version == sys.version[:3]
        assert d.platform == None

    def testDistroParse(self):
        d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg")
        self.checkFooPkg(d)
        d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg-info")
        self.checkFooPkg(d)

    def testDistroMetadata(self):
...@@ -139,10 +138,7 @@ class DistroTests(TestCase):
        return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))

    def checkRequires(self, dist, txt, extras=()):
        assert list(dist.requires(extras)) == list(parse_requirements(txt))

    def testDistroDependsSimple(self):
        for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
...@@ -152,11 +148,11 @@ class DistroTests(TestCase):
        ad = pkg_resources.Environment([])
        ws = WorkingSet([])
        # Resolving no requirements -> nothing to install
        assert list(ws.resolve([], ad)) == []
        # Request something not in the collection -> DistributionNotFound
        with pytest.raises(pkg_resources.DistributionNotFound):
            ws.resolve(parse_requirements("Foo"), ad)

        Foo = Distribution.from_filename(
            "/foo_dir/Foo-1.2.egg",
            metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
...@@ -167,28 +163,25 @@ class DistroTests(TestCase):
        # Request thing(s) that are available -> list to activate
        for i in range(3):
            targets = list(ws.resolve(parse_requirements("Foo"), ad))
            assert targets == [Foo]
            list(map(ws.add,targets))
        with pytest.raises(VersionConflict):
            ws.resolve(parse_requirements("Foo==0.9"), ad)
        ws = WorkingSet([]) # reset

        # Request an extra that causes an unresolved dependency for "Baz"
        with pytest.raises(pkg_resources.DistributionNotFound):
            ws.resolve(parse_requirements("Foo[bar]"), ad)
        Baz = Distribution.from_filename(
            "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
        )
        ad.add(Baz)

        # Activation list now includes resolved dependency
        assert list(ws.resolve(parse_requirements("Foo[bar]"), ad)) == [Foo,Baz]
        # Requests for conflicting versions produce VersionConflict
        with pytest.raises(VersionConflict):
            ws.resolve(parse_requirements("Foo==1.2\nFoo!=1.2"), ad)

    def testDistroDependsOptions(self):
        d = self.distRequires("""
...@@ -213,49 +206,55 @@ class DistroTests(TestCase):
            d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
            ["fastcgi", "docgen"]
        )
        with pytest.raises(pkg_resources.UnknownExtra):
            d.requires(["foo"])
class TestEntryPoints:

    def assertfields(self, ep):
        assert ep.name == "foo"
        assert ep.module_name == "setuptools.tests.test_resources"
        assert ep.attrs == ("TestEntryPoints",)
        assert ep.extras == ("x",)
        assert ep.load() is TestEntryPoints
        assert (
            str(ep) ==
            "foo = setuptools.tests.test_resources:TestEntryPoints [x]"
        )

    def setup_method(self, method):
        self.dist = Distribution.from_filename(
            "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))

    def testBasics(self):
        ep = EntryPoint(
            "foo", "setuptools.tests.test_resources", ["TestEntryPoints"],
            ["x"], self.dist
        )
        self.assertfields(ep)

    def testParse(self):
        s = "foo = setuptools.tests.test_resources:TestEntryPoints [x]"
        ep = EntryPoint.parse(s, self.dist)
        self.assertfields(ep)

        ep = EntryPoint.parse("bar baz= spammity[PING]")
        assert ep.name == "bar baz"
        assert ep.module_name == "spammity"
        assert ep.attrs == ()
        assert ep.extras == ("ping",)

        ep = EntryPoint.parse(" fizzly = wocka:foo")
        assert ep.name == "fizzly"
        assert ep.module_name == "wocka"
        assert ep.attrs == ("foo",)
        assert ep.extras == ()

        # plus in the name
        spec = "html+mako = mako.ext.pygmentplugin:MakoHtmlLexer"
        ep = EntryPoint.parse(spec)
        assert ep.name == 'html+mako'

    def testRejects(self):
        for ep in [
...@@ -266,9 +265,9 @@ class EntryPointTests(TestCase):
        else: raise AssertionError("Should've been bad", ep)

    def checkSubMap(self, m):
        assert len(m) == len(self.submap_expect)
        for key, ep in six.iteritems(self.submap_expect):
            assert repr(m.get(key)) == repr(ep)

    submap_expect = dict(
        feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
...@@ -284,72 +283,71 @@ class EntryPointTests(TestCase):
    def testParseList(self):
        self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
        with pytest.raises(ValueError):
            EntryPoint.parse_group("x a", "foo=bar")
        with pytest.raises(ValueError):
            EntryPoint.parse_group("x", ["foo=baz", "foo=bar"])

    def testParseMap(self):
        m = EntryPoint.parse_map({'xyz':self.submap_str})
        self.checkSubMap(m['xyz'])
        assert list(m.keys()) == ['xyz']
        m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
        self.checkSubMap(m['xyz'])
        assert list(m.keys()) == ['xyz']
        with pytest.raises(ValueError):
            EntryPoint.parse_map(["[xyz]", "[xyz]"])
        with pytest.raises(ValueError):
            EntryPoint.parse_map(self.submap_str)
class TestRequirements:

    def testBasics(self):
        r = Requirement.parse("Twisted>=1.2")
        assert str(r) == "Twisted>=1.2"
        assert repr(r) == "Requirement.parse('Twisted>=1.2')"
        assert r == Requirement("Twisted", [('>=','1.2')], ())
        assert r == Requirement("twisTed", [('>=','1.2')], ())
        assert r != Requirement("Twisted", [('>=','2.0')], ())
        assert r != Requirement("Zope", [('>=','1.2')], ())
        assert r != Requirement("Zope", [('>=','3.0')], ())
        assert r != Requirement.parse("Twisted[extras]>=1.2")

    def testOrdering(self):
        r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ())
        r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ())
        assert r1 == r2
        assert str(r1) == str(r2)
        assert str(r2) == "Twisted==1.2c1,>=1.2"

    def testBasicContains(self):
        r = Requirement("Twisted", [('>=','1.2')], ())
        foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
        twist11 = Distribution.from_filename("Twisted-1.1.egg")
        twist12 = Distribution.from_filename("Twisted-1.2.egg")
        assert parse_version('1.2') in r
        assert parse_version('1.1') not in r
        assert '1.2' in r
        assert '1.1' not in r
        assert foo_dist not in r
        assert twist11 not in r
        assert twist12 in r
def testAdvancedContains(self):
r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5")
for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'):
self.assertTrue(v in r, (v,r))
for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'):
self.assertTrue(v not in r, (v,r))
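    # Illustrative sketch (not in the original suite): Requirement
    # membership accepts both version strings and parsed versions, as the
    # contains-style checks above exercise.  The method name is hypothetical.
    def test_contains_sketch(self):
        req = Requirement.parse("Twisted>=1.2")
        assert '1.2' in req
        assert parse_version('1.1') not in req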
    def testOptionsAndHashing(self):
        r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
        r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
        assert r1 == r2
        assert r1.extras == ("foo","bar")
        assert r2.extras == ("bar","foo") # extras are normalized
        assert hash(r1) == hash(r2)
        assert (
            hash(r1)
            ==
            hash((
                "twisted",
                packaging.specifiers.SpecifierSet(">=1.2"),
                frozenset(["foo","bar"]),
            ))
        )

    def testVersionEquality(self):
...@@ -357,42 +355,42 @@ class RequirementsTests(TestCase):
        r2 = Requirement.parse("foo!=0.3a4")
        d = Distribution.from_filename

        assert d("foo-0.3a4.egg") not in r1
        assert d("foo-0.3a1.egg") not in r1
        assert d("foo-0.3a4.egg") not in r2

        assert d("foo-0.3a2.egg") in r1
        assert d("foo-0.3a2.egg") in r2
        assert d("foo-0.3a3.egg") in r2
        assert d("foo-0.3a5.egg") in r2

    def testSetuptoolsProjectName(self):
        """
        The setuptools project should implement the setuptools package.
        """

        assert (
            Requirement.parse('setuptools').project_name == 'setuptools')
        # setuptools 0.7 and higher means setuptools.
        assert (
            Requirement.parse('setuptools == 0.7').project_name == 'setuptools')
        assert (
            Requirement.parse('setuptools == 0.7a1').project_name == 'setuptools')
        assert (
            Requirement.parse('setuptools >= 0.7').project_name == 'setuptools')
class TestParsing:

    def testEmptyParse(self):
        assert list(parse_requirements('')) == []

    def testYielding(self):
        for inp,out in [
            ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
            (['x\n\n','y'], ['x','y']),
        ]:
            assert list(pkg_resources.yield_lines(inp)) == out

    def testSplitting(self):
        sample = """
...@@ -408,64 +406,81 @@ class ParseTests(TestCase):
                    [q]
                    v
                    """
        assert (
            list(pkg_resources.split_sections(sample))
            ==
            [
                (None, ["x"]),
                ("Y", ["z", "a"]),
                ("b", ["c"]),
                ("d", []),
                ("q", ["v"]),
            ]
        )
        with pytest.raises(ValueError):
            list(pkg_resources.split_sections("[foo"))

    def testSafeName(self):
        assert safe_name("adns-python") == "adns-python"
        assert safe_name("WSGI Utils") == "WSGI-Utils"
        assert safe_name("WSGI  Utils") == "WSGI-Utils"
        assert safe_name("Money$$$Maker") == "Money-Maker"
        assert safe_name("peak.web") != "peak-web"

    def testSafeVersion(self):
        assert safe_version("1.2-1") == "1.2.post1"
        assert safe_version("1.2 alpha") == "1.2.alpha"
        assert safe_version("2.3.4 20050521") == "2.3.4.20050521"
        assert safe_version("Money$$$Maker") == "Money-Maker"
        assert safe_version("peak.web") == "peak.web"

    def testSimpleRequirements(self):
        assert (
            list(parse_requirements('Twis-Ted>=1.2-1'))
            ==
            [Requirement('Twis-Ted',[('>=','1.2-1')], ())]
        )
        assert (
            list(parse_requirements('Twisted >=1.2, \ # more\n<2.0'))
            ==
            [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
        )
        assert (
            Requirement.parse("FooBar==1.99a3")
            ==
            Requirement("FooBar", [('==','1.99a3')], ())
        )
        with pytest.raises(ValueError):
            Requirement.parse(">=2.3")
        with pytest.raises(ValueError):
            Requirement.parse("x\\")
        with pytest.raises(ValueError):
            Requirement.parse("x==2 q")
        with pytest.raises(ValueError):
            Requirement.parse("X==1\nY==2")
        with pytest.raises(ValueError):
            Requirement.parse("#")

    def testVersionEquality(self):
        def c(s1,s2):
            p1, p2 = parse_version(s1),parse_version(s2)
            assert p1 == p2, (s1,s2,p1,p2)

        c('1.2-rc1', '1.2rc1')
        c('0.4', '0.4.0')
        c('0.4.0.0', '0.4.0')
        c('0.4.0-0', '0.4-0')
        c('0post1', '0.0post1')
        c('0pre1', '0.0c1')
        c('0.0.0preview1', '0c1')
        c('0.0c1', '0-rc1')
        c('1.2a1', '1.2.a.1')
        c('1.2.a', '1.2a')

    def testVersionOrdering(self):
        def c(s1,s2):
            p1, p2 = parse_version(s1),parse_version(s2)
            assert p1<p2, (s1,s2,p1,p2)

        c('2.1','2.1.1')
        c('2a1','2b0')
...@@ -473,16 +488,14 @@ class ParseTests(TestCase):
        c('2.3a1', '2.3')
        c('2.1-1', '2.1-2')
        c('2.1-1', '2.1.1')
        c('2.1', '2.1post4')
        c('2.1a0-20040501', '2.1')
        c('1.1', '02.1')
        c('3.2', '3.2.post0')
        c('3.2post1', '3.2post2')
        c('0.4', '4.0')
        c('0.0.4', '0.4.0')
        c('0post1', '0.4post1')
        c('2.1.0-rc1','2.1.0')
        c('2.1dev','2.1a0')

...@@ -496,8 +509,59 @@ class ParseTests(TestCase):
            for v2 in torture[p+1:]:
                c(v2,v1)
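    # Illustrative sketch (not in the original suite): the orderings relied
    # on above come straight from PEP 440-style parsing; a few of them are
    # spelled out directly here.  The method name is hypothetical.
    def test_ordering_sketch(self):
        assert parse_version('1.2rc1') == parse_version('1.2-rc1')
        assert parse_version('0.4') == parse_version('0.4.0')
        assert parse_version('2.1') < parse_version('2.1post4')
        assert parse_version('2.1dev') < parse_version('2.1a0')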
def testVersionBuildout(self):
"""
        Buildout has a function in its bootstrap.py that inspected the return
value of parse_version. The new parse_version returns a Version class
which needs to support this behavior, at least for now.
"""
def buildout(parsed_version):
_final_parts = '*final-', '*final'
def _final_version(parsed_version):
for part in parsed_version:
if (part[:1] == '*') and (part not in _final_parts):
return False
return True
return _final_version(parsed_version)
assert buildout(parse_version("1.0"))
assert not buildout(parse_version("1.0a1"))
def testVersionIndexable(self):
"""
Some projects were doing things like parse_version("v")[0], so we'll
support indexing the same as we support iterating.
"""
assert parse_version("1.0")[0] == "00000001"
    def testVersionTupleSort(self):
"""
Some projects expected to be able to sort tuples against the return
value of parse_version. So again we'll add a warning enabled shim to
make this possible.
"""
assert parse_version("1.0") < tuple(parse_version("2.0"))
assert parse_version("1.0") <= tuple(parse_version("2.0"))
assert parse_version("1.0") == tuple(parse_version("1.0"))
assert parse_version("3.0") > tuple(parse_version("2.0"))
assert parse_version("3.0") >= tuple(parse_version("2.0"))
assert parse_version("3.0") != tuple(parse_version("2.0"))
assert not (parse_version("3.0") != tuple(parse_version("3.0")))
def testVersionHashable(self):
"""
Ensure that our versions stay hashable even though we've subclassed
them and added some shim code to them.
"""
assert (
hash(parse_version("1.0"))
==
hash(parse_version("1.0"))
)
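    # Illustrative sketch (not in the original suite): because parsed
    # versions hash consistently, equal versions deduplicate in sets and can
    # serve as dict keys.  The method name is hypothetical.
    def test_hashable_sketch(self):
        versions = {parse_version("1.0"), parse_version("1.0")}
        assert len(versions) == 1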
class TestScriptHeader:
    non_ascii_exe = '/Users/José/bin/python'
    exe_with_spaces = r'C:\Program Files\Python33\python.exe'
...@@ -505,17 +569,15 @@ class ScriptHeaderTests(TestCase):
        if not sys.platform.startswith('java') or not is_sh(sys.executable):
            # This test is for non-Jython platforms
            expected = '#!%s\n' % nt_quote_arg(os.path.normpath(sys.executable))
            assert get_script_header('#!/usr/local/bin/python') == expected
            expected = '#!%s -x\n' % nt_quote_arg(os.path.normpath(sys.executable))
            assert get_script_header('#!/usr/bin/python -x') == expected
            candidate = get_script_header('#!/usr/bin/python',
                executable=self.non_ascii_exe)
            assert candidate == '#!%s -x\n' % self.non_ascii_exe
            candidate = get_script_header('#!/usr/bin/python',
                executable=self.exe_with_spaces)
            assert candidate == '#!"%s"\n' % self.exe_with_spaces

    def test_get_script_header_jython_workaround(self):
        # This test doesn't work with Python 3 in some locales
...@@ -536,44 +598,46 @@ class ScriptHeaderTests(TestCase):
        try:
            # A mock sys.executable that uses a shebang line (this file)
            exe = os.path.normpath(os.path.splitext(__file__)[0] + '.py')
            assert (
                get_script_header('#!/usr/local/bin/python', executable=exe)
                ==
                '#!/usr/bin/env %s\n' % exe
            )

            # Ensure we generate what is basically a broken shebang line
            # when there's options, with a warning emitted
            sys.stdout = sys.stderr = six.StringIO()
            candidate = get_script_header('#!/usr/bin/python -x',
                executable=exe)
            assert candidate == '#!%s -x\n' % exe
            assert 'Unable to adapt shebang line' in sys.stdout.getvalue()
            sys.stdout = sys.stderr = six.StringIO()
            candidate = get_script_header('#!/usr/bin/python',
                executable=self.non_ascii_exe)
            assert candidate == '#!%s -x\n' % self.non_ascii_exe
            assert 'Unable to adapt shebang line' in sys.stdout.getvalue()
        finally:
            del sys.modules["java"]
            sys.platform = platform
            sys.stdout, sys.stderr = stdout, stderr
class TestNamespaces:

    def setup_method(self, method):
        self._ns_pkgs = pkg_resources._namespace_packages.copy()
        self._tmpdir = tempfile.mkdtemp(prefix="tests-setuptools-")
        os.makedirs(os.path.join(self._tmpdir, "site-pkgs"))
        self._prev_sys_path = sys.path[:]
        sys.path.append(os.path.join(self._tmpdir, "site-pkgs"))

    def teardown_method(self, method):
        shutil.rmtree(self._tmpdir)
        pkg_resources._namespace_packages = self._ns_pkgs.copy()
        sys.path = self._prev_sys_path[:]

    @pytest.mark.skipif(os.path.islink(tempfile.gettempdir()),
        reason="Test fails when /tmp is a symlink. See #231")
    def test_two_levels_deep(self):
        """
        Test nested namespace packages
......
"""develop tests """develop tests
""" """
import sys
import os import os
import shutil
import unittest
import tempfile
import types import types
import pytest
import pkg_resources import pkg_resources
import setuptools.sandbox import setuptools.sandbox
from setuptools.sandbox import DirectorySandbox, SandboxViolation from setuptools.sandbox import DirectorySandbox, SandboxViolation
def has_win32com():
"""
Run this to determine if the local machine has win32com, and if it
does, include additional tests.
"""
if not sys.platform.startswith('win32'):
return False
try:
__import__('win32com')
except ImportError:
return False
return True
class TestSandbox:

    def test_devnull(self, tmpdir):
        sandbox = DirectorySandbox(str(tmpdir))
        sandbox.run(self._file_writer(os.devnull))

    @staticmethod
    def _file_writer(path):
        def do_write():
            with open(path, 'w') as f:
                f.write('xxx')
        return do_write

    def test_win32com(self, tmpdir):
        """
        win32com should not be prevented from caching COM interfaces
        in gen_py.
        """
        win32com = pytest.importorskip('win32com')
        gen_py = win32com.__gen_path__
        target = os.path.join(gen_py, 'test_write')
        sandbox = DirectorySandbox(str(tmpdir))
        try:
            try:
                sandbox.run(self._file_writer(target))
            except SandboxViolation:
                pytest.fail("Could not create gen_py file due to SandboxViolation")
        finally:
            if os.path.exists(target):
                os.remove(target)
    def test_setup_py_with_BOM(self):
        """
...@@ -73,11 +51,8 @@ class TestSandbox(unittest.TestCase):
        setuptools.sandbox._execfile(target, vars(namespace))
        assert namespace.result == 'passed'

    def test_setup_py_with_CRLF(self, tmpdir):
        setup_py = tmpdir / 'setup.py'
        with setup_py.open('wb') as stream:
            stream.write(b'"degenerate script"\r\n')
        setuptools.sandbox._execfile(str(setup_py), globals())
if __name__ == '__main__':
unittest.main()
...@@ -6,19 +6,16 @@ import os
import shutil
import sys
import tempfile
import unicodedata
import contextlib

import six
import pytest

import pkg_resources
from setuptools.command.sdist import sdist
from setuptools.command.egg_info import manifest_maker
from setuptools.dist import Distribution

SETUP_ATTRS = {
    'name': 'sdist_test',
...@@ -80,9 +77,9 @@ def decompose(path):
    return path


class TestSdistTest:
    def setup_method(self, method):
        self.temp_dir = tempfile.mkdtemp()
        f = open(os.path.join(self.temp_dir, 'setup.py'), 'w')
        f.write(SETUP_PY)
...@@ -100,7 +97,7 @@ class TestSdistTest(unittest.TestCase):
        self.old_cwd = os.getcwd()
        os.chdir(self.temp_dir)

    def teardown_method(self, method):
        os.chdir(self.old_cwd)
        shutil.rmtree(self.temp_dir)
...@@ -119,9 +116,9 @@ class TestSdistTest(unittest.TestCase):
        cmd.run()
        manifest = cmd.filelist.files

        assert os.path.join('sdist_test', 'a.txt') in manifest
        assert os.path.join('sdist_test', 'b.txt') in manifest
        assert os.path.join('sdist_test', 'c.rst') not in manifest

    def test_defaults_case_sensitivity(self):
...@@ -146,9 +143,9 @@ class TestSdistTest(unittest.TestCase):
        # lowercase all names so we can test in a case-insensitive way to make sure the files are not included
        manifest = map(lambda x: x.lower(), cmd.filelist.files)
        assert 'readme.rst' not in manifest, manifest
        assert 'setup.py' not in manifest, manifest
        assert 'setup.cfg' not in manifest, manifest
def test_manifest_is_written_with_utf8_encoding(self): def test_manifest_is_written_with_utf8_encoding(self):
# Test for #303. # Test for #303.
...@@ -186,7 +183,7 @@ class TestSdistTest(unittest.TestCase): ...@@ -186,7 +183,7 @@ class TestSdistTest(unittest.TestCase):
fs_enc = sys.getfilesystemencoding() fs_enc = sys.getfilesystemencoding()
filename = filename.decode(fs_enc) filename = filename.decode(fs_enc)
self.assertTrue(posix(filename) in u_contents) assert posix(filename) in u_contents
# Python 3 only # Python 3 only
if six.PY3: if six.PY3:
...@@ -225,10 +222,10 @@ class TestSdistTest(unittest.TestCase): ...@@ -225,10 +222,10 @@ class TestSdistTest(unittest.TestCase):
self.fail(e) self.fail(e)
# The manifest should contain the UTF-8 filename # The manifest should contain the UTF-8 filename
self.assertTrue(posix(filename) in contents) assert posix(filename) in contents
# The filelist should have been updated as well # The filelist should have been updated as well
self.assertTrue(u_filename in mm.filelist.files) assert u_filename in mm.filelist.files
def test_write_manifest_skips_non_utf8_filenames(self): def test_write_manifest_skips_non_utf8_filenames(self):
""" """
...@@ -266,10 +263,10 @@ class TestSdistTest(unittest.TestCase): ...@@ -266,10 +263,10 @@ class TestSdistTest(unittest.TestCase):
self.fail(e) self.fail(e)
# The Latin-1 filename should have been skipped # The Latin-1 filename should have been skipped
self.assertFalse(posix(filename) in contents) assert posix(filename) not in contents
# The filelist should have been updated as well # The filelist should have been updated as well
self.assertFalse(u_filename in mm.filelist.files) assert u_filename not in mm.filelist.files
def test_manifest_is_read_with_utf8_encoding(self): def test_manifest_is_read_with_utf8_encoding(self):
# Test for #303. # Test for #303.
...@@ -300,7 +297,7 @@ class TestSdistTest(unittest.TestCase): ...@@ -300,7 +297,7 @@ class TestSdistTest(unittest.TestCase):
# The filelist should contain the UTF-8 filename # The filelist should contain the UTF-8 filename
if six.PY3: if six.PY3:
filename = filename.decode('utf-8') filename = filename.decode('utf-8')
self.assertTrue(filename in cmd.filelist.files) assert filename in cmd.filelist.files
# Python 3 only # Python 3 only
if six.PY3: if six.PY3:
...@@ -337,10 +334,11 @@ class TestSdistTest(unittest.TestCase): ...@@ -337,10 +334,11 @@ class TestSdistTest(unittest.TestCase):
# The Latin-1 filename should have been skipped # The Latin-1 filename should have been skipped
filename = filename.decode('latin-1') filename = filename.decode('latin-1')
self.assertFalse(filename in cmd.filelist.files) assert filename not in cmd.filelist.files
    @pytest.mark.skipif(six.PY3 and locale.getpreferredencoding() != 'UTF-8',
        reason='Unittest fails if locale is not utf-8 but the manifest is '
               'recorded correctly')
def test_sdist_with_utf8_encoded_filename(self): def test_sdist_with_utf8_encoded_filename(self):
# Test for #303. # Test for #303.
dist = Distribution(SETUP_ATTRS) dist = Distribution(SETUP_ATTRS)
...@@ -365,15 +363,15 @@ class TestSdistTest(unittest.TestCase): ...@@ -365,15 +363,15 @@ class TestSdistTest(unittest.TestCase):
if fs_enc == 'cp1252': if fs_enc == 'cp1252':
# Python 3 mangles the UTF-8 filename # Python 3 mangles the UTF-8 filename
filename = filename.decode('cp1252') filename = filename.decode('cp1252')
self.assertTrue(filename in cmd.filelist.files) assert filename in cmd.filelist.files
else: else:
filename = filename.decode('mbcs') filename = filename.decode('mbcs')
self.assertTrue(filename in cmd.filelist.files) assert filename in cmd.filelist.files
else: else:
filename = filename.decode('utf-8') filename = filename.decode('utf-8')
self.assertTrue(filename in cmd.filelist.files) assert filename in cmd.filelist.files
else: else:
self.assertTrue(filename in cmd.filelist.files) assert filename in cmd.filelist.files
def test_sdist_with_latin1_encoded_filename(self): def test_sdist_with_latin1_encoded_filename(self):
# Test for #303. # Test for #303.
...@@ -385,7 +383,7 @@ class TestSdistTest(unittest.TestCase): ...@@ -385,7 +383,7 @@ class TestSdistTest(unittest.TestCase):
# Latin-1 filename # Latin-1 filename
filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
open(filename, 'w').close() open(filename, 'w').close()
self.assertTrue(os.path.isfile(filename)) assert os.path.isfile(filename)
with quiet(): with quiet():
cmd.run() cmd.run()
...@@ -401,11 +399,11 @@ class TestSdistTest(unittest.TestCase): ...@@ -401,11 +399,11 @@ class TestSdistTest(unittest.TestCase):
                else:
                    filename = filename.decode('latin-1')

                assert filename in cmd.filelist.files
            else:
                # The Latin-1 filename should have been skipped
                filename = filename.decode('latin-1')
                assert filename not in cmd.filelist.files
        else:
            # Under Python 2 there seems to be no decoded string in the
            # filelist. However, due to decode and encoding of the
...@@ -415,139 +413,23 @@ class TestSdistTest(unittest.TestCase):
                # be performed for the manifest output.
                fs_enc = sys.getfilesystemencoding()
                filename.decode(fs_enc)
                assert filename in cmd.filelist.files
            except UnicodeDecodeError:
                assert filename not in cmd.filelist.files
def test_default_revctrl():
    """
    When _default_revctrl was removed from the ``setuptools.command.sdist``
    module in 10.0, it broke some systems which keep an old install of
    setuptools (Distribute) around. Those old versions require that the
    setuptools package continue to implement that interface, so this
    function provides that interface, stubbed. See #320 for details.

    This interface must be maintained until Ubuntu 12.04 is no longer
    supported (by Setuptools).
    """
    ep_def = 'svn_cvs = setuptools.command.sdist:_default_revctrl'
    ep = pkg_resources.EntryPoint.parse(ep_def)
    res = ep._load()
    assert hasattr(res, '__iter__')


class TestDummyOutput(environment.ZippedEnvironment):

    def setUp(self):
        self.datafile = os.path.join('setuptools', 'tests',
                                     'svn_data', "dummy.zip")
        self.dataname = "dummy"
        super(TestDummyOutput, self).setUp()

    def _run(self):
        code, data = environment.run_setup_py(["sdist"],
                                              pypath=self.old_cwd,
                                              data_stream=0)
        if code:
            info = "DIR: " + os.path.abspath('.')
            info += "\n  SDIST RETURNED: %i\n\n" % code
            info += data
            raise AssertionError(info)
datalines = data.splitlines()
possible = (
"running sdist",
"running egg_info",
"creating dummy\.egg-info",
"writing dummy\.egg-info",
"writing top-level names to dummy\.egg-info",
"writing dependency_links to dummy\.egg-info",
"writing manifest file 'dummy\.egg-info",
"reading manifest file 'dummy\.egg-info",
"reading manifest template 'MANIFEST\.in'",
"writing manifest file 'dummy\.egg-info",
"creating dummy-0.1.1",
"making hard links in dummy-0\.1\.1",
"copying files to dummy-0\.1\.1",
"copying \S+ -> dummy-0\.1\.1",
"copying dummy",
"copying dummy\.egg-info",
"hard linking \S+ -> dummy-0\.1\.1",
"hard linking dummy",
"hard linking dummy\.egg-info",
"Writing dummy-0\.1\.1",
"creating dist",
"creating 'dist",
"Creating tar archive",
"running check",
"adding 'dummy-0\.1\.1",
"tar .+ dist/dummy-0\.1\.1\.tar dummy-0\.1\.1",
"gzip .+ dist/dummy-0\.1\.1\.tar",
"removing 'dummy-0\.1\.1' \\(and everything under it\\)",
)
print(" DIR: " + os.path.abspath('.'))
for line in datalines:
found = False
for pattern in possible:
if re.match(pattern, line):
print(" READ: " + line)
found = True
break
if not found:
raise AssertionError("Unexpexected: %s\n-in-\n%s"
% (line, data))
return data
def test_sources(self):
self._run()
class TestSvn(environment.ZippedEnvironment):
def setUp(self):
version = svn_utils.SvnInfo.get_svn_version()
if not version: # None or Empty
return
self.base_version = tuple([int(x) for x in version.split('.')][:2])
if not self.base_version:
raise ValueError('No SVN tools installed')
elif self.base_version < (1, 3):
raise ValueError('Insufficient SVN Version %s' % version)
elif self.base_version >= (1, 9):
# trying the latest version
self.base_version = (1, 8)
self.dataname = "svn%i%i_example" % self.base_version
self.datafile = os.path.join('setuptools', 'tests',
'svn_data', self.dataname + ".zip")
super(TestSvn, self).setUp()
    @skipIf(not test_svn._svn_check, "No SVN to test, in the first place")
def test_walksvn(self):
if self.base_version >= (1, 6):
folder2 = 'third party2'
folder3 = 'third party3'
else:
folder2 = 'third_party2'
folder3 = 'third_party3'
# TODO is this right
expected = set([
os.path.join('a file'),
os.path.join(folder2, 'Changes.txt'),
os.path.join(folder2, 'MD5SUMS'),
os.path.join(folder2, 'README.txt'),
os.path.join(folder3, 'Changes.txt'),
os.path.join(folder3, 'MD5SUMS'),
os.path.join(folder3, 'README.txt'),
os.path.join(folder3, 'TODO.txt'),
os.path.join(folder3, 'fin'),
os.path.join('third_party', 'README.txt'),
os.path.join('folder', folder2, 'Changes.txt'),
os.path.join('folder', folder2, 'MD5SUMS'),
os.path.join('folder', folder2, 'WatashiNiYomimasu.txt'),
os.path.join('folder', folder3, 'Changes.txt'),
os.path.join('folder', folder3, 'fin'),
os.path.join('folder', folder3, 'MD5SUMS'),
os.path.join('folder', folder3, 'oops'),
os.path.join('folder', folder3, 'WatashiNiYomimasu.txt'),
os.path.join('folder', folder3, 'ZuMachen.txt'),
os.path.join('folder', 'third_party', 'WatashiNiYomimasu.txt'),
os.path.join('folder', 'lalala.txt'),
os.path.join('folder', 'quest.txt'),
# The example will have a deleted file
# (or should) but shouldn't return it
])
self.assertEqual(set(x for x in walk_revctrl()), expected)
def test_suite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
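The ``_default_revctrl`` compatibility shim exercised by ``test_default_revctrl`` above only has to keep the old entry-point target importable and hand back an object with ``__iter__``, which is all the assertion checks. One body that satisfies that assertion is simply binding the name to ``list``; the exact body in setuptools may differ, so treat this as an illustrative sketch:

    # setuptools/command/sdist.py (illustrative only)
    # Legacy Distribute installs load the entry point
    # 'svn_cvs = setuptools.command.sdist:_default_revctrl',
    # so the name must keep resolving to an iterable-returning callable.
    _default_revctrl = list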
# -*- coding: utf-8 -*-
"""svn tests"""
import io
import os
import subprocess
import sys
import unittest
import six
from setuptools.tests import environment
from setuptools import svn_utils
from setuptools.tests.py26compat import skipIf
def _do_svn_check():
try:
subprocess.check_call(["svn", "--version"],
shell=(sys.platform == 'win32'))
return True
except (OSError, subprocess.CalledProcessError):
return False
_svn_check = _do_svn_check()
class TestSvnVersion(unittest.TestCase):
def test_no_svn_found(self):
path_variable = None
for env in os.environ:
if env.lower() == 'path':
path_variable = env
if path_variable is None:
try:
self.skipTest('Cannot figure out how to modify path')
except AttributeError: # PY26 doesn't have this
return
old_path = os.environ[path_variable]
os.environ[path_variable] = ''
try:
version = svn_utils.SvnInfo.get_svn_version()
self.assertEqual(version, '')
finally:
os.environ[path_variable] = old_path
@skipIf(not _svn_check, "No SVN to test, in the first place")
def test_svn_should_exist(self):
version = svn_utils.SvnInfo.get_svn_version()
self.assertNotEqual(version, '')
def _read_utf8_file(path):
fileobj = None
try:
fileobj = io.open(path, 'r', encoding='utf-8')
data = fileobj.read()
return data
finally:
if fileobj:
fileobj.close()
class ParserInfoXML(unittest.TestCase):
def parse_tester(self, svn_name, ext_spaces):
path = os.path.join('setuptools', 'tests',
'svn_data', svn_name + '_info.xml')
#Remember these are pre-generated to test XML parsing
# so these paths might not be valid on your system
example_base = "%s_example" % svn_name
data = _read_utf8_file(path)
expected = set([
("\\".join((example_base, 'a file')), 'file'),
("\\".join((example_base, 'folder')), 'dir'),
("\\".join((example_base, 'folder', 'lalala.txt')), 'file'),
("\\".join((example_base, 'folder', 'quest.txt')), 'file'),
])
self.assertEqual(set(x for x in svn_utils.parse_dir_entries(data)),
expected)
def test_svn13(self):
self.parse_tester('svn13', False)
def test_svn14(self):
self.parse_tester('svn14', False)
def test_svn15(self):
self.parse_tester('svn15', False)
def test_svn16(self):
self.parse_tester('svn16', True)
def test_svn17(self):
self.parse_tester('svn17', True)
def test_svn18(self):
self.parse_tester('svn18', True)
class ParserExternalXML(unittest.TestCase):
def parse_tester(self, svn_name, ext_spaces):
path = os.path.join('setuptools', 'tests',
'svn_data', svn_name + '_ext_list.xml')
example_base = svn_name + '_example'
data = _read_utf8_file(path)
if ext_spaces:
folder2 = 'third party2'
folder3 = 'third party3'
else:
folder2 = 'third_party2'
folder3 = 'third_party3'
expected = set([
os.sep.join((example_base, folder2)),
os.sep.join((example_base, folder3)),
# folder is third_party大介
os.sep.join((example_base,
six.text_type('third_party') +
six.unichr(0x5927) + six.unichr(0x4ecb))),
os.sep.join((example_base, 'folder', folder2)),
os.sep.join((example_base, 'folder', folder3)),
os.sep.join((example_base, 'folder',
six.text_type('third_party') +
six.unichr(0x5927) + six.unichr(0x4ecb))),
])
expected = set(os.path.normpath(x) for x in expected)
dir_base = os.sep.join(('C:', 'development', 'svn_example'))
self.assertEqual(set(x for x
in svn_utils.parse_externals_xml(data, dir_base)), expected)
def test_svn15(self):
self.parse_tester('svn15', False)
def test_svn16(self):
self.parse_tester('svn16', True)
def test_svn17(self):
self.parse_tester('svn17', True)
def test_svn18(self):
self.parse_tester('svn18', True)
class ParseExternal(unittest.TestCase):
def parse_tester(self, svn_name, ext_spaces):
path = os.path.join('setuptools', 'tests',
'svn_data', svn_name + '_ext_list.txt')
data = _read_utf8_file(path)
if ext_spaces:
expected = set(['third party2', 'third party3',
'third party3b', 'third_party'])
else:
expected = set(['third_party2', 'third_party3', 'third_party'])
self.assertEqual(set(x for x in svn_utils.parse_external_prop(data)),
expected)
def test_svn13(self):
self.parse_tester('svn13', False)
def test_svn14(self):
self.parse_tester('svn14', False)
def test_svn15(self):
self.parse_tester('svn15', False)
def test_svn16(self):
self.parse_tester('svn16', True)
def test_svn17(self):
self.parse_tester('svn17', True)
def test_svn18(self):
self.parse_tester('svn18', True)
class TestSvn(environment.ZippedEnvironment):
def setUp(self):
version = svn_utils.SvnInfo.get_svn_version()
if not version: # empty or null
self.dataname = None
self.datafile = None
return
self.base_version = tuple([int(x) for x in version.split('.')[:2]])
if self.base_version < (1,3):
raise ValueError('Insufficient SVN Version %s' % version)
elif self.base_version >= (1,9):
#trying the latest version
self.base_version = (1,8)
self.dataname = "svn%i%i_example" % self.base_version
self.datafile = os.path.join('setuptools', 'tests',
'svn_data', self.dataname + ".zip")
super(TestSvn, self).setUp()
@skipIf(not _svn_check, "No SVN to test, in the first place")
def test_revision(self):
rev = svn_utils.SvnInfo.load('.').get_revision()
self.assertEqual(rev, 6)
@skipIf(not _svn_check, "No SVN to test, in the first place")
def test_entries(self):
expected = set([
(os.path.join('a file'), 'file'),
(os.path.join('folder'), 'dir'),
(os.path.join('folder', 'lalala.txt'), 'file'),
(os.path.join('folder', 'quest.txt'), 'file'),
#The example will have a deleted file (or should)
#but shouldn't return it
])
info = svn_utils.SvnInfo.load('.')
self.assertEqual(set(x for x in info.entries), expected)
@skipIf(not _svn_check, "No SVN to test, in the first place")
def test_externals(self):
if self.base_version >= (1,6):
folder2 = 'third party2'
folder3 = 'third party3'
else:
folder2 = 'third_party2'
folder3 = 'third_party3'
expected = set([
os.path.join(folder2),
os.path.join(folder3),
os.path.join('third_party'),
os.path.join('folder', folder2),
os.path.join('folder', folder3),
os.path.join('folder', 'third_party'),
])
info = svn_utils.SvnInfo.load('.')
self.assertEqual(set([x for x in info.externals]), expected)
def test_suite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
# -*- coding: UTF-8 -*- # -*- coding: UTF-8 -*-
"""develop tests from __future__ import unicode_literals
"""
import os import os
import shutil
import site import site
import sys
import tempfile
import unittest
from distutils.errors import DistutilsError from distutils.errors import DistutilsError
import six import six
import pytest
from setuptools.command.test import test from setuptools.command.test import test
from setuptools.command import easy_install as easy_install_pkg
from setuptools.dist import Distribution from setuptools.dist import Distribution
SETUP_PY = """\ from .textwrap import DALS
from setuptools import setup from . import contexts
setup(name='foo', SETUP_PY = DALS("""
packages=['name', 'name.space', 'name.space.tests'], from setuptools import setup
namespace_packages=['name'],
test_suite='name.space.tests.test_suite', setup(name='foo',
) packages=['name', 'name.space', 'name.space.tests'],
""" namespace_packages=['name'],
test_suite='name.space.tests.test_suite',
NS_INIT = """# -*- coding: Latin-1 -*- )
# Söme Arbiträry Ünicode to test Issüé 310 """)
try:
__import__('pkg_resources').declare_namespace(__name__) NS_INIT = DALS("""
except ImportError: # -*- coding: Latin-1 -*-
from pkgutil import extend_path # Söme Arbiträry Ünicode to test Distribute Issüé 310
__path__ = extend_path(__path__, __name__) try:
""" __import__('pkg_resources').declare_namespace(__name__)
# Make sure this is Latin-1 binary, before writing: except ImportError:
if six.PY2: from pkgutil import extend_path
NS_INIT = NS_INIT.decode('UTF-8') __path__ = extend_path(__path__, __name__)
NS_INIT = NS_INIT.encode('Latin-1') """)
TEST_PY = """import unittest TEST_PY = DALS("""
import unittest
class TestTest(unittest.TestCase):
def test_test(self): class TestTest(unittest.TestCase):
print "Foo" # Should fail under Python 3 unless 2to3 is used def test_test(self):
print "Foo" # Should fail under Python 3 unless 2to3 is used
test_suite = unittest.makeSuite(TestTest) test_suite = unittest.makeSuite(TestTest)
""" """)
class TestTestTest(unittest.TestCase):
def setUp(self): @pytest.fixture
if sys.version < "2.6" or hasattr(sys, 'real_prefix'): def sample_test(tmpdir_cwd):
return os.makedirs('name/space/tests')
# Directory structure # setup.py
self.dir = tempfile.mkdtemp() with open('setup.py', 'wt') as f:
os.mkdir(os.path.join(self.dir, 'name'))
os.mkdir(os.path.join(self.dir, 'name', 'space'))
os.mkdir(os.path.join(self.dir, 'name', 'space', 'tests'))
# setup.py
setup = os.path.join(self.dir, 'setup.py')
f = open(setup, 'wt')
f.write(SETUP_PY) f.write(SETUP_PY)
f.close()
self.old_cwd = os.getcwd() # name/__init__.py
# name/__init__.py with open('name/__init__.py', 'wb') as f:
init = os.path.join(self.dir, 'name', '__init__.py') f.write(NS_INIT.encode('Latin-1'))
f = open(init, 'wb')
f.write(NS_INIT) # name/space/__init__.py
f.close() with open('name/space/__init__.py', 'wt') as f:
# name/space/__init__.py
init = os.path.join(self.dir, 'name', 'space', '__init__.py')
f = open(init, 'wt')
f.write('#empty\n') f.write('#empty\n')
f.close()
# name/space/tests/__init__.py
init = os.path.join(self.dir, 'name', 'space', 'tests', '__init__.py')
f = open(init, 'wt')
f.write(TEST_PY)
f.close()
os.chdir(self.dir) # name/space/tests/__init__.py
self.old_base = site.USER_BASE with open('name/space/tests/__init__.py', 'wt') as f:
site.USER_BASE = tempfile.mkdtemp() f.write(TEST_PY)
self.old_site = site.USER_SITE
site.USER_SITE = tempfile.mkdtemp()
def tearDown(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
return
os.chdir(self.old_cwd) @pytest.mark.skipif('hasattr(sys, "real_prefix")')
shutil.rmtree(self.dir) @pytest.mark.usefixtures('user_override')
shutil.rmtree(site.USER_BASE) @pytest.mark.usefixtures('sample_test')
shutil.rmtree(site.USER_SITE) class TestTestTest:
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
def test_test(self): def test_test(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix'): params = dict(
return
dist = Distribution(dict(
name='foo', name='foo',
packages=['name', 'name.space', 'name.space.tests'], packages=['name', 'name.space', 'name.space.tests'],
namespace_packages=['name'], namespace_packages=['name'],
test_suite='name.space.tests.test_suite', test_suite='name.space.tests.test_suite',
use_2to3=True, use_2to3=True,
)) )
dist = Distribution(params)
dist.script_name = 'setup.py' dist.script_name = 'setup.py'
cmd = test(dist) cmd = test(dist)
cmd.user = 1 cmd.user = 1
cmd.ensure_finalized() cmd.ensure_finalized()
cmd.install_dir = site.USER_SITE cmd.install_dir = site.USER_SITE
cmd.user = 1 cmd.user = 1
old_stdout = sys.stdout with contexts.quiet():
sys.stdout = six.StringIO() # The test runner calls sys.exit
try: with contexts.suppress_exceptions(SystemExit):
try: # try/except/finally doesn't work in Python 2.4, so we need nested try-statements.
cmd.run() cmd.run()
except SystemExit: # The test runner calls sys.exit, stop that making an error.
pass
finally:
sys.stdout = old_stdout
"""build_ext tests import os
""" import zipfile
import sys, os, shutil, tempfile, unittest, site, zipfile import contextlib
import pytest
from setuptools.command.upload_docs import upload_docs from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution from setuptools.dist import Distribution
SETUP_PY = """\ from .textwrap import DALS
from setuptools import setup from . import contexts
setup(name='foo') SETUP_PY = DALS(
""" """
from setuptools import setup
class TestUploadDocsTest(unittest.TestCase): setup(name='foo')
def setUp(self): """)
self.dir = tempfile.mkdtemp()
setup = os.path.join(self.dir, 'setup.py')
f = open(setup, 'w') @pytest.fixture
def sample_project(tmpdir_cwd):
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY) f.write(SETUP_PY)
f.close()
self.old_cwd = os.getcwd()
os.chdir(self.dir)
self.upload_dir = os.path.join(self.dir, 'build') os.mkdir('build')
os.mkdir(self.upload_dir)
# A test document. # A test document.
f = open(os.path.join(self.upload_dir, 'index.html'), 'w') with open('build/index.html', 'w') as f:
f.write("Hello world.") f.write("Hello world.")
f.close()
# An empty folder.
# An empty folder. os.mkdir('build/empty')
os.mkdir(os.path.join(self.upload_dir, 'empty'))
if sys.version >= "2.6": @pytest.mark.usefixtures('sample_project')
self.old_base = site.USER_BASE @pytest.mark.usefixtures('user_override')
site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp() class TestUploadDocsTest:
self.old_site = site.USER_SITE
site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp()
def tearDown(self):
os.chdir(self.old_cwd)
shutil.rmtree(self.dir)
if sys.version >= "2.6":
shutil.rmtree(site.USER_BASE)
shutil.rmtree(site.USER_SITE)
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
def test_create_zipfile(self): def test_create_zipfile(self):
# Test to make sure zipfile creation handles common cases. """
# This explicitly includes a folder containing an empty folder. Ensure zipfile creation handles common cases, including a folder
containing an empty folder.
"""
dist = Distribution() dist = Distribution()
cmd = upload_docs(dist) cmd = upload_docs(dist)
cmd.upload_dir = self.upload_dir cmd.target_dir = cmd.upload_dir = 'build'
cmd.target_dir = self.upload_dir with contexts.tempdir() as tmp_dir:
tmp_dir = tempfile.mkdtemp() tmp_file = os.path.join(tmp_dir, 'foo.zip')
tmp_file = os.path.join(tmp_dir, 'foo.zip')
try:
zip_file = cmd.create_zipfile(tmp_file) zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file) assert zipfile.is_zipfile(tmp_file)
zip_file = zipfile.ZipFile(tmp_file) # woh... with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file:
assert zip_file.namelist() == ['index.html']
assert zip_file.namelist() == ['index.html']
zip_file.close()
finally:
shutil.rmtree(tmp_dir)
"""
Python Script Wrapper for Windows
=================================
setuptools includes wrappers for Python scripts that allow them to be
executed like regular Windows programs. There are 2 wrappers, one
for command-line programs, cli.exe, and one for graphical programs,
gui.exe. These programs are almost identical, function pretty much
the same way, and are generated from the same source file. The
wrapper programs are used by copying them to the directory containing
the script they are to wrap and giving the copy the same name as that
script.
"""
from __future__ import absolute_import
import sys
import textwrap
import subprocess
import pytest
from setuptools.command.easy_install import nt_quote_arg
import pkg_resources
pytestmark = pytest.mark.skipif(sys.platform != 'win32', reason="Windows only")
class WrapperTester:
@classmethod
def prep_script(cls, template):
python_exe = nt_quote_arg(sys.executable)
return template % locals()
@classmethod
def create_script(cls, tmpdir):
"""
Create a simple script, foo-script.py
Note that the script starts with a Unix-style '#!' line saying which
Python executable to run. The wrapper will use this line to find the
correct Python executable.
"""
script = cls.prep_script(cls.script_tmpl)
with (tmpdir / cls.script_name).open('w') as f:
f.write(script)
# also copy cli.exe to the sample directory
with (tmpdir / cls.wrapper_name).open('wb') as f:
w = pkg_resources.resource_string('setuptools', cls.wrapper_source)
f.write(w)
class TestCLI(WrapperTester):
script_name = 'foo-script.py'
wrapper_source = 'cli-32.exe'
wrapper_name = 'foo.exe'
script_tmpl = textwrap.dedent("""
#!%(python_exe)s
import sys
input = repr(sys.stdin.read())
print(sys.argv[0][-14:])
print(sys.argv[1:])
print(input)
if __debug__:
print('non-optimized')
""").lstrip()
def test_basic(self, tmpdir):
"""
When the copy of cli.exe, foo.exe in this example, runs, it examines
the path name it was run with and computes a Python script path name
by removing the '.exe' suffix and adding the '-script.py' suffix. (For
GUI programs, the suffix '-script.pyw' is added.) This is why we
named our script the way we did. Now we can run our script by running
the wrapper:
This example was a little pathological in that it exercised Windows
(MS C runtime) quoting rules:
- Strings containing spaces are surrounded by double quotes.
- Double quotes in strings need to be escaped by preceding them with
back slashes.
- One or more backslashes preceding double quotes need to be escaped
by preceding each of them with back slashes.
"""
self.create_script(tmpdir)
cmd = [
str(tmpdir / 'foo.exe'),
'arg1',
'arg 2',
'arg "2\\"',
'arg 4\\',
'arg5 a\\\\b',
]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
stdout, stderr = proc.communicate('hello\nworld\n'.encode('ascii'))
actual = stdout.decode('ascii').replace('\r\n', '\n')
expected = textwrap.dedent(r"""
\foo-script.py
['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b']
'hello\nworld\n'
non-optimized
""").lstrip()
assert actual == expected
def test_with_options(self, tmpdir):
"""
Specifying Python Command-line Options
--------------------------------------
You can specify a single argument on the '#!' line. This can be used
to specify Python options like -O, to run in optimized mode or -i
to start the interactive interpreter. You can combine multiple
options as usual. For example, to run in optimized mode and
enter the interpreter after running the script, you could use -Oi:
"""
self.create_script(tmpdir)
tmpl = textwrap.dedent("""
#!%(python_exe)s -Oi
import sys
input = repr(sys.stdin.read())
print(sys.argv[0][-14:])
print(sys.argv[1:])
print(input)
if __debug__:
print('non-optimized')
sys.ps1 = '---'
""").lstrip()
with (tmpdir / 'foo-script.py').open('w') as f:
f.write(self.prep_script(tmpl))
cmd = [str(tmpdir / 'foo.exe')]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, stderr = proc.communicate()
actual = stdout.decode('ascii').replace('\r\n', '\n')
expected = textwrap.dedent(r"""
\foo-script.py
[]
''
---
""").lstrip()
assert actual == expected
class TestGUI(WrapperTester):
"""
Testing the GUI Version
-----------------------
"""
script_name = 'bar-script.pyw'
wrapper_source = 'gui-32.exe'
wrapper_name = 'bar.exe'
script_tmpl = textwrap.dedent("""
#!%(python_exe)s
import sys
f = open(sys.argv[1], 'wb')
bytes_written = f.write(repr(sys.argv[2]).encode('utf-8'))
f.close()
""").strip()
def test_basic(self, tmpdir):
"""Test the GUI version with the simple scipt, bar-script.py"""
self.create_script(tmpdir)
cmd = [
str(tmpdir / 'bar.exe'),
str(tmpdir / 'test_output.txt'),
'Test Argument',
]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, stderr = proc.communicate()
assert not stdout
assert not stderr
with (tmpdir / 'test_output.txt').open('rb') as f_out:
actual = f_out.read().decode('ascii')
assert actual == repr('Test Argument')
from __future__ import absolute_import
import textwrap
def DALS(s):
"dedent and left-strip"
return textwrap.dedent(s).lstrip()
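DALS ("dedent and left-strip") is what lets the test templates above be written as indented triple-quoted strings; a quick, hypothetical usage sketch showing the transformation it performs:

    import textwrap

    def DALS(s):
        "dedent and left-strip"
        return textwrap.dedent(s).lstrip()

    template = DALS("""
        from setuptools import setup
        setup(name='foo')
        """)
    # The common indentation and the leading blank line are stripped, so the
    # template starts at column zero, just as if it had been written flat.
    assert template == "from setuptools import setup\nsetup(name='foo')\n"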
Python Script Wrapper for Windows
=================================
setuptools includes wrappers for Python scripts that allow them to be
executed like regular Windows programs. There are 2 wrappers, one
for command-line programs, cli.exe, and one for graphical programs,
gui.exe. These programs are almost identical, function pretty much
the same way, and are generated from the same source file. The
wrapper programs are used by copying them to the directory containing
the script they are to wrap and giving the copy the same name as that
script. In the rest of this document, we'll give an example that
will illustrate this.
Let's create a simple script, foo-script.py:
>>> import os, sys, tempfile
>>> from setuptools.command.easy_install import nt_quote_arg
>>> sample_directory = tempfile.mkdtemp()
>>> f = open(os.path.join(sample_directory, 'foo-script.py'), 'w')
>>> bytes_written = f.write(
... """#!%(python_exe)s
... import sys
... input = repr(sys.stdin.read())
... print(sys.argv[0][-14:])
... print(sys.argv[1:])
... print(input)
... if __debug__:
... print('non-optimized')
... """ % dict(python_exe=nt_quote_arg(sys.executable)))
>>> f.close()
Note that the script starts with a Unix-style '#!' line saying which
Python executable to run. The wrapper will use this to find the
correct Python executable.
We'll also copy cli.exe to the sample-directory with the name foo.exe:
>>> import pkg_resources
>>> f = open(os.path.join(sample_directory, 'foo.exe'), 'wb')
>>> bytes_written = f.write(
... pkg_resources.resource_string('setuptools', 'cli-32.exe')
... )
>>> f.close()
When the copy of cli.exe, foo.exe in this example, runs, it examines
the path name it was run with and computes a Python script path name
by removing the '.exe' suffix and adding the '-script.py' suffix. (For
GUI programs, the suffix '-script.pyw' is added.) This is why we
named our script the way we did. Now we can run our script by running
the wrapper:
>>> import subprocess
>>> cmd = [os.path.join(sample_directory, 'foo.exe'), 'arg1', 'arg 2',
... 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b']
>>> proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
>>> stdout, stderr = proc.communicate('hello\nworld\n'.encode('ascii'))
>>> bytes = sys.stdout.write(stdout.decode('ascii').replace('\r\n', '\n'))
\foo-script.py
['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b']
'hello\nworld\n'
non-optimized
This example was a little pathological in that it exercised Windows
(MS C runtime) quoting rules:
- Strings containing spaces are surrounded by double quotes.
- Double quotes in strings need to be escaped by preceding them with
back slashes.
- One or more backslashes preceding double quotes need to be escaped
by preceding each of them with back slashes.
Specifying Python Command-line Options
--------------------------------------
You can specify a single argument on the '#!' line. This can be used
to specify Python options like -O, to run in optimized mode or -i
to start the interactive interpreter. You can combine multiple
options as usual. For example, to run in optimized mode and
enter the interpreter after running the script, you could use -Oi:
>>> f = open(os.path.join(sample_directory, 'foo-script.py'), 'w')
>>> bytes_written = f.write(
... """#!%(python_exe)s -Oi
... import sys
... input = repr(sys.stdin.read())
... print(sys.argv[0][-14:])
... print(sys.argv[1:])
... print(input)
... if __debug__:
... print('non-optimized')
... sys.ps1 = '---'
... """ % dict(python_exe=nt_quote_arg(sys.executable)))
>>> f.close()
>>> cmd = [os.path.join(sample_directory, 'foo.exe')]
>>> proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
>>> stdout, stderr = proc.communicate()
>>> bytes = sys.stdout.write(stdout.decode('ascii').replace('\r\n', '\n'))
\foo-script.py
[]
''
---
Testing the GUI Version
-----------------------
Now let's test the GUI version with the simple script, bar-script.pyw:
>>> import os, sys, tempfile
>>> from setuptools.command.easy_install import nt_quote_arg
>>> sample_directory = tempfile.mkdtemp()
>>> f = open(os.path.join(sample_directory, 'bar-script.pyw'), 'w')
>>> bytes_written = f.write(
... """#!%(python_exe)s
... import sys
... f = open(sys.argv[1], 'wb')
... bytes_written = f.write(repr(sys.argv[2]).encode('utf-8'))
... f.close()
... """ % dict(python_exe=nt_quote_arg(sys.executable)))
>>> f.close()
We'll also copy gui.exe to the sample-directory with the name bar.exe:
>>> import pkg_resources
>>> f = open(os.path.join(sample_directory, 'bar.exe'), 'wb')
>>> bytes_written = f.write(
... pkg_resources.resource_string('setuptools', 'gui-32.exe')
... )
>>> f.close()
Finally, we'll run the script and check the result:
>>> cmd = [
... os.path.join(sample_directory, 'bar.exe'),
... os.path.join(sample_directory, 'test_output.txt'),
... 'Test Argument',
... ]
>>> proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
>>> stdout, stderr = proc.communicate()
>>> print(stdout.decode('ascii'))
<BLANKLINE>
>>> f_out = open(os.path.join(sample_directory, 'test_output.txt'), 'rb')
>>> print(f_out.read().decode('ascii'))
'Test Argument'
>>> f_out.close()
We're done with the sample_directory:
>>> import shutil
>>> shutil.rmtree(sample_directory)
__version__ = '7.1' __version__ = '10.2.2'
/* Generated by Pyrex 0.9.3 on Thu Jan 05 17:47:12 2006 */
#include "Python.h"
#include "structmember.h"
#ifndef PY_LONG_LONG
#define PY_LONG_LONG LONG_LONG
#endif
typedef struct {PyObject **p; char *s;} __Pyx_InternTabEntry; /*proto*/
typedef struct {PyObject **p; char *s; long n;} __Pyx_StringTabEntry; /*proto*/
static PyObject *__Pyx_UnpackItem(PyObject *, int); /*proto*/
static int __Pyx_EndUnpack(PyObject *, int); /*proto*/
static int __Pyx_PrintItem(PyObject *); /*proto*/
static int __Pyx_PrintNewline(void); /*proto*/
static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb); /*proto*/
static void __Pyx_ReRaise(void); /*proto*/
static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list); /*proto*/
static PyObject *__Pyx_GetExcValue(void); /*proto*/
static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, char *name); /*proto*/
static int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); /*proto*/
static int __Pyx_GetStarArgs(PyObject **args, PyObject **kwds, char *kwd_list[], int nargs, PyObject **args2, PyObject **kwds2); /*proto*/
static void __Pyx_WriteUnraisable(char *name); /*proto*/
static void __Pyx_AddTraceback(char *funcname); /*proto*/
static PyTypeObject *__Pyx_ImportType(char *module_name, char *class_name, long size); /*proto*/
static int __Pyx_SetVtable(PyObject *dict, void *vtable); /*proto*/
static int __Pyx_GetVtable(PyObject *dict, void *vtabptr); /*proto*/
static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name, char *modname); /*proto*/
static int __Pyx_InternStrings(__Pyx_InternTabEntry *t); /*proto*/
static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/
static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/
static PyObject *__pyx_m;
static PyObject *__pyx_b;
static int __pyx_lineno;
static char *__pyx_filename;
staticforward char **__pyx_f;
/* Declarations from hello */
char (*(get_hello_msg(void))); /*proto*/
/* Implementation of hello */
static PyObject *__pyx_n_hello;
static PyObject *__pyx_f_5hello_hello(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static PyObject *__pyx_f_5hello_hello(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_r;
PyObject *__pyx_1 = 0;
static char *__pyx_argnames[] = {0};
if (!PyArg_ParseTupleAndKeywords(__pyx_args, __pyx_kwds, "", __pyx_argnames)) return 0;
/* "C:\cygwin\home\pje\setuptools\tests\shlib_test\hello.pyx":4 */
__pyx_1 = PyString_FromString(get_hello_msg()); if (!__pyx_1) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 4; goto __pyx_L1;}
__pyx_r = __pyx_1;
__pyx_1 = 0;
goto __pyx_L0;
__pyx_r = Py_None; Py_INCREF(__pyx_r);
goto __pyx_L0;
__pyx_L1:;
Py_XDECREF(__pyx_1);
__Pyx_AddTraceback("hello.hello");
__pyx_r = 0;
__pyx_L0:;
return __pyx_r;
}
static __Pyx_InternTabEntry __pyx_intern_tab[] = {
{&__pyx_n_hello, "hello"},
{0, 0}
};
static struct PyMethodDef __pyx_methods[] = {
{"hello", (PyCFunction)__pyx_f_5hello_hello, METH_VARARGS|METH_KEYWORDS, 0},
{0, 0, 0, 0}
};
DL_EXPORT(void) inithello(void); /*proto*/
DL_EXPORT(void) inithello(void) {
__pyx_m = Py_InitModule4("hello", __pyx_methods, 0, 0, PYTHON_API_VERSION);
if (!__pyx_m) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;};
__pyx_b = PyImport_AddModule("__builtin__");
if (!__pyx_b) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;};
if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;};
if (__Pyx_InternStrings(__pyx_intern_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;};
/* "C:\cygwin\home\pje\setuptools\tests\shlib_test\hello.pyx":3 */
return;
__pyx_L1:;
__Pyx_AddTraceback("hello");
}
static char *__pyx_filenames[] = {
"hello.pyx",
};
statichere char **__pyx_f = __pyx_filenames;
/* Runtime support code */
static int __Pyx_InternStrings(__Pyx_InternTabEntry *t) {
while (t->p) {
*t->p = PyString_InternFromString(t->s);
if (!*t->p)
return -1;
++t;
}
return 0;
}
#include "compile.h"
#include "frameobject.h"
#include "traceback.h"
static void __Pyx_AddTraceback(char *funcname) {
PyObject *py_srcfile = 0;
PyObject *py_funcname = 0;
PyObject *py_globals = 0;
PyObject *empty_tuple = 0;
PyObject *empty_string = 0;
PyCodeObject *py_code = 0;
PyFrameObject *py_frame = 0;
py_srcfile = PyString_FromString(__pyx_filename);
if (!py_srcfile) goto bad;
py_funcname = PyString_FromString(funcname);
if (!py_funcname) goto bad;
py_globals = PyModule_GetDict(__pyx_m);
if (!py_globals) goto bad;
empty_tuple = PyTuple_New(0);
if (!empty_tuple) goto bad;
empty_string = PyString_FromString("");
if (!empty_string) goto bad;
py_code = PyCode_New(
0, /*int argcount,*/
0, /*int nlocals,*/
0, /*int stacksize,*/
0, /*int flags,*/
empty_string, /*PyObject *code,*/
empty_tuple, /*PyObject *consts,*/
empty_tuple, /*PyObject *names,*/
empty_tuple, /*PyObject *varnames,*/
empty_tuple, /*PyObject *freevars,*/
empty_tuple, /*PyObject *cellvars,*/
py_srcfile, /*PyObject *filename,*/
py_funcname, /*PyObject *name,*/
__pyx_lineno, /*int firstlineno,*/
empty_string /*PyObject *lnotab*/
);
if (!py_code) goto bad;
py_frame = PyFrame_New(
PyThreadState_Get(), /*PyThreadState *tstate,*/
py_code, /*PyCodeObject *code,*/
py_globals, /*PyObject *globals,*/
0 /*PyObject *locals*/
);
if (!py_frame) goto bad;
py_frame->f_lineno = __pyx_lineno;
PyTraceBack_Here(py_frame);
bad:
Py_XDECREF(py_srcfile);
Py_XDECREF(py_funcname);
Py_XDECREF(empty_tuple);
Py_XDECREF(empty_string);
Py_XDECREF(py_code);
Py_XDECREF(py_frame);
}
cdef extern char *get_hello_msg()
def hello():
return get_hello_msg()
extern char* get_hello_msg() {
return "Hello, world!";
}
from setuptools import setup, Extension, Library
setup(
name="shlib_test",
ext_modules = [
Library("hellolib", ["hellolib.c"]),
Extension("hello", ["hello.pyx"], libraries=["hellolib"])
],
test_suite="test_hello.HelloWorldTest",
)
from unittest import TestCase
class HelloWorldTest(TestCase):
def testHelloMsg(self):
from hello import hello
self.assertEqual(hello(), "Hello, world!")
...@@ -4,6 +4,7 @@ import os ...@@ -4,6 +4,7 @@ import os
import zipfile import zipfile
import datetime import datetime
import time import time
import subprocess
import pkg_resources import pkg_resources
...@@ -89,3 +90,22 @@ class TestResourceManager(object): ...@@ -89,3 +90,22 @@ class TestResourceManager(object):
type_ = str(type(path)) type_ = str(type(path))
message = "Unexpected type from get_cache_path: " + type_ message = "Unexpected type from get_cache_path: " + type_
assert isinstance(path, (unicode, str)), message assert isinstance(path, (unicode, str)), message
class TestIndependence:
"""
Tests to ensure that pkg_resources runs independently from setuptools.
"""
def test_setuptools_not_imported(self):
"""
In a separate Python environment, import pkg_resources and assert
that action doesn't cause setuptools to be imported.
"""
lines = (
'import pkg_resources',
'import sys',
'assert "setuptools" not in sys.modules, '
'"setuptools was imported"',
)
cmd = [sys.executable, '-c', '; '.join(lines)]
subprocess.check_call(cmd)
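The same independence check can be reproduced outside the test suite; a sketch equivalent to the subprocess call above, run in a fresh interpreter so the current process cannot have imported setuptools already:

    import subprocess
    import sys

    probe = 'import pkg_resources, sys; assert "setuptools" not in sys.modules'
    subprocess.check_call([sys.executable, '-c', probe])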