setuptools: commit 470dd358
Authored Jan 16, 2021 by Jason R. Coombs; committed by GitHub on Jan 16, 2021
Merge pull request #2518 from webknjaz/maintenance/flake8-mccabe-complexity

Enable McCabe complexity check in flake8

Parents: 7e7b6b99, 6f74941c
Showing 12 changed files with 346 additions and 287 deletions
.flake8                              +3    -0
pkg_resources/__init__.py            +6    -3
setuptools/archive_util.py           +62   -32
setuptools/command/bdist_egg.py      +1    -1
setuptools/command/easy_install.py   +100  -74
setuptools/command/egg_info.py       +67   -62
setuptools/dist.py                   +31   -27
setuptools/glob.py                   +5    -12
setuptools/installer.py              +9    -16
setuptools/msvc.py                   +20   -24
setuptools/package_index.py          +41   -35
setuptools/ssl_support.py            +1    -1
.flake8

@@ -12,3 +12,6 @@ ignore =
     E203
     setuptools/site-patch.py F821
     setuptools/py*compat.py F811
+
+# Let's not overcomplicate the code:
+max-complexity = 10
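The added max-complexity = 10 setting turns on flake8's McCabe plugin: any function whose cyclomatic complexity exceeds 10 is reported as C901. The rest of the commit either refactors offending functions below that threshold or suppresses the check per definition with an inline comment. A minimal sketch of the suppression style used throughout this diff, on a hypothetical function that is not part of setuptools:

def classify(value):  # noqa: C901  # is too complex (11)  # FIXME
    # imagine a long if/elif chain here; flake8 would report it as
    # "C901 'classify' is too complex (11)" once the branch count pushes
    # the McCabe score past the configured max-complexity
    if value < 0:
        return 'negative'
    if value == 0:
        return 'zero'
    return 'positive'

The # noqa: C901 part silences only the complexity warning on that one definition, while the trailing FIXME keeps the measured score visible for later clean-up.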
pkg_resources/__init__.py

@@ -697,7 +697,8 @@ class WorkingSet:
             keys2.append(dist.key)
         self._added_new(dist)

-    def resolve(self, requirements, env=None, installer=None,
+    # FIXME: 'WorkingSet.resolve' is too complex (11)
+    def resolve(self, requirements, env=None, installer=None,  # noqa: C901
                 replace_conflicting=False, extras=None):
         """List all distributions needed to (recursively) meet `requirements`

@@ -1746,7 +1747,8 @@ class ZipProvider(EggProvider):
         timestamp = time.mktime(date_time)
         return timestamp, size

-    def _extract_resource(self, manager, zip_path):
+    # FIXME: 'ZipProvider._extract_resource' is too complex (12)
+    def _extract_resource(self, manager, zip_path):  # noqa: C901
         if zip_path in self._index():
             for name in self._index()[zip_path]:

@@ -2859,7 +2861,8 @@ class Distribution:
         """Return the EntryPoint object for `group`+`name`, or ``None``"""
         return self.get_entry_map(group).get(name)

-    def insert_on(self, path, loc=None, replace=False):
+    # FIXME: 'Distribution.insert_on' is too complex (13)
+    def insert_on(self, path, loc=None, replace=False):  # noqa: C901
        """Ensure self.location is on path

        If replace=False (default):
setuptools/archive_util.py

@@ -125,6 +125,56 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
             os.chmod(target, unix_attributes)


+def _resolve_tar_file_or_dir(tar_obj, tar_member_obj):
+    """Resolve any links and extract link targets as normal files."""
+    while tar_member_obj is not None and (
+            tar_member_obj.islnk() or tar_member_obj.issym()):
+        linkpath = tar_member_obj.linkname
+        if tar_member_obj.issym():
+            base = posixpath.dirname(tar_member_obj.name)
+            linkpath = posixpath.join(base, linkpath)
+            linkpath = posixpath.normpath(linkpath)
+        tar_member_obj = tar_obj._getmember(linkpath)
+
+    is_file_or_dir = (
+        tar_member_obj is not None and
+        (tar_member_obj.isfile() or tar_member_obj.isdir())
+    )
+    if is_file_or_dir:
+        return tar_member_obj
+
+    raise LookupError('Got unknown file type')
+
+
+def _iter_open_tar(tar_obj, extract_dir, progress_filter):
+    """Emit member-destination pairs from a tar archive."""
+    # don't do any chowning!
+    tar_obj.chown = lambda *args: None
+
+    with contextlib.closing(tar_obj):
+        for member in tar_obj:
+            name = member.name
+            # don't extract absolute paths or ones with .. in them
+            if name.startswith('/') or '..' in name.split('/'):
+                continue
+
+            prelim_dst = os.path.join(extract_dir, *name.split('/'))
+
+            try:
+                member = _resolve_tar_file_or_dir(tar_obj, member)
+            except LookupError:
+                continue
+
+            final_dst = progress_filter(name, prelim_dst)
+            if not final_dst:
+                continue
+
+            if final_dst.endswith(os.sep):
+                final_dst = final_dst[:-1]
+
+            yield member, final_dst
+
+
 def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
     """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
@@ -138,38 +188,18 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
         raise UnrecognizedFormat(
             "%s is not a compressed or uncompressed tar file" % (filename,)
         ) from e
-    with contextlib.closing(tarobj):
-        # don't do any chowning!
-        tarobj.chown = lambda *args: None
-        for member in tarobj:
-            name = member.name
-            # don't extract absolute paths or ones with .. in them
-            if not name.startswith('/') and '..' not in name.split('/'):
-                prelim_dst = os.path.join(extract_dir, *name.split('/'))
-
-                # resolve any links and to extract the link targets as normal
-                # files
-                while member is not None and (
-                        member.islnk() or member.issym()):
-                    linkpath = member.linkname
-                    if member.issym():
-                        base = posixpath.dirname(member.name)
-                        linkpath = posixpath.join(base, linkpath)
-                        linkpath = posixpath.normpath(linkpath)
-                    member = tarobj._getmember(linkpath)
-
-                if member is not None and (member.isfile() or member.isdir()):
-                    final_dst = progress_filter(name, prelim_dst)
-                    if final_dst:
-                        if final_dst.endswith(os.sep):
-                            final_dst = final_dst[:-1]
-                        try:
-                            # XXX Ugh
-                            tarobj._extract_member(member, final_dst)
-                        except tarfile.ExtractError:
-                            # chown/chmod/mkfifo/mknode/makedev failed
-                            pass
-    return True
+
+    for member, final_dst in _iter_open_tar(
+            tarobj, extract_dir, progress_filter,
+    ):
+        try:
+            # XXX Ugh
+            tarobj._extract_member(member, final_dst)
+        except tarfile.ExtractError:
+            # chown/chmod/mkfifo/mknode/makedev failed
+            pass
+
+    return True
+
+
 extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
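For context, a minimal usage sketch of the public entry point whose internals were just refactored; the archive name and destination directory here are assumptions, not part of the commit:

from setuptools.archive_util import unpack_tarfile

def noisy_filter(src, dst):
    # a progress_filter receives (archive member name, planned destination);
    # returning a falsy value skips the member, returning dst extracts it
    print('extracting', src)
    return dst

# assumption: example.tar.gz exists in the current directory
unpack_tarfile('example.tar.gz', 'build/unpacked', progress_filter=noisy_filter)

The behaviour is unchanged by the refactor: _iter_open_tar now yields the (member, destination) pairs that the old inline loop produced.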
setuptools/command/bdist_egg.py

@@ -153,7 +153,7 @@ class bdist_egg(Command):
                 self.run_command(cmdname)
             return cmd

-    def run(self):
+    def run(self):  # noqa: C901  # is too complex (14)  # FIXME
         # Generate metadata first
         self.run_command("egg_info")
         # We run install_lib before install_data, because some data hacks
setuptools/command/easy_install.py

@@ -226,7 +226,7 @@ class easy_install(Command):
             print(tmpl.format(**locals()))
         raise SystemExit()

-    def finalize_options(self):
+    def finalize_options(self):  # noqa: C901  # is too complex (25)  # FIXME
         self.version and self._render_version()

         py_version = sys.version.split()[0]
@@ -437,7 +437,7 @@ class easy_install(Command):
     def warn_deprecated_options(self):
         pass

-    def check_site_dir(self):
+    def check_site_dir(self):  # noqa: C901  # is too complex (12)  # FIXME
        """Verify that self.install_dir is .pth-capable dir, if needed"""

        instdir = normalize_path(self.install_dir)
@@ -713,7 +713,10 @@ class easy_install(Command):
             if getattr(self, attrname) is None:
                 setattr(self, attrname, scheme[key])

-    def process_distribution(self, requirement, dist, deps=True, *info):
+    # FIXME: 'easy_install.process_distribution' is too complex (12)
+    def process_distribution(  # noqa: C901
+            self, requirement, dist, deps=True, *info,
+    ):
         self.update_pth(dist)
         self.package_index.add(dist)
         if dist in self.local_index[dist.key]:
@@ -837,12 +840,19 @@ class easy_install(Command):
     def install_eggs(self, spec, dist_filename, tmpdir):
         # .egg dirs or files are already built, so just return them
-        if dist_filename.lower().endswith('.egg'):
-            return [self.install_egg(dist_filename, tmpdir)]
-        elif dist_filename.lower().endswith('.exe'):
-            return [self.install_exe(dist_filename, tmpdir)]
-        elif dist_filename.lower().endswith('.whl'):
-            return [self.install_wheel(dist_filename, tmpdir)]
+        installer_map = {
+            '.egg': self.install_egg,
+            '.exe': self.install_exe,
+            '.whl': self.install_wheel,
+        }
+        try:
+            install_dist = installer_map[
+                dist_filename.lower()[-4:]
+            ]
+        except KeyError:
+            pass
+        else:
+            return [install_dist(dist_filename, tmpdir)]

         # Anything else, try to extract and build
         setup_base = tmpdir
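The install_eggs rewrite above is the pattern this merge applies in several places: an if/elif chain, where every branch adds one to the McCabe score, becomes a lookup table plus a single call. A generic, standalone sketch of the same idea with made-up handler names, not setuptools code:

def handle_egg(path):
    return ('egg', path)

def handle_whl(path):
    return ('wheel', path)

# each elif branch would raise a function's complexity; a dict lookup keeps
# the dispatch at constant complexity no matter how many suffixes are added
INSTALLERS = {'.egg': handle_egg, '.whl': handle_whl}

def dispatch(filename):
    try:
        install = INSTALLERS[filename.lower()[-4:]]
    except KeyError:
        raise ValueError('unsupported distribution: %r' % (filename,))
    return install(filename)

print(dispatch('Sample-1.0.whl'))  # ('wheel', 'Sample-1.0.whl')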
@@ -887,7 +897,8 @@ class easy_install(Command):
             metadata = EggMetadata(zipimport.zipimporter(egg_path))
         return Distribution.from_filename(egg_path, metadata=metadata)

-    def install_egg(self, egg_path, tmpdir):
+    # FIXME: 'easy_install.install_egg' is too complex (11)
+    def install_egg(self, egg_path, tmpdir):  # noqa: C901
         destination = os.path.join(
             self.install_dir,
             os.path.basename(egg_path),
@@ -986,7 +997,8 @@ class easy_install(Command):
         # install the .egg
         return self.install_egg(egg_path, tmpdir)

-    def exe_to_egg(self, dist_filename, egg_tmp):
+    # FIXME: 'easy_install.exe_to_egg' is too complex (12)
+    def exe_to_egg(self, dist_filename, egg_tmp):  # noqa: C901
         """Extract a bdist_wininst to the directories an egg would use"""
         # Check for .pth file and set up prefix translations
         prefixes = get_exe_prefixes(dist_filename)
@@ -1184,16 +1196,18 @@ class easy_install(Command):
         cfg_filename = os.path.join(base, 'setup.cfg')
         setopt.edit_config(cfg_filename, settings)

-    def update_pth(self, dist):
+    def update_pth(self, dist):  # noqa: C901  # is too complex (11)  # FIXME
         if self.pth_file is None:
             return

         for d in self.pth_file[dist.key]:  # drop old entries
-            if self.multi_version or d.location != dist.location:
-                log.info("Removing %s from easy-install.pth file", d)
-                self.pth_file.remove(d)
-                if d.location in self.shadow_path:
-                    self.shadow_path.remove(d.location)
+            if not self.multi_version and d.location == dist.location:
+                continue
+
+            log.info("Removing %s from easy-install.pth file", d)
+            self.pth_file.remove(d)
+            if d.location in self.shadow_path:
+                self.shadow_path.remove(d.location)

         if not self.multi_version:
             if dist.location in self.pth_file.paths:
@@ -1207,19 +1221,21 @@ class easy_install(Command):
             if dist.location not in self.shadow_path:
                 self.shadow_path.append(dist.location)

-        if not self.dry_run:
+        if self.dry_run:
+            return

-            self.pth_file.save()
+        self.pth_file.save()

-            if dist.key == 'setuptools':
-                # Ensure that setuptools itself never becomes unavailable!
-                # XXX should this check for latest version?
-                filename = os.path.join(self.install_dir, 'setuptools.pth')
-                if os.path.islink(filename):
-                    os.unlink(filename)
-                f = open(filename, 'wt')
-                f.write(self.pth_file.make_relative(dist.location) + '\n')
-                f.close()
+        if dist.key != 'setuptools':
+            return
+
+        # Ensure that setuptools itself never becomes unavailable!
+        # XXX should this check for latest version?
+        filename = os.path.join(self.install_dir, 'setuptools.pth')
+        if os.path.islink(filename):
+            os.unlink(filename)
+
+        with open(filename, 'wt') as f:
+            f.write(self.pth_file.make_relative(dist.location) + '\n')

     def unpack_progress(self, src, dst):
         # Progress filter for unpacking
@@ -1360,58 +1376,63 @@ def get_site_dirs():
     if sys.exec_prefix != sys.prefix:
         prefixes.append(sys.exec_prefix)

     for prefix in prefixes:
-        if prefix:
-            if sys.platform in ('os2emx', 'riscos'):
-                sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
-            elif os.sep == '/':
-                sitedirs.extend([
-                    os.path.join(
-                        prefix,
-                        "lib",
-                        "python{}.{}".format(*sys.version_info),
-                        "site-packages",
-                    ),
-                    os.path.join(prefix, "lib", "site-python"),
-                ])
-            else:
-                sitedirs.extend([
-                    prefix,
-                    os.path.join(prefix, "lib", "site-packages"),
-                ])
-            if sys.platform == 'darwin':
-                # for framework builds *only* we add the standard Apple
-                # locations. Currently only per-user, but /Library and
-                # /Network/Library could be added too
-                if 'Python.framework' in prefix:
-                    home = os.environ.get('HOME')
-                    if home:
-                        home_sp = os.path.join(
-                            home,
-                            'Library',
-                            'Python',
-                            '{}.{}'.format(*sys.version_info),
-                            'site-packages',
-                        )
-                        sitedirs.append(home_sp)
+        if not prefix:
+            continue
+
+        if sys.platform in ('os2emx', 'riscos'):
+            sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
+        elif os.sep == '/':
+            sitedirs.extend([
+                os.path.join(
+                    prefix,
+                    "lib",
+                    "python{}.{}".format(*sys.version_info),
+                    "site-packages",
+                ),
+                os.path.join(prefix, "lib", "site-python"),
+            ])
+        else:
+            sitedirs.extend([
+                prefix,
+                os.path.join(prefix, "lib", "site-packages"),
+            ])
+
+        if sys.platform != 'darwin':
+            continue
+
+        # for framework builds *only* we add the standard Apple
+        # locations. Currently only per-user, but /Library and
+        # /Network/Library could be added too
+        if 'Python.framework' not in prefix:
+            continue
+
+        home = os.environ.get('HOME')
+        if not home:
+            continue
+
+        home_sp = os.path.join(
+            home,
+            'Library',
+            'Python',
+            '{}.{}'.format(*sys.version_info),
+            'site-packages',
+        )
+        sitedirs.append(home_sp)

     lib_paths = get_path('purelib'), get_path('platlib')
-    for site_lib in lib_paths:
-        if site_lib not in sitedirs:
-            sitedirs.append(site_lib)
+
+    sitedirs.extend(s for s in lib_paths if s not in sitedirs)

     if site.ENABLE_USER_SITE:
         sitedirs.append(site.USER_SITE)

-    try:
-        sitedirs.extend(site.getsitepackages())
-    except AttributeError:
-        pass
+    with contextlib.suppress(AttributeError):
+        sitedirs.extend(site.getsitepackages())

     sitedirs = list(map(normalize_path, sitedirs))

     return sitedirs


-def expand_paths(inputs):
+def expand_paths(inputs):  # noqa: C901  # is too complex (11)  # FIXME
     """Yield sys.path directories that might contain "old-style" packages"""

     seen = {}
@@ -1443,13 +1464,18 @@ def expand_paths(inputs):
             # Yield existing non-dupe, non-import directory lines from it
             for line in lines:
-                if not line.startswith("import"):
-                    line = normalize_path(line.rstrip())
-                    if line not in seen:
-                        seen[line] = 1
-                        if not os.path.isdir(line):
-                            continue
-                        yield line, os.listdir(line)
+                if line.startswith("import"):
+                    continue
+
+                line = normalize_path(line.rstrip())
+                if line in seen:
+                    continue
+
+                seen[line] = 1
+                if not os.path.isdir(line):
+                    continue
+
+                yield line, os.listdir(line)


 def extract_wininst_cfg(dist_filename):
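Several of the easy_install rewrites above (update_pth, expand_paths, get_site_dirs) use the same trick: invert a condition and continue or return early so the happy path stays at one indentation level and the McCabe counter sees fewer nested branches. A tiny standalone illustration of the guard-clause shape; the function is made up, not setuptools code:

def shout_visible(lines):
    """Collect upper-cased lines, skipping blanks and comments."""
    result = []
    for line in lines:
        line = line.strip()
        # guard clauses: bail out of this iteration early instead of
        # wrapping the rest of the body in nested if-blocks
        if not line:
            continue
        if line.startswith('#'):
            continue
        result.append(line.upper())
    return result

print(shout_visible(['keep me', '', '# a comment', 'and me']))  # ['KEEP ME', 'AND ME']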
setuptools/command/egg_info.py

@@ -8,6 +8,7 @@ from distutils.util import convert_path
 from distutils import log
 import distutils.errors
 import distutils.filelist
+import functools
 import os
 import re
 import sys
@@ -31,7 +32,7 @@ from setuptools.extern import packaging
 from setuptools import SetuptoolsDeprecationWarning


-def translate_pattern(glob):
+def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
     """
     Translate a file path glob like '*.txt' in to a regular expression.
     This differs from fnmatch.translate which allows wildcards to match
@@ -334,70 +335,74 @@ class FileList(_FileList):
         # patterns, (dir and patterns), or (dir_pattern).
         (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

+        action_map = {
+            'include': self.include,
+            'exclude': self.exclude,
+            'global-include': self.global_include,
+            'global-exclude': self.global_exclude,
+            'recursive-include': functools.partial(
+                self.recursive_include, dir,
+            ),
+            'recursive-exclude': functools.partial(
+                self.recursive_exclude, dir,
+            ),
+            'graft': self.graft,
+            'prune': self.prune,
+        }
+        log_map = {
+            'include': "warning: no files found matching '%s'",
+            'exclude': (
+                "warning: no previously-included files found "
+                "matching '%s'"
+            ),
+            'global-include': (
+                "warning: no files found matching '%s' "
+                "anywhere in distribution"
+            ),
+            'global-exclude': (
+                "warning: no previously-included files matching "
+                "'%s' found anywhere in distribution"
+            ),
+            'recursive-include': (
+                "warning: no files found matching '%s' "
+                "under directory '%s'"
+            ),
+            'recursive-exclude': (
+                "warning: no previously-included files matching "
+                "'%s' found under directory '%s'"
+            ),
+            'graft': "warning: no directories found matching '%s'",
+            'prune': "no previously-included directories found matching '%s'",
+        }
+
+        try:
+            process_action = action_map[action]
+        except KeyError:
+            raise DistutilsInternalError(
+                "this cannot happen: invalid action '{action!s}'".
+                format(action=action),
+            )
+
         # OK, now we know that the action is valid and we have the
         # right number of words on the line for that action -- so we
         # can proceed with minimal error-checking.
-        if action == 'include':
-            self.debug_print("include " + ' '.join(patterns))
-            for pattern in patterns:
-                if not self.include(pattern):
-                    log.warn("warning: no files found matching '%s'", pattern)
-
-        elif action == 'exclude':
-            self.debug_print("exclude " + ' '.join(patterns))
-            for pattern in patterns:
-                if not self.exclude(pattern):
-                    log.warn(("warning: no previously-included files "
-                              "found matching '%s'"), pattern)
-
-        elif action == 'global-include':
-            self.debug_print("global-include " + ' '.join(patterns))
-            for pattern in patterns:
-                if not self.global_include(pattern):
-                    log.warn(("warning: no files found matching '%s' "
-                              "anywhere in distribution"), pattern)
-
-        elif action == 'global-exclude':
-            self.debug_print("global-exclude " + ' '.join(patterns))
-            for pattern in patterns:
-                if not self.global_exclude(pattern):
-                    log.warn(("warning: no previously-included files matching "
-                              "'%s' found anywhere in distribution"), pattern)
-
-        elif action == 'recursive-include':
-            self.debug_print("recursive-include %s %s" %
-                             (dir, ' '.join(patterns)))
-            for pattern in patterns:
-                if not self.recursive_include(dir, pattern):
-                    log.warn(("warning: no files found matching '%s' "
-                              "under directory '%s'"), pattern, dir)
-
-        elif action == 'recursive-exclude':
-            self.debug_print("recursive-exclude %s %s" %
-                             (dir, ' '.join(patterns)))
-            for pattern in patterns:
-                if not self.recursive_exclude(dir, pattern):
-                    log.warn(("warning: no previously-included files matching "
-                              "'%s' found under directory '%s'"), pattern, dir)
-
-        elif action == 'graft':
-            self.debug_print("graft " + dir_pattern)
-            if not self.graft(dir_pattern):
-                log.warn("warning: no directories found matching '%s'",
-                         dir_pattern)
-
-        elif action == 'prune':
-            self.debug_print("prune " + dir_pattern)
-            if not self.prune(dir_pattern):
-                log.warn(("no previously-included directories found "
-                          "matching '%s'"), dir_pattern)
-
-        else:
-            raise DistutilsInternalError(
-                "this cannot happen: invalid action '%s'" % action)
+
+        action_is_recursive = action.startswith('recursive-')
+        if action in {'graft', 'prune'}:
+            patterns = [dir_pattern]
+        extra_log_args = (dir, ) if action_is_recursive else ()
+        log_tmpl = log_map[action]
+
+        self.debug_print(
+            ' '.join(
+                [action] +
+                ([dir] if action_is_recursive else []) +
+                patterns,
+            )
+        )
+        for pattern in patterns:
+            if not process_action(pattern):
+                log.warn(log_tmpl, pattern, *extra_log_args)

     def _remove_files(self, predicate):
         """
setuptools/dist.py

@@ -119,7 +119,7 @@ def read_pkg_file(self, file):
 # Based on Python 3.5 version


-def write_pkg_file(self, file):
+def write_pkg_file(self, file):  # noqa: C901  # is too complex (14)  # FIXME
     """Write the PKG-INFO format data to a file object.
     """
     version = self.get_metadata_version()
@@ -548,7 +548,8 @@ class Distribution(_Distribution):
             req.marker = None
         return req

-    def _parse_config_files(self, filenames=None):
+    # FIXME: 'Distribution._parse_config_files' is too complex (14)
+    def _parse_config_files(self, filenames=None):  # noqa: C901
         """
         Adapted from distutils.dist.Distribution.parse_config_files,
         this method provides the same functionality in subtly-improved
@@ -557,14 +558,12 @@ class Distribution(_Distribution):
         from configparser import ConfigParser

         # Ignore install directory options if we have a venv
-        if sys.prefix != sys.base_prefix:
-            ignore_options = [
-                'install-base', 'install-platbase', 'install-lib',
-                'install-platlib', 'install-purelib', 'install-headers',
-                'install-scripts', 'install-data', 'prefix', 'exec-prefix',
-                'home', 'user', 'root']
-        else:
-            ignore_options = []
+        ignore_options = [] if sys.prefix == sys.base_prefix else [
+            'install-base', 'install-platbase', 'install-lib',
+            'install-platlib', 'install-purelib', 'install-headers',
+            'install-scripts', 'install-data', 'prefix', 'exec-prefix',
+            'home', 'user', 'root',
+        ]

         ignore_options = frozenset(ignore_options)
@@ -585,32 +584,37 @@ class Distribution(_Distribution):
             opt_dict = self.get_option_dict(section)
             for opt in options:
-                if opt != '__name__' and opt not in ignore_options:
-                    val = parser.get(section, opt)
-                    opt = opt.replace('-', '_')
-                    opt_dict[opt] = (filename, val)
+                if opt == '__name__' or opt in ignore_options:
+                    continue
+
+                val = parser.get(section, opt)
+                opt = opt.replace('-', '_')
+                opt_dict[opt] = (filename, val)

             # Make the ConfigParser forget everything (so we retain
             # the original filenames that options come from)
             parser.__init__()

+        if 'global' not in self.command_options:
+            return
+
         # If there was a "global" section in the config file, use it
         # to set Distribution options.

-        if 'global' in self.command_options:
-            for (opt, (src, val)) in self.command_options['global'].items():
-                alias = self.negative_opt.get(opt)
-                try:
-                    if alias:
-                        setattr(self, alias, not strtobool(val))
-                    elif opt in ('verbose', 'dry_run'):  # ugh!
-                        setattr(self, opt, strtobool(val))
-                    else:
-                        setattr(self, opt, val)
-                except ValueError as e:
-                    raise DistutilsOptionError(e) from e
+        for (opt, (src, val)) in self.command_options['global'].items():
+            alias = self.negative_opt.get(opt)
+            if alias:
+                val = not strtobool(val)
+            elif opt in ('verbose', 'dry_run'):  # ugh!
+                val = strtobool(val)
+            try:
+                setattr(self, alias or opt, val)
+            except ValueError as e:
+                raise DistutilsOptionError(e) from e

-    def _set_command_options(self, command_obj, option_dict=None):
+    # FIXME: 'Distribution._set_command_options' is too complex (14)
+    def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
         """
         Set the options for 'command_obj' from 'option_dict'.  Basically
         this means copying elements of a dictionary ('option_dict') to
setuptools/glob.py

@@ -47,6 +47,8 @@ def iglob(pathname, recursive=False):

 def _iglob(pathname, recursive):
     dirname, basename = os.path.split(pathname)
+    glob_in_dir = glob2 if recursive and _isrecursive(basename) else glob1
+
     if not has_magic(pathname):
         if basename:
             if os.path.lexists(pathname):
@@ -56,13 +58,9 @@ def _iglob(pathname, recursive):
             if os.path.isdir(dirname):
                 yield pathname
         return
+
     if not dirname:
-        if recursive and _isrecursive(basename):
-            for x in glob2(dirname, basename):
-                yield x
-        else:
-            for x in glob1(dirname, basename):
-                yield x
+        yield from glob_in_dir(dirname, basename)
         return
     # `os.path.split()` returns the argument itself as a dirname if it is a
     # drive or UNC path.  Prevent an infinite recursion if a drive or UNC path
@@ -71,12 +69,7 @@ def _iglob(pathname, recursive):
         dirs = _iglob(dirname, recursive)
     else:
         dirs = [dirname]
-    if has_magic(basename):
-        if recursive and _isrecursive(basename):
-            glob_in_dir = glob2
-        else:
-            glob_in_dir = glob1
-    else:
+    if not has_magic(basename):
         glob_in_dir = glob0
     for dirname in dirs:
         for name in glob_in_dir(dirname, basename):
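The glob.py change above precomputes glob_in_dir once and replaces two explicit "for x in ...: yield x" loops with yield from, which delegates to another iterable without adding loops or branches for the complexity check to count. A small standalone illustration of the equivalence; the function names here are made up:

def letters():
    yield 'a'
    yield 'b'

def spelled_out_delegation():
    # pre-PEP 380 style: an explicit loop just to re-yield items
    for item in letters():
        yield item

def yield_from_delegation():
    # equivalent behaviour, with no extra loop for flake8 to count
    yield from letters()

assert list(spelled_out_delegation()) == list(yield_from_delegation()) == ['a', 'b']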
setuptools/installer.py

@@ -51,7 +51,7 @@ def _legacy_fetch_build_egg(dist, req):
     return cmd.easy_install(req)


-def fetch_build_egg(dist, req):
+def fetch_build_egg(dist, req):  # noqa: C901  # is too complex (16)  # FIXME
    """Fetch an egg needed for building.

    Use pip/wheel to fetch/build a wheel."""
@@ -80,20 +80,17 @@ def fetch_build_egg(dist, req):
    if 'allow_hosts' in opts:
        raise DistutilsError('the `allow-hosts` option is not supported '
                             'when using pip to install requirements.')
-    if 'PIP_QUIET' in os.environ or 'PIP_VERBOSE' in os.environ:
-        quiet = False
-    else:
-        quiet = True
+    quiet = 'PIP_QUIET' not in os.environ and 'PIP_VERBOSE' not in os.environ
    if 'PIP_INDEX_URL' in os.environ:
        index_url = None
    elif 'index_url' in opts:
        index_url = opts['index_url'][1]
    else:
        index_url = None
-    if 'find_links' in opts:
-        find_links = _fixup_find_links(opts['find_links'][1])[:]
-    else:
-        find_links = []
+    find_links = (
+        _fixup_find_links(opts['find_links'][1])[:] if 'find_links' in opts
+        else []
+    )
    if dist.dependency_links:
        find_links.extend(dist.dependency_links)
    eggs_dir = os.path.realpath(dist.get_egg_cache_dir())
@@ -112,16 +109,12 @@ def fetch_build_egg(dist, req):
        cmd.append('--quiet')
    if index_url is not None:
        cmd.extend(('--index-url', index_url))
-    if find_links is not None:
-        for link in find_links:
-            cmd.extend(('--find-links', link))
+    for link in find_links or []:
+        cmd.extend(('--find-links', link))
    # If requirement is a PEP 508 direct URL, directly pass
    # the URL to pip, as `req @ url` does not work on the
    # command line.
-    if req.url:
-        cmd.append(req.url)
-    else:
-        cmd.append(str(req))
+    cmd.append(req.url or str(req))
    try:
        subprocess.check_call(cmd)
    except subprocess.CalledProcessError as e:
setuptools/msvc.py

@@ -24,6 +24,7 @@ from io import open
 from os import listdir, pathsep
 from os.path import join, isfile, isdir, dirname
 import sys
 import contextlib
 import platform
 import itertools
 import subprocess
@@ -724,28 +725,23 @@ class SystemInfo:
         ms = self.ri.microsoft
         vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs)
         vs_vers = []
-        for hkey in self.ri.HKEYS:
-            for key in vckeys:
-                try:
-                    bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)
-                except (OSError, IOError):
-                    continue
-                with bkey:
-                    subkeys, values, _ = winreg.QueryInfoKey(bkey)
-                    for i in range(values):
-                        try:
-                            ver = float(winreg.EnumValue(bkey, i)[0])
-                            if ver not in vs_vers:
-                                vs_vers.append(ver)
-                        except ValueError:
-                            pass
-                    for i in range(subkeys):
-                        try:
-                            ver = float(winreg.EnumKey(bkey, i))
-                            if ver not in vs_vers:
-                                vs_vers.append(ver)
-                        except ValueError:
-                            pass
+        for hkey, key in itertools.product(self.ri.HKEYS, vckeys):
+            try:
+                bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)
+            except (OSError, IOError):
+                continue
+            with bkey:
+                subkeys, values, _ = winreg.QueryInfoKey(bkey)
+                for i in range(values):
+                    with contextlib.suppress(ValueError):
+                        ver = float(winreg.EnumValue(bkey, i)[0])
+                        if ver not in vs_vers:
+                            vs_vers.append(ver)
+                for i in range(subkeys):
+                    with contextlib.suppress(ValueError):
+                        ver = float(winreg.EnumKey(bkey, i))
+                        if ver not in vs_vers:
+                            vs_vers.append(ver)
         return sorted(vs_vers)

     def find_programdata_vs_vers(self):
@@ -925,8 +921,8 @@ class SystemInfo:
         """
         return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib'))

-    @property
-    def WindowsSdkDir(self):
+    @property  # noqa: C901
+    def WindowsSdkDir(self):  # noqa: C901  # is too complex (12)  # FIXME
         """
         Microsoft Windows SDK directory.
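The find_reg_vs_vers rewrite in this file flattens two nested loops with itertools.product and swaps try/except ValueError: pass for contextlib.suppress, removing an indentation level and a branch per loop body. Both are standard-library APIs; a minimal standalone sketch of the two idioms together:

import contextlib
import itertools

values = []
# itertools.product flattens what would otherwise be two nested for-loops
for radix, text in itertools.product((10, 16), ('12', 'ff', 'nope')):
    # contextlib.suppress is the compact equivalent of try/except ValueError: pass
    with contextlib.suppress(ValueError):
        values.append(int(text, radix))

print(values)  # [12, 18, 255]: '12' parses in both bases, 'ff' only in base 16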
setuptools/package_index.py

@@ -320,7 +320,8 @@ class PackageIndex(Environment):
         else:
             self.opener = urllib.request.urlopen

-    def process_url(self, url, retrieve=False):
+    # FIXME: 'PackageIndex.process_url' is too complex (14)
+    def process_url(self, url, retrieve=False):  # noqa: C901
         """Evaluate a URL as a possible download, and maybe retrieve it"""
         if url in self.scanned_urls and not retrieve:
             return
@@ -428,49 +429,53 @@ class PackageIndex(Environment):
             dist.precedence = SOURCE_DIST
             self.add(dist)

+    def _scan(self, link):
+        # Process a URL to see if it's for a package page
+        NO_MATCH_SENTINEL = None, None
+        if not link.startswith(self.index_url):
+            return NO_MATCH_SENTINEL
+
+        parts = list(map(
+            urllib.parse.unquote, link[len(self.index_url):].split('/')
+        ))
+        if len(parts) != 2 or '#' in parts[1]:
+            return NO_MATCH_SENTINEL
+
+        # it's a package page, sanitize and index it
+        pkg = safe_name(parts[0])
+        ver = safe_version(parts[1])
+
+        self.package_pages.setdefault(pkg.lower(), {})[link] = True
+        return to_filename(pkg), to_filename(ver)
+
     def process_index(self, url, page):
         """Process the contents of a PyPI page"""

-        def scan(link):
-            # Process a URL to see if it's for a package page
-            if link.startswith(self.index_url):
-                parts = list(map(
-                    urllib.parse.unquote, link[len(self.index_url):].split('/')
-                ))
-                if len(parts) == 2 and '#' not in parts[1]:
-                    # it's a package page, sanitize and index it
-                    pkg = safe_name(parts[0])
-                    ver = safe_version(parts[1])
-                    self.package_pages.setdefault(pkg.lower(), {})[link] = True
-                    return to_filename(pkg), to_filename(ver)
-            return None, None
-
         # process an index page into the package-page index
         for match in HREF.finditer(page):
             try:
-                scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))
+                self._scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))
             except ValueError:
                 pass

-        pkg, ver = scan(url)  # ensure this page is in the page index
-        if pkg:
-            # process individual package page
-            for new_url in find_external_links(url, page):
-                # Process the found URL
-                base, frag = egg_info_for_url(new_url)
-                if base.endswith('.py') and not frag:
-                    if ver:
-                        new_url += '#egg=%s-%s' % (pkg, ver)
-                    else:
-                        self.need_version_info(url)
-                self.scan_url(new_url)
-
-            return PYPI_MD5.sub(
-                lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
-            )
-        else:
-            return ""  # no sense double-scanning non-package pages
+        pkg, ver = self._scan(url)  # ensure this page is in the page index
+        if not pkg:
+            return ""  # no sense double-scanning non-package pages
+
+        # process individual package page
+        for new_url in find_external_links(url, page):
+            # Process the found URL
+            base, frag = egg_info_for_url(new_url)
+            if base.endswith('.py') and not frag:
+                if ver:
+                    new_url += '#egg=%s-%s' % (pkg, ver)
+                else:
+                    self.need_version_info(url)
+            self.scan_url(new_url)
+
+        return PYPI_MD5.sub(
+            lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
+        )

     def need_version_info(self, url):
         self.scan_all(
             "Page at %s links to .py file(s) without version info; an index "
@@ -591,7 +596,7 @@ class PackageIndex(Environment):
             spec = parse_requirement_arg(spec)
         return getattr(self.fetch_distribution(spec, tmpdir), 'location', None)

-    def fetch_distribution(
+    def fetch_distribution(  # noqa: C901  # is too complex (14)  # FIXME
             self, requirement, tmpdir, force_scan=False, source=False,
             develop_ok=False, local_index=None):
         """Obtain a distribution suitable for fulfilling `requirement`
@@ -762,7 +767,8 @@ class PackageIndex(Environment):
     def reporthook(self, url, filename, blocknum, blksize, size):
         pass  # no-op

-    def open_url(self, url, warning=None):
+    # FIXME:
+    def open_url(self, url, warning=None):  # noqa: C901  # is too complex (12)
         if url.startswith('file:'):
             return local_open(url)
         try:
setuptools/ssl_support.py

@@ -56,7 +56,7 @@ if not CertificateError:
         pass


-if not match_hostname:
+if not match_hostname:  # noqa: C901  # 'If 59' is too complex (21)  # FIXME
     def _dnsname_match(dn, hostname, max_wildcards=1):
         """Matching according to RFC 6125, section 6.4.3