diff --git a/pip/__init__.py b/pip/__init__.py old mode 100755 new mode 100644 index 804dfa1c43f..99fa932637e --- a/pip/__init__.py +++ b/pip/__init__.py @@ -29,8 +29,8 @@ except ImportError: pass else: - if (sys.platform == "darwin" and - ssl.OPENSSL_VERSION_NUMBER < 0x1000100f): # OpenSSL 1.0.1 + # OpenSSL 1.0.1 on MacOS + if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f: try: from pip._vendor.requests.packages.urllib3.contrib import ( securetransport, @@ -51,7 +51,6 @@ InsecureRequestWarning, ) - # assignment for flake8 to be happy # This fixes a peculiarity when importing via __import__ - as we are @@ -157,7 +156,8 @@ def create_main_parser(): pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) parser.version = 'pip %s from %s (python %s)' % ( - __version__, pip_pkg_dir, sys.version[:3]) + __version__, pip_pkg_dir, sys.version[:3] + ) # add the general options gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) diff --git a/pip/basecommand.py b/pip/basecommand.py index c2e3c34cad3..3692e7caac4 100644 --- a/pip/basecommand.py +++ b/pip/basecommand.py @@ -191,8 +191,8 @@ def main(self, args): deprecation.RemovedInPip11Warning, ) - # TODO: try to get these passing down from the command? - # without resorting to os.environ to hold these. + # TODO: Try to get these passing down from the command without + # resorting to os.environ to hold these? 
if options.no_input: os.environ['PIP_NO_INPUT'] = '1' @@ -212,8 +212,8 @@ def main(self, args): try: status = self.run(options, args) - # FIXME: all commands should return an exit status - # and when it is done, isinstance is not needed anymore + # FIXME: All commands should return an exit status + # and when it is done, isinstance is not needed anymore if isinstance(status, int): return status except PreviousBuildDirError as exc: @@ -242,13 +242,14 @@ def main(self, args): return UNKNOWN_ERROR finally: # Check if we're using the latest version of pip available - if (not options.disable_pip_version_check and not - getattr(options, "no_index", False)): - with self._build_session( - options, - retries=0, - timeout=min(5, options.timeout)) as session: + _no_index = getattr(options, "no_index", False) + if not options.disable_pip_version_check and not _no_index: + session = self._build_session( + options, retries=0, timeout=min(5, options.timeout) + ) + with session: pip_version_check(session, options) + # Avoid leaking loggers for handler in set(logging.root.handlers) - original_root_handlers: # this method benefit from the Logger class internal lock @@ -269,10 +270,12 @@ def populate_requirement_set(requirement_set, args, options, finder, # requirement_set.require_hashes may be updated for filename in options.constraints: - for req in parse_requirements( - filename, - constraint=True, finder=finder, options=options, - session=session, wheel_cache=wheel_cache): + parsed_constraints = parse_requirements( + filename, + constraint=True, finder=finder, options=options, + session=session, wheel_cache=wheel_cache + ) + for req in parsed_constraints: requirement_set.add_requirement(req) for req in args: @@ -293,10 +296,11 @@ def populate_requirement_set(requirement_set, args, options, finder, ) for filename in options.requirements: - for req in parse_requirements( - filename, - finder=finder, options=options, session=session, - wheel_cache=wheel_cache): + parsed_reqs = 
parse_requirements( + filename, finder=finder, options=options, session=session, + wheel_cache=wheel_cache + ) + for req in parsed_reqs: requirement_set.add_requirement(req) # If --require-hashes was a line in a requirements file, tell # RequirementSet about it: diff --git a/pip/baseparser.py b/pip/baseparser.py index ea76a21afeb..93864b60fbb 100644 --- a/pip/baseparser.py +++ b/pip/baseparser.py @@ -203,6 +203,7 @@ def _update_defaults(self, defaults): late_eval.add(option.dest) opt_str = option.get_opt_string() val = option.convert_value(opt_str, val) + # From take_action args = option.callback_args or () kwargs = option.callback_kwargs or {} @@ -214,6 +215,7 @@ def _update_defaults(self, defaults): for key in late_eval: defaults[key] = getattr(self.values, key) + self.values = None return defaults @@ -230,6 +232,7 @@ def get_default_values(self): if isinstance(default, string_types): opt_str = option.get_opt_string() defaults[option.dest] = option.check_value(opt_str, default) + return optparse.Values(defaults) def error(self, msg): diff --git a/pip/cache.py b/pip/cache.py index 271c7c1df33..9f1815044a9 100644 --- a/pip/cache.py +++ b/pip/cache.py @@ -19,12 +19,11 @@ class Cache(object): """An abstract class - provides cache directories for data from links - - :param cache_dir: The root of the cache. - :param format_control: A pip.index.FormatControl object to limit - binaries being read from the cache. - :param allowed_formats: which formats of files the cache should store. - ('binary' and 'source' are the only allowed values) + :param cache_dir: The root of the cache. + :param format_control: A pip.index.FormatControl object to limit + binaries being read from the cache. + :param allowed_formats: Which formats of files the cache should store. 
+ ('binary' and 'source' are the only allowed values) """ def __init__(self, cache_dir, format_control, allowed_formats): @@ -70,6 +69,7 @@ def _get_candidates(self, link, package_name): if can_not_cache: return [] + # Check if the format of this cache is allowed to be used canonical_name = canonicalize_name(package_name) formats = pip.index.fmt_ctl_formats( self.format_control, canonical_name diff --git a/pip/cmdoptions.py b/pip/cmdoptions.py index 3f9db5ba5ad..a276234f9a3 100644 --- a/pip/cmdoptions.py +++ b/pip/cmdoptions.py @@ -52,7 +52,8 @@ def getname(n): fmt_ctl_no_binary(control) warnings.warn( 'Disabling all use of wheels due to the use of --build-options ' - '/ --global-options / --install-options.', stacklevel=2) + '/ --global-options / --install-options.', stacklevel=2 + ) ########### @@ -64,7 +65,8 @@ def getname(n): '-h', '--help', dest='help', action='help', - help='Show help.') + help='Show help.' +) isolated_mode = partial( Option, @@ -72,10 +74,8 @@ def getname(n): dest="isolated_mode", action="store_true", default=False, - help=( - "Run pip in an isolated mode, ignoring environment variables and user " - "configuration." - ), + help='Run pip in an isolated mode, ignoring environment variables and ' + 'user configuration.' ) require_virtualenv = partial( @@ -85,7 +85,8 @@ def getname(n): dest='require_venv', action='store_true', default=False, - help=SUPPRESS_HELP) + help=SUPPRESS_HELP +) verbose = partial( Option, @@ -101,7 +102,8 @@ def getname(n): '-V', '--version', dest='version', action='store_true', - help='Show version and exit.') + help='Show version and exit.' +) quiet = partial( Option, @@ -109,9 +111,8 @@ def getname(n): dest='quiet', action='count', default=0, - help=('Give less output. Option is additive, and can be used up to 3' - ' times (corresponding to WARNING, ERROR, and CRITICAL logging' - ' levels).') + help='Give less output. 
Option is additive, and can be used up to 3 times' + '(corresponding to WARNING, ERROR, and CRITICAL logging levels).' ) progress_bar = partial( @@ -121,8 +122,9 @@ def getname(n): type='choice', choices=list(BAR_TYPES.keys()), default='on', - help='Specify type of progress to be displayed [' + - '|'.join(BAR_TYPES.keys()) + '] (default: %default)') + help='Specify type of progress to be displayed [{}] (default: %default)' + .format('|'.join(BAR_TYPES.keys())) +) log = partial( Option, @@ -139,7 +141,8 @@ def getname(n): dest='no_input', action='store_true', default=False, - help=SUPPRESS_HELP) + help=SUPPRESS_HELP +) proxy = partial( Option, @@ -147,7 +150,8 @@ def getname(n): dest='proxy', type='str', default='', - help="Specify a proxy in the form [user:passwd@]proxy.server:port.") + help="Specify a proxy in the form [user:password@]proxy.server:port." +) retries = partial( Option, @@ -156,7 +160,8 @@ def getname(n): type='int', default=5, help="Maximum number of retries each connection should attempt " - "(default %default times).") + "(default %default times)." +) timeout = partial( Option, @@ -165,7 +170,8 @@ def getname(n): dest='timeout', type='float', default=15, - help='Set the socket timeout (default %default seconds).') + help='Set the socket timeout (default %default seconds).' +) skip_requirements_regex = partial( Option, @@ -174,7 +180,8 @@ def getname(n): dest='skip_requirements_regex', type='str', default='', - help=SUPPRESS_HELP) + help=SUPPRESS_HELP +) def exists_action(): @@ -188,7 +195,8 @@ def exists_action(): action='append', metavar='action', help="Default action when a path already exists: " - "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.") + "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort." + ) cert = partial( @@ -197,7 +205,8 @@ def exists_action(): dest='cert', type='str', metavar='path', - help="Path to alternate CA bundle.") + help="Path to alternate CA bundle." 
+) client_cert = partial( Option, @@ -207,7 +216,8 @@ def exists_action(): default=None, metavar='path', help="Path to SSL client certificate, a single file containing the " - "private key and the certificate in PEM format.") + "private key and the certificate in PEM format." +) index_url = partial( Option, @@ -218,7 +228,8 @@ def exists_action(): help="Base URL of Python Package Index (default %default). " "This should point to a repository compliant with PEP 503 " "(the simple repository API) or a local directory laid out " - "in the same format.") + "in the same format." +) def extra_index_url(): @@ -240,7 +251,8 @@ def extra_index_url(): dest='no_index', action='store_true', default=False, - help='Ignore package index (only looking at --find-links URLs instead).') + help='Ignore package index (only looking at --find-links URLs instead).' +) def find_links(): @@ -252,7 +264,8 @@ def find_links(): metavar='url', help="If a url or path to an html file, then parse for links to " "archives. If a local path or file:// url that's a directory, " - "then look for archives in the directory listing.") + "then look for archives in the directory listing." + ) def trusted_host(): @@ -263,7 +276,7 @@ def trusted_host(): metavar="HOSTNAME", default=[], help="Mark this host as trusted, even though it does not have valid " - "or any HTTPS.", + "or any HTTPS." ) @@ -274,7 +287,7 @@ def trusted_host(): dest="process_dependency_links", action="store_true", default=False, - help="Enable the processing of dependency links.", + help="Enable the processing of dependency links." ) @@ -286,7 +299,8 @@ def constraints(): default=[], metavar='file', help='Constrain versions using the given constraints file. ' - 'This option can be used multiple times.') + 'This option can be used multiple times.' + ) def requirements(): @@ -297,7 +311,8 @@ def requirements(): default=[], metavar='file', help='Install from the given requirements file. 
' - 'This option can be used multiple times.') + 'This option can be used multiple times.' + ) def editable(): @@ -307,8 +322,8 @@ def editable(): action='append', default=[], metavar='path/url', - help=('Install a project in editable mode (i.e. setuptools ' - '"develop mode") from a local project path or a VCS url.'), + help='Install a project in editable mode (i.e. setuptools ' + '"develop mode") from a local project path or a VCS url.' ) @@ -319,8 +334,8 @@ def editable(): metavar='dir', default=src_prefix, help='Directory to check out editable projects into. ' - 'The default in a virtualenv is "/src". ' - 'The default for global installs is "/src".' + 'The default in a virtualenv is "/src". ' + 'The default for global installs is "/src".' ) @@ -332,13 +347,15 @@ def _get_format_control(values, option): def _handle_no_binary(option, opt_str, value, parser): existing = getattr(parser.values, option.dest) fmt_ctl_handle_mutual_exclude( - value, existing.no_binary, existing.only_binary) + value, existing.no_binary, existing.only_binary + ) def _handle_only_binary(option, opt_str, value, parser): existing = getattr(parser.values, option.dest) fmt_ctl_handle_mutual_exclude( - value, existing.only_binary, existing.no_binary) + value, existing.only_binary, existing.no_binary + ) def no_binary(): @@ -351,7 +368,8 @@ def no_binary(): "disable all binary packages, :none: to empty the set, or one or " "more package names with commas between them. Note that some " "packages are tricky to compile and may fail to install when " - "this option is used on them.") + "this option is used on them." + ) def only_binary(): @@ -364,7 +382,8 @@ def only_binary(): "disable all source packages, :none: to empty the set, or one or " "more package names with commas between them. Packages without " "binary distributions will fail to install when this option is " - "used on them.") + "used on them." 
+ ) cache_dir = partial( @@ -390,7 +409,8 @@ def only_binary(): dest='ignore_dependencies', action='store_true', default=False, - help="Don't install package dependencies.") + help="Don't install package dependencies." +) build_dir = partial( Option, @@ -405,7 +425,8 @@ def only_binary(): '--ignore-requires-python', dest='ignore_requires_python', action='store_true', - help='Ignore the Requires-Python information.') + help='Ignore the Requires-Python information.' +) install_options = partial( Option, @@ -417,7 +438,8 @@ def only_binary(): "command (use like --install-option=\"--install-scripts=/usr/local/" "bin\"). Use multiple --install-option options to pass multiple " "options to setup.py install. If you are using an option with a " - "directory path, be sure to use absolute path.") + "directory path, be sure to use absolute path." +) global_options = partial( Option, @@ -426,14 +448,16 @@ def only_binary(): action='append', metavar='options', help="Extra global options to be supplied to the setup.py " - "call before the install command.") + "call before the install command." +) no_clean = partial( Option, '--no-clean', action='store_true', default=False, - help="Don't clean up build directories.") + help="Don't clean up build directories." +) pre = partial( Option, @@ -441,7 +465,8 @@ def only_binary(): action='store_true', default=False, help="Include pre-release and development versions. By default, " - "pip only finds stable versions.") + "pip only finds stable versions." +) disable_pip_version_check = partial( Option, @@ -450,7 +475,8 @@ def only_binary(): action="store_true", default=False, help="Don't periodically check PyPI to determine whether a new version " - "of pip is available for download. Implied with --no-index.") + "of pip is available for download. Implied with --no-index." 
+) # Deprecated, Remove later @@ -471,12 +497,15 @@ def _merge_hash(option, opt_str, value, parser): try: algo, digest = value.split(':', 1) except ValueError: - parser.error('Arguments to %s must be a hash name ' - 'followed by a value, like --hash=sha256:abcde...' % - opt_str) + parser.error( + 'Arguments to %s must be a hash name followed by a value, like ' + '--hash=sha256:abcde...' % opt_str + ) if algo not in STRONG_HASHES: - parser.error('Allowed hash algorithms for %s are %s.' % - (opt_str, ', '.join(STRONG_HASHES))) + parser.error( + 'Allowed hash algorithms for %s are %s.' + % (opt_str, ', '.join(STRONG_HASHES)) + ) parser.values.hashes.setdefault(algo, []).append(digest) @@ -490,7 +519,8 @@ def _merge_hash(option, opt_str, value, parser): callback=_merge_hash, type='string', help="Verify that the package's archive matches this " - 'hash before installing. Example: --hash=sha256:abcdef...') + 'hash before installing. Example: --hash=sha256:abcdef...' +) require_hashes = partial( @@ -501,7 +531,8 @@ def _merge_hash(option, opt_str, value, parser): default=False, help='Require a hash to check each requirement against, for ' 'repeatable installs. This option is implied when any package in a ' - 'requirements file has a --hash option.') + 'requirements file has a --hash option.' 
+) ########## diff --git a/pip/commands/check.py b/pip/commands/check.py index d951f8e87d2..fade62cdbe5 100644 --- a/pip/commands/check.py +++ b/pip/commands/check.py @@ -22,14 +22,16 @@ def run(self, options, args): for requirement in missing_reqs_dict.get(dist.key, []): logger.info( "%s %s requires %s, which is not installed.", - dist.project_name, dist.version, requirement.project_name) + dist.project_name, dist.version, requirement.project_name + ) - for requirement, actual in incompatible_reqs_dict.get( - dist.key, []): + incompatible = incompatible_reqs_dict.get(dist.key, []) + for requirement, actual in incompatible: logger.info( "%s %s has requirement %s, but you have %s %s.", dist.project_name, dist.version, requirement, - actual.project_name, actual.version) + actual.project_name, actual.version + ) if missing_reqs_dict or incompatible_reqs_dict: return 1 diff --git a/pip/commands/completion.py b/pip/commands/completion.py index 1c0744d23a4..acc0d962f00 100644 --- a/pip/commands/completion.py +++ b/pip/commands/completion.py @@ -58,19 +58,22 @@ def __init__(self, *args, **kw): action='store_const', const='bash', dest='shell', - help='Emit completion code for bash') + help='Emit completion code for bash' + ) cmd_opts.add_option( '--zsh', '-z', action='store_const', const='zsh', dest='shell', - help='Emit completion code for zsh') + help='Emit completion code for zsh' + ) cmd_opts.add_option( '--fish', '-f', action='store_const', const='fish', dest='shell', - help='Emit completion code for fish') + help='Emit completion code for fish' + ) self.parser.insert_option_group(0, cmd_opts) diff --git a/pip/commands/download.py b/pip/commands/download.py index a33c6097a2d..015c19c8a21 100644 --- a/pip/commands/download.py +++ b/pip/commands/download.py @@ -63,7 +63,7 @@ def __init__(self, *args, **kw): dest='download_dir', metavar='dir', default=os.curdir, - help=("Download packages into ."), + help="Download packages into ." 
) cmd_opts.add_option( @@ -71,8 +71,8 @@ def __init__(self, *args, **kw): dest='platform', metavar='platform', default=None, - help=("Only download wheels compatible with . " - "Defaults to the platform of the running system."), + help="Only download wheels compatible with . " + "Defaults to the platform of the running system.", ) cmd_opts.add_option( @@ -80,12 +80,12 @@ def __init__(self, *args, **kw): dest='python_version', metavar='python_version', default=None, - help=("Only download wheels compatible with Python " - "interpreter version . If not specified, then the " - "current system interpreter minor version is used. A major " - "version (e.g. '2') can be specified to match all " - "minor revs of that major version. A minor version " - "(e.g. '34') can also be specified."), + help="Only download wheels compatible with Python interpreter " + "version . If not specified, then the current " + "system interpreter minor version is used. " + "A major version (e.g. '2') can be specified to match all " + "minor revs of that major version. " + "A minor version (e.g. '34') can also be specified." ) cmd_opts.add_option( @@ -93,11 +93,11 @@ def __init__(self, *args, **kw): dest='implementation', metavar='implementation', default=None, - help=("Only download wheels compatible with Python " - "implementation , e.g. 'pp', 'jy', 'cp', " - " or 'ip'. If not specified, then the current " - "interpreter implementation is used. Use 'py' to force " - "implementation-agnostic wheels."), + help="Only download wheels compatible with Python " + "implementation , e.g. 'pp', 'jy', 'cp', " + " or 'ip'. If not specified, then the current interpreter " + "implementation is used. Use 'py' to force " + "implementation-agnostic wheels." ) cmd_opts.add_option( @@ -105,17 +105,15 @@ def __init__(self, *args, **kw): dest='abi', metavar='abi', default=None, - help=("Only download wheels compatible with Python " - "abi , e.g. 'pypy_41'. 
If not specified, then the " - "current interpreter abi tag is used. Generally " - "you will need to specify --implementation, " - "--platform, and --python-version when using " - "this option."), + help="Only download wheels compatible with Python " + "abi , e.g. 'pypy_41'. If not specified, then the " + "current interpreter abi tag is used. Generally " + "you will need to specify --implementation, --platform and " + "--python-version when using this option." ) index_opts = cmdoptions.make_option_group( - cmdoptions.index_group, - self.parser, + cmdoptions.index_group, self.parser, ) self.parser.insert_option_group(0, index_opts) @@ -144,7 +142,7 @@ def run(self, options, args): "--only-binary=:all: must be set and --no-binary must not " "be set (or must be set to :none:) when restricting platform " "and interpreter constraints using --python-version, " - "--platform, --abi, or --implementation." + "--platform, --abi or --implementation." ) options.src_dir = os.path.abspath(options.src_dir) @@ -173,10 +171,10 @@ def run(self, options, args): ) options.cache_dir = None - with TempDirectory( + directory = TempDirectory( options.build_dir, delete=build_delete, kind="download" - ) as directory: - + ) + with directory: requirement_set = RequirementSet( require_hashes=options.require_hashes, ) @@ -217,9 +215,7 @@ def run(self, options, args): req.name for req in requirement_set.successfully_downloaded ]) if downloaded: - logger.info( - 'Successfully downloaded %s', downloaded - ) + logger.info('Successfully downloaded %s', downloaded) # Clean up if not options.no_clean: diff --git a/pip/commands/freeze.py b/pip/commands/freeze.py index e02fdcf8799..de59ea855ba 100644 --- a/pip/commands/freeze.py +++ b/pip/commands/freeze.py @@ -34,7 +34,8 @@ def __init__(self, *args, **kw): metavar='file', help="Use the order in the given requirements file and its " "comments when generating output. This option can be " - "used multiple times.") + "used multiple times." 
+ ) self.cmd_opts.add_option( '-f', '--find-links', dest='find_links', @@ -42,31 +43,36 @@ def __init__(self, *args, **kw): default=[], metavar='URL', help='URL for finding packages, which will be added to the ' - 'output.') + 'output.' + ) self.cmd_opts.add_option( '-l', '--local', dest='local', action='store_true', default=False, help='If in a virtualenv that has global access, do not output ' - 'globally-installed packages.') + 'globally-installed packages.' + ) self.cmd_opts.add_option( '--user', dest='user', action='store_true', default=False, - help='Only output packages installed in user-site.') + help='Only output packages installed in user-site.' + ) self.cmd_opts.add_option( '--all', dest='freeze_all', action='store_true', - help='Do not skip these packages in the output:' - ' %s' % ', '.join(DEV_PKGS)) + help='Do not skip these packages in the output: %s' + % ', '.join(DEV_PKGS) + ) self.cmd_opts.add_option( '--exclude-editable', dest='exclude_editable', action='store_true', - help='Exclude editable package from output.') + help='Exclude editable package from output.' + ) self.parser.insert_option_group(0, self.cmd_opts) @@ -86,7 +92,8 @@ def run(self, options, args): isolated=options.isolated_mode, wheel_cache=wheel_cache, skip=skip, - exclude_editable=options.exclude_editable) + exclude_editable=options.exclude_editable + ) for line in freeze(**freeze_kwargs): sys.stdout.write(line + '\n') diff --git a/pip/commands/install.py b/pip/commands/install.py index 6c7dab73e5d..23dca265b8b 100644 --- a/pip/commands/install.py +++ b/pip/commands/install.py @@ -88,21 +88,24 @@ def __init__(self, *args, **kw): help="Install to the Python user install directory for your " "platform. Typically ~/.local/, or %APPDATA%\\Python on " "Windows. 
(See the Python documentation for site.USER_BASE " - "for full details.)") + "for full details.)" + ) cmd_opts.add_option( '--root', dest='root_path', metavar='dir', default=None, help="Install everything relative to this alternate root " - "directory.") + "directory." + ) cmd_opts.add_option( '--prefix', dest='prefix_path', metavar='dir', default=None, help="Installation prefix where lib, bin and other top-level " - "folders are placed") + "folders are placed" + ) cmd_opts.add_option(cmdoptions.build_dir()) @@ -136,13 +139,15 @@ def __init__(self, *args, **kw): dest='force_reinstall', action='store_true', help='When upgrading, reinstall all packages even if they are ' - 'already up-to-date.') + 'already up-to-date.' + ) cmd_opts.add_option( '-I', '--ignore-installed', dest='ignore_installed', action='store_true', - help='Ignore the installed packages (reinstalling instead).') + help='Ignore the installed packages (reinstalling instead).' + ) cmd_opts.add_option(cmdoptions.ignore_requires_python()) @@ -236,10 +241,10 @@ def run(self, options, args): options.cache_dir, ) options.cache_dir = None - - with TempDirectory( + directory = TempDirectory( options.build_dir, delete=build_delete, kind="install" - ) as directory: + ) + with directory: requirement_set = RequirementSet( target_dir=target_temp_dir.path, pycompile=options.compile, diff --git a/pip/commands/list.py b/pip/commands/list.py index 2891362f545..fd52f541d34 100644 --- a/pip/commands/list.py +++ b/pip/commands/list.py @@ -43,37 +43,41 @@ def __init__(self, *args, **kw): '-o', '--outdated', action='store_true', default=False, - help='List outdated packages') + help='List outdated packages' + ) cmd_opts.add_option( '-u', '--uptodate', action='store_true', default=False, - help='List uptodate packages') + help='List uptodate packages' + ) cmd_opts.add_option( '-e', '--editable', action='store_true', default=False, - help='List editable projects.') + help='List editable projects.' 
+ ) cmd_opts.add_option( '-l', '--local', action='store_true', default=False, - help=('If in a virtualenv that has global access, do not list ' - 'globally-installed packages.'), + help='If in a virtualenv that has global access, do not list ' + 'globally-installed packages.' ) self.cmd_opts.add_option( '--user', dest='user', action='store_true', default=False, - help='Only output packages installed in user-site.') + help='Only output packages installed in user-site.' + ) cmd_opts.add_option( '--pre', action='store_true', default=False, - help=("Include pre-release and development versions. By default, " - "pip only finds stable versions."), + help="Include pre-release and development versions. By default, " + "pip only finds stable versions." ) cmd_opts.add_option( @@ -83,7 +87,7 @@ def __init__(self, *args, **kw): default="columns", choices=('legacy', 'columns', 'freeze', 'json'), help="Select the output format among: columns (default), freeze, " - "json, or legacy.", + "json or legacy.", ) cmd_opts.add_option( diff --git a/pip/commands/show.py b/pip/commands/show.py index 92c6c78cd55..ff5e4a104d8 100644 --- a/pip/commands/show.py +++ b/pip/commands/show.py @@ -28,7 +28,8 @@ def __init__(self, *args, **kw): dest='files', action='store_true', default=False, - help='Show the full list of installed files for each package.') + help='Show the full list of installed files for each package.' 
+ ) self.parser.insert_option_group(0, self.cmd_opts) @@ -39,10 +40,10 @@ def run(self, options, args): query = args results = search_packages_info(query) - if not print_results( - results, list_files=options.files, verbose=options.verbose): - return ERROR - return SUCCESS + successful = print_results( + results, list_files=options.files, verbose=options.verbose + ) + return SUCCESS if successful else ERROR def search_packages_info(query): @@ -102,8 +103,11 @@ def search_packages_info(query): feed_parser = FeedParser() feed_parser.feed(metadata) pkg_info_dict = feed_parser.close() - for key in ('metadata-version', 'summary', - 'home-page', 'author', 'author-email', 'license'): + _keys = ( + 'metadata-version', 'summary', 'home-page', 'author', + 'author-email', 'license' + ) + for key in _keys: package[key] = pkg_info_dict.get(key) # It looks like FeedParser cannot deal with repeated headers @@ -161,4 +165,5 @@ def print_results(distributions, list_files=False, verbose=False): logger.info(" %s", line.strip()) if "files" not in dist: logger.info("Cannot locate installed-files.txt") + return results_printed diff --git a/pip/commands/uninstall.py b/pip/commands/uninstall.py index 8608f37cac0..3b3e31e0f2d 100644 --- a/pip/commands/uninstall.py +++ b/pip/commands/uninstall.py @@ -38,13 +38,16 @@ def __init__(self, *args, **kw): '-y', '--yes', dest='yes', action='store_true', - help="Don't ask for confirmation of uninstall deletions.") + help="Don't ask for confirmation of uninstall deletions." 
+ ) self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): with self._build_session(options) as session: reqs_to_uninstall = {} + + # Determine which requirements have been given for name in args: req = InstallRequirement.from_line( name, isolated=options.isolated_mode, @@ -52,19 +55,20 @@ def run(self, options, args): if req.name: reqs_to_uninstall[canonicalize_name(req.name)] = req for filename in options.requirements: - for req in parse_requirements( - filename, - options=options, - session=session): + parsed_reqs = parse_requirements( + filename, options=options, session=session + ) + for req in parsed_reqs: if req.name: reqs_to_uninstall[canonicalize_name(req.name)] = req + if not reqs_to_uninstall: raise InstallationError( 'You must give at least one requirement to %(name)s (see ' '"pip help %(name)s")' % dict(name=self.name) ) + for req in reqs_to_uninstall.values(): - req.uninstall( - auto_confirm=options.yes, verbose=options.verbose != 0 - ) + verbose = options.verbose != 0 + req.uninstall(auto_confirm=options.yes, verbose=verbose) req.uninstalled_pathset.commit() diff --git a/pip/commands/wheel.py b/pip/commands/wheel.py index cc6476c27a5..fcc94f303ac 100644 --- a/pip/commands/wheel.py +++ b/pip/commands/wheel.py @@ -53,8 +53,8 @@ def __init__(self, *args, **kw): dest='wheel_dir', metavar='dir', default=os.curdir, - help=("Build wheels into , where the default is the " - "current working directory."), + help="Build wheels into , where the default is the " + "current working directory." ) cmd_opts.add_option(cmdoptions.no_binary()) cmd_opts.add_option(cmdoptions.only_binary()) @@ -63,7 +63,8 @@ def __init__(self, *args, **kw): dest='build_options', metavar='options', action='append', - help="Extra arguments to be supplied to 'setup.py bdist_wheel'.") + help="Extra arguments to be supplied to 'setup.py bdist_wheel'." 
+ ) cmd_opts.add_option(cmdoptions.constraints()) cmd_opts.add_option(cmdoptions.editable()) cmd_opts.add_option(cmdoptions.requirements()) @@ -79,14 +80,15 @@ def __init__(self, *args, **kw): action='append', metavar='options', help="Extra global options to be supplied to the setup.py " - "call before the 'bdist_wheel' command.") + "call before the 'bdist_wheel' command." + ) cmd_opts.add_option( '--pre', action='store_true', default=False, - help=("Include pre-release and development versions. By default, " - "pip only finds stable versions."), + help="Include pre-release and development versions. By default, " + "pip only finds stable versions.", ) cmd_opts.add_option(cmdoptions.no_clean()) @@ -113,9 +115,7 @@ def check_required_packages(self): "To fix this, run: pip install --upgrade setuptools>=0.8" ) pkg_resources = import_or_raise( - 'pkg_resources', - CommandError, - need_setuptools_message + 'pkg_resources', CommandError, need_setuptools_message ) if not hasattr(pkg_resources, 'DistInfoDistribution'): raise CommandError(need_setuptools_message) @@ -139,9 +139,10 @@ def run(self, options, args): build_delete = (not (options.no_clean or options.build_dir)) wheel_cache = WheelCache(options.cache_dir, options.format_control) - with TempDirectory( + directory = TempDirectory( options.build_dir, delete=build_delete, kind="wheel" - ) as directory: + ) + with directory: requirement_set = RequirementSet( require_hashes=options.require_hashes, ) diff --git a/pip/compat.py b/pip/compat.py index 08cf2a14fff..eed2af93b37 100644 --- a/pip/compat.py +++ b/pip/compat.py @@ -57,7 +57,8 @@ def backslashreplace_decode_fn(err): return u"".join(u"\\x%x" % c for c in raw_bytes), err.end codecs.register_error( "backslashreplace_decode", - backslashreplace_decode_fn) + backslashreplace_decode_fn + ) backslashreplace_decode = "backslashreplace_decode" @@ -86,7 +87,8 @@ def console_to_str(data): except UnicodeDecodeError: logger.warning( "Subprocess output does not appear to be 
encoded as %s" % - encoding) + encoding + ) s = data.decode(encoding, errors=backslashreplace_decode) # Make sure we can print the output, by encoding it to the output @@ -101,8 +103,9 @@ def console_to_str(data): # or doesn't have an encoding attribute. Neither of these cases # should occur in normal pip use, but there's no harm in checking # in case people use pip in (unsupported) unusual situations. - output_encoding = getattr(getattr(sys, "__stderr__", None), - "encoding", None) + output_encoding = getattr( + getattr(sys, "__stderr__", None), "encoding", None + ) if output_encoding: s = s.encode(output_encoding, errors="backslashreplace") diff --git a/pip/download.py b/pip/download.py index 424a8a4bf7f..76b1f8fcc5c 100644 --- a/pip/download.py +++ b/pip/download.py @@ -589,8 +589,9 @@ def written_chunks(chunks): url = link.url_without_fragment if show_progress: # We don't show progress on cached responses - progress_indicator = DownloadProgressProvider(progress_bar, - max=total_length) + progress_indicator = DownloadProgressProvider( + progress_bar, max=total_length + ) if total_length: logger.info("Downloading %s (%s)", url, format_size(total_length)) else: @@ -652,20 +653,18 @@ def unpack_http_url(link, location, download_dir=None, # If a download dir is specified, is the file already downloaded there? already_downloaded_path = None if download_dir: - already_downloaded_path = _check_download_dir(link, - download_dir, - hashes) + already_downloaded_path = _check_download_dir( + link, download_dir, hashes + ) if already_downloaded_path: from_path = already_downloaded_path content_type = mimetypes.guess_type(from_path)[0] else: # let's download to a tmp dir - from_path, content_type = _download_http_url(link, - session, - temp_dir.path, - hashes, - progress_bar) + from_path, content_type = _download_http_url( + link, session, temp_dir.path, hashes, progress_bar + ) # unpack the archive to the build dir location. 
even when only # downloading archives, they have to be unpacked to parse dependencies @@ -707,9 +706,9 @@ def unpack_file_url(link, location, download_dir=None, hashes=None): # If a download dir is specified, is the file already there and valid? already_downloaded_path = None if download_dir: - already_downloaded_path = _check_download_dir(link, - download_dir, - hashes) + already_downloaded_path = _check_download_dir( + link, download_dir, hashes + ) if already_downloaded_path: from_path = already_downloaded_path @@ -779,8 +778,9 @@ def request(self, host, handler, request_body, verbose=False): url = urllib_parse.urlunparse(parts) try: headers = {'Content-Type': 'text/xml'} - response = self._session.post(url, data=request_body, - headers=headers, stream=True) + response = self._session.post( + url, data=request_body, headers=headers, stream=True + ) response.raise_for_status() self.verbose = verbose return self.parse_response(response.raw) diff --git a/pip/exceptions.py b/pip/exceptions.py index 0353329f4db..3f8cb1b1996 100644 --- a/pip/exceptions.py +++ b/pip/exceptions.py @@ -173,9 +173,9 @@ def body(self): # In case someone feeds something downright stupid # to InstallRequirement's constructor. 
else getattr(self.req, 'req', None)) - return ' %s --hash=%s:%s' % (package or 'unknown package', - FAVORITE_HASH, - self.gotten_hash) + return ' %s --hash=%s:%s' % ( + package or 'unknown package', FAVORITE_HASH, self.gotten_hash + ) class HashUnpinned(HashError): @@ -233,12 +233,15 @@ def hash_then_or(hash_name): return chain([hash_name], repeat(' or')) lines = [] - for hash_name, expecteds in iteritems(self.allowed): + for hash_name, expect in iteritems(self.allowed): prefix = hash_then_or(hash_name) - lines.extend((' Expected %s %s' % (next(prefix), e)) - for e in expecteds) - lines.append(' Got %s\n' % - self.gots[hash_name].hexdigest()) + lines.extend( + (' Expected %s %s' % (next(prefix), e)) for e in expect + ) + lines.append( + ' Got %s\n' + % self.gots[hash_name].hexdigest() + ) prefix = ' or' return '\n'.join(lines) diff --git a/pip/index.py b/pip/index.py index 5808b921a88..d0c31294852 100644 --- a/pip/index.py +++ b/pip/index.py @@ -253,14 +253,16 @@ def sort_path(path): else: logger.warning( "Url '%s' is ignored: it is neither a file " - "nor a directory.", url) + "nor a directory.", url + ) elif is_url(url): # Only add url with clear scheme urls.append(url) else: logger.warning( "Url '%s' is ignored. 
It is either a non-existing " - "path or lacks a specific scheme.", url) + "path or lacks a specific scheme.", url + ) return files, urls diff --git a/pip/operations/check.py b/pip/operations/check.py index eef0630fc0a..3855f34edbf 100644 --- a/pip/operations/check.py +++ b/pip/operations/check.py @@ -9,8 +9,7 @@ def check_requirements(installed_dists): if missing_reqs: missing_reqs_dict[dist.key] = missing_reqs - incompatible_reqs = list(get_incompatible_reqs( - dist, installed_dists)) + incompatible_reqs = list(get_incompatible_reqs(dist, installed_dists)) if incompatible_reqs: incompatible_reqs_dict[dist.key] = incompatible_reqs diff --git a/pip/operations/freeze.py b/pip/operations/freeze.py index 6772022fdef..7da3a7dd227 100644 --- a/pip/operations/freeze.py +++ b/pip/operations/freeze.py @@ -44,9 +44,10 @@ def freeze( for link in find_links: yield '-f %s' % link installations = {} - for dist in get_installed_distributions(local_only=local_only, - skip=(), - user_only=user_only): + installed_dists = get_installed_distributions( + local_only=local_only, skip=(), user_only=user_only + ) + for dist in installed_dists: try: req = FrozenRequirement.from_dist( dist, @@ -71,18 +72,22 @@ def freeze( for req_file_path in requirement: with open(req_file_path) as req_file: for line in req_file: - if (not line.strip() or - line.strip().startswith('#') or - (skip_match and skip_match(line)) or - line.startswith(( - '-r', '--requirement', - '-Z', '--always-unzip', - '-f', '--find-links', - '-i', '--index-url', - '--pre', - '--trusted-host', - '--process-dependency-links', - '--extra-index-url'))): + line_is_ignorable = ( + not line.strip() or + line.strip().startswith('#') or + (skip_match and skip_match(line)) or + line.startswith(( + '-r', '--requirement', + '-Z', '--always-unzip', + '-f', '--find-links', + '-i', '--index-url', + '--pre', + '--trusted-host', + '--process-dependency-links', + '--extra-index-url' + )) + ) + if line_is_ignorable: line = line.rstrip() if line 
not in emitted_options: emitted_options.add(line) @@ -189,7 +194,8 @@ def from_dist(cls, dist, dependency_links): ) if not svn_location: logger.warning( - 'Warning: cannot find svn location for %s', req) + 'Warning: cannot find svn location for %s', req + ) comments.append( '## FIXME: could not find svn URL in dependency_links ' 'for this package:' diff --git a/pip/operations/prepare.py b/pip/operations/prepare.py index 8212b191b69..26fa8ff72f1 100644 --- a/pip/operations/prepare.py +++ b/pip/operations/prepare.py @@ -72,8 +72,7 @@ def prep_for_dist(self): class IsWheel(DistAbstraction): def dist(self, finder): - return list(pkg_resources.find_distributions( - self.req.source_dir))[0] + return list(pkg_resources.find_distributions(self.req.source_dir))[0] def prep_for_dist(self): # FIXME:https://github.com/pypa/pip/issues/1112 @@ -189,12 +188,12 @@ def _prepare_linked_requirement(self, req, resolver): # occurs when the script attempts to unpack the # build directory req.ensure_has_source_dir(self.build_dir) + # If a checkout exists, it's unwise to keep going. version # inconsistencies are logged later, but do not fail the # installation. # FIXME: this won't upgrade when there's an existing # package unpacked in `req.source_dir` - # package unpacked in `req.source_dir` if os.path.exists(os.path.join(req.source_dir, 'setup.py')): raise PreviousBuildDirError( "pip can't proceed with requirements '%s' due to a" @@ -210,9 +209,8 @@ def _prepare_linked_requirement(self, req, resolver): resolver.require_hashes ) # We can't hit this spot and have populate_link return None. - # req.satisfied_by is None here (because we're - # guarded) and upgrade has no impact except when satisfied_by - # is not None. + # req.satisfied_by is None here (because we're guarded) and upgrade + # has no impact except when satisfied_by is not None. 
# Then inside find_requirement existing_applicable -> False # If no new versions are found, DistributionNotFound is raised, # otherwise a result is guaranteed. @@ -224,11 +222,10 @@ def _prepare_linked_requirement(self, req, resolver): # than otherwise. (For example, we can raise VcsHashUnsupported # for a VCS URL rather than HashMissing.) if resolver.require_hashes: - # We could check these first 2 conditions inside - # unpack_url and save repetition of conditions, but then - # we would report less-useful error messages for - # unhashable requirements, complaining that there's no - # hash provided. + # We could check these first 2 conditions inside unpack_url + # and save repetition of conditions, but then we would report + # less-useful error messages for unhashable requirements, + # complaining that there's no hash provided. if is_vcs_url(link): raise VcsHashUnsupported() elif is_file_url(link) and is_dir_url(link): @@ -276,13 +273,11 @@ def _prepare_linked_requirement(self, req, resolver): except requests.HTTPError as exc: logger.critical( 'Could not install requirement %s because of error %s', - req, - exc, + req, exc, ) raise InstallationError( 'Could not install requirement %s because of HTTP ' - 'error %s for URL %s' % - (req, exc, req.link) + 'error %s for URL %s' % (req, exc, req.link) ) abstract_dist = make_abstract_dist(req) abstract_dist.prep_for_dist() @@ -304,8 +299,8 @@ def _prepare_linked_requirement(self, req, resolver): resolver._set_req_to_reinstall(req) else: logger.info( - 'Requirement already satisfied (use ' - '--upgrade to upgrade): %s', + 'Requirement already satisfied ' + '(use --upgrade to upgrade): %s', req, ) return abstract_dist @@ -341,8 +336,8 @@ def _prepare_installed_requirement(self, req, resolver, skip_reason): """ assert req.satisfied_by, "req should have been satisfied but isn't" assert skip_reason is not None, ( - "did not get skip reason skipped but req.satisfied_by " - "is set to %r" % (req.satisfied_by,) + "did not get 
skip reason skipped but req.satisfied_by is set to %r" + % (req.satisfied_by,) ) logger.info( 'Requirement %s: %s (%s)', diff --git a/pip/pep425tags.py b/pip/pep425tags.py index e33b6ce0ed0..a23577da039 100644 --- a/pip/pep425tags.py +++ b/pip/pep425tags.py @@ -51,8 +51,11 @@ def get_impl_version_info(): version.""" if get_abbr_impl() == 'pp': # as per https://github.com/pypa/pip/issues/2882 - return (sys.version_info[0], sys.pypy_version_info.major, - sys.pypy_version_info.minor) + return ( + sys.version_info[0], + sys.pypy_version_info.major, + sys.pypy_version_info.minor + ) else: return sys.version_info[0], sys.version_info[1] @@ -70,8 +73,11 @@ def get_flag(var, fallback, expected=True, warn=True): val = get_config_var(var) if val is None: if warn: - logger.debug("Config variable '%s' is unset, Python ABI tag may " - "be incorrect", var) + logger.debug( + "Config variable '%s' is unset, " + "Python ABI tag may be incorrect", + var + ) return fallback() return val == expected diff --git a/pip/req/req_file.py b/pip/req/req_file.py index 8231c456f5b..e1153b7340e 100644 --- a/pip/req/req_file.py +++ b/pip/req/req_file.py @@ -77,9 +77,10 @@ def parse_requirements(filename, finder=None, comes_from=None, options=None, lines_enum = preprocess(content, options) for line_number, line in lines_enum: - req_iter = process_line(line, filename, line_number, finder, - comes_from, options, session, wheel_cache, - constraint=constraint) + req_iter = process_line( + line, filename, line_number, finder, comes_from, options, session, + wheel_cache, constraint=constraint + ) for req in req_iter: yield req @@ -131,7 +132,8 @@ def process_line(line, filename, line_number, finder=None, comes_from=None, # preserve for the nested code path line_comes_from = '%s %s (line %s)' % ( - '-c' if constraint else '-r', filename, line_number) + '-c' if constraint else '-r', filename, line_number + ) # yield a line requirement if args_str: @@ -297,7 +299,5 @@ def skip_regex(lines_enum, options): 
skip_regex = options.skip_requirements_regex if options else None if skip_regex: pattern = re.compile(skip_regex) - lines_enum = filterfalse( - lambda e: pattern.search(e[1]), - lines_enum) + lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum) return lines_enum diff --git a/pip/req/req_install.py b/pip/req/req_install.py index 4aacb34b404..cb8cc57a0aa 100644 --- a/pip/req/req_install.py +++ b/pip/req/req_install.py @@ -399,7 +399,8 @@ def name(self): def setup_py_dir(self): return os.path.join( self.source_dir, - self.link and self.link.subdirectory_fragment or '') + self.link and self.link.subdirectory_fragment or '' + ) @property def setup_py(self): @@ -475,7 +476,8 @@ def run_egg_info(self): egg_info_cmd + egg_base_option, cwd=self.setup_py_dir, show_stdout=False, - command_desc='python setup.py egg_info') + command_desc='python setup.py egg_info' + ) if not self.req: if isinstance(parse_version(self.pkg_info()["Version"]), Version): @@ -529,24 +531,25 @@ def egg_info_path(self, filename): # Iterate over a copy of ``dirs``, since mutating # a list while iterating over it can cause trouble. # (See https://github.com/pypa/pip/pull/462.) 
- for dir in list(dirs): + for dir_ in list(dirs): # Don't search in anything that looks like a virtualenv # environment - if ( - os.path.lexists( - os.path.join(root, dir, 'bin', 'python') - ) or - os.path.exists( - os.path.join( - root, dir, 'Scripts', 'Python.exe' - ) - )): - dirs.remove(dir) + should_remove = ( + os.path.lexists(os.path.join( + root, dir_, 'bin', 'python' + )) or + os.path.exists(os.path.join( + root, dir_, 'Scripts', 'Python.exe' + )) + ) + if should_remove: + dirs.remove(dir_) # Also don't search through tests - elif dir == 'test' or dir == 'tests': - dirs.remove(dir) - filenames.extend([os.path.join(root, dir) - for dir in dirs]) + elif dir_ == 'test' or dir_ == 'tests': + dirs.remove(dir_) + filenames.extend( + [os.path.join(root, dir_) for dir_ in dirs] + ) filenames = [f for f in filenames if f.endswith('.egg-info')] if not filenames: @@ -716,7 +719,8 @@ def match_markers(self, extras_requested=None): if self.markers is not None: return any( self.markers.evaluate({'extra': extra}) - for extra in extras_requested) + for extra in extras_requested + ) else: return True @@ -725,7 +729,8 @@ def install(self, install_options, global_options=None, root=None, global_options = global_options if global_options is not None else [] if self.editable: self.install_editable( - install_options, global_options, prefix=prefix) + install_options, global_options, prefix=prefix + ) return if self.is_wheel: version = pip.wheel.wheel_version(self.source_dir) @@ -749,7 +754,8 @@ def install(self, install_options, global_options=None, root=None, with TempDirectory(kind="record") as temp_dir: record_filename = os.path.join(temp_dir.path, 'install-record.txt') install_args = self.get_install_args( - global_options, record_filename, root, prefix) + global_options, record_filename, root, prefix + ) msg = 'Running setup.py install for %s' % (self.name,) with open_spinner(msg) as spinner: with indent_log(): @@ -834,9 +840,10 @@ def get_install_args(self, 
global_options, record_filename, root, prefix): if running_under_virtualenv(): py_ver_str = 'python' + sysconfig.get_python_version() - install_args += ['--install-headers', - os.path.join(sys.prefix, 'include', 'site', - py_ver_str, self.name)] + header_location = os.path.join( + sys.prefix, 'include', 'site', py_ver_str, self.name + ) + install_args += ['--install-headers', header_location] return install_args @@ -862,19 +869,14 @@ def install_editable(self, install_options, install_options = list(install_options) + prefix_param with indent_log(): - # FIXME: should we do --install-headers here too? - call_subprocess( - [ - sys.executable, - '-c', - SETUPTOOLS_SHIM % self.setup_py - ] + + command = ( + [sys.executable, '-c', SETUPTOOLS_SHIM % self.setup_py] + list(global_options) + ['develop', '--no-deps'] + - list(install_options), - - cwd=self.setup_py_dir, - show_stdout=False) + list(install_options) + ) + # FIXME: should we do --install-headers here too? + call_subprocess(command, cwd=self.setup_py_dir, show_stdout=False) self.install_succeeded = True @@ -942,8 +944,8 @@ def get_dist(self): dist_name = os.path.splitext(os.path.basename(egg_info))[0] return pkg_resources.Distribution( os.path.dirname(egg_info), - project_name=dist_name, - metadata=metadata) + project_name=dist_name, metadata=metadata + ) @property def has_hash_options(self): diff --git a/pip/req/req_set.py b/pip/req/req_set.py index 1c8a35e29a5..7151b663a8b 100644 --- a/pip/req/req_set.py +++ b/pip/req/req_set.py @@ -40,9 +40,8 @@ def __repr__(self): class RequirementSet(object): - def __init__(self, - require_hashes=False, target_dir=None, use_user_site=False, - pycompile=True): + def __init__(self, require_hashes=False, target_dir=None, + use_user_site=False, pycompile=True): """Create a RequirementSet. 
:param wheel_cache: The pip wheel cache, for passing to @@ -96,9 +95,10 @@ def add_requirement(self, install_req, parent_req_name=None, """ name = install_req.name if not install_req.match_markers(extras_requested): - logger.warning("Ignoring %s: markers '%s' don't match your " - "environment", install_req.name, - install_req.markers) + logger.warning( + "Ignoring %s: markers '%s' don't match your environment", + install_req.name, install_req.markers + ) return [] # This check has to come after we filter requirements with the @@ -125,13 +125,19 @@ def add_requirement(self, install_req, parent_req_name=None, existing_req = self.get_requirement(name) except KeyError: existing_req = None - if (parent_req_name is None and existing_req and not - existing_req.constraint and - existing_req.extras == install_req.extras and not - existing_req.req.specifier == install_req.req.specifier): + already_given = ( + parent_req_name is None and + existing_req and + not existing_req.constraint and + existing_req.extras == install_req.extras and + not existing_req.req.specifier == install_req.req.specifier + ) + if already_given: raise InstallationError( 'Double requirement given: %s (already in %s, name=%r)' - % (install_req, existing_req, name)) + % (install_req, existing_req, name) + ) + if not existing_req: # Add requirement self.requirements[name] = install_req @@ -144,21 +150,30 @@ def add_requirement(self, install_req, parent_req_name=None, # encountered this for scanning. 
result = [] if not install_req.constraint and existing_req.constraint: - if (install_req.link and not (existing_req.link and - install_req.link.path == existing_req.link.path)): + cannot_have_constraint = ( + install_req.link and not ( + existing_req.link and + install_req.link.path == existing_req.link.path + ) + ) + if cannot_have_constraint: self.reqs_to_cleanup.append(install_req) raise InstallationError( "Could not satisfy constraints for '%s': " "installation from path or url cannot be " - "constrained to a version" % name) + "constrained to a version" % name + ) + # If we're now installing a constraint, mark the existing # object for real installation. existing_req.constraint = False existing_req.extras = tuple( sorted(set(existing_req.extras).union( set(install_req.extras)))) - logger.debug("Setting %s extras to: %s", - existing_req, existing_req.extras) + logger.debug( + "Setting %s extras to: %s", + existing_req, existing_req.extras + ) # And now we need to scan this. result = [existing_req] # Canonicalise to the already-added object for the backref @@ -171,17 +186,18 @@ def add_requirement(self, install_req, parent_req_name=None, def has_requirement(self, project_name): name = project_name.lower() - if (name in self.requirements and - not self.requirements[name].constraint or - name in self.requirement_aliases and - not self.requirements[self.requirement_aliases[name]].constraint): - return True - return False + return ( + name in self.requirements and + not self.requirements[name].constraint or + name in self.requirement_aliases and + not self.requirements[self.requirement_aliases[name]].constraint + ) @property def has_requirements(self): - return list(req for req in self.requirements.values() if not - req.constraint) or self.unnamed_requirements + return list( + req for req in self.requirements.values() if not req.constraint + ) or self.unnamed_requirements def get_requirement(self, project_name): for name in project_name, project_name.lower(): @@ 
-227,8 +243,8 @@ def schedule(req): def install(self, install_options, global_options=(), *args, **kwargs): """ - Install everything in this set (after having downloaded and unpacked - the packages) + Install everything in this set (after having downloaded and + unpacked the packages) """ to_install = self._to_install() diff --git a/pip/req/req_uninstall.py b/pip/req/req_uninstall.py index ca161e716e7..16e3170e599 100644 --- a/pip/req/req_uninstall.py +++ b/pip/req/req_uninstall.py @@ -193,8 +193,9 @@ def _stash(self, path): ) def remove(self, auto_confirm=False, verbose=False): - """Remove paths in ``self.paths`` with confirmation (unless - ``auto_confirm`` is True).""" + """Remove paths in ``self.paths`` with confirmation + (unless ``auto_confirm`` is True). + """ if not self.paths: logger.info( @@ -308,8 +309,8 @@ def from_dist(cls, dist): # are in fact in the develop_egg_link case paths_to_remove.add(dist.egg_info) if dist.has_metadata('installed-files.txt'): - for installed_file in dist.get_metadata( - 'installed-files.txt').splitlines(): + lines = dist.get_metadata('installed-files.txt').splitlines() + for installed_file in lines: path = os.path.normpath( os.path.join(dist.egg_info, installed_file) ) @@ -347,8 +348,9 @@ def from_dist(cls, dist): # i.e.
setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg paths_to_remove.add(dist.location) easy_install_egg = os.path.split(dist.location)[1] - easy_install_pth = os.path.join(os.path.dirname(dist.location), - 'easy-install.pth') + easy_install_pth = os.path.join( + os.path.dirname(dist.location), 'easy-install.pth' + ) paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg) elif egg_info_exists and dist.egg_info.endswith('.dist-info'): @@ -364,14 +366,16 @@ def from_dist(cls, dist): '(at %s)' % (link_pointer, dist.project_name, dist.location) ) paths_to_remove.add(develop_egg_link) - easy_install_pth = os.path.join(os.path.dirname(develop_egg_link), - 'easy-install.pth') + easy_install_pth = os.path.join( + os.path.dirname(develop_egg_link), 'easy-install.pth' + ) paths_to_remove.add_pth(easy_install_pth, dist.location) else: logger.debug( 'Not sure how to uninstall: %s - Check: %s', - dist, dist.location) + dist, dist.location + ) # find distutils scripts= scripts if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'): diff --git a/pip/resolve.py b/pip/resolve.py index 250b7313462..a3f8e607861 100644 --- a/pip/resolve.py +++ b/pip/resolve.py @@ -14,8 +14,8 @@ from itertools import chain from pip.exceptions import ( - BestVersionAlreadyInstalled, - DistributionNotFound, HashError, HashErrors, UnsupportedPythonVersion + BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors, + UnsupportedPythonVersion ) from pip.req.req_install import InstallRequirement from pip.utils import dist_in_usersite, ensure_dir diff --git a/pip/utils/__init__.py b/pip/utils/__init__.py index e0091ebecac..c276cc8ceda 100644 --- a/pip/utils/__init__.py +++ b/pip/utils/__init__.py @@ -54,7 +54,8 @@ ZIP_EXTENSIONS = ('.zip', '.whl') TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar') ARCHIVE_EXTENSIONS = ( - ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS) + ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS +) 
SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS try: import bz2 # noqa @@ -97,9 +98,10 @@ def get_prog(): # Retry every half second for up to 3 seconds @retry(stop_max_delay=3000, wait_fixed=500) -def rmtree(dir, ignore_errors=False): - shutil.rmtree(dir, ignore_errors=ignore_errors, - onerror=rmtree_errorhandler) +def rmtree(dir_, ignore_errors=False): + shutil.rmtree( + dir_, ignore_errors=ignore_errors, onerror=rmtree_errorhandler + ) def rmtree_errorhandler(func, path, exc_info): @@ -211,8 +213,10 @@ def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): def split_leading_dir(path): path = path.lstrip('/').lstrip('\\') - if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or - '\\' not in path): + split_by_forward_slash = ( + ('\\' in path and path.find('/') < path.find('\\')) or '\\' not in path + ) + if '/' in path and split_by_forward_slash: return path.split('/', 1) elif '\\' in path: return path.split('\\', 1) @@ -310,9 +314,9 @@ def dist_in_site_packages(dist): Return True if given Distribution is installed in sysconfig.get_python_lib(). 
""" - return normalize_path( - dist_location(dist) - ).startswith(normalize_path(site_packages)) + return normalize_path(dist_location(dist)).startswith( + normalize_path(site_packages) + ) def dist_is_editable(dist): @@ -590,35 +594,48 @@ def untar_file(filename, location): def unpack_file(filename, location, content_type, link): filename = os.path.realpath(filename) - if (content_type == 'application/zip' or - filename.lower().endswith(ZIP_EXTENSIONS) or - zipfile.is_zipfile(filename)): - unzip_file( - filename, - location, - flatten=not filename.endswith('.whl') + + is_zip_file = ( + content_type == 'application/zip' or + filename.lower().endswith(ZIP_EXTENSIONS) or + zipfile.is_zipfile(filename) + ) + if is_zip_file: + unzip_file(filename, location, flatten=not filename.endswith('.whl')) + return + + is_tarball = ( + content_type == 'application/x-gzip' or + tarfile.is_tarfile(filename) or + filename.lower().endswith( + TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS ) - elif (content_type == 'application/x-gzip' or - tarfile.is_tarfile(filename) or - filename.lower().endswith( - TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)): + ) + if is_tarball: untar_file(filename, location) - elif (content_type and content_type.startswith('text/html') and - is_svn_page(file_contents(filename))): + return + + is_svn_html_page = ( + content_type and + content_type.startswith('text/html') and + is_svn_page(file_contents(filename)) + ) + if is_svn_html_page: # We don't really care about this from pip.vcs.subversion import Subversion Subversion('svn+' + link.url).unpack(location) - else: - # FIXME: handle? - # FIXME: magic signatures? - logger.critical( - 'Cannot unpack file %s (downloaded from %s, content-type: %s); ' - 'cannot detect archive format', - filename, location, content_type, - ) - raise InstallationError( - 'Cannot determine archive format of %s' % location - ) + return + + # FIXME: handle? + # FIXME: magic signatures? 
+ logger.critical( + 'Cannot unpack file %s (downloaded from %s, content-type: %s); ' + 'cannot detect archive format', + filename, location, content_type, + ) + raise InstallationError( + 'Cannot determine archive format of %s' % location + ) def call_subprocess(cmd, show_stdout=True, cwd=None, @@ -664,7 +681,8 @@ def call_subprocess(cmd, show_stdout=True, cwd=None, try: proc = subprocess.Popen( cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout, - cwd=cwd, env=env) + cwd=cwd, env=env + ) except Exception as exc: logger.critical( "Error %s while executing command %s", exc, command_desc, @@ -697,8 +715,11 @@ def call_subprocess(cmd, show_stdout=True, cwd=None, spinner.finish("done") if proc.returncode: if on_returncode == 'raise': - if (logger.getEffectiveLevel() > std_logging.DEBUG and - not show_stdout): + should_show_output = ( + logger.getEffectiveLevel() > std_logging.DEBUG and + not show_stdout + ) + if should_show_output: logger.info( 'Complete output from command %s:', command_desc, ) diff --git a/pip/utils/outdated.py b/pip/utils/outdated.py index 517d3bc9a1c..6066a0174d4 100644 --- a/pip/utils/outdated.py +++ b/pip/utils/outdated.py @@ -142,10 +142,13 @@ def pip_version_check(session, options): remote_version = packaging_version.parse(pypi_version) # Determine if our pypi_version is older - if (pip_version < remote_version and - pip_version.base_version != remote_version.base_version): - # Advise "python -m pip" on Windows to avoid issues - # with overwriting pip.exe. + newer_version_available = ( + pip_version < remote_version and + pip_version.base_version != remote_version.base_version + ) + if newer_version_available: + # Advise "python -m pip" on Windows to avoid issues with + # overwriting pip.exe. 
if WINDOWS: pip_cmd = "python -m pip" else: diff --git a/pip/utils/packaging.py b/pip/utils/packaging.py index e789d45e61e..d8a27b42b50 100644 --- a/pip/utils/packaging.py +++ b/pip/utils/packaging.py @@ -48,16 +48,16 @@ def check_dist_requires_python(dist): requires_python = pkg_info_dict.get('Requires-Python') try: if not check_requires_python(requires_python): + version_str = '.'.join(map(str, sys.version_info[:3])) raise exceptions.UnsupportedPythonVersion( - "%s requires Python '%s' but the running Python is %s" % ( - dist.project_name, - requires_python, - '.'.join(map(str, sys.version_info[:3])),) + "%s requires Python '%s' but the running Python is %s" + % (dist.project_name, requires_python, version_str) ) except specifiers.InvalidSpecifier as e: logger.warning( - "Package %s has an invalid Requires-Python entry %s - %s" % ( - dist.project_name, requires_python, e)) + "Package %s has an invalid Requires-Python entry %s - %s" + % (dist.project_name, requires_python, e) + ) return diff --git a/pip/utils/temp_dir.py b/pip/utils/temp_dir.py index d3307d51650..6c9f28ef622 100644 --- a/pip/utils/temp_dir.py +++ b/pip/utils/temp_dir.py @@ -30,7 +30,7 @@ class TempDirectory(object): Deletes the temporary directory and sets path attribute to None When used as a context manager, a temporary directory is created on - entering the context and, if the delete attribute is True, on exiting the + entering the context and if the delete attribute is True, on exiting the context the created directory is deleted. 
""" diff --git a/pip/vcs/__init__.py b/pip/vcs/__init__.py index 204155a8575..58c215a434c 100644 --- a/pip/vcs/__init__.py +++ b/pip/vcs/__init__.py @@ -74,8 +74,9 @@ def get_backend_name(self, location): """ for vc_type in self._registry.values(): if vc_type.controls_location(location): - logger.debug('Determine that %s uses VCS: %s', - location, vc_type.name) + logger.debug( + 'Determine that %s uses VCS: %s', location, vc_type.name + ) return vc_type.name return None @@ -207,55 +208,47 @@ def check_destination(self, dest, url, rev_options, rev_display): if self.compare_urls(existing_url, url): logger.debug( '%s in %s exists, and has correct URL (%s)', - self.repo_name.title(), - display_path(dest), - url, + self.repo_name.title(), display_path(dest), url, ) if not self.check_version(dest, rev_options): logger.info( 'Updating %s %s%s', - display_path(dest), - self.repo_name, - rev_display, + display_path(dest), self.repo_name, rev_display, ) self.update(dest, rev_options) else: logger.info( - 'Skipping because already up-to-date.') + 'Skipping because already up-to-date.' + ) else: logger.warning( '%s %s in %s exists with URL %s', - self.name, - self.repo_name, - display_path(dest), + self.name, self.repo_name, display_path(dest), existing_url, ) - prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', - ('s', 'i', 'w', 'b')) + prompt = ( + '(s)witch, (i)gnore, (w)ipe, (b)ackup ', + ('s', 'i', 'w', 'b') + ) else: logger.warning( 'Directory %s already exists, and is not a %s %s.', - dest, - self.name, - self.repo_name, + dest, self.name, self.repo_name, ) prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b')) if prompt: logger.warning( 'The plan is to install the %s repository %s', - self.name, - url, + self.name, url, ) - response = ask_path_exists('What to do? %s' % prompt[0], - prompt[1]) + response = ask_path_exists( + 'What to do? 
%s' % prompt[0], prompt[1] + ) if response == 's': logger.info( 'Switching %s %s to %s%s', - self.repo_name, - display_path(dest), - url, - rev_display, + self.repo_name, display_path(dest), url, rev_display, ) self.switch(dest, url, rev_options) elif response == 'i': @@ -319,18 +312,19 @@ def run_command(self, cmd, show_stdout=True, cwd=None, """ cmd = [self.name] + cmd try: - return call_subprocess(cmd, show_stdout, cwd, - on_returncode, - command_desc, extra_environ, - spinner) + return call_subprocess( + cmd, show_stdout, cwd, on_returncode, command_desc, + extra_environ, spinner + ) except OSError as e: # errno.ENOENT = no such file or directory # In other words, the VCS executable isn't available if e.errno == errno.ENOENT: raise BadCommand( 'Cannot find command %r - do you have ' - '%r installed and in your ' - 'PATH?' % (self.name, self.name)) + '%r installed and in your PATH?' + % (self.name, self.name) + ) else: raise # re-raise exception if a different error occurred @@ -341,8 +335,9 @@ def controls_location(cls, location): It is meant to be overridden to implement smarter detection mechanisms for specific vcs. 
""" - logger.debug('Checking in %s for %s (%s)...', - location, cls.dirname, cls.name) + logger.debug( + 'Checking in %s for %s (%s)...', location, cls.dirname, cls.name + ) path = os.path.join(location, cls.dirname) return os.path.exists(path) @@ -351,14 +346,12 @@ def get_src_requirement(dist, location): version_control = vcs.get_backend_from_location(location) if version_control: try: - return version_control().get_src_requirement(dist, - location) + return version_control().get_src_requirement(dist, location) except BadCommand: logger.warning( 'cannot determine version of editable source in %s ' '(%s command not found in path)', - location, - version_control.name, + location, version_control.name, ) return dist.as_requirement() logger.warning( diff --git a/pip/vcs/bazaar.py b/pip/vcs/bazaar.py index 8a53d741513..e5072591c54 100644 --- a/pip/vcs/bazaar.py +++ b/pip/vcs/bazaar.py @@ -79,8 +79,7 @@ def get_url(self, location): urls = self.run_command(['info'], show_stdout=False, cwd=location) for line in urls.splitlines(): line = line.strip() - for x in ('checkout of branch: ', - 'parent branch: '): + for x in ('checkout of branch: ', 'parent branch: '): if line.startswith(x): repo = line.split(x)[1] if self._is_local_repository(repo): @@ -89,8 +88,7 @@ def get_url(self, location): return None def get_revision(self, location): - revision = self.run_command( - ['revno'], show_stdout=False, cwd=location) + revision = self.run_command(['revno'], show_stdout=False, cwd=location) return revision.splitlines()[-1] def get_src_requirement(self, dist, location): diff --git a/pip/vcs/git.py b/pip/vcs/git.py index c1d02ad766e..2ef8fb11fac 100644 --- a/pip/vcs/git.py +++ b/pip/vcs/git.py @@ -145,12 +145,10 @@ def obtain(self, dest): # Only do a checkout if rev_options differs from HEAD if not self.check_version(dest, rev_options): self.run_command( - ['fetch', '-q', url] + rev_options, - cwd=dest, + ['fetch', '-q', url] + rev_options, cwd=dest, ) self.run_command( - 
['checkout', '-q', 'FETCH_HEAD'], - cwd=dest, + ['checkout', '-q', 'FETCH_HEAD'], cwd=dest, ) #: repo may contain submodules @@ -160,7 +158,8 @@ def get_url(self, location): """Return URL of the first remote encountered.""" remotes = self.run_command( ['config', '--get-regexp', r'remote\..*\.url'], - show_stdout=False, cwd=location) + show_stdout=False, cwd=location + ) remotes = remotes.splitlines() found_remote = remotes[0] for remote in remotes: @@ -172,13 +171,15 @@ def get_url(self, location): def get_revision(self, location): current_rev = self.run_command( - ['rev-parse', 'HEAD'], show_stdout=False, cwd=location) + ['rev-parse', 'HEAD'], show_stdout=False, cwd=location + ) return current_rev.strip() def get_full_refs(self, location): """Yields tuples of (commit, ref) for branches and tags""" - output = self.run_command(['show-ref'], - show_stdout=False, cwd=location) + output = self.run_command( + ['show-ref'], show_stdout=False, cwd=location + ) for line in output.strip().splitlines(): commit, ref = line.split(' ', 1) yield commit.strip(), ref.strip() @@ -222,8 +223,9 @@ def get_short_refs(self, location): def _get_subdirectory(self, location): """Return the relative path of setup.py to the git repo root.""" # find the repo root - git_dir = self.run_command(['rev-parse', '--git-dir'], - show_stdout=False, cwd=location).strip() + git_dir = self.run_command( + ['rev-parse', '--git-dir'], show_stdout=False, cwd=location + ).strip() if not os.path.isabs(git_dir): git_dir = os.path.join(location, git_dir) root_dir = os.path.join(git_dir, '..') @@ -290,14 +292,16 @@ def controls_location(cls, location): if super(Git, cls).controls_location(location): return True try: - r = cls().run_command(['rev-parse'], - cwd=location, - show_stdout=False, - on_returncode='ignore') + r = cls().run_command( + ['rev-parse'], cwd=location, show_stdout=False, + on_returncode='ignore' + ) return not r except BadCommand: - logger.debug("could not determine if %s is under git control 
" - "because git is not available", location) + logger.debug( + "Could not determine if %s is under git control " + "because git is not available", location + ) return False diff --git a/pip/vcs/mercurial.py b/pip/vcs/mercurial.py index c9226d6a3ec..40f9579a3ec 100644 --- a/pip/vcs/mercurial.py +++ b/pip/vcs/mercurial.py @@ -68,7 +68,8 @@ def obtain(self, dest): def get_url(self, location): url = self.run_command( ['showconfig', 'paths.default'], - show_stdout=False, cwd=location).strip() + show_stdout=False, cwd=location + ).strip() if self._is_local_repository(url): url = path_to_url(url) return url.strip() @@ -76,13 +77,15 @@ def get_url(self, location): def get_revision(self, location): current_revision = self.run_command( ['parents', '--template={rev}'], - show_stdout=False, cwd=location).strip() + show_stdout=False, cwd=location + ).strip() return current_revision def get_revision_hash(self, location): current_rev_hash = self.run_command( ['parents', '--template={node}'], - show_stdout=False, cwd=location).strip() + show_stdout=False, cwd=location + ).strip() return current_rev_hash def get_src_requirement(self, dist, location): diff --git a/pip/vcs/subversion.py b/pip/vcs/subversion.py index 77b66c91e09..213e864716f 100644 --- a/pip/vcs/subversion.py +++ b/pip/vcs/subversion.py @@ -33,9 +33,7 @@ def get_info(self, location): assert not location.rstrip('/').endswith(self.dirname), \ 'Bad directory: %s' % location output = self.run_command( - ['info', location], - show_stdout=False, - extra_environ={'LANG': 'C'}, + ['info', location], show_stdout=False, extra_environ={'LANG': 'C'} ) match = _svn_url_re.search(output) if not match: @@ -69,7 +67,8 @@ def export(self, location): rmtree(location) self.run_command( ['export'] + rev_options + [url, location], - show_stdout=False) + show_stdout=False + ) def switch(self, dest, url, rev_options): self.run_command(['switch'] + rev_options + [url, dest]) @@ -172,9 +171,7 @@ def _get_svn_url_rev(self, location): else: # 
subversion >= 1.7 does not have the 'entries' file data = '' - if (data.startswith('8') or - data.startswith('9') or - data.startswith('10')): + if data.startswith(('8', '9', '10')): data = list(map(str.splitlines, data.split('\n\x0c\n'))) del data[0][0] # get rid of the '8' url = data[0][3] diff --git a/pip/wheel.py b/pip/wheel.py index 31cceed19ef..de7d11deb0b 100644 --- a/pip/wheel.py +++ b/pip/wheel.py @@ -52,9 +52,8 @@ def rehash(path, algo='sha256', blocksize=1 << 20): for block in read_chunks(f, size=blocksize): length += len(block) h.update(block) - digest = 'sha256=' + urlsafe_b64encode( - h.digest() - ).decode('latin1').rstrip('=') + digest = 'sha256=' + digest += urlsafe_b64encode(h.digest()).decode('latin1').rstrip('=') return (digest, length) @@ -86,8 +85,9 @@ def fix_script(path): return True -dist_info_re = re.compile(r"""^(?P(?P.+?)(-(?P.+?))?) - \.dist-info$""", re.VERBOSE) +dist_info_re = re.compile( + r"^(?P(?P.+?)(-(?P.+?))?)\.dist-info$", re.VERBOSE +) def root_is_purelib(name, wheeldir): @@ -623,12 +623,17 @@ def _install_build_reqs(self, reqs, prefix): # we don't recurse trying to build a self-hosting build system. finder = copy.copy(self.finder) finder.format_control = FormatControl(set(), set()) - urls = [finder.find_requirement(InstallRequirement.from_line(r), - upgrade=False).url - for r in reqs] + urls = [ + finder.find_requirement( + InstallRequirement.from_line(r), upgrade=False + ).url + for r in reqs + ] - args = [sys.executable, '-m', 'pip', 'install', '--ignore-installed', - '--prefix', prefix] + list(urls) + args = [ + sys.executable, '-m', 'pip', 'install', '--ignore-installed', + '--prefix', prefix + ] + list(urls) with open_spinner("Installing build dependencies") as spinner: call_subprocess(args, show_stdout=False, spinner=spinner) @@ -642,19 +647,22 @@ def _build_one(self, req, output_dir, python_tag=None): logger.warning( "This version of pip does not implement PEP 516, so " "it cannot build a wheel without setuptools. 
You may need to " - "upgrade to a newer version of pip.") + "upgrade to a newer version of pip." + ) # Install build deps into temporary directory (PEP 518) with BuildEnvironment(self.no_clean) as prefix: self._install_build_reqs(build_reqs, prefix) - return self._build_one_inside_env(req, output_dir, - python_tag=python_tag, - isolate=True) + return self._build_one_inside_env( + req, output_dir, python_tag=python_tag, isolate=True + ) def _build_one_inside_env(self, req, output_dir, python_tag=None, isolate=False): with TempDirectory(kind="wheel") as temp_dir: - if self.__build_one(req, temp_dir.path, python_tag=python_tag, - isolate=isolate): + built_it = self.__build_one( + req, temp_dir.path, python_tag=python_tag, isolate=isolate + ) + if built_it: try: wheel_name = os.listdir(temp_dir.path)[0] wheel_path = os.path.join(output_dir, wheel_name) @@ -698,9 +706,10 @@ def __build_one(self, req, tempd, python_tag=None, isolate=False): env['PYTHONNOUSERSITE'] = '1' try: - call_subprocess(wheel_args, cwd=req.setup_py_dir, - extra_environ=env, - show_stdout=False, spinner=spinner) + call_subprocess( + wheel_args, cwd=req.setup_py_dir, extra_environ=env, + show_stdout=False, spinner=spinner + ) return True except: spinner.finish("error") @@ -740,7 +749,8 @@ def build(self, session, autobuilding=False): if req.is_wheel: if not autobuilding: logger.info( - 'Skipping %s, due to already being wheel.', req.name) + 'Skipping %s, due to already being wheel.', req.name + ) elif autobuilding and req.editable: pass elif autobuilding and req.link and not req.link.is_artifact: @@ -760,7 +770,8 @@ def build(self, session, autobuilding=False): canonicalize_name(req.name)): logger.info( "Skipping bdist_wheel for %s, due to binaries " - "being disabled for it.", req.name) + "being disabled for it.", req.name + ) continue buildset.append(req) @@ -782,8 +793,9 @@ def build(self, session, autobuilding=False): try: ensure_dir(output_dir) except OSError as e: - logger.warning("Building 
wheel for %s failed: %s", - req.name, e) + logger.warning( + "Building wheel for %s failed: %s", req.name, e + ) build_failure.append(req) continue else: @@ -795,30 +807,36 @@ def build(self, session, autobuilding=False): if wheel_file: build_success.append(req) if autobuilding: - # XXX: This is mildly duplicative with prepare_files, + # XXX: This is mildly duplicative with Resolver, # but not close enough to pull out to a single common # method. # The code below assumes temporary source dirs - # prevent it doing bad things. - if req.source_dir and not os.path.exists(os.path.join( - req.source_dir, PIP_DELETE_MARKER_FILENAME)): + bad_src_dir = ( + req.source_dir and + not os.path.exists(os.path.join( + req.source_dir, PIP_DELETE_MARKER_FILENAME + )) + ) + if bad_src_dir: raise AssertionError( - "bad source dir - missing marker") + "bad source dir - missing marker" + ) # Delete the source we built the wheel from req.remove_temporary_source() # set the build directory again - name is known from - # the work prepare_files did. + # the work Resolver did. req.source_dir = req.build_location( self.preparer.build_dir ) # Update the link for this. - req.link = pip.index.Link( - path_to_url(wheel_file)) + req.link = pip.index.Link(path_to_url(wheel_file)) assert req.link.is_wheel # extract the wheel into the dir unpack_url( req.link, req.source_dir, None, False, - session=session) + session=session + ) else: build_failure.append(req)