diff --git a/docs/html/conf.py b/docs/html/conf.py index 33a2b9767c0..49ca2824b1e 100644 --- a/docs/html/conf.py +++ b/docs/html/conf.py @@ -16,7 +16,7 @@ import re import sys -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' +on_rtd = os.environ.get("READTHEDOCS", None) == "True" docs_dir = os.path.dirname(os.path.dirname(__file__)) # If extensions (or modules to document with autodoc) are in another directory, @@ -30,13 +30,13 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. # extensions = ['sphinx.ext.autodoc'] -extensions = ['sphinx.ext.extlinks', 'pip_sphinxext', 'sphinx.ext.intersphinx'] +extensions = ["sphinx.ext.extlinks", "pip_sphinxext", "sphinx.ext.intersphinx"] # intersphinx intersphinx_cache_limit = 0 intersphinx_mapping = { - 'pypug': ('https://packaging.python.org/', None), - 'pypa': ('https://www.pypa.io/en/latest/', None), + "pypug": ("https://packaging.python.org/", None), + "pypa": ("https://www.pypa.io/en/latest/", None), } @@ -44,17 +44,17 @@ templates_path = [] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = 'pip' -copyright = '2008-2017, PyPA' +project = "pip" +copyright = "2008-2017, PyPA" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -62,7 +62,7 @@ # # The short X.Y version. -version = release = 'dev' +version = release = "dev" # Readthedocs seems to install pip as an egg (via setup.py install) which # is somehow resulting in "import pip" picking up an older copy of pip. @@ -70,14 +70,14 @@ # read the version direct from the __init__.py file. (Yes, this is # fragile, but it works...) 
-pip_init = os.path.join(docs_dir, '..', 'src', 'pip', '__init__.py') +pip_init = os.path.join(docs_dir, "..", "src", "pip", "__init__.py") with open(pip_init) as f: for line in f: m = re.match(r'__version__ = "(.*)"', line) if m: __version__ = m.group(1) # The short X.Y version. - version = '.'.join(__version__.split('.')[:2]) + version = ".".join(__version__.split(".")[:2]) # The full version, including alpha/beta/rc tags. release = __version__ break @@ -95,14 +95,14 @@ # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. -today_fmt = '%B %d, %Y' +today_fmt = "%B %d, %Y" # List of documents that shouldn't be included in the build. # unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. -exclude_patterns = ['build/'] +exclude_patterns = ["build/"] # The reST default role (used for this markup: `text`) to use for all documents # default_role = None @@ -119,15 +119,15 @@ # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] extlinks = { - 'issue': ('https://github.com/pypa/pip/issues/%s', '#'), - 'pull': ('https://github.com/pypa/pip/pull/%s', 'PR #'), - 'pypi': ('https://pypi.org/project/%s', ''), + "issue": ("https://github.com/pypa/pip/issues/%s", "#"), + "pull": ("https://github.com/pypa/pip/pull/%s", "PR #"), + "pypi": ("https://pypi.org/project/%s", ""), } # -- Options for HTML output -------------------------------------------------- @@ -140,10 +140,10 @@ # further. For a list of options available for each theme, see the # documentation. 
html_theme_options = { - 'collapsiblesidebar': True, - 'externalrefs': True, - 'navigation_depth': 3, - 'issues_url': 'https://github.com/pypa/pip/issues', + "collapsiblesidebar": True, + "externalrefs": True, + "navigation_depth": 3, + "issues_url": "https://github.com/pypa/pip/issues", } # Add any paths that contain custom themes here, relative to this directory. @@ -171,7 +171,7 @@ # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -html_last_updated_fmt = '%b %d, %Y' +html_last_updated_fmt = "%b %d, %Y" # If true, the Docutils Smart Quotes transform (originally based on # SmartyPants) will be used to convert characters like quotes and dashes @@ -190,7 +190,7 @@ smartquotes_action = "qe" # Custom sidebar templates, maps document names to template names. -html_sidebars = {'**': ['localtoc.html', 'relations.html'], 'index': ['localtoc.html']} +html_sidebars = {"**": ["localtoc.html", "relations.html"], "index": ["localtoc.html"]} # Additional templates that should be rendered to pages, maps page names to # template names. @@ -217,7 +217,7 @@ # html_file_suffix = '' # Output file base name for HTML help builder. -htmlhelp_basename = 'pipdocs' +htmlhelp_basename = "pipdocs" # -- Options for LaTeX output ------------------------------------------------- @@ -231,7 +231,7 @@ # Grouping the document tree into LaTeX files. 
List of tuples # (source start file, target name, title, author, documentclass [howto/manual]) latex_documents = [ - ('index', 'pip.tex', u'pip Documentation', u'pip developers', 'manual') + ("index", "pip.tex", u"pip Documentation", u"pip developers", "manual") ] # The name of an image file (relative to this directory) to place at the top of @@ -255,18 +255,18 @@ # List of manual pages generated man_pages = [ - ('index', 'pip', u'package manager for Python packages', u'pip developers', 1) + ("index", "pip", u"package manager for Python packages", u"pip developers", 1) ] # Here, we crawl the entire man/commands/ directory and list every file with # appropriate name and details -man_dir = os.path.join(docs_dir, 'man/') -raw_subcommands = glob.glob(os.path.join(man_dir, 'commands/*.rst')) +man_dir = os.path.join(docs_dir, "man/") +raw_subcommands = glob.glob(os.path.join(man_dir, "commands/*.rst")) if not raw_subcommands: - raise FileNotFoundError('The individual subcommand manpages could not be found!') + raise FileNotFoundError("The individual subcommand manpages could not be found!") for fname in raw_subcommands: fname_base = fname[len(man_dir) : -4] - outname = 'pip-' + fname_base[9:] - description = u'description of {} command'.format(outname.replace('-', ' ')) + outname = "pip-" + fname_base[9:] + description = u"description of {} command".format(outname.replace("-", " ")) - man_pages.append((fname_base, outname, description, u'pip developers', 1)) + man_pages.append((fname_base, outname, description, u"pip developers", 1)) diff --git a/docs/pip_sphinxext.py b/docs/pip_sphinxext.py index eaba2979135..41bcc7282dc 100644 --- a/docs/pip_sphinxext.py +++ b/docs/pip_sphinxext.py @@ -17,7 +17,7 @@ class PipCommandUsage(rst.Directive): def run(self): cmd = create_command(self.arguments[0]) - usage = dedent(cmd.usage.replace('%prog', 'pip {}'.format(cmd.name))).strip() + usage = dedent(cmd.usage.replace("%prog", "pip {}".format(cmd.name))).strip() node = 
nodes.literal_block(usage, usage) return [node] @@ -31,7 +31,7 @@ def run(self): desc = ViewList() cmd = create_command(self.arguments[0]) description = dedent(cmd.__doc__) - for line in description.split('\n'): + for line in description.split("\n"): desc.append(line, "") self.state.nested_parse(desc, 0, node) return [node] @@ -54,7 +54,7 @@ def _format_option(self, option, cmd_name=None): metavar = option.metavar or option.dest.lower() line += " <%s>" % metavar.lower() # fix defaults - opt_help = option.help.replace('%default', str(option.default)) + opt_help = option.help.replace("%default", str(option.default)) # fix paths with sys.prefix opt_help = opt_help.replace(sys.prefix, "") return [bookmark_line, "", line, "", " %s" % opt_help, ""] @@ -77,12 +77,12 @@ def run(self): class PipGeneralOptions(PipOptions): def process_options(self): - self._format_options([o() for o in cmdoptions.general_group['options']]) + self._format_options([o() for o in cmdoptions.general_group["options"]]) class PipIndexOptions(PipOptions): def process_options(self): - self._format_options([o() for o in cmdoptions.index_group['options']]) + self._format_options([o() for o in cmdoptions.index_group["options"]]) class PipCommandOptions(PipOptions): @@ -94,8 +94,8 @@ def process_options(self): def setup(app): - app.add_directive('pip-command-usage', PipCommandUsage) - app.add_directive('pip-command-description', PipCommandDescription) - app.add_directive('pip-command-options', PipCommandOptions) - app.add_directive('pip-general-options', PipGeneralOptions) - app.add_directive('pip-index-options', PipIndexOptions) + app.add_directive("pip-command-usage", PipCommandUsage) + app.add_directive("pip-command-description", PipCommandDescription) + app.add_directive("pip-command-options", PipCommandOptions) + app.add_directive("pip-general-options", PipGeneralOptions) + app.add_directive("pip-index-options", PipIndexOptions) diff --git a/setup.py b/setup.py index 28af6cc0c29..42a39092f95 100644 
--- a/setup.py +++ b/setup.py @@ -11,7 +11,7 @@ def read(*parts): # intentionally *not* adding an encoding option to open, See: # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690 - with codecs.open(os.path.join(here, *parts), 'r') as fp: + with codecs.open(os.path.join(here, *parts), "r") as fp: return fp.read() @@ -24,14 +24,14 @@ def find_version(*file_paths): raise RuntimeError("Unable to find version string.") -long_description = read('README.rst') +long_description = read("README.rst") setup( name="pip", version=find_version("src", "pip", "__init__.py"), description="The PyPA recommended tool for installing Python packages.", long_description=long_description, - license='MIT', + license="MIT", classifiers=[ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", @@ -47,10 +47,10 @@ def find_version(*file_paths): "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", ], - url='https://pip.pypa.io/', - keywords='distutils easy_install egg setuptools wheel virtualenv', - author='The pip developers', - author_email='pypa-dev@groups.google.com', + url="https://pip.pypa.io/", + keywords="distutils easy_install egg setuptools wheel virtualenv", + author="The pip developers", + author_email="pypa-dev@groups.google.com", package_dir={"": "src"}, packages=find_packages(where="src", exclude=["contrib", "docs", "tests*", "tasks"]), package_data={ @@ -67,5 +67,5 @@ def find_version(*file_paths): ] }, zip_safe=False, - python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*', + python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*", ) diff --git a/src/pip/__main__.py b/src/pip/__main__.py index 0c223f8c187..93972f07836 100644 --- a/src/pip/__main__.py +++ b/src/pip/__main__.py @@ -5,7 +5,7 @@ # If we are running from a wheel, add the wheel to sys.path # This allows the usage python pip-*.whl/pip install pip-*.whl -if __package__ == '': +if 
__package__ == "": # __file__ is pip-*.whl/pip/__main__.py # first dirname call strips of '/__main__.py', second strips off '/pip' # Resulting path is the name of the wheel itself @@ -15,5 +15,5 @@ from pip._internal import main as _main # isort:skip # noqa -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(_main()) diff --git a/src/pip/_internal/__init__.py b/src/pip/_internal/__init__.py index bb5cbff323c..29b14bc5663 100755 --- a/src/pip/_internal/__init__.py +++ b/src/pip/_internal/__init__.py @@ -9,10 +9,7 @@ # We ignore certain warnings from urllib3, since they are not relevant to pip's # usecases. -from pip._vendor.urllib3.exceptions import ( - DependencyWarning, - InsecureRequestWarning, -) +from pip._vendor.urllib3.exceptions import DependencyWarning, InsecureRequestWarning import pip._internal.utils.inject_securetransport # noqa from pip._internal.cli.autocompletion import autocomplete @@ -50,7 +47,7 @@ def main(args=None): # Needed for locale.getpreferredencoding(False) to work # in pip._internal.utils.encoding.auto_decode try: - locale.setlocale(locale.LC_ALL, '') + locale.setlocale(locale.LC_ALL, "") except locale.Error as e: # setlocale can apparently crash if locale are uninitialized logger.debug("Ignoring error %s when setting locale", e) diff --git a/src/pip/_internal/build_env.py b/src/pip/_internal/build_env.py index fd51a759c52..108745f298f 100644 --- a/src/pip/_internal/build_env.py +++ b/src/pip/_internal/build_env.py @@ -33,9 +33,9 @@ def __init__(self, path): self.path = path self.setup = False self.bin_dir = get_paths( - 'nt' if os.name == 'nt' else 'posix_prefix', - vars={'base': path, 'platbase': path}, - )['scripts'] + "nt" if os.name == "nt" else "posix_prefix", + vars={"base": path, "platbase": path}, + )["scripts"] # Note: prefer distutils' sysconfig to get the # library paths so PyPy is correctly supported. 
purelib = get_python_lib(plat_specific=False, prefix=path) @@ -57,7 +57,7 @@ def __init__(self): self._prefixes = OrderedDict( ( (name, _Prefix(os.path.join(self._temp_dir.path, name))) - for name in ('normal', 'overlay') + for name in ("normal", "overlay") ) ) @@ -77,13 +77,13 @@ def __init__(self): get_python_lib(plat_specific=True), ) } - self._site_dir = os.path.join(self._temp_dir.path, 'site') + self._site_dir = os.path.join(self._temp_dir.path, "site") if not os.path.exists(self._site_dir): os.mkdir(self._site_dir) - with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp: + with open(os.path.join(self._site_dir, "sitecustomize.py"), "w") as fp: fp.write( textwrap.dedent( - ''' + """ import os, site, sys # First, drop system-sites related paths. @@ -106,18 +106,18 @@ def __init__(self): for path in {lib_dirs!r}: assert not path in sys.path site.addsitedir(path) - ''' + """ ).format(system_sites=system_sites, lib_dirs=self._lib_dirs) ) def __enter__(self): self._save_env = { name: os.environ.get(name, None) - for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH') + for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH") } path = self._bin_dirs[:] - old_path = self._save_env['PATH'] + old_path = self._save_env["PATH"] if old_path: path.extend(old_path.split(os.pathsep)) @@ -125,9 +125,9 @@ def __enter__(self): os.environ.update( { - 'PATH': os.pathsep.join(path), - 'PYTHONNOUSERSITE': '1', - 'PYTHONPATH': os.pathsep.join(pythonpath), + "PATH": os.pathsep.join(path), + "PYTHONNOUSERSITE": "1", + "PYTHONPATH": os.pathsep.join(pythonpath), } ) @@ -176,39 +176,39 @@ def install_requirements( args = [ sys.executable, os.path.dirname(pip_location), - 'install', - '--ignore-installed', - '--no-user', - '--prefix', + "install", + "--ignore-installed", + "--no-user", + "--prefix", prefix.path, - '--no-warn-script-location', + "--no-warn-script-location", ] # type: List[str] if logger.getEffectiveLevel() <= logging.DEBUG: - args.append('-v') - for 
format_control in ('no_binary', 'only_binary'): + args.append("-v") + for format_control in ("no_binary", "only_binary"): formats = getattr(finder.format_control, format_control) args.extend( ( - '--' + format_control.replace('_', '-'), - ','.join(sorted(formats or {':none:'})), + "--" + format_control.replace("_", "-"), + ",".join(sorted(formats or {":none:"})), ) ) index_urls = finder.index_urls if index_urls: - args.extend(['-i', index_urls[0]]) + args.extend(["-i", index_urls[0]]) for extra_index in index_urls[1:]: - args.extend(['--extra-index-url', extra_index]) + args.extend(["--extra-index-url", extra_index]) else: - args.append('--no-index') + args.append("--no-index") for link in finder.find_links: - args.extend(['--find-links', link]) + args.extend(["--find-links", link]) for host in finder.trusted_hosts: - args.extend(['--trusted-host', host]) + args.extend(["--trusted-host", host]) if finder.allow_all_prereleases: - args.append('--pre') - args.append('--') + args.append("--pre") + args.append("--") args.extend(requirements) with open_spinner(message) as spinner: call_subprocess(args, spinner=spinner) diff --git a/src/pip/_internal/cache.py b/src/pip/_internal/cache.py index 3d49e9521a4..5d70af31d55 100644 --- a/src/pip/_internal/cache.py +++ b/src/pip/_internal/cache.py @@ -200,7 +200,7 @@ class WheelCache(Cache): def __init__(self, cache_dir, format_control): # type: (str, FormatControl) -> None - super(WheelCache, self).__init__(cache_dir, format_control, {'binary'}) + super(WheelCache, self).__init__(cache_dir, format_control, {"binary"}) self._wheel_cache = SimpleWheelCache(cache_dir, format_control) self._ephem_cache = EphemWheelCache(format_control) diff --git a/src/pip/_internal/cli/autocompletion.py b/src/pip/_internal/cli/autocompletion.py index 2d0ee6d43c4..81e9d8423e4 100644 --- a/src/pip/_internal/cli/autocompletion.py +++ b/src/pip/_internal/cli/autocompletion.py @@ -14,14 +14,14 @@ def autocomplete(): """Entry Point for completion of main 
and subcommand options. """ # Don't complete if user hasn't sourced bash_completion file. - if 'PIP_AUTO_COMPLETE' not in os.environ: + if "PIP_AUTO_COMPLETE" not in os.environ: return - cwords = os.environ['COMP_WORDS'].split()[1:] - cword = int(os.environ['COMP_CWORD']) + cwords = os.environ["COMP_WORDS"].split()[1:] + cword = int(os.environ["COMP_CWORD"]) try: current = cwords[cword - 1] except IndexError: - current = '' + current = "" subcommands = list(commands_dict) options = [] @@ -35,13 +35,13 @@ def autocomplete(): # subcommand options if subcommand_name: # special case: 'help' subcommand has no options - if subcommand_name == 'help': + if subcommand_name == "help": sys.exit(1) # special case: list locally installed dists for show and uninstall should_list_installed = subcommand_name in [ - 'show', - 'uninstall', - ] and not current.startswith('-') + "show", + "uninstall", + ] and not current.startswith("-") if should_list_installed: installed = [] lc = current.lower() @@ -62,7 +62,7 @@ def autocomplete(): options.append((opt_str, opt.nargs)) # filter out previously specified options from available options - prev_opts = [x.split('=')[0] for x in cwords[1 : cword - 1]] + prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]] options = [(x, v) for (x, v) in options if x not in prev_opts] # filter options by current input options = [(k, v) for k, v in options if k.startswith(current)] @@ -79,7 +79,7 @@ def autocomplete(): opt_label = option[0] # append '=' to options which require args if option[1] and option[0][:2] == "--": - opt_label += '=' + opt_label += "=" print(opt_label) else: # show main parser options only when necessary @@ -87,7 +87,7 @@ def autocomplete(): opts = [i.option_list for i in parser.option_groups] opts.append(parser.option_list) opts = (o for it in opts for o in it) - if current.startswith('-'): + if current.startswith("-"): for opt in opts: if opt.help != optparse.SUPPRESS_HELP: subcommands += opt._long_opts + opt._short_opts @@ 
-97,7 +97,7 @@ def autocomplete(): if completion_type: subcommands = auto_complete_paths(current, completion_type) - print(' '.join([x for x in subcommands if x.startswith(current)])) + print(" ".join([x for x in subcommands if x.startswith(current)])) sys.exit(1) @@ -109,15 +109,15 @@ def get_path_completion_type(cwords, cword, opts): :param opts: The available options to check :return: path completion type (``file``, ``dir``, ``path`` or None) """ - if cword < 2 or not cwords[cword - 2].startswith('-'): + if cword < 2 or not cwords[cword - 2].startswith("-"): return for opt in opts: if opt.help == optparse.SUPPRESS_HELP: continue - for o in str(opt).split('/'): - if cwords[cword - 2].split('=')[0] == o: + for o in str(opt).split("/"): + if cwords[cword - 2].split("=")[0] == o: if not opt.metavar or any( - x in ('path', 'file', 'dir') for x in opt.metavar.split('/') + x in ("path", "file", "dir") for x in opt.metavar.split("/") ): return opt.metavar @@ -147,7 +147,7 @@ def auto_complete_paths(current, completion_type): # complete regular files when there is not ```` after option # complete directories when there is ````, ```` or # ````after option - if completion_type != 'dir' and os.path.isfile(opt): + if completion_type != "dir" and os.path.isfile(opt): yield comp_file elif os.path.isdir(opt): - yield os.path.join(comp_file, '') + yield os.path.join(comp_file, "") diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index a8a2dd93546..bd6ea7d32ca 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -12,10 +12,7 @@ from pip._internal.cli import cmdoptions from pip._internal.cli.command_context import CommandContextMixIn -from pip._internal.cli.parser import ( - ConfigOptionParser, - UpdatingDefaultsHelpFormatter, -) +from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter from pip._internal.cli.status_codes import ( ERROR, PREVIOUS_BUILD_DIR_ERROR, @@ 
-40,7 +37,7 @@ from typing import List, Tuple, Any from optparse import Values -__all__ = ['Command'] +__all__ = ["Command"] logger = logging.getLogger(__name__) @@ -53,13 +50,13 @@ def __init__(self, name, summary, isolated=False): # type: (str, str, bool) -> None super(Command, self).__init__() parser_kw = { - 'usage': self.usage, - 'prog': '%s %s' % (get_prog(), name), - 'formatter': UpdatingDefaultsHelpFormatter(), - 'add_help_option': False, - 'name': name, - 'description': self.__doc__, - 'isolated': isolated, + "usage": self.usage, + "prog": "%s %s" % (get_prog(), name), + "formatter": UpdatingDefaultsHelpFormatter(), + "add_help_option": False, + "name": name, + "description": self.__doc__, + "isolated": isolated, } self.name = name @@ -67,7 +64,7 @@ def __init__(self, name, summary, isolated=False): self.parser = ConfigOptionParser(**parser_kw) # Commands should add options to this option group - optgroup_name = '%s Options' % self.name.capitalize() + optgroup_name = "%s Options" % self.name.capitalize() self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) # Add the general options @@ -82,7 +79,7 @@ def handle_pip_version_check(self, options): """ # Make sure we do the pip version check if the index_group options # are present. - assert not hasattr(options, 'no_index') + assert not hasattr(options, "no_index") def run(self, options, args): # type: (Values, List[Any]) -> Any @@ -133,15 +130,15 @@ def _main(self, args): # This also affects isolated builds and it should. 
if options.no_input: - os.environ['PIP_NO_INPUT'] = '1' + os.environ["PIP_NO_INPUT"] = "1" if options.exists_action: - os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action) + os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action) if options.require_venv and not self.ignore_require_venv: # If a venv is required check if it can really be found if not running_under_virtualenv(): - logger.critical('Could not find an activated virtualenv (required).') + logger.critical("Could not find an activated virtualenv (required).") sys.exit(VIRTUALENV_NOT_FOUND) try: @@ -152,34 +149,34 @@ def _main(self, args): return status except PreviousBuildDirError as exc: logger.critical(str(exc)) - logger.debug('Exception information:', exc_info=True) + logger.debug("Exception information:", exc_info=True) return PREVIOUS_BUILD_DIR_ERROR except (InstallationError, UninstallationError, BadCommand) as exc: logger.critical(str(exc)) - logger.debug('Exception information:', exc_info=True) + logger.debug("Exception information:", exc_info=True) return ERROR except CommandError as exc: - logger.critical('%s', exc) - logger.debug('Exception information:', exc_info=True) + logger.critical("%s", exc) + logger.debug("Exception information:", exc_info=True) return ERROR except BrokenStdoutLoggingError: # Bypass our logger and write any remaining messages to stderr # because stdout no longer works. 
- print('ERROR: Pipe to stdout was broken', file=sys.stderr) + print("ERROR: Pipe to stdout was broken", file=sys.stderr) if level_number <= logging.DEBUG: traceback.print_exc(file=sys.stderr) return ERROR except KeyboardInterrupt: - logger.critical('Operation cancelled by user') - logger.debug('Exception information:', exc_info=True) + logger.critical("Operation cancelled by user") + logger.debug("Exception information:", exc_info=True) return ERROR except BaseException: - logger.critical('Exception:', exc_info=True) + logger.critical("Exception:", exc_info=True) return UNKNOWN_ERROR finally: diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index c3af7084be0..7c5655230f9 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -46,8 +46,8 @@ def raise_option_error(parser, option, msg): option: an Option instance. msg: the error text. """ - msg = '{} error: {}'.format(option, msg) - msg = textwrap.fill(' '.join(msg.split())) + msg = "{} error: {}".format(option, msg) + msg = textwrap.fill(" ".join(msg.split())) parser.error(msg) @@ -58,8 +58,8 @@ def make_option_group(group, parser): group -- assumed to be dict with 'name' and 'options' keys parser -- an optparse Parser """ - option_group = OptionGroup(parser, group['name']) - for option in group['options']: + option_group = OptionGroup(parser, group["name"]) + for option in group["options"]: option_group.add_option(option()) return option_group @@ -83,8 +83,8 @@ def getname(n): control = options.format_control control.disallow_binaries() warnings.warn( - 'Disabling all use of wheels due to the use of --build-options ' - '/ --global-options / --install-options.', + "Disabling all use of wheels due to the use of --build-options " + "/ --global-options / --install-options.", stacklevel=2, ) @@ -100,7 +100,7 @@ def check_dist_restriction(options, check_target=False): [options.python_version, options.platform, options.abi, 
options.implementation] ) - binary_only = FormatControl(set(), {':all:'}) + binary_only = FormatControl(set(), {":all:"}) sdist_dependencies_allowed = ( options.format_control != binary_only and not options.ignore_dependencies ) @@ -130,7 +130,7 @@ def check_dist_restriction(options, check_target=False): ########### help_ = partial( - Option, '-h', '--help', dest='help', action='help', help='Show help.' + Option, "-h", "--help", dest="help", action="help", help="Show help." ) # type: Callable[..., Option] isolated_mode = partial( @@ -148,67 +148,67 @@ def check_dist_restriction(options, check_target=False): require_virtualenv = partial( Option, # Run only if inside a virtualenv, bail if not. - '--require-virtualenv', - '--require-venv', - dest='require_venv', - action='store_true', + "--require-virtualenv", + "--require-venv", + dest="require_venv", + action="store_true", default=False, help=SUPPRESS_HELP, ) # type: Callable[..., Option] verbose = partial( Option, - '-v', - '--verbose', - dest='verbose', - action='count', + "-v", + "--verbose", + dest="verbose", + action="count", default=0, - help='Give more output. Option is additive, and can be used up to 3 times.', + help="Give more output. Option is additive, and can be used up to 3 times.", ) # type: Callable[..., Option] no_color = partial( Option, - '--no-color', - dest='no_color', - action='store_true', + "--no-color", + dest="no_color", + action="store_true", default=False, help="Suppress colored output", ) # type: Callable[..., Option] version = partial( Option, - '-V', - '--version', - dest='version', - action='store_true', - help='Show version and exit.', + "-V", + "--version", + dest="version", + action="store_true", + help="Show version and exit.", ) # type: Callable[..., Option] quiet = partial( Option, - '-q', - '--quiet', - dest='quiet', - action='count', + "-q", + "--quiet", + dest="quiet", + action="count", default=0, help=( - 'Give less output. 
Option is additive, and can be used up to 3' - ' times (corresponding to WARNING, ERROR, and CRITICAL logging' - ' levels).' + "Give less output. Option is additive, and can be used up to 3" + " times (corresponding to WARNING, ERROR, and CRITICAL logging" + " levels)." ), ) # type: Callable[..., Option] progress_bar = partial( Option, - '--progress-bar', - dest='progress_bar', - type='choice', + "--progress-bar", + dest="progress_bar", + type="choice", choices=list(BAR_TYPES.keys()), - default='on', + default="on", help=( - 'Specify type of progress to be displayed [' - + '|'.join(BAR_TYPES.keys()) - + '] (default: %default)' + "Specify type of progress to be displayed [" + + "|".join(BAR_TYPES.keys()) + + "] (default: %default)" ), ) # type: Callable[..., Option] @@ -225,27 +225,27 @@ def check_dist_restriction(options, check_target=False): no_input = partial( Option, # Don't ask for input - '--no-input', - dest='no_input', - action='store_true', + "--no-input", + dest="no_input", + action="store_true", default=False, help=SUPPRESS_HELP, ) # type: Callable[..., Option] proxy = partial( Option, - '--proxy', - dest='proxy', - type='str', - default='', + "--proxy", + dest="proxy", + type="str", + default="", help="Specify a proxy in the form [user:passwd@]proxy.server:port.", ) # type: Callable[..., Option] retries = partial( Option, - '--retries', - dest='retries', - type='int', + "--retries", + dest="retries", + type="int", default=5, help="Maximum number of retries each connection should attempt " "(default %default times).", @@ -253,22 +253,22 @@ def check_dist_restriction(options, check_target=False): timeout = partial( Option, - '--timeout', - '--default-timeout', - metavar='sec', - dest='timeout', - type='float', + "--timeout", + "--default-timeout", + metavar="sec", + dest="timeout", + type="float", default=15, - help='Set the socket timeout (default %default seconds).', + help="Set the socket timeout (default %default seconds).", ) # type: Callable[..., 
Option] skip_requirements_regex = partial( Option, # A regex to be used to skip requirements - '--skip-requirements-regex', - dest='skip_requirements_regex', - type='str', - default='', + "--skip-requirements-regex", + dest="skip_requirements_regex", + type="str", + default="", help=SUPPRESS_HELP, ) # type: Callable[..., Option] @@ -277,13 +277,13 @@ def exists_action(): # type: () -> Option return Option( # Option when path already exist - '--exists-action', - dest='exists_action', - type='choice', - choices=['s', 'i', 'w', 'b', 'a'], + "--exists-action", + dest="exists_action", + type="choice", + choices=["s", "i", "w", "b", "a"], default=[], - action='append', - metavar='action', + action="append", + metavar="action", help="Default action when a path already exists: " "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.", ) @@ -291,31 +291,31 @@ def exists_action(): cert = partial( Option, - '--cert', - dest='cert', - type='str', - metavar='path', + "--cert", + dest="cert", + type="str", + metavar="path", help="Path to alternate CA bundle.", ) # type: Callable[..., Option] client_cert = partial( Option, - '--client-cert', - dest='client_cert', - type='str', + "--client-cert", + dest="client_cert", + type="str", default=None, - metavar='path', + metavar="path", help="Path to SSL client certificate, a single file containing the " "private key and the certificate in PEM format.", ) # type: Callable[..., Option] index_url = partial( Option, - '-i', - '--index-url', - '--pypi-url', - dest='index_url', - metavar='URL', + "-i", + "--index-url", + "--pypi-url", + dest="index_url", + metavar="URL", default=PyPI.simple_url, help="Base URL of the Python Package Index (default %default). 
" "This should point to a repository compliant with PEP 503 " @@ -326,10 +326,10 @@ def exists_action(): def extra_index_url(): return Option( - '--extra-index-url', - dest='extra_index_urls', - metavar='URL', - action='append', + "--extra-index-url", + dest="extra_index_urls", + metavar="URL", + action="append", default=[], help="Extra URLs of package indexes to use in addition to " "--index-url. Should follow the same rules as " @@ -339,23 +339,23 @@ def extra_index_url(): no_index = partial( Option, - '--no-index', - dest='no_index', - action='store_true', + "--no-index", + dest="no_index", + action="store_true", default=False, - help='Ignore package index (only looking at --find-links URLs instead).', + help="Ignore package index (only looking at --find-links URLs instead).", ) # type: Callable[..., Option] def find_links(): # type: () -> Option return Option( - '-f', - '--find-links', - dest='find_links', - action='append', + "-f", + "--find-links", + dest="find_links", + action="append", default=[], - metavar='url', + metavar="url", help="If a url or path to an html file, then parse for links to " "archives. If a local path or file:// url that's a directory, " "then look for archives in the directory listing.", @@ -378,42 +378,42 @@ def trusted_host(): def constraints(): # type: () -> Option return Option( - '-c', - '--constraint', - dest='constraints', - action='append', + "-c", + "--constraint", + dest="constraints", + action="append", default=[], - metavar='file', - help='Constrain versions using the given constraints file. ' - 'This option can be used multiple times.', + metavar="file", + help="Constrain versions using the given constraints file. 
" + "This option can be used multiple times.", ) def requirements(): # type: () -> Option return Option( - '-r', - '--requirement', - dest='requirements', - action='append', + "-r", + "--requirement", + dest="requirements", + action="append", default=[], - metavar='file', - help='Install from the given requirements file. ' - 'This option can be used multiple times.', + metavar="file", + help="Install from the given requirements file. " + "This option can be used multiple times.", ) def editable(): # type: () -> Option return Option( - '-e', - '--editable', - dest='editables', - action='append', + "-e", + "--editable", + dest="editables", + action="append", default=[], - metavar='path/url', + metavar="path/url", help=( - 'Install a project in editable mode (i.e. setuptools ' + "Install a project in editable mode (i.e. setuptools " '"develop mode") from a local project path or a VCS url.' ), ) @@ -421,14 +421,14 @@ def editable(): src = partial( Option, - '--src', - '--source', - '--source-dir', - '--source-directory', - dest='src_dir', - metavar='dir', + "--src", + "--source", + "--source-dir", + "--source-directory", + dest="src_dir", + metavar="dir", default=get_src_prefix(), - help='Directory to check out editable projects into. ' + help="Directory to check out editable projects into. " 'The default in a virtualenv is "/src". ' 'The default for global installs is "/src".', ) # type: Callable[..., Option] @@ -496,9 +496,9 @@ def only_binary(): platform = partial( Option, - '--platform', - dest='platform', - metavar='platform', + "--platform", + dest="platform", + metavar="platform", default=None, help=( "Only use wheels compatible with . " @@ -520,9 +520,9 @@ def _convert_python_version(value): # The empty string is the same as not providing a value. 
return (None, None) - parts = value.split('.') + parts = value.split(".") if len(parts) > 3: - return ((), 'at most three version parts are allowed') + return ((), "at most three version parts are allowed") if len(parts) == 1: # Then we are in the case of "3" or "37". @@ -533,7 +533,7 @@ def _convert_python_version(value): try: version_info = tuple(int(part) for part in parts) except ValueError: - return ((), 'each version part must be an integer') + return ((), "each version part must be an integer") return (version_info, None) @@ -545,7 +545,7 @@ def _handle_python_version(option, opt_str, value, parser): """ version_info, error_msg = _convert_python_version(value) if error_msg is not None: - msg = 'invalid --python-version value: {!r}: {}'.format(value, error_msg) + msg = "invalid --python-version value: {!r}: {}".format(value, error_msg) raise_option_error(parser, option=option, msg=msg) parser.values.python_version = version_info @@ -553,12 +553,12 @@ def _handle_python_version(option, opt_str, value, parser): python_version = partial( Option, - '--python-version', - dest='python_version', - metavar='python_version', - action='callback', + "--python-version", + dest="python_version", + metavar="python_version", + action="callback", callback=_handle_python_version, - type='str', + type="str", default=None, help=dedent( """\ @@ -574,9 +574,9 @@ def _handle_python_version(option, opt_str, value, parser): implementation = partial( Option, - '--implementation', - dest='implementation', - metavar='implementation', + "--implementation", + dest="implementation", + metavar="implementation", default=None, help=( "Only use wheels compatible with Python " @@ -590,9 +590,9 @@ def _handle_python_version(option, opt_str, value, parser): abi = partial( Option, - '--abi', - dest='abi', - metavar='abi', + "--abi", + dest="abi", + metavar="abi", default=None, help=( "Only use wheels compatible with Python " @@ -685,46 +685,46 @@ def _handle_no_cache_dir(option, opt, value, 
parser): no_deps = partial( Option, - '--no-deps', - '--no-dependencies', - dest='ignore_dependencies', - action='store_true', + "--no-deps", + "--no-dependencies", + dest="ignore_dependencies", + action="store_true", default=False, help="Don't install package dependencies.", ) # type: Callable[..., Option] build_dir = partial( Option, - '-b', - '--build', - '--build-dir', - '--build-directory', - dest='build_dir', - metavar='dir', - help='Directory to unpack packages into and build in. Note that ' - 'an initial build still takes place in a temporary directory. ' - 'The location of temporary directories can be controlled by setting ' - 'the TMPDIR environment variable (TEMP on Windows) appropriately. ' - 'When passed, build directories are not cleaned in case of failures.', + "-b", + "--build", + "--build-dir", + "--build-directory", + dest="build_dir", + metavar="dir", + help="Directory to unpack packages into and build in. Note that " + "an initial build still takes place in a temporary directory. " + "The location of temporary directories can be controlled by setting " + "the TMPDIR environment variable (TEMP on Windows) appropriately. " + "When passed, build directories are not cleaned in case of failures.", ) # type: Callable[..., Option] ignore_requires_python = partial( Option, - '--ignore-requires-python', - dest='ignore_requires_python', - action='store_true', - help='Ignore the Requires-Python information.', + "--ignore-requires-python", + dest="ignore_requires_python", + action="store_true", + help="Ignore the Requires-Python information.", ) # type: Callable[..., Option] no_build_isolation = partial( Option, - '--no-build-isolation', - dest='build_isolation', - action='store_false', + "--no-build-isolation", + dest="build_isolation", + action="store_false", default=True, - help='Disable isolation when building a modern source distribution. 
' - 'Build dependencies specified by PEP 518 must be already installed ' - 'if this option is used.', + help="Disable isolation when building a modern source distribution. " + "Build dependencies specified by PEP 518 must be already installed " + "if this option is used.", ) # type: Callable[..., Option] @@ -754,19 +754,19 @@ def _handle_no_use_pep517(option, opt, value, parser): use_pep517 = partial( Option, - '--use-pep517', - dest='use_pep517', - action='store_true', + "--use-pep517", + dest="use_pep517", + action="store_true", default=None, - help='Use PEP 517 for building source distributions ' - '(use --no-use-pep517 to force legacy behaviour).', + help="Use PEP 517 for building source distributions " + "(use --no-use-pep517 to force legacy behaviour).", ) # type: Any no_use_pep517 = partial( Option, - '--no-use-pep517', - dest='use_pep517', - action='callback', + "--no-use-pep517", + dest="use_pep517", + action="callback", callback=_handle_no_use_pep517, default=None, help=SUPPRESS_HELP, @@ -774,39 +774,39 @@ def _handle_no_use_pep517(option, opt, value, parser): install_options = partial( Option, - '--install-option', - dest='install_options', - action='append', - metavar='options', + "--install-option", + dest="install_options", + action="append", + metavar="options", help="Extra arguments to be supplied to the setup.py install " - "command (use like --install-option=\"--install-scripts=/usr/local/" - "bin\"). Use multiple --install-option options to pass multiple " + 'command (use like --install-option="--install-scripts=/usr/local/' + 'bin"). Use multiple --install-option options to pass multiple ' "options to setup.py install. 
If you are using an option with a " "directory path, be sure to use absolute path.", ) # type: Callable[..., Option] global_options = partial( Option, - '--global-option', - dest='global_options', - action='append', - metavar='options', + "--global-option", + dest="global_options", + action="append", + metavar="options", help="Extra global options to be supplied to the setup.py " "call before the install command.", ) # type: Callable[..., Option] no_clean = partial( Option, - '--no-clean', - action='store_true', + "--no-clean", + action="store_true", default=False, help="Don't clean up build directories.", ) # type: Callable[..., Option] pre = partial( Option, - '--pre', - action='store_true', + "--pre", + action="store_true", default=False, help="Include pre-release and development versions. By default, " "pip only finds stable versions.", @@ -826,10 +826,10 @@ def _handle_no_use_pep517(option, opt, value, parser): # Deprecated, Remove later always_unzip = partial( Option, - '-Z', - '--always-unzip', - dest='always_unzip', - action='store_true', + "-Z", + "--always-unzip", + dest="always_unzip", + action="store_true", help=SUPPRESS_HELP, ) # type: Callable[..., Option] @@ -841,53 +841,53 @@ def _handle_merge_hash(option, opt_str, value, parser): if not parser.values.hashes: parser.values.hashes = {} try: - algo, digest = value.split(':', 1) + algo, digest = value.split(":", 1) except ValueError: parser.error( - 'Arguments to %s must be a hash name ' - 'followed by a value, like --hash=sha256:abcde...' % opt_str + "Arguments to %s must be a hash name " + "followed by a value, like --hash=sha256:abcde..." % opt_str ) if algo not in STRONG_HASHES: parser.error( - 'Allowed hash algorithms for %s are %s.' - % (opt_str, ', '.join(STRONG_HASHES)) + "Allowed hash algorithms for %s are %s." 
+ % (opt_str, ", ".join(STRONG_HASHES)) ) parser.values.hashes.setdefault(algo, []).append(digest) hash = partial( Option, - '--hash', + "--hash", # Hash values eventually end up in InstallRequirement.hashes due to # __dict__ copying in process_line(). - dest='hashes', - action='callback', + dest="hashes", + action="callback", callback=_handle_merge_hash, - type='string', + type="string", help="Verify that the package's archive matches this " - 'hash before installing. Example: --hash=sha256:abcdef...', + "hash before installing. Example: --hash=sha256:abcdef...", ) # type: Callable[..., Option] require_hashes = partial( Option, - '--require-hashes', - dest='require_hashes', - action='store_true', + "--require-hashes", + dest="require_hashes", + action="store_true", default=False, - help='Require a hash to check each requirement against, for ' - 'repeatable installs. This option is implied when any package in a ' - 'requirements file has a --hash option.', + help="Require a hash to check each requirement against, for " + "repeatable installs. 
This option is implied when any package in a " + "requirements file has a --hash option.", ) # type: Callable[..., Option] list_path = partial( Option, - '--path', - dest='path', - action='append', - help='Restrict to the specified installation path for listing ' - 'packages (can be used multiple times).', + "--path", + dest="path", + action="append", + help="Restrict to the specified installation path for listing " + "packages (can be used multiple times).", ) # type: Callable[..., Option] @@ -902,8 +902,8 @@ def check_list_path_option(options): ########## general_group = { - 'name': 'General Options', - 'options': [ + "name": "General Options", + "options": [ help_, isolated_mode, require_virtualenv, @@ -928,6 +928,6 @@ def check_list_path_option(options): } # type: Dict[str, Any] index_group = { - 'name': 'Package Index Options', - 'options': [index_url, extra_index_url, no_index, find_links], + "name": "Package Index Options", + "options": [index_url, extra_index_url, no_index, find_links], } # type: Dict[str, Any] diff --git a/src/pip/_internal/cli/main_parser.py b/src/pip/_internal/cli/main_parser.py index a89821d4489..df0cbcf9c85 100644 --- a/src/pip/_internal/cli/main_parser.py +++ b/src/pip/_internal/cli/main_parser.py @@ -5,10 +5,7 @@ import sys from pip._internal.cli import cmdoptions -from pip._internal.cli.parser import ( - ConfigOptionParser, - UpdatingDefaultsHelpFormatter, -) +from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter from pip._internal.commands import commands_dict, get_similar_commands from pip._internal.exceptions import CommandError from pip._internal.utils.misc import get_pip_version, get_prog @@ -27,11 +24,11 @@ def create_main_parser(): """ parser_kw = { - 'usage': '\n%prog [options]', - 'add_help_option': False, - 'formatter': UpdatingDefaultsHelpFormatter(), - 'name': 'global', - 'prog': get_prog(), + "usage": "\n%prog [options]", + "add_help_option": False, + "formatter": 
UpdatingDefaultsHelpFormatter(), + "name": "global", + "prog": get_prog(), } parser = ConfigOptionParser(**parser_kw) @@ -47,11 +44,11 @@ def create_main_parser(): parser.main = True # type: ignore # create command listing for description - description = [''] + [ - '%-27s %s' % (name, command_info.summary) + description = [""] + [ + "%-27s %s" % (name, command_info.summary) for name, command_info in commands_dict.items() ] - parser.description = '\n'.join(description) + parser.description = "\n".join(description) return parser @@ -76,7 +73,7 @@ def parse_command(args): sys.exit() # pip || pip help -> print_help() - if not args_else or (args_else[0] == 'help' and len(args_else) == 1): + if not args_else or (args_else[0] == "help" and len(args_else) == 1): parser.print_help() sys.exit() @@ -90,7 +87,7 @@ def parse_command(args): if guess: msg.append('maybe you meant "%s"' % guess) - raise CommandError(' - '.join(msg)) + raise CommandError(" - ".join(msg)) # all the args without the subcommand cmd_args = args[:] diff --git a/src/pip/_internal/cli/parser.py b/src/pip/_internal/cli/parser.py index 70a18db132a..0d232d80750 100644 --- a/src/pip/_internal/cli/parser.py +++ b/src/pip/_internal/cli/parser.py @@ -21,15 +21,15 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter): def __init__(self, *args, **kwargs): # help position must be aligned with __init__.parseopts.description - kwargs['max_help_position'] = 30 - kwargs['indent_increment'] = 1 - kwargs['width'] = get_terminal_size()[0] - 2 + kwargs["max_help_position"] = 30 + kwargs["indent_increment"] = 1 + kwargs["width"] = get_terminal_size()[0] - 2 optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs) def format_option_strings(self, option): - return self._format_option_strings(option, ' <%s>', ', ') + return self._format_option_strings(option, " <%s>", ", ") - def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '): + def _format_option_strings(self, option, mvarfmt=" <%s>", optsep=", 
"): """ Return a comma-separated list of option strings and metavars. @@ -50,48 +50,48 @@ def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '): metavar = option.metavar or option.dest.lower() opts.append(mvarfmt % metavar.lower()) - return ''.join(opts) + return "".join(opts) def format_heading(self, heading): - if heading == 'Options': - return '' - return heading + ':\n' + if heading == "Options": + return "" + return heading + ":\n" def format_usage(self, usage): """ Ensure there is only one newline between usage and the first heading if there is no description. """ - msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ") + msg = "\nUsage: %s\n" % self.indent_lines(textwrap.dedent(usage), " ") return msg def format_description(self, description): # leave full control over description to us if description: - if hasattr(self.parser, 'main'): - label = 'Commands' + if hasattr(self.parser, "main"): + label = "Commands" else: - label = 'Description' + label = "Description" # some doc strings have initial newlines, some don't - description = description.lstrip('\n') + description = description.lstrip("\n") # some doc strings have final newlines and spaces, some don't description = description.rstrip() # dedent, then reindent description = self.indent_lines(textwrap.dedent(description), " ") - description = '%s:\n%s\n' % (label, description) + description = "%s:\n%s\n" % (label, description) return description else: - return '' + return "" def format_epilog(self, epilog): # leave full control over epilog to us if epilog: return epilog else: - return '' + return "" def indent_lines(self, text, indent): - new_lines = [indent + line for line in text.split('\n')] + new_lines = [indent + line for line in text.split("\n")] return "\n".join(new_lines) @@ -133,7 +133,7 @@ class ConfigOptionParser(CustomOptionParser): configuration files and environmental variables""" def __init__(self, *args, **kwargs): - self.name = kwargs.pop('name') + 
self.name = kwargs.pop("name") isolated = kwargs.pop("isolated", False) self.config = Configuration(isolated) @@ -183,7 +183,7 @@ def _update_defaults(self, defaults): # Then set the options with those values for key, val in self._get_ordered_configuration_items(): # '--' because configuration supports only long names - option = self.get_option('--' + key) + option = self.get_option("--" + key) # Ignore options not present in this parser. E.g. non-globals put # in [global] by users that want them to apply to all applicable @@ -191,17 +191,17 @@ def _update_defaults(self, defaults): if option is None: continue - if option.action in ('store_true', 'store_false', 'count'): + if option.action in ("store_true", "store_false", "count"): try: val = strtobool(val) except ValueError: error_msg = invalid_config_error_message(option.action, key, val) self.error(error_msg) - elif option.action == 'append': + elif option.action == "append": val = val.split() val = [self.check_default(option, key, v) for v in val] - elif option.action == 'callback': + elif option.action == "callback": late_eval.add(option.dest) opt_str = option.get_opt_string() val = option.convert_value(opt_str, val) @@ -248,7 +248,7 @@ def error(self, msg): def invalid_config_error_message(action, key, val): """Returns a better error message when invalid configuration option is provided.""" - if action in ('store_true', 'store_false'): + if action in ("store_true", "store_false"): return ( "{0} is not a valid value for {1} option, " "please specify a boolean value like yes/no, " diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 652fd56f053..ae5e0925b76 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -118,7 +118,7 @@ def handle_pip_version_check(self, options): This overrides the default behavior of not doing the check. """ # Make sure the index_group options are present. 
- assert hasattr(options, 'no_index') + assert hasattr(options, "no_index") if options.disable_pip_version_check or options.no_index: return @@ -260,16 +260,16 @@ def populate_requirement_set( requirement_set.require_hashes = options.require_hashes if not (args or options.editables or options.requirements): - opts = {'name': self.name} + opts = {"name": self.name} if options.find_links: raise CommandError( - 'You must give at least one requirement to %(name)s ' + "You must give at least one requirement to %(name)s " '(maybe you meant "pip %(name)s %(links)s"?)' - % dict(opts, links=' '.join(options.find_links)) + % dict(opts, links=" ".join(options.find_links)) ) else: raise CommandError( - 'You must give at least one requirement to %(name)s ' + "You must give at least one requirement to %(name)s " '(see "pip help %(name)s")' % opts ) diff --git a/src/pip/_internal/collector.py b/src/pip/_internal/collector.py index 3bc9e9d7234..c2f93f20808 100644 --- a/src/pip/_internal/collector.py +++ b/src/pip/_internal/collector.py @@ -55,7 +55,7 @@ def _match_vcs_scheme(url): Returns the matched VCS scheme, or None if there's no match. """ for scheme in vcs.schemes: - if url.lower().startswith(scheme) and url[len(scheme)] in '+:': + if url.lower().startswith(scheme) and url[len(scheme)] in "+:": return scheme return None @@ -102,7 +102,7 @@ def _ensure_html_response(url, session): `_NotHTML` if the content type is not text/html. 
""" scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url) - if scheme not in {'http', 'https'}: + if scheme not in {"http", "https"}: raise _NotHTTP() resp = session.head(url, allow_redirects=True) @@ -128,7 +128,7 @@ def _get_html_response(url, session): if _is_url_like_archive(url): _ensure_html_response(url, session=session) - logger.debug('Getting page %s', redact_auth_from_url(url)) + logger.debug("Getting page %s", redact_auth_from_url(url)) resp = session.get( url, @@ -169,7 +169,7 @@ def _get_encoding_from_headers(headers): if headers and "Content-Type" in headers: content_type, params = cgi.parse_header(headers["Content-Type"]) if "charset" in params: - return params['charset'] + return params["charset"] return None @@ -232,10 +232,10 @@ def _create_link_from_element( return None url = _clean_link(urllib_parse.urljoin(base_url, href)) - pyrequire = anchor.get('data-requires-python') + pyrequire = anchor.get("data-requires-python") pyrequire = unescape(pyrequire) if pyrequire else None - yanked_reason = anchor.get('data-yanked') + yanked_reason = anchor.get("data-yanked") if yanked_reason: # This is a unicode string in Python 2 (and 3). yanked_reason = unescape(yanked_reason) @@ -311,35 +311,35 @@ def _get_html_page(link, session=None): "_get_html_page() missing 1 required keyword argument: 'session'" ) - url = link.url.split('#', 1)[0] + url = link.url.split("#", 1)[0] # Check for VCS schemes that do not support lookup as web pages. 
vcs_scheme = _match_vcs_scheme(url) if vcs_scheme: - logger.debug('Cannot look at %s URL %s', vcs_scheme, link) + logger.debug("Cannot look at %s URL %s", vcs_scheme, link) return None # Tack index.html onto file:// URLs that point to directories scheme, _, path, _, _, _ = urllib_parse.urlparse(url) - if scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path)): + if scheme == "file" and os.path.isdir(urllib_request.url2pathname(path)): # add trailing slash if not present so urljoin doesn't trim # final segment - if not url.endswith('/'): - url += '/' - url = urllib_parse.urljoin(url, 'index.html') - logger.debug(' file: URL is directory, getting %s', url) + if not url.endswith("/"): + url += "/" + url = urllib_parse.urljoin(url, "index.html") + logger.debug(" file: URL is directory, getting %s", url) try: resp = _get_html_response(url, session=session) except _NotHTTP: logger.debug( - 'Skipping page %s because it looks like an archive, and cannot ' - 'be checked by HEAD.', + "Skipping page %s because it looks like an archive, and cannot " + "be checked by HEAD.", link, ) except _NotHTML as exc: logger.debug( - 'Skipping page %s because the %s request got Content-Type: %s', + "Skipping page %s because the %s request got Content-Type: %s", link, exc.request_desc, exc.content_type, @@ -383,7 +383,7 @@ def group_locations(locations, expand_dir=False): # puts the url for the given file path into the appropriate list def sort_path(path): url = path_to_url(path) - if mimetypes.guess_type(url, strict=False)[0] == 'text/html': + if mimetypes.guess_type(url, strict=False)[0] == "text/html": urls.append(url) else: files.append(url) @@ -391,7 +391,7 @@ def sort_path(path): for url in locations: is_local_path = os.path.exists(url) - is_file_url = url.startswith('file:') + is_file_url = url.startswith("file:") if is_local_path or is_file_url: if is_local_path: @@ -506,7 +506,7 @@ def collect_links(self, project_name): file_links = [Link(url) for url in 
itertools.chain(index_file_loc, fl_file_loc)] # We trust every directly linked archive in find_links - find_link_links = [Link(url, '-f') for url in self.find_links] + find_link_links = [Link(url, "-f") for url in self.find_links] # We trust every url that the user has given us whether it was given # via --index-url or --find-links. @@ -521,13 +521,13 @@ def collect_links(self, project_name): url_locations = _remove_duplicate_links(url_locations) lines = [ - '{} location(s) to search for versions of {}:'.format( + "{} location(s) to search for versions of {}:".format( len(url_locations), project_name ) ] for link in url_locations: - lines.append('* {}'.format(link)) - logger.debug('\n'.join(lines)) + lines.append("* {}".format(link)) + logger.debug("\n".join(lines)) pages_links = {} for page in self._get_pages(url_locations): diff --git a/src/pip/_internal/commands/__init__.py b/src/pip/_internal/commands/__init__.py index d24984e59f0..227bfdba008 100644 --- a/src/pip/_internal/commands/__init__.py +++ b/src/pip/_internal/commands/__init__.py @@ -13,7 +13,7 @@ from pip._internal.cli.base_command import Command -CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary') +CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary") # The ordering matters for help display. # Also, even though the module path starts with the same @@ -26,109 +26,109 @@ commands_dict = OrderedDict( [ ( - 'install', + "install", CommandInfo( - 'pip._internal.commands.install', 'InstallCommand', 'Install packages.' + "pip._internal.commands.install", "InstallCommand", "Install packages." 
), ), ( - 'download', + "download", CommandInfo( - 'pip._internal.commands.download', - 'DownloadCommand', - 'Download packages.', + "pip._internal.commands.download", + "DownloadCommand", + "Download packages.", ), ), ( - 'uninstall', + "uninstall", CommandInfo( - 'pip._internal.commands.uninstall', - 'UninstallCommand', - 'Uninstall packages.', + "pip._internal.commands.uninstall", + "UninstallCommand", + "Uninstall packages.", ), ), ( - 'freeze', + "freeze", CommandInfo( - 'pip._internal.commands.freeze', - 'FreezeCommand', - 'Output installed packages in requirements format.', + "pip._internal.commands.freeze", + "FreezeCommand", + "Output installed packages in requirements format.", ), ), ( - 'list', + "list", CommandInfo( - 'pip._internal.commands.list', 'ListCommand', 'List installed packages.' + "pip._internal.commands.list", "ListCommand", "List installed packages." ), ), ( - 'show', + "show", CommandInfo( - 'pip._internal.commands.show', - 'ShowCommand', - 'Show information about installed packages.', + "pip._internal.commands.show", + "ShowCommand", + "Show information about installed packages.", ), ), ( - 'check', + "check", CommandInfo( - 'pip._internal.commands.check', - 'CheckCommand', - 'Verify installed packages have compatible dependencies.', + "pip._internal.commands.check", + "CheckCommand", + "Verify installed packages have compatible dependencies.", ), ), ( - 'config', + "config", CommandInfo( - 'pip._internal.commands.configuration', - 'ConfigurationCommand', - 'Manage local and global configuration.', + "pip._internal.commands.configuration", + "ConfigurationCommand", + "Manage local and global configuration.", ), ), ( - 'search', + "search", CommandInfo( - 'pip._internal.commands.search', - 'SearchCommand', - 'Search PyPI for packages.', + "pip._internal.commands.search", + "SearchCommand", + "Search PyPI for packages.", ), ), ( - 'wheel', + "wheel", CommandInfo( - 'pip._internal.commands.wheel', - 'WheelCommand', - 'Build wheels from your 
requirements.', + "pip._internal.commands.wheel", + "WheelCommand", + "Build wheels from your requirements.", ), ), ( - 'hash', + "hash", CommandInfo( - 'pip._internal.commands.hash', - 'HashCommand', - 'Compute hashes of package archives.', + "pip._internal.commands.hash", + "HashCommand", + "Compute hashes of package archives.", ), ), ( - 'completion', + "completion", CommandInfo( - 'pip._internal.commands.completion', - 'CompletionCommand', - 'A helper command used for command completion.', + "pip._internal.commands.completion", + "CompletionCommand", + "A helper command used for command completion.", ), ), ( - 'debug', + "debug", CommandInfo( - 'pip._internal.commands.debug', - 'DebugCommand', - 'Show information useful for debugging.', + "pip._internal.commands.debug", + "DebugCommand", + "Show information useful for debugging.", ), ), ( - 'help', + "help", CommandInfo( - 'pip._internal.commands.help', 'HelpCommand', 'Show help for commands.' + "pip._internal.commands.help", "HelpCommand", "Show help for commands." 
), ), ] diff --git a/src/pip/_internal/commands/completion.py b/src/pip/_internal/commands/completion.py index 197f181451c..92d683b0ea2 100644 --- a/src/pip/_internal/commands/completion.py +++ b/src/pip/_internal/commands/completion.py @@ -11,7 +11,7 @@ """ COMPLETION_SCRIPTS = { - 'bash': """ + "bash": """ _pip_completion() { COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\ @@ -20,7 +20,7 @@ } complete -o default -F _pip_completion %(prog)s """, - 'zsh': """ + "zsh": """ function _pip_completion { local words cword read -Ac words @@ -31,7 +31,7 @@ } compctl -K _pip_completion %(prog)s """, - 'fish': """ + "fish": """ function __fish_complete_pip set -lx COMP_WORDS (commandline -o) "" set -lx COMP_CWORD ( \\ @@ -56,28 +56,28 @@ def __init__(self, *args, **kw): cmd_opts = self.cmd_opts cmd_opts.add_option( - '--bash', - '-b', - action='store_const', - const='bash', - dest='shell', - help='Emit completion code for bash', + "--bash", + "-b", + action="store_const", + const="bash", + dest="shell", + help="Emit completion code for bash", ) cmd_opts.add_option( - '--zsh', - '-z', - action='store_const', - const='zsh', - dest='shell', - help='Emit completion code for zsh', + "--zsh", + "-z", + action="store_const", + const="zsh", + dest="shell", + help="Emit completion code for zsh", ) cmd_opts.add_option( - '--fish', - '-f', - action='store_const', - const='fish', - dest='shell', - help='Emit completion code for fish', + "--fish", + "-f", + action="store_const", + const="fish", + dest="shell", + help="Emit completion code for fish", ) self.parser.insert_option_group(0, cmd_opts) @@ -85,11 +85,11 @@ def __init__(self, *args, **kw): def run(self, options, args): """Prints the completion code of the given shell""" shells = COMPLETION_SCRIPTS.keys() - shell_options = ['--' + shell for shell in sorted(shells)] + shell_options = ["--" + shell for shell in sorted(shells)] if options.shell in shells: script = textwrap.dedent( - COMPLETION_SCRIPTS.get(options.shell, '') % 
{'prog': get_prog()} + COMPLETION_SCRIPTS.get(options.shell, "") % {"prog": get_prog()} ) - print(BASE_COMPLETION % {'script': script, 'shell': options.shell}) + print(BASE_COMPLETION % {"script": script, "shell": options.shell}) else: - sys.stderr.write('ERROR: You must pass %s\n' % ' or '.join(shell_options)) + sys.stderr.write("ERROR: You must pass %s\n" % " or ".join(shell_options)) diff --git a/src/pip/_internal/commands/configuration.py b/src/pip/_internal/commands/configuration.py index fcf6493c3e9..826c80d0f43 100644 --- a/src/pip/_internal/commands/configuration.py +++ b/src/pip/_internal/commands/configuration.py @@ -4,11 +4,7 @@ from pip._internal.cli.base_command import Command from pip._internal.cli.status_codes import ERROR, SUCCESS -from pip._internal.configuration import ( - Configuration, - get_configuration_files, - kinds, -) +from pip._internal.configuration import Configuration, get_configuration_files, kinds from pip._internal.exceptions import PipError from pip._internal.utils.deprecation import deprecated from pip._internal.utils.misc import get_prog, write_output @@ -50,48 +46,48 @@ def __init__(self, *args, **kwargs): self.configuration = None self.cmd_opts.add_option( - '--editor', - dest='editor', - action='store', + "--editor", + dest="editor", + action="store", default=None, help=( - 'Editor to use to edit the file. Uses VISUAL or EDITOR ' - 'environment variables if not provided.' + "Editor to use to edit the file. Uses VISUAL or EDITOR " + "environment variables if not provided." 
), ) self.cmd_opts.add_option( - '--global', - dest='global_file', - action='store_true', + "--global", + dest="global_file", + action="store_true", default=False, - help='Use the system-wide configuration file only', + help="Use the system-wide configuration file only", ) self.cmd_opts.add_option( - '--user', - dest='user_file', - action='store_true', + "--user", + dest="user_file", + action="store_true", default=False, - help='Use the user configuration file only', + help="Use the user configuration file only", ) self.cmd_opts.add_option( - '--site', - dest='site_file', - action='store_true', + "--site", + dest="site_file", + action="store_true", default=False, - help='Use the current environment configuration file only', + help="Use the current environment configuration file only", ) self.cmd_opts.add_option( - '--venv', - dest='venv_file', - action='store_true', + "--venv", + dest="venv_file", + action="store_true", default=False, help=( - '[Deprecated] Use the current environment configuration ' - 'file in a virtual environment only' + "[Deprecated] Use the current environment configuration " + "file in a virtual environment only" ), ) @@ -228,7 +224,7 @@ def _get_n_args(self, args, example, n): """ if len(args) != n: msg = ( - 'Got unexpected number of arguments, expected {}. ' + "Got unexpected number of arguments, expected {}. 
" '(example: "{} config {}")' ).format(n, get_prog(), example) raise PipError(msg) diff --git a/src/pip/_internal/commands/debug.py b/src/pip/_internal/commands/debug.py index bb22e6328fe..ee5299c5ec4 100644 --- a/src/pip/_internal/commands/debug.py +++ b/src/pip/_internal/commands/debug.py @@ -22,20 +22,20 @@ def show_value(name, value): # type: (str, str) -> None - logger.info('{}: {}'.format(name, value)) + logger.info("{}: {}".format(name, value)) def show_sys_implementation(): # type: () -> None - logger.info('sys.implementation:') - if hasattr(sys, 'implementation'): + logger.info("sys.implementation:") + if hasattr(sys, "implementation"): implementation = sys.implementation # type: ignore implementation_name = implementation.name else: - implementation_name = '' + implementation_name = "" with indent_log(): - show_value('name', implementation_name) + show_value("name", implementation_name) def show_tags(options): @@ -47,11 +47,11 @@ def show_tags(options): # Display the target options that were explicitly provided. formatted_target = target_python.format_given() - suffix = '' + suffix = "" if formatted_target: - suffix = ' (target: {})'.format(formatted_target) + suffix = " (target: {})".format(formatted_target) - msg = 'Compatible tags: {}{}'.format(len(tags), suffix) + msg = "Compatible tags: {}{}".format(len(tags), suffix) logger.info(msg) if options.verbose < 1 and len(tags) > tag_limit: @@ -66,7 +66,7 @@ def show_tags(options): if tags_limited: msg = ( - '...\n[First {tag_limit} tags shown. Pass --verbose to show all.]' + "...\n[First {tag_limit} tags shown. Pass --verbose to show all.]" ).format(tag_limit=tag_limit) logger.info(msg) @@ -95,13 +95,13 @@ def run(self, options, args): "details, since the output and options of this command may " "change without notice." 
) - show_value('pip version', get_pip_version()) - show_value('sys.version', sys.version) - show_value('sys.executable', sys.executable) - show_value('sys.getdefaultencoding', sys.getdefaultencoding()) - show_value('sys.getfilesystemencoding', sys.getfilesystemencoding()) - show_value('locale.getpreferredencoding', locale.getpreferredencoding()) - show_value('sys.platform', sys.platform) + show_value("pip version", get_pip_version()) + show_value("sys.version", sys.version) + show_value("sys.executable", sys.executable) + show_value("sys.getdefaultencoding", sys.getdefaultencoding()) + show_value("sys.getfilesystemencoding", sys.getfilesystemencoding()) + show_value("locale.getpreferredencoding", locale.getpreferredencoding()) + show_value("sys.platform", sys.platform) show_sys_implementation() show_tags(options) diff --git a/src/pip/_internal/commands/download.py b/src/pip/_internal/commands/download.py index f7699864260..4b1b36823a1 100644 --- a/src/pip/_internal/commands/download.py +++ b/src/pip/_internal/commands/download.py @@ -58,12 +58,12 @@ def __init__(self, *args, **kw): cmd_opts.add_option(cmdoptions.no_use_pep517()) cmd_opts.add_option( - '-d', - '--dest', - '--destination-dir', - '--destination-directory', - dest='download_dir', - metavar='dir', + "-d", + "--dest", + "--destination-dir", + "--destination-directory", + dest="download_dir", + metavar="dir", default=os.curdir, help=("Download packages into ."), ) @@ -131,11 +131,11 @@ def run(self, options, args): ) resolver.resolve(requirement_set) - downloaded = ' '.join( + downloaded = " ".join( [req.name for req in requirement_set.successfully_downloaded] ) if downloaded: - write_output('Successfully downloaded %s', downloaded) + write_output("Successfully downloaded %s", downloaded) # Clean up if not options.no_clean: diff --git a/src/pip/_internal/commands/freeze.py b/src/pip/_internal/commands/freeze.py index 14b9e8699be..98cfb69cdb8 100644 --- a/src/pip/_internal/commands/freeze.py +++ 
b/src/pip/_internal/commands/freeze.py @@ -9,7 +9,7 @@ from pip._internal.operations.freeze import freeze from pip._internal.utils.compat import stdlib_pkgs -DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'} +DEV_PKGS = {"pip", "setuptools", "distribute", "wheel"} class FreezeCommand(Command): @@ -27,54 +27,54 @@ def __init__(self, *args, **kw): super(FreezeCommand, self).__init__(*args, **kw) self.cmd_opts.add_option( - '-r', - '--requirement', - dest='requirements', - action='append', + "-r", + "--requirement", + dest="requirements", + action="append", default=[], - metavar='file', + metavar="file", help="Use the order in the given requirements file and its " "comments when generating output. This option can be " "used multiple times.", ) self.cmd_opts.add_option( - '-f', - '--find-links', - dest='find_links', - action='append', + "-f", + "--find-links", + dest="find_links", + action="append", default=[], - metavar='URL', - help='URL for finding packages, which will be added to the output.', + metavar="URL", + help="URL for finding packages, which will be added to the output.", ) self.cmd_opts.add_option( - '-l', - '--local', - dest='local', - action='store_true', + "-l", + "--local", + dest="local", + action="store_true", default=False, - help='If in a virtualenv that has global access, do not output ' - 'globally-installed packages.', + help="If in a virtualenv that has global access, do not output " + "globally-installed packages.", ) self.cmd_opts.add_option( - '--user', - dest='user', - action='store_true', + "--user", + dest="user", + action="store_true", default=False, - help='Only output packages installed in user-site.', + help="Only output packages installed in user-site.", ) self.cmd_opts.add_option(cmdoptions.list_path()) self.cmd_opts.add_option( - '--all', - dest='freeze_all', - action='store_true', - help='Do not skip these packages in the output:' - ' %s' % ', '.join(DEV_PKGS), + "--all", + dest="freeze_all", + action="store_true", + 
help="Do not skip these packages in the output:" + " %s" % ", ".join(DEV_PKGS), ) self.cmd_opts.add_option( - '--exclude-editable', - dest='exclude_editable', - action='store_true', - help='Exclude editable package from output.', + "--exclude-editable", + dest="exclude_editable", + action="store_true", + help="Exclude editable package from output.", ) self.parser.insert_option_group(0, self.cmd_opts) @@ -103,6 +103,6 @@ def run(self, options, args): try: for line in freeze(**freeze_kwargs): - sys.stdout.write(line + '\n') + sys.stdout.write(line + "\n") finally: wheel_cache.cleanup() diff --git a/src/pip/_internal/commands/hash.py b/src/pip/_internal/commands/hash.py index 8844c0fb8e7..fcd6463d03e 100644 --- a/src/pip/_internal/commands/hash.py +++ b/src/pip/_internal/commands/hash.py @@ -20,19 +20,19 @@ class HashCommand(Command): installs. """ - usage = '%prog [options] ...' + usage = "%prog [options] ..." ignore_require_venv = True def __init__(self, *args, **kw): super(HashCommand, self).__init__(*args, **kw) self.cmd_opts.add_option( - '-a', - '--algorithm', - dest='algorithm', + "-a", + "--algorithm", + dest="algorithm", choices=STRONG_HASHES, - action='store', + action="store", default=FAVORITE_HASH, - help='The hash algorithm to use: one of %s' % ', '.join(STRONG_HASHES), + help="The hash algorithm to use: one of %s" % ", ".join(STRONG_HASHES), ) self.parser.insert_option_group(0, self.cmd_opts) @@ -44,13 +44,13 @@ def run(self, options, args): algorithm = options.algorithm for path in args: write_output( - '%s:\n--hash=%s:%s', path, algorithm, _hash_of_file(path, algorithm) + "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm) ) def _hash_of_file(path, algorithm): """Return the hash digest of a file.""" - with open(path, 'rb') as archive: + with open(path, "rb") as archive: hash = hashlib.new(algorithm) for chunk in read_chunks(archive): hash.update(chunk) diff --git a/src/pip/_internal/commands/help.py b/src/pip/_internal/commands/help.py 
index 754e4d4fe43..161d02e3b2e 100644 --- a/src/pip/_internal/commands/help.py +++ b/src/pip/_internal/commands/help.py @@ -32,7 +32,7 @@ def run(self, options, args): if guess: msg.append('maybe you meant "%s"' % guess) - raise CommandError(' - '.join(msg)) + raise CommandError(" - ".join(msg)) command = create_command(cmd_name) command.parser.print_help() diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index cf58fb162be..1634256c8bc 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -133,41 +133,41 @@ def __init__(self, *args, **kw): cmd_opts.add_option(cmdoptions.editable()) cmd_opts.add_option( - '-t', - '--target', - dest='target_dir', - metavar='dir', + "-t", + "--target", + dest="target_dir", + metavar="dir", default=None, - help='Install packages into . ' - 'By default this will not replace existing files/folders in ' - '. Use --upgrade to replace existing packages in ' - 'with new versions.', + help="Install packages into . " + "By default this will not replace existing files/folders in " + ". Use --upgrade to replace existing packages in " + "with new versions.", ) cmdoptions.add_target_python_options(cmd_opts) cmd_opts.add_option( - '--user', - dest='use_user_site', - action='store_true', + "--user", + dest="use_user_site", + action="store_true", help="Install to the Python user install directory for your " "platform. Typically ~/.local/, or %APPDATA%\\Python on " "Windows. 
(See the Python documentation for site.USER_BASE " "for full details.)", ) cmd_opts.add_option( - '--no-user', dest='use_user_site', action='store_false', help=SUPPRESS_HELP + "--no-user", dest="use_user_site", action="store_false", help=SUPPRESS_HELP ) cmd_opts.add_option( - '--root', - dest='root_path', - metavar='dir', + "--root", + dest="root_path", + metavar="dir", default=None, help="Install everything relative to this alternate root directory.", ) cmd_opts.add_option( - '--prefix', - dest='prefix_path', - metavar='dir', + "--prefix", + dest="prefix_path", + metavar="dir", default=None, help="Installation prefix where lib, bin and other top-level " "folders are placed", @@ -178,45 +178,45 @@ def __init__(self, *args, **kw): cmd_opts.add_option(cmdoptions.src()) cmd_opts.add_option( - '-U', - '--upgrade', - dest='upgrade', - action='store_true', - help='Upgrade all specified packages to the newest available ' - 'version. The handling of dependencies depends on the ' - 'upgrade-strategy used.', + "-U", + "--upgrade", + dest="upgrade", + action="store_true", + help="Upgrade all specified packages to the newest available " + "version. The handling of dependencies depends on the " + "upgrade-strategy used.", ) cmd_opts.add_option( - '--upgrade-strategy', - dest='upgrade_strategy', - default='only-if-needed', - choices=['only-if-needed', 'eager'], - help='Determines how dependency upgrading should be handled ' - '[default: %default]. ' + "--upgrade-strategy", + dest="upgrade_strategy", + default="only-if-needed", + choices=["only-if-needed", "eager"], + help="Determines how dependency upgrading should be handled " + "[default: %default]. " '"eager" - dependencies are upgraded regardless of ' - 'whether the currently installed version satisfies the ' - 'requirements of the upgraded package(s). ' + "whether the currently installed version satisfies the " + "requirements of the upgraded package(s). 
" '"only-if-needed" - are upgraded only when they do not ' - 'satisfy the requirements of the upgraded package(s).', + "satisfy the requirements of the upgraded package(s).", ) cmd_opts.add_option( - '--force-reinstall', - dest='force_reinstall', - action='store_true', - help='Reinstall all packages even if they are already up-to-date.', + "--force-reinstall", + dest="force_reinstall", + action="store_true", + help="Reinstall all packages even if they are already up-to-date.", ) cmd_opts.add_option( - '-I', - '--ignore-installed', - dest='ignore_installed', - action='store_true', - help='Ignore the installed packages, overwriting them. ' - 'This can break your system if the existing package ' - 'is of a different version or was installed ' - 'with a different package manager!', + "-I", + "--ignore-installed", + dest="ignore_installed", + action="store_true", + help="Ignore the installed packages, overwriting them. " + "This can break your system if the existing package " + "is of a different version or was installed " + "with a different package manager!", ) cmd_opts.add_option(cmdoptions.ignore_requires_python()) @@ -294,8 +294,8 @@ def run(self, options, args): "Can not perform a '--user' install. User site-packages " "are not visible in this virtualenv." 
) - install_options.append('--user') - install_options.append('--prefix=') + install_options.append("--user") + install_options.append("--prefix=") target_temp_dir = None # type: Optional[TempDirectory] target_temp_dir_path = None # type: Optional[str] @@ -312,7 +312,7 @@ def run(self, options, args): # Create a target directory for using with the target option target_temp_dir = TempDirectory(kind="target") target_temp_dir_path = target_temp_dir.path - install_options.append('--home=' + target_temp_dir_path) + install_options.append("--home=" + target_temp_dir_path) global_options = options.global_options or [] @@ -449,7 +449,7 @@ def run(self, options, args): ) working_set = pkg_resources.WorkingSet(lib_locations) - reqs = sorted(installed, key=operator.attrgetter('name')) + reqs = sorted(installed, key=operator.attrgetter("name")) items = [] for req in reqs: item = req.name @@ -458,13 +458,13 @@ def run(self, options, args): req.name, working_set=working_set ) if installed_version: - item += '-' + installed_version + item += "-" + installed_version except Exception: pass items.append(item) - installed_desc = ' '.join(items) + installed_desc = " ".join(items) if installed_desc: - write_output('Successfully installed %s', installed_desc) + write_output("Successfully installed %s", installed_desc) except EnvironmentError as error: show_traceback = self.verbosity >= 1 @@ -500,10 +500,10 @@ def _handle_target_dir(self, target_dir, target_temp_dir, upgrade): with target_temp_dir: # Checking both purelib and platlib directories for installed # packages to be moved to target directory - scheme = distutils_scheme('', home=target_temp_dir.path) - purelib_dir = scheme['purelib'] - platlib_dir = scheme['platlib'] - data_dir = scheme['data'] + scheme = distutils_scheme("", home=target_temp_dir.path) + purelib_dir = scheme["purelib"] + platlib_dir = scheme["platlib"] + data_dir = scheme["data"] if os.path.exists(purelib_dir): lib_dir_list.append(purelib_dir) @@ -522,17 +522,17 
@@ def _handle_target_dir(self, target_dir, target_temp_dir, upgrade): if os.path.exists(target_item_dir): if not upgrade: logger.warning( - 'Target directory %s already exists. Specify ' - '--upgrade to force replacement.', + "Target directory %s already exists. Specify " + "--upgrade to force replacement.", target_item_dir, ) continue if os.path.islink(target_item_dir): logger.warning( - 'Target directory %s already exists and is ' - 'a link. Pip will not automatically replace ' - 'links, please remove if replacement is ' - 'desired.', + "Target directory %s already exists and is " + "a link. Pip will not automatically replace " + "links, please remove if replacement is " + "desired.", target_item_dir, ) continue @@ -577,8 +577,8 @@ def _warn_about_conflicts(self, to_install): def get_lib_location_guesses(*args, **kwargs): - scheme = distutils_scheme('', *args, **kwargs) - return [scheme['purelib'], scheme['platlib']] + scheme = distutils_scheme("", *args, **kwargs) + return [scheme["purelib"], scheme["platlib"]] def create_env_error_message(error, show_traceback, using_user_site): diff --git a/src/pip/_internal/commands/list.py b/src/pip/_internal/commands/list.py index 86d743983a3..8cbf7822dbc 100644 --- a/src/pip/_internal/commands/list.py +++ b/src/pip/_internal/commands/list.py @@ -38,47 +38,47 @@ def __init__(self, *args, **kw): cmd_opts = self.cmd_opts cmd_opts.add_option( - '-o', - '--outdated', - action='store_true', + "-o", + "--outdated", + action="store_true", default=False, - help='List outdated packages', + help="List outdated packages", ) cmd_opts.add_option( - '-u', - '--uptodate', - action='store_true', + "-u", + "--uptodate", + action="store_true", default=False, - help='List uptodate packages', + help="List uptodate packages", ) cmd_opts.add_option( - '-e', - '--editable', - action='store_true', + "-e", + "--editable", + action="store_true", default=False, - help='List editable projects.', + help="List editable projects.", ) 
cmd_opts.add_option( - '-l', - '--local', - action='store_true', + "-l", + "--local", + action="store_true", default=False, help=( - 'If in a virtualenv that has global access, do not list ' - 'globally-installed packages.' + "If in a virtualenv that has global access, do not list " + "globally-installed packages." ), ) self.cmd_opts.add_option( - '--user', - dest='user', - action='store_true', + "--user", + dest="user", + action="store_true", default=False, - help='Only output packages installed in user-site.', + help="Only output packages installed in user-site.", ) cmd_opts.add_option(cmdoptions.list_path()) cmd_opts.add_option( - '--pre', - action='store_true', + "--pre", + action="store_true", default=False, help=( "Include pre-release and development versions. By default, " @@ -87,33 +87,33 @@ def __init__(self, *args, **kw): ) cmd_opts.add_option( - '--format', - action='store', - dest='list_format', + "--format", + action="store", + dest="list_format", default="columns", - choices=('columns', 'freeze', 'json'), + choices=("columns", "freeze", "json"), help="Select the output format among: columns (default), freeze, " "or json", ) cmd_opts.add_option( - '--not-required', - action='store_true', - dest='not_required', + "--not-required", + action="store_true", + dest="not_required", help="List packages that are not dependencies of installed packages.", ) cmd_opts.add_option( - '--exclude-editable', - action='store_false', - dest='include_editable', - help='Exclude editable package from output.', + "--exclude-editable", + action="store_false", + dest="include_editable", + help="Exclude editable package from output.", ) cmd_opts.add_option( - '--include-editable', - action='store_true', - dest='include_editable', - help='Include editable package from output.', + "--include-editable", + action="store_true", + dest="include_editable", + help="Include editable package from output.", default=True, ) index_opts = cmdoptions.make_option_group(cmdoptions.index_group, 
self.parser) @@ -189,7 +189,7 @@ def iter_packages_latest_infos(self, packages, options): finder = self._build_package_finder(options, session) for dist in packages: - typ = 'unknown' + typ = "unknown" all_candidates = finder.find_all_candidates(dist.key) if not options.pre: # Remove prereleases @@ -208,9 +208,9 @@ def iter_packages_latest_infos(self, packages, options): remote_version = best_candidate.version if best_candidate.link.is_wheel: - typ = 'wheel' + typ = "wheel" else: - typ = 'sdist' + typ = "sdist" # This is dirty but makes the rest of the code much cleaner dist.latest_version = remote_version dist.latest_filetype = typ @@ -218,10 +218,10 @@ def iter_packages_latest_infos(self, packages, options): def output_package_listing(self, packages, options): packages = sorted(packages, key=lambda dist: dist.project_name.lower()) - if options.list_format == 'columns' and packages: + if options.list_format == "columns" and packages: data, header = format_for_columns(packages, options) self.output_package_listing_columns(data, header) - elif options.list_format == 'freeze': + elif options.list_format == "freeze": for dist in packages: if options.verbose >= 1: write_output( @@ -229,7 +229,7 @@ def output_package_listing(self, packages, options): ) else: write_output("%s==%s", dist.project_name, dist.version) - elif options.list_format == 'json': + elif options.list_format == "json": write_output(format_for_json(packages, options)) def output_package_listing_columns(self, data, header): @@ -241,7 +241,7 @@ def output_package_listing_columns(self, data, header): # Create and add a separator. 
if len(data) > 0: - pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes))) + pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes))) for val in pkg_strings: write_output(val) @@ -260,7 +260,7 @@ def tabulate(vals): for row in vals: display = " ".join( [ - str(c).ljust(s) if c is not None else '' + str(c).ljust(s) if c is not None else "" for s, c in zip_longest(sizes, row) ] ) @@ -309,12 +309,12 @@ def format_for_columns(pkgs, options): def format_for_json(packages, options): data = [] for dist in packages: - info = {'name': dist.project_name, 'version': six.text_type(dist.version)} + info = {"name": dist.project_name, "version": six.text_type(dist.version)} if options.verbose >= 1: - info['location'] = dist.location - info['installer'] = get_installer(dist) + info["location"] = dist.location + info["installer"] = get_installer(dist) if options.outdated: - info['latest_version'] = six.text_type(dist.latest_version) - info['latest_filetype'] = dist.latest_filetype + info["latest_version"] = six.text_type(dist.latest_version) + info["latest_filetype"] = dist.latest_filetype data.append(info) return json.dumps(data) diff --git a/src/pip/_internal/commands/search.py b/src/pip/_internal/commands/search.py index cdccde5fa92..cb5624243c2 100644 --- a/src/pip/_internal/commands/search.py +++ b/src/pip/_internal/commands/search.py @@ -7,6 +7,7 @@ from pip._vendor import pkg_resources from pip._vendor.packaging.version import parse as parse_version + # NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is # why we ignore the type on this import from pip._vendor.six.moves import xmlrpc_client # type: ignore @@ -34,19 +35,19 @@ class SearchCommand(Command, SessionCommandMixin): def __init__(self, *args, **kw): super(SearchCommand, self).__init__(*args, **kw) self.cmd_opts.add_option( - '-i', - '--index', - dest='index', - metavar='URL', + "-i", + "--index", + dest="index", + metavar="URL", default=PyPI.pypi_url, - help='Base URL of Python 
Package Index (default %default)', + help="Base URL of Python Package Index (default %default)", ) self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): if not args: - raise CommandError('Missing required argument (search query).') + raise CommandError("Missing required argument (search query).") query = args pypi_hits = self.search(query, options) hits = transform_hits(pypi_hits) @@ -67,7 +68,7 @@ def search(self, query, options): transport = PipXmlrpcTransport(index_url, session) pypi = xmlrpc_client.ServerProxy(index_url, transport) - hits = pypi.search({'name': query, 'summary': query}, 'or') + hits = pypi.search({"name": query, "summary": query}, "or") return hits @@ -79,18 +80,18 @@ def transform_hits(hits): """ packages = OrderedDict() for hit in hits: - name = hit['name'] - summary = hit['summary'] - version = hit['version'] + name = hit["name"] + summary = hit["summary"] + version = hit["version"] if name not in packages.keys(): - packages[name] = {'name': name, 'summary': summary, 'versions': [version]} + packages[name] = {"name": name, "summary": summary, "versions": [version]} else: - packages[name]['versions'].append(version) + packages[name]["versions"].append(version) # if this is the highest version, replace summary and score - if version == highest_version(packages[name]['versions']): - packages[name]['summary'] = summary + if version == highest_version(packages[name]["versions"]): + packages[name]["summary"] = summary return list(packages.values()) @@ -102,7 +103,7 @@ def print_results(hits, name_column_width=None, terminal_width=None): name_column_width = ( max( [ - len(hit['name']) + len(highest_version(hit.get('versions', ['-']))) + len(hit["name"]) + len(highest_version(hit.get("versions", ["-"]))) for hit in hits ] ) @@ -111,34 +112,34 @@ def print_results(hits, name_column_width=None, terminal_width=None): installed_packages = [p.project_name for p in pkg_resources.working_set] for hit in hits: - name = hit['name'] 
- summary = hit['summary'] or '' - latest = highest_version(hit.get('versions', ['-'])) + name = hit["name"] + summary = hit["summary"] or "" + latest = highest_version(hit.get("versions", ["-"])) if terminal_width is not None: target_width = terminal_width - name_column_width - 5 if target_width > 10: # wrap and indent summary to fit terminal summary = textwrap.wrap(summary, target_width) - summary = ('\n' + ' ' * (name_column_width + 3)).join(summary) + summary = ("\n" + " " * (name_column_width + 3)).join(summary) - line = '%-*s - %s' % (name_column_width, '%s (%s)' % (name, latest), summary) + line = "%-*s - %s" % (name_column_width, "%s (%s)" % (name, latest), summary) try: write_output(line) if name in installed_packages: dist = pkg_resources.get_distribution(name) with indent_log(): if dist.version == latest: - write_output('INSTALLED: %s (latest)', dist.version) + write_output("INSTALLED: %s (latest)", dist.version) else: - write_output('INSTALLED: %s', dist.version) + write_output("INSTALLED: %s", dist.version) if parse_version(latest).pre: write_output( - 'LATEST: %s (pre-release; install' + "LATEST: %s (pre-release; install" ' with "pip install --pre")', latest, ) else: - write_output('LATEST: %s', latest) + write_output("LATEST: %s", latest) except UnicodeEncodeError: pass diff --git a/src/pip/_internal/commands/show.py b/src/pip/_internal/commands/show.py index ea02269a085..1be55553e10 100644 --- a/src/pip/_internal/commands/show.py +++ b/src/pip/_internal/commands/show.py @@ -28,19 +28,19 @@ class ShowCommand(Command): def __init__(self, *args, **kw): super(ShowCommand, self).__init__(*args, **kw) self.cmd_opts.add_option( - '-f', - '--files', - dest='files', - action='store_true', + "-f", + "--files", + dest="files", + action="store_true", default=False, - help='Show the full list of installed files for each package.', + help="Show the full list of installed files for each package.", ) self.parser.insert_option_group(0, self.cmd_opts) def run(self, 
options, args): if not args: - logger.warning('ERROR: Please provide a package name or names.') + logger.warning("ERROR: Please provide a package name or names.") return ERROR query = args @@ -68,7 +68,7 @@ def search_packages_info(query): [name for name, pkg in zip(query, query_names) if pkg not in installed] ) if missing: - logger.warning('Package(s) not found: %s', ', '.join(missing)) + logger.warning("Package(s) not found: %s", ", ".join(missing)) def get_requiring_packages(package_name): canonical_name = canonicalize_name(package_name) @@ -81,42 +81,42 @@ def get_requiring_packages(package_name): for dist in [installed[pkg] for pkg in query_names if pkg in installed]: package = { - 'name': dist.project_name, - 'version': dist.version, - 'location': dist.location, - 'requires': [dep.project_name for dep in dist.requires()], - 'required_by': get_requiring_packages(dist.project_name), + "name": dist.project_name, + "version": dist.version, + "location": dist.location, + "requires": [dep.project_name for dep in dist.requires()], + "required_by": get_requiring_packages(dist.project_name), } file_list = None metadata = None if isinstance(dist, pkg_resources.DistInfoDistribution): # RECORDs should be part of .dist-info metadatas - if dist.has_metadata('RECORD'): - lines = dist.get_metadata_lines('RECORD') - paths = [l.split(',')[0] for l in lines] + if dist.has_metadata("RECORD"): + lines = dist.get_metadata_lines("RECORD") + paths = [l.split(",")[0] for l in lines] paths = [os.path.join(dist.location, p) for p in paths] file_list = [os.path.relpath(p, dist.location) for p in paths] - if dist.has_metadata('METADATA'): - metadata = dist.get_metadata('METADATA') + if dist.has_metadata("METADATA"): + metadata = dist.get_metadata("METADATA") else: # Otherwise use pip's log for .egg-info's - if dist.has_metadata('installed-files.txt'): - paths = dist.get_metadata_lines('installed-files.txt') + if dist.has_metadata("installed-files.txt"): + paths = 
dist.get_metadata_lines("installed-files.txt") paths = [os.path.join(dist.egg_info, p) for p in paths] file_list = [os.path.relpath(p, dist.location) for p in paths] - if dist.has_metadata('PKG-INFO'): - metadata = dist.get_metadata('PKG-INFO') + if dist.has_metadata("PKG-INFO"): + metadata = dist.get_metadata("PKG-INFO") - if dist.has_metadata('entry_points.txt'): - entry_points = dist.get_metadata_lines('entry_points.txt') - package['entry_points'] = entry_points + if dist.has_metadata("entry_points.txt"): + entry_points = dist.get_metadata_lines("entry_points.txt") + package["entry_points"] = entry_points - if dist.has_metadata('INSTALLER'): - for line in dist.get_metadata_lines('INSTALLER'): + if dist.has_metadata("INSTALLER"): + for line in dist.get_metadata_lines("INSTALLER"): if line.strip(): - package['installer'] = line.strip() + package["installer"] = line.strip() break # @todo: Should pkg_resources.Distribution have a @@ -125,24 +125,24 @@ def get_requiring_packages(package_name): feed_parser.feed(metadata) pkg_info_dict = feed_parser.close() for key in ( - 'metadata-version', - 'summary', - 'home-page', - 'author', - 'author-email', - 'license', + "metadata-version", + "summary", + "home-page", + "author", + "author-email", + "license", ): package[key] = pkg_info_dict.get(key) # It looks like FeedParser cannot deal with repeated headers classifiers = [] for line in metadata.splitlines(): - if line.startswith('Classifier: '): - classifiers.append(line[len('Classifier: ') :]) - package['classifiers'] = classifiers + if line.startswith("Classifier: "): + classifiers.append(line[len("Classifier: ") :]) + package["classifiers"] = classifiers if file_list: - package['files'] = sorted(file_list) + package["files"] = sorted(file_list) yield package @@ -156,29 +156,29 @@ def print_results(distributions, list_files=False, verbose=False): if i > 0: write_output("---") - write_output("Name: %s", dist.get('name', '')) - write_output("Version: %s", 
dist.get('version', '')) - write_output("Summary: %s", dist.get('summary', '')) - write_output("Home-page: %s", dist.get('home-page', '')) - write_output("Author: %s", dist.get('author', '')) - write_output("Author-email: %s", dist.get('author-email', '')) - write_output("License: %s", dist.get('license', '')) - write_output("Location: %s", dist.get('location', '')) - write_output("Requires: %s", ', '.join(dist.get('requires', []))) - write_output("Required-by: %s", ', '.join(dist.get('required_by', []))) + write_output("Name: %s", dist.get("name", "")) + write_output("Version: %s", dist.get("version", "")) + write_output("Summary: %s", dist.get("summary", "")) + write_output("Home-page: %s", dist.get("home-page", "")) + write_output("Author: %s", dist.get("author", "")) + write_output("Author-email: %s", dist.get("author-email", "")) + write_output("License: %s", dist.get("license", "")) + write_output("Location: %s", dist.get("location", "")) + write_output("Requires: %s", ", ".join(dist.get("requires", []))) + write_output("Required-by: %s", ", ".join(dist.get("required_by", []))) if verbose: - write_output("Metadata-Version: %s", dist.get('metadata-version', '')) - write_output("Installer: %s", dist.get('installer', '')) + write_output("Metadata-Version: %s", dist.get("metadata-version", "")) + write_output("Installer: %s", dist.get("installer", "")) write_output("Classifiers:") - for classifier in dist.get('classifiers', []): + for classifier in dist.get("classifiers", []): write_output(" %s", classifier) write_output("Entry-points:") - for entry in dist.get('entry_points', []): + for entry in dist.get("entry_points", []): write_output(" %s", entry.strip()) if list_files: write_output("Files:") - for line in dist.get('files', []): + for line in dist.get("files", []): write_output(" %s", line.strip()) if "files" not in dist: write_output("Cannot locate installed-files.txt") diff --git a/src/pip/_internal/commands/uninstall.py 
b/src/pip/_internal/commands/uninstall.py index 0436941ab5d..bff27398ed9 100644 --- a/src/pip/_internal/commands/uninstall.py +++ b/src/pip/_internal/commands/uninstall.py @@ -28,20 +28,20 @@ class UninstallCommand(Command, SessionCommandMixin): def __init__(self, *args, **kw): super(UninstallCommand, self).__init__(*args, **kw) self.cmd_opts.add_option( - '-r', - '--requirement', - dest='requirements', - action='append', + "-r", + "--requirement", + dest="requirements", + action="append", default=[], - metavar='file', - help='Uninstall all the packages listed in the given requirements ' - 'file. This option can be used multiple times.', + metavar="file", + help="Uninstall all the packages listed in the given requirements " + "file. This option can be used multiple times.", ) self.cmd_opts.add_option( - '-y', - '--yes', - dest='yes', - action='store_true', + "-y", + "--yes", + dest="yes", + action="store_true", help="Don't ask for confirmation of uninstall deletions.", ) @@ -61,7 +61,7 @@ def run(self, options, args): reqs_to_uninstall[canonicalize_name(req.name)] = req if not reqs_to_uninstall: raise InstallationError( - 'You must give at least one requirement to %(name)s (see ' + "You must give at least one requirement to %(name)s (see " '"pip help %(name)s")' % dict(name=self.name) ) diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py index 468352f31fa..fae72802160 100644 --- a/src/pip/_internal/commands/wheel.py +++ b/src/pip/_internal/commands/wheel.py @@ -50,10 +50,10 @@ def __init__(self, *args, **kw): cmd_opts = self.cmd_opts cmd_opts.add_option( - '-w', - '--wheel-dir', - dest='wheel_dir', - metavar='dir', + "-w", + "--wheel-dir", + dest="wheel_dir", + metavar="dir", default=os.curdir, help=( "Build wheels into , where the default is the " @@ -64,10 +64,10 @@ def __init__(self, *args, **kw): cmd_opts.add_option(cmdoptions.only_binary()) cmd_opts.add_option(cmdoptions.prefer_binary()) cmd_opts.add_option( - 
'--build-option', - dest='build_options', - metavar='options', - action='append', + "--build-option", + dest="build_options", + metavar="options", + action="append", help="Extra arguments to be supplied to 'setup.py bdist_wheel'.", ) cmd_opts.add_option(cmdoptions.no_build_isolation()) @@ -83,17 +83,17 @@ def __init__(self, *args, **kw): cmd_opts.add_option(cmdoptions.progress_bar()) cmd_opts.add_option( - '--global-option', - dest='global_options', - action='append', - metavar='options', + "--global-option", + dest="global_options", + action="append", + metavar="options", help="Extra global options to be supplied to the setup.py " "call before the 'bdist_wheel' command.", ) cmd_opts.add_option( - '--pre', - action='store_true', + "--pre", + action="store_true", default=False, help=( "Include pre-release and development versions. By default, " diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index 103387536ac..d5118774bf5 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -44,8 +44,8 @@ def _normalize_name(name): # type: (str) -> str """Make a name consistent regardless of source (environment or file) """ - name = name.lower().replace('_', '-') - if name.startswith('--'): + name = name.lower().replace("_", "-") + if name.startswith("--"): name = name[2:] # only prefer long opts return name @@ -71,17 +71,17 @@ def _disassemble_key(name): ) -CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf' +CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf" def get_configuration_files(): global_config_files = [ - os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs('pip') + os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip") ] site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME) legacy_config_file = os.path.join( - expanduser('~'), 'pip' if WINDOWS else '.pip', CONFIG_BASENAME + expanduser("~"), "pip" if WINDOWS else ".pip", CONFIG_BASENAME ) 
new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME) return { @@ -371,7 +371,7 @@ def _iter_config_files(self): # SMELL: Move the conditions out of this function # environment variables have the lowest priority - config_file = os.environ.get('PIP_CONFIG_FILE', None) + config_file = os.environ.get("PIP_CONFIG_FILE", None) if config_file is not None: yield kinds.ENV, [config_file] else: diff --git a/src/pip/_internal/distributions/source/legacy.py b/src/pip/_internal/distributions/source/legacy.py index 61cc8571449..615a61ffaf4 100644 --- a/src/pip/_internal/distributions/source/legacy.py +++ b/src/pip/_internal/distributions/source/legacy.py @@ -44,8 +44,8 @@ def _raise_conflicts(conflicting_with, conflicting_reqs): error_message = format_string.format( requirement=self.req, conflicting_with=conflicting_with, - description=', '.join( - '%s is incompatible with %s' % (installed, wanted) + description=", ".join( + "%s is incompatible with %s" % (installed, wanted) for installed, wanted in sorted(conflicting) ), ) @@ -57,7 +57,7 @@ def _raise_conflicts(conflicting_with, conflicting_reqs): self.req.build_env.install_requirements( finder, self.req.pyproject_requires, - 'overlay', + "overlay", "Installing build dependencies", ) conflicting, missing = self.req.build_env.check_requirements( @@ -85,5 +85,5 @@ def _raise_conflicts(conflicting_with, conflicting_reqs): if conflicting: _raise_conflicts("the backend dependencies", conflicting) self.req.build_env.install_requirements( - finder, missing, 'normal', "Installing backend dependencies" + finder, missing, "normal", "Installing backend dependencies" ) diff --git a/src/pip/_internal/download.py b/src/pip/_internal/download.py index ab4c64b20ed..0829dc31745 100644 --- a/src/pip/_internal/download.py +++ b/src/pip/_internal/download.py @@ -17,6 +17,7 @@ from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response from pip._vendor.requests.structures import CaseInsensitiveDict from 
pip._vendor.six import PY2 + # NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is # why we ignore the type on this import from pip._vendor.six.moves import xmlrpc_client # type: ignore @@ -27,6 +28,7 @@ from pip._internal.models.index import PyPI from pip._internal.network.auth import MultiDomainBasicAuth from pip._internal.network.cache import SafeFileCache + # Import ssl from compat so the initial import occurs in only one place. from pip._internal.utils.compat import HAS_TLS, ipaddress, ssl from pip._internal.utils.encoding import auto_decode @@ -66,31 +68,31 @@ if PY2: CopytreeKwargs = TypedDict( - 'CopytreeKwargs', - {'ignore': Callable[[str, List[str]], List[str]], 'symlinks': bool}, + "CopytreeKwargs", + {"ignore": Callable[[str, List[str]], List[str]], "symlinks": bool}, total=False, ) else: CopytreeKwargs = TypedDict( - 'CopytreeKwargs', + "CopytreeKwargs", { - 'copy_function': Callable[[str, str], None], - 'ignore': Callable[[str, List[str]], List[str]], - 'ignore_dangling_symlinks': bool, - 'symlinks': bool, + "copy_function": Callable[[str, str], None], + "ignore": Callable[[str, List[str]], List[str]], + "ignore_dangling_symlinks": bool, + "symlinks": bool, }, total=False, ) __all__ = [ - 'get_file_content', - 'unpack_vcs_link', - 'unpack_file_url', - 'unpack_http_url', - 'unpack_url', - 'parse_content_disposition', - 'sanitize_content_filename', + "get_file_content", + "unpack_vcs_link", + "unpack_file_url", + "unpack_http_url", + "unpack_url", + "parse_content_disposition", + "sanitize_content_filename", ] @@ -119,13 +121,13 @@ # For more background, see: https://github.com/pypa/pip/issues/5499 CI_ENVIRONMENT_VARIABLES = ( # Azure Pipelines - 'BUILD_BUILDID', + "BUILD_BUILDID", # Jenkins - 'BUILD_ID', + "BUILD_ID", # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI - 'CI', + "CI", # Explicit environment variable. 
- 'PIP_IS_CI', + "PIP_IS_CI", ) @@ -150,20 +152,20 @@ def user_agent(): "implementation": {"name": platform.python_implementation()}, } - if data["implementation"]["name"] == 'CPython': + if data["implementation"]["name"] == "CPython": data["implementation"]["version"] = platform.python_version() - elif data["implementation"]["name"] == 'PyPy': - if sys.pypy_version_info.releaselevel == 'final': + elif data["implementation"]["name"] == "PyPy": + if sys.pypy_version_info.releaselevel == "final": pypy_version_info = sys.pypy_version_info[:3] else: pypy_version_info = sys.pypy_version_info data["implementation"]["version"] = ".".join( [str(x) for x in pypy_version_info] ) - elif data["implementation"]["name"] == 'Jython': + elif data["implementation"]["name"] == "Jython": # Complete Guess data["implementation"]["version"] = platform.python_version() - elif data["implementation"]["name"] == 'IronPython': + elif data["implementation"]["name"] == "IronPython": # Complete Guess data["implementation"]["version"] = platform.python_version() @@ -253,7 +255,7 @@ def close(self): class InsecureHTTPAdapter(HTTPAdapter): def cert_verify(self, conn, url, verify, cert): - conn.cert_reqs = 'CERT_NONE' + conn.cert_reqs = "CERT_NONE" conn.ca_certs = None @@ -351,26 +353,26 @@ def add_trusted_host(self, host, source=None, suppress_logging=False): string came from. """ if not suppress_logging: - msg = 'adding trusted host: {!r}'.format(host) + msg = "adding trusted host: {!r}".format(host) if source is not None: - msg += ' (from {})'.format(source) + msg += " (from {})".format(source) logger.info(msg) host_port = parse_netloc(host) if host_port not in self.pip_trusted_origins: self.pip_trusted_origins.append(host_port) - self.mount(build_url_from_netloc(host) + '/', self._insecure_adapter) + self.mount(build_url_from_netloc(host) + "/", self._insecure_adapter) if not host_port[1]: # Mount wildcard ports for the same host. 
- self.mount(build_url_from_netloc(host) + ':', self._insecure_adapter) + self.mount(build_url_from_netloc(host) + ":", self._insecure_adapter) def iter_secure_origins(self): # type: () -> Iterator[SecureOrigin] for secure_origin in SECURE_ORIGINS: yield secure_origin for host, port in self.pip_trusted_origins: - yield ('*', host, '*' if port is None else port) + yield ("*", host, "*" if port is None else port) def is_secure_origin(self, location): # type: (Link) -> bool @@ -386,7 +388,7 @@ def is_secure_origin(self, location): # Don't count the repository type as part of the protocol: in # cases such as "git+ssh", only use "ssh". (I.e., Only verify against # the last scheme.) - origin_protocol = origin_protocol.rsplit('+', 1)[-1] + origin_protocol = origin_protocol.rsplit("+", 1)[-1] # Determine if our origin is a secure origin by looking through our # hardcoded list of secure origins, as well as any additional ones @@ -476,38 +478,38 @@ def get_file_content(url, comes_from=None, session=None): scheme = get_url_scheme(url) - if scheme in ['http', 'https']: + if scheme in ["http", "https"]: # FIXME: catch some errors resp = session.get(url) resp.raise_for_status() return resp.url, resp.text - elif scheme == 'file': - if comes_from and comes_from.startswith('http'): + elif scheme == "file": + if comes_from and comes_from.startswith("http"): raise InstallationError( - 'Requirements file %s references URL %s, which is local' + "Requirements file %s references URL %s, which is local" % (comes_from, url) ) - path = url.split(':', 1)[1] - path = path.replace('\\', '/') + path = url.split(":", 1)[1] + path = path.replace("\\", "/") match = _url_slash_drive_re.match(path) if match: - path = match.group(1) + ':' + path.split('|', 1)[1] + path = match.group(1) + ":" + path.split("|", 1)[1] path = urllib_parse.unquote(path) - if path.startswith('/'): - path = '/' + path.lstrip('/') + if path.startswith("/"): + path = "/" + path.lstrip("/") url = path try: - with open(url, 
'rb') as f: + with open(url, "rb") as f: content = auto_decode(f.read()) except IOError as exc: - raise InstallationError('Could not open requirements file: %s' % str(exc)) + raise InstallationError("Could not open requirements file: %s" % str(exc)) return url, content -_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) +_url_slash_drive_re = re.compile(r"/*([a-z])\|", re.I) def unpack_vcs_link(link, location): @@ -541,7 +543,7 @@ def _download_url( ): # type: (...) -> None try: - total_length = int(resp.headers['content-length']) + total_length = int(resp.headers["content-length"]) except (ValueError, KeyError, TypeError): total_length = 0 @@ -634,28 +636,28 @@ def _copy_file(filename, location, link): download_location = os.path.join(location, link.filename) if os.path.exists(download_location): response = ask_path_exists( - 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort' + "The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort" % display_path(download_location), - ('i', 'w', 'b', 'a'), + ("i", "w", "b", "a"), ) - if response == 'i': + if response == "i": copy = False - elif response == 'w': - logger.warning('Deleting %s', display_path(download_location)) + elif response == "w": + logger.warning("Deleting %s", display_path(download_location)) os.remove(download_location) - elif response == 'b': + elif response == "b": dest_file = backup_dir(download_location) logger.warning( - 'Backing up %s to %s', + "Backing up %s to %s", display_path(download_location), display_path(dest_file), ) shutil.move(download_location, dest_file) - elif response == 'a': + elif response == "a": sys.exit(-1) if copy: shutil.copy(filename, download_location) - logger.info('Saved %s', display_path(download_location)) + logger.info("Saved %s", display_path(download_location)) def unpack_http_url( @@ -727,14 +729,14 @@ def ignore(d, names): # exclude the following directories if they appear in the top # level dir (and only it). 
# See discussion at https://github.com/pypa/pip/pull/6770 - return ['.tox', '.nox'] if d == source else [] + return [".tox", ".nox"] if d == source else [] kwargs = dict(ignore=ignore, symlinks=True) # type: CopytreeKwargs if not PY2: # Python 2 does not support copy_function, so we only ignore # errors on special file copy in Python 3. - kwargs['copy_function'] = _copy2_ignoring_special_files + kwargs["copy_function"] = _copy2_ignoring_special_files shutil.copytree(source, target, **kwargs) @@ -758,7 +760,7 @@ def unpack_file_url( rmtree(location) _copy_source_tree(link_path, location) if download_dir: - logger.info('Link is a directory, ignoring download_dir') + logger.info("Link is a directory, ignoring download_dir") return # If --require-hashes is off, `hashes` is either empty, the @@ -805,7 +807,7 @@ def request(self, host, handler, request_body, verbose=False): parts = (self._scheme, host, handler, None, None, None) url = urllib_parse.urlunparse(parts) try: - headers = {'Content-Type': 'text/xml'} + headers = {"Content-Type": "text/xml"} response = self._session.post( url, data=request_body, headers=headers, stream=True ) @@ -880,7 +882,7 @@ def parse_content_disposition(content_disposition, default_filename): return the default filename if the result is empty. """ _type, params = cgi.parse_header(content_disposition) - filename = params.get('filename') + filename = params.get("filename") if filename: # We need to sanitize the filename to prevent directory traversal # in case the filename contains ".." path parts. @@ -897,7 +899,7 @@ def _download_http_url( ): # type: (...) 
-> Tuple[str, str] """Download link url into temp_dir using provided session""" - target_url = link.url.split('#', 1)[0] + target_url = link.url.split("#", 1)[0] try: resp = session.get( target_url, @@ -930,10 +932,10 @@ def _download_http_url( ) raise - content_type = resp.headers.get('content-type', '') + content_type = resp.headers.get("content-type", "") filename = link.filename # fallback # Have a look at the Content-Disposition header for a better guess - content_disposition = resp.headers.get('content-disposition') + content_disposition = resp.headers.get("content-disposition") if content_disposition: filename = parse_content_disposition(content_disposition, filename) ext = splitext(filename)[1] # type: Optional[str] @@ -946,7 +948,7 @@ def _download_http_url( if ext: filename += ext file_path = os.path.join(temp_dir, filename) - with open(file_path, 'wb') as content_file: + with open(file_path, "wb") as content_file: _download_url(resp, link, content_file, hashes, progress_bar) return file_path, content_type @@ -962,13 +964,13 @@ def _check_download_dir(link, download_dir, hashes): return None # If already downloaded, does its hash match? - logger.info('File was already downloaded %s', download_path) + logger.info("File was already downloaded %s", download_path) if hashes: try: hashes.check_against_path(download_path) except HashMismatch: logger.warning( - 'Previously-downloaded file %s has bad hash. Re-downloading.', + "Previously-downloaded file %s has bad hash. Re-downloading.", download_path, ) os.unlink(download_path) diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index 7fd1a2866e3..83816e86b2a 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -52,7 +52,7 @@ def __str__(self): # type: () -> str # Use `dist` in the error message because its stringification # includes more information, like the version and location. 
- return 'None {} metadata found for distribution: {}'.format( + return "None {} metadata found for distribution: {}".format( self.metadata_name, self.dist ) @@ -106,7 +106,7 @@ def __str__(self): lines.append(cls.head) lines.extend(e.body() for e in errors_of_cls) if lines: - return '\n'.join(lines) + return "\n".join(lines) def __nonzero__(self): return bool(self.errors) @@ -133,7 +133,7 @@ class HashError(InstallationError): """ req = None # type: Optional[InstallRequirement] - head = '' + head = "" def body(self): """Return a summary of me for display under the heading. @@ -145,10 +145,10 @@ def body(self): populate_link() having already been called """ - return ' %s' % self._requirement_name() + return " %s" % self._requirement_name() def __str__(self): - return '%s\n%s' % (self.head, self.body()) + return "%s\n%s" % (self.head, self.body()) def _requirement_name(self): """Return a description of the requirement that triggered me. @@ -157,7 +157,7 @@ def _requirement_name(self): line numbers """ - return str(self.req) if self.req else 'unknown package' + return str(self.req) if self.req else "unknown package" class VcsHashUnsupported(HashError): @@ -187,13 +187,13 @@ class HashMissing(HashError): order = 2 head = ( - 'Hashes are required in --require-hashes mode, but they are ' - 'missing from some requirements. Here is a list of those ' - 'requirements along with the hashes their downloaded archives ' - 'actually had. Add lines like these to your requirements files to ' - 'prevent tampering. (If you did not enable --require-hashes ' - 'manually, note that it turns on automatically when any package ' - 'has a hash.)' + "Hashes are required in --require-hashes mode, but they are " + "missing from some requirements. Here is a list of those " + "requirements along with the hashes their downloaded archives " + "actually had. Add lines like these to your requirements files to " + "prevent tampering. 
(If you did not enable --require-hashes " + "manually, note that it turns on automatically when any package " + "has a hash.)" ) def __init__(self, gotten_hash): @@ -217,10 +217,10 @@ def body(self): if self.req.original_link # In case someone feeds something downright stupid # to InstallRequirement's constructor. - else getattr(self.req, 'req', None) + else getattr(self.req, "req", None) ) - return ' %s --hash=%s:%s' % ( - package or 'unknown package', + return " %s --hash=%s:%s" % ( + package or "unknown package", FAVORITE_HASH, self.gotten_hash, ) @@ -232,8 +232,8 @@ class HashUnpinned(HashError): order = 3 head = ( - 'In --require-hashes mode, all requirements must have their ' - 'versions pinned with ==. These do not:' + "In --require-hashes mode, all requirements must have their " + "versions pinned with ==. These do not:" ) @@ -249,10 +249,10 @@ class HashMismatch(HashError): order = 4 head = ( - 'THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS ' - 'FILE. If you have updated the package versions, please update ' - 'the hashes. Otherwise, examine the package contents carefully; ' - 'someone may have tampered with them.' + "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS " + "FILE. If you have updated the package versions, please update " + "the hashes. Otherwise, examine the package contents carefully; " + "someone may have tampered with them." ) def __init__(self, allowed, gots): @@ -266,7 +266,7 @@ def __init__(self, allowed, gots): self.gots = gots def body(self): - return ' %s:\n%s' % (self._requirement_name(), self._hash_comparison()) + return " %s:\n%s" % (self._requirement_name(), self._hash_comparison()) def _hash_comparison(self): """ @@ -283,19 +283,19 @@ def _hash_comparison(self): def hash_then_or(hash_name): # For now, all the decent hashes have 6-char names, so we can get # away with hard-coding space literals. 
- return chain([hash_name], repeat(' or')) + return chain([hash_name], repeat(" or")) lines = [] for hash_name, expecteds in iteritems(self.allowed): prefix = hash_then_or(hash_name) lines.extend( - (' Expected %s %s' % (next(prefix), e)) for e in expecteds + (" Expected %s %s" % (next(prefix), e)) for e in expecteds ) lines.append( - ' Got %s\n' % self.gots[hash_name].hexdigest() + " Got %s\n" % self.gots[hash_name].hexdigest() ) - prefix = ' or' - return '\n'.join(lines) + prefix = " or" + return "\n".join(lines) class UnsupportedPythonVersion(InstallationError): diff --git a/src/pip/_internal/index.py b/src/pip/_internal/index.py index 8366eb9305e..b7fe62921fb 100644 --- a/src/pip/_internal/index.py +++ b/src/pip/_internal/index.py @@ -45,7 +45,7 @@ CandidateSortingKey = Tuple[int, int, int, _BaseVersion, BuildTag, Optional[int]] -__all__ = ['FormatControl', 'BestCandidateResult', 'PackageFinder'] +__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"] logger = logging.getLogger(__name__) @@ -78,10 +78,10 @@ def _check_link_requires_python( ) else: if not is_compatible: - version = '.'.join(map(str, version_info)) + version = ".".join(map(str, version_info)) if not ignore_requires_python: logger.debug( - 'Link requires a different Python (%s not in: %r): %s', + "Link requires a different Python (%s not in: %r): %s", version, link.requires_python, link, @@ -89,7 +89,7 @@ def _check_link_requires_python( return False logger.debug( - 'Ignoring failed Requires-Python check (%s not in: %r) for link: %s', + "Ignoring failed Requires-Python check (%s not in: %r) for link: %s", version, link.requires_python, link, @@ -104,7 +104,7 @@ class LinkEvaluator(object): Responsible for evaluating links for a particular project. """ - _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') + _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$") # Don't include an allow_yanked default value to make sure each call # site considers whether yanked releases are allowed. 
This also causes @@ -160,11 +160,11 @@ def evaluate_link(self, link): """ version = None if link.is_yanked and not self._allow_yanked: - reason = link.yanked_reason or '' + reason = link.yanked_reason or "" # Mark this as a unicode string to prevent "UnicodeEncodeError: # 'ascii' codec can't encode character" in Python 2 when # the reason contains non-ascii characters. - return (False, u'yanked for reason: {}'.format(reason)) + return (False, u"yanked for reason: {}".format(reason)) if link.egg_fragment: egg_info = link.egg_fragment @@ -172,21 +172,21 @@ def evaluate_link(self, link): else: egg_info, ext = link.splitext() if not ext: - return (False, 'not a file') + return (False, "not a file") if ext not in SUPPORTED_EXTENSIONS: - return (False, 'unsupported archive format: %s' % ext) + return (False, "unsupported archive format: %s" % ext) if "binary" not in self._formats and ext == WHEEL_EXTENSION: - reason = 'No binaries permitted for %s' % self.project_name + reason = "No binaries permitted for %s" % self.project_name return (False, reason) - if "macosx10" in link.path and ext == '.zip': - return (False, 'macosx10 one') + if "macosx10" in link.path and ext == ".zip": + return (False, "macosx10 one") if ext == WHEEL_EXTENSION: try: wheel = Wheel(link.filename) except InvalidWheelFilename: - return (False, 'invalid wheel filename') + return (False, "invalid wheel filename") if canonicalize_name(wheel.name) != self._canonical_name: - reason = 'wrong project name (not %s)' % self.project_name + reason = "wrong project name (not %s)" % self.project_name return (False, reason) supported_tags = self._target_python.get_tags() @@ -195,7 +195,7 @@ def evaluate_link(self, link): # simplify troubleshooting compatibility issues. 
file_tags = wheel.get_formatted_file_tags() reason = "none of the wheel's tags match: {}".format( - ', '.join(file_tags) + ", ".join(file_tags) ) return (False, reason) @@ -203,19 +203,19 @@ def evaluate_link(self, link): # This should be up by the self.ok_binary check, but see issue 2700. if "source" not in self._formats and ext != WHEEL_EXTENSION: - return (False, 'No sources permitted for %s' % self.project_name) + return (False, "No sources permitted for %s" % self.project_name) if not version: version = _extract_version_from_fragment(egg_info, self._canonical_name) if not version: - return (False, 'Missing project version for %s' % self.project_name) + return (False, "Missing project version for %s" % self.project_name) match = self._py_version_re.search(version) if match: version = version[: match.start()] py_version = match.group(1) if py_version != self._target_python.py_version: - return (False, 'Python version is incorrect') + return (False, "Python version is incorrect") supports_python = _check_link_requires_python( link, @@ -227,7 +227,7 @@ def evaluate_link(self, link): # _log_skipped_link(). 
return (False, None) - logger.debug('Found link %s, version: %s', link, version) + logger.debug("Found link %s, version: %s", link, version) return (True, version) @@ -255,8 +255,8 @@ def filter_unallowed_hashes( """ if not hashes: logger.debug( - 'Given no hashes to check %s links for project %r: ' - 'discarding no candidates', + "Given no hashes to check %s links for project %r: " + "discarding no candidates", len(candidates), project_name, ) @@ -286,16 +286,16 @@ def filter_unallowed_hashes( filtered = list(candidates) if len(filtered) == len(candidates): - discard_message = 'discarding no candidates' + discard_message = "discarding no candidates" else: - discard_message = 'discarding {} non-matches:\n {}'.format( + discard_message = "discarding {} non-matches:\n {}".format( len(non_matches), - '\n '.join(str(candidate.link) for candidate in non_matches), + "\n ".join(str(candidate.link) for candidate in non_matches), ) logger.debug( - 'Checked %s links for project %r against %s hashes ' - '(%s matches, %s no digest): %s', + "Checked %s links for project %r against %s hashes " + "(%s matches, %s no digest): %s", len(candidates), project_name, hashes.digest_count, @@ -519,7 +519,7 @@ def _sort_key(self, candidate): binary_preference = 1 pri = -(wheel.support_index_min(valid_tags)) if wheel.build_tag is not None: - match = re.match(r'^(\d+)(.*)$', wheel.build_tag) + match = re.match(r"^(\d+)(.*)$", wheel.build_tag) build_tag_groups = match.groups() build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) else: # sdist @@ -551,14 +551,14 @@ def sort_best_candidate( # Log a warning per PEP 592 if necessary before returning. link = best_candidate.link if link.is_yanked: - reason = link.yanked_reason or '' + reason = link.yanked_reason or "" msg = ( # Mark this as a unicode string to prevent # "UnicodeEncodeError: 'ascii' codec can't encode character" # in Python 2 when the reason contains non-ascii characters. 
- u'The candidate selected for download or install is a ' - 'yanked version: {candidate}\n' - 'Reason for being yanked: {reason}' + u"The candidate selected for download or install is a " + "yanked version: {candidate}\n" + "Reason for being yanked: {reason}" ).format(candidate=best_candidate, reason=reason) logger.warning(msg) @@ -736,7 +736,7 @@ def _log_skipped_link(self, link, reason): # the reason contains non-ascii characters. # Also, put the link at the end so the reason is more visible # and because the link string is usually very long. - logger.debug(u'Skipping link: %s: %s', reason, link) + logger.debug(u"Skipping link: %s: %s", reason, link) self._logged_links.add(link) def get_install_candidate(self, link_evaluator, link): @@ -792,7 +792,7 @@ def find_all_candidates(self, project_name): page_versions = [] for page_url, page_links in collected_links.pages.items(): - logger.debug('Analyzing links from page %s', page_url) + logger.debug("Analyzing links from page %s", page_url) with indent_log(): new_versions = self.evaluate_links(link_evaluator, links=page_links) page_versions.extend(new_versions) @@ -801,8 +801,8 @@ def find_all_candidates(self, project_name): if file_versions: file_versions.sort(reverse=True) logger.debug( - 'Local files found: %s', - ', '.join( + "Local files found: %s", + ", ".join( [url_to_path(candidate.link.url) for candidate in file_versions] ), ) @@ -882,13 +882,13 @@ def _format_versions(cand_iter): if installed_version is None and best_candidate is None: logger.critical( - 'Could not find a version that satisfies the requirement %s ' - '(from versions: %s)', + "Could not find a version that satisfies the requirement %s " + "(from versions: %s)", req, _format_versions(best_candidate_result.iter_all()), ) - raise DistributionNotFound('No matching distribution found for %s' % req) + raise DistributionNotFound("No matching distribution found for %s" % req) best_installed = False if installed_version and ( @@ -899,14 +899,14 @@ def 
_format_versions(cand_iter): if not upgrade and installed_version is not None: if best_installed: logger.debug( - 'Existing installed version (%s) is most up-to-date and ' - 'satisfies requirement', + "Existing installed version (%s) is most up-to-date and " + "satisfies requirement", installed_version, ) else: logger.debug( - 'Existing installed version (%s) satisfies requirement ' - '(most up-to-date version is %s)', + "Existing installed version (%s) satisfies requirement " + "(most up-to-date version is %s)", installed_version, best_candidate.version, ) @@ -915,14 +915,14 @@ def _format_versions(cand_iter): if best_installed: # We have an existing version, and its the best version logger.debug( - 'Installed version (%s) is most up-to-date (past versions: %s)', + "Installed version (%s) is most up-to-date (past versions: %s)", installed_version, _format_versions(best_candidate_result.iter_applicable()), ) raise BestVersionAlreadyInstalled logger.debug( - 'Using version %s (newest of versions: %s)', + "Using version %s (newest of versions: %s)", best_candidate.version, _format_versions(best_candidate_result.iter_applicable()), ) diff --git a/src/pip/_internal/legacy_resolve.py b/src/pip/_internal/legacy_resolve.py index cd6bb47b262..58743f5bcd3 100644 --- a/src/pip/_internal/legacy_resolve.py +++ b/src/pip/_internal/legacy_resolve.py @@ -33,10 +33,7 @@ ensure_dir, normalize_version_info, ) -from pip._internal.utils.packaging import ( - check_requires_python, - get_requires_python, -) +from pip._internal.utils.packaging import check_requires_python, get_requires_python from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: @@ -87,10 +84,10 @@ def _check_dist_requires_python( if is_compatible: return - version = '.'.join(map(str, version_info)) + version = ".".join(map(str, version_info)) if ignore_requires_python: logger.debug( - 'Ignoring failed Requires-Python check for package %r: %s not in %r', + "Ignoring failed Requires-Python check 
for package %r: %s not in %r", dist.project_name, version, requires_python, @@ -98,7 +95,7 @@ def _check_dist_requires_python( return raise UnsupportedPythonVersion( - 'Package {!r} requires a different Python: {} not in {!r}'.format( + "Package {!r} requires a different Python: {} not in {!r}".format( dist.project_name, version, requires_python ) ) @@ -255,8 +252,8 @@ def _check_skip_installed(self, req_to_install): if not self._is_upgrade_allowed(req_to_install): if self.upgrade_strategy == "only-if-needed": - return 'already satisfied, skipping upgrade' - return 'already satisfied' + return "already satisfied, skipping upgrade" + return "already satisfied" # Check for the possibility of an upgrade. For link-based # requirements we have to pull the tree down and inspect to assess @@ -266,7 +263,7 @@ def _check_skip_installed(self, req_to_install): self.finder.find_requirement(req_to_install, upgrade=True) except BestVersionAlreadyInstalled: # Then the best version is installed. - return 'already up-to-date' + return "already up-to-date" except DistributionNotFound: # No distribution found, so we squash the error. It will # be raised later when we re-try later to do the install. 
@@ -324,14 +321,13 @@ def _get_abstract_dist_for(self, req): self.upgrade_strategy != "to-satisfy-only" or self.force_reinstall or self.ignore_installed - or req.link.scheme == 'file' + or req.link.scheme == "file" ) if should_modify: self._set_req_to_reinstall(req) else: logger.info( - 'Requirement already satisfied (use --upgrade to upgrade): %s', - req, + "Requirement already satisfied (use --upgrade to upgrade): %s", req ) return abstract_dist @@ -395,15 +391,13 @@ def add_req(subreq, extras_requested): if req_to_install.extras: logger.debug( "Installing extra requirements: %r", - ','.join(req_to_install.extras), + ",".join(req_to_install.extras), ) missing_requested = sorted( set(req_to_install.extras) - set(dist.extras) ) for missing in missing_requested: - logger.warning( - '%s does not provide the extra \'%s\'', dist, missing - ) + logger.warning("%s does not provide the extra '%s'", dist, missing) available_requested = sorted( set(dist.extras) & set(req_to_install.extras) diff --git a/src/pip/_internal/locations.py b/src/pip/_internal/locations.py index 70e4f784f21..21b8b2666e9 100644 --- a/src/pip/_internal/locations.py +++ b/src/pip/_internal/locations.py @@ -33,16 +33,16 @@ def get_major_minor_version(): Return the major-minor version of the current Python as a string, e.g. "3.7" or "3.10". 
""" - return '{}.{}'.format(*sys.version_info) + return "{}.{}".format(*sys.version_info) def get_src_prefix(): if running_under_virtualenv(): - src_prefix = os.path.join(sys.prefix, 'src') + src_prefix = os.path.join(sys.prefix, "src") else: # FIXME: keep src in cwd for now (it is not a temporary folder) try: - src_prefix = os.path.join(os.getcwd(), 'src') + src_prefix = os.path.join(os.getcwd(), "src") except OSError: # In case the current working directory has been renamed or deleted sys.exit("The folder you are executing pip from can no longer be found.") @@ -69,20 +69,20 @@ def get_src_prefix(): user_site = site.USER_SITE if WINDOWS: - bin_py = os.path.join(sys.prefix, 'Scripts') - bin_user = os.path.join(user_site, 'Scripts') + bin_py = os.path.join(sys.prefix, "Scripts") + bin_user = os.path.join(user_site, "Scripts") # buildout uses 'bin' on Windows too? if not os.path.exists(bin_py): - bin_py = os.path.join(sys.prefix, 'bin') - bin_user = os.path.join(user_site, 'bin') + bin_py = os.path.join(sys.prefix, "bin") + bin_user = os.path.join(user_site, "bin") else: - bin_py = os.path.join(sys.prefix, 'bin') - bin_user = os.path.join(user_site, 'bin') + bin_py = os.path.join(sys.prefix, "bin") + bin_user = os.path.join(user_site, "bin") # Forcing to use /usr/local/bin for standard macOS framework installs # Also log to ~/Library/Logs/ for use with the Console.app log viewer - if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/': - bin_py = '/usr/local/bin' + if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/": + bin_py = "/usr/local/bin" def distutils_scheme( @@ -100,14 +100,14 @@ def distutils_scheme( extra_dist_args = {"script_args": ["--no-user-cfg"]} else: extra_dist_args = {} - dist_args = {'name': dist_name} # type: Dict[str, Union[str, List[str]]] + dist_args = {"name": dist_name} # type: Dict[str, Union[str, List[str]]] dist_args.update(extra_dist_args) d = Distribution(dist_args) # Ignoring, typeshed issue 
reported python/typeshed/issues/2567 d.parse_config_files() # NOTE: Ignoring type since mypy can't find attributes on 'Command' - i = d.get_command_obj('install', create=True) # type: Any + i = d.get_command_obj("install", create=True) # type: Any assert i is not None # NOTE: setting user or home has the side-effect of creating the home dir # or user base for installations during finalize_options() @@ -122,7 +122,7 @@ def distutils_scheme( i.root = root or i.root i.finalize_options() for key in SCHEME_KEYS: - scheme[key] = getattr(i, 'install_' + key) + scheme[key] = getattr(i, "install_" + key) # install_lib specified in setup.cfg should install *everything* # into there (i.e. it takes precedence over both purelib and @@ -131,15 +131,15 @@ def distutils_scheme( # has explicitly requested it hence going back to the config # Ignoring, typeshed issue reported python/typeshed/issues/2567 - if 'install_lib' in d.get_option_dict('install'): # type: ignore + if "install_lib" in d.get_option_dict("install"): # type: ignore scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) if running_under_virtualenv(): - scheme['headers'] = os.path.join( + scheme["headers"] = os.path.join( sys.prefix, - 'include', - 'site', - 'python{}'.format(get_major_minor_version()), + "include", + "site", + "python{}".format(get_major_minor_version()), dist_name, ) diff --git a/src/pip/_internal/models/candidate.py b/src/pip/_internal/models/candidate.py index ab958d71f16..fb67b9c457e 100644 --- a/src/pip/_internal/models/candidate.py +++ b/src/pip/_internal/models/candidate.py @@ -31,6 +31,6 @@ def __repr__(self): ) def __str__(self): - return '{!r} candidate (version {} at {})'.format( + return "{!r} candidate (version {} at {})".format( self.project, self.version, self.link ) diff --git a/src/pip/_internal/models/format_control.py b/src/pip/_internal/models/format_control.py index ed5fd9ed85e..b404b87e6b0 100644 --- a/src/pip/_internal/models/format_control.py +++ 
b/src/pip/_internal/models/format_control.py @@ -38,21 +38,21 @@ def __repr__(self): @staticmethod def handle_mutual_excludes(value, target, other): # type: (str, Optional[Set], Optional[Set]) -> None - if value.startswith('-'): + if value.startswith("-"): raise CommandError( "--no-binary / --only-binary option requires 1 argument." ) - new = value.split(',') - while ':all:' in new: + new = value.split(",") + while ":all:" in new: other.clear() target.clear() - target.add(':all:') - del new[: new.index(':all:') + 1] + target.add(":all:") + del new[: new.index(":all:") + 1] # Without a none, we want to discard everything as :all: covers it - if ':none:' not in new: + if ":none:" not in new: return for name in new: - if name == ':none:': + if name == ":none:": target.clear() continue name = canonicalize_name(name) @@ -63,15 +63,15 @@ def get_allowed_formats(self, canonical_name): # type: (str) -> FrozenSet result = {"binary", "source"} if canonical_name in self.only_binary: - result.discard('source') + result.discard("source") elif canonical_name in self.no_binary: - result.discard('binary') - elif ':all:' in self.only_binary: - result.discard('source') - elif ':all:' in self.no_binary: - result.discard('binary') + result.discard("binary") + elif ":all:" in self.only_binary: + result.discard("source") + elif ":all:" in self.no_binary: + result.discard("binary") return frozenset(result) def disallow_binaries(self): # type: () -> None - self.handle_mutual_excludes(':all:', self.no_binary, self.only_binary) + self.handle_mutual_excludes(":all:", self.no_binary, self.only_binary) diff --git a/src/pip/_internal/models/index.py b/src/pip/_internal/models/index.py index 7c0a69653a8..6824ab8dcf5 100644 --- a/src/pip/_internal/models/index.py +++ b/src/pip/_internal/models/index.py @@ -10,8 +10,8 @@ def __init__(self, url, file_storage_domain): super(PackageIndex, self).__init__() self.url = url self.netloc = urllib_parse.urlsplit(url).netloc - self.simple_url = 
self._url_for_path('simple') - self.pypi_url = self._url_for_path('pypi') + self.simple_url = self._url_for_path("simple") + self.pypi_url = self._url_for_path("pypi") # This is part of a temporary hack used to block installs of PyPI # packages which depend on external urls only necessary until PyPI can @@ -23,7 +23,7 @@ def _url_for_path(self, path): return urllib_parse.urljoin(self.url, path) -PyPI = PackageIndex('https://pypi.org/', file_storage_domain='files.pythonhosted.org') +PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org") TestPyPI = PackageIndex( - 'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org' + "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org" ) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index 0cedd2e5787..80fee5a0947 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -49,7 +49,7 @@ def __init__( """ # url can be a UNC windows share - if url.startswith('\\\\'): + if url.startswith("\\\\"): url = path_to_url(url) self._parsed_url = urllib_parse.urlsplit(url) @@ -65,11 +65,11 @@ def __init__( def __str__(self): if self.requires_python: - rp = ' (requires-python:%s)' % self.requires_python + rp = " (requires-python:%s)" % self.requires_python else: - rp = '' + rp = "" if self.comes_from: - return '%s (from %s)%s' % ( + return "%s (from %s)%s" % ( redact_auth_from_url(self._url), self.comes_from, rp, @@ -78,7 +78,7 @@ def __str__(self): return redact_auth_from_url(str(self._url)) def __repr__(self): - return '' % self + return "" % self @property def url(self): @@ -88,7 +88,7 @@ def url(self): @property def filename(self): # type: () -> str - path = self.path.rstrip('/') + path = self.path.rstrip("/") name = posixpath.basename(path) if not name: # Make sure we don't leak auth information if the netloc @@ -97,7 +97,7 @@ def filename(self): return netloc name = urllib_parse.unquote(name) - assert 
name, 'URL %r produced no filename' % self._url + assert name, "URL %r produced no filename" % self._url return name @property @@ -125,7 +125,7 @@ def path(self): def splitext(self): # type: () -> Tuple[str, str] - return splitext(posixpath.basename(self.path.rstrip('/'))) + return splitext(posixpath.basename(self.path.rstrip("/"))) @property def ext(self): @@ -138,7 +138,7 @@ def url_without_fragment(self): scheme, netloc, path, query, fragment = self._parsed_url return urllib_parse.urlunsplit((scheme, netloc, path, query, None)) - _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)') + _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)") @property def egg_fragment(self): @@ -148,7 +148,7 @@ def egg_fragment(self): return None return match.group(1) - _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)') + _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)") @property def subdirectory_fragment(self): @@ -158,7 +158,7 @@ def subdirectory_fragment(self): return None return match.group(1) - _hash_re = re.compile(r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)') + _hash_re = re.compile(r"(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)") @property def hash(self): @@ -179,12 +179,12 @@ def hash_name(self): @property def show_url(self): # type: () -> Optional[str] - return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0]) + return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0]) @property def is_file(self): # type: () -> bool - return self.scheme == 'file' + return self.scheme == "file" def is_existing_dir(self): # type: () -> bool diff --git a/src/pip/_internal/models/search_scope.py b/src/pip/_internal/models/search_scope.py index 9f737d93da7..a9d03daba3e 100644 --- a/src/pip/_internal/models/search_scope.py +++ b/src/pip/_internal/models/search_scope.py @@ -41,7 +41,7 @@ def create( # blindly normalize anything starting with a ~... 
built_find_links = [] # type: List[str] for link in find_links: - if link.startswith('~'): + if link.startswith("~"): new_link = normalize_path(link) if os.path.exists(new_link): link = new_link @@ -52,11 +52,11 @@ def create( if not HAS_TLS: for link in itertools.chain(index_urls, built_find_links): parsed = urllib_parse.urlparse(link) - if parsed.scheme == 'https': + if parsed.scheme == "https": logger.warning( - 'pip is configured with locations that require ' - 'TLS/SSL, however the ssl module in Python is not ' - 'available.' + "pip is configured with locations that require " + "TLS/SSL, however the ssl module in Python is not " + "available." ) break @@ -76,17 +76,17 @@ def get_formatted_locations(self): lines = [] if self.index_urls and self.index_urls != [PyPI.simple_url]: lines.append( - 'Looking in indexes: {}'.format( - ', '.join(redact_auth_from_url(url) for url in self.index_urls) + "Looking in indexes: {}".format( + ", ".join(redact_auth_from_url(url) for url in self.index_urls) ) ) if self.find_links: lines.append( - 'Looking in links: {}'.format( - ', '.join(redact_auth_from_url(url) for url in self.find_links) + "Looking in links: {}".format( + ", ".join(redact_auth_from_url(url) for url in self.find_links) ) ) - return '\n'.join(lines) + return "\n".join(lines) def get_index_urls_locations(self, project_name): # type: (str) -> List[str] @@ -105,8 +105,8 @@ def mkurl_pypi_url(url): # (and PyPI can handle it without the slash) some other index # implementations might break if they relied on easy_install's # behavior. 
- if not loc.endswith('/'): - loc = loc + '/' + if not loc.endswith("/"): + loc = loc + "/" return loc return [mkurl_pypi_url(url) for url in self.index_urls] diff --git a/src/pip/_internal/models/target_python.py b/src/pip/_internal/models/target_python.py index 008fecf38ed..cf94f4727f4 100644 --- a/src/pip/_internal/models/target_python.py +++ b/src/pip/_internal/models/target_python.py @@ -46,7 +46,7 @@ def __init__( else: py_version_info = normalize_version_info(py_version_info) - py_version = '.'.join(map(str, py_version_info[:2])) + py_version = ".".join(map(str, py_version_info[:2])) self.abi = abi self.implementation = implementation @@ -64,18 +64,18 @@ def format_given(self): """ display_version = None if self._given_py_version_info is not None: - display_version = '.'.join( + display_version = ".".join( str(part) for part in self._given_py_version_info ) key_values = [ - ('platform', self.platform), - ('version_info', display_version), - ('abi', self.abi), - ('implementation', self.implementation), + ("platform", self.platform), + ("version_info", display_version), + ("abi", self.abi), + ("implementation", self.implementation), ] - return ' '.join( - '{}={!r}'.format(key, value) + return " ".join( + "{}={!r}".format(key, value) for key, value in key_values if value is not None ) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index a296aacf2d4..6cca42e1c1b 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -268,7 +268,7 @@ def warn_on_401(self, resp, **kwargs): """Response callback to warn about incorrect credentials.""" if resp.status_code == 401: logger.warning( - '401 Error, Credentials not correct for %s', resp.request.url + "401 Error, Credentials not correct for %s", resp.request.url ) def save_credentials(self, resp, **kwargs): @@ -281,7 +281,7 @@ def save_credentials(self, resp, **kwargs): self._credentials_to_save = None if creds and resp.status_code < 400: try: - 
logger.info('Saving credentials to keyring') + logger.info("Saving credentials to keyring") keyring.set_password(*creds) except Exception: - logger.exception('Failed to save credentials') + logger.exception("Failed to save credentials") diff --git a/src/pip/_internal/network/cache.py b/src/pip/_internal/network/cache.py index 9cd6403003e..ca4cb3e7e73 100644 --- a/src/pip/_internal/network/cache.py +++ b/src/pip/_internal/network/cache.py @@ -50,7 +50,7 @@ def get(self, key): # type: (str) -> Optional[bytes] path = self._get_cache_path(key) with suppressed_cache_errors(): - with open(path, 'rb') as f: + with open(path, "rb") as f: return f.read() def set(self, key, value): diff --git a/src/pip/_internal/operations/check.py b/src/pip/_internal/operations/check.py index b796c1697bc..d05dc84c93b 100644 --- a/src/pip/_internal/operations/check.py +++ b/src/pip/_internal/operations/check.py @@ -10,9 +10,7 @@ from pip._vendor.packaging.utils import canonicalize_name from pip._vendor.pkg_resources import RequirementParseError -from pip._internal.distributions import ( - make_distribution_for_install_requirement, -) +from pip._internal.distributions import make_distribution_for_install_requirement from pip._internal.utils.misc import get_installed_distributions from pip._internal.utils.typing import MYPY_CHECK_RUNNING @@ -23,7 +21,7 @@ from typing import Any, Callable, Dict, Optional, Set, Tuple, List # Shorthands - PackageSet = Dict[str, 'PackageDetails'] + PackageSet = Dict[str, "PackageDetails"] Missing = Tuple[str, Any] Conflicting = Tuple[str, str, Any] @@ -31,7 +29,7 @@ ConflictingDict = Dict[str, List[Conflicting]] CheckResult = Tuple[MissingDict, ConflictingDict] -PackageDetails = namedtuple('PackageDetails', ['version', 'requires']) +PackageDetails = namedtuple("PackageDetails", ["version", "requires"]) def create_package_set_from_installed(**kwargs): diff --git a/src/pip/_internal/operations/freeze.py b/src/pip/_internal/operations/freeze.py index 
0f7e1a31942..ea14cf434f5 100644 --- a/src/pip/_internal/operations/freeze.py +++ b/src/pip/_internal/operations/freeze.py @@ -18,10 +18,7 @@ install_req_from_line, ) from pip._internal.req.req_file import COMMENT_RE -from pip._internal.utils.misc import ( - dist_is_editable, - get_installed_distributions, -) +from pip._internal.utils.misc import dist_is_editable, get_installed_distributions from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: @@ -65,7 +62,7 @@ def freeze( skip_match = re.compile(skip_regex).search for link in find_links: - yield '-f %s' % link + yield "-f %s" % link installations = {} # type: Dict[str, FrozenRequirement] for dist in get_installed_distributions( local_only=local_only, skip=(), user_only=user_only, paths=paths @@ -78,7 +75,7 @@ def freeze( # location. We also include the exception message to aid # troubleshooting. logger.warning( - 'Could not generate requirement for distribution %r: %s', dist, exc + "Could not generate requirement for distribution %r: %s", dist, exc ) continue if exclude_editable and req.editable: @@ -99,22 +96,22 @@ def freeze( for line in req_file: if ( not line.strip() - or line.strip().startswith('#') + or line.strip().startswith("#") or (skip_match and skip_match(line)) or line.startswith( ( - '-r', - '--requirement', - '-Z', - '--always-unzip', - '-f', - '--find-links', - '-i', - '--index-url', - '--pre', - '--trusted-host', - '--process-dependency-links', - '--extra-index-url', + "-r", + "--requirement", + "-Z", + "--always-unzip", + "-f", + "--find-links", + "-i", + "--index-url", + "--pre", + "--trusted-host", + "--process-dependency-links", + "--extra-index-url", ) ) ): @@ -124,17 +121,17 @@ def freeze( yield line continue - if line.startswith('-e') or line.startswith('--editable'): - if line.startswith('-e'): + if line.startswith("-e") or line.startswith("--editable"): + if line.startswith("-e"): line = line[2:].strip() else: - line = line[len('--editable') 
:].strip().lstrip('=') + line = line[len("--editable") :].strip().lstrip("=") line_req = install_req_from_editable( line, isolated=isolated, wheel_cache=wheel_cache ) else: line_req = install_req_from_line( - COMMENT_RE.sub('', line).strip(), + COMMENT_RE.sub("", line).strip(), isolated=isolated, wheel_cache=wheel_cache, ) @@ -158,7 +155,7 @@ def freeze( "Requirement file [%s] contains %s, but " "package %r is not installed", req_file_path, - COMMENT_RE.sub('', line).strip(), + COMMENT_RE.sub("", line).strip(), line_req.name, ) else: @@ -175,10 +172,10 @@ def freeze( logger.warning( "Requirement %s included multiple times [%s]", name, - ', '.join(sorted(set(files))), + ", ".join(sorted(set(files))), ) - yield ('## The following requirements were added by pip freeze:') + yield ("## The following requirements were added by pip freeze:") for installation in sorted(installations.values(), key=lambda x: x.name.lower()): if canonicalize_name(installation.name) not in skip: yield str(installation).rstrip() @@ -202,7 +199,7 @@ def get_requirement_info(dist): if vcs_backend is None: req = dist.as_requirement() logger.debug('No VCS found for editable requirement "%s" in: %r', req, location) - comments = ['# Editable install with no version control ({})'.format(req)] + comments = ["# Editable install with no version control ({})".format(req)] return (location, True, comments) try: @@ -210,7 +207,7 @@ def get_requirement_info(dist): except RemoteNotFoundError: req = dist.as_requirement() comments = [ - '# Editable {} install with no remote ({})'.format( + "# Editable {} install with no remote ({})".format( type(vcs_backend).__name__, req ) ] @@ -218,8 +215,8 @@ def get_requirement_info(dist): except BadCommand: logger.warning( - 'cannot determine version of editable source in %s ' - '(%s command not found in path)', + "cannot determine version of editable source in %s " + "(%s command not found in path)", location, vcs_backend.name, ) @@ -235,8 +232,8 @@ def 
get_requirement_info(dist): if req is not None: return (req, True, []) - logger.warning('Could not determine repository location of %s', location) - comments = ['## !! Could not determine repository location'] + logger.warning("Could not determine repository location of %s", location) + comments = ["## !! Could not determine repository location"] return (None, False, comments) @@ -261,5 +258,5 @@ def from_dist(cls, dist): def __str__(self): req = self.req if self.editable: - req = '-e %s' % req - return '\n'.join(list(self.comments) + [str(req)]) + '\n' + req = "-e %s" % req + return "\n".join(list(self.comments) + [str(req)]) + "\n" diff --git a/src/pip/_internal/operations/generate_metadata.py b/src/pip/_internal/operations/generate_metadata.py index 58e0342e2b3..95dbc9cd6fb 100644 --- a/src/pip/_internal/operations/generate_metadata.py +++ b/src/pip/_internal/operations/generate_metadata.py @@ -27,7 +27,7 @@ def _generate_metadata_legacy(install_req): # type: (InstallRequirement) -> None req_details_str = install_req.name or "from {}".format(install_req.link) logger.debug( - 'Running setup.py (path:%s) egg_info for package %s', + "Running setup.py (path:%s) egg_info for package %s", install_req.setup_py_path, req_details_str, ) @@ -42,8 +42,8 @@ def _generate_metadata_legacy(install_req): # egg. egg_base_option = [] # type: List[str] if not install_req.editable: - egg_info_dir = os.path.join(install_req.setup_py_dir, 'pip-egg-info') - egg_base_option = ['--egg-base', egg_info_dir] + egg_info_dir = os.path.join(install_req.setup_py_dir, "pip-egg-info") + egg_base_option = ["--egg-base", egg_info_dir] # setuptools complains if the target directory does not exist. 
ensure_dir(egg_info_dir) @@ -52,7 +52,7 @@ def _generate_metadata_legacy(install_req): call_subprocess( base_cmd + ["egg_info"] + egg_base_option, cwd=install_req.setup_py_dir, - command_desc='python setup.py egg_info', + command_desc="python setup.py egg_info", ) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 5eec72cfa74..ee46fe34455 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -9,9 +9,7 @@ from pip._vendor import requests -from pip._internal.distributions import ( - make_distribution_for_install_requirement, -) +from pip._internal.distributions import make_distribution_for_install_requirement from pip._internal.distributions.installed import InstalledDistribution from pip._internal.download import unpack_url from pip._internal.exceptions import ( @@ -102,7 +100,7 @@ def _download_should_save(self): if os.path.exists(self.download_dir): return True - logger.critical('Could not find download directory') + logger.critical("Could not find download directory") raise InstallationError( "Could not find or access download directory '%s'" % display_path(self.download_dir) @@ -122,11 +120,11 @@ def prepare_linked_requirement( link = req.link # TODO: Breakup into smaller functions - if link.scheme == 'file': + if link.scheme == "file": path = link.file_path - logger.info('Processing %s', display_path(path)) + logger.info("Processing %s", display_path(path)) else: - logger.info('Collecting %s', req.req or req) + logger.info("Collecting %s", req.req or req) with indent_log(): # @@ if filesystem packages are not marked @@ -139,7 +137,7 @@ def prepare_linked_requirement( # installation. 
# FIXME: this won't upgrade when there's an existing # package unpacked in `req.source_dir` - if os.path.exists(os.path.join(req.source_dir, 'setup.py')): + if os.path.exists(os.path.join(req.source_dir, "setup.py")): raise PreviousBuildDirError( "pip can't proceed with requirements '%s' due to a" " pre-existing build directory (%s). This is " @@ -198,11 +196,11 @@ def prepare_linked_requirement( ) except requests.HTTPError as exc: logger.critical( - 'Could not install requirement %s because of error %s', req, exc + "Could not install requirement %s because of error %s", req, exc ) raise InstallationError( - 'Could not install requirement %s because of HTTP ' - 'error %s for URL %s' % (req, exc, link) + "Could not install requirement %s because of HTTP " + "error %s for URL %s" % (req, exc, link) ) if link.is_wheel: @@ -242,14 +240,14 @@ def prepare_editable_requirement( """ assert req.editable, "cannot prepare a non-editable req as editable" - logger.info('Obtaining %s', req) + logger.info("Obtaining %s", req) with indent_log(): if require_hashes: raise InstallationError( - 'The editable requirement %s cannot be installed when ' - 'requiring hashes, because there is no single file to ' - 'hash.' % req + "The editable requirement %s cannot be installed when " + "requiring hashes, because there is no single file to " + "hash." % req ) req.ensure_has_source_dir(self.src_dir) req.update_editable(not self._download_should_save) @@ -279,15 +277,15 @@ def prepare_installed_requirement( "is set to %r" % (req.satisfied_by,) ) logger.info( - 'Requirement %s: %s (%s)', skip_reason, req, req.satisfied_by.version + "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version ) with indent_log(): if require_hashes: logger.debug( - 'Since it is already installed, we are trusting this ' - 'package without checking its hash. To ensure a ' - 'completely repeatable environment, install into an ' - 'empty virtualenv.' 
+ "Since it is already installed, we are trusting this " + "package without checking its hash. To ensure a " + "completely repeatable environment, install into an " + "empty virtualenv." ) abstract_dist = InstalledDistribution(req) diff --git a/src/pip/_internal/pep425tags.py b/src/pip/_internal/pep425tags.py index 0f81300dabc..f0c289b9c62 100644 --- a/src/pip/_internal/pep425tags.py +++ b/src/pip/_internal/pep425tags.py @@ -21,7 +21,7 @@ logger = logging.getLogger(__name__) -_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)') +_osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)") def get_config_var(var): @@ -36,29 +36,29 @@ def get_config_var(var): def get_abbr_impl(): # type: () -> str """Return abbreviated implementation name.""" - if hasattr(sys, 'pypy_version_info'): - pyimpl = 'pp' - elif sys.platform.startswith('java'): - pyimpl = 'jy' - elif sys.platform == 'cli': - pyimpl = 'ip' + if hasattr(sys, "pypy_version_info"): + pyimpl = "pp" + elif sys.platform.startswith("java"): + pyimpl = "jy" + elif sys.platform == "cli": + pyimpl = "ip" else: - pyimpl = 'cp' + pyimpl = "cp" return pyimpl def version_info_to_nodot(version_info): # type: (Tuple[int, ...]) -> str # Only use up to the first two numbers. - return ''.join(map(str, version_info[:2])) + return "".join(map(str, version_info[:2])) def get_impl_ver(): # type: () -> str """Return implementation version.""" impl_ver = get_config_var("py_version_nodot") - if not impl_ver or get_abbr_impl() == 'pp': - impl_ver = ''.join(map(str, get_impl_version_info())) + if not impl_ver or get_abbr_impl() == "pp": + impl_ver = "".join(map(str, get_impl_version_info())) return impl_ver @@ -66,7 +66,7 @@ def get_impl_version_info(): # type: () -> Tuple[int, ...] 
"""Return sys.version_info-like tuple for use in decrementing the minor version.""" - if get_abbr_impl() == 'pp': + if get_abbr_impl() == "pp": # as per https://github.com/pypa/pip/issues/2882 # attrs exist only on pypy return ( @@ -104,35 +104,35 @@ def get_abi_tag(): # type: () -> Optional[str] """Return the ABI tag based on SOABI (if available) or emulate SOABI (CPython 2, PyPy).""" - soabi = get_config_var('SOABI') + soabi = get_config_var("SOABI") impl = get_abbr_impl() abi = None # type: Optional[str] - if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'): - d = '' - m = '' - u = '' - is_cpython = impl == 'cp' + if not soabi and impl in {"cp", "pp"} and hasattr(sys, "maxunicode"): + d = "" + m = "" + u = "" + is_cpython = impl == "cp" if get_flag( - 'Py_DEBUG', lambda: hasattr(sys, 'gettotalrefcount'), warn=is_cpython + "Py_DEBUG", lambda: hasattr(sys, "gettotalrefcount"), warn=is_cpython ): - d = 'd' + d = "d" if sys.version_info < (3, 8) and get_flag( - 'WITH_PYMALLOC', lambda: is_cpython, warn=is_cpython + "WITH_PYMALLOC", lambda: is_cpython, warn=is_cpython ): - m = 'm' + m = "m" if sys.version_info < (3, 3) and get_flag( - 'Py_UNICODE_SIZE', + "Py_UNICODE_SIZE", lambda: sys.maxunicode == 0x10FFFF, expected=4, warn=is_cpython, ): - u = 'u' - abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u) - elif soabi and soabi.startswith('cpython-'): - abi = 'cp' + soabi.split('-')[1] + u = "u" + abi = "%s%s%s%s%s" % (impl, get_impl_ver(), d, m, u) + elif soabi and soabi.startswith("cpython-"): + abi = "cp" + soabi.split("-")[1] elif soabi: - abi = soabi.replace('.', '_').replace('-', '_') + abi = soabi.replace(".", "_").replace("-", "_") return abi @@ -145,22 +145,22 @@ def _is_running_32bit(): def get_platform(): # type: () -> str """Return our platform name 'win32', 'linux_x86_64'""" - if sys.platform == 'darwin': + if sys.platform == "darwin": # distutils.util.get_platform() returns the release based on the value # of MACOSX_DEPLOYMENT_TARGET on 
which Python was built, which may # be significantly older than the user's current machine. release, _, machine = platform.mac_ver() - split_ver = release.split('.') + split_ver = release.split(".") if machine == "x86_64" and _is_running_32bit(): machine = "i386" elif machine == "ppc64" and _is_running_32bit(): machine = "ppc" - return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine) + return "macosx_{}_{}_{}".format(split_ver[0], split_ver[1], machine) # XXX remove distutils dependency - result = distutils.util.get_platform().replace('.', '_').replace('-', '_') + result = distutils.util.get_platform().replace(".", "_").replace("-", "_") if result == "linux_x86_64" and _is_running_32bit(): # 32 bit Python program (running on a 64 bit Linux): pip should only # install and run 32 bit compiled extensions in that case. @@ -237,13 +237,13 @@ def _supports_arch(major, minor, arch): # column in the chart not the "Processor support" since I believe # that we care about what instruction sets an application can use # not which processors the OS supports. - if arch == 'ppc': + if arch == "ppc": return (major, minor) <= (10, 5) - if arch == 'ppc64': + if arch == "ppc64": return (major, minor) == (10, 5) - if arch == 'i386': + if arch == "i386": return (major, minor) >= (10, 4) - if arch == 'x86_64': + if arch == "x86_64": return (major, minor) >= (10, 5) if arch in groups: for garch in groups[arch]: @@ -267,7 +267,7 @@ def _supports_arch(major, minor, arch): if machine in groups[garch] and _supports_arch(major, minor, garch): arches.append(garch) - arches.append('universal') + arches.append("universal") return arches @@ -278,7 +278,7 @@ def get_all_minor_versions_as_strings(version_info): major = version_info[:-1] # Support all previous minor Python versions. 
for minor in range(version_info[-1], -1, -1): - versions.append(''.join(map(str, major + (minor,)))) + versions.append("".join(map(str, major + (minor,)))) return versions @@ -319,22 +319,22 @@ def get_supported( abi3s = set() # type: Set[str] for suffix in get_extension_suffixes(): - if suffix.startswith('.abi'): - abi3s.add(suffix.split('.', 2)[1]) + if suffix.startswith(".abi"): + abi3s.add(suffix.split(".", 2)[1]) abis.extend(sorted(list(abi3s))) - abis.append('none') + abis.append("none") if not noarch: arch = platform or get_platform() - arch_prefix, arch_sep, arch_suffix = arch.partition('_') - if arch.startswith('macosx'): + arch_prefix, arch_sep, arch_suffix = arch.partition("_") + if arch.startswith("macosx"): # support macosx-10.6-intel on macosx-10.9-x86_64 match = _osx_arch_pat.match(arch) if match: name, major, minor, actual_arch = match.groups() - tpl = '{}_{}_%i_%s'.format(name, major) + tpl = "{}_{}_%i_%s".format(name, major) arches = [] for m in reversed(range(int(minor) + 1)): for a in get_darwin_arches(int(major), m, actual_arch): @@ -342,18 +342,18 @@ def get_supported( else: # arch pattern didn't match (?!) arches = [arch] - elif arch_prefix == 'manylinux2010': + elif arch_prefix == "manylinux2010": # manylinux1 wheels run on most manylinux2010 systems with the # exception of wheels depending on ncurses. 
PEP 571 states # manylinux1 wheels should be considered manylinux2010 wheels: # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels - arches = [arch, 'manylinux1' + arch_sep + arch_suffix] + arches = [arch, "manylinux1" + arch_sep + arch_suffix] elif platform is None: arches = [] if is_manylinux2010_compatible(): - arches.append('manylinux2010' + arch_sep + arch_suffix) + arches.append("manylinux2010" + arch_sep + arch_suffix) if is_manylinux1_compatible(): - arches.append('manylinux1' + arch_sep + arch_suffix) + arches.append("manylinux1" + arch_sep + arch_suffix) arches.append(arch) else: arches = [arch] @@ -361,12 +361,12 @@ def get_supported( # Current version, current API (built specifically for our Python): for abi in abis: for arch in arches: - supported.append(('%s%s' % (impl, versions[0]), abi, arch)) + supported.append(("%s%s" % (impl, versions[0]), abi, arch)) # abi3 modules compatible with older version of Python for version in versions[1:]: # abi3 was introduced in Python 3.2 - if version in {'31', '30'}: + if version in {"31", "30"}: break for abi in abi3s: # empty set if not Python 3 for arch in arches: @@ -374,19 +374,19 @@ def get_supported( # Has binaries, does not use the Python API: for arch in arches: - supported.append(('py%s' % (versions[0][0]), 'none', arch)) + supported.append(("py%s" % (versions[0][0]), "none", arch)) # No abi / arch, but requires our implementation: - supported.append(('%s%s' % (impl, versions[0]), 'none', 'any')) + supported.append(("%s%s" % (impl, versions[0]), "none", "any")) # Tagged specifically as being cross-version compatible # (with just the major version specified) - supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) + supported.append(("%s%s" % (impl, versions[0][0]), "none", "any")) # No abi / arch, generic Python for i, version in enumerate(versions): - supported.append(('py%s' % (version,), 'none', 'any')) + supported.append(("py%s" % (version,), "none", 
"any")) if i == 0: - supported.append(('py%s' % (version[0]), 'none', 'any')) + supported.append(("py%s" % (version[0]), "none", "any")) return supported diff --git a/src/pip/_internal/pyproject.py b/src/pip/_internal/pyproject.py index b29d0a4b3f0..0b0cf218276 100644 --- a/src/pip/_internal/pyproject.py +++ b/src/pip/_internal/pyproject.py @@ -22,7 +22,7 @@ def _is_list_of_str(obj): def make_pyproject_path(setup_py_dir): # type: (str) -> str - path = os.path.join(setup_py_dir, 'pyproject.toml') + path = os.path.join(setup_py_dir, "pyproject.toml") # Python2 __file__ should not be unicode if six.PY2 and isinstance(path, six.text_type): diff --git a/src/pip/_internal/req/__init__.py b/src/pip/_internal/req/__init__.py index f2f723cb141..e006be74957 100644 --- a/src/pip/_internal/req/__init__.py +++ b/src/pip/_internal/req/__init__.py @@ -41,15 +41,15 @@ def install_given_reqs( if to_install: logger.info( - 'Installing collected packages: %s', - ', '.join([req.name for req in to_install]), + "Installing collected packages: %s", + ", ".join([req.name for req in to_install]), ) with indent_log(): for requirement in to_install: if requirement.conflicts_with: logger.info( - 'Found existing installation: %s', requirement.conflicts_with + "Found existing installation: %s", requirement.conflicts_with ) with indent_log(): uninstalled_pathset = requirement.uninstall(auto_confirm=True) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index 4edc9da0e47..c475d682dca 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -54,7 +54,7 @@ def is_archive_file(name): def _strip_extras(path): # type: (str) -> Tuple[str, Optional[str]] - m = re.match(r'^(.+)(\[[^\]]+\])$', path) + m = re.match(r"^(.+)(\[[^\]]+\])$", path) extras = None if m: path_no_extras = m.group(1) @@ -90,23 +90,23 @@ def parse_editable(editable_req): url_no_extras, extras = _strip_extras(url) if os.path.isdir(url_no_extras): - if 
not os.path.exists(os.path.join(url_no_extras, 'setup.py')): + if not os.path.exists(os.path.join(url_no_extras, "setup.py")): msg = ( 'File "setup.py" not found. Directory cannot be installed ' - 'in editable mode: {}'.format(os.path.abspath(url_no_extras)) + "in editable mode: {}".format(os.path.abspath(url_no_extras)) ) pyproject_path = make_pyproject_path(url_no_extras) if os.path.isfile(pyproject_path): msg += ( '\n(A "pyproject.toml" file was found, but editable ' - 'mode currently requires a setup.py based build.)' + "mode currently requires a setup.py based build.)" ) raise InstallationError(msg) # Treating it as code that has already been checked out url_no_extras = path_to_url(url_no_extras) - if url_no_extras.lower().startswith('file:'): + if url_no_extras.lower().startswith("file:"): package_name = Link(url_no_extras).egg_fragment if extras: return ( @@ -118,24 +118,24 @@ def parse_editable(editable_req): return package_name, url_no_extras, None for version_control in vcs: - if url.lower().startswith('%s:' % version_control): - url = '%s+%s' % (version_control, url) + if url.lower().startswith("%s:" % version_control): + url = "%s+%s" % (version_control, url) break - if '+' not in url: + if "+" not in url: raise InstallationError( - '{} is not a valid editable requirement. ' - 'It should either be a path to a local project or a VCS URL ' - '(beginning with svn+, git+, hg+, or bzr+).'.format(editable_req) + "{} is not a valid editable requirement. 
" + "It should either be a path to a local project or a VCS URL " + "(beginning with svn+, git+, hg+, or bzr+).".format(editable_req) ) - vc_type = url.split('+', 1)[0].lower() + vc_type = url.split("+", 1)[0].lower() if not vcs.get_backend(vc_type): error_message = ( - 'For --editable=%s only ' % editable_req - + ', '.join([backend.name + '+URL' for backend in vcs.backends]) - + ' is currently supported' + "For --editable=%s only " % editable_req + + ", ".join([backend.name + "+URL" for backend in vcs.backends]) + + " is currently supported" ) raise InstallationError(error_message) @@ -160,7 +160,7 @@ def deduce_helpful_msg(req): msg = " It does exist." # Try to parse and check if it is a requirements file. try: - with open(req, 'r') as fp: + with open(req, "r") as fp: # parse first line only next(parse_requirements(fp.read())) msg += ( @@ -229,7 +229,7 @@ def install_req_from_editable( parts = parse_req_from_editable(editable_req) - source_dir = parts.link.file_path if parts.link.scheme == 'file' else None + source_dir = parts.link.file_path if parts.link.scheme == "file" else None return InstallRequirement( parts.requirement, @@ -287,13 +287,13 @@ def _get_url_from_path(path, name): return None if os.path.isfile(path): return path_to_url(path) - urlreq_parts = name.split('@', 1) + urlreq_parts = name.split("@", 1) if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]): # If the path contains '@' and the part before it does not look # like a path, try to treat it as a PEP 440 URL req instead. 
return None logger.warning( - 'Requirement %r looks like a filename, but the file does not exist', name + "Requirement %r looks like a filename, but the file does not exist", name ) return path_to_url(path) @@ -301,9 +301,9 @@ def _get_url_from_path(path, name): def parse_req_from_line(name, line_source): # type: (str, Optional[str]) -> RequirementParts if is_url(name): - marker_sep = '; ' + marker_sep = "; " else: - marker_sep = ';' + marker_sep = ";" if marker_sep in name: name, markers_as_string = name.split(marker_sep, 1) markers_as_string = markers_as_string.strip() @@ -330,7 +330,7 @@ def parse_req_from_line(name, line_source): # it's a local file, dir, or url if link: # Handle relative file URLs - if link.scheme == 'file' and re.search(r'\.\./', link.url): + if link.scheme == "file" and re.search(r"\.\./", link.url): link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path)))) # wheel file if link.is_wheel: @@ -350,7 +350,7 @@ def parse_req_from_line(name, line_source): def with_source(text): if not line_source: return text - return '{} (from {})'.format(text, line_source) + return "{} (from {})".format(text, line_source) if req_as_string is not None: try: @@ -359,15 +359,15 @@ def with_source(text): if os.path.sep in req_as_string: add_msg = "It looks like a path." add_msg += deduce_helpful_msg(req_as_string) - elif '=' in req_as_string and not any( + elif "=" in req_as_string and not any( op in req_as_string for op in operators ): add_msg = "= is not a valid operator. Did you mean == ?" 
else: - add_msg = '' - msg = with_source('Invalid requirement: {!r}'.format(req_as_string)) + add_msg = "" + msg = with_source("Invalid requirement: {!r}".format(req_as_string)) if add_msg: - msg += '\nHint: {}'.format(add_msg) + msg += "\nHint: {}".format(add_msg) raise InstallationError(msg) else: req = None diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py index fe81aac3d1c..e541499c469 100644 --- a/src/pip/_internal/req/req_file.py +++ b/src/pip/_internal/req/req_file.py @@ -35,16 +35,16 @@ ReqFileLines = Iterator[Tuple[int, Text]] -__all__ = ['parse_requirements'] +__all__ = ["parse_requirements"] -SCHEME_RE = re.compile(r'^(http|https|file):', re.I) -COMMENT_RE = re.compile(r'(^|\s+)#.*$') +SCHEME_RE = re.compile(r"^(http|https|file):", re.I) +COMMENT_RE = re.compile(r"(^|\s+)#.*$") # Matches environment variable-style values in '${MY_VARIABLE_1}' with the # variable name consisting of only uppercase letters, digits or the '_' # (underscore). This follows the POSIX standard defined in IEEE Std 1003.1, # 2013 Edition. 
-ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})') +ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})") SUPPORTED_OPTIONS = [ cmdoptions.constraints, @@ -176,13 +176,13 @@ def process_line( # Prior to 2.7.3, shlex cannot deal with unicode entries if sys.version_info < (2, 7, 3): # https://github.com/python/mypy/issues/1174 - options_str = options_str.encode('utf8') # type: ignore + options_str = options_str.encode("utf8") # type: ignore # https://github.com/python/mypy/issues/1174 opts, _ = parser.parse_args(shlex.split(options_str), defaults) # type: ignore # preserve for the nested code path - line_comes_from = '%s %s (line %s)' % ( - '-c' if constraint else '-r', + line_comes_from = "%s %s (line %s)" % ( + "-c" if constraint else "-r", filename, line_number, ) @@ -197,7 +197,7 @@ def process_line( for dest in SUPPORTED_OPTIONS_REQ_DEST: if dest in opts.__dict__ and opts.__dict__[dest]: req_options[dest] = opts.__dict__[dest] - line_source = 'line {} of {}'.format(line_number, filename) + line_source = "line {} of {}".format(line_number, filename) yield install_req_from_line( args_str, comes_from=line_comes_from, @@ -281,7 +281,7 @@ def process_line( if opts.pre: finder.set_allow_all_prereleases() for host in opts.trusted_hosts or []: - source = 'line {} of {}'.format(line_number, filename) + source = "line {} of {}".format(line_number, filename) session.add_trusted_host(host, source=source) @@ -291,16 +291,16 @@ def break_args_options(line): (and then optparse) the options, not the args. args can contain markers which are corrupted by shlex. 
""" - tokens = line.split(' ') + tokens = line.split(" ") args = [] options = tokens[:] for token in tokens: - if token.startswith('-') or token.startswith('--'): + if token.startswith("-") or token.startswith("--"): break else: args.append(token) options.pop(0) - return ' '.join(args), ' '.join(options) # type: ignore + return " ".join(args), " ".join(options) # type: ignore def build_parser(line): @@ -320,7 +320,7 @@ def build_parser(line): def parser_exit(self, msg): # type: (Any, str) -> NoReturn # add offending line - msg = 'Invalid requirement: %s\n%s' % (line, msg) + msg = "Invalid requirement: %s\n%s" % (line, msg) raise RequirementsFileParseError(msg) # NOTE: mypy disallows assigning to a method @@ -338,24 +338,24 @@ def join_lines(lines_enum): primary_line_number = None new_line = [] # type: List[Text] for line_number, line in lines_enum: - if not line.endswith('\\') or COMMENT_RE.match(line): + if not line.endswith("\\") or COMMENT_RE.match(line): if COMMENT_RE.match(line): # this ensures comments are always matched later - line = ' ' + line + line = " " + line if new_line: new_line.append(line) - yield primary_line_number, ''.join(new_line) + yield primary_line_number, "".join(new_line) new_line = [] else: yield line_number, line else: if not new_line: primary_line_number = line_number - new_line.append(line.strip('\\')) + new_line.append(line.strip("\\")) # last line contains \ if new_line: - yield primary_line_number, ''.join(new_line) + yield primary_line_number, "".join(new_line) # TODO: handle space after '\'. @@ -366,7 +366,7 @@ def ignore_comments(lines_enum): Strips comments and filter empty lines. 
""" for line_number, line in lines_enum: - line = COMMENT_RE.sub('', line) + line = COMMENT_RE.sub("", line) line = line.strip() if line: yield line_number, line diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index a40426ec8a7..ddbfa169e86 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -166,25 +166,25 @@ def __str__(self): if self.req: s = str(self.req) if self.link: - s += ' from %s' % redact_auth_from_url(self.link.url) + s += " from %s" % redact_auth_from_url(self.link.url) elif self.link: s = redact_auth_from_url(self.link.url) else: - s = '' + s = "" if self.satisfied_by is not None: - s += ' in %s' % display_path(self.satisfied_by.location) + s += " in %s" % display_path(self.satisfied_by.location) if self.comes_from: if isinstance(self.comes_from, six.string_types): comes_from = self.comes_from # type: Optional[str] else: comes_from = self.comes_from.from_path() if comes_from: - s += ' (from %s)' % comes_from + s += " (from %s)" % comes_from return s def __repr__(self): # type: () -> str - return '<%s object: %s editable=%r>' % ( + return "<%s object: %s editable=%r>" % ( self.__class__.__name__, str(self), self.editable, @@ -198,7 +198,7 @@ def format_debug(self): names = sorted(attributes) state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)) - return '<{name} object: {{{state}}}>'.format( + return "<{name} object: {{{state}}}>".format( name=self.__class__.__name__, state=", ".join(state) ) @@ -224,7 +224,7 @@ def populate_link(self, finder, upgrade, require_hashes): link=self.link, package_name=self.name, supported_tags=supported_tags ) if old_link != self.link: - logger.debug('Using cached wheel link: %s', self.link) + logger.debug("Using cached wheel link: %s", self.link) # Things that are valid for all kinds of requirements? @property @@ -247,7 +247,7 @@ def is_pinned(self): For example, some-package==1.2 is pinned; some-package>1.2 is not. 
""" specifiers = self.specifier - return len(specifiers) == 1 and next(iter(specifiers)).operator in {'==', '==='} + return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="} @property def installed_version(self): @@ -259,10 +259,10 @@ def match_markers(self, extras_requested=None): if not extras_requested: # Provide an extra to safely evaluate the markers # without matching any extra - extras_requested = ('',) + extras_requested = ("",) if self.markers is not None: return any( - self.markers.evaluate({'extra': extra}) for extra in extras_requested + self.markers.evaluate({"extra": extra}) for extra in extras_requested ) else: return True @@ -276,7 +276,7 @@ def has_hash_options(self): URL do not. """ - return bool(self.options.get('hashes', {})) + return bool(self.options.get("hashes", {})) def hashes(self, trust_internet=True): # type: (bool) -> Hashes @@ -294,7 +294,7 @@ def hashes(self, trust_internet=True): downloaded from the internet, as by populate_link() """ - good_hashes = self.options.get('hashes', {}).copy() + good_hashes = self.options.get("hashes", {}).copy() link = self.link if trust_internet else self.original_link if link and link.hash: good_hashes.setdefault(link.hash_name, []).append(link.hash) @@ -313,7 +313,7 @@ def from_path(self): else: comes_from = self.comes_from.from_path() if comes_from: - s += '->' + comes_from + s += "->" + comes_from return s def ensure_build_location(self, build_dir): @@ -341,7 +341,7 @@ def ensure_build_location(self, build_dir): # FIXME: Is there a better place to create the build_dir? 
(hg and bzr # need this) if not os.path.exists(build_dir): - logger.debug('Creating directory %s', build_dir) + logger.debug("Creating directory %s", build_dir) _make_build_dir(build_dir) return os.path.join(build_dir, name) @@ -370,11 +370,11 @@ def move_to_correct_build_directory(self): new_location = self.ensure_build_location(self._ideal_build_dir) if os.path.exists(new_location): raise InstallationError( - 'A package already exists in %s; please remove it to continue' + "A package already exists in %s; please remove it to continue" % display_path(new_location) ) logger.debug( - 'Moving package %s from %s to new location %s', + "Moving package %s from %s to new location %s", self, display_path(old_location.path), display_path(new_location), @@ -399,7 +399,7 @@ def remove_temporary_source(self): """Remove the source files from this requirement, if they are marked for deletion""" if self.source_dir and has_delete_marker_file(self.source_dir): - logger.debug('Removing source in %s', self.source_dir) + logger.debug("Removing source in %s", self.source_dir) rmtree(self.source_dir) self.source_dir = None if self._temp_build_dir: @@ -485,7 +485,7 @@ def move_wheel_files( def setup_py_dir(self): # type: () -> str return os.path.join( - self.source_dir, self.link and self.link.subdirectory_fragment or '' + self.source_dir, self.link and self.link.subdirectory_fragment or "" ) @property @@ -493,7 +493,7 @@ def setup_py_path(self): # type: () -> str assert self.source_dir, "No source dir for %s" % self - setup_py = os.path.join(self.setup_py_dir, 'setup.py') + setup_py = os.path.join(self.setup_py_dir, "setup.py") # Python2 __file__ should not be unicode if six.PY2 and isinstance(setup_py, six.text_type): @@ -574,9 +574,9 @@ def prepare_metadata(self): metadata_name = canonicalize_name(self.metadata["Name"]) if canonicalize_name(self.req.name) != metadata_name: logger.warning( - 'Generating metadata for package %s ' - 'produced metadata for project name %s. 
Fix your ' - '#egg=%s fragments.', + "Generating metadata for package %s " + "produced metadata for project name %s. Fix your " + "#egg=%s fragments.", self.name, metadata_name, self.name, @@ -594,7 +594,7 @@ def prepare_pep517_metadata(self): # NOTE: This needs to be refactored to stop using atexit self._temp_dir = TempDirectory(delete=False, kind="req-install") - metadata_dir = os.path.join(self._temp_dir.path, 'pip-wheel-metadata') + metadata_dir = os.path.join(self._temp_dir.path, "pip-wheel-metadata") atexit.register(self.cleanup) ensure_dir(metadata_dir) @@ -617,8 +617,8 @@ def egg_info_path(self): def looks_like_virtual_env(path): return os.path.lexists( - os.path.join(path, 'bin', 'python') - ) or os.path.exists(os.path.join(path, 'Scripts', 'Python.exe')) + os.path.join(path, "bin", "python") + ) or os.path.exists(os.path.join(path, "Scripts", "Python.exe")) def locate_editable_egg_info(base): candidates = [] @@ -633,10 +633,10 @@ def locate_editable_egg_info(base): if looks_like_virtual_env(os.path.join(root, dir_)): dirs.remove(dir_) # Also don't search through tests - elif dir_ == 'test' or dir_ == 'tests': + elif dir_ == "test" or dir_ == "tests": dirs.remove(dir_) candidates.extend(os.path.join(root, dir_) for dir_ in dirs) - return [f for f in candidates if f.endswith('.egg-info')] + return [f for f in candidates if f.endswith(".egg-info")] def depth_of_directory(dir_): return dir_.count(os.path.sep) + ( @@ -647,7 +647,7 @@ def depth_of_directory(dir_): base = self.source_dir filenames = locate_editable_egg_info(base) else: - base = os.path.join(self.setup_py_dir, 'pip-egg-info') + base = os.path.join(self.setup_py_dir, "pip-egg-info") filenames = os.listdir(base) if not filenames: @@ -665,7 +665,7 @@ def depth_of_directory(dir_): @property def metadata(self): # type: () -> Any - if not hasattr(self, '_metadata'): + if not hasattr(self, "_metadata"): self._metadata = get_metadata(self.get_dist()) return self._metadata @@ -692,12 +692,12 @@ def 
get_dist(self): def assert_source_matches_version(self): # type: () -> None assert self.source_dir - version = self.metadata['version'] + version = self.metadata["version"] if self.req.specifier and version not in self.req.specifier: - logger.warning('Requested %s, but installing version %s', self, version) + logger.warning("Requested %s, but installing version %s", self, version) else: logger.debug( - 'Source in %s has version %s, which satisfies requirement %s', + "Source in %s has version %s, which satisfies requirement %s", display_path(self.source_dir), version, self, @@ -726,10 +726,10 @@ def install_editable( prefix=None, # type: Optional[str] ): # type: (...) -> None - logger.info('Running setup.py develop for %s', self.name) + logger.info("Running setup.py develop for %s", self.name) if prefix: - prefix_param = ['--prefix={}'.format(prefix)] + prefix_param = ["--prefix={}".format(prefix)] install_options = list(install_options) + prefix_param base_cmd = make_setuptools_shim_args( self.setup_py_path, @@ -739,7 +739,7 @@ def install_editable( with indent_log(): with self.build_env: call_subprocess( - base_cmd + ['develop', '--no-deps'] + list(install_options), + base_cmd + ["develop", "--no-deps"] + list(install_options), cwd=self.setup_py_dir, ) @@ -755,11 +755,11 @@ def update_editable(self, obtain=True): return assert self.editable assert self.source_dir - if self.link.scheme == 'file': + if self.link.scheme == "file": # Static paths don't get updated return - assert '+' in self.link.url, "bad url: %r" % self.link.url - vc_type, url = self.link.url.split('+', 1) + assert "+" in self.link.url, "bad url: %r" % self.link.url + vc_type, url = self.link.url.split("+", 1) vcs_backend = vcs.get_backend(vc_type) if vcs_backend: hidden_url = hide_url(self.link.url) @@ -768,7 +768,7 @@ def update_editable(self, obtain=True): else: vcs_backend.export(self.source_dir, url=hidden_url) else: - assert 0, 'Unexpected version control type (in %s): %s' % ( + assert 0, 
"Unexpected version control type (in %s): %s" % ( self.link, vc_type, ) @@ -803,14 +803,14 @@ def _clean_zip_name(self, name, prefix): # only used by archive. prefix + os.path.sep ), "name %r doesn't start with prefix %r" % (name, prefix) name = name[len(prefix) + 1 :] - name = name.replace(os.path.sep, '/') + name = name.replace(os.path.sep, "/") return name def _get_archive_name(self, path, parentdir, rootdir): # type: (str, str, str) -> str path = os.path.join(parentdir, path) name = self._clean_zip_name(path, rootdir) - return self.name + '/' + name + return self.name + "/" + name def archive(self, build_dir): # type: (str) -> None @@ -821,49 +821,49 @@ def archive(self, build_dir): assert self.source_dir create_archive = True - archive_name = '%s-%s.zip' % (self.name, self.metadata["version"]) + archive_name = "%s-%s.zip" % (self.name, self.metadata["version"]) archive_path = os.path.join(build_dir, archive_name) if os.path.exists(archive_path): response = ask_path_exists( - 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' + "The file %s exists. 
(i)gnore, (w)ipe, (b)ackup, (a)bort " % display_path(archive_path), - ('i', 'w', 'b', 'a'), + ("i", "w", "b", "a"), ) - if response == 'i': + if response == "i": create_archive = False - elif response == 'w': - logger.warning('Deleting %s', display_path(archive_path)) + elif response == "w": + logger.warning("Deleting %s", display_path(archive_path)) os.remove(archive_path) - elif response == 'b': + elif response == "b": dest_file = backup_dir(archive_path) logger.warning( - 'Backing up %s to %s', + "Backing up %s to %s", display_path(archive_path), display_path(dest_file), ) shutil.move(archive_path, dest_file) - elif response == 'a': + elif response == "a": sys.exit(-1) if not create_archive: return zip_output = zipfile.ZipFile( - archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True + archive_path, "w", zipfile.ZIP_DEFLATED, allowZip64=True ) with zip_output: dir = os.path.normcase(os.path.abspath(self.setup_py_dir)) for dirpath, dirnames, filenames in os.walk(dir): - if 'pip-egg-info' in dirnames: - dirnames.remove('pip-egg-info') + if "pip-egg-info" in dirnames: + dirnames.remove("pip-egg-info") for dirname in dirnames: dir_arcname = self._get_archive_name( dirname, parentdir=dirpath, rootdir=dir ) - zipdir = zipfile.ZipInfo(dir_arcname + '/') + zipdir = zipfile.ZipInfo(dir_arcname + "/") zipdir.external_attr = 0x1ED << 16 # 0o755 - zip_output.writestr(zipdir, '') + zip_output.writestr(zipdir, "") for filename in filenames: if filename == PIP_DELETE_MARKER_FILENAME: continue @@ -873,7 +873,7 @@ def archive(self, build_dir): filename = os.path.join(dirpath, filename) zip_output.write(filename, file_arcname) - logger.info('Saved %s', display_path(archive_path)) + logger.info("Saved %s", display_path(archive_path)) def install( self, @@ -912,17 +912,17 @@ def install( # Options specified in requirements file override those # specified on the command line, since the last option given # to setup.py is the one that is used. 
- global_options = list(global_options) + self.options.get('global_options', []) + global_options = list(global_options) + self.options.get("global_options", []) install_options = list(install_options) + self.options.get( - 'install_options', [] + "install_options", [] ) with TempDirectory(kind="record") as temp_dir: - record_filename = os.path.join(temp_dir.path, 'install-record.txt') + record_filename = os.path.join(temp_dir.path, "install-record.txt") install_args = self.get_install_args( global_options, record_filename, root, prefix, pycompile ) - msg = 'Running setup.py install for %s' % (self.name,) + msg = "Running setup.py install for %s" % (self.name,) with open_spinner(msg) as spinner: with indent_log(): with self.build_env: @@ -933,7 +933,7 @@ def install( ) if not os.path.exists(record_filename): - logger.debug('Record file %s not found', record_filename) + logger.debug("Record file %s not found", record_filename) return self.install_succeeded = True @@ -947,13 +947,13 @@ def prepend_root(path): with open(record_filename) as f: for line in f: directory = os.path.dirname(line) - if directory.endswith('.egg-info'): + if directory.endswith(".egg-info"): egg_info_dir = prepend_root(directory) break else: logger.warning( - 'Could not find .egg-info directory in install record' - ' for %s', + "Could not find .egg-info directory in install record" + " for %s", self, ) # FIXME: put the record somewhere @@ -969,9 +969,9 @@ def prepend_root(path): ) new_lines.sort() ensure_dir(egg_info_dir) - inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') - with open(inst_files_path, 'w') as f: - f.write('\n'.join(new_lines) + '\n') + inst_files_path = os.path.join(egg_info_dir, "installed-files.txt") + with open(inst_files_path, "w") as f: + f.write("\n".join(new_lines) + "\n") def get_install_args( self, @@ -988,13 +988,13 @@ def get_install_args( no_user_config=self.isolated, unbuffered_output=True, ) - install_args += ['install', '--record', 
record_filename] - install_args += ['--single-version-externally-managed'] + install_args += ["install", "--record", record_filename] + install_args += ["--single-version-externally-managed"] if root is not None: - install_args += ['--root', root] + install_args += ["--root", root] if prefix is not None: - install_args += ['--prefix', prefix] + install_args += ["--prefix", prefix] if pycompile: install_args += ["--compile"] @@ -1002,10 +1002,10 @@ def get_install_args( install_args += ["--no-compile"] if running_under_virtualenv(): - py_ver_str = 'python' + sysconfig.get_python_version() + py_ver_str = "python" + sysconfig.get_python_version() install_args += [ - '--install-headers', - os.path.join(sys.prefix, 'include', 'site', py_ver_str, self.name), + "--install-headers", + os.path.join(sys.prefix, "include", "site", py_ver_str, self.name), ] return install_args diff --git a/src/pip/_internal/req/req_set.py b/src/pip/_internal/req/req_set.py index 34ed6b9b47e..d887f91cb8f 100644 --- a/src/pip/_internal/req/req_set.py +++ b/src/pip/_internal/req/req_set.py @@ -44,7 +44,7 @@ def __str__(self): (req for req in self.requirements.values() if not req.comes_from), key=lambda req: canonicalize_name(req.name), ) - return ' '.join(str(req.req) for req in requirements) + return " ".join(str(req.req) for req in requirements) def __repr__(self): # type: () -> str @@ -52,11 +52,11 @@ def __repr__(self): self.requirements.values(), key=lambda req: canonicalize_name(req.name) ) - format_string = '<{classname} object; {count} requirement(s): {reqs}>' + format_string = "<{classname} object; {count} requirement(s): {reqs}>" return format_string.format( classname=self.__class__.__name__, count=len(requirements), - reqs=', '.join(str(req.req) for req in requirements), + reqs=", ".join(str(req.req) for req in requirements), ) def add_unnamed_requirement(self, install_req): @@ -197,7 +197,7 @@ def get_requirement(self, name): def cleanup_files(self): # type: () -> None """Clean up 
files, remove builds.""" - logger.debug('Cleaning up...') + logger.debug("Cleaning up...") with indent_log(): for req in self.reqs_to_cleanup: req.remove_temporary_source() diff --git a/src/pip/_internal/req/req_tracker.py b/src/pip/_internal/req/req_tracker.py index c3b1fa98343..f7c2fca819c 100644 --- a/src/pip/_internal/req/req_tracker.py +++ b/src/pip/_internal/req/req_tracker.py @@ -24,14 +24,14 @@ class RequirementTracker(object): def __init__(self): # type: () -> None - self._root = os.environ.get('PIP_REQ_TRACKER') + self._root = os.environ.get("PIP_REQ_TRACKER") if self._root is None: - self._temp_dir = TempDirectory(delete=False, kind='req-tracker') - self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path - logger.debug('Created requirements tracker %r', self._root) + self._temp_dir = TempDirectory(delete=False, kind="req-tracker") + self._root = os.environ["PIP_REQ_TRACKER"] = self._temp_dir.path + logger.debug("Created requirements tracker %r", self._root) else: self._temp_dir = None - logger.debug('Re-using requirements tracker %r', self._root) + logger.debug("Re-using requirements tracker %r", self._root) self._entries = set() # type: Set[InstallRequirement] def __enter__(self): @@ -60,22 +60,22 @@ def add(self, req): try: with open(entry_path) as fp: # Error, these's already a build in progress. 
- raise LookupError('%s is already being built: %s' % (link, fp.read())) + raise LookupError("%s is already being built: %s" % (link, fp.read())) except IOError as e: if e.errno != errno.ENOENT: raise assert req not in self._entries - with open(entry_path, 'w') as fp: + with open(entry_path, "w") as fp: fp.write(info) self._entries.add(req) - logger.debug('Added %s to build tracker %r', req, self._root) + logger.debug("Added %s to build tracker %r", req, self._root) def remove(self, req): # type: (InstallRequirement) -> None link = req.link self._entries.remove(req) os.unlink(self._entry_path(link)) - logger.debug('Removed %s from build tracker %r', req, self._root) + logger.debug("Removed %s from build tracker %r", req, self._root) def cleanup(self): # type: () -> None @@ -85,7 +85,7 @@ def cleanup(self): if remove: self._temp_dir.cleanup() logger.debug( - '%s build tracker %r', 'Removed' if remove else 'Cleaned', self._root + "%s build tracker %r", "Removed" if remove else "Cleaned", self._root ) @contextlib.contextmanager diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index c5955c14a39..bc7423ed7a5 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -57,12 +57,12 @@ def _script_names(dist, script_name, is_gui): exe_name = os.path.join(bin_dir, script_name) paths_to_remove = [exe_name] if WINDOWS: - paths_to_remove.append(exe_name + '.exe') - paths_to_remove.append(exe_name + '.exe.manifest') + paths_to_remove.append(exe_name + ".exe") + paths_to_remove.append(exe_name + ".exe.manifest") if is_gui: - paths_to_remove.append(exe_name + '-script.pyw') + paths_to_remove.append(exe_name + "-script.pyw") else: - paths_to_remove.append(exe_name + '-script.py') + paths_to_remove.append(exe_name + "-script.py") return paths_to_remove @@ -91,16 +91,16 @@ def uninstallation_paths(dist): UninstallPathSet.add() takes care of the __pycache__ .py[co]. 
""" - r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD'))) + r = csv.reader(FakeFile(dist.get_metadata_lines("RECORD"))) for row in r: path = os.path.join(dist.location, row[0]) yield path - if path.endswith('.py'): + if path.endswith(".py"): dn, fn = os.path.split(path) base = fn[:-3] - path = os.path.join(dn, base + '.pyc') + path = os.path.join(dn, base + ".pyc") yield path - path = os.path.join(dn, base + '.pyo') + path = os.path.join(dn, base + ".pyo") yield path @@ -259,7 +259,7 @@ def _get_file_stash(self, path): else: # Did not find any suitable root head = os.path.dirname(path) - save_dir = TempDirectory(kind='uninstall') + save_dir = TempDirectory(kind="uninstall") self._save_dirs[head] = save_dir relpath = os.path.relpath(path, head) @@ -305,7 +305,7 @@ def rollback(self): for new_path, path in self._moves: try: - logger.debug('Replacing %s from %s', new_path, path) + logger.debug("Replacing %s from %s", new_path, path) if os.path.isfile(new_path) or os.path.islink(new_path): os.unlink(new_path) elif os.path.isdir(new_path): @@ -361,7 +361,7 @@ def add(self, path): # __pycache__ files can show up after 'installed-files.txt' is created, # due to imports - if os.path.splitext(path)[1] == '.py' and uses_pycache: + if os.path.splitext(path)[1] == ".py" and uses_pycache: self.add(cache_from_source(path)) def add_pth(self, pth_file, entry): @@ -387,7 +387,7 @@ def remove(self, auto_confirm=False, verbose=False): return dist_name_version = self.dist.project_name + "-" + self.dist.version - logger.info('Uninstalling %s:', dist_name_version) + logger.info("Uninstalling %s:", dist_name_version) with indent_log(): if auto_confirm or self._allowed_to_proceed(verbose): @@ -397,12 +397,12 @@ def remove(self, auto_confirm=False, verbose=False): for path in sorted(compact(for_rename)): moved.stash(path) - logger.debug('Removing file or directory %s', path) + logger.debug("Removing file or directory %s", path) for pth in self.pth.values(): pth.remove() - 
logger.info('Successfully uninstalled %s', dist_name_version) + logger.info("Successfully uninstalled %s", dist_name_version) def _allowed_to_proceed(self, verbose): # type: (bool) -> bool @@ -427,13 +427,13 @@ def _display(msg, paths): will_remove = set(self.paths) will_skip = set() - _display('Would remove:', will_remove) - _display('Would not remove (might be manually added):', will_skip) - _display('Would not remove (outside of prefix):', self._refuse) + _display("Would remove:", will_remove) + _display("Would not remove (might be manually added):", will_skip) + _display("Would not remove (outside of prefix):", self._refuse) if verbose: - _display('Will actually move:', compress_for_rename(self.paths)) + _display("Will actually move:", compress_for_rename(self.paths)) - return ask('Proceed (y/n)? ', ('y', 'n')) == 'y' + return ask("Proceed (y/n)? ", ("y", "n")) == "y" def rollback(self): # type: () -> None @@ -443,7 +443,7 @@ def rollback(self): "Can't roll back %s; was not uninstalled", self.dist.project_name ) return - logger.info('Rolling back uninstall of %s', self.dist.project_name) + logger.info("Rolling back uninstall of %s", self.dist.project_name) self._moved_paths.rollback() for pth in self.pth.values(): pth.rollback() @@ -480,47 +480,47 @@ def from_dist(cls, dist): paths_to_remove = cls(dist) develop_egg_link = egg_link_path(dist) - develop_egg_link_egg_info = '{}.egg-info'.format( + develop_egg_link_egg_info = "{}.egg-info".format( pkg_resources.to_filename(dist.project_name) ) egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info) # Special case for distutils installed package - distutils_egg_info = getattr(dist._provider, 'path', None) + distutils_egg_info = getattr(dist._provider, "path", None) # Uninstall cases order do matter as in the case of 2 installs of the # same package, pip needs to uninstall the currently detected version if ( egg_info_exists - and dist.egg_info.endswith('.egg-info') + and dist.egg_info.endswith(".egg-info") 
and not dist.egg_info.endswith(develop_egg_link_egg_info) ): # if dist.egg_info.endswith(develop_egg_link_egg_info), we # are in fact in the develop_egg_link case paths_to_remove.add(dist.egg_info) - if dist.has_metadata('installed-files.txt'): + if dist.has_metadata("installed-files.txt"): for installed_file in dist.get_metadata( - 'installed-files.txt' + "installed-files.txt" ).splitlines(): path = os.path.normpath(os.path.join(dist.egg_info, installed_file)) paths_to_remove.add(path) # FIXME: need a test for this elif block # occurs with --single-version-externally-managed/--record outside # of pip - elif dist.has_metadata('top_level.txt'): - if dist.has_metadata('namespace_packages.txt'): - namespaces = dist.get_metadata('namespace_packages.txt') + elif dist.has_metadata("top_level.txt"): + if dist.has_metadata("namespace_packages.txt"): + namespaces = dist.get_metadata("namespace_packages.txt") else: namespaces = [] for top_level_pkg in [ p - for p in dist.get_metadata('top_level.txt').splitlines() + for p in dist.get_metadata("top_level.txt").splitlines() if p and p not in namespaces ]: path = os.path.join(dist.location, top_level_pkg) paths_to_remove.add(path) - paths_to_remove.add(path + '.py') - paths_to_remove.add(path + '.pyc') - paths_to_remove.add(path + '.pyo') + paths_to_remove.add(path + ".py") + paths_to_remove.add(path + ".pyc") + paths_to_remove.add(path + ".pyo") elif distutils_egg_info: raise UninstallationError( @@ -531,58 +531,58 @@ def from_dist(cls, dist): ) ) - elif dist.location.endswith('.egg'): + elif dist.location.endswith(".egg"): # package installed by easy_install # We cannot match on dist.egg_name because it can slightly vary # i.e. 
setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg paths_to_remove.add(dist.location) easy_install_egg = os.path.split(dist.location)[1] easy_install_pth = os.path.join( - os.path.dirname(dist.location), 'easy-install.pth' + os.path.dirname(dist.location), "easy-install.pth" ) - paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg) + paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg) - elif egg_info_exists and dist.egg_info.endswith('.dist-info'): + elif egg_info_exists and dist.egg_info.endswith(".dist-info"): for path in uninstallation_paths(dist): paths_to_remove.add(path) elif develop_egg_link: # develop egg - with open(develop_egg_link, 'r') as fh: + with open(develop_egg_link, "r") as fh: link_pointer = os.path.normcase(fh.readline().strip()) assert link_pointer == dist.location, ( - 'Egg-link %s does not match installed location of %s ' - '(at %s)' % (link_pointer, dist.project_name, dist.location) + "Egg-link %s does not match installed location of %s " + "(at %s)" % (link_pointer, dist.project_name, dist.location) ) paths_to_remove.add(develop_egg_link) easy_install_pth = os.path.join( - os.path.dirname(develop_egg_link), 'easy-install.pth' + os.path.dirname(develop_egg_link), "easy-install.pth" ) paths_to_remove.add_pth(easy_install_pth, dist.location) else: logger.debug( - 'Not sure how to uninstall: %s - Check: %s', dist, dist.location + "Not sure how to uninstall: %s - Check: %s", dist, dist.location ) # find distutils scripts= scripts - if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'): - for script in dist.metadata_listdir('scripts'): + if dist.has_metadata("scripts") and dist.metadata_isdir("scripts"): + for script in dist.metadata_listdir("scripts"): if dist_in_usersite(dist): bin_dir = bin_user else: bin_dir = bin_py paths_to_remove.add(os.path.join(bin_dir, script)) if WINDOWS: - paths_to_remove.add(os.path.join(bin_dir, script) + '.bat') + paths_to_remove.add(os.path.join(bin_dir, script) + 
".bat") # find console_scripts _scripts_to_remove = [] - console_scripts = dist.get_entry_map(group='console_scripts') + console_scripts = dist.get_entry_map(group="console_scripts") for name in console_scripts.keys(): _scripts_to_remove.extend(_script_names(dist, name, False)) # find gui_scripts - gui_scripts = dist.get_entry_map(group='gui_scripts') + gui_scripts = dist.get_entry_map(group="gui_scripts") for name in gui_scripts.keys(): _scripts_to_remove.extend(_script_names(dist, name, True)) @@ -616,38 +616,38 @@ def add(self, entry): # have more than "\\sever\share". Valid examples: "\\server\share\" or # "\\server\share\folder". Python 2.7.8+ support UNC in splitdrive. if WINDOWS and not os.path.splitdrive(entry)[0]: - entry = entry.replace('\\', '/') + entry = entry.replace("\\", "/") self.entries.add(entry) def remove(self): # type: () -> None - logger.debug('Removing pth entries from %s:', self.file) - with open(self.file, 'rb') as fh: + logger.debug("Removing pth entries from %s:", self.file) + with open(self.file, "rb") as fh: # windows uses '\r\n' with py3k, but uses '\n' with py2.x lines = fh.readlines() self._saved_lines = lines - if any(b'\r\n' in line for line in lines): - endline = '\r\n' + if any(b"\r\n" in line for line in lines): + endline = "\r\n" else: - endline = '\n' + endline = "\n" # handle missing trailing newline if lines and not lines[-1].endswith(endline.encode("utf-8")): lines[-1] = lines[-1] + endline.encode("utf-8") for entry in self.entries: try: - logger.debug('Removing entry: %s', entry) + logger.debug("Removing entry: %s", entry) lines.remove((entry + endline).encode("utf-8")) except ValueError: pass - with open(self.file, 'wb') as fh: + with open(self.file, "wb") as fh: fh.writelines(lines) def rollback(self): # type: () -> bool if self._saved_lines is None: - logger.error('Cannot roll back changes to %s, none were made', self.file) + logger.error("Cannot roll back changes to %s, none were made", self.file) return False - 
logger.debug('Rolling %s back to previous state', self.file) - with open(self.file, 'wb') as fh: + logger.debug("Rolling %s back to previous state", self.file) + with open(self.file, "wb") as fh: fh.writelines(self._saved_lines) return True diff --git a/src/pip/_internal/utils/appdirs.py b/src/pip/_internal/utils/appdirs.py index 373ce3ce3e5..116d73e0f41 100644 --- a/src/pip/_internal/utils/appdirs.py +++ b/src/pip/_internal/utils/appdirs.py @@ -99,15 +99,15 @@ def user_data_dir(appname, roaming=False): path = os.path.join(os.path.normpath(_get_win_folder(const)), appname) elif sys.platform == "darwin": path = ( - os.path.join(expanduser('~/Library/Application Support/'), appname) + os.path.join(expanduser("~/Library/Application Support/"), appname) if os.path.isdir( - os.path.join(expanduser('~/Library/Application Support/'), appname) + os.path.join(expanduser("~/Library/Application Support/"), appname) ) - else os.path.join(expanduser('~/.config/'), appname) + else os.path.join(expanduser("~/.config/"), appname) ) else: path = os.path.join( - os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")), appname + os.getenv("XDG_DATA_HOME", expanduser("~/.local/share")), appname ) return path @@ -139,7 +139,7 @@ def user_config_dir(appname, roaming=True): elif sys.platform == "darwin": path = user_data_dir(appname) else: - path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config")) + path = os.getenv("XDG_CONFIG_HOME", expanduser("~/.config")) path = os.path.join(path, appname) return path @@ -167,11 +167,11 @@ def site_config_dirs(appname): if WINDOWS: path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) pathlist = [os.path.join(path, appname)] - elif sys.platform == 'darwin': - pathlist = [os.path.join('/Library/Application Support', appname)] + elif sys.platform == "darwin": + pathlist = [os.path.join("/Library/Application Support", appname)] else: # try looking in $XDG_CONFIG_DIRS - xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') + 
xdg_config_dirs = os.getenv("XDG_CONFIG_DIRS", "/etc/xdg") if xdg_config_dirs: pathlist = [ os.path.join(expanduser(x), appname) @@ -181,7 +181,7 @@ def site_config_dirs(appname): pathlist = [] # always look in /etc directly as well - pathlist.append('/etc') + pathlist.append("/etc") return pathlist @@ -260,7 +260,7 @@ def _win_path_to_bytes(path): If encoding using ASCII and MBCS fails, return the original Unicode path. """ - for encoding in ('ASCII', 'MBCS'): + for encoding in ("ASCII", "MBCS"): try: return path.encode(encoding) except (UnicodeEncodeError, LookupError): diff --git a/src/pip/_internal/utils/compat.py b/src/pip/_internal/utils/compat.py index 619a2c8ae62..6be675141de 100644 --- a/src/pip/_internal/utils/compat.py +++ b/src/pip/_internal/utils/compat.py @@ -124,8 +124,8 @@ def str_to_display(data, desc=None): decoded_data = data.decode(encoding) except UnicodeDecodeError: if desc is None: - desc = 'Bytes object' - msg_format = '{} does not appear to be encoded as %s'.format(desc) + desc = "Bytes object" + msg_format = "{} does not appear to be encoded as %s".format(desc) logger.warning(msg_format, encoding) decoded_data = data.decode(encoding, errors=backslashreplace_decode) @@ -154,7 +154,7 @@ def console_to_str(data): # type: (bytes) -> Text """Return a string, safe for output, of subprocess output. 
""" - return str_to_display(data, desc='Subprocess output') + return str_to_display(data, desc="Subprocess output") if PY2: @@ -163,7 +163,7 @@ def native_str(s, replace=False): # type: (str, bool) -> str # Replace is ignored -- unicode to UTF-8 can't fail if isinstance(s, text_type): - return s.encode('utf-8') + return s.encode("utf-8") return s @@ -172,7 +172,7 @@ def native_str(s, replace=False): def native_str(s, replace=False): # type: (str, bool) -> str if isinstance(s, bytes): - return s.decode('utf-8', 'replace' if replace else 'strict') + return s.decode("utf-8", "replace" if replace else "strict") return s @@ -189,7 +189,7 @@ def get_path_uid(path): :raises OSError: When path is a symlink or can't be read. """ - if hasattr(os, 'O_NOFOLLOW'): + if hasattr(os, "O_NOFOLLOW"): fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) file_uid = os.fstat(fd).st_uid os.close(fd) @@ -226,7 +226,7 @@ def expanduser(path): Includes a workaround for https://bugs.python.org/issue14768 """ expanded = os.path.expanduser(path) - if path.startswith('~/') and expanded.startswith('//'): + if path.startswith("~/") and expanded.startswith("//"): expanded = expanded[1:] return expanded @@ -240,13 +240,13 @@ def expanduser(path): # windows detection, covers cpython and ironpython -WINDOWS = sys.platform.startswith("win") or (sys.platform == 'cli' and os.name == 'nt') +WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt") def samefile(file1, file2): # type: (str, str) -> bool """Provide an alternative for os.path.samefile on Windows/Python2""" - if hasattr(os.path, 'samefile'): + if hasattr(os.path, "samefile"): return os.path.samefile(file1, file2) else: path1 = os.path.normcase(os.path.abspath(file1)) @@ -254,7 +254,7 @@ def samefile(file1, file2): return path1 == path2 -if hasattr(shutil, 'get_terminal_size'): +if hasattr(shutil, "get_terminal_size"): def get_terminal_size(): # type: () -> Tuple[int, int] @@ -281,7 +281,7 @@ def ioctl_GWINSZ(fd): 
import struct cr = struct.unpack_from( - 'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678') + "hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "12345678") ) except Exception: return None @@ -298,5 +298,5 @@ def ioctl_GWINSZ(fd): except Exception: pass if not cr: - cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) + cr = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", 80)) return int(cr[1]), int(cr[0]) diff --git a/src/pip/_internal/utils/encoding.py b/src/pip/_internal/utils/encoding.py index 3fffa1f69d3..00499ebb182 100644 --- a/src/pip/_internal/utils/encoding.py +++ b/src/pip/_internal/utils/encoding.py @@ -12,16 +12,16 @@ from typing import List, Tuple, Text BOMS = [ - (codecs.BOM_UTF8, 'utf-8'), - (codecs.BOM_UTF16, 'utf-16'), - (codecs.BOM_UTF16_BE, 'utf-16-be'), - (codecs.BOM_UTF16_LE, 'utf-16-le'), - (codecs.BOM_UTF32, 'utf-32'), - (codecs.BOM_UTF32_BE, 'utf-32-be'), - (codecs.BOM_UTF32_LE, 'utf-32-le'), + (codecs.BOM_UTF8, "utf-8"), + (codecs.BOM_UTF16, "utf-16"), + (codecs.BOM_UTF16_BE, "utf-16-be"), + (codecs.BOM_UTF16_LE, "utf-16-le"), + (codecs.BOM_UTF32, "utf-32"), + (codecs.BOM_UTF32_BE, "utf-32-be"), + (codecs.BOM_UTF32_LE, "utf-32-le"), ] # type: List[Tuple[bytes, Text]] -ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)') +ENCODING_RE = re.compile(br"coding[:=]\s*([-\w.]+)") def auto_decode(data): @@ -33,8 +33,8 @@ def auto_decode(data): if data.startswith(bom): return data[len(bom) :].decode(encoding) # Lets check the first two lines as in PEP263 - for line in data.split(b'\n')[:2]: - if line[0:1] == b'#' and ENCODING_RE.search(line): - encoding = ENCODING_RE.search(line).groups()[0].decode('ascii') + for line in data.split(b"\n")[:2]: + if line[0:1] == b"#" and ENCODING_RE.search(line): + encoding = ENCODING_RE.search(line).groups()[0].decode("ascii") return data.decode(encoding) return data.decode(locale.getpreferredencoding(False) or sys.getdefaultencoding()) diff --git a/src/pip/_internal/utils/filesystem.py 
b/src/pip/_internal/utils/filesystem.py index a2d2fd6b14d..6d0fed85474 100644 --- a/src/pip/_internal/utils/filesystem.py +++ b/src/pip/_internal/utils/filesystem.py @@ -89,9 +89,9 @@ def adjacent_tmp_file(path): delete=False, dir=os.path.dirname(path), prefix=os.path.basename(path), - suffix='.tmp', + suffix=".tmp", ) as f: - result = cast('NamedTemporaryFileResult', f) + result = cast("NamedTemporaryFileResult", f) try: yield result finally: diff --git a/src/pip/_internal/utils/filetypes.py b/src/pip/_internal/utils/filetypes.py index 89423fb5435..4297cbda17e 100644 --- a/src/pip/_internal/utils/filetypes.py +++ b/src/pip/_internal/utils/filetypes.py @@ -1,9 +1,9 @@ """Filetype information. """ -WHEEL_EXTENSION = '.whl' -BZ2_EXTENSIONS = ('.tar.bz2', '.tbz') -XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma') -ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION) -TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar') +WHEEL_EXTENSION = ".whl" +BZ2_EXTENSIONS = (".tar.bz2", ".tbz") +XZ_EXTENSIONS = (".tar.xz", ".txz", ".tlz", ".tar.lz", ".tar.lzma") +ZIP_EXTENSIONS = (".zip", WHEEL_EXTENSION) +TAR_EXTENSIONS = (".tar.gz", ".tgz", ".tar") ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS diff --git a/src/pip/_internal/utils/hashes.py b/src/pip/_internal/utils/hashes.py index 8a78e78381f..04e9e21a2cd 100644 --- a/src/pip/_internal/utils/hashes.py +++ b/src/pip/_internal/utils/hashes.py @@ -4,11 +4,7 @@ from pip._vendor.six import iteritems, iterkeys, itervalues -from pip._internal.exceptions import ( - HashMismatch, - HashMissing, - InstallationError, -) +from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError from pip._internal.utils.misc import read_chunks from pip._internal.utils.typing import MYPY_CHECK_RUNNING @@ -24,12 +20,12 @@ # The recommended hash algo of the moment. Change this whenever the state of # the art changes; it won't hurt backward compatibility. 
-FAVORITE_HASH = 'sha256' +FAVORITE_HASH = "sha256" # Names of hashlib algorithms allowed by the --hash option and ``pip hash`` # Currently, those are the ones at least as collision-resistant as sha256. -STRONG_HASHES = ['sha256', 'sha384', 'sha512'] +STRONG_HASHES = ["sha256", "sha384", "sha512"] class Hashes(object): @@ -72,7 +68,7 @@ def check_against_chunks(self, chunks): try: gots[hash_name] = hashlib.new(hash_name) except (ValueError, TypeError): - raise InstallationError('Unknown hash name: %s' % hash_name) + raise InstallationError("Unknown hash name: %s" % hash_name) for chunk in chunks: for hash in itervalues(gots): @@ -98,7 +94,7 @@ def check_against_file(self, file): def check_against_path(self, path): # type: (str) -> None - with open(path, 'rb') as file: + with open(path, "rb") as file: return self.check_against_file(file) def __nonzero__(self): diff --git a/src/pip/_internal/utils/logging.py b/src/pip/_internal/utils/logging.py index 869f58dca32..a43054fef3d 100644 --- a/src/pip/_internal/utils/logging.py +++ b/src/pip/_internal/utils/logging.py @@ -115,7 +115,7 @@ def indent_log(num=2): def get_indentation(): - return getattr(_log_state, 'indentation', 0) + return getattr(_log_state, "indentation", 0) class IndentingFormatter(logging.Formatter): @@ -135,15 +135,15 @@ def get_message_start(self, formatted, levelno): prefix to add to each line). """ if levelno < logging.WARNING: - return '' + return "" if formatted.startswith(DEPRECATION_MSG_PREFIX): # Then the message already has a prefix. We don't want it to # look like "WARNING: DEPRECATION: ...." 
- return '' + return "" if levelno < logging.ERROR: - return 'WARNING: ' + return "WARNING: " - return 'ERROR: ' + return "ERROR: " def format(self, record): """ @@ -154,11 +154,11 @@ def format(self, record): message_start = self.get_message_start(formatted, record.levelno) formatted = message_start + formatted - prefix = '' + prefix = "" if self.add_timestamp: # TODO: Use Formatter.default_time_format after dropping PY2. t = self.formatTime(record, "%Y-%m-%dT%H:%M:%S") - prefix = '%s,%03d ' % (t, record.msecs) + prefix = "%s,%03d " % (t, record.msecs) prefix += " " * get_indentation() formatted = "".join([prefix + line for line in formatted.splitlines(True)]) return formatted diff --git a/src/pip/_internal/utils/marker_files.py b/src/pip/_internal/utils/marker_files.py index c0396951f94..004984ac93a 100644 --- a/src/pip/_internal/utils/marker_files.py +++ b/src/pip/_internal/utils/marker_files.py @@ -1,13 +1,13 @@ import os.path -DELETE_MARKER_MESSAGE = '''\ +DELETE_MARKER_MESSAGE = """\ This file is placed here by pip to indicate the source was put here by pip. Once this package is successfully installed this source code will be deleted (unless you remove this file). -''' -PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt' +""" +PIP_DELETE_MARKER_FILENAME = "pip-delete-this-directory.txt" def has_delete_marker_file(directory): @@ -20,5 +20,5 @@ def write_delete_marker_file(directory): Write the pip delete marker file into this directory. 
""" filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME) - with open(filepath, 'w') as marker_fp: + with open(filepath, "w") as marker_fp: marker_fp.write(DELETE_MARKER_MESSAGE) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 6cbc5066cf6..f66e341f16f 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -17,6 +17,7 @@ from collections import deque from pip._vendor import pkg_resources + # NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is # why we ignore the type on this import. from pip._vendor.retrying import retry # type: ignore @@ -27,11 +28,7 @@ from pip import __version__ from pip._internal.exceptions import CommandError, InstallationError -from pip._internal.locations import ( - get_major_minor_version, - site_packages, - user_site, -) +from pip._internal.locations import get_major_minor_version, site_packages, user_site from pip._internal.utils.compat import ( WINDOWS, console_to_str, @@ -69,7 +66,7 @@ from pip._internal.utils.ui import SpinnerInterface VersionInfo = Tuple[int, int, int] - CommandArgs = List[Union[str, 'HiddenText']] + CommandArgs = List[Union[str, "HiddenText"]] else: # typing's cast() is needed at runtime, but we don't want to import typing. # Thus, we use a dummy no-op version, which we tell mypy to ignore. 
@@ -78,28 +75,28 @@ def cast(type_, value): # type: ignore __all__ = [ - 'rmtree', - 'display_path', - 'backup_dir', - 'ask', - 'splitext', - 'format_size', - 'is_installable_dir', - 'normalize_path', - 'renames', - 'get_prog', - 'call_subprocess', - 'captured_stdout', - 'ensure_dir', - 'get_installed_version', - 'remove_auth_from_url', + "rmtree", + "display_path", + "backup_dir", + "ask", + "splitext", + "format_size", + "is_installable_dir", + "normalize_path", + "renames", + "get_prog", + "call_subprocess", + "captured_stdout", + "ensure_dir", + "get_installed_version", + "remove_auth_from_url", ] logger = logging.getLogger(__name__) -subprocess_logger = logging.getLogger('pip.subprocessor') +subprocess_logger = logging.getLogger("pip.subprocessor") -LOG_DIVIDER = '----------------------------------------' +LOG_DIVIDER = "----------------------------------------" def get_pip_version(): @@ -107,7 +104,7 @@ def get_pip_version(): pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..") pip_pkg_dir = os.path.abspath(pip_pkg_dir) - return 'pip {} from {} (python {})'.format( + return "pip {} from {} (python {})".format( __version__, pip_pkg_dir, get_major_minor_version() ) @@ -129,7 +126,7 @@ def normalize_version_info(py_version_info): elif len(py_version_info) > 3: py_version_info = py_version_info[:3] - return cast('VersionInfo', py_version_info) + return cast("VersionInfo", py_version_info) def ensure_dir(path): @@ -146,13 +143,13 @@ def get_prog(): # type: () -> str try: prog = os.path.basename(sys.argv[0]) - if prog in ('__main__.py', '-c'): + if prog in ("__main__.py", "-c"): return "%s -m pip" % sys.executable else: return prog except (AttributeError, TypeError, IndexError): pass - return 'pip' + return "pip" # Retry every half second for up to 3 seconds @@ -197,7 +194,7 @@ def path_to_display(path): return path # Otherwise, path is a bytes object (str in Python 2). 
try: - display_path = path.decode(sys.getfilesystemencoding(), 'strict') + display_path = path.decode(sys.getfilesystemencoding(), "strict") except UnicodeDecodeError: # Include the full bytes to make troubleshooting easier, even though # it may not be very human readable. @@ -207,7 +204,7 @@ def path_to_display(path): # Also, we add the prefix "b" to the repr() return value both # to make the Python 2 output look like the Python 3 output, and # to signal to the user that this is a bytes representation. - display_path = str_to_display('b{!r}'.format(path)) + display_path = str_to_display("b{!r}".format(path)) else: # Silence the "F821 undefined name 'ascii'" flake8 error since # in Python 3 ascii() is a built-in. @@ -222,14 +219,14 @@ def display_path(path): if possible.""" path = os.path.normcase(os.path.abspath(path)) if sys.version_info[0] == 2: - path = path.decode(sys.getfilesystemencoding(), 'replace') - path = path.encode(sys.getdefaultencoding(), 'replace') + path = path.decode(sys.getfilesystemencoding(), "replace") + path = path.encode(sys.getdefaultencoding(), "replace") if path.startswith(os.getcwd() + os.path.sep): - path = '.' + path[len(os.getcwd()) :] + path = "." 
+ path[len(os.getcwd()) :] return path -def backup_dir(dir, ext='.bak'): +def backup_dir(dir, ext=".bak"): # type: (str, str) -> str """Figure out the name of a directory to back up the given dir to (adding .bak, .bak2, etc)""" @@ -243,7 +240,7 @@ def backup_dir(dir, ext='.bak'): def ask_path_exists(message, options): # type: (str, Iterable[str]) -> str - for action in os.environ.get('PIP_EXISTS_ACTION', '').split(): + for action in os.environ.get("PIP_EXISTS_ACTION", "").split(): if action in options: return action return ask(message, options) @@ -252,9 +249,9 @@ def ask_path_exists(message, options): def _check_no_input(message): # type: (str) -> None """Raise an error if no input is allowed.""" - if os.environ.get('PIP_NO_INPUT'): + if os.environ.get("PIP_NO_INPUT"): raise Exception( - 'No input was expected ($PIP_NO_INPUT set); question: %s' % message + "No input was expected ($PIP_NO_INPUT set); question: %s" % message ) @@ -267,8 +264,8 @@ def ask(message, options): response = response.strip().lower() if response not in options: print( - 'Your response (%r) was not one of the expected responses: ' - '%s' % (response, ', '.join(options)) + "Your response (%r) was not one of the expected responses: " + "%s" % (response, ", ".join(options)) ) else: return response @@ -291,13 +288,13 @@ def ask_password(message): def format_size(bytes): # type: (float) -> str if bytes > 1000 * 1000: - return '%.1fMB' % (bytes / 1000.0 / 1000) + return "%.1fMB" % (bytes / 1000.0 / 1000) elif bytes > 10 * 1000: - return '%ikB' % (bytes / 1000) + return "%ikB" % (bytes / 1000) elif bytes > 1000: - return '%.1fkB' % (bytes / 1000.0) + return "%.1fkB" % (bytes / 1000.0) else: - return '%ibytes' % bytes + return "%ibytes" % bytes def is_installable_dir(path): @@ -306,10 +303,10 @@ def is_installable_dir(path): """ if not os.path.isdir(path): return False - setup_py = os.path.join(path, 'setup.py') + setup_py = os.path.join(path, "setup.py") if os.path.isfile(setup_py): return True - 
pyproject_toml = os.path.join(path, 'pyproject.toml') + pyproject_toml = os.path.join(path, "pyproject.toml") if os.path.isfile(pyproject_toml): return True return False @@ -342,7 +339,7 @@ def splitext(path): # type: (str) -> Tuple[str, str] """Like os.path.splitext, but take off .tar too""" base, ext = posixpath.splitext(path) - if base.lower().endswith('.tar'): + if base.lower().endswith(".tar"): ext = base[-4:] + ext base = base[:-4] return base, ext @@ -416,7 +413,7 @@ def dist_is_editable(dist): Return True if given Distribution is an editable install. """ for path_item in sys.path: - egg_link = os.path.join(path_item, dist.project_name + '.egg-link') + egg_link = os.path.join(path_item, dist.project_name + ".egg-link") if os.path.isfile(egg_link): return True return False @@ -534,7 +531,7 @@ def egg_link_path(dist): sites.append(site_packages) for site in sites: - egglink = os.path.join(site, dist.project_name) + '.egg-link' + egglink = os.path.join(site, dist.project_name) + ".egg-link" if os.path.isfile(egglink): return egglink return None @@ -584,7 +581,7 @@ def format_command_args(args): # this can trigger a UnicodeDecodeError in Python 2 if the argument # has type unicode and includes a non-ascii character. (The type # checker doesn't ensure the annotations are correct in all cases.) - return ' '.join( + return " ".join( shlex_quote(str(arg)) if isinstance(arg, HiddenText) else shlex_quote(arg) for arg in args ) @@ -616,19 +613,19 @@ def make_subprocess_output_error( # them as arguments in the unicode format string below. This avoids # "UnicodeDecodeError: 'ascii' codec can't decode byte ..." in Python 2 # if either contains a non-ascii character. - command_display = str_to_display(command, desc='command bytes') + command_display = str_to_display(command, desc="command bytes") cwd_display = path_to_display(cwd) # We know the joined output value ends in a newline. 
- output = ''.join(lines) + output = "".join(lines) msg = ( # Use a unicode string to avoid "UnicodeEncodeError: 'ascii' # codec can't encode character ..." in Python 2 when a format # argument (e.g. `output`) has a non-ascii character. - u'Command errored out with exit status {exit_status}:\n' - ' command: {command_display}\n' - ' cwd: {cwd_display}\n' - 'Complete output ({line_count} lines):\n{output}{divider}' + u"Command errored out with exit status {exit_status}:\n" + " command: {command_display}\n" + " cwd: {cwd_display}\n" + "Complete output ({line_count} lines):\n{output}{divider}" ).format( exit_status=exit_status, command_display=command_display, @@ -644,7 +641,7 @@ def call_subprocess( cmd, # type: Union[List[str], CommandArgs] show_stdout=False, # type: bool cwd=None, # type: Optional[str] - on_returncode='raise', # type: str + on_returncode="raise", # type: str extra_ok_returncodes=None, # type: Optional[Iterable[int]] command_desc=None, # type: Optional[str] extra_environ=None, # type: Optional[Mapping[str, Any]] @@ -729,7 +726,7 @@ def call_subprocess( if not line: break line = line.rstrip() - all_output.append(line + '\n') + all_output.append(line + "\n") # Show the line immediately. log_subprocess(line) @@ -748,7 +745,7 @@ def call_subprocess( else: spinner.finish("done") if proc_had_error: - if on_returncode == 'raise': + if on_returncode == "raise": if not showing_subprocess: # Then the subprocess streams haven't been logged to the # console yet. @@ -757,22 +754,22 @@ def call_subprocess( ) subprocess_logger.error(msg) exc_msg = ( - 'Command errored out with exit status {}: {} ' - 'Check the logs for full command output.' + "Command errored out with exit status {}: {} " + "Check the logs for full command output." 
).format(proc.returncode, command_desc) raise InstallationError(exc_msg) - elif on_returncode == 'warn': + elif on_returncode == "warn": subprocess_logger.warning( 'Command "%s" had error code %s in %s', command_desc, proc.returncode, cwd, ) - elif on_returncode == 'ignore': + elif on_returncode == "ignore": pass else: - raise ValueError('Invalid value: on_returncode=%s' % repr(on_returncode)) - return ''.join(all_output) + raise ValueError("Invalid value: on_returncode=%s" % repr(on_returncode)) + return "".join(all_output) def write_output(msg, *args): @@ -799,7 +796,7 @@ def readline(self): except NameError: return self._gen.next() except StopIteration: - return '' + return "" def __iter__(self): return self._gen @@ -841,14 +838,14 @@ def captured_stdout(): Taken from Lib/support/__init__.py in the CPython repo. """ - return captured_output('stdout') + return captured_output("stdout") def captured_stderr(): """ See captured_stdout(). """ - return captured_output('stderr') + return captured_output("stderr") class cached_property(object): @@ -860,7 +857,7 @@ class cached_property(object): """ def __init__(self, func): - self.__doc__ = getattr(func, '__doc__') + self.__doc__ = getattr(func, "__doc__") self.func = func def __get__(self, obj, cls): @@ -898,8 +895,8 @@ def consume(iterator): def enum(*sequential, **named): enums = dict(zip(sequential, range(len(sequential))), **named) reverse = {value: key for key, value in enums.items()} - enums['reverse_mapping'] = reverse - return type('Enum', (), enums) + enums["reverse_mapping"] = reverse + return type("Enum", (), enums) def build_netloc(host, port): @@ -909,21 +906,21 @@ def build_netloc(host, port): """ if port is None: return host - if ':' in host: + if ":" in host: # Only wrap host with square brackets when it is IPv6 - host = '[{}]'.format(host) - return '{}:{}'.format(host, port) + host = "[{}]".format(host) + return "{}:{}".format(host, port) -def build_url_from_netloc(netloc, scheme='https'): +def 
build_url_from_netloc(netloc, scheme="https"): # type: (str, str) -> str """ Build a full URL from a netloc. """ - if netloc.count(':') >= 2 and '@' not in netloc and '[' not in netloc: # It must be a bare IPv6 address, so wrap it with brackets. - netloc = '[{}]'.format(netloc) - return '{}://{}'.format(scheme, netloc) + if netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc: # It must be a bare IPv6 address, so wrap it with brackets. + netloc = "[{}]".format(netloc) + return "{}://{}".format(scheme, netloc) def parse_netloc(netloc): @@ -942,18 +939,18 @@ def split_auth_from_netloc(netloc): Returns: (netloc, (username, password)). """ - if '@' not in netloc: + if "@" not in netloc: return netloc, (None, None) # Split from the right because that's how urllib.parse.urlsplit() # behaves if more than one @ is present (which can be checked using # the password attribute of urlsplit()'s return value). - auth, netloc = netloc.rsplit('@', 1) - if ':' in auth: + auth, netloc = netloc.rsplit("@", 1) + if ":" in auth: # Split from the left because that's how urllib.parse.urlsplit() # behaves if more than one : is present (which again can be checked # using the password attribute of the return value) - user_pass = auth.split(':', 1) + user_pass = auth.split(":", 1) else: user_pass = auth, None @@ -975,12 +972,12 @@ def redact_netloc(netloc): if user is None: return netloc if password is None: - user = '****' - password = '' + user = "****" + password = "" else: user = urllib_parse.quote(user) - password = ':****' - return '{user}{password}@{netloc}'.format( + password = ":****" + return "{user}{password}@{netloc}".format( user=user, password=password, netloc=netloc ) @@ -1048,7 +1045,7 @@ def __init__( def __repr__(self): # type: (...) -> str - return '<HiddenText {!r}>'.format(str(self)) + return "<HiddenText {!r}>".format(str(self)) def __str__(self): # type: (...) 
-> str @@ -1073,7 +1070,7 @@ def __ne__(self, other): def hide_value(value): # type: (str) -> HiddenText - return HiddenText(value, redacted='****') + return HiddenText(value, redacted="****") def hide_url(url): @@ -1090,10 +1087,10 @@ def protect_pip_from_modification_on_windows(modifying_pip): python -m pip ... """ pip_names = set() - for ext in ('', '.exe'): - pip_names.add('pip{ext}'.format(ext=ext)) - pip_names.add('pip{}{ext}'.format(sys.version_info[0], ext=ext)) - pip_names.add('pip{}.{}{ext}'.format(*sys.version_info[:2], ext=ext)) + for ext in ("", ".exe"): + pip_names.add("pip{ext}".format(ext=ext)) + pip_names.add("pip{}{ext}".format(sys.version_info[0], ext=ext)) + pip_names.add("pip{}.{}{ext}".format(*sys.version_info[:2], ext=ext)) # See https://github.com/pypa/pip/issues/1299 for more discussion should_show_use_python_msg = ( @@ -1103,7 +1100,7 @@ def protect_pip_from_modification_on_windows(modifying_pip): if should_show_use_python_msg: new_command = [sys.executable, "-m", "pip"] + sys.argv[1:] raise CommandError( - 'To modify pip, please run the following command:\n{}'.format( + "To modify pip, please run the following command:\n{}".format( " ".join(new_command) ) ) diff --git a/src/pip/_internal/utils/outdated.py b/src/pip/_internal/utils/outdated.py index e4027661d8a..9cc305b14e7 100644 --- a/src/pip/_internal/utils/outdated.py +++ b/src/pip/_internal/utils/outdated.py @@ -16,11 +16,7 @@ from pip._internal.models.search_scope import SearchScope from pip._internal.models.selection_prefs import SelectionPreferences from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.filesystem import ( - adjacent_tmp_file, - check_path_owner, - replace, -) +from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace from pip._internal.utils.misc import ( ensure_dir, get_installed_version, @@ -56,8 +52,8 @@ def make_link_collector( index_urls = [options.index_url] + options.extra_index_urls if options.no_index and 
not suppress_no_index: logger.debug( - 'Ignoring indexes: %s', - ','.join(redact_auth_from_url(url) for url in index_urls), + "Ignoring indexes: %s", + ",".join(redact_auth_from_url(url) for url in index_urls), ) index_urls = [] @@ -208,7 +204,7 @@ def pip_version_check(session, options): local_version_is_older = ( pip_version < remote_version and pip_version.base_version != remote_version.base_version - and was_installed_by_pip('pip') + and was_installed_by_pip("pip") ) # Determine if our pypi_version is older diff --git a/src/pip/_internal/utils/packaging.py b/src/pip/_internal/utils/packaging.py index 2f87313211a..b05dd01206b 100644 --- a/src/pip/_internal/utils/packaging.py +++ b/src/pip/_internal/utils/packaging.py @@ -37,7 +37,7 @@ def check_requires_python(requires_python, version_info): return True requires_python_specifier = specifiers.SpecifierSet(requires_python) - python_version = version.parse('.'.join(map(str, version_info))) + python_version = version.parse(".".join(map(str, version_info))) return python_version in requires_python_specifier @@ -47,17 +47,17 @@ def get_metadata(dist): :raises NoneMetadataError: if the distribution reports `has_metadata()` True but `get_metadata()` returns None. """ - metadata_name = 'METADATA' + metadata_name = "METADATA" if isinstance(dist, pkg_resources.DistInfoDistribution) and dist.has_metadata( metadata_name ): metadata = dist.get_metadata(metadata_name) - elif dist.has_metadata('PKG-INFO'): - metadata_name = 'PKG-INFO' + elif dist.has_metadata("PKG-INFO"): + metadata_name = "PKG-INFO" metadata = dist.get_metadata(metadata_name) else: logger.warning("No metadata found in %s", display_path(dist.location)) - metadata = '' + metadata = "" if metadata is None: raise NoneMetadataError(dist, metadata_name) @@ -76,7 +76,7 @@ def get_requires_python(dist): if not present. 
""" pkg_info_dict = get_metadata(dist) - requires_python = pkg_info_dict.get('Requires-Python') + requires_python = pkg_info_dict.get("Requires-Python") if requires_python is not None: # Convert to a str to satisfy the type checker, since requires_python @@ -88,8 +88,8 @@ def get_requires_python(dist): def get_installer(dist): # type: (Distribution) -> str - if dist.has_metadata('INSTALLER'): - for line in dist.get_metadata_lines('INSTALLER'): + if dist.has_metadata("INSTALLER"): + for line in dist.get_metadata_lines("INSTALLER"): if line.strip(): return line.strip() - return '' + return "" diff --git a/src/pip/_internal/utils/setuptools_build.py b/src/pip/_internal/utils/setuptools_build.py index f65c3a536fb..27afda06e28 100644 --- a/src/pip/_internal/utils/setuptools_build.py +++ b/src/pip/_internal/utils/setuptools_build.py @@ -38,10 +38,10 @@ def make_setuptools_shim_args( """ args = [sys.executable] if unbuffered_output: - args.append('-u') - args.extend(['-c', _SETUPTOOLS_SHIM.format(setup_py_path)]) + args.append("-u") + args.extend(["-c", _SETUPTOOLS_SHIM.format(setup_py_path)]) if global_options: args.extend(global_options) if no_user_config: - args.append('--no-user-cfg') + args.append("--no-user-cfg") return args diff --git a/src/pip/_internal/utils/temp_dir.py b/src/pip/_internal/utils/temp_dir.py index f1de4b33ceb..b4ff547b6a2 100644 --- a/src/pip/_internal/utils/temp_dir.py +++ b/src/pip/_internal/utils/temp_dir.py @@ -118,7 +118,7 @@ class AdjacentTempDirectory(TempDirectory): LEADING_CHARS = "-~.=%0123456789" def __init__(self, original, delete=None): - self.original = original.rstrip('/\\') + self.original = original.rstrip("/\\") super(AdjacentTempDirectory, self).__init__(delete=delete) @classmethod @@ -134,7 +134,7 @@ def _generate_names(cls, name): for candidate in itertools.combinations_with_replacement( cls.LEADING_CHARS, i - 1 ): - new_name = '~' + ''.join(candidate) + name[i:] + new_name = "~" + "".join(candidate) + name[i:] if new_name != 
name: yield new_name @@ -143,7 +143,7 @@ def _generate_names(cls, name): for candidate in itertools.combinations_with_replacement( cls.LEADING_CHARS, i ): - new_name = '~' + ''.join(candidate) + name + new_name = "~" + "".join(candidate) + name if new_name != name: yield new_name diff --git a/src/pip/_internal/utils/ui.py b/src/pip/_internal/utils/ui.py index 93df7313559..315c375c4e8 100644 --- a/src/pip/_internal/utils/ui.py +++ b/src/pip/_internal/utils/ui.py @@ -244,7 +244,7 @@ def update(self): message = self.message % self phase = self.next_phase() suffix = self.suffix % self - line = ''.join( + line = "".join( [message, " " if message else "", phase, " " if suffix else "", suffix] ) diff --git a/src/pip/_internal/utils/unpacking.py b/src/pip/_internal/utils/unpacking.py index 5663cafd070..458710352e1 100644 --- a/src/pip/_internal/utils/unpacking.py +++ b/src/pip/_internal/utils/unpacking.py @@ -37,7 +37,7 @@ SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS except ImportError: - logger.debug('bz2 module is not available') + logger.debug("bz2 module is not available") try: # Only for Python 3.3+ @@ -45,7 +45,7 @@ SUPPORTED_EXTENSIONS += XZ_EXTENSIONS except ImportError: - logger.debug('lzma module is not available') + logger.debug("lzma module is not available") def current_umask(): @@ -57,15 +57,15 @@ def current_umask(): def split_leading_dir(path): # type: (Union[str, Text]) -> List[Union[str, Text]] - path = path.lstrip('/').lstrip('\\') - if '/' in path and ( - ('\\' in path and path.find('/') < path.find('\\')) or '\\' not in path + path = path.lstrip("/").lstrip("\\") + if "/" in path and ( + ("\\" in path and path.find("/") < path.find("\\")) or "\\" not in path ): - return path.split('/', 1) - elif '\\' in path: - return path.split('\\', 1) + return path.split("/", 1) + elif "\\" in path: + return path.split("\\", 1) else: - return [path, ''] + return [path, ""] def has_leading_dir(paths): @@ -95,7 +95,7 @@ def unzip_file(filename, location, flatten=True): 
no-ops per the python docs. """ ensure_dir(location) - zipfp = open(filename, 'rb') + zipfp = open(filename, "rb") try: zip = zipfile.ZipFile(zipfp, allowZip64=True) leading = has_leading_dir(zip.namelist()) and flatten @@ -106,7 +106,7 @@ def unzip_file(filename, location, flatten=True): fn = split_leading_dir(name)[1] fn = os.path.join(location, fn) dir = os.path.dirname(fn) - if fn.endswith('/') or fn.endswith('\\'): + if fn.endswith("/") or fn.endswith("\\"): # A directory ensure_dir(fn) else: @@ -115,7 +115,7 @@ def unzip_file(filename, location, flatten=True): # chunk of memory for the file's content fp = zip.open(name) try: - with open(fn, 'wb') as destfp: + with open(fn, "wb") as destfp: shutil.copyfileobj(fp, destfp) finally: fp.close() @@ -141,17 +141,17 @@ def untar_file(filename, location): no-ops per the python docs. """ ensure_dir(location) - if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'): - mode = 'r:gz' + if filename.lower().endswith(".gz") or filename.lower().endswith(".tgz"): + mode = "r:gz" elif filename.lower().endswith(BZ2_EXTENSIONS): - mode = 'r:bz2' + mode = "r:bz2" elif filename.lower().endswith(XZ_EXTENSIONS): - mode = 'r:xz' - elif filename.lower().endswith('.tar'): - mode = 'r' + mode = "r:xz" + elif filename.lower().endswith(".tar"): + mode = "r" else: - logger.warning('Cannot determine compression type for file %s', filename) - mode = 'r:*' + logger.warning("Cannot determine compression type for file %s", filename) + mode = "r:*" tar = tarfile.open(filename, mode) try: leading = has_leading_dir([member.name for member in tar.getmembers()]) @@ -171,7 +171,7 @@ def untar_file(filename, location): # Some corrupt tar files seem to produce this # (specifically bad symlinks) logger.warning( - 'In the tar file %s the member %s is invalid: %s', + "In the tar file %s the member %s is invalid: %s", filename, member.name, exc, @@ -184,14 +184,14 @@ def untar_file(filename, location): # Some corrupt tar files seem to 
produce this # (specifically bad symlinks) logger.warning( - 'In the tar file %s the member %s is invalid: %s', + "In the tar file %s the member %s is invalid: %s", filename, member.name, exc, ) continue ensure_dir(os.path.dirname(path)) - with open(path, 'wb') as destfp: + with open(path, "wb") as destfp: shutil.copyfileobj(fp, destfp) fp.close() # Update the timestamp (useful for cython compiled files) @@ -214,13 +214,13 @@ def unpack_file( # type: (...) -> None filename = os.path.realpath(filename) if ( - content_type == 'application/zip' + content_type == "application/zip" or filename.lower().endswith(ZIP_EXTENSIONS) or zipfile.is_zipfile(filename) ): - unzip_file(filename, location, flatten=not filename.endswith('.whl')) + unzip_file(filename, location, flatten=not filename.endswith(".whl")) elif ( - content_type == 'application/x-gzip' + content_type == "application/x-gzip" or tarfile.is_tarfile(filename) or filename.lower().endswith(TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS) ): @@ -229,12 +229,12 @@ def unpack_file( # FIXME: handle? # FIXME: magic signatures? logger.critical( - 'Cannot unpack file %s (downloaded from %s, content-type: %s); ' - 'cannot detect archive format', + "Cannot unpack file %s (downloaded from %s, content-type: %s); " + "cannot detect archive format", filename, location, content_type, ) raise InstallationError( - 'Cannot determine archive format of {}'.format(location) + "Cannot determine archive format of {}".format(location) ) diff --git a/src/pip/_internal/utils/urls.py b/src/pip/_internal/utils/urls.py index 941f0dc79af..fa83da114f7 100644 --- a/src/pip/_internal/utils/urls.py +++ b/src/pip/_internal/utils/urls.py @@ -12,9 +12,9 @@ def get_url_scheme(url): # type: (Union[str, Text]) -> Optional[Text] - if ':' not in url: + if ":" not in url: return None - return url.split(':', 1)[0].lower() + return url.split(":", 1)[0].lower() def path_to_url(path): @@ -24,7 +24,7 @@ def path_to_url(path): quoted path parts. 
""" path = os.path.normpath(os.path.abspath(path)) - url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path)) + url = urllib_parse.urljoin("file:", urllib_request.pathname2url(path)) return url @@ -33,21 +33,21 @@ def url_to_path(url): """ Convert a file: URL to a path. """ - assert url.startswith('file:'), ( + assert url.startswith("file:"), ( "You can only turn file: urls into filenames (not %r)" % url ) _, netloc, path, _, _ = urllib_parse.urlsplit(url) - if not netloc or netloc == 'localhost': + if not netloc or netloc == "localhost": # According to RFC 8089, same as empty authority. - netloc = '' - elif sys.platform == 'win32': + netloc = "" + elif sys.platform == "win32": # If we have a UNC path, prepend UNC share notation. - netloc = '\\\\' + netloc + netloc = "\\\\" + netloc else: raise ValueError( - 'non-local file URIs are not supported on this platform: %r' % url + "non-local file URIs are not supported on this platform: %r" % url ) path = urllib_request.url2pathname(netloc + path) diff --git a/src/pip/_internal/utils/virtualenv.py b/src/pip/_internal/utils/virtualenv.py index 380db1c3281..a0f22513612 100644 --- a/src/pip/_internal/utils/virtualenv.py +++ b/src/pip/_internal/utils/virtualenv.py @@ -9,7 +9,7 @@ def running_under_virtualenv(): Return True if we're running inside a virtualenv, False otherwise. 
""" - if hasattr(sys, 'real_prefix'): + if hasattr(sys, "real_prefix"): # pypa/virtualenv case return True elif sys.prefix != getattr(sys, "base_prefix", sys.prefix): @@ -27,7 +27,7 @@ def virtualenv_no_global(): # this mirrors the logic in virtualenv.py for locating the # no-global-site-packages.txt file site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) - no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt') + no_global_file = os.path.join(site_mod_dir, "no-global-site-packages.txt") if running_under_virtualenv() and os.path.isfile(no_global_file): return True else: diff --git a/src/pip/_internal/vcs/bazaar.py b/src/pip/_internal/vcs/bazaar.py index 931edb7778d..f2ab049ed11 100644 --- a/src/pip/_internal/vcs/bazaar.py +++ b/src/pip/_internal/vcs/bazaar.py @@ -20,29 +20,29 @@ class Bazaar(VersionControl): - name = 'bzr' - dirname = '.bzr' - repo_name = 'branch' + name = "bzr" + dirname = ".bzr" + repo_name = "branch" schemes = ( - 'bzr', - 'bzr+http', - 'bzr+https', - 'bzr+ssh', - 'bzr+sftp', - 'bzr+ftp', - 'bzr+lp', + "bzr", + "bzr+http", + "bzr+https", + "bzr+ssh", + "bzr+sftp", + "bzr+ftp", + "bzr+lp", ) def __init__(self, *args, **kwargs): super(Bazaar, self).__init__(*args, **kwargs) # This is only needed for python <2.7.5 # Register lp but do not expose as a scheme to support bzr+lp. 
- if getattr(urllib_parse, 'uses_fragment', None): - urllib_parse.uses_fragment.extend(['lp']) + if getattr(urllib_parse, "uses_fragment", None): + urllib_parse.uses_fragment.extend(["lp"]) @staticmethod def get_base_rev_args(rev): - return ['-r', rev] + return ["-r", rev] def export(self, location, url): # type: (str, HiddenText) -> None @@ -55,24 +55,24 @@ def export(self, location, url): url, rev_options = self.get_url_rev_options(url) self.run_command( - make_command('export', location, url, rev_options.to_args()), + make_command("export", location, url, rev_options.to_args()), show_stdout=False, ) def fetch_new(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None rev_display = rev_options.to_display() - logger.info('Checking out %s%s to %s', url, rev_display, display_path(dest)) - cmd_args = make_command('branch', '-q', rev_options.to_args(), url, dest) + logger.info("Checking out %s%s to %s", url, rev_display, display_path(dest)) + cmd_args = make_command("branch", "-q", rev_options.to_args(), url, dest) self.run_command(cmd_args) def switch(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None - self.run_command(make_command('switch', url), cwd=dest) + self.run_command(make_command("switch", url), cwd=dest) def update(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None - cmd_args = make_command('pull', '-q', rev_options.to_args()) + cmd_args = make_command("pull", "-q", rev_options.to_args()) self.run_command(cmd_args, cwd=dest) @classmethod @@ -80,16 +80,16 @@ def get_url_rev_and_auth(cls, url): # type: (str) -> Tuple[str, Optional[str], AuthInfo] # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it url, rev, user_pass = super(Bazaar, cls).get_url_rev_and_auth(url) - if url.startswith('ssh://'): - url = 'bzr+' + url + if url.startswith("ssh://"): + url = "bzr+" + url return url, rev, user_pass @classmethod def get_remote_url(cls, location): - urls = 
cls.run_command(['info'], show_stdout=False, cwd=location) + urls = cls.run_command(["info"], show_stdout=False, cwd=location) for line in urls.splitlines(): line = line.strip() - for x in ('checkout of branch: ', 'parent branch: '): + for x in ("checkout of branch: ", "parent branch: "): if line.startswith(x): repo = line.split(x)[1] if cls._is_local_repository(repo): @@ -99,7 +99,7 @@ def get_remote_url(cls, location): @classmethod def get_revision(cls, location): - revision = cls.run_command(['revno'], show_stdout=False, cwd=location) + revision = cls.run_command(["revno"], show_stdout=False, cwd=location) return revision.splitlines()[-1] @classmethod diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py index 87e7588218d..2f0e870e869 100644 --- a/src/pip/_internal/vcs/git.py +++ b/src/pip/_internal/vcs/git.py @@ -13,11 +13,7 @@ from pip._internal.utils.misc import display_path, make_command from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.vcs.versioncontrol import ( - RemoteNotFoundError, - VersionControl, - vcs, -) +from pip._internal.vcs.versioncontrol import RemoteNotFoundError, VersionControl, vcs if MYPY_CHECK_RUNNING: from typing import Optional, Tuple @@ -32,7 +28,7 @@ logger = logging.getLogger(__name__) -HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$') +HASH_REGEX = re.compile("^[a-fA-F0-9]{40}$") def looks_like_hash(sha): @@ -40,30 +36,30 @@ def looks_like_hash(sha): class Git(VersionControl): - name = 'git' - dirname = '.git' - repo_name = 'clone' - schemes = ('git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file') + name = "git" + dirname = ".git" + repo_name = "clone" + schemes = ("git", "git+http", "git+https", "git+ssh", "git+git", "git+file") # Prevent the user's environment variables from interfering with pip: # https://github.com/pypa/pip/issues/1130 - unset_environ = ('GIT_DIR', 'GIT_WORK_TREE') - default_arg_rev = 'HEAD' + 
unset_environ = ("GIT_DIR", "GIT_WORK_TREE") + default_arg_rev = "HEAD" @staticmethod def get_base_rev_args(rev): return [rev] def get_git_version(self): - VERSION_PFX = 'git version ' - version = self.run_command(['version'], show_stdout=False) + VERSION_PFX = "git version " + version = self.run_command(["version"], show_stdout=False) if version.startswith(VERSION_PFX): version = version[len(VERSION_PFX) :].split()[0] else: - version = '' + version = "" # get first 3 positions of the git version because # on windows it is x.y.z.windows.t, and this parses as # LegacyVersion which always smaller than a Version. - version = '.'.join(version.split('.')[:3]) + version = ".".join(version.split(".")[:3]) return parse_version(version) @classmethod @@ -76,27 +72,27 @@ def get_current_branch(cls, location): # HEAD rather than a symbolic ref. In addition, the -q causes the # command to exit with status code 1 instead of 128 in this case # and to suppress the message to stderr. - args = ['symbolic-ref', '-q', 'HEAD'] + args = ["symbolic-ref", "-q", "HEAD"] output = cls.run_command( args, extra_ok_returncodes=(1,), show_stdout=False, cwd=location ) ref = output.strip() - if ref.startswith('refs/heads/'): - return ref[len('refs/heads/') :] + if ref.startswith("refs/heads/"): + return ref[len("refs/heads/") :] return None def export(self, location, url): # type: (str, HiddenText) -> None """Export the Git repository at the url to the destination location""" - if not location.endswith('/'): - location = location + '/' + if not location.endswith("/"): + location = location + "/" with TempDirectory(kind="export") as temp_dir: self.unpack(temp_dir.path, url=url) self.run_command( - ['checkout-index', '-a', '-f', '--prefix', location], + ["checkout-index", "-a", "-f", "--prefix", location], show_stdout=False, cwd=temp_dir.path, ) @@ -113,7 +109,7 @@ def get_revision_sha(cls, dest, rev): """ # Pass rev to pre-filter the list. 
output = cls.run_command( - ['show-ref', rev], cwd=dest, show_stdout=False, on_returncode='ignore' + ["show-ref", rev], cwd=dest, show_stdout=False, on_returncode="ignore" ) refs = {} for line in output.strip().splitlines(): @@ -122,12 +118,12 @@ def get_revision_sha(cls, dest, rev): except ValueError: # Include the offending line to simplify troubleshooting if # this error ever occurs. - raise ValueError('unexpected show-ref line: {!r}'.format(line)) + raise ValueError("unexpected show-ref line: {!r}".format(line)) refs[ref] = sha - branch_ref = 'refs/remotes/origin/{}'.format(rev) - tag_ref = 'refs/tags/{}'.format(rev) + branch_ref = "refs/remotes/origin/{}".format(rev) + tag_ref = "refs/tags/{}".format(rev) sha = refs.get(branch_ref) if sha is not None: @@ -167,15 +163,15 @@ def resolve_revision(cls, dest, url, rev_options): "Did not find branch or tag '%s', assuming revision or ref.", rev ) - if not rev.startswith('refs/'): + if not rev.startswith("refs/"): return rev_options # If it looks like a ref, we have to fetch it explicitly. cls.run_command( - make_command('fetch', '-q', url, rev_options.to_args()), cwd=dest + make_command("fetch", "-q", url, rev_options.to_args()), cwd=dest ) # Change the revision to the SHA of the ref we fetched - sha = cls.get_revision(dest, rev='FETCH_HEAD') + sha = cls.get_revision(dest, rev="FETCH_HEAD") rev_options = rev_options.make_new(sha) return rev_options @@ -198,24 +194,24 @@ def is_commit_id_equal(cls, dest, name): def fetch_new(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None rev_display = rev_options.to_display() - logger.info('Cloning %s%s to %s', url, rev_display, display_path(dest)) - self.run_command(make_command('clone', '-q', url, dest)) + logger.info("Cloning %s%s to %s", url, rev_display, display_path(dest)) + self.run_command(make_command("clone", "-q", url, dest)) if rev_options.rev: # Then a specific revision was requested. 
rev_options = self.resolve_revision(dest, url, rev_options) - branch_name = getattr(rev_options, 'branch_name', None) + branch_name = getattr(rev_options, "branch_name", None) if branch_name is None: # Only do a checkout if the current commit id doesn't match # the requested revision. if not self.is_commit_id_equal(dest, rev_options.rev): - cmd_args = make_command('checkout', '-q', rev_options.to_args()) + cmd_args = make_command("checkout", "-q", rev_options.to_args()) self.run_command(cmd_args, cwd=dest) elif self.get_current_branch(dest) != branch_name: # Then a specific branch was requested, and that branch # is not yet checked out. - track_branch = 'origin/{}'.format(branch_name) - cmd_args = ['checkout', '-b', branch_name, '--track', track_branch] + track_branch = "origin/{}".format(branch_name) + cmd_args = ["checkout", "-b", branch_name, "--track", track_branch] self.run_command(cmd_args, cwd=dest) #: repo may contain submodules @@ -223,8 +219,8 @@ def fetch_new(self, dest, url, rev_options): def switch(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None - self.run_command(make_command('config', 'remote.origin.url', url), cwd=dest) - cmd_args = make_command('checkout', '-q', rev_options.to_args()) + self.run_command(make_command("config", "remote.origin.url", url), cwd=dest) + cmd_args = make_command("checkout", "-q", rev_options.to_args()) self.run_command(cmd_args, cwd=dest) self.update_submodules(dest) @@ -232,14 +228,14 @@ def switch(self, dest, url, rev_options): def update(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None # First fetch changes from the default remote - if self.get_git_version() >= parse_version('1.9.0'): + if self.get_git_version() >= parse_version("1.9.0"): # fetch tags in addition to everything else - self.run_command(['fetch', '-q', '--tags'], cwd=dest) + self.run_command(["fetch", "-q", "--tags"], cwd=dest) else: - self.run_command(['fetch', '-q'], cwd=dest) + 
self.run_command(["fetch", "-q"], cwd=dest) # Then reset to wanted revision (maybe even origin/master) rev_options = self.resolve_revision(dest, url, rev_options) - cmd_args = make_command('reset', '--hard', '-q', rev_options.to_args()) + cmd_args = make_command("reset", "--hard", "-q", rev_options.to_args()) self.run_command(cmd_args, cwd=dest) #: update submodules self.update_submodules(dest) @@ -255,7 +251,7 @@ def get_remote_url(cls, location): # We need to pass 1 for extra_ok_returncodes since the command # exits with return code 1 if there are no matching lines. stdout = cls.run_command( - ['config', '--get-regexp', r'remote\..*\.url'], + ["config", "--get-regexp", r"remote\..*\.url"], extra_ok_returncodes=(1,), show_stdout=False, cwd=location, @@ -267,18 +263,18 @@ def get_remote_url(cls, location): raise RemoteNotFoundError for remote in remotes: - if remote.startswith('remote.origin.url '): + if remote.startswith("remote.origin.url "): found_remote = remote break - url = found_remote.split(' ')[1] + url = found_remote.split(" ")[1] return url.strip() @classmethod def get_revision(cls, location, rev=None): if rev is None: - rev = 'HEAD' + rev = "HEAD" current_rev = cls.run_command( - ['rev-parse', rev], show_stdout=False, cwd=location + ["rev-parse", rev], show_stdout=False, cwd=location ) return current_rev.strip() @@ -286,14 +282,14 @@ def get_revision(cls, location, rev=None): def get_subdirectory(cls, location): # find the repo root git_dir = cls.run_command( - ['rev-parse', '--git-dir'], show_stdout=False, cwd=location + ["rev-parse", "--git-dir"], show_stdout=False, cwd=location ).strip() if not os.path.isabs(git_dir): git_dir = os.path.join(location, git_dir) - root_dir = os.path.join(git_dir, '..') + root_dir = os.path.join(git_dir, "..") # find setup.py orig_location = location - while not os.path.exists(os.path.join(location, 'setup.py')): + while not os.path.exists(os.path.join(location, "setup.py")): last_location = location location = 
os.path.dirname(location) if location == last_location: @@ -322,22 +318,22 @@ def get_url_rev_and_auth(cls, url): # Works around an apparent Git bug # (see https://article.gmane.org/gmane.comp.version-control.git/146500) scheme, netloc, path, query, fragment = urlsplit(url) - if scheme.endswith('file'): - initial_slashes = path[: -len(path.lstrip('/'))] + if scheme.endswith("file"): + initial_slashes = path[: -len(path.lstrip("/"))] newpath = initial_slashes + urllib_request.url2pathname(path).replace( - '\\', '/' - ).lstrip('/') + "\\", "/" + ).lstrip("/") url = urlunsplit((scheme, netloc, newpath, query, fragment)) - after_plus = scheme.find('+') + 1 + after_plus = scheme.find("+") + 1 url = scheme[:after_plus] + urlunsplit( (scheme[after_plus:], netloc, newpath, query, fragment) ) - if '://' not in url: - assert 'file:' not in url - url = url.replace('git+', 'git+ssh://') + if "://" not in url: + assert "file:" not in url + url = url.replace("git+", "git+ssh://") url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url) - url = url.replace('ssh://', '') + url = url.replace("ssh://", "") else: url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url) @@ -345,10 +341,10 @@ def get_url_rev_and_auth(cls, url): @classmethod def update_submodules(cls, location): - if not os.path.exists(os.path.join(location, '.gitmodules')): + if not os.path.exists(os.path.join(location, ".gitmodules")): return cls.run_command( - ['submodule', 'update', '--init', '--recursive', '-q'], cwd=location + ["submodule", "update", "--init", "--recursive", "-q"], cwd=location ) @classmethod @@ -357,7 +353,7 @@ def controls_location(cls, location): return True try: r = cls.run_command( - ['rev-parse'], cwd=location, show_stdout=False, on_returncode='ignore' + ["rev-parse"], cwd=location, show_stdout=False, on_returncode="ignore" ) return not r except BadCommand: diff --git a/src/pip/_internal/vcs/mercurial.py b/src/pip/_internal/vcs/mercurial.py index fd179fa65d9..12657f069ff 100644 
--- a/src/pip/_internal/vcs/mercurial.py +++ b/src/pip/_internal/vcs/mercurial.py @@ -20,10 +20,10 @@ class Mercurial(VersionControl): - name = 'hg' - dirname = '.hg' - repo_name = 'clone' - schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http') + name = "hg" + dirname = ".hg" + repo_name = "clone" + schemes = ("hg", "hg+http", "hg+https", "hg+ssh", "hg+static-http") @staticmethod def get_base_rev_args(rev): @@ -36,41 +36,41 @@ def export(self, location, url): self.unpack(temp_dir.path, url=url) self.run_command( - ['archive', location], show_stdout=False, cwd=temp_dir.path + ["archive", location], show_stdout=False, cwd=temp_dir.path ) def fetch_new(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None rev_display = rev_options.to_display() - logger.info('Cloning hg %s%s to %s', url, rev_display, display_path(dest)) - self.run_command(make_command('clone', '--noupdate', '-q', url, dest)) - self.run_command(make_command('update', '-q', rev_options.to_args()), cwd=dest) + logger.info("Cloning hg %s%s to %s", url, rev_display, display_path(dest)) + self.run_command(make_command("clone", "--noupdate", "-q", url, dest)) + self.run_command(make_command("update", "-q", rev_options.to_args()), cwd=dest) def switch(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None - repo_config = os.path.join(dest, self.dirname, 'hgrc') + repo_config = os.path.join(dest, self.dirname, "hgrc") config = configparser.RawConfigParser() try: config.read(repo_config) - config.set('paths', 'default', url.secret) - with open(repo_config, 'w') as config_file: + config.set("paths", "default", url.secret) + with open(repo_config, "w") as config_file: config.write(config_file) except (OSError, configparser.NoSectionError) as exc: - logger.warning('Could not switch Mercurial repository to %s: %s', url, exc) + logger.warning("Could not switch Mercurial repository to %s: %s", url, exc) else: - cmd_args = make_command('update', '-q', 
rev_options.to_args()) + cmd_args = make_command("update", "-q", rev_options.to_args()) self.run_command(cmd_args, cwd=dest) def update(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None - self.run_command(['pull', '-q'], cwd=dest) - cmd_args = make_command('update', '-q', rev_options.to_args()) + self.run_command(["pull", "-q"], cwd=dest) + cmd_args = make_command("update", "-q", rev_options.to_args()) self.run_command(cmd_args, cwd=dest) @classmethod def get_remote_url(cls, location): url = cls.run_command( - ['showconfig', 'paths.default'], show_stdout=False, cwd=location + ["showconfig", "paths.default"], show_stdout=False, cwd=location ).strip() if cls._is_local_repository(url): url = path_to_url(url) @@ -82,7 +82,7 @@ def get_revision(cls, location): Return the repository-local changeset revision number, as an integer. """ current_revision = cls.run_command( - ['parents', '--template={rev}'], show_stdout=False, cwd=location + ["parents", "--template={rev}"], show_stdout=False, cwd=location ).strip() return current_revision @@ -93,7 +93,7 @@ def get_requirement_revision(cls, location): hexadecimal string """ current_rev_hash = cls.run_command( - ['parents', '--template={node}'], show_stdout=False, cwd=location + ["parents", "--template={node}"], show_stdout=False, cwd=location ).strip() return current_rev_hash diff --git a/src/pip/_internal/vcs/subversion.py b/src/pip/_internal/vcs/subversion.py index 50a38962912..f296f361b2d 100644 --- a/src/pip/_internal/vcs/subversion.py +++ b/src/pip/_internal/vcs/subversion.py @@ -18,7 +18,7 @@ _svn_xml_url_re = re.compile('url="([^"]+)"') _svn_rev_re = re.compile(r'committed-rev="(\d+)"') _svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"') -_svn_info_xml_url_re = re.compile(r'(.*)') +_svn_info_xml_url_re = re.compile(r"(.*)") if MYPY_CHECK_RUNNING: @@ -31,10 +31,10 @@ class Subversion(VersionControl): - name = 'svn' - dirname = '.svn' - repo_name = 'checkout' - schemes = ('svn', 'svn+ssh', 
'svn+http', 'svn+https', 'svn+svn') + name = "svn" + dirname = ".svn" + repo_name = "checkout" + schemes = ("svn", "svn+ssh", "svn+http", "svn+https", "svn+svn") @classmethod def should_add_vcs_url_prefix(cls, remote_url): @@ -42,7 +42,7 @@ def should_add_vcs_url_prefix(cls, remote_url): @staticmethod def get_base_rev_args(rev): - return ['-r', rev] + return ["-r", rev] @classmethod def get_revision(cls, location): @@ -57,7 +57,7 @@ def get_revision(cls, location): dirs[:] = [] continue # no sense walking uncontrolled subdirs dirs.remove(cls.dirname) - entries_fn = os.path.join(base, cls.dirname, 'entries') + entries_fn = os.path.join(base, cls.dirname, "entries") if not os.path.exists(entries_fn): # FIXME: should we warn? continue @@ -65,7 +65,7 @@ def get_revision(cls, location): dirurl, localrev = cls._get_svn_url_rev(base) if base == location: - base = dirurl + '/' # save the root url + base = dirurl + "/" # save the root url elif not dirurl or not dirurl.startswith(base): dirs[:] = [] continue # not part of the same svn tree, skip it @@ -78,7 +78,7 @@ def get_netloc_and_auth(cls, netloc, scheme): This override allows the auth information to be passed to svn via the --username and --password options instead of via the URL. """ - if scheme == 'ssh': + if scheme == "ssh": # The --username and --password options can't be used for # svn+ssh URLs, so keep the auth information in the URL. 
return super(Subversion, cls).get_netloc_and_auth(netloc, scheme) @@ -90,8 +90,8 @@ def get_url_rev_and_auth(cls, url): # type: (str) -> Tuple[str, Optional[str], AuthInfo] # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it url, rev, user_pass = super(Subversion, cls).get_url_rev_and_auth(url) - if url.startswith('ssh://'): - url = 'svn+' + url + if url.startswith("ssh://"): + url = "svn+" + url return url, rev, user_pass @staticmethod @@ -99,9 +99,9 @@ def make_rev_args(username, password): # type: (Optional[str], Optional[HiddenText]) -> CommandArgs extra_args = [] # type: CommandArgs if username: - extra_args += ['--username', username] + extra_args += ["--username", username] if password: - extra_args += ['--password', password] + extra_args += ["--password", password] return extra_args @@ -111,7 +111,7 @@ def get_remote_url(cls, location): # setup.py we have to look up in the location until we find a real # setup.py orig_location = location - while not os.path.exists(os.path.join(location, 'setup.py')): + while not os.path.exists(os.path.join(location, "setup.py")): last_location = location location = os.path.dirname(location) if location == last_location: @@ -130,22 +130,22 @@ def get_remote_url(cls, location): def _get_svn_url_rev(cls, location): from pip._internal.exceptions import InstallationError - entries_path = os.path.join(location, cls.dirname, 'entries') + entries_path = os.path.join(location, cls.dirname, "entries") if os.path.exists(entries_path): with open(entries_path) as f: data = f.read() else: # subversion >= 1.7 does not have the 'entries' file - data = '' + data = "" - if data.startswith('8') or data.startswith('9') or data.startswith('10'): - data = list(map(str.splitlines, data.split('\n\x0c\n'))) + if data.startswith("8") or data.startswith("9") or data.startswith("10"): + data = list(map(str.splitlines, data.split("\n\x0c\n"))) del data[0][0] # get rid of the '8' url = data[0][3] revs = [int(d[9]) for d in data if 
len(d) > 9 and d[9]] + [0] - elif data.startswith('= 1.8 runs in non-interactive mode if @@ -266,7 +266,7 @@ def get_remote_call_options(self): # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip # can't safely add the option if the SVN version is < 1.8 (or unknown). if svn_version >= (1, 8): - return ['--force-interactive'] + return ["--force-interactive"] return [] @@ -275,14 +275,14 @@ def export(self, location, url): """Export the svn repository at the url to the destination location""" url, rev_options = self.get_url_rev_options(url) - logger.info('Exporting svn repository %s to %s', url, location) + logger.info("Exporting svn repository %s to %s", url, location) with indent_log(): if os.path.exists(location): # Subversion doesn't like to check out over an existing # directory --force fixes this, but was only added in svn 1.5 rmtree(location) cmd_args = make_command( - 'export', + "export", self.get_remote_call_options(), rev_options.to_args(), url, @@ -293,10 +293,10 @@ def export(self, location, url): def fetch_new(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None rev_display = rev_options.to_display() - logger.info('Checking out %s%s to %s', url, rev_display, display_path(dest)) + logger.info("Checking out %s%s to %s", url, rev_display, display_path(dest)) cmd_args = make_command( - 'checkout', - '-q', + "checkout", + "-q", self.get_remote_call_options(), rev_options.to_args(), url, @@ -307,14 +307,14 @@ def fetch_new(self, dest, url, rev_options): def switch(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None cmd_args = make_command( - 'switch', self.get_remote_call_options(), rev_options.to_args(), url, dest + "switch", self.get_remote_call_options(), rev_options.to_args(), url, dest ) self.run_command(cmd_args) def update(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None cmd_args = make_command( - 'update', self.get_remote_call_options(), rev_options.to_args(), 
dest + "update", self.get_remote_call_options(), rev_options.to_args(), dest ) self.run_command(cmd_args) diff --git a/src/pip/_internal/vcs/versioncontrol.py b/src/pip/_internal/vcs/versioncontrol.py index b097f352c7e..a1b1a84f877 100644 --- a/src/pip/_internal/vcs/versioncontrol.py +++ b/src/pip/_internal/vcs/versioncontrol.py @@ -43,7 +43,7 @@ AuthInfo = Tuple[Optional[str], Optional[str]] -__all__ = ['vcs'] +__all__ = ["vcs"] logger = logging.getLogger(__name__) @@ -57,7 +57,7 @@ def is_url(name): scheme = get_url_scheme(name) if scheme is None: return False - return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes + return scheme in ["http", "https", "file", "ftp"] + vcs.all_schemes def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None): @@ -69,9 +69,9 @@ def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None): project_name: the (unescaped) project name. """ egg_project_name = pkg_resources.to_filename(project_name) - req = '{}@{}#egg={}'.format(repo_url, rev, egg_project_name) + req = "{}@{}#egg={}".format(repo_url, rev, egg_project_name) if subdir: - req += '&subdirectory={}'.format(subdir) + req += "&subdirectory={}".format(subdir) return req @@ -111,7 +111,7 @@ def __init__( self.branch_name = None # type: Optional[str] def __repr__(self): - return ''.format(self.vc_class.name, self.rev) + return "".format(self.vc_class.name, self.rev) @property def arg_rev(self): @@ -137,9 +137,9 @@ def to_args(self): def to_display(self): # type: () -> str if not self.rev: - return '' + return "" - return ' (to revision {})'.format(self.rev) + return " (to revision {})".format(self.rev) def make_new(self, rev): # type: (str) -> RevOptions @@ -154,7 +154,7 @@ def make_new(self, rev): class VcsSupport(object): _registry = {} # type: Dict[str, VersionControl] - schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] + schemes = ["ssh", "git", "hg", "bzr", "sftp", "svn"] def __init__(self): # type: () -> None @@ -162,7 +162,7 @@ def 
__init__(self): # systems urllib_parse.uses_netloc.extend(self.schemes) # Python >= 2.7.4, 3.3 doesn't have uses_fragment - if getattr(urllib_parse, 'uses_fragment', None): + if getattr(urllib_parse, "uses_fragment", None): urllib_parse.uses_fragment.extend(self.schemes) super(VcsSupport, self).__init__() @@ -189,12 +189,12 @@ def all_schemes(self): def register(self, cls): # type: (Type[VersionControl]) -> None - if not hasattr(cls, 'name'): - logger.warning('Cannot register VCS %s', cls.__name__) + if not hasattr(cls, "name"): + logger.warning("Cannot register VCS %s", cls.__name__) return if cls.name not in self._registry: self._registry[cls.name] = cls() - logger.debug('Registered VCS backend: %s', cls.name) + logger.debug("Registered VCS backend: %s", cls.name) def unregister(self, name): # type: (str) -> None @@ -210,7 +210,7 @@ def get_backend_for_dir(self, location): for vcs_backend in self._registry.values(): if vcs_backend.controls_location(location): logger.debug( - 'Determine that %s uses VCS: %s', location, vcs_backend.name + "Determine that %s uses VCS: %s", location, vcs_backend.name ) return vcs_backend return None @@ -228,9 +228,9 @@ def get_backend(self, name): class VersionControl(object): - name = '' - dirname = '' - repo_name = '' + name = "" + dirname = "" + repo_name = "" # List of supported schemes for this Version Control schemes = () # type: Tuple[str, ...] # Iterable of environment variable names to pass to call_subprocess(). @@ -243,7 +243,7 @@ def should_add_vcs_url_prefix(cls, remote_url): Return whether the vcs prefix (e.g. "git+") should be added to a repository's remote url when used in a requirement. 
""" - return not remote_url.lower().startswith('{}:'.format(cls.name)) + return not remote_url.lower().startswith("{}:".format(cls.name)) @classmethod def get_subdirectory(cls, repo_dir): @@ -277,7 +277,7 @@ def get_src_requirement(cls, repo_dir, project_name): return None if cls.should_add_vcs_url_prefix(repo_url): - repo_url = '{}+{}'.format(cls.name, repo_url) + repo_url = "{}+{}".format(cls.name, repo_url) revision = cls.get_requirement_revision(repo_dir) subdir = cls.get_subdirectory(repo_dir) @@ -356,19 +356,19 @@ def get_url_rev_and_auth(cls, url): Returns: (url, rev, (username, password)). """ scheme, netloc, path, query, frag = urllib_parse.urlsplit(url) - if '+' not in scheme: + if "+" not in scheme: raise ValueError( "Sorry, {!r} is a malformed VCS url. " "The format is +://, " "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url) ) # Remove the vcs prefix. - scheme = scheme.split('+', 1)[1] + scheme = scheme.split("+", 1)[1] netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme) rev = None - if '@' in path: - path, rev = path.rsplit('@', 1) - url = urllib_parse.urlunsplit((scheme, netloc, path, query, '')) + if "@" in path: + path, rev = path.rsplit("@", 1) + url = urllib_parse.urlunsplit((scheme, netloc, path, query, "")) return url, rev, user_pass @staticmethod @@ -402,7 +402,7 @@ def normalize_url(url): Normalize a URL for comparison by unquoting it and removing any trailing slash. 
""" - return urllib_parse.unquote(url).rstrip('/') + return urllib_parse.unquote(url).rstrip("/") @classmethod def compare_urls(cls, url1, url2): @@ -475,67 +475,67 @@ def obtain(self, dest, url): existing_url = self.get_remote_url(dest) if self.compare_urls(existing_url, url.secret): logger.debug( - '%s in %s exists, and has correct URL (%s)', + "%s in %s exists, and has correct URL (%s)", self.repo_name.title(), display_path(dest), url, ) if not self.is_commit_id_equal(dest, rev_options.rev): logger.info( - 'Updating %s %s%s', + "Updating %s %s%s", display_path(dest), self.repo_name, rev_display, ) self.update(dest, url, rev_options) else: - logger.info('Skipping because already up-to-date.') + logger.info("Skipping because already up-to-date.") return logger.warning( - '%s %s in %s exists with URL %s', + "%s %s in %s exists with URL %s", self.name, self.repo_name, display_path(dest), existing_url, ) - prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b')) + prompt = ("(s)witch, (i)gnore, (w)ipe, (b)ackup ", ("s", "i", "w", "b")) else: logger.warning( - 'Directory %s already exists, and is not a %s %s.', + "Directory %s already exists, and is not a %s %s.", dest, self.name, self.repo_name, ) # https://github.com/python/mypy/issues/1174 prompt = ( # type: ignore - '(i)gnore, (w)ipe, (b)ackup ', - ('i', 'w', 'b'), + "(i)gnore, (w)ipe, (b)ackup ", + ("i", "w", "b"), ) - logger.warning('The plan is to install the %s repository %s', self.name, url) - response = ask_path_exists('What to do? %s' % prompt[0], prompt[1]) + logger.warning("The plan is to install the %s repository %s", self.name, url) + response = ask_path_exists("What to do? 
%s" % prompt[0], prompt[1]) - if response == 'a': + if response == "a": sys.exit(-1) - if response == 'w': - logger.warning('Deleting %s', display_path(dest)) + if response == "w": + logger.warning("Deleting %s", display_path(dest)) rmtree(dest) self.fetch_new(dest, url, rev_options) return - if response == 'b': + if response == "b": dest_dir = backup_dir(dest) - logger.warning('Backing up %s to %s', display_path(dest), dest_dir) + logger.warning("Backing up %s to %s", display_path(dest), dest_dir) shutil.move(dest, dest_dir) self.fetch_new(dest, url, rev_options) return # Do nothing if the response is "i". - if response == 's': + if response == "s": logger.info( - 'Switching %s %s to %s%s', + "Switching %s %s to %s%s", self.repo_name, display_path(dest), url, @@ -578,7 +578,7 @@ def run_command( cmd, # type: Union[List[str], CommandArgs] show_stdout=True, # type: bool cwd=None, # type: Optional[str] - on_returncode='raise', # type: str + on_returncode="raise", # type: str extra_ok_returncodes=None, # type: Optional[Iterable[int]] command_desc=None, # type: Optional[str] extra_environ=None, # type: Optional[Mapping[str, Any]] @@ -608,9 +608,9 @@ def run_command( # In other words, the VCS executable isn't available if e.errno == errno.ENOENT: raise BadCommand( - 'Cannot find command %r - do you have ' - '%r installed and in your ' - 'PATH?' % (cls.name, cls.name) + "Cannot find command %r - do you have " + "%r installed and in your " + "PATH?" % (cls.name, cls.name) ) else: raise # re-raise exception if a different error occurred @@ -621,7 +621,7 @@ def is_repository_directory(cls, path): """ Return whether a directory path is a repository directory. 
""" - logger.debug('Checking in %s for %s (%s)...', path, cls.dirname, cls.name) + logger.debug("Checking in %s for %s (%s)...", path, cls.dirname, cls.name) return os.path.exists(os.path.join(path, cls.dirname)) @classmethod diff --git a/src/pip/_internal/wheel.py b/src/pip/_internal/wheel.py index e0b2c4d720f..aa1321bd76e 100644 --- a/src/pip/_internal/wheel.py +++ b/src/pip/_internal/wheel.py @@ -84,7 +84,7 @@ def normpath(src, p): - return os.path.relpath(src, p).replace(os.path.sep, '/') + return os.path.relpath(src, p).replace(os.path.sep, "/") def hash_file(path, blocksize=1 << 20): @@ -92,7 +92,7 @@ def hash_file(path, blocksize=1 << 20): """Return (hash, length) for path using hashlib.sha256()""" h = hashlib.sha256() length = 0 - with open(path, 'rb') as f: + with open(path, "rb") as f: for block in read_chunks(f, size=blocksize): length += len(block) h.update(block) @@ -103,7 +103,7 @@ def rehash(path, blocksize=1 << 20): # type: (str, int) -> Tuple[str, str] """Return (encoded_digest, length) for path using hashlib.sha256()""" h, length = hash_file(path, blocksize) - digest = 'sha256=' + urlsafe_b64encode(h.digest()).decode('latin1').rstrip('=') + digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=") # unicode/str python2 issues return (digest, str(length)) # type: ignore @@ -112,10 +112,10 @@ def open_for_csv(name, mode): # type: (str, Text) -> IO if sys.version_info[0] < 3: nl = {} # type: Dict[str, Any] - bin = 'b' + bin = "b" else: - nl = {'newline': ''} # type: Dict[str, Any] - bin = '' + nl = {"newline": ""} # type: Dict[str, Any] + bin = "" return open(name, mode + bin, **nl) @@ -123,9 +123,9 @@ def replace_python_tag(wheelname, new_tag): # type: (str, str) -> str """Replace the Python tag in a wheel file name with a new value. 
""" - parts = wheelname.split('-') + parts = wheelname.split("-") parts[-3] = new_tag - return '-'.join(parts) + return "-".join(parts) def fix_script(path): @@ -134,14 +134,14 @@ def fix_script(path): Return True if file was changed.""" # XXX RECORD hashes will need to be updated if os.path.isfile(path): - with open(path, 'rb') as script: + with open(path, "rb") as script: firstline = script.readline() - if not firstline.startswith(b'#!python'): + if not firstline.startswith(b"#!python"): return False exename = sys.executable.encode(sys.getfilesystemencoding()) - firstline = b'#!' + exename + os.linesep.encode("ascii") + firstline = b"#!" + exename + os.linesep.encode("ascii") rest = script.read() - with open(path, 'wb') as script: + with open(path, "wb") as script: script.write(firstline) script.write(rest) return True @@ -163,8 +163,8 @@ def root_is_purelib(name, wheeldir): name_folded = name.replace("-", "_") for item in os.listdir(wheeldir): match = dist_info_re.match(item) - if match and match.group('name') == name_folded: - with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel: + if match and match.group("name") == name_folded: + with open(os.path.join(wheeldir, item, "WHEEL")) as wheel: for line in wheel: line = line.lower().rstrip() if line == "root-is-purelib: true": @@ -190,8 +190,8 @@ def get_entrypoints(filename): # get the entry points and then the script names entry_points = pkg_resources.EntryPoint.parse_map(data) - console = entry_points.get('console_scripts', {}) - gui = entry_points.get('gui_scripts', {}) + console = entry_points.get("console_scripts", {}) + gui = entry_points.get("gui_scripts", {}) def _split_ep(s): """get the string representation of EntryPoint, remove space and split @@ -303,7 +303,7 @@ def get_csv_rows_for_installed( installed_rows = [] # type: List[InstalledCSVRow] for row in old_csv_rows: if len(row) > 3: - logger.warning('RECORD line has more than three elements: {}'.format(row)) + logger.warning("RECORD line has more 
than three elements: {}".format(row)) # Make a copy because we are mutating the row. row = list(row) old_path = row[0] @@ -318,7 +318,7 @@ def get_csv_rows_for_installed( digest, length = rehash(f) installed_rows.append((normpath(f, lib_dir), digest, str(length))) for f in installed: - installed_rows.append((installed[f], '', '')) + installed_rows.append((installed[f], "", "")) return installed_rows @@ -363,9 +363,9 @@ def move_wheel_files( ) if root_is_purelib(name, wheeldir): - lib_dir = scheme['purelib'] + lib_dir = scheme["purelib"] else: - lib_dir = scheme['platlib'] + lib_dir = scheme["platlib"] info_dir = [] # type: List[str] data_dirs = [] @@ -383,7 +383,7 @@ def move_wheel_files( if pycompile: with captured_stdout() as stdout: with warnings.catch_warnings(): - warnings.filterwarnings('ignore') + warnings.filterwarnings("ignore") compileall.compile_dir(source, force=True, quiet=True) logger.debug(stdout.getvalue()) @@ -401,23 +401,23 @@ def clobber(source, dest, is_base, fixer=None, filter=None): for dir, subdirs, files in os.walk(source): basedir = dir[len(source) :].lstrip(os.path.sep) destdir = os.path.join(dest, basedir) - if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'): + if is_base and basedir.split(os.path.sep, 1)[0].endswith(".data"): continue for s in subdirs: destsubdir = os.path.join(dest, basedir, s) - if is_base and basedir == '' and destsubdir.endswith('.data'): + if is_base and basedir == "" and destsubdir.endswith(".data"): data_dirs.append(s) continue elif ( is_base - and s.endswith('.dist-info') + and s.endswith(".dist-info") and canonicalize_name(s).startswith(canonicalize_name(req.name)) ): assert not info_dir, ( - 'Multiple .dist-info directories: ' + "Multiple .dist-info directories: " + destsubdir - + ', ' - + ', '.join(info_dir) + + ", " + + ", ".join(info_dir) ) info_dir.append(destsubdir) for f in files: @@ -474,15 +474,15 @@ def clobber(source, dest, is_base, fixer=None, filter=None): assert info_dir, "%s 
.dist-info directory not found" % req # Get the defined entry points - ep_file = os.path.join(info_dir[0], 'entry_points.txt') + ep_file = os.path.join(info_dir[0], "entry_points.txt") console, gui = get_entrypoints(ep_file) def is_entrypoint_wrapper(name): # EP, EP.exe and EP-script.py are scripts generated for # entry point EP by setuptools - if name.lower().endswith('.exe'): + if name.lower().endswith(".exe"): matchname = name[:-4] - elif name.lower().endswith('-script.py'): + elif name.lower().endswith("-script.py"): matchname = name[:-10] elif name.lower().endswith(".pya"): matchname = name[:-4] @@ -496,14 +496,14 @@ def is_entrypoint_wrapper(name): filter = None for subdir in os.listdir(os.path.join(wheeldir, datadir)): fixer = None - if subdir == 'scripts': + if subdir == "scripts": fixer = fix_script filter = is_entrypoint_wrapper source = os.path.join(wheeldir, datadir, subdir) dest = scheme[subdir] clobber(source, dest, False, fixer=fixer, filter=filter) - maker = PipScriptMaker(None, scheme['scripts']) + maker = PipScriptMaker(None, scheme["scripts"]) # Ensure old scripts are overwritten. # See https://github.com/pypa/pip/issues/1800 @@ -512,7 +512,7 @@ def is_entrypoint_wrapper(name): # Ensure we don't generate any variants for scripts because this is almost # never what somebody wants. # See https://bitbucket.org/pypa/distlib/issue/35/ - maker.variants = {''} + maker.variants = {""} # This is required because otherwise distlib creates scripts that are not # executable. @@ -554,40 +554,40 @@ def is_entrypoint_wrapper(name): # DEFAULT # - The default behavior is to install pip, pipX, pipX.Y, easy_install # and easy_install-X.Y. 
- pip_script = console.pop('pip', None) + pip_script = console.pop("pip", None) if pip_script: if "ENSUREPIP_OPTIONS" not in os.environ: - scripts_to_generate.append('pip = ' + pip_script) + scripts_to_generate.append("pip = " + pip_script) if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": - scripts_to_generate.append('pip%s = %s' % (sys.version_info[0], pip_script)) + scripts_to_generate.append("pip%s = %s" % (sys.version_info[0], pip_script)) scripts_to_generate.append( - 'pip%s = %s' % (get_major_minor_version(), pip_script) + "pip%s = %s" % (get_major_minor_version(), pip_script) ) # Delete any other versioned pip entry points - pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] + pip_ep = [k for k in console if re.match(r"pip(\d(\.\d)?)?$", k)] for k in pip_ep: del console[k] - easy_install_script = console.pop('easy_install', None) + easy_install_script = console.pop("easy_install", None) if easy_install_script: if "ENSUREPIP_OPTIONS" not in os.environ: - scripts_to_generate.append('easy_install = ' + easy_install_script) + scripts_to_generate.append("easy_install = " + easy_install_script) scripts_to_generate.append( - 'easy_install-%s = %s' % (get_major_minor_version(), easy_install_script) + "easy_install-%s = %s" % (get_major_minor_version(), easy_install_script) ) # Delete any other versioned easy_install entry points easy_install_ep = [ - k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) + k for k in console if re.match(r"easy_install(-\d\.\d)?$", k) ] for k in easy_install_ep: del console[k] # Generate the console and GUI entry points specified in the wheel - scripts_to_generate.extend('%s = %s' % kv for kv in console.items()) + scripts_to_generate.extend("%s = %s" % kv for kv in console.items()) - gui_scripts_to_generate = ['%s = %s' % kv for kv in gui.items()] + gui_scripts_to_generate = ["%s = %s" % kv for kv in gui.items()] generated_console_scripts = [] # type: List[str] @@ -595,7 +595,7 @@ def 
is_entrypoint_wrapper(name): generated_console_scripts = maker.make_multiple(scripts_to_generate) generated.extend(generated_console_scripts) - generated.extend(maker.make_multiple(gui_scripts_to_generate, {'gui': True})) + generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True})) except MissingCallableSuffix as e: entry = e.args[0] raise InstallationError( @@ -611,18 +611,18 @@ def is_entrypoint_wrapper(name): logger.warning(msg) # Record pip as the installer - installer = os.path.join(info_dir[0], 'INSTALLER') - temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip') - with open(temp_installer, 'wb') as installer_file: - installer_file.write(b'pip\n') + installer = os.path.join(info_dir[0], "INSTALLER") + temp_installer = os.path.join(info_dir[0], "INSTALLER.pip") + with open(temp_installer, "wb") as installer_file: + installer_file.write(b"pip\n") shutil.move(temp_installer, installer) generated.append(installer) # Record details of all files installed - record = os.path.join(info_dir[0], 'RECORD') - temp_record = os.path.join(info_dir[0], 'RECORD.pip') - with open_for_csv(record, 'r') as record_in: - with open_for_csv(temp_record, 'w+') as record_out: + record = os.path.join(info_dir[0], "RECORD") + temp_record = os.path.join(info_dir[0], "RECORD.pip") + with open_for_csv(record, "r") as record_in: + with open_for_csv(temp_record, "w+") as record_out: reader = csv.reader(record_in) outrows = get_csv_rows_for_installed( reader, @@ -648,11 +648,11 @@ def wheel_version(source_dir): try: dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0] - wheel_data = dist.get_metadata('WHEEL') + wheel_data = dist.get_metadata("WHEEL") wheel_data = Parser().parsestr(wheel_data) - version = wheel_data['Wheel-Version'].strip() - version = tuple(map(int, version.split('.'))) + version = wheel_data["Wheel-Version"].strip() + version = tuple(map(int, version.split("."))) return version except Exception: return None @@ -677,11 +677,11 @@ def 
check_compatibility(version, name): if version[0] > VERSION_COMPATIBLE[0]: raise UnsupportedWheel( "%s's Wheel-Version (%s) is not compatible with this version " - "of pip" % (name, '.'.join(map(str, version))) + "of pip" % (name, ".".join(map(str, version))) ) elif version > VERSION_COMPATIBLE: logger.warning( - 'Installing from a newer Wheel-Version (%s)', '.'.join(map(str, version)) + "Installing from a newer Wheel-Version (%s)", ".".join(map(str, version)) ) @@ -692,7 +692,7 @@ def format_tag(file_tag): :param file_tag: A 3-tuple of tags (python_tag, abi_tag, platform_tag). """ - return '-'.join(file_tag) + return "-".join(file_tag) class Wheel(object): @@ -717,14 +717,14 @@ def __init__(self, filename): if not wheel_info: raise InvalidWheelFilename("%s is not a valid wheel filename." % filename) self.filename = filename - self.name = wheel_info.group('name').replace('_', '-') + self.name = wheel_info.group("name").replace("_", "-") # we'll assume "_" means "-" due to wheel naming scheme # (https://github.com/pypa/pip/issues/1150) - self.version = wheel_info.group('ver').replace('_', '-') - self.build_tag = wheel_info.group('build') - self.pyversions = wheel_info.group('pyver').split('.') - self.abis = wheel_info.group('abi').split('.') - self.plats = wheel_info.group('plat').split('.') + self.version = wheel_info.group("ver").replace("_", "-") + self.build_tag = wheel_info.group("build") + self.pyversions = wheel_info.group("pyver").split(".") + self.abis = wheel_info.group("abi").split(".") + self.plats = wheel_info.group("plat").split(".") # All the tag combinations from this file self.file_tags = { @@ -766,7 +766,7 @@ def supported(self, tags): def _contains_egg_info( - s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I) + s, _egg_info_re=re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.I) ): """Determine whether the string looks like an egg_info. 
@@ -797,7 +797,7 @@ def should_use_ephemeral_cache( return None if req.is_wheel: if not should_unpack: - logger.info('Skipping %s, due to already being wheel.', req.name) + logger.info("Skipping %s, due to already being wheel.", req.name) return None if not should_unpack: # i.e. pip wheel, not pip install; @@ -840,16 +840,16 @@ def format_command_result( Format command information for logging. """ command_desc = format_command_args(command_args) - text = 'Command arguments: {}\n'.format(command_desc) + text = "Command arguments: {}\n".format(command_desc) if not command_output: - text += 'Command output: None' + text += "Command output: None" elif logger.getEffectiveLevel() > logging.DEBUG: - text += 'Command output: [use --verbose to show]' + text += "Command output: [use --verbose to show]" else: - if not command_output.endswith('\n'): - command_output += '\n' - text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER) + if not command_output.endswith("\n"): + command_output += "\n" + text += "Command output:\n{}{}".format(command_output, LOG_DIVIDER) return text @@ -868,15 +868,15 @@ def get_legacy_build_wheel_path( # Sort for determinism. 
names = sorted(names) if not names: - msg = ('Legacy build of wheel for {!r} created no files.\n').format(req.name) + msg = ("Legacy build of wheel for {!r} created no files.\n").format(req.name) msg += format_command_result(command_args, command_output) logger.warning(msg) return None if len(names) > 1: msg = ( - 'Legacy build of wheel for {!r} created more than one file.\n' - 'Filenames (choosing first): {}\n' + "Legacy build of wheel for {!r} created more than one file.\n" + "Filenames (choosing first): {}\n" ).format(req.name, names) msg += format_command_result(command_args, command_output) logger.warning(msg) @@ -938,13 +938,13 @@ def _build_one_inside_env(self, req, output_dir, python_tag=None): wheel_hash, length = hash_file(wheel_path) shutil.move(wheel_path, dest_path) logger.info( - 'Created wheel for %s: filename=%s size=%d sha256=%s', + "Created wheel for %s: filename=%s size=%d sha256=%s", req.name, wheel_name, length, wheel_hash.hexdigest(), ) - logger.info('Stored in directory: %s', output_dir) + logger.info("Stored in directory: %s", output_dir) return dest_path except Exception: pass @@ -972,13 +972,13 @@ def _build_one_pep517(self, req, tempd, python_tag=None): if self.build_options: # PEP 517 does not support --build-options logger.error( - 'Cannot build wheel for %s using PEP 517 when ' - '--build-options is present' % (req.name,) + "Cannot build wheel for %s using PEP 517 when " + "--build-options is present" % (req.name,) ) return None try: - req.spin_message = 'Building wheel for %s (PEP 517)' % (req.name,) - logger.debug('Destination directory: %s', tempd) + req.spin_message = "Building wheel for %s (PEP 517)" % (req.name,) + logger.debug("Destination directory: %s", tempd) wheel_name = req.pep517_backend.build_wheel( tempd, metadata_directory=req.metadata_directory ) @@ -993,7 +993,7 @@ def _build_one_pep517(self, req, tempd, python_tag=None): # Reassign to simplify the return at the end of function wheel_name = new_name except Exception: 
- logger.error('Failed building wheel for %s', req.name) + logger.error("Failed building wheel for %s", req.name) return None return os.path.join(tempd, wheel_name) @@ -1004,10 +1004,10 @@ def _build_one_legacy(self, req, tempd, python_tag=None): """ base_args = self._base_setup_args(req) - spin_message = 'Building wheel for %s (setup.py)' % (req.name,) + spin_message = "Building wheel for %s (setup.py)" % (req.name,) with open_spinner(spin_message) as spinner: - logger.debug('Destination directory: %s', tempd) - wheel_args = base_args + ['bdist_wheel', '-d', tempd] + self.build_options + logger.debug("Destination directory: %s", tempd) + wheel_args = base_args + ["bdist_wheel", "-d", tempd] + self.build_options if python_tag is not None: wheel_args += ["--python-tag", python_tag] @@ -1018,7 +1018,7 @@ def _build_one_legacy(self, req, tempd, python_tag=None): ) except Exception: spinner.finish("error") - logger.error('Failed building wheel for %s', req.name) + logger.error("Failed building wheel for %s", req.name) return None names = os.listdir(tempd) wheel_path = get_legacy_build_wheel_path( @@ -1033,13 +1033,13 @@ def _build_one_legacy(self, req, tempd, python_tag=None): def _clean_one(self, req): base_args = self._base_setup_args(req) - logger.info('Running setup.py clean for %s', req.name) - clean_args = base_args + ['clean', '--all'] + logger.info("Running setup.py clean for %s", req.name) + clean_args = base_args + ["clean", "--all"] try: call_subprocess(clean_args, cwd=req.source_dir) return True except Exception: - logger.error('Failed cleaning build dir for %s', req.name) + logger.error("Failed cleaning build dir for %s", req.name) return False def build( @@ -1095,8 +1095,8 @@ def build( # Build the wheels. 
logger.info( - 'Building wheels for collected packages: %s', - ', '.join([req.name for (req, _) in buildset]), + "Building wheels for collected packages: %s", + ", ".join([req.name for (req, _) in buildset]), ) python_tag = None @@ -1144,11 +1144,11 @@ def build( # notify success/failure if build_success: logger.info( - 'Successfully built %s', ' '.join([req.name for req in build_success]) + "Successfully built %s", " ".join([req.name for req in build_success]) ) if build_failure: logger.info( - 'Failed to build %s', ' '.join([req.name for req in build_failure]) + "Failed to build %s", " ".join([req.name for req in build_failure]) ) # Return a list of requirements that failed to build return build_failure diff --git a/tests/conftest.py b/tests/conftest.py index 35449d6201e..e29a39fab4b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -33,23 +33,23 @@ def pytest_addoption(parser): def pytest_collection_modifyitems(config, items): for item in items: - if not hasattr(item, 'module'): # e.g.: DoctestTextfile + if not hasattr(item, "module"): # e.g.: DoctestTextfile continue # Mark network tests as flaky - if item.get_closest_marker('network') is not None and "CI" in os.environ: + if item.get_closest_marker("network") is not None and "CI" in os.environ: item.add_marker(pytest.mark.flaky(reruns=3)) if six.PY3: if item.get_closest_marker( - 'incompatible_with_test_venv' + "incompatible_with_test_venv" ) and config.getoption("--use-venv"): - item.add_marker(pytest.mark.skip('Incompatible with test venv')) + item.add_marker(pytest.mark.skip("Incompatible with test venv")) if ( - item.get_closest_marker('incompatible_with_venv') + item.get_closest_marker("incompatible_with_venv") and sys.prefix != sys.base_prefix ): - item.add_marker(pytest.mark.skip('Incompatible with venv')) + item.add_marker(pytest.mark.skip("Incompatible with venv")) module_path = os.path.relpath( item.module.__file__, os.path.commonprefix([__file__, item.module.__file__]) @@ -68,7 +68,7 @@ def 
pytest_collection_modifyitems(config, items): raise RuntimeError("Unknown test type (filename = {})".format(module_path)) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def tmpdir_factory(request, tmpdir_factory): """ Modified `tmpdir_factory` session fixture that will automatically cleanup after itself. @@ -119,17 +119,17 @@ def isolate(tmpdir): fake_root = os.path.join(str(tmpdir), "fake-root") os.makedirs(fake_root) - if sys.platform == 'win32': + if sys.platform == "win32": # Note: this will only take effect in subprocesses... home_drive, home_path = os.path.splitdrive(home_dir) os.environ.update( - {'USERPROFILE': home_dir, 'HOMEDRIVE': home_drive, 'HOMEPATH': home_path} + {"USERPROFILE": home_dir, "HOMEDRIVE": home_drive, "HOMEPATH": home_path} ) for env_var, sub_path in ( - ('APPDATA', 'AppData/Roaming'), - ('LOCALAPPDATA', 'AppData/Local'), + ("APPDATA", "AppData/Roaming"), + ("LOCALAPPDATA", "AppData/Local"), ): - path = os.path.join(home_dir, *sub_path.split('/')) + path = os.path.join(home_dir, *sub_path.split("/")) os.environ[env_var] = path os.makedirs(path) else: @@ -160,7 +160,7 @@ def isolate(tmpdir): os.environ["PIP_DISABLE_PIP_VERSION_CHECK"] = "true" # Make sure tests don't share a requirements tracker. - os.environ.pop('PIP_REQ_TRACKER', None) + os.environ.pop("PIP_REQ_TRACKER", None) # FIXME: Windows... os.makedirs(os.path.join(home_dir, ".config", "git")) @@ -168,7 +168,7 @@ def isolate(tmpdir): fp.write(b"[user]\n\tname = pip\n\temail = pypa-dev@googlegroups.com\n") -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def pip_src(tmpdir_factory): def not_code_files_and_folders(path, names): # In the root directory... 
@@ -188,67 +188,67 @@ def not_code_files_and_folders(path, names): ignored.update(fnmatch.filter(names, pattern)) return ignored - pip_src = Path(str(tmpdir_factory.mktemp('pip_src'))).joinpath('pip_src') + pip_src = Path(str(tmpdir_factory.mktemp("pip_src"))).joinpath("pip_src") # Copy over our source tree so that each use is self contained shutil.copytree(SRC_DIR, pip_src.abspath, ignore=not_code_files_and_folders) return pip_src def _common_wheel_editable_install(tmpdir_factory, common_wheels, package): - wheel_candidates = list(common_wheels.glob('%s-*.whl' % package)) + wheel_candidates = list(common_wheels.glob("%s-*.whl" % package)) assert len(wheel_candidates) == 1, wheel_candidates - install_dir = Path(str(tmpdir_factory.mktemp(package))) / 'install' + install_dir = Path(str(tmpdir_factory.mktemp(package))) / "install" Wheel(wheel_candidates[0]).install_as_egg(install_dir) - (install_dir / 'EGG-INFO').rename(install_dir / '%s.egg-info' % package) + (install_dir / "EGG-INFO").rename(install_dir / "%s.egg-info" % package) assert compileall.compile_dir(str(install_dir), quiet=1) return install_dir -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def setuptools_install(tmpdir_factory, common_wheels): - return _common_wheel_editable_install(tmpdir_factory, common_wheels, 'setuptools') + return _common_wheel_editable_install(tmpdir_factory, common_wheels, "setuptools") -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def wheel_install(tmpdir_factory, common_wheels): - return _common_wheel_editable_install(tmpdir_factory, common_wheels, 'wheel') + return _common_wheel_editable_install(tmpdir_factory, common_wheels, "wheel") def install_egg_link(venv, project_name, egg_info_dir): - with open(venv.site / 'easy-install.pth', 'a') as fp: - fp.write(str(egg_info_dir.abspath) + '\n') - with open(venv.site / (project_name + '.egg-link'), 'w') as fp: - fp.write(str(egg_info_dir) + '\n.') + with open(venv.site / "easy-install.pth", 
"a") as fp: + fp.write(str(egg_info_dir.abspath) + "\n") + with open(venv.site / (project_name + ".egg-link"), "w") as fp: + fp.write(str(egg_info_dir) + "\n.") -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def virtualenv_template( request, tmpdir_factory, pip_src, setuptools_install, common_wheels ): - if six.PY3 and request.config.getoption('--use-venv'): - venv_type = 'venv' + if six.PY3 and request.config.getoption("--use-venv"): + venv_type = "venv" else: - venv_type = 'virtualenv' + venv_type = "virtualenv" # Create the virtual environment - tmpdir = Path(str(tmpdir_factory.mktemp('virtualenv'))) + tmpdir = Path(str(tmpdir_factory.mktemp("virtualenv"))) venv = VirtualEnvironment(tmpdir.joinpath("venv_orig"), venv_type=venv_type) # Install setuptools and pip. - install_egg_link(venv, 'setuptools', setuptools_install) - pip_editable = Path(str(tmpdir_factory.mktemp('pip'))) / 'pip' + install_egg_link(venv, "setuptools", setuptools_install) + pip_editable = Path(str(tmpdir_factory.mktemp("pip"))) / "pip" shutil.copytree(pip_src, pip_editable, symlinks=True) assert compileall.compile_dir(str(pip_editable), quiet=1) subprocess.check_call( - [venv.bin / 'python', 'setup.py', '-q', 'develop'], cwd=pip_editable + [venv.bin / "python", "setup.py", "-q", "develop"], cwd=pip_editable ) # Drop (non-relocatable) launchers. for exe in os.listdir(venv.bin): if not ( - exe.startswith('python') - or exe.startswith('libpy') # Don't remove libpypy-c.so... + exe.startswith("python") + or exe.startswith("libpy") # Don't remove libpypy-c.so... 
): (venv.bin / exe).unlink() @@ -276,7 +276,7 @@ def virtualenv(virtualenv_template, tmpdir, isolate): @pytest.fixture def with_wheel(virtualenv, wheel_install): - install_egg_link(virtualenv, 'wheel', wheel_install) + install_egg_link(virtualenv, "wheel", wheel_install) @pytest.fixture @@ -308,7 +308,7 @@ def script(tmpdir, virtualenv, deprecated_python): @pytest.fixture(scope="session") def common_wheels(): """Provide a directory with latest setuptools and wheel wheels""" - return DATA_DIR.joinpath('common_wheels') + return DATA_DIR.joinpath("common_wheels") @pytest.fixture diff --git a/tests/data/packages/BrokenEmitsUTF8/setup.py b/tests/data/packages/BrokenEmitsUTF8/setup.py index 8db605dadf4..7381a6acb18 100644 --- a/tests/data/packages/BrokenEmitsUTF8/setup.py +++ b/tests/data/packages/BrokenEmitsUTF8/setup.py @@ -8,27 +8,27 @@ class FakeError(Exception): pass -if sys.argv[1] == 'install': - if hasattr(sys.stdout, 'buffer'): +if sys.argv[1] == "install": + if hasattr(sys.stdout, "buffer"): sys.stdout.buffer.write( - '\nThis package prints out UTF-8 stuff like:\n'.encode('utf-8') + "\nThis package prints out UTF-8 stuff like:\n".encode("utf-8") ) sys.stdout.buffer.write( - '* return type of ‘main’ is not ‘int’\n'.encode('utf-8') + "* return type of ‘main’ is not ‘int’\n".encode("utf-8") ) sys.stdout.buffer.write( - '* Björk Guðmundsdóttir [ˈpjœr̥k ˈkvʏðmʏntsˌtoʊhtɪr]'.encode('utf-8') + "* Björk Guðmundsdóttir [ˈpjœr̥k ˈkvʏðmʏntsˌtoʊhtɪr]".encode("utf-8") ) else: pass - sys.stdout.write('\nThis package prints out UTF-8 stuff like:\n') + sys.stdout.write("\nThis package prints out UTF-8 stuff like:\n") sys.stdout.write( - '* return type of \xe2\x80\x98main\xe2\x80\x99 is not \xe2\x80\x98int\xe2\x80\x99\n' + "* return type of \xe2\x80\x98main\xe2\x80\x99 is not \xe2\x80\x98int\xe2\x80\x99\n" ) sys.stdout.write( - '* Bj\xc3\xb6rk Gu\xc3\xb0mundsd\xc3\xb3ttir [\xcb\x88pj\xc5\x93r\xcc\xa5k \xcb\x88kv\xca\x8f\xc3\xb0m\xca\x8fnts\xcb\x8cto\xca\x8aht\xc9\xaar]\n' + "* 
Bj\xc3\xb6rk Gu\xc3\xb0mundsd\xc3\xb3ttir [\xcb\x88pj\xc5\x93r\xcc\xa5k \xcb\x88kv\xca\x8f\xc3\xb0m\xca\x8fnts\xcb\x8cto\xca\x8aht\xc9\xaar]\n" ) - raise FakeError('this package designed to fail on install') + raise FakeError("this package designed to fail on install") -setup(name='broken', version='0.2', py_modules=['broken']) +setup(name="broken", version="0.2", py_modules=["broken"]) diff --git a/tests/data/packages/FSPkg/setup.py b/tests/data/packages/FSPkg/setup.py index dfec519f619..58a78413339 100644 --- a/tests/data/packages/FSPkg/setup.py +++ b/tests/data/packages/FSPkg/setup.py @@ -1,20 +1,20 @@ from setuptools import find_packages, setup -version = '0.1dev' +version = "0.1dev" setup( - name='FSPkg', + name="FSPkg", version=version, description="File system test package", long_description="""\ File system test package""", classifiers=[], # Get strings from https://pypi.org/pypi?%3Aaction=list_classifiers - keywords='pip tests', - author='pip', - author_email='pip@openplans.org', - url='http://pip.openplans.org', - license='', - packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), + keywords="pip tests", + author="pip", + author_email="pip@openplans.org", + url="http://pip.openplans.org", + license="", + packages=find_packages(exclude=["ez_setup", "examples", "tests"]), include_package_data=True, zip_safe=False, install_requires=[ diff --git a/tests/data/packages/HackedEggInfo/setup.py b/tests/data/packages/HackedEggInfo/setup.py index 65f7953ead4..36201f79e1d 100644 --- a/tests/data/packages/HackedEggInfo/setup.py +++ b/tests/data/packages/HackedEggInfo/setup.py @@ -11,7 +11,7 @@ def run(self): setup( name="hackedegginfo", - version='0.0.0', - cmdclass={'egg_info': egg_info}, + version="0.0.0", + cmdclass={"egg_info": egg_info}, zip_safe=False, ) diff --git a/tests/data/packages/LocalEnvironMarker/setup.py b/tests/data/packages/LocalEnvironMarker/setup.py index 71edc76733e..4d991a6afa0 100644 --- 
a/tests/data/packages/LocalEnvironMarker/setup.py +++ b/tests/data/packages/LocalEnvironMarker/setup.py @@ -11,15 +11,15 @@ def path_to_url(path): path = os.path.normpath(os.path.abspath(path)) drive, path = os.path.splitdrive(path) filepath = path.split(os.path.sep) - url = '/'.join(filepath) + url = "/".join(filepath) if drive: - return 'file:///' + drive + url - return 'file://' + url + return "file:///" + drive + url + return "file://" + url setup( - name='LocalEnvironMarker', - version='0.0.1', + name="LocalEnvironMarker", + version="0.0.1", packages=find_packages(), - extras_require={":python_version == '2.7'": ['simple']}, + extras_require={":python_version == '2.7'": ["simple"]}, ) diff --git a/tests/data/packages/LocalExtras-0.0.2/setup.py b/tests/data/packages/LocalExtras-0.0.2/setup.py index 3b451366414..6e910d1cbb2 100644 --- a/tests/data/packages/LocalExtras-0.0.2/setup.py +++ b/tests/data/packages/LocalExtras-0.0.2/setup.py @@ -11,16 +11,16 @@ def path_to_url(path): path = os.path.normpath(os.path.abspath(path)) drive, path = os.path.splitdrive(path) filepath = path.split(os.path.sep) - url = '/'.join(filepath) + url = "/".join(filepath) if drive: - return 'file:///' + drive + url - return 'file://' + url + return "file:///" + drive + url + return "file://" + url setup( - name='LocalExtras', - version='0.0.2', + name="LocalExtras", + version="0.0.2", packages=find_packages(), - install_requires=['simple==1.0'], - extras_require={'bar': ['simple==2.0'], 'baz': ['singlemodule']}, + install_requires=["simple==1.0"], + extras_require={"bar": ["simple==2.0"], "baz": ["singlemodule"]}, ) diff --git a/tests/data/packages/LocalExtras/setup.py b/tests/data/packages/LocalExtras/setup.py index f2658a2d0ff..4bf2179da78 100644 --- a/tests/data/packages/LocalExtras/setup.py +++ b/tests/data/packages/LocalExtras/setup.py @@ -11,15 +11,15 @@ def path_to_url(path): path = os.path.normpath(os.path.abspath(path)) drive, path = os.path.splitdrive(path) filepath = 
path.split(os.path.sep) - url = '/'.join(filepath) + url = "/".join(filepath) if drive: - return 'file:///' + drive + url - return 'file://' + url + return "file:///" + drive + url + return "file://" + url setup( - name='LocalExtras', - version='0.0.1', + name="LocalExtras", + version="0.0.1", packages=find_packages(), - extras_require={'bar': ['simple'], 'baz': ['singlemodule']}, + extras_require={"bar": ["simple"], "baz": ["singlemodule"]}, ) diff --git a/tests/data/packages/requires_wheelbroken_upper/setup.py b/tests/data/packages/requires_wheelbroken_upper/setup.py index a502288c69b..210b7c67ad8 100644 --- a/tests/data/packages/requires_wheelbroken_upper/setup.py +++ b/tests/data/packages/requires_wheelbroken_upper/setup.py @@ -3,5 +3,5 @@ setuptools.setup( name="requires_wheelbroken_upper", version="0", - install_requires=['wheelbroken', 'upper'], + install_requires=["wheelbroken", "upper"], ) diff --git a/tests/data/packages/symlinks/setup.py b/tests/data/packages/symlinks/setup.py index fe6a846f64b..e52cf5d3047 100644 --- a/tests/data/packages/symlinks/setup.py +++ b/tests/data/packages/symlinks/setup.py @@ -1,5 +1,5 @@ from setuptools import setup -version = '0.1' +version = "0.1" -setup(name='symlinks', version=version, packages=["symlinks"]) +setup(name="symlinks", version=version, packages=["symlinks"]) diff --git a/tests/data/src/TopoRequires/setup.py b/tests/data/src/TopoRequires/setup.py index 0400138e3ea..dd7f367d938 100644 --- a/tests/data/src/TopoRequires/setup.py +++ b/tests/data/src/TopoRequires/setup.py @@ -1,3 +1,3 @@ from setuptools import setup -setup(name='TopoRequires', version='0.0.1', packages=['toporequires']) +setup(name="TopoRequires", version="0.0.1", packages=["toporequires"]) diff --git a/tests/data/src/TopoRequires2/setup.py b/tests/data/src/TopoRequires2/setup.py index 019f43cb231..11d009c4175 100644 --- a/tests/data/src/TopoRequires2/setup.py +++ b/tests/data/src/TopoRequires2/setup.py @@ -1,8 +1,8 @@ from setuptools import setup 
setup( - name='TopoRequires2', - version='0.0.1', - packages=['toporequires2'], - install_requires=['TopoRequires'], + name="TopoRequires2", + version="0.0.1", + packages=["toporequires2"], + install_requires=["TopoRequires"], ) diff --git a/tests/data/src/TopoRequires3/setup.py b/tests/data/src/TopoRequires3/setup.py index 772ed618e3c..550bb008eb9 100644 --- a/tests/data/src/TopoRequires3/setup.py +++ b/tests/data/src/TopoRequires3/setup.py @@ -1,8 +1,8 @@ from setuptools import setup setup( - name='TopoRequires3', - version='0.0.1', - packages=['toporequires3'], - install_requires=['TopoRequires'], + name="TopoRequires3", + version="0.0.1", + packages=["toporequires3"], + install_requires=["TopoRequires"], ) diff --git a/tests/data/src/TopoRequires4/setup.py b/tests/data/src/TopoRequires4/setup.py index e276f55a240..077eec765a5 100644 --- a/tests/data/src/TopoRequires4/setup.py +++ b/tests/data/src/TopoRequires4/setup.py @@ -1,8 +1,8 @@ from setuptools import setup setup( - name='TopoRequires4', - version='0.0.1', - packages=['toporequires4'], - install_requires=['TopoRequires2', 'TopoRequires', 'TopoRequires3'], + name="TopoRequires4", + version="0.0.1", + packages=["toporequires4"], + install_requires=["TopoRequires2", "TopoRequires", "TopoRequires3"], ) diff --git a/tests/data/src/chattymodule/setup.py b/tests/data/src/chattymodule/setup.py index 7211972d1a7..0d215d76af9 100644 --- a/tests/data/src/chattymodule/setup.py +++ b/tests/data/src/chattymodule/setup.py @@ -14,7 +14,7 @@ setup( name="chattymodule", - version='0.0.1', + version="0.0.1", description="A sample Python project with a single module", - py_modules=['chattymodule'], + py_modules=["chattymodule"], ) diff --git a/tests/data/src/compilewheel/setup.py b/tests/data/src/compilewheel/setup.py index 55df35cf2bb..da994945048 100644 --- a/tests/data/src/compilewheel/setup.py +++ b/tests/data/src/compilewheel/setup.py @@ -1,4 +1,4 @@ #!/usr/bin/env python from setuptools import find_packages, setup 
-setup(name='compilewheel', version='1.0', packages=find_packages()) +setup(name="compilewheel", version="1.0", packages=find_packages()) diff --git a/tests/data/src/pep518-3.0/setup.py b/tests/data/src/pep518-3.0/setup.py index 1424320d417..c0919b9a21a 100644 --- a/tests/data/src/pep518-3.0/setup.py +++ b/tests/data/src/pep518-3.0/setup.py @@ -3,4 +3,4 @@ import simplewheel # ensure dependency is installed -setup(name='pep518', version='3.0', py_modules=['pep518']) +setup(name="pep518", version="3.0", py_modules=["pep518"]) diff --git a/tests/data/src/pep518_conflicting_requires/setup.py b/tests/data/src/pep518_conflicting_requires/setup.py index a7c10c35340..11877f990cd 100644 --- a/tests/data/src/pep518_conflicting_requires/setup.py +++ b/tests/data/src/pep518_conflicting_requires/setup.py @@ -1,4 +1,4 @@ #!/usr/bin/env python from setuptools import setup -setup(name='pep518_conflicting_requires', version='1.0.0', py_modules=['pep518']) +setup(name="pep518_conflicting_requires", version="1.0.0", py_modules=["pep518"]) diff --git a/tests/data/src/pep518_forkbomb-235/setup.py b/tests/data/src/pep518_forkbomb-235/setup.py index 550276ba27a..f69346cac82 100644 --- a/tests/data/src/pep518_forkbomb-235/setup.py +++ b/tests/data/src/pep518_forkbomb-235/setup.py @@ -1,3 +1,3 @@ from setuptools import setup -setup(name='pep518_forkbomb', version='235', py_modules=['pep518_forkbomb']) +setup(name="pep518_forkbomb", version="235", py_modules=["pep518_forkbomb"]) diff --git a/tests/data/src/pep518_invalid_build_system/setup.py b/tests/data/src/pep518_invalid_build_system/setup.py index 02d680ab83c..e50a60f3bb4 100644 --- a/tests/data/src/pep518_invalid_build_system/setup.py +++ b/tests/data/src/pep518_invalid_build_system/setup.py @@ -1,4 +1,4 @@ #!/usr/bin/env python from setuptools import setup -setup(name='pep518_invalid_build_system', version='1.0.0', py_modules=['pep518']) +setup(name="pep518_invalid_build_system", version="1.0.0", py_modules=["pep518"]) diff --git 
a/tests/data/src/pep518_invalid_requires/setup.py b/tests/data/src/pep518_invalid_requires/setup.py index 97147e99778..c4093d0552e 100644 --- a/tests/data/src/pep518_invalid_requires/setup.py +++ b/tests/data/src/pep518_invalid_requires/setup.py @@ -1,4 +1,4 @@ #!/usr/bin/env python from setuptools import setup -setup(name='pep518_invalid_requires', version='1.0.0', py_modules=['pep518']) +setup(name="pep518_invalid_requires", version="1.0.0", py_modules=["pep518"]) diff --git a/tests/data/src/pep518_missing_requires/setup.py b/tests/data/src/pep518_missing_requires/setup.py index 63f988a9fda..e07fc16bb34 100644 --- a/tests/data/src/pep518_missing_requires/setup.py +++ b/tests/data/src/pep518_missing_requires/setup.py @@ -1,4 +1,4 @@ #!/usr/bin/env python from setuptools import setup -setup(name='pep518_missing_requires', version='1.0.0', py_modules=['pep518']) +setup(name="pep518_missing_requires", version="1.0.0", py_modules=["pep518"]) diff --git a/tests/data/src/pep518_twin_forkbombs_first-234/setup.py b/tests/data/src/pep518_twin_forkbombs_first-234/setup.py index ce4a639dbf5..acb97e18efc 100644 --- a/tests/data/src/pep518_twin_forkbombs_first-234/setup.py +++ b/tests/data/src/pep518_twin_forkbombs_first-234/setup.py @@ -1,7 +1,7 @@ from setuptools import setup setup( - name='pep518_twin_forkbombs_first', - version='234', - py_modules=['pep518_twin_forkbombs_first'], + name="pep518_twin_forkbombs_first", + version="234", + py_modules=["pep518_twin_forkbombs_first"], ) diff --git a/tests/data/src/pep518_twin_forkbombs_second-238/setup.py b/tests/data/src/pep518_twin_forkbombs_second-238/setup.py index f85a5603166..c14c1cfb025 100644 --- a/tests/data/src/pep518_twin_forkbombs_second-238/setup.py +++ b/tests/data/src/pep518_twin_forkbombs_second-238/setup.py @@ -1,7 +1,7 @@ from setuptools import setup setup( - name='pep518_twin_forkbombs_second', - version='238', - py_modules=['pep518_twin_forkbombs_second'], + name="pep518_twin_forkbombs_second", + 
version="238", + py_modules=["pep518_twin_forkbombs_second"], ) diff --git a/tests/data/src/pep518_with_extra_and_markers-1.0/setup.py b/tests/data/src/pep518_with_extra_and_markers-1.0/setup.py index 5759c8a7d12..08e6eceac1b 100644 --- a/tests/data/src/pep518_with_extra_and_markers-1.0/setup.py +++ b/tests/data/src/pep518_with_extra_and_markers-1.0/setup.py @@ -7,10 +7,10 @@ import simple import simplewheel -assert simplewheel.__version__ == '1.0' if sys.version_info < (3,) else '2.0' +assert simplewheel.__version__ == "1.0" if sys.version_info < (3,) else "2.0" setup( - name='pep518_with_extra_and_markers', - version='1.0', - py_modules=['pep518_with_extra_and_markers'], + name="pep518_with_extra_and_markers", + version="1.0", + py_modules=["pep518_with_extra_and_markers"], ) diff --git a/tests/data/src/pep518_with_namespace_package-1.0/setup.py b/tests/data/src/pep518_with_namespace_package-1.0/setup.py index 540ede4cf43..263ba19880e 100644 --- a/tests/data/src/pep518_with_namespace_package-1.0/setup.py +++ b/tests/data/src/pep518_with_namespace_package-1.0/setup.py @@ -3,7 +3,7 @@ import simple_namespace.module setup( - name='pep518_with_namespace_package', - version='1.0', - py_modules=['pep518_with_namespace_package'], + name="pep518_with_namespace_package", + version="1.0", + py_modules=["pep518_with_namespace_package"], ) diff --git a/tests/data/src/prjwithdatafile/setup.py b/tests/data/src/prjwithdatafile/setup.py index 3a280a75d7b..5ed643c96ce 100755 --- a/tests/data/src/prjwithdatafile/setup.py +++ b/tests/data/src/prjwithdatafile/setup.py @@ -2,11 +2,11 @@ from setuptools import setup setup( - name='prjwithdatafile', + name="prjwithdatafile", version="1.0", - packages=['prjwithdatafile'], + packages=["prjwithdatafile"], data_files=[ - (r'packages1', ['prjwithdatafile/README.txt']), - (r'packages2', ['prjwithdatafile/README.txt']), + (r"packages1", ["prjwithdatafile/README.txt"]), + (r"packages2", ["prjwithdatafile/README.txt"]), ], ) diff --git 
a/tests/data/src/requires_capitalized/setup.py b/tests/data/src/requires_capitalized/setup.py index 8ec59e9f273..287704f8628 100644 --- a/tests/data/src/requires_capitalized/setup.py +++ b/tests/data/src/requires_capitalized/setup.py @@ -1,3 +1,3 @@ from setuptools import setup -setup(name='Requires_Capitalized', version='0.1', install_requires=['simple==1.0']) +setup(name="Requires_Capitalized", version="0.1", install_requires=["simple==1.0"]) diff --git a/tests/data/src/requires_requires_capitalized/setup.py b/tests/data/src/requires_requires_capitalized/setup.py index ac519632d6c..8d099fddd21 100644 --- a/tests/data/src/requires_requires_capitalized/setup.py +++ b/tests/data/src/requires_requires_capitalized/setup.py @@ -1,7 +1,7 @@ from setuptools import setup setup( - name='requires_requires_capitalized', - version='1.0', - install_requires=['requires_Capitalized==0.1'], + name="requires_requires_capitalized", + version="1.0", + install_requires=["requires_Capitalized==0.1"], ) diff --git a/tests/data/src/requires_simple/setup.py b/tests/data/src/requires_simple/setup.py index 5b45b79d5f2..5eebde770d2 100644 --- a/tests/data/src/requires_simple/setup.py +++ b/tests/data/src/requires_simple/setup.py @@ -1,3 +1,3 @@ from setuptools import find_packages, setup -setup(name='requires_simple', version='0.1', install_requires=['simple==1.0']) +setup(name="requires_simple", version="0.1", install_requires=["simple==1.0"]) diff --git a/tests/data/src/requires_simple_extra/setup.py b/tests/data/src/requires_simple_extra/setup.py index d09b248de5e..5562ebc9541 100644 --- a/tests/data/src/requires_simple_extra/setup.py +++ b/tests/data/src/requires_simple_extra/setup.py @@ -1,8 +1,8 @@ from setuptools import setup setup( - name='requires_simple_extra', - version='0.1', - py_modules=['requires_simple_extra'], - extras_require={'extra': ['simple==1.0']}, + name="requires_simple_extra", + version="0.1", + py_modules=["requires_simple_extra"], + extras_require={"extra": 
["simple==1.0"]}, ) diff --git a/tests/data/src/sample/sample/__init__.py b/tests/data/src/sample/sample/__init__.py index 949740b6386..ea6d4910475 100644 --- a/tests/data/src/sample/sample/__init__.py +++ b/tests/data/src/sample/sample/__init__.py @@ -1,4 +1,4 @@ -__version__ = '1.2.0' +__version__ = "1.2.0" def main(): diff --git a/tests/data/src/sample/setup.py b/tests/data/src/sample/setup.py index 3514598ab74..482f6d3b649 100644 --- a/tests/data/src/sample/setup.py +++ b/tests/data/src/sample/setup.py @@ -12,7 +12,7 @@ def find_version(*file_paths): # Open in Latin-1 so that we avoid encoding errors. # Use codecs.open for Python 2 compatibility - with codecs.open(os.path.join(here, *file_paths), 'r', 'latin1') as f: + with codecs.open(os.path.join(here, *file_paths), "r", "latin1") as f: version_file = f.read() # The version line must have the form @@ -24,60 +24,60 @@ def find_version(*file_paths): # Get the long description from the relevant file -with codecs.open('DESCRIPTION.rst', encoding='utf-8') as f: +with codecs.open("DESCRIPTION.rst", encoding="utf-8") as f: long_description = f.read() setup( name="sample", - version=find_version('sample', '__init__.py'), + version=find_version("sample", "__init__.py"), description="A sample Python project", long_description=long_description, # The project URL. - url='https://github.com/pypa/sampleproject', + url="https://github.com/pypa/sampleproject", # Author details - author='The Python Packaging Authority', - author_email='pypa-dev@googlegroups.com', + author="The Python Packaging Authority", + author_email="pypa-dev@googlegroups.com", # Choose your license - license='MIT', + license="MIT", classifiers=[ # How mature is this project? 
Common values are # 3 - Alpha # 4 - Beta # 5 - Production/Stable - 'Development Status :: 3 - Alpha', + "Development Status :: 3 - Alpha", # Indicate who your project is intended for - 'Intended Audience :: Developers', - 'Topic :: Software Development :: Build Tools', + "Intended Audience :: Developers", + "Topic :: Software Development :: Build Tools", # Pick your license as you wish (should match "license" above) - 'License :: OSI Approved :: MIT License', + "License :: OSI Approved :: MIT License", # Specify the Python versions you support here. In particular, ensure # that you indicate whether you support Python 2, Python 3 or both. - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.1', - 'Programming Language :: Python :: 3.2', - 'Programming Language :: Python :: 3.3', + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.1", + "Programming Language :: Python :: 3.2", + "Programming Language :: Python :: 3.3", ], # What does your project relate to? - keywords='sample setuptools development', + keywords="sample setuptools development", # You can just specify the packages manually here if your project is # simple. Or you can use find_packages. packages=find_packages(exclude=["contrib", "docs", "tests*"]), # List run-time dependencies here. These will be installed by pip when your # project is installed. - install_requires=['peppercorn'], + install_requires=["peppercorn"], # If there are data files included in your packages that need to be # installed, specify them here. If using Python 2.6 or less, then these # have to be included in MANIFEST.in as well. 
- package_data={'sample': ['package_data.dat']}, + package_data={"sample": ["package_data.dat"]}, # Although 'package_data' is the preferred approach, in some case you may # need to place data files outside of your packages. # see https://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # In this case, 'data_file' will be installed into '/my_data' - data_files=[('my_data', ['data/data_file'])], + data_files=[("my_data", ["data/data_file"])], # To provide executable scripts, use entry points in preference to the # "scripts" keyword. Entry points provide cross-platform support and allow # pip to create the appropriate form of executable for the target platform. - entry_points={'console_scripts': ['sample=sample:main']}, + entry_points={"console_scripts": ["sample=sample:main"]}, ) diff --git a/tests/data/src/simple_namespace/setup.py b/tests/data/src/simple_namespace/setup.py index 9a49d52b757..c6b5978641c 100644 --- a/tests/data/src/simple_namespace/setup.py +++ b/tests/data/src/simple_namespace/setup.py @@ -1,8 +1,8 @@ from setuptools import setup setup( - name='simple_namespace', - version='1.0', - namespace_packages=['simple_namespace'], - packages=['simple_namespace.module'], + name="simple_namespace", + version="1.0", + namespace_packages=["simple_namespace"], + packages=["simple_namespace.module"], ) diff --git a/tests/data/src/simplewheel-1.0/setup.py b/tests/data/src/simplewheel-1.0/setup.py index 30339eb48ff..91ce41095f3 100644 --- a/tests/data/src/simplewheel-1.0/setup.py +++ b/tests/data/src/simplewheel-1.0/setup.py @@ -3,4 +3,4 @@ import simplewheel -setup(name='simplewheel', version=simplewheel.__version__, packages=['simplewheel']) +setup(name="simplewheel", version=simplewheel.__version__, packages=["simplewheel"]) diff --git a/tests/data/src/simplewheel-1.0/simplewheel/__init__.py b/tests/data/src/simplewheel-1.0/simplewheel/__init__.py index 7e49527e386..4802e90f8ed 100644 --- 
a/tests/data/src/simplewheel-1.0/simplewheel/__init__.py +++ b/tests/data/src/simplewheel-1.0/simplewheel/__init__.py @@ -1 +1 @@ -__version__ = '1.0' +__version__ = "1.0" diff --git a/tests/data/src/simplewheel-2.0/setup.py b/tests/data/src/simplewheel-2.0/setup.py index 30339eb48ff..91ce41095f3 100644 --- a/tests/data/src/simplewheel-2.0/setup.py +++ b/tests/data/src/simplewheel-2.0/setup.py @@ -3,4 +3,4 @@ import simplewheel -setup(name='simplewheel', version=simplewheel.__version__, packages=['simplewheel']) +setup(name="simplewheel", version=simplewheel.__version__, packages=["simplewheel"]) diff --git a/tests/data/src/simplewheel-2.0/simplewheel/__init__.py b/tests/data/src/simplewheel-2.0/simplewheel/__init__.py index 3b3dacb9af5..f2dc0e40061 100644 --- a/tests/data/src/simplewheel-2.0/simplewheel/__init__.py +++ b/tests/data/src/simplewheel-2.0/simplewheel/__init__.py @@ -1 +1 @@ -__version__ = '2.0' +__version__ = "2.0" diff --git a/tests/data/src/singlemodule/setup.py b/tests/data/src/singlemodule/setup.py index 622af1f8e99..e6358e2f7ca 100644 --- a/tests/data/src/singlemodule/setup.py +++ b/tests/data/src/singlemodule/setup.py @@ -2,7 +2,7 @@ setup( name="singlemodule", - version='0.0.1', + version="0.0.1", description="A sample Python project with a single module", - py_modules=['singlemodule'], + py_modules=["singlemodule"], ) diff --git a/tests/data/src/withpyproject/setup.py b/tests/data/src/withpyproject/setup.py index 1ea9e3e41c5..af10b3e3f37 100644 --- a/tests/data/src/withpyproject/setup.py +++ b/tests/data/src/withpyproject/setup.py @@ -1,3 +1,3 @@ from setuptools import setup -setup(name='withpyproject', version='0.0.1') +setup(name="withpyproject", version="0.0.1") diff --git a/tests/functional/test_broken_stdout.py b/tests/functional/test_broken_stdout.py index f7ad0e1d155..c6e600f30a8 100644 --- a/tests/functional/test_broken_stdout.py +++ b/tests/functional/test_broken_stdout.py @@ -15,9 +15,9 @@ def setup_broken_stdout_test(args, 
deprecated_python): # Call close() on stdout to cause a broken pipe. proc.stdout.close() returncode = proc.wait() - stderr = proc.stderr.read().decode('utf-8') + stderr = proc.stderr.read().decode("utf-8") - expected_msg = 'ERROR: Pipe to stdout was broken' + expected_msg = "ERROR: Pipe to stdout was broken" if deprecated_python: assert expected_msg in stderr else: @@ -31,12 +31,12 @@ def test_broken_stdout_pipe(deprecated_python): Test a broken pipe to stdout. """ stderr, returncode = setup_broken_stdout_test( - ['pip', 'list'], deprecated_python=deprecated_python + ["pip", "list"], deprecated_python=deprecated_python ) # Check that no traceback occurs. - assert 'raise BrokenStdoutLoggingError()' not in stderr - assert stderr.count('Traceback') == 0 + assert "raise BrokenStdoutLoggingError()" not in stderr + assert stderr.count("Traceback") == 0 assert returncode == _BROKEN_STDOUT_RETURN_CODE @@ -45,14 +45,14 @@ def test_broken_stdout_pipe__log_option(deprecated_python, tmpdir): """ Test a broken pipe to stdout when --log is passed. """ - log_path = os.path.join(str(tmpdir), 'log.txt') + log_path = os.path.join(str(tmpdir), "log.txt") stderr, returncode = setup_broken_stdout_test( - ['pip', '--log', log_path, 'list'], deprecated_python=deprecated_python + ["pip", "--log", log_path, "list"], deprecated_python=deprecated_python ) # Check that no traceback occurs. - assert 'raise BrokenStdoutLoggingError()' not in stderr - assert stderr.count('Traceback') == 0 + assert "raise BrokenStdoutLoggingError()" not in stderr + assert stderr.count("Traceback") == 0 assert returncode == _BROKEN_STDOUT_RETURN_CODE @@ -62,12 +62,12 @@ def test_broken_stdout_pipe__verbose(deprecated_python): Test a broken pipe to stdout with verbose logging enabled. 
""" stderr, returncode = setup_broken_stdout_test( - ['pip', '-v', 'list'], deprecated_python=deprecated_python + ["pip", "-v", "list"], deprecated_python=deprecated_python ) # Check that a traceback occurs and that it occurs at most once. # We permit up to two because the exception can be chained. - assert 'raise BrokenStdoutLoggingError()' in stderr - assert 1 <= stderr.count('Traceback') <= 2 + assert "raise BrokenStdoutLoggingError()" in stderr + assert 1 <= stderr.count("Traceback") <= 2 assert returncode == _BROKEN_STDOUT_RETURN_CODE diff --git a/tests/functional/test_check.py b/tests/functional/test_check.py index 2f669835bdf..1856b2f7676 100644 --- a/tests/functional/test_check.py +++ b/tests/functional/test_check.py @@ -12,7 +12,7 @@ def test_basic_check_clean(script): """On a clean environment, check should print a helpful message. """ - result = script.pip('check') + result = script.pip("check") expected_lines = ("No broken requirements found.",) assert matches_expected_lines(result.stdout, expected_lines) @@ -22,13 +22,13 @@ def test_basic_check_clean(script): def test_basic_check_missing_dependency(script): # Setup a small project pkga_path = create_test_package_with_setup( - script, name='pkga', version='1.0', install_requires=['missing==0.1'] + script, name="pkga", version="1.0", install_requires=["missing==0.1"] ) # Let's install pkga without its dependency - res = script.pip('install', '--no-index', pkga_path, '--no-deps') + res = script.pip("install", "--no-index", pkga_path, "--no-deps") assert "Successfully installed pkga-1.0" in res.stdout, str(res) - result = script.pip('check', expect_error=True) + result = script.pip("check", expect_error=True) expected_lines = ("pkga 1.0 requires missing, which is not installed.",) assert matches_expected_lines(result.stdout, expected_lines) @@ -38,19 +38,19 @@ def test_basic_check_missing_dependency(script): def test_basic_check_broken_dependency(script): # Setup pkga depending on pkgb>=1.0 pkga_path = 
create_test_package_with_setup( - script, name='pkga', version='1.0', install_requires=['broken>=1.0'] + script, name="pkga", version="1.0", install_requires=["broken>=1.0"] ) # Let's install pkga without its dependency - res = script.pip('install', '--no-index', pkga_path, '--no-deps') + res = script.pip("install", "--no-index", pkga_path, "--no-deps") assert "Successfully installed pkga-1.0" in res.stdout, str(res) # Setup broken==0.1 - broken_path = create_test_package_with_setup(script, name='broken', version='0.1') + broken_path = create_test_package_with_setup(script, name="broken", version="0.1") # Let's install broken==0.1 - res = script.pip('install', '--no-index', broken_path, '--no-warn-conflicts') + res = script.pip("install", "--no-index", broken_path, "--no-warn-conflicts") assert "Successfully installed broken-0.1" in res.stdout, str(res) - result = script.pip('check', expect_error=True) + result = script.pip("check", expect_error=True) expected_lines = ("pkga 1.0 has requirement broken>=1.0, but you have broken 0.1.",) assert matches_expected_lines(result.stdout, expected_lines) @@ -59,21 +59,21 @@ def test_basic_check_broken_dependency(script): def test_basic_check_broken_dependency_and_missing_dependency(script): pkga_path = create_test_package_with_setup( - script, name='pkga', version='1.0', install_requires=['broken>=1.0'] + script, name="pkga", version="1.0", install_requires=["broken>=1.0"] ) # Let's install pkga without its dependency - res = script.pip('install', '--no-index', pkga_path, '--no-deps') + res = script.pip("install", "--no-index", pkga_path, "--no-deps") assert "Successfully installed pkga-1.0" in res.stdout, str(res) # Setup broken==0.1 broken_path = create_test_package_with_setup( - script, name='broken', version='0.1', install_requires=['missing'] + script, name="broken", version="0.1", install_requires=["missing"] ) # Let's install broken==0.1 - res = script.pip('install', '--no-index', broken_path, '--no-deps') + res = 
script.pip("install", "--no-index", broken_path, "--no-deps") assert "Successfully installed broken-0.1" in res.stdout, str(res) - result = script.pip('check', expect_error=True) + result = script.pip("check", expect_error=True) expected_lines = ( "broken 0.1 requires missing, which is not installed.", @@ -86,14 +86,14 @@ def test_basic_check_broken_dependency_and_missing_dependency(script): def test_check_complicated_name_missing(script): package_a_path = create_test_package_with_setup( - script, name='package_A', version='1.0', install_requires=['Dependency-B>=1.0'] + script, name="package_A", version="1.0", install_requires=["Dependency-B>=1.0"] ) # Without dependency - result = script.pip('install', '--no-index', package_a_path, '--no-deps') + result = script.pip("install", "--no-index", package_a_path, "--no-deps") assert "Successfully installed package-A-1.0" in result.stdout, str(result) - result = script.pip('check', expect_error=True) + result = script.pip("check", expect_error=True) expected_lines = ("package-a 1.0 requires dependency-b, which is not installed.",) assert matches_expected_lines(result.stdout, expected_lines) assert result.returncode == 1 @@ -101,22 +101,22 @@ def test_check_complicated_name_missing(script): def test_check_complicated_name_broken(script): package_a_path = create_test_package_with_setup( - script, name='package_A', version='1.0', install_requires=['Dependency-B>=1.0'] + script, name="package_A", version="1.0", install_requires=["Dependency-B>=1.0"] ) dependency_b_path_incompatible = create_test_package_with_setup( - script, name='dependency-b', version='0.1' + script, name="dependency-b", version="0.1" ) # With broken dependency - result = script.pip('install', '--no-index', package_a_path, '--no-deps') + result = script.pip("install", "--no-index", package_a_path, "--no-deps") assert "Successfully installed package-A-1.0" in result.stdout, str(result) result = script.pip( - 'install', '--no-index', 
dependency_b_path_incompatible, '--no-deps' + "install", "--no-index", dependency_b_path_incompatible, "--no-deps" ) assert "Successfully installed dependency-b-0.1" in result.stdout - result = script.pip('check', expect_error=True) + result = script.pip("check", expect_error=True) expected_lines = ( "package-a 1.0 has requirement Dependency-B>=1.0, but you have " "dependency-b 0.1.", @@ -127,19 +127,19 @@ def test_check_complicated_name_broken(script): def test_check_complicated_name_clean(script): package_a_path = create_test_package_with_setup( - script, name='package_A', version='1.0', install_requires=['Dependency-B>=1.0'] + script, name="package_A", version="1.0", install_requires=["Dependency-B>=1.0"] ) dependency_b_path = create_test_package_with_setup( - script, name='dependency-b', version='1.0' + script, name="dependency-b", version="1.0" ) - result = script.pip('install', '--no-index', package_a_path, '--no-deps') + result = script.pip("install", "--no-index", package_a_path, "--no-deps") assert "Successfully installed package-A-1.0" in result.stdout, str(result) - result = script.pip('install', '--no-index', dependency_b_path, '--no-deps') + result = script.pip("install", "--no-index", dependency_b_path, "--no-deps") assert "Successfully installed dependency-b-1.0" in result.stdout - result = script.pip('check') + result = script.pip("check") expected_lines = ("No broken requirements found.",) assert matches_expected_lines(result.stdout, expected_lines) assert result.returncode == 0 @@ -148,18 +148,18 @@ def test_check_complicated_name_clean(script): def test_check_considers_conditional_reqs(script): package_a_path = create_test_package_with_setup( script, - name='package_A', - version='1.0', + name="package_A", + version="1.0", install_requires=[ "Dependency-B>=1.0; python_version != '2.7'", "Dependency-B>=2.0; python_version == '2.7'", ], ) - result = script.pip('install', '--no-index', package_a_path, '--no-deps') + result = script.pip("install", 
"--no-index", package_a_path, "--no-deps") assert "Successfully installed package-A-1.0" in result.stdout, str(result) - result = script.pip('check', expect_error=True) + result = script.pip("check", expect_error=True) expected_lines = ("package-a 1.0 requires dependency-b, which is not installed.",) assert matches_expected_lines(result.stdout, expected_lines) assert result.returncode == 1 @@ -168,21 +168,21 @@ def test_check_considers_conditional_reqs(script): def test_check_development_versions_are_also_considered(script): # Setup pkga depending on pkgb>=1.0 pkga_path = create_test_package_with_setup( - script, name='pkga', version='1.0', install_requires=['depend>=1.0'] + script, name="pkga", version="1.0", install_requires=["depend>=1.0"] ) # Let's install pkga without its dependency - res = script.pip('install', '--no-index', pkga_path, '--no-deps') + res = script.pip("install", "--no-index", pkga_path, "--no-deps") assert "Successfully installed pkga-1.0" in res.stdout, str(res) # Setup depend==1.1.0.dev0 depend_path = create_test_package_with_setup( - script, name='depend', version='1.1.0.dev0' + script, name="depend", version="1.1.0.dev0" ) # Let's install depend==1.1.0.dev0 - res = script.pip('install', '--no-index', depend_path, '--no-warn-conflicts') + res = script.pip("install", "--no-index", depend_path, "--no-warn-conflicts") assert "Successfully installed depend-1.1.0.dev0" in res.stdout, str(res) - result = script.pip('check') + result = script.pip("check") expected_lines = ("No broken requirements found.",) assert matches_expected_lines(result.stdout, expected_lines) assert result.returncode == 0 @@ -190,17 +190,17 @@ def test_check_development_versions_are_also_considered(script): def test_basic_check_broken_metadata(script): # Create some corrupt metadata - dist_info_dir = script.site_packages_path / 'pkga-1.0.dist-info' + dist_info_dir = script.site_packages_path / "pkga-1.0.dist-info" dist_info_dir.mkdir() - with open(dist_info_dir / 
'METADATA', 'w') as f: + with open(dist_info_dir / "METADATA", "w") as f: f.write( - 'Metadata-Version: 2.1\n' - 'Name: pkga\n' - 'Version: 1.0\n' + "Metadata-Version: 2.1\n" + "Name: pkga\n" + "Version: 1.0\n" 'Requires-Dist: pip; python_version == "3.4";extra == "test"\n' ) - result = script.pip('check', expect_error=True) + result = script.pip("check", expect_error=True) - assert 'Error parsing requirements' in result.stderr + assert "Error parsing requirements" in result.stderr assert result.returncode == 1 diff --git a/tests/functional/test_completion.py b/tests/functional/test_completion.py index 2ba10aafaab..43f85e3da94 100644 --- a/tests/functional/test_completion.py +++ b/tests/functional/test_completion.py @@ -5,7 +5,7 @@ COMPLETION_FOR_SUPPORTED_SHELLS_TESTS = ( ( - 'bash', + "bash", """\ _pip_completion() { @@ -16,7 +16,7 @@ complete -o default -F _pip_completion pip""", ), ( - 'fish', + "fish", """\ function __fish_complete_pip set -lx COMP_WORDS (commandline -o) "" @@ -29,7 +29,7 @@ complete -fa "(__fish_complete_pip)" -c pip""", ), ( - 'zsh', + "zsh", """\ function _pip_completion { local words cword @@ -45,7 +45,7 @@ @pytest.mark.parametrize( - 'shell, completion', + "shell, completion", COMPLETION_FOR_SUPPORTED_SHELLS_TESTS, ids=[t[0] for t in COMPLETION_FOR_SUPPORTED_SHELLS_TESTS], ) @@ -56,9 +56,9 @@ def test_completion_for_supported_shells( Test getting completion for bash shell """ # Re-install pip so we get the launchers. 
- script.pip_install_local('-f', common_wheels, pip_src) + script.pip_install_local("-f", common_wheels, pip_src) - result = script.pip('completion', '--' + shell, use_module=False) + result = script.pip("completion", "--" + shell, use_module=False) assert completion in result.stdout, str(result.stdout) @@ -66,32 +66,32 @@ def test_completion_for_unknown_shell(script): """ Test getting completion for an unknown shell """ - error_msg = 'no such option: --myfooshell' - result = script.pip('completion', '--myfooshell', expect_error=True) - assert error_msg in result.stderr, 'tests for an unknown shell failed' + error_msg = "no such option: --myfooshell" + result = script.pip("completion", "--myfooshell", expect_error=True) + assert error_msg in result.stderr, "tests for an unknown shell failed" def test_completion_alone(script): """ Test getting completion for none shell, just pip completion """ - result = script.pip('completion', allow_stderr_error=True) - assert 'ERROR: You must pass --bash or --fish or --zsh' in result.stderr, ( - 'completion alone failed -- ' + result.stderr + result = script.pip("completion", allow_stderr_error=True) + assert "ERROR: You must pass --bash or --fish or --zsh" in result.stderr, ( + "completion alone failed -- " + result.stderr ) def setup_completion(script, words, cword, cwd=None): script.environ = os.environ.copy() - script.environ['PIP_AUTO_COMPLETE'] = '1' - script.environ['COMP_WORDS'] = words - script.environ['COMP_CWORD'] = cword + script.environ["PIP_AUTO_COMPLETE"] = "1" + script.environ["COMP_WORDS"] = words + script.environ["COMP_CWORD"] = cword # expect_error is True because autocomplete exists with 1 status code result = script.run( - 'python', - '-c', - 'import pip._internal;pip._internal.autocomplete()', + "python", + "-c", + "import pip._internal;pip._internal.autocomplete()", expect_error=True, cwd=cwd, ) @@ -104,8 +104,8 @@ def test_completion_for_un_snippet(script): Test getting completion for ``un`` should return 
uninstall """ - res, env = setup_completion(script, 'pip un', '1') - assert res.stdout.strip().split() == ['uninstall'], res.stdout + res, env = setup_completion(script, "pip un", "1") + assert res.stdout.strip().split() == ["uninstall"], res.stdout def test_completion_for_default_parameters(script): @@ -113,8 +113,8 @@ def test_completion_for_default_parameters(script): Test getting completion for ``--`` should contain --help """ - res, env = setup_completion(script, 'pip --', '1') - assert '--help' in res.stdout, "autocomplete function could not complete ``--``" + res, env = setup_completion(script, "pip --", "1") + assert "--help" in res.stdout, "autocomplete function could not complete ``--``" def test_completion_option_for_command(script): @@ -122,8 +122,8 @@ def test_completion_option_for_command(script): Test getting completion for ``--`` in command (e.g. ``pip search --``) """ - res, env = setup_completion(script, 'pip search --', '2') - assert '--help' in res.stdout, "autocomplete function could not complete ``--``" + res, env = setup_completion(script, "pip search --", "2") + assert "--help" in res.stdout, "autocomplete function could not complete ``--``" def test_completion_short_option(script): @@ -131,10 +131,10 @@ def test_completion_short_option(script): Test getting completion for short options after ``-`` (eg. pip -) """ - res, env = setup_completion(script, 'pip -', '1') + res, env = setup_completion(script, "pip -", "1") assert ( - '-h' in res.stdout.split() + "-h" in res.stdout.split() ), "autocomplete function could not complete short options after ``-``" @@ -144,10 +144,10 @@ def test_completion_short_option_for_command(script): (eg. 
pip search -) """ - res, env = setup_completion(script, 'pip search -', '2') + res, env = setup_completion(script, "pip search -", "2") assert ( - '-h' in res.stdout.split() + "-h" in res.stdout.split() ), "autocomplete function could not complete short options after ``-``" @@ -157,28 +157,28 @@ def test_completion_files_after_option(script, data): (e.g. ``pip install -r``) """ res, env = setup_completion( - script=script, words=('pip install -r r'), cword='3', cwd=data.completion_paths - ) - assert 'requirements.txt' in res.stdout, ( - "autocomplete function could not complete after options in command" - ) - assert os.path.join('resources', '') in res.stdout, ( - "autocomplete function could not complete after options in command" + script=script, words=("pip install -r r"), cword="3", cwd=data.completion_paths ) + assert ( + "requirements.txt" in res.stdout + ), "autocomplete function could not complete after options in command" + assert ( + os.path.join("resources", "") in res.stdout + ), "autocomplete function could not complete after options in command" assert not any( - out in res.stdout for out in (os.path.join('REPLAY', ''), 'README.txt') + out in res.stdout for out in (os.path.join("REPLAY", ""), "README.txt") ), ( "autocomplete function completed or that " "should not be completed" ) - if sys.platform != 'win32': + if sys.platform != "win32": return - assert 'readme.txt' in res.stdout, ( - "autocomplete function could not complete after options in command" - ) - assert os.path.join('replay', '') in res.stdout, ( - "autocomplete function could not complete after options in command" - ) + assert ( + "readme.txt" in res.stdout + ), "autocomplete function could not complete after options in command" + assert ( + os.path.join("replay", "") in res.stdout + ), "autocomplete function could not complete after options in command" def test_completion_not_files_after_option(script, data): @@ -187,14 +187,14 @@ def test_completion_not_files_after_option(script, data): 
(e.g. ``pip install``) """ res, env = setup_completion( - script=script, words=('pip install r'), cword='2', cwd=data.completion_paths - ) - assert not any(out in res.stdout for out in ('requirements.txt', 'readme.txt')), ( - "autocomplete function completed when it should not complete" + script=script, words=("pip install r"), cword="2", cwd=data.completion_paths ) assert not any( - os.path.join(out, '') in res.stdout for out in ('replay', 'resources') - ), ("autocomplete function completed when it should not complete") + out in res.stdout for out in ("requirements.txt", "readme.txt") + ), "autocomplete function completed when it should not complete" + assert not any( + os.path.join(out, "") in res.stdout for out in ("replay", "resources") + ), "autocomplete function completed when it should not complete" @pytest.mark.parametrize("cl_opts", ["-U", "--user", "-h"]) @@ -205,16 +205,16 @@ def test_completion_not_files_after_nonexpecting_option(script, data, cl_opts): """ res, env = setup_completion( script=script, - words=('pip install %s r' % cl_opts), - cword='2', + words=("pip install %s r" % cl_opts), + cword="2", cwd=data.completion_paths, ) - assert not any(out in res.stdout for out in ('requirements.txt', 'readme.txt')), ( - "autocomplete function completed when it should not complete" - ) assert not any( - os.path.join(out, '') in res.stdout for out in ('replay', 'resources') - ), ("autocomplete function completed when it should not complete") + out in res.stdout for out in ("requirements.txt", "readme.txt") + ), "autocomplete function completed when it should not complete" + assert not any( + os.path.join(out, "") in res.stdout for out in ("replay", "resources") + ), "autocomplete function completed when it should not complete" def test_completion_directories_after_option(script, data): @@ -223,18 +223,18 @@ def test_completion_directories_after_option(script, data): (e.g. 
``pip --cache-dir``) """ res, env = setup_completion( - script=script, words=('pip --cache-dir r'), cword='2', cwd=data.completion_paths + script=script, words=("pip --cache-dir r"), cword="2", cwd=data.completion_paths ) assert ( - os.path.join('resources', '') in res.stdout + os.path.join("resources", "") in res.stdout ), "autocomplete function could not complete after options" assert not any( out in res.stdout - for out in ('requirements.txt', 'README.txt', os.path.join('REPLAY', '')) - ), ("autocomplete function completed when it should not complete") - if sys.platform == 'win32': + for out in ("requirements.txt", "README.txt", os.path.join("REPLAY", "")) + ), "autocomplete function completed when it should not complete" + if sys.platform == "win32": assert ( - os.path.join('replay', '') in res.stdout + os.path.join("replay", "") in res.stdout ), "autocomplete function could not complete after options" @@ -245,11 +245,11 @@ def test_completion_subdirectories_after_option(script, data): """ res, env = setup_completion( script=script, - words=('pip --cache-dir ' + os.path.join('resources', '')), - cword='2', + words=("pip --cache-dir " + os.path.join("resources", "")), + cword="2", cwd=data.completion_paths, ) - assert os.path.join('resources', os.path.join('images', '')) in res.stdout, ( + assert os.path.join("resources", os.path.join("images", "")) in res.stdout, ( "autocomplete function could not complete " "given path of a directory after options" ) @@ -262,23 +262,23 @@ def test_completion_path_after_option(script, data): """ res, env = setup_completion( script=script, - words=('pip install -e ' + os.path.join(data.completion_paths, 'R')), - cword='3', + words=("pip install -e " + os.path.join(data.completion_paths, "R")), + cword="3", ) assert all( os.path.normcase(os.path.join(data.completion_paths, out)) in res.stdout - for out in ('README.txt', os.path.join('REPLAY', '')) + for out in ("README.txt", os.path.join("REPLAY", "")) ), ( "autocomplete function 
could not complete " "after options in command given absolute path" ) -@pytest.mark.parametrize('flag', ['--bash', '--zsh', '--fish']) +@pytest.mark.parametrize("flag", ["--bash", "--zsh", "--fish"]) def test_completion_uses_same_executable_name(script, flag, deprecated_python): - executable_name = 'pip{}'.format(sys.version_info[0]) + executable_name = "pip{}".format(sys.version_info[0]) # Deprecated python versions produce an extra deprecation warning result = script.run( - executable_name, 'completion', flag, expect_stderr=deprecated_python + executable_name, "completion", flag, expect_stderr=deprecated_python ) assert executable_name in result.stdout diff --git a/tests/functional/test_configuration.py b/tests/functional/test_configuration.py index b9995293075..eb27e014e69 100644 --- a/tests/functional/test_configuration.py +++ b/tests/functional/test_configuration.py @@ -10,7 +10,7 @@ def test_no_options_passed_should_error(script): - result = script.pip('config', expect_error=True) + result = script.pip("config", expect_error=True) assert result.returncode == ERROR diff --git a/tests/functional/test_debug.py b/tests/functional/test_debug.py index 61af3720d22..b16cfc4d5ae 100644 --- a/tests/functional/test_debug.py +++ b/tests/functional/test_debug.py @@ -4,56 +4,56 @@ @pytest.mark.parametrize( - 'expected_text', + "expected_text", [ - 'sys.executable: ', - 'sys.getdefaultencoding: ', - 'sys.getfilesystemencoding: ', - 'locale.getpreferredencoding: ', - 'sys.platform: ', - 'sys.implementation:', + "sys.executable: ", + "sys.getdefaultencoding: ", + "sys.getfilesystemencoding: ", + "locale.getpreferredencoding: ", + "sys.platform: ", + "sys.implementation:", ], ) def test_debug(script, expected_text): """ Check that certain strings are present in the output. 
""" - args = ['debug'] + args = ["debug"] result = script.pip(*args, allow_stderr_warning=True) stdout = result.stdout assert expected_text in stdout -@pytest.mark.parametrize('args', [[], ['--verbose']]) +@pytest.mark.parametrize("args", [[], ["--verbose"]]) def test_debug__tags(script, args): """ Check the compatible tag output. """ - args = ['debug'] + args + args = ["debug"] + args result = script.pip(*args, allow_stderr_warning=True) stdout = result.stdout tags = pep425tags.get_supported() - expected_tag_header = 'Compatible tags: {}'.format(len(tags)) + expected_tag_header = "Compatible tags: {}".format(len(tags)) assert expected_tag_header in stdout - show_verbose_note = '--verbose' not in args + show_verbose_note = "--verbose" not in args assert ( - '...\n [First 10 tags shown. Pass --verbose to show all.]' in stdout + "...\n [First 10 tags shown. Pass --verbose to show all.]" in stdout ) == show_verbose_note @pytest.mark.parametrize( - 'args, expected', [(['--python-version', '3.7'], "(target: version_info='3.7')")] + "args, expected", [(["--python-version", "3.7"], "(target: version_info='3.7')")] ) def test_debug__target_options(script, args, expected): """ Check passing target-related options. 
""" - args = ['debug'] + args + args = ["debug"] + args result = script.pip(*args, allow_stderr_warning=True) stdout = result.stdout - assert 'Compatible tags: ' in stdout + assert "Compatible tags: " in stdout assert expected in stdout diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py index 623ee72f34a..0add19685d7 100644 --- a/tests/functional/test_download.py +++ b/tests/functional/test_download.py @@ -10,7 +10,7 @@ def fake_wheel(data, wheel_path): shutil.copy( - data.packages.joinpath('simple.dist-0.1-py2.py3-none-any.whl'), + data.packages.joinpath("simple.dist-0.1-py2.py3-none-any.whl"), data.packages.joinpath(wheel_path), ) @@ -20,12 +20,12 @@ def test_download_if_requested(script): """ It should download (in the scratch path) and not install if requested. """ - result = script.pip('download', '-d', 'pip_downloads', 'INITools==0.1') + result = script.pip("download", "-d", "pip_downloads", "INITools==0.1") assert ( - Path('scratch') / 'pip_downloads' / 'INITools-0.1.tar.gz' + Path("scratch") / "pip_downloads" / "INITools-0.1.tar.gz" in result.files_created ) - assert script.site_packages / 'initools' not in result.files_created + assert script.site_packages / "initools" not in result.files_created @pytest.mark.network @@ -33,8 +33,8 @@ def test_basic_download_setuptools(script): """ It should download (in the scratch path) and not install if requested. """ - result = script.pip('download', 'setuptools') - setuptools_prefix = str(Path('scratch') / 'setuptools') + result = script.pip("download", "setuptools") + setuptools_prefix = str(Path("scratch") / "setuptools") assert any(path.startswith(setuptools_prefix) for path in result.files_created) @@ -43,10 +43,10 @@ def test_download_wheel(script, data): Test using "pip download" to download a *.whl archive. 
""" result = script.pip( - 'download', '--no-index', '-f', data.packages, '-d', '.', 'meta' + "download", "--no-index", "-f", data.packages, "-d", ".", "meta" ) - assert Path('scratch') / 'meta-1.0-py2.py3-none-any.whl' in result.files_created - assert script.site_packages / 'piptestpackage' not in result.files_created + assert Path("scratch") / "meta-1.0-py2.py3-none-any.whl" in result.files_created + assert script.site_packages / "piptestpackage" not in result.files_created @pytest.mark.network @@ -63,10 +63,10 @@ def test_single_download_from_requirements_file(script): ) ) result = script.pip( - 'download', '-r', script.scratch_path / 'test-req.txt', '-d', '.' + "download", "-r", script.scratch_path / "test-req.txt", "-d", "." ) - assert Path('scratch') / 'INITools-0.1.tar.gz' in result.files_created - assert script.site_packages / 'initools' not in result.files_created + assert Path("scratch") / "INITools-0.1.tar.gz" in result.files_created + assert script.site_packages / "initools" not in result.files_created @pytest.mark.network @@ -74,35 +74,35 @@ def test_basic_download_should_download_dependencies(script): """ It should download dependencies (in the scratch path) """ - result = script.pip('download', 'Paste[openid]==1.7.5.1', '-d', '.') - assert Path('scratch') / 'Paste-1.7.5.1.tar.gz' in result.files_created - openid_tarball_prefix = str(Path('scratch') / 'python-openid-') + result = script.pip("download", "Paste[openid]==1.7.5.1", "-d", ".") + assert Path("scratch") / "Paste-1.7.5.1.tar.gz" in result.files_created + openid_tarball_prefix = str(Path("scratch") / "python-openid-") assert any(path.startswith(openid_tarball_prefix) for path in result.files_created) - assert script.site_packages / 'openid' not in result.files_created + assert script.site_packages / "openid" not in result.files_created def test_download_wheel_archive(script, data): """ It should download a wheel archive path """ - wheel_filename = 'colander-0.9.9-py2.py3-none-any.whl' - 
wheel_path = '/'.join((data.find_links, wheel_filename)) - result = script.pip('download', wheel_path, '-d', '.', '--no-deps') - assert Path('scratch') / wheel_filename in result.files_created + wheel_filename = "colander-0.9.9-py2.py3-none-any.whl" + wheel_path = "/".join((data.find_links, wheel_filename)) + result = script.pip("download", wheel_path, "-d", ".", "--no-deps") + assert Path("scratch") / wheel_filename in result.files_created def test_download_should_download_wheel_deps(script, data): """ It should download dependencies for wheels(in the scratch path) """ - wheel_filename = 'colander-0.9.9-py2.py3-none-any.whl' - dep_filename = 'translationstring-1.1.tar.gz' - wheel_path = '/'.join((data.find_links, wheel_filename)) + wheel_filename = "colander-0.9.9-py2.py3-none-any.whl" + dep_filename = "translationstring-1.1.tar.gz" + wheel_path = "/".join((data.find_links, wheel_filename)) result = script.pip( - 'download', wheel_path, '-d', '.', '--find-links', data.find_links, '--no-index' + "download", wheel_path, "-d", ".", "--find-links", data.find_links, "--no-index" ) - assert Path('scratch') / wheel_filename in result.files_created - assert Path('scratch') / dep_filename in result.files_created + assert Path("scratch") / wheel_filename in result.files_created + assert Path("scratch") / dep_filename in result.files_created @pytest.mark.network @@ -119,10 +119,10 @@ def test_download_should_skip_existing_files(script): ) result = script.pip( - 'download', '-r', script.scratch_path / 'test-req.txt', '-d', '.' + "download", "-r", script.scratch_path / "test-req.txt", "-d", "." 
) - assert Path('scratch') / 'INITools-0.1.tar.gz' in result.files_created - assert script.site_packages / 'initools' not in result.files_created + assert Path("scratch") / "INITools-0.1.tar.gz" in result.files_created + assert script.site_packages / "initools" not in result.files_created # adding second package to test-req.txt script.scratch_path.joinpath("test-req.txt").write_text( @@ -136,13 +136,13 @@ def test_download_should_skip_existing_files(script): # only the second package should be downloaded result = script.pip( - 'download', '-r', script.scratch_path / 'test-req.txt', '-d', '.' + "download", "-r", script.scratch_path / "test-req.txt", "-d", "." ) - openid_tarball_prefix = str(Path('scratch') / 'python-openid-') + openid_tarball_prefix = str(Path("scratch") / "python-openid-") assert any(path.startswith(openid_tarball_prefix) for path in result.files_created) - assert Path('scratch') / 'INITools-0.1.tar.gz' not in result.files_created - assert script.site_packages / 'initools' not in result.files_created - assert script.site_packages / 'openid' not in result.files_created + assert Path("scratch") / "INITools-0.1.tar.gz" not in result.files_created + assert script.site_packages / "initools" not in result.files_created + assert script.site_packages / "openid" not in result.files_created @pytest.mark.network @@ -151,10 +151,10 @@ def test_download_vcs_link(script): It should allow -d flag for vcs links, regression test for issue #798. 
""" result = script.pip( - 'download', '-d', '.', 'git+git://github.com/pypa/pip-test-package.git' + "download", "-d", ".", "git+git://github.com/pypa/pip-test-package.git" ) - assert Path('scratch') / 'pip-test-package-0.1.1.zip' in result.files_created - assert script.site_packages / 'piptestpackage' not in result.files_created + assert Path("scratch") / "pip-test-package-0.1.1.zip" in result.files_created + assert script.site_packages / "piptestpackage" not in result.files_created def test_only_binary_set_then_download_specific_platform(script, data): @@ -162,21 +162,21 @@ def test_only_binary_set_then_download_specific_platform(script, data): Confirm that specifying an interpreter/platform constraint is allowed when ``--only-binary=:all:`` is set. """ - fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl') + fake_wheel(data, "fake-1.0-py2.py3-none-any.whl") result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--platform', - 'linux_x86_64', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--platform", + "linux_x86_64", + "fake", ) - assert Path('scratch') / 'fake-1.0-py2.py3-none-any.whl' in result.files_created + assert Path("scratch") / "fake-1.0-py2.py3-none-any.whl" in result.files_created def test_no_deps_set_then_download_specific_platform(script, data): @@ -184,21 +184,21 @@ def test_no_deps_set_then_download_specific_platform(script, data): Confirm that specifying an interpreter/platform constraint is allowed when ``--no-deps`` is set. 
""" - fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl') + fake_wheel(data, "fake-1.0-py2.py3-none-any.whl") result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--no-deps', - '--dest', - '.', - '--platform', - 'linux_x86_64', - 'fake', + "--no-deps", + "--dest", + ".", + "--platform", + "linux_x86_64", + "fake", ) - assert Path('scratch') / 'fake-1.0-py2.py3-none-any.whl' in result.files_created + assert Path("scratch") / "fake-1.0-py2.py3-none-any.whl" in result.files_created def test_download_specific_platform_fails(script, data): @@ -206,21 +206,21 @@ def test_download_specific_platform_fails(script, data): Confirm that specifying an interpreter/platform constraint enforces that ``--no-deps`` or ``--only-binary=:all:`` is set. """ - fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl') + fake_wheel(data, "fake-1.0-py2.py3-none-any.whl") result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--dest', - '.', - '--platform', - 'linux_x86_64', - 'fake', + "--dest", + ".", + "--platform", + "linux_x86_64", + "fake", expect_error=True, ) - assert '--only-binary=:all:' in result.stderr + assert "--only-binary=:all:" in result.stderr def test_no_binary_set_then_download_specific_platform_fails(script, data): @@ -228,23 +228,23 @@ def test_no_binary_set_then_download_specific_platform_fails(script, data): Confirm that specifying an interpreter/platform constraint enforces that ``--only-binary=:all:`` is set without ``--no-binary``. 
""" - fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl') + fake_wheel(data, "fake-1.0-py2.py3-none-any.whl") result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--no-binary=fake', - '--dest', - '.', - '--platform', - 'linux_x86_64', - 'fake', + "--only-binary=:all:", + "--no-binary=fake", + "--dest", + ".", + "--platform", + "linux_x86_64", + "fake", expect_error=True, ) - assert '--only-binary=:all:' in result.stderr + assert "--only-binary=:all:" in result.stderr def test_download_specify_platform(script, data): @@ -252,102 +252,102 @@ def test_download_specify_platform(script, data): Test using "pip download --platform" to download a .whl archive supported for a specific platform """ - fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl') + fake_wheel(data, "fake-1.0-py2.py3-none-any.whl") # Confirm that universal wheels are returned even for specific # platforms. result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--platform', - 'linux_x86_64', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--platform", + "linux_x86_64", + "fake", ) - assert Path('scratch') / 'fake-1.0-py2.py3-none-any.whl' in result.files_created + assert Path("scratch") / "fake-1.0-py2.py3-none-any.whl" in result.files_created result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--platform', - 'macosx_10_9_x86_64', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--platform", + "macosx_10_9_x86_64", + "fake", ) data.reset() - fake_wheel(data, 'fake-1.0-py2.py3-none-macosx_10_9_x86_64.whl') - fake_wheel(data, 'fake-2.0-py2.py3-none-linux_x86_64.whl') + fake_wheel(data, "fake-1.0-py2.py3-none-macosx_10_9_x86_64.whl") + 
fake_wheel(data, "fake-2.0-py2.py3-none-linux_x86_64.whl") result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--platform', - 'macosx_10_10_x86_64', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--platform", + "macosx_10_10_x86_64", + "fake", ) assert ( - Path('scratch') / 'fake-1.0-py2.py3-none-macosx_10_9_x86_64.whl' + Path("scratch") / "fake-1.0-py2.py3-none-macosx_10_9_x86_64.whl" in result.files_created ) # OSX platform wheels are not backward-compatible. result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--platform', - 'macosx_10_8_x86_64', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--platform", + "macosx_10_8_x86_64", + "fake", expect_error=True, ) # No linux wheel provided for this version. result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--platform', - 'linux_x86_64', - 'fake==1', + "--only-binary=:all:", + "--dest", + ".", + "--platform", + "linux_x86_64", + "fake==1", expect_error=True, ) result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--platform', - 'linux_x86_64', - 'fake==2', + "--only-binary=:all:", + "--dest", + ".", + "--platform", + "linux_x86_64", + "fake==2", ) assert ( - Path('scratch') / 'fake-2.0-py2.py3-none-linux_x86_64.whl' + Path("scratch") / "fake-2.0-py2.py3-none-linux_x86_64.whl" in result.files_created ) @@ -365,20 +365,20 @@ def test_download_universal(self, platform, script, data): """ Universal wheels are returned even for specific platforms. 
""" - fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl') + fake_wheel(data, "fake-1.0-py2.py3-none-any.whl") result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--platform', + "--only-binary=:all:", + "--dest", + ".", + "--platform", platform, - 'fake', + "fake", ) - assert Path('scratch') / 'fake-1.0-py2.py3-none-any.whl' in result.files_created + assert Path("scratch") / "fake-1.0-py2.py3-none-any.whl" in result.files_created @pytest.mark.parametrize( "wheel_abi,platform", @@ -392,21 +392,21 @@ def test_download_compatible_manylinuxes(self, wheel_abi, platform, script, data """ Earlier manylinuxes are compatible with later manylinuxes. """ - wheel = 'fake-1.0-py2.py3-none-{}.whl'.format(wheel_abi) + wheel = "fake-1.0-py2.py3-none-{}.whl".format(wheel_abi) fake_wheel(data, wheel) result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--platform', + "--only-binary=:all:", + "--dest", + ".", + "--platform", platform, - 'fake', + "fake", ) - assert Path('scratch') / wheel in result.files_created + assert Path("scratch") / wheel in result.files_created def test_explicit_platform_only(self, data, script): """ @@ -414,18 +414,18 @@ def test_explicit_platform_only(self, data, script): explicit platform--it won't ever be added to the compatible tags. 
""" - fake_wheel(data, 'fake-1.0-py2.py3-none-linux_x86_64.whl') + fake_wheel(data, "fake-1.0-py2.py3-none-linux_x86_64.whl") script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--platform', - 'linux_x86_64', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--platform", + "linux_x86_64", + "fake", ) @@ -434,119 +434,119 @@ def test_download__python_version(script, data): Test using "pip download --python-version" to download a .whl archive supported for a specific interpreter """ - fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl') + fake_wheel(data, "fake-1.0-py2.py3-none-any.whl") result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--python-version', - '2', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--python-version", + "2", + "fake", ) - assert Path('scratch') / 'fake-1.0-py2.py3-none-any.whl' in result.files_created + assert Path("scratch") / "fake-1.0-py2.py3-none-any.whl" in result.files_created result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--python-version', - '3', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--python-version", + "3", + "fake", ) result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--python-version', - '27', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--python-version", + "27", + "fake", ) result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--python-version', - '33', 
- 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--python-version", + "33", + "fake", ) data.reset() - fake_wheel(data, 'fake-1.0-py2-none-any.whl') - fake_wheel(data, 'fake-2.0-py3-none-any.whl') + fake_wheel(data, "fake-1.0-py2-none-any.whl") + fake_wheel(data, "fake-2.0-py3-none-any.whl") # No py3 provided for version 1. result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--python-version', - '3', - 'fake==1.0', + "--only-binary=:all:", + "--dest", + ".", + "--python-version", + "3", + "fake==1.0", expect_error=True, ) result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--python-version', - '2', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--python-version", + "2", + "fake", ) - assert Path('scratch') / 'fake-1.0-py2-none-any.whl' in result.files_created + assert Path("scratch") / "fake-1.0-py2-none-any.whl" in result.files_created result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--python-version', - '26', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--python-version", + "26", + "fake", ) result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--python-version', - '3', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--python-version", + "3", + "fake", ) - assert Path('scratch') / 'fake-2.0-py3-none-any.whl' in result.files_created + assert Path("scratch") / "fake-2.0-py3-none-any.whl" in result.files_created def make_wheel_with_python_requires(script, package_name, python_requires): @@ -566,11 +566,11 @@ def 
make_wheel_with_python_requires(script, package_name, python_requires): version='1.0') """ ).format(package_name, python_requires) - package_dir.joinpath('setup.py').write_text(text) - script.run('python', 'setup.py', 'bdist_wheel', '--universal', cwd=package_dir) + package_dir.joinpath("setup.py").write_text(text) + script.run("python", "setup.py", "bdist_wheel", "--universal", cwd=package_dir) - file_name = '{}-1.0-py2.py3-none-any.whl'.format(package_name) - return package_dir / 'dist' / file_name + file_name = "{}-1.0-py2.py3-none-any.whl".format(package_name) + return package_dir / "dist" / file_name def test_download__python_version_used_for_python_requires(script, data, with_wheel): @@ -578,34 +578,34 @@ def test_download__python_version_used_for_python_requires(script, data, with_wh Test that --python-version is used for the Requires-Python check. """ wheel_path = make_wheel_with_python_requires( - script, 'mypackage', python_requires='==3.2' + script, "mypackage", python_requires="==3.2" ) wheel_dir = os.path.dirname(wheel_path) def make_args(python_version): return [ - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", wheel_dir, - '--only-binary=:all:', - '--dest', - '.', - '--python-version', + "--only-binary=:all:", + "--dest", + ".", + "--python-version", python_version, - 'mypackage==1.0', + "mypackage==1.0", ] - args = make_args('33') + args = make_args("33") result = script.pip(*args, expect_error=True) expected_err = ( "ERROR: Package 'mypackage' requires a different Python: " "3.3.0 not in '==3.2'" ) - assert expected_err in result.stderr, 'stderr: {}'.format(result.stderr) + assert expected_err in result.stderr, "stderr: {}".format(result.stderr) # Now try with a --python-version that satisfies the Requires-Python. 
- args = make_args('32') + args = make_args("32") script.pip(*args) # no exception @@ -614,91 +614,91 @@ def test_download_specify_abi(script, data): Test using "pip download --abi" to download a .whl archive supported for a specific abi """ - fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl') + fake_wheel(data, "fake-1.0-py2.py3-none-any.whl") result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--implementation', - 'fk', - '--abi', - 'fake_abi', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--implementation", + "fk", + "--abi", + "fake_abi", + "fake", ) - assert Path('scratch') / 'fake-1.0-py2.py3-none-any.whl' in result.files_created + assert Path("scratch") / "fake-1.0-py2.py3-none-any.whl" in result.files_created result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--implementation', - 'fk', - '--abi', - 'none', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--implementation", + "fk", + "--abi", + "none", + "fake", ) result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--abi', - 'cp27m', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--abi", + "cp27m", + "fake", ) data.reset() - fake_wheel(data, 'fake-1.0-fk2-fakeabi-fake_platform.whl') + fake_wheel(data, "fake-1.0-fk2-fakeabi-fake_platform.whl") result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--python-version', - '2', - '--implementation', - 'fk', - '--platform', - 'fake_platform', - '--abi', - 'fakeabi', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + 
"--python-version", + "2", + "--implementation", + "fk", + "--platform", + "fake_platform", + "--abi", + "fakeabi", + "fake", ) assert ( - Path('scratch') / 'fake-1.0-fk2-fakeabi-fake_platform.whl' + Path("scratch") / "fake-1.0-fk2-fakeabi-fake_platform.whl" in result.files_created ) result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--implementation', - 'fk', - '--platform', - 'fake_platform', - '--abi', - 'none', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--implementation", + "fk", + "--platform", + "fake_platform", + "--abi", + "none", + "fake", expect_error=True, ) @@ -708,68 +708,68 @@ def test_download_specify_implementation(script, data): Test using "pip download --abi" to download a .whl archive supported for a specific abi """ - fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl') + fake_wheel(data, "fake-1.0-py2.py3-none-any.whl") result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--implementation', - 'fk', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--implementation", + "fk", + "fake", ) - assert Path('scratch') / 'fake-1.0-py2.py3-none-any.whl' in result.files_created + assert Path("scratch") / "fake-1.0-py2.py3-none-any.whl" in result.files_created data.reset() - fake_wheel(data, 'fake-1.0-fk2.fk3-none-any.whl') + fake_wheel(data, "fake-1.0-fk2.fk3-none-any.whl") result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--implementation', - 'fk', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--implementation", + "fk", + "fake", ) - assert Path('scratch') / 'fake-1.0-fk2.fk3-none-any.whl' in result.files_created + assert Path("scratch") / 
"fake-1.0-fk2.fk3-none-any.whl" in result.files_created data.reset() - fake_wheel(data, 'fake-1.0-fk3-none-any.whl') + fake_wheel(data, "fake-1.0-fk3-none-any.whl") result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--implementation', - 'fk', - '--python-version', - '3', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--implementation", + "fk", + "--python-version", + "3", + "fake", ) - assert Path('scratch') / 'fake-1.0-fk3-none-any.whl' in result.files_created + assert Path("scratch") / "fake-1.0-fk3-none-any.whl" in result.files_created result = script.pip( - 'download', - '--no-index', - '--find-links', + "download", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--dest', - '.', - '--implementation', - 'fk', - '--python-version', - '2', - 'fake', + "--only-binary=:all:", + "--dest", + ".", + "--implementation", + "fk", + "--python-version", + "2", + "fake", expect_error=True, ) @@ -778,7 +778,7 @@ def test_download_exit_status_code_when_no_requirements(script): """ Test download exit status code when no requirements specified """ - result = script.pip('download', expect_error=True) + result = script.pip("download", expect_error=True) assert "You must give at least one requirement to download" in result.stderr assert result.returncode == ERROR @@ -788,27 +788,27 @@ def test_download_exit_status_code_when_blank_requirements_file(script): Test download exit status code when blank requirements file specified """ script.scratch_path.joinpath("blank.txt").write_text("\n") - script.pip('download', '-r', 'blank.txt') + script.pip("download", "-r", "blank.txt") def test_download_prefer_binary_when_tarball_higher_than_wheel(script, data): - fake_wheel(data, 'source-0.8-py2.py3-none-any.whl') + fake_wheel(data, "source-0.8-py2.py3-none-any.whl") result = script.pip( - 'download', - '--prefer-binary', - 
'--no-index', - '-f', + "download", + "--prefer-binary", + "--no-index", + "-f", data.packages, - '-d', - '.', - 'source', + "-d", + ".", + "source", ) - assert Path('scratch') / 'source-0.8-py2.py3-none-any.whl' in result.files_created - assert Path('scratch') / 'source-1.0.tar.gz' not in result.files_created + assert Path("scratch") / "source-0.8-py2.py3-none-any.whl" in result.files_created + assert Path("scratch") / "source-1.0.tar.gz" not in result.files_created def test_download_prefer_binary_when_wheel_doesnt_satisfy_req(script, data): - fake_wheel(data, 'source-0.8-py2.py3-none-any.whl') + fake_wheel(data, "source-0.8-py2.py3-none-any.whl") script.scratch_path.joinpath("test-req.txt").write_text( textwrap.dedent( """ @@ -818,31 +818,31 @@ def test_download_prefer_binary_when_wheel_doesnt_satisfy_req(script, data): ) result = script.pip( - 'download', - '--prefer-binary', - '--no-index', - '-f', + "download", + "--prefer-binary", + "--no-index", + "-f", data.packages, - '-d', - '.', - '-r', - script.scratch_path / 'test-req.txt', + "-d", + ".", + "-r", + script.scratch_path / "test-req.txt", ) - assert Path('scratch') / 'source-1.0.tar.gz' in result.files_created + assert Path("scratch") / "source-1.0.tar.gz" in result.files_created assert ( - Path('scratch') / 'source-0.8-py2.py3-none-any.whl' not in result.files_created + Path("scratch") / "source-0.8-py2.py3-none-any.whl" not in result.files_created ) def test_download_prefer_binary_when_only_tarball_exists(script, data): result = script.pip( - 'download', - '--prefer-binary', - '--no-index', - '-f', + "download", + "--prefer-binary", + "--no-index", + "-f", data.packages, - '-d', - '.', - 'source', + "-d", + ".", + "source", ) - assert Path('scratch') / 'source-1.0.tar.gz' in result.files_created + assert Path("scratch") / "source-1.0.tar.gz" in result.files_created diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py index 5155773b772..d21fb00f3c5 100644 --- 
a/tests/functional/test_freeze.py +++ b/tests/functional/test_freeze.py @@ -15,7 +15,7 @@ path_to_url, ) -distribute_re = re.compile('^distribute==[0-9.]+\n', re.MULTILINE) +distribute_re = re.compile("^distribute==[0-9.]+\n", re.MULTILINE) def _check_output(result, expected): @@ -30,18 +30,18 @@ def _check_output(result, expected): # but then you have to remember to upcase . The right # thing to do in the end is probably to find out how to report # the proper fully-cased package name in our error message. - if sys.platform == 'win32': - actual = actual.replace('initools', 'INITools') + if sys.platform == "win32": + actual = actual.replace("initools", "INITools") # This allows our existing tests to work when run in a context # with distribute installed. - actual = distribute_re.sub('', actual) + actual = distribute_re.sub("", actual) def banner(msg): - return '\n========== %s ==========\n' % msg + return "\n========== %s ==========\n" % msg assert checker.check_output(expected, actual, ELLIPSIS), ( - banner('EXPECTED') + expected + banner('ACTUAL') + actual + banner(6 * '=') + banner("EXPECTED") + expected + banner("ACTUAL") + actual + banner(6 * "=") ) @@ -63,8 +63,8 @@ def test_basic_freeze(script): """ ) ) - script.pip_install_local('-r', script.scratch_path / 'initools-req.txt') - result = script.pip('freeze', expect_stderr=True) + script.pip_install_local("-r", script.scratch_path / "initools-req.txt") + result = script.pip("freeze", expect_stderr=True) expected = textwrap.dedent( """\ ...simple==2.0 @@ -76,8 +76,8 @@ def test_basic_freeze(script): def test_freeze_with_pip(script): """Test pip shows itself""" - result = script.pip('freeze', '--all') - assert 'pip==' in result.stdout + result = script.pip("freeze", "--all") + assert "pip==" in result.stdout def test_freeze_with_invalid_names(script): @@ -88,11 +88,11 @@ def test_freeze_with_invalid_names(script): def fake_install(pkgname, dest): egg_info_path = os.path.join( dest, - 
'{}-1.0-py{}.{}.egg-info'.format( - pkgname.replace('-', '_'), sys.version_info[0], sys.version_info[1] + "{}-1.0-py{}.{}.egg-info".format( + pkgname.replace("-", "_"), sys.version_info[0], sys.version_info[1] ), ) - with open(egg_info_path, 'w') as egg_info_file: + with open(egg_info_path, "w") as egg_info_file: egg_info_file.write( textwrap.dedent( """\ @@ -105,24 +105,24 @@ def fake_install(pkgname, dest): ) ) - valid_pkgnames = ('middle-dash', 'middle_underscore', 'middle.dot') + valid_pkgnames = ("middle-dash", "middle_underscore", "middle.dot") invalid_pkgnames = ( - '-leadingdash', - '_leadingunderscore', - '.leadingdot', - 'trailingdash-', - 'trailingunderscore_', - 'trailingdot.', + "-leadingdash", + "_leadingunderscore", + ".leadingdot", + "trailingdash-", + "trailingunderscore_", + "trailingdot.", ) for pkgname in valid_pkgnames + invalid_pkgnames: fake_install(pkgname, script.site_packages_path) - result = script.pip('freeze', expect_stderr=True) + result = script.pip("freeze", expect_stderr=True) for pkgname in valid_pkgnames: - _check_output(result.stdout, '...{}==1.0...'.format(pkgname.replace('_', '-'))) + _check_output(result.stdout, "...{}==1.0...".format(pkgname.replace("_", "-"))) for pkgname in invalid_pkgnames: # Check that the full distribution repr is present. - dist_repr = '{} 1.0 ('.format(pkgname.replace('_', '-')) - expected = '...Could not generate requirement for distribution {}...'.format( + dist_repr = "{} 1.0 (".format(pkgname.replace("_", "-")) + expected = "...Could not generate requirement for distribution {}...".format( dist_repr ) _check_output(result.stderr, expected) @@ -142,9 +142,9 @@ def test_freeze_editable_not_vcs(script, tmpdir): pkg_path = _create_test_package(script) # Rename the .git directory so the directory is no longer recognized # as a VCS directory. 
- os.rename(os.path.join(pkg_path, '.git'), os.path.join(pkg_path, '.bak')) - script.pip('install', '-e', pkg_path) - result = script.pip('freeze') + os.rename(os.path.join(pkg_path, ".git"), os.path.join(pkg_path, ".bak")) + script.pip("install", "-e", pkg_path) + result = script.pip("freeze") # We need to apply os.path.normcase() to the path since that is what # the freeze code does. @@ -165,11 +165,11 @@ def test_freeze_editable_git_with_no_remote(script, tmpdir, deprecated_python): Test an editable Git install with no remote url. """ pkg_path = _create_test_package(script) - script.pip('install', '-e', pkg_path) - result = script.pip('freeze') + script.pip("install", "-e", pkg_path) + result = script.pip("freeze") if not deprecated_python: - assert result.stderr == '' + assert result.stderr == "" # We need to apply os.path.normcase() to the path since that is what # the freeze code does. @@ -188,11 +188,11 @@ def test_freeze_editable_git_with_no_remote(script, tmpdir, deprecated_python): def test_freeze_svn(script, tmpdir): """Test freezing a svn checkout""" - checkout_path = _create_test_package(script, vcs='svn') + checkout_path = _create_test_package(script, vcs="svn") # Install with develop - script.run('python', 'setup.py', 'develop', cwd=checkout_path, expect_stderr=True) - result = script.pip('freeze', expect_stderr=True) + script.run("python", "setup.py", "develop", cwd=checkout_path, expect_stderr=True) + result = script.pip("freeze", expect_stderr=True) expected = textwrap.dedent( """\ ...-e svn+...#egg=version_pkg @@ -211,13 +211,13 @@ def test_freeze_exclude_editable(script, tmpdir): pkg_version = _create_test_package(script) result = script.run( - 'git', 'clone', pkg_version, 'pip-test-package', expect_stderr=True + "git", "clone", pkg_version, "pip-test-package", expect_stderr=True ) - repo_dir = script.scratch_path / 'pip-test-package' + repo_dir = script.scratch_path / "pip-test-package" result = script.run( - 'python', 'setup.py', 'develop', 
cwd=repo_dir, expect_stderr=True + "python", "setup.py", "develop", cwd=repo_dir, expect_stderr=True ) - result = script.pip('freeze', '--exclude-editable', expect_stderr=True) + result = script.pip("freeze", "--exclude-editable", expect_stderr=True) expected = textwrap.dedent( """ ...-e git+...#egg=version_pkg @@ -236,13 +236,13 @@ def test_freeze_git_clone(script, tmpdir): pkg_version = _create_test_package(script) result = script.run( - 'git', 'clone', pkg_version, 'pip-test-package', expect_stderr=True + "git", "clone", pkg_version, "pip-test-package", expect_stderr=True ) - repo_dir = script.scratch_path / 'pip-test-package' + repo_dir = script.scratch_path / "pip-test-package" result = script.run( - 'python', 'setup.py', 'develop', cwd=repo_dir, expect_stderr=True + "python", "setup.py", "develop", cwd=repo_dir, expect_stderr=True ) - result = script.pip('freeze', expect_stderr=True) + result = script.pip("freeze", expect_stderr=True) expected = textwrap.dedent( """ ...-e git+...#egg=version_pkg @@ -252,7 +252,7 @@ def test_freeze_git_clone(script, tmpdir): _check_output(result.stdout, expected) result = script.pip( - 'freeze', '-f', '%s#egg=pip_test_package' % repo_dir, expect_stderr=True + "freeze", "-f", "%s#egg=pip_test_package" % repo_dir, expect_stderr=True ) expected = textwrap.dedent( """ @@ -260,27 +260,27 @@ def test_freeze_git_clone(script, tmpdir): -e git+...#egg=version_pkg ... """ - % {'repo': repo_dir} + % {"repo": repo_dir} ).strip() _check_output(result.stdout, expected) # Check that slashes in branch or tag names are translated. # See also issue #1083: https://github.com/pypa/pip/issues/1083 script.run( - 'git', - 'checkout', - '-b', - 'branch/name/with/slash', + "git", + "checkout", + "-b", + "branch/name/with/slash", cwd=repo_dir, expect_stderr=True, ) # Create a new commit to ensure that the commit has only one branch # or tag name associated to it (to avoid the non-determinism reported # in issue #1867). 
- script.run('touch', 'newfile', cwd=repo_dir) - script.run('git', 'add', 'newfile', cwd=repo_dir) - _git_commit(script, repo_dir, message='...') - result = script.pip('freeze', expect_stderr=True) + script.run("touch", "newfile", cwd=repo_dir) + script.run("git", "add", "newfile", cwd=repo_dir) + _git_commit(script, repo_dir, message="...") + result = script.pip("freeze", expect_stderr=True) expected = textwrap.dedent( """ ...-e ...@...#egg=version_pkg @@ -301,13 +301,13 @@ def test_freeze_git_clone_srcdir(script, tmpdir): pkg_version = _create_test_package_with_srcdir(script) result = script.run( - 'git', 'clone', pkg_version, 'pip-test-package', expect_stderr=True + "git", "clone", pkg_version, "pip-test-package", expect_stderr=True ) - repo_dir = script.scratch_path / 'pip-test-package' + repo_dir = script.scratch_path / "pip-test-package" result = script.run( - 'python', 'setup.py', 'develop', cwd=repo_dir / 'subdir', expect_stderr=True + "python", "setup.py", "develop", cwd=repo_dir / "subdir", expect_stderr=True ) - result = script.pip('freeze', expect_stderr=True) + result = script.pip("freeze", expect_stderr=True) expected = textwrap.dedent( """ ...-e git+...#egg=version_pkg&subdirectory=subdir @@ -317,7 +317,7 @@ def test_freeze_git_clone_srcdir(script, tmpdir): _check_output(result.stdout, expected) result = script.pip( - 'freeze', '-f', '%s#egg=pip_test_package' % repo_dir, expect_stderr=True + "freeze", "-f", "%s#egg=pip_test_package" % repo_dir, expect_stderr=True ) expected = textwrap.dedent( """ @@ -325,7 +325,7 @@ def test_freeze_git_clone_srcdir(script, tmpdir): -e git+...#egg=version_pkg&subdirectory=subdir ... 
""" - % {'repo': repo_dir} + % {"repo": repo_dir} ).strip() _check_output(result.stdout, expected) @@ -339,16 +339,16 @@ def test_freeze_git_remote(script, tmpdir): pkg_version = _create_test_package(script) result = script.run( - 'git', 'clone', pkg_version, 'pip-test-package', expect_stderr=True + "git", "clone", pkg_version, "pip-test-package", expect_stderr=True ) - repo_dir = script.scratch_path / 'pip-test-package' + repo_dir = script.scratch_path / "pip-test-package" result = script.run( - 'python', 'setup.py', 'develop', cwd=repo_dir, expect_stderr=True + "python", "setup.py", "develop", cwd=repo_dir, expect_stderr=True ) origin_remote = pkg_version - other_remote = pkg_version + '-other' + other_remote = pkg_version + "-other" # check frozen remote after clone - result = script.pip('freeze', expect_stderr=True) + result = script.pip("freeze", expect_stderr=True) expected = ( textwrap.dedent( """ @@ -361,9 +361,9 @@ def test_freeze_git_remote(script, tmpdir): ) _check_output(result.stdout, expected) # check frozen remote when there is no remote named origin - script.run('git', 'remote', 'remove', 'origin', cwd=repo_dir) - script.run('git', 'remote', 'add', 'other', other_remote, cwd=repo_dir) - result = script.pip('freeze', expect_stderr=True) + script.run("git", "remote", "remove", "origin", cwd=repo_dir) + script.run("git", "remote", "add", "other", other_remote, cwd=repo_dir) + result = script.pip("freeze", expect_stderr=True) expected = ( textwrap.dedent( """ @@ -377,8 +377,8 @@ def test_freeze_git_remote(script, tmpdir): _check_output(result.stdout, expected) # when there are more than one origin, priority is given to the # remote named origin - script.run('git', 'remote', 'add', 'origin', origin_remote, cwd=repo_dir) - result = script.pip('freeze', expect_stderr=True) + script.run("git", "remote", "add", "origin", origin_remote, cwd=repo_dir) + result = script.pip("freeze", expect_stderr=True) expected = ( textwrap.dedent( """ @@ -399,16 +399,16 @@ 
def test_freeze_mercurial_clone(script, tmpdir): """ # Returns path to a generated package called "version_pkg" - pkg_version = _create_test_package(script, vcs='hg') + pkg_version = _create_test_package(script, vcs="hg") result = script.run( - 'hg', 'clone', pkg_version, 'pip-test-package', expect_stderr=True + "hg", "clone", pkg_version, "pip-test-package", expect_stderr=True ) - repo_dir = script.scratch_path / 'pip-test-package' + repo_dir = script.scratch_path / "pip-test-package" result = script.run( - 'python', 'setup.py', 'develop', cwd=repo_dir, expect_stderr=True + "python", "setup.py", "develop", cwd=repo_dir, expect_stderr=True ) - result = script.pip('freeze', expect_stderr=True) + result = script.pip("freeze", expect_stderr=True) expected = textwrap.dedent( """ ...-e hg+...#egg=version_pkg @@ -418,7 +418,7 @@ def test_freeze_mercurial_clone(script, tmpdir): _check_output(result.stdout, expected) result = script.pip( - 'freeze', '-f', '%s#egg=pip_test_package' % repo_dir, expect_stderr=True + "freeze", "-f", "%s#egg=pip_test_package" % repo_dir, expect_stderr=True ) expected = textwrap.dedent( """ @@ -426,7 +426,7 @@ def test_freeze_mercurial_clone(script, tmpdir): ...-e hg+...#egg=version_pkg ... 
""" - % {'repo': repo_dir} + % {"repo": repo_dir} ).strip() _check_output(result.stdout, expected) @@ -438,19 +438,19 @@ def test_freeze_bazaar_clone(script, tmpdir): """ try: - checkout_path = _create_test_package(script, vcs='bazaar') + checkout_path = _create_test_package(script, vcs="bazaar") except OSError as e: - pytest.fail('Invoking `bzr` failed: %s' % e) + pytest.fail("Invoking `bzr` failed: %s" % e) - result = script.run('bzr', 'checkout', checkout_path, 'bzr-package') + result = script.run("bzr", "checkout", checkout_path, "bzr-package") result = script.run( - 'python', - 'setup.py', - 'develop', - cwd=script.scratch_path / 'bzr-package', + "python", + "setup.py", + "develop", + cwd=script.scratch_path / "bzr-package", expect_stderr=True, ) - result = script.pip('freeze', expect_stderr=True) + result = script.pip("freeze", expect_stderr=True) expected = textwrap.dedent( """\ ...-e bzr+file://...@1#egg=version_pkg @@ -459,14 +459,14 @@ def test_freeze_bazaar_clone(script, tmpdir): _check_output(result.stdout, expected) result = script.pip( - 'freeze', '-f', '%s/#egg=django-wikiapp' % checkout_path, expect_stderr=True + "freeze", "-f", "%s/#egg=django-wikiapp" % checkout_path, expect_stderr=True ) expected = textwrap.dedent( """\ -f %(repo)s/#egg=django-wikiapp ...-e bzr+file://...@...#egg=version_pkg ...""" - % {'repo': checkout_path} + % {"repo": checkout_path} ) _check_output(result.stdout, expected) @@ -499,15 +499,15 @@ def test_freeze_with_requirement_option_file_url_egg_not_installed( is not installed. 
""" - url = path_to_url('my-package.tar.gz') + '#egg=Does.Not-Exist' - requirements_path = script.scratch_path.joinpath('requirements.txt') - requirements_path.write_text(url + '\n') + url = path_to_url("my-package.tar.gz") + "#egg=Does.Not-Exist" + requirements_path = script.scratch_path.joinpath("requirements.txt") + requirements_path.write_text(url + "\n") result = script.pip( - 'freeze', '--requirement', 'requirements.txt', expect_stderr=True + "freeze", "--requirement", "requirements.txt", expect_stderr=True ) expected_err = ( - 'WARNING: Requirement file [requirements.txt] contains {}, ' + "WARNING: Requirement file [requirements.txt] contains {}, " "but package 'Does.Not-Exist' is not installed\n" ).format(url) if deprecated_python: @@ -532,9 +532,9 @@ def test_freeze_with_requirement_option(script): ) + _freeze_req_opts ) - result = script.pip_install_local('initools==0.2') - result = script.pip_install_local('simple') - result = script.pip('freeze', '--requirement', 'hint.txt', expect_stderr=True) + result = script.pip_install_local("initools==0.2") + result = script.pip_install_local("simple") + result = script.pip("freeze", "--requirement", "hint.txt", expect_stderr=True) expected = textwrap.dedent( """\ INITools==0.2 @@ -556,7 +556,7 @@ def test_freeze_with_requirement_option_multiple(script): --requirement hints """ - script.scratch_path.joinpath('hint1.txt').write_text( + script.scratch_path.joinpath("hint1.txt").write_text( textwrap.dedent( """\ INITools==0.1 @@ -566,7 +566,7 @@ def test_freeze_with_requirement_option_multiple(script): ) + _freeze_req_opts ) - script.scratch_path.joinpath('hint2.txt').write_text( + script.scratch_path.joinpath("hint2.txt").write_text( textwrap.dedent( """\ NoExist2==2.0 @@ -575,16 +575,16 @@ def test_freeze_with_requirement_option_multiple(script): ) + _freeze_req_opts ) - result = script.pip_install_local('initools==0.2') - result = script.pip_install_local('simple') - result = 
script.pip_install_local('simple2==1.0') - result = script.pip_install_local('meta') + result = script.pip_install_local("initools==0.2") + result = script.pip_install_local("simple") + result = script.pip_install_local("simple2==1.0") + result = script.pip_install_local("meta") result = script.pip( - 'freeze', - '--requirement', - 'hint1.txt', - '--requirement', - 'hint2.txt', + "freeze", + "--requirement", + "hint1.txt", + "--requirement", + "hint2.txt", expect_stderr=True, ) expected = textwrap.dedent( @@ -600,7 +600,7 @@ def test_freeze_with_requirement_option_multiple(script): """ ) expected += "## The following requirements were added by pip freeze:" - expected += '\n' + textwrap.dedent( + expected += "\n" + textwrap.dedent( """\ ...meta==1.0... """ @@ -624,7 +624,7 @@ def test_freeze_with_requirement_option_package_repeated_one_file(script): Test freezing with single requirements file that contains a package multiple times """ - script.scratch_path.joinpath('hint1.txt').write_text( + script.scratch_path.joinpath("hint1.txt").write_text( textwrap.dedent( """\ simple2 @@ -634,9 +634,9 @@ def test_freeze_with_requirement_option_package_repeated_one_file(script): ) + _freeze_req_opts ) - result = script.pip_install_local('simple2==1.0') - result = script.pip_install_local('meta') - result = script.pip('freeze', '--requirement', 'hint1.txt', expect_stderr=True) + result = script.pip_install_local("simple2==1.0") + result = script.pip_install_local("meta") + result = script.pip("freeze", "--requirement", "hint1.txt", expect_stderr=True) expected_out = textwrap.dedent( """\ simple2==1.0 @@ -644,7 +644,7 @@ def test_freeze_with_requirement_option_package_repeated_one_file(script): ) expected_out += _freeze_req_opts expected_out += "## The following requirements were added by pip freeze:" - expected_out += '\n' + textwrap.dedent( + expected_out += "\n" + textwrap.dedent( """\ ...meta==1.0... 
""" @@ -658,14 +658,14 @@ def test_freeze_with_requirement_option_package_repeated_one_file(script): assert err1 in result.stderr assert err2 in result.stderr # there shouldn't be any other 'is not installed' warnings - assert result.stderr.count('is not installed') == 1 + assert result.stderr.count("is not installed") == 1 def test_freeze_with_requirement_option_package_repeated_multi_file(script): """ Test freezing with multiple requirements file that contain a package """ - script.scratch_path.joinpath('hint1.txt').write_text( + script.scratch_path.joinpath("hint1.txt").write_text( textwrap.dedent( """\ simple @@ -673,7 +673,7 @@ def test_freeze_with_requirement_option_package_repeated_multi_file(script): ) + _freeze_req_opts ) - script.scratch_path.joinpath('hint2.txt').write_text( + script.scratch_path.joinpath("hint2.txt").write_text( textwrap.dedent( """\ simple @@ -682,14 +682,14 @@ def test_freeze_with_requirement_option_package_repeated_multi_file(script): ) + _freeze_req_opts ) - result = script.pip_install_local('simple==1.0') - result = script.pip_install_local('meta') + result = script.pip_install_local("simple==1.0") + result = script.pip_install_local("meta") result = script.pip( - 'freeze', - '--requirement', - 'hint1.txt', - '--requirement', - 'hint2.txt', + "freeze", + "--requirement", + "hint1.txt", + "--requirement", + "hint2.txt", expect_stderr=True, ) expected_out = textwrap.dedent( @@ -699,7 +699,7 @@ def test_freeze_with_requirement_option_package_repeated_multi_file(script): ) expected_out += _freeze_req_opts expected_out += "## The following requirements were added by pip freeze:" - expected_out += '\n' + textwrap.dedent( + expected_out += "\n" + textwrap.dedent( """\ ...meta==1.0... 
""" @@ -714,7 +714,7 @@ def test_freeze_with_requirement_option_package_repeated_multi_file(script): assert err1 in result.stderr assert err2 in result.stderr # there shouldn't be any other 'is not installed' warnings - assert result.stderr.count('is not installed') == 1 + assert result.stderr.count("is not installed") == 1 @pytest.mark.network @@ -722,17 +722,17 @@ def test_freeze_user(script, virtualenv, data): """ Testing freeze with --user, first we have to install some stuff. """ - script.pip('download', 'setuptools', 'wheel', '-d', data.packages) - script.pip_install_local('--find-links', data.find_links, '--user', 'simple==2.0') - script.pip_install_local('--find-links', data.find_links, 'simple2==3.0') - result = script.pip('freeze', '--user', expect_stderr=True) + script.pip("download", "setuptools", "wheel", "-d", data.packages) + script.pip_install_local("--find-links", data.find_links, "--user", "simple==2.0") + script.pip_install_local("--find-links", data.find_links, "simple2==3.0") + result = script.pip("freeze", "--user", expect_stderr=True) expected = textwrap.dedent( """\ simple==2.0 """ ) _check_output(result.stdout, expected) - assert 'simple2' not in result.stdout + assert "simple2" not in result.stdout def test_freeze_path(tmpdir, script, data): @@ -740,9 +740,9 @@ def test_freeze_path(tmpdir, script, data): Test freeze with --path. """ script.pip( - 'install', '--find-links', data.find_links, '--target', tmpdir, 'simple==2.0' + "install", "--find-links", data.find_links, "--target", tmpdir, "simple==2.0" ) - result = script.pip('freeze', '--path', tmpdir) + result = script.pip("freeze", "--path", tmpdir) expected = textwrap.dedent( """\ simple==2.0 @@ -756,18 +756,18 @@ def test_freeze_path_exclude_user(tmpdir, script, data): Test freeze with --path and make sure packages from --user are not picked up. 
""" - script.pip_install_local('--find-links', data.find_links, '--user', 'simple2') + script.pip_install_local("--find-links", data.find_links, "--user", "simple2") script.pip( - 'install', '--find-links', data.find_links, '--target', tmpdir, 'simple==1.0' + "install", "--find-links", data.find_links, "--target", tmpdir, "simple==1.0" ) - result = script.pip('freeze', '--user') + result = script.pip("freeze", "--user") expected = textwrap.dedent( """\ simple2==3.0 """ ) _check_output(result.stdout, expected) - result = script.pip('freeze', '--path', tmpdir) + result = script.pip("freeze", "--path", tmpdir) expected = textwrap.dedent( """\ simple==1.0 @@ -785,19 +785,19 @@ def test_freeze_path_multiple(tmpdir, script, data): path2 = tmpdir / "path2" os.mkdir(path2) script.pip( - 'install', '--find-links', data.find_links, '--target', path1, 'simple==2.0' + "install", "--find-links", data.find_links, "--target", path1, "simple==2.0" ) script.pip( - 'install', '--find-links', data.find_links, '--target', path2, 'simple2==3.0' + "install", "--find-links", data.find_links, "--target", path2, "simple2==3.0" ) - result = script.pip('freeze', '--path', path1) + result = script.pip("freeze", "--path", path1) expected = textwrap.dedent( """\ simple==2.0 """ ) _check_output(result.stdout, expected) - result = script.pip('freeze', '--path', path1, '--path', path2) + result = script.pip("freeze", "--path", path1, "--path", path2) expected = textwrap.dedent( """\ simple==2.0 diff --git a/tests/functional/test_hash.py b/tests/functional/test_hash.py index c7f88a88387..f6dd8b2a3d5 100644 --- a/tests/functional/test_hash.py +++ b/tests/functional/test_hash.py @@ -4,34 +4,34 @@ def test_basic_hash(script, tmpdir): """Run 'pip hash' through its default behavior.""" expected = ( - '--hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425' - 'e73043362938b9824' + "--hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425" + "e73043362938b9824" ) - result = script.pip('hash', 
_hello_file(tmpdir)) + result = script.pip("hash", _hello_file(tmpdir)) assert expected in str(result) def test_good_algo_option(script, tmpdir): """Make sure the -a option works.""" expected = ( - '--hash=sha512:9b71d224bd62f3785d96d46ad3ea3d73319bfbc2890caad' - 'ae2dff72519673ca72323c3d99ba5c11d7c7acc6e14b8c5da0c4663475c2e' - '5c3adef46f73bcdec043' + "--hash=sha512:9b71d224bd62f3785d96d46ad3ea3d73319bfbc2890caad" + "ae2dff72519673ca72323c3d99ba5c11d7c7acc6e14b8c5da0c4663475c2e" + "5c3adef46f73bcdec043" ) - result = script.pip('hash', '-a', 'sha512', _hello_file(tmpdir)) + result = script.pip("hash", "-a", "sha512", _hello_file(tmpdir)) assert expected in str(result) def test_bad_algo_option(script, tmpdir): """Make sure the -a option raises an error when given a bad operand.""" result = script.pip( - 'hash', '-a', 'invalidname', _hello_file(tmpdir), expect_error=True + "hash", "-a", "invalidname", _hello_file(tmpdir), expect_error=True ) assert "invalid choice: 'invalidname'" in str(result) def _hello_file(tmpdir): """Return a temp file to hash containing "hello".""" - file = tmpdir / 'hashable' - file.write_text('hello') + file = tmpdir / "hashable" + file.write_text("hello") return file diff --git a/tests/functional/test_help.py b/tests/functional/test_help.py index f04f9c87380..45f1801b831 100644 --- a/tests/functional/test_help.py +++ b/tests/functional/test_help.py @@ -11,8 +11,8 @@ def test_run_method_should_return_success_when_finds_command_name(): Test HelpCommand.run for existing command """ options_mock = Mock() - args = ('freeze',) - help_cmd = create_command('help') + args = ("freeze",) + help_cmd = create_command("help") status = help_cmd.run(options_mock, args) assert status == SUCCESS @@ -23,7 +23,7 @@ def test_run_method_should_return_success_when_command_name_not_specified(): """ options_mock = Mock() args = () - help_cmd = create_command('help') + help_cmd = create_command("help") status = help_cmd.run(options_mock, args) assert status == 
SUCCESS @@ -33,8 +33,8 @@ def test_run_method_should_raise_command_error_when_command_does_not_exist(): Test HelpCommand.run for non-existing command """ options_mock = Mock() - args = ('mycommand',) - help_cmd = create_command('help') + args = ("mycommand",) + help_cmd = create_command("help") with pytest.raises(CommandError): help_cmd.run(options_mock, args) @@ -44,7 +44,7 @@ def test_help_command_should_exit_status_ok_when_command_exists(script): """ Test `help` command for existing command """ - result = script.pip('help', 'freeze') + result = script.pip("help", "freeze") assert result.returncode == SUCCESS @@ -52,7 +52,7 @@ def test_help_command_should_exit_status_ok_when_no_cmd_is_specified(script): """ Test `help` command for no command """ - result = script.pip('help') + result = script.pip("help") assert result.returncode == SUCCESS @@ -60,7 +60,7 @@ def test_help_command_should_exit_status_error_when_cmd_does_not_exist(script): """ Test `help` command for non-existing command """ - result = script.pip('help', 'mycommand', expect_error=True) + result = script.pip("help", "mycommand", expect_error=True) assert result.returncode == ERROR @@ -68,20 +68,20 @@ def test_help_commands_equally_functional(in_memory_pip): """ Test if `pip help` and 'pip --help' behave the same way. 
""" - results = list(map(in_memory_pip.pip, ('help', '--help'))) + results = list(map(in_memory_pip.pip, ("help", "--help"))) results.append(in_memory_pip.pip()) out = map(lambda x: x.stdout, results) ret = map(lambda x: x.returncode, results) msg = '"pip --help" != "pip help" != "pip"' - assert len(set(out)) == 1, 'output of: ' + msg - assert sum(ret) == 0, 'exit codes of: ' + msg + assert len(set(out)) == 1, "output of: " + msg + assert sum(ret) == 0, "exit codes of: " + msg assert all(len(o) > 0 for o in out) for name in commands_dict: assert ( - in_memory_pip.pip('help', name).stdout - == in_memory_pip.pip(name, '--help').stdout + in_memory_pip.pip("help", name).stdout + == in_memory_pip.pip(name, "--help").stdout != "" ) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 99bd945e515..cbf3cf8111e 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -34,23 +34,23 @@ skip_if_not_python2 = pytest.mark.skipif(not PY2, reason="Python 2 only") -@pytest.mark.parametrize('command', ('install', 'wheel')) -@pytest.mark.parametrize('variant', ('missing_setuptools', 'bad_setuptools')) +@pytest.mark.parametrize("command", ("install", "wheel")) +@pytest.mark.parametrize("variant", ("missing_setuptools", "bad_setuptools")) def test_pep518_uses_build_env(script, data, common_wheels, command, variant): - if variant == 'missing_setuptools': + if variant == "missing_setuptools": script.pip("uninstall", "-y", "setuptools") - elif variant == 'bad_setuptools': + elif variant == "bad_setuptools": setuptools_mod = script.site_packages_path.joinpath("setuptools.py") - with open(setuptools_mod, 'a') as f: + with open(setuptools_mod, "a") as f: f.write('\nraise ImportError("toto")') else: raise ValueError(variant) script.pip( command, - '--no-index', - '-f', + "--no-index", + "-f", common_wheels, - '-f', + "-f", data.packages, data.src.joinpath("pep518-3.0"), ) @@ -62,16 +62,16 @@ def 
test_pep518_build_env_uses_same_pip( """Ensure the subprocess call to pip for installing the build dependencies is using the same version of pip. """ - with open(script.scratch_path / 'pip.py', 'w') as fp: - fp.write('raise ImportError') + with open(script.scratch_path / "pip.py", "w") as fp: + fp.write("raise ImportError") script.run( - 'python', - pip_src / 'src/pip', - 'install', - '--no-index', - '-f', + "python", + pip_src / "src/pip", + "install", + "--no-index", + "-f", common_wheels, - '-f', + "-f", data.packages, data.src.joinpath("pep518-3.0"), expect_stderr=deprecated_python, @@ -79,18 +79,18 @@ def test_pep518_build_env_uses_same_pip( def test_pep518_refuses_conflicting_requires(script, data): - create_basic_wheel_for_package(script, 'setuptools', '1.0') - create_basic_wheel_for_package(script, 'wheel', '1.0') + create_basic_wheel_for_package(script, "setuptools", "1.0") + create_basic_wheel_for_package(script, "wheel", "1.0") project_dir = data.src.joinpath("pep518_conflicting_requires") result = script.pip_install_local( - '-f', script.scratch_path, project_dir, expect_error=True + "-f", script.scratch_path, project_dir, expect_error=True ) assert ( result.returncode != 0 and ( - 'Some build dependencies for %s conflict with PEP 517/518 supported ' - 'requirements: setuptools==1.0 is incompatible with ' - 'setuptools>=40.8.0.' % path_to_url(project_dir) + "Some build dependencies for %s conflict with PEP 517/518 supported " + "requirements: setuptools==1.0 is incompatible with " + "setuptools>=40.8.0." 
% path_to_url(project_dir) ) in result.stderr ), str(result) @@ -98,8 +98,8 @@ def test_pep518_refuses_conflicting_requires(script, data): def test_pep518_refuses_invalid_requires(script, data, common_wheels): result = script.pip( - 'install', - '-f', + "install", + "-f", common_wheels, data.src.joinpath("pep518_invalid_requires"), expect_error=True, @@ -110,8 +110,8 @@ def test_pep518_refuses_invalid_requires(script, data, common_wheels): def test_pep518_refuses_invalid_build_system(script, data, common_wheels): result = script.pip( - 'install', - '-f', + "install", + "-f", common_wheels, data.src.joinpath("pep518_invalid_build_system"), expect_error=True, @@ -122,8 +122,8 @@ def test_pep518_refuses_invalid_build_system(script, data, common_wheels): def test_pep518_allows_missing_requires(script, data, common_wheels): result = script.pip( - 'install', - '-f', + "install", + "-f", common_wheels, data.src.joinpath("pep518_missing_requires"), expect_stderr=True, @@ -149,17 +149,17 @@ def test_pep518_with_user_pip(script, pip_src, data, common_wheels): so that isolated uses of pip will fail. 
""" script.pip("install", "--ignore-installed", "-f", common_wheels, "--user", pip_src) - system_pip_dir = script.site_packages_path / 'pip' + system_pip_dir = script.site_packages_path / "pip" assert not system_pip_dir.exists() system_pip_dir.mkdir() - with open(system_pip_dir / '__init__.py', 'w') as fp: - fp.write('raise ImportError\n') + with open(system_pip_dir / "__init__.py", "w") as fp: + fp.write("raise ImportError\n") script.pip( - 'wheel', - '--no-index', - '-f', + "wheel", + "--no-index", + "-f", common_wheels, - '-f', + "-f", data.packages, data.src.joinpath("pep518-3.0"), ) @@ -167,11 +167,11 @@ def test_pep518_with_user_pip(script, pip_src, data, common_wheels): def test_pep518_with_extra_and_markers(script, data, common_wheels): script.pip( - 'wheel', - '--no-index', - '-f', + "wheel", + "--no-index", + "-f", common_wheels, - '-f', + "-f", data.find_links, data.src.joinpath("pep518_with_extra_and_markers-1.0"), ) @@ -179,11 +179,11 @@ def test_pep518_with_extra_and_markers(script, data, common_wheels): def test_pep518_with_namespace_package(script, data, common_wheels): script.pip( - 'wheel', - '--no-index', - '-f', + "wheel", + "--no-index", + "-f", common_wheels, - '-f', + "-f", data.find_links, data.src.joinpath("pep518_with_namespace_package-1.0"), use_module=True, @@ -191,26 +191,26 @@ def test_pep518_with_namespace_package(script, data, common_wheels): @pytest.mark.timeout(60) -@pytest.mark.parametrize('command', ('install', 'wheel')) +@pytest.mark.parametrize("command", ("install", "wheel")) @pytest.mark.parametrize( - 'package', - ('pep518_forkbomb', 'pep518_twin_forkbombs_first', 'pep518_twin_forkbombs_second'), + "package", + ("pep518_forkbomb", "pep518_twin_forkbombs_first", "pep518_twin_forkbombs_second"), ) def test_pep518_forkbombs(script, data, common_wheels, command, package): - package_source = next(data.packages.glob(package + '-[0-9]*.tar.gz')) + package_source = next(data.packages.glob(package + "-[0-9]*.tar.gz")) result = 
script.pip( command, - '--no-index', - '-v', - '-f', + "--no-index", + "-v", + "-f", common_wheels, - '-f', + "-f", data.find_links, package, expect_error=True, ) assert ( - '{1} is already being built: {0} from {1}'.format( + "{1} is already being built: {0} from {1}".format( package, path_to_url(package_source) ) in result.stderr @@ -225,19 +225,19 @@ def test_pip_second_command_line_interface_works( Check if ``pip`` commands behaves equally """ # Re-install pip so we get the launchers. - script.pip_install_local('-f', common_wheels, pip_src) + script.pip_install_local("-f", common_wheels, pip_src) # On old versions of Python, urllib3/requests will raise a warning about # the lack of an SSLContext. - kwargs = {'expect_stderr': deprecated_python} + kwargs = {"expect_stderr": deprecated_python} if pyversion_tuple < (2, 7, 9): - kwargs['expect_stderr'] = True + kwargs["expect_stderr"] = True - args = ['pip%s' % pyversion] - args.extend(['install', 'INITools==0.2']) - args.extend(['-f', data.packages]) + args = ["pip%s" % pyversion] + args.extend(["install", "INITools==0.2"]) + args.extend(["-f", data.packages]) result = script.run(*args, **kwargs) - egg_info_folder = script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion - initools_folder = script.site_packages / 'initools' + egg_info_folder = script.site_packages / "INITools-0.2-py%s.egg-info" % pyversion + initools_folder = script.site_packages / "initools" assert egg_info_folder in result.files_created, str(result) assert initools_folder in result.files_created, str(result) @@ -246,7 +246,7 @@ def test_install_exit_status_code_when_no_requirements(script): """ Test install exit status code when no requirements specified """ - result = script.pip('install', expect_error=True) + result = script.pip("install", expect_error=True) assert "You must give at least one requirement to install" in result.stderr assert result.returncode == ERROR @@ -256,7 +256,7 @@ def 
test_install_exit_status_code_when_blank_requirements_file(script): Test install exit status code when blank requirements file specified """ script.scratch_path.joinpath("blank.txt").write_text("\n") - script.pip('install', '-r', 'blank.txt') + script.pip("install", "-r", "blank.txt") @pytest.mark.network @@ -264,9 +264,9 @@ def test_basic_install_from_pypi(script): """ Test installing a package from PyPI. """ - result = script.pip('install', '-vvv', 'INITools==0.2') - egg_info_folder = script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion - initools_folder = script.site_packages / 'initools' + result = script.pip("install", "-vvv", "INITools==0.2") + egg_info_folder = script.site_packages / "INITools-0.2-py%s.egg-info" % pyversion + initools_folder = script.site_packages / "initools" assert egg_info_folder in result.files_created, str(result) assert initools_folder in result.files_created, str(result) @@ -279,7 +279,7 @@ def test_basic_editable_install(script): """ Test editable installation. 
""" - result = script.pip('install', '-e', 'INITools==0.2', expect_error=True) + result = script.pip("install", "-e", "INITools==0.2", expect_error=True) assert "INITools==0.2 is not a valid editable requirement" in result.stderr assert not result.files_created assert not result.files_updated @@ -292,16 +292,16 @@ def test_basic_install_editable_from_svn(script): """ checkout_path = _create_test_package(script) repo_url = _create_svn_repo(script, checkout_path) - result = script.pip('install', '-e', 'svn+' + repo_url + '#egg=version-pkg') - result.assert_installed('version-pkg', with_files=['.svn']) + result = script.pip("install", "-e", "svn+" + repo_url + "#egg=version-pkg") + result.assert_installed("version-pkg", with_files=[".svn"]) def _test_install_editable_from_git(script, tmpdir): """Test cloning from Git.""" - pkg_path = _create_test_package(script, name='testpackage', vcs='git') - args = ['install', '-e', 'git+%s#egg=testpackage' % path_to_url(pkg_path)] + pkg_path = _create_test_package(script, name="testpackage", vcs="git") + args = ["install", "-e", "git+%s#egg=testpackage" % path_to_url(pkg_path)] result = script.pip(*args) - result.assert_installed('testpackage', with_files=['.git']) + result.assert_installed("testpackage", with_files=[".git"]) def test_basic_install_editable_from_git(script, tmpdir): @@ -322,19 +322,19 @@ def test_install_editable_uninstalls_existing(data, script, tmpdir): """ to_install = data.packages.joinpath("pip-test-package-0.1.tar.gz") result = script.pip_install_local(to_install) - assert 'Successfully installed pip-test-package' in result.stdout - result.assert_installed('piptestpackage', editable=False) + assert "Successfully installed pip-test-package" in result.stdout + result.assert_installed("piptestpackage", editable=False) result = script.pip( - 'install', - '-e', - '%s#egg=pip-test-package' - % local_checkout('git+https://github.com/pypa/pip-test-package.git', tmpdir), + "install", + "-e", + 
"%s#egg=pip-test-package" + % local_checkout("git+https://github.com/pypa/pip-test-package.git", tmpdir), ) - result.assert_installed('pip-test-package', with_files=['.git']) - assert 'Found existing installation: pip-test-package 0.1' in result.stdout - assert 'Uninstalling pip-test-package-' in result.stdout - assert 'Successfully uninstalled pip-test-package' in result.stdout + result.assert_installed("pip-test-package", with_files=[".git"]) + assert "Found existing installation: pip-test-package 0.1" in result.stdout + assert "Uninstalling pip-test-package-" in result.stdout + assert "Successfully uninstalled pip-test-package" in result.stdout def test_install_editable_uninstalls_existing_from_path(script, data): @@ -342,29 +342,29 @@ def test_install_editable_uninstalls_existing_from_path(script, data): Test that installing an editable uninstalls a previously installed non-editable version from path """ - to_install = data.src.joinpath('simplewheel-1.0') + to_install = data.src.joinpath("simplewheel-1.0") result = script.pip_install_local(to_install) - assert 'Successfully installed simplewheel' in result.stdout - simple_folder = script.site_packages / 'simplewheel' - result.assert_installed('simplewheel', editable=False) + assert "Successfully installed simplewheel" in result.stdout + simple_folder = script.site_packages / "simplewheel" + result.assert_installed("simplewheel", editable=False) assert simple_folder in result.files_created, str(result.stdout) - result = script.pip('install', '-e', to_install) - install_path = script.site_packages / 'simplewheel.egg-link' + result = script.pip("install", "-e", to_install) + install_path = script.site_packages / "simplewheel.egg-link" assert install_path in result.files_created, str(result) - assert 'Found existing installation: simplewheel 1.0' in result.stdout - assert 'Uninstalling simplewheel-' in result.stdout - assert 'Successfully uninstalled simplewheel' in result.stdout + assert "Found existing 
installation: simplewheel 1.0" in result.stdout + assert "Uninstalling simplewheel-" in result.stdout + assert "Successfully uninstalled simplewheel" in result.stdout assert simple_folder in result.files_deleted, str(result.stdout) @need_mercurial def test_basic_install_editable_from_hg(script, tmpdir): """Test cloning from Mercurial.""" - pkg_path = _create_test_package(script, name='testpackage', vcs='hg') - args = ['install', '-e', 'hg+%s#egg=testpackage' % path_to_url(pkg_path)] + pkg_path = _create_test_package(script, name="testpackage", vcs="hg") + args = ["install", "-e", "hg+%s#egg=testpackage" % path_to_url(pkg_path)] result = script.pip(*args) - result.assert_installed('testpackage', with_files=['.hg']) + result.assert_installed("testpackage", with_files=[".hg"]) @need_mercurial @@ -372,19 +372,19 @@ def test_vcs_url_final_slash_normalization(script, tmpdir): """ Test that presence or absence of final slash in VCS URL is normalized. """ - pkg_path = _create_test_package(script, name='testpackage', vcs='hg') - args = ['install', '-e', 'hg+%s/#egg=testpackage' % path_to_url(pkg_path)] + pkg_path = _create_test_package(script, name="testpackage", vcs="hg") + args = ["install", "-e", "hg+%s/#egg=testpackage" % path_to_url(pkg_path)] result = script.pip(*args) - result.assert_installed('testpackage', with_files=['.hg']) + result.assert_installed("testpackage", with_files=[".hg"]) @need_bzr def test_install_editable_from_bazaar(script, tmpdir): """Test checking out from Bazaar.""" - pkg_path = _create_test_package(script, name='testpackage', vcs='bazaar') - args = ['install', '-e', 'bzr+%s/#egg=testpackage' % path_to_url(pkg_path)] + pkg_path = _create_test_package(script, name="testpackage", vcs="bazaar") + args = ["install", "-e", "bzr+%s/#egg=testpackage" % path_to_url(pkg_path)] result = script.pip(*args) - result.assert_installed('testpackage', with_files=['.bzr']) + result.assert_installed("testpackage", with_files=[".bzr"]) @pytest.mark.network @@ 
-394,12 +394,12 @@ def test_vcs_url_urlquote_normalization(script, tmpdir): Test that urlquoted characters are normalized for repo URL comparison. """ script.pip( - 'install', - '-e', - '%s/#egg=django-wikiapp' + "install", + "-e", + "%s/#egg=django-wikiapp" % local_checkout( - 'bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp' - '/release-0.1', + "bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp" + "/release-0.1", tmpdir, ), ) @@ -410,9 +410,9 @@ def test_basic_install_from_local_directory(script, data): Test installing from a local directory. """ to_install = data.packages.joinpath("FSPkg") - result = script.pip('install', to_install, expect_error=False) - fspkg_folder = script.site_packages / 'fspkg' - egg_info_folder = script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion + result = script.pip("install", to_install, expect_error=False) + fspkg_folder = script.site_packages / "fspkg" + egg_info_folder = script.site_packages / "FSPkg-0.1.dev0-py%s.egg-info" % pyversion assert fspkg_folder in result.files_created, str(result.stdout) assert egg_info_folder in result.files_created, str(result) @@ -421,28 +421,28 @@ def test_basic_install_relative_directory(script, data): """ Test installing a requirement using a relative path. """ - egg_info_file = script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion - egg_link_file = script.site_packages / 'FSPkg.egg-link' - package_folder = script.site_packages / 'fspkg' + egg_info_file = script.site_packages / "FSPkg-0.1.dev0-py%s.egg-info" % pyversion + egg_link_file = script.site_packages / "FSPkg.egg-link" + package_folder = script.site_packages / "fspkg" # Compute relative install path to FSPkg from scratch path. 
- full_rel_path = data.packages.joinpath('FSPkg') - script.scratch_path - full_rel_url = 'file:' + full_rel_path.replace(os.path.sep, '/') + '#egg=FSPkg' + full_rel_path = data.packages.joinpath("FSPkg") - script.scratch_path + full_rel_url = "file:" + full_rel_path.replace(os.path.sep, "/") + "#egg=FSPkg" embedded_rel_path = script.scratch_path.joinpath(full_rel_path) # For each relative path, install as either editable or not using either # URLs with egg links or not. for req_path in (full_rel_path, full_rel_url, embedded_rel_path): # Regular install. - result = script.pip('install', req_path, cwd=script.scratch_path) + result = script.pip("install", req_path, cwd=script.scratch_path) assert egg_info_file in result.files_created, str(result) assert package_folder in result.files_created, str(result) - script.pip('uninstall', '-y', 'fspkg') + script.pip("uninstall", "-y", "fspkg") # Editable install. - result = script.pip('install', '-e' + req_path, cwd=script.scratch_path) + result = script.pip("install", "-e" + req_path, cwd=script.scratch_path) assert egg_link_file in result.files_created, str(result) - script.pip('uninstall', '-y', 'fspkg') + script.pip("uninstall", "-y", "fspkg") def test_install_quiet(script, data): @@ -454,7 +454,7 @@ def test_install_quiet(script, data): # https://github.com/pypa/pip/issues/3418 # https://github.com/docker-library/python/issues/83 to_install = data.packages.joinpath("FSPkg") - result = script.pip('install', '-qqq', to_install, expect_error=False) + result = script.pip("install", "-qqq", to_install, expect_error=False) assert result.stdout == "" assert result.stderr == "" @@ -468,15 +468,15 @@ def test_hashed_install_success(script, data, tmpdir): scenes). 
""" - file_url = path_to_url((data.packages / 'simple-1.0.tar.gz').abspath) + file_url = path_to_url((data.packages / "simple-1.0.tar.gz").abspath) with requirements_file( - 'simple2==1.0 --hash=sha256:9336af72ca661e6336eb87bc7de3e8844d853e' - '3848c2b9bbd2e8bf01db88c2c7\n' - '{simple} --hash=sha256:393043e672415891885c9a2a0929b1af95fb866d6c' - 'a016b42d2e6ce53619b653'.format(simple=file_url), + "simple2==1.0 --hash=sha256:9336af72ca661e6336eb87bc7de3e8844d853e" + "3848c2b9bbd2e8bf01db88c2c7\n" + "{simple} --hash=sha256:393043e672415891885c9a2a0929b1af95fb866d6c" + "a016b42d2e6ce53619b653".format(simple=file_url), tmpdir, ) as reqs_file: - script.pip_install_local('-r', reqs_file.abspath, expect_error=False) + script.pip_install_local("-r", reqs_file.abspath, expect_error=False) def test_hashed_install_failure(script, tmpdir): @@ -488,11 +488,11 @@ def test_hashed_install_failure(script, tmpdir): """ with requirements_file( - 'simple2==1.0 --hash=sha256:9336af72ca661e6336eb87b' - 'c7de3e8844d853e3848c2b9bbd2e8bf01db88c2c\n', + "simple2==1.0 --hash=sha256:9336af72ca661e6336eb87b" + "c7de3e8844d853e3848c2b9bbd2e8bf01db88c2c\n", tmpdir, ) as reqs_file: - result = script.pip_install_local('-r', reqs_file.abspath, expect_error=True) + result = script.pip_install_local("-r", reqs_file.abspath, expect_error=True) assert len(result.files_created) == 0 @@ -501,10 +501,10 @@ def test_install_from_local_directory_with_symlinks_to_directories(script, data) Test installing from a local directory containing symlinks to directories. 
""" to_install = data.packages.joinpath("symlinks") - result = script.pip('install', to_install, expect_error=False) - pkg_folder = script.site_packages / 'symlinks' + result = script.pip("install", to_install, expect_error=False) + pkg_folder = script.site_packages / "symlinks" egg_info_folder = ( - script.site_packages / 'symlinks-0.1.dev0-py%s.egg-info' % pyversion + script.site_packages / "symlinks-0.1.dev0-py%s.egg-info" % pyversion ) assert pkg_folder in result.files_created, str(result.stdout) assert egg_info_folder in result.files_created, str(result) @@ -535,7 +535,7 @@ def test_install_from_local_directory_with_no_setup_py(script, data): """ Test installing from a local directory with no 'setup.py'. """ - result = script.pip('install', data.root, expect_error=True) + result = script.pip("install", data.root, expect_error=True) assert not result.files_created assert "is not installable." in result.stderr assert "Neither 'setup.py' nor 'pyproject.toml' found." in result.stderr @@ -545,7 +545,7 @@ def test_editable_install__local_dir_no_setup_py(script, data, deprecated_python """ Test installing in editable mode from a local directory with no setup.py. """ - result = script.pip('install', '-e', data.root, expect_error=True) + result = script.pip("install", "-e", data.root, expect_error=True) assert not result.files_created msg = result.stderr @@ -553,7 +553,7 @@ def test_editable_install__local_dir_no_setup_py(script, data, deprecated_python assert 'File "setup.py" not found. ' in msg else: assert msg.startswith('ERROR: File "setup.py" not found. ') - assert 'pyproject.toml' not in msg + assert "pyproject.toml" not in msg def test_editable_install__local_dir_no_setup_py_with_pyproject( @@ -563,11 +563,11 @@ def test_editable_install__local_dir_no_setup_py_with_pyproject( Test installing in editable mode from a local directory with no setup.py but that does have pyproject.toml. 
""" - local_dir = script.scratch_path.joinpath('temp').mkdir() - pyproject_path = local_dir.joinpath('pyproject.toml') - pyproject_path.write_text('') + local_dir = script.scratch_path.joinpath("temp").mkdir() + pyproject_path = local_dir.joinpath("pyproject.toml") + pyproject_path.write_text("") - result = script.pip('install', '-e', local_dir, expect_error=True) + result = script.pip("install", "-e", local_dir, expect_error=True) assert not result.files_created msg = result.stderr @@ -588,7 +588,7 @@ def test_install_argparse_shadowed(script): # bad. # XXX: Note, this test hits the outside-environment check, not the # in-stdlib check, because our tests run in virtualenvs... - result = script.pip('install', 'argparse>=1.4') + result = script.pip("install", "argparse>=1.4") assert "Not uninstalling argparse" in result.stdout @@ -597,8 +597,8 @@ def test_install_argparse_shadowed(script): def test_upgrade_argparse_shadowed(script): # If argparse is installed - even if shadowed for imported - we support # upgrading it and properly remove the older versions files. 
- script.pip('install', 'argparse==1.3') - result = script.pip('install', 'argparse>=1.4') + script.pip("install", "argparse==1.3") + result = script.pip("install", "argparse>=1.4") assert "Not uninstalling argparse" not in result.stdout @@ -611,9 +611,9 @@ def test_install_curdir(script, data): egg_info = join(run_from, "FSPkg.egg-info") if os.path.isdir(egg_info): rmtree(egg_info) - result = script.pip('install', curdir, cwd=run_from, expect_error=False) - fspkg_folder = script.site_packages / 'fspkg' - egg_info_folder = script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion + result = script.pip("install", curdir, cwd=run_from, expect_error=False) + fspkg_folder = script.site_packages / "fspkg" + egg_info_folder = script.site_packages / "FSPkg-0.1.dev0-py%s.egg-info" % pyversion assert fspkg_folder in result.files_created, str(result.stdout) assert egg_info_folder in result.files_created, str(result) @@ -623,9 +623,9 @@ def test_install_pardir(script, data): Test installing parent directory ('..'). 
""" run_from = data.packages.joinpath("FSPkg", "fspkg") - result = script.pip('install', pardir, cwd=run_from, expect_error=False) - fspkg_folder = script.site_packages / 'fspkg' - egg_info_folder = script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion + result = script.pip("install", pardir, cwd=run_from, expect_error=False) + fspkg_folder = script.site_packages / "fspkg" + egg_info_folder = script.site_packages / "FSPkg-0.1.dev0-py%s.egg-info" % pyversion assert fspkg_folder in result.files_created, str(result.stdout) assert egg_info_folder in result.files_created, str(result) @@ -637,9 +637,9 @@ def test_install_global_option(script): (In particular those that disable the actual install action) """ result = script.pip( - 'install', '--global-option=--version', "INITools==0.1", expect_stderr=True + "install", "--global-option=--version", "INITools==0.1", expect_stderr=True ) - assert 'INITools==0.1\n' in result.stdout + assert "INITools==0.1\n" in result.stdout def test_install_with_hacked_egg_info(script, data): @@ -647,8 +647,8 @@ def test_install_with_hacked_egg_info(script, data): test installing a package which defines its own egg_info class """ run_from = data.packages.joinpath("HackedEggInfo") - result = script.pip('install', '.', cwd=run_from) - assert 'Successfully installed hackedegginfo-0.0.0\n' in result.stdout + result = script.pip("install", ".", cwd=run_from) + assert "Successfully installed hackedegginfo-0.0.0\n" in result.stdout @pytest.mark.network @@ -656,18 +656,18 @@ def test_install_using_install_option_and_editable(script, tmpdir): """ Test installing a tool using -e and --install-option """ - folder = 'script_folder' + folder = "script_folder" script.scratch_path.joinpath(folder).mkdir() - url = 'git+git://github.com/pypa/pip-test-package' + url = "git+git://github.com/pypa/pip-test-package" result = script.pip( - 'install', - '-e', - '%s#egg=pip-test-package' % local_checkout(url, tmpdir), - '--install-option=--script-dir=%s' 
% folder, + "install", + "-e", + "%s#egg=pip-test-package" % local_checkout(url, tmpdir), + "--install-option=--script-dir=%s" % folder, expect_stderr=True, ) script_file = ( - script.venv / 'src' / 'pip-test-package' / folder / 'pip-test-package' + script.venv / "src" / "pip-test-package" / folder / "pip-test-package" + script.exe ) assert script_file in result.files_created @@ -679,15 +679,15 @@ def test_install_global_option_using_editable(script, tmpdir): """ Test using global distutils options, but in an editable installation """ - url = 'hg+http://bitbucket.org/runeh/anyjson' + url = "hg+http://bitbucket.org/runeh/anyjson" result = script.pip( - 'install', - '--global-option=--version', - '-e', - '%s@0.2.5#egg=anyjson' % local_checkout(url, tmpdir), + "install", + "--global-option=--version", + "-e", + "%s@0.2.5#egg=anyjson" % local_checkout(url, tmpdir), expect_stderr=True, ) - assert 'Successfully installed anyjson' in result.stdout + assert "Successfully installed anyjson" in result.stdout @pytest.mark.network @@ -696,16 +696,16 @@ def test_install_package_with_same_name_in_curdir(script): Test installing a package with the same name of a local folder """ script.scratch_path.joinpath("mock==0.6").mkdir() - result = script.pip('install', 'mock==0.6') - egg_folder = script.site_packages / 'mock-0.6.0-py%s.egg-info' % pyversion + result = script.pip("install", "mock==0.6") + egg_folder = script.site_packages / "mock-0.6.0-py%s.egg-info" % pyversion assert egg_folder in result.files_created, str(result) mock100_setup_py = textwrap.dedent( - '''\ + """\ from setuptools import setup setup(name='mock', - version='100.1')''' + version='100.1')""" ) @@ -714,10 +714,10 @@ def test_install_folder_using_dot_slash(script): Test installing a folder using pip install ./foldername """ script.scratch_path.joinpath("mock").mkdir() - pkg_path = script.scratch_path / 'mock' + pkg_path = script.scratch_path / "mock" pkg_path.joinpath("setup.py").write_text(mock100_setup_py) - 
result = script.pip('install', './mock') - egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion + result = script.pip("install", "./mock") + egg_folder = script.site_packages / "mock-100.1-py%s.egg-info" % pyversion assert egg_folder in result.files_created, str(result) @@ -726,10 +726,10 @@ def test_install_folder_using_slash_in_the_end(script): Test installing a folder using pip install foldername/ or foldername\ """ script.scratch_path.joinpath("mock").mkdir() - pkg_path = script.scratch_path / 'mock' + pkg_path = script.scratch_path / "mock" pkg_path.joinpath("setup.py").write_text(mock100_setup_py) - result = script.pip('install', 'mock' + os.path.sep) - egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion + result = script.pip("install", "mock" + os.path.sep) + egg_folder = script.site_packages / "mock-100.1-py%s.egg-info" % pyversion assert egg_folder in result.files_created, str(result) @@ -739,10 +739,10 @@ def test_install_folder_using_relative_path(script): """ script.scratch_path.joinpath("initools").mkdir() script.scratch_path.joinpath("initools", "mock").mkdir() - pkg_path = script.scratch_path / 'initools' / 'mock' + pkg_path = script.scratch_path / "initools" / "mock" pkg_path.joinpath("setup.py").write_text(mock100_setup_py) - result = script.pip('install', Path('initools') / 'mock') - egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion + result = script.pip("install", Path("initools") / "mock") + egg_folder = script.site_packages / "mock-100.1-py%s.egg-info" % pyversion assert egg_folder in result.files_created, str(result) @@ -751,10 +751,10 @@ def test_install_package_which_contains_dev_in_name(script): """ Test installing package from PyPI which contains 'dev' in name """ - result = script.pip('install', 'django-devserver==0.0.4') - devserver_folder = script.site_packages / 'devserver' + result = script.pip("install", "django-devserver==0.0.4") + devserver_folder = 
script.site_packages / "devserver" egg_info_folder = ( - script.site_packages / 'django_devserver-0.0.4-py%s.egg-info' % pyversion + script.site_packages / "django_devserver-0.0.4-py%s.egg-info" % pyversion ) assert devserver_folder in result.files_created, str(result.stdout) assert egg_info_folder in result.files_created, str(result) @@ -764,100 +764,100 @@ def test_install_package_with_target(script): """ Test installing a package using pip install --target """ - target_dir = script.scratch_path / 'target' - result = script.pip_install_local('-t', target_dir, "simple==1.0") - assert Path('scratch') / 'target' / 'simple' in result.files_created, str(result) + target_dir = script.scratch_path / "target" + result = script.pip_install_local("-t", target_dir, "simple==1.0") + assert Path("scratch") / "target" / "simple" in result.files_created, str(result) # Test repeated call without --upgrade, no files should have changed result = script.pip_install_local( - '-t', target_dir, "simple==1.0", expect_stderr=True + "-t", target_dir, "simple==1.0", expect_stderr=True ) - assert not Path('scratch') / 'target' / 'simple' in result.files_updated + assert not Path("scratch") / "target" / "simple" in result.files_updated # Test upgrade call, check that new version is installed - result = script.pip_install_local('--upgrade', '-t', target_dir, "simple==2.0") - assert Path('scratch') / 'target' / 'simple' in result.files_updated, str(result) - egg_folder = Path('scratch') / 'target' / 'simple-2.0-py%s.egg-info' % pyversion + result = script.pip_install_local("--upgrade", "-t", target_dir, "simple==2.0") + assert Path("scratch") / "target" / "simple" in result.files_updated, str(result) + egg_folder = Path("scratch") / "target" / "simple-2.0-py%s.egg-info" % pyversion assert egg_folder in result.files_created, str(result) # Test install and upgrade of single-module package - result = script.pip_install_local('-t', target_dir, 'singlemodule==0.0.0') - singlemodule_py = 
Path('scratch') / 'target' / 'singlemodule.py' + result = script.pip_install_local("-t", target_dir, "singlemodule==0.0.0") + singlemodule_py = Path("scratch") / "target" / "singlemodule.py" assert singlemodule_py in result.files_created, str(result) result = script.pip_install_local( - '-t', target_dir, 'singlemodule==0.0.1', '--upgrade' + "-t", target_dir, "singlemodule==0.0.1", "--upgrade" ) assert singlemodule_py in result.files_updated, str(result) def test_install_nonlocal_compatible_wheel(script, data): - target_dir = script.scratch_path / 'target' + target_dir = script.scratch_path / "target" # Test install with --target result = script.pip( - 'install', - '-t', + "install", + "-t", target_dir, - '--no-index', - '--find-links', + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--python', - '3', - '--platform', - 'fakeplat', - '--abi', - 'fakeabi', - 'simplewheel', + "--only-binary=:all:", + "--python", + "3", + "--platform", + "fakeplat", + "--abi", + "fakeabi", + "simplewheel", ) assert result.returncode == SUCCESS - distinfo = Path('scratch') / 'target' / 'simplewheel-2.0-1.dist-info' + distinfo = Path("scratch") / "target" / "simplewheel-2.0-1.dist-info" assert distinfo in result.files_created # Test install without --target result = script.pip( - 'install', - '--no-index', - '--find-links', + "install", + "--no-index", + "--find-links", data.find_links, - '--only-binary=:all:', - '--python', - '3', - '--platform', - 'fakeplat', - '--abi', - 'fakeabi', - 'simplewheel', + "--only-binary=:all:", + "--python", + "3", + "--platform", + "fakeplat", + "--abi", + "fakeabi", + "simplewheel", expect_error=True, ) assert result.returncode == ERROR def test_install_nonlocal_compatible_wheel_path(script, data): - target_dir = script.scratch_path / 'target' + target_dir = script.scratch_path / "target" # Test a full path requirement result = script.pip( - 'install', - '-t', + "install", + "-t", target_dir, - '--no-index', - 
'--only-binary=:all:', - Path(data.packages) / 'simplewheel-2.0-py3-fakeabi-fakeplat.whl', + "--no-index", + "--only-binary=:all:", + Path(data.packages) / "simplewheel-2.0-py3-fakeabi-fakeplat.whl", ) assert result.returncode == SUCCESS - distinfo = Path('scratch') / 'target' / 'simplewheel-2.0.dist-info' + distinfo = Path("scratch") / "target" / "simplewheel-2.0.dist-info" assert distinfo in result.files_created # Test a full path requirement (without --target) result = script.pip( - 'install', - '--no-index', - '--only-binary=:all:', - Path(data.packages) / 'simplewheel-2.0-py3-fakeabi-fakeplat.whl', + "install", + "--no-index", + "--only-binary=:all:", + Path(data.packages) / "simplewheel-2.0-py3-fakeabi-fakeplat.whl", expect_error=True, ) assert result.returncode == ERROR @@ -868,8 +868,8 @@ def test_install_with_target_and_scripts_no_warning(script, with_wheel): Test that installing with --target does not trigger the "script not in PATH" warning (issue #5201) """ - target_dir = script.scratch_path / 'target' - pkga_path = script.scratch_path / 'pkga' + target_dir = script.scratch_path / "target" + pkga_path = script.scratch_path / "pkga" pkga_path.mkdir() pkga_path.joinpath("setup.py").write_text( textwrap.dedent( @@ -892,7 +892,7 @@ def main(): pass """ ) ) - result = script.pip('install', '--target', target_dir, pkga_path) + result = script.pip("install", "--target", target_dir, pkga_path) # This assertion isn't actually needed, if we get the script warning # the script.pip() call will fail with "stderr not expected". But we # leave the assertion to make the intention of the code clearer. 
@@ -903,23 +903,23 @@ def test_install_package_with_root(script, data): """ Test installing a package using pip install --root """ - root_dir = script.scratch_path / 'root' + root_dir = script.scratch_path / "root" result = script.pip( - 'install', - '--root', + "install", + "--root", root_dir, - '-f', + "-f", data.find_links, - '--no-index', - 'simple==1.0', + "--no-index", + "simple==1.0", ) normal_install_path = ( - script.base_path / script.site_packages / 'simple-1.0-py%s.egg-info' % pyversion + script.base_path / script.site_packages / "simple-1.0-py%s.egg-info" % pyversion ) # use distutils to change the root exactly how the --root option does it from distutils.util import change_root - root_path = change_root(os.path.join(script.scratch, 'root'), normal_install_path) + root_path = change_root(os.path.join(script.scratch, "root"), normal_install_path) assert root_path in result.files_created, str(result) # Should show find-links location in output @@ -931,29 +931,29 @@ def test_install_package_with_prefix(script, data): """ Test installing a package using pip install --prefix """ - prefix_path = script.scratch_path / 'prefix' + prefix_path = script.scratch_path / "prefix" result = script.pip( - 'install', - '--prefix', + "install", + "--prefix", prefix_path, - '-f', + "-f", data.find_links, - '--no-binary', - 'simple', - '--no-index', - 'simple==1.0', + "--no-binary", + "simple", + "--no-index", + "simple==1.0", ) - rel_prefix_path = script.scratch / 'prefix' + rel_prefix_path = script.scratch / "prefix" install_path = distutils.sysconfig.get_python_lib( prefix=rel_prefix_path - ) / 'simple-1.0-py{}.egg-info'.format(pyversion) + ) / "simple-1.0-py{}.egg-info".format(pyversion) assert install_path in result.files_created, str(result) def test_install_editable_with_prefix(script): # make a dummy project - pkga_path = script.scratch_path / 'pkga' + pkga_path = script.scratch_path / "pkga" pkga_path.mkdir() pkga_path.joinpath("setup.py").write_text( 
textwrap.dedent( @@ -967,10 +967,10 @@ def test_install_editable_with_prefix(script): if hasattr(sys, "pypy_version_info"): site_packages = os.path.join( - 'prefix', 'lib', 'python{}'.format(pyversion), 'site-packages' + "prefix", "lib", "python{}".format(pyversion), "site-packages" ) else: - site_packages = distutils.sysconfig.get_python_lib(prefix='prefix') + site_packages = distutils.sysconfig.get_python_lib(prefix="prefix") # make sure target path is in PYTHONPATH pythonpath = script.scratch_path / site_packages @@ -978,11 +978,11 @@ def test_install_editable_with_prefix(script): script.environ["PYTHONPATH"] = pythonpath # install pkga package into the absolute prefix directory - prefix_path = script.scratch_path / 'prefix' - result = script.pip('install', '--editable', pkga_path, '--prefix', prefix_path) + prefix_path = script.scratch_path / "prefix" + result = script.pip("install", "--editable", pkga_path, "--prefix", prefix_path) # assert pkga is installed at correct location - install_path = script.scratch / site_packages / 'pkga.egg-link' + install_path = script.scratch / site_packages / "pkga.egg-link" assert install_path in result.files_created, str(result) @@ -990,16 +990,16 @@ def test_install_package_conflict_prefix_and_user(script, data): """ Test installing a package using pip install --prefix --user errors out """ - prefix_path = script.scratch_path / 'prefix' + prefix_path = script.scratch_path / "prefix" result = script.pip( - 'install', - '-f', + "install", + "-f", data.find_links, - '--no-index', - '--user', - '--prefix', + "--no-index", + "--user", + "--prefix", prefix_path, - 'simple==1.0', + "simple==1.0", expect_error=True, quiet=True, ) @@ -1014,25 +1014,25 @@ def test_install_package_that_emits_unicode(script, data): """ to_install = data.packages.joinpath("BrokenEmitsUTF8") result = script.pip( - 'install', to_install, expect_error=True, expect_temp=True, quiet=True + "install", to_install, expect_error=True, expect_temp=True, quiet=True 
) assert ( - 'FakeError: this package designed to fail on install' in result.stderr - ), 'stderr: {}'.format(result.stderr) - assert 'UnicodeDecodeError' not in result.stderr - assert 'UnicodeDecodeError' not in result.stdout + "FakeError: this package designed to fail on install" in result.stderr + ), "stderr: {}".format(result.stderr) + assert "UnicodeDecodeError" not in result.stderr + assert "UnicodeDecodeError" not in result.stdout def test_install_package_with_utf8_setup(script, data): """Install a package with a setup.py that declares a utf-8 encoding.""" to_install = data.packages.joinpath("SetupPyUTF8") - script.pip('install', to_install) + script.pip("install", to_install) def test_install_package_with_latin1_setup(script, data): """Install a package with a setup.py that declares a latin-1 encoding.""" to_install = data.packages.joinpath("SetupPyLatin1") - script.pip('install', to_install) + script.pip("install", to_install) def test_url_req_case_mismatch_no_index(script, data): @@ -1044,15 +1044,15 @@ def test_url_req_case_mismatch_no_index(script, data): tests/data/packages contains Upper-1.0.tar.gz and Upper-2.0.tar.gz 'requiresupper' has install_requires = ['upper'] """ - Upper = '/'.join((data.find_links, 'Upper-1.0.tar.gz')) + Upper = "/".join((data.find_links, "Upper-1.0.tar.gz")) result = script.pip( - 'install', '--no-index', '-f', data.find_links, Upper, 'requiresupper' + "install", "--no-index", "-f", data.find_links, Upper, "requiresupper" ) # only Upper-1.0.tar.gz should get installed. 
- egg_folder = script.site_packages / 'Upper-1.0-py%s.egg-info' % pyversion + egg_folder = script.site_packages / "Upper-1.0-py%s.egg-info" % pyversion assert egg_folder in result.files_created, str(result) - egg_folder = script.site_packages / 'Upper-2.0-py%s.egg-info' % pyversion + egg_folder = script.site_packages / "Upper-2.0-py%s.egg-info" % pyversion assert egg_folder not in result.files_created, str(result) @@ -1071,15 +1071,15 @@ def test_url_req_case_mismatch_file_index(script, data): set of packages as it requires a prepared index.html file and subdirectory-per-package structure. """ - Dinner = '/'.join((data.find_links3, 'dinner', 'Dinner-1.0.tar.gz')) + Dinner = "/".join((data.find_links3, "dinner", "Dinner-1.0.tar.gz")) result = script.pip( - 'install', '--index-url', data.find_links3, Dinner, 'requiredinner' + "install", "--index-url", data.find_links3, Dinner, "requiredinner" ) # only Upper-1.0.tar.gz should get installed. - egg_folder = script.site_packages / 'Dinner-1.0-py%s.egg-info' % pyversion + egg_folder = script.site_packages / "Dinner-1.0-py%s.egg-info" % pyversion assert egg_folder in result.files_created, str(result) - egg_folder = script.site_packages / 'Dinner-2.0-py%s.egg-info' % pyversion + egg_folder = script.site_packages / "Dinner-2.0-py%s.egg-info" % pyversion assert egg_folder not in result.files_created, str(result) @@ -1089,12 +1089,12 @@ def test_url_incorrect_case_no_index(script, data): where the incorrect case is given in the name of the package to install rather than in a requirements file. """ - result = script.pip('install', '--no-index', '-f', data.find_links, "upper") + result = script.pip("install", "--no-index", "-f", data.find_links, "upper") # only Upper-2.0.tar.gz should get installed. 
- egg_folder = script.site_packages / 'Upper-1.0-py%s.egg-info' % pyversion + egg_folder = script.site_packages / "Upper-1.0-py%s.egg-info" % pyversion assert egg_folder not in result.files_created, str(result) - egg_folder = script.site_packages / 'Upper-2.0-py%s.egg-info' % pyversion + egg_folder = script.site_packages / "Upper-2.0-py%s.egg-info" % pyversion assert egg_folder in result.files_created, str(result) @@ -1105,13 +1105,13 @@ def test_url_incorrect_case_file_index(script, data): rather than in a requirements file. """ result = script.pip( - 'install', '--index-url', data.find_links3, "dinner", expect_stderr=True + "install", "--index-url", data.find_links3, "dinner", expect_stderr=True ) # only Upper-2.0.tar.gz should get installed. - egg_folder = script.site_packages / 'Dinner-1.0-py%s.egg-info' % pyversion + egg_folder = script.site_packages / "Dinner-1.0-py%s.egg-info" % pyversion assert egg_folder not in result.files_created, str(result) - egg_folder = script.site_packages / 'Dinner-2.0-py%s.egg-info' % pyversion + egg_folder = script.site_packages / "Dinner-2.0-py%s.egg-info" % pyversion assert egg_folder in result.files_created, str(result) # Should show index-url location in output @@ -1159,7 +1159,7 @@ def test_no_compiles_pyc(script): def test_install_upgrade_editable_depending_on_other_editable(script): script.scratch_path.joinpath("pkga").mkdir() - pkga_path = script.scratch_path / 'pkga' + pkga_path = script.scratch_path / "pkga" pkga_path.joinpath("setup.py").write_text( textwrap.dedent( """ @@ -1169,12 +1169,12 @@ def test_install_upgrade_editable_depending_on_other_editable(script): """ ) ) - script.pip('install', '--editable', pkga_path) - result = script.pip('list', '--format=freeze') + script.pip("install", "--editable", pkga_path) + result = script.pip("list", "--format=freeze") assert "pkga==0.1" in result.stdout script.scratch_path.joinpath("pkgb").mkdir() - pkgb_path = script.scratch_path / 'pkgb' + pkgb_path = script.scratch_path 
/ "pkgb" pkgb_path.joinpath("setup.py").write_text( textwrap.dedent( """ @@ -1185,13 +1185,13 @@ def test_install_upgrade_editable_depending_on_other_editable(script): """ ) ) - script.pip('install', '--upgrade', '--editable', pkgb_path, '--no-index') - result = script.pip('list', '--format=freeze') + script.pip("install", "--upgrade", "--editable", pkgb_path, "--no-index") + result = script.pip("list", "--format=freeze") assert "pkgb==0.1" in result.stdout def test_install_subprocess_output_handling(script, data): - args = ['install', data.src.joinpath('chattymodule')] + args = ["install", data.src.joinpath("chattymodule")] # Regular install should not show output from the chatty setup.py result = script.pip(*args) @@ -1219,34 +1219,34 @@ def test_install_subprocess_output_handling(script, data): def test_install_log(script, data, tmpdir): # test that verbose logs go to "--log" file f = tmpdir.joinpath("log.txt") - args = ['--log=%s' % f, 'install', data.src.joinpath('chattymodule')] + args = ["--log=%s" % f, "install", data.src.joinpath("chattymodule")] result = script.pip(*args) assert 0 == result.stdout.count("HELLO FROM CHATTYMODULE") - with open(f, 'r') as fp: + with open(f, "r") as fp: # one from egg_info, one from install assert 2 == fp.read().count("HELLO FROM CHATTYMODULE") def test_install_topological_sort(script, data): - args = ['install', 'TopoRequires4', '--no-index', '-f', data.packages] + args = ["install", "TopoRequires4", "--no-index", "-f", data.packages] res = str(script.pip(*args, expect_error=False)) - order1 = 'TopoRequires, TopoRequires2, TopoRequires3, TopoRequires4' - order2 = 'TopoRequires, TopoRequires3, TopoRequires2, TopoRequires4' + order1 = "TopoRequires, TopoRequires2, TopoRequires3, TopoRequires4" + order2 = "TopoRequires, TopoRequires3, TopoRequires2, TopoRequires4" assert order1 in res or order2 in res, res def test_install_wheel_broken(script, with_wheel): - res = script.pip_install_local('wheelbroken', expect_stderr=True) + 
res = script.pip_install_local("wheelbroken", expect_stderr=True) assert "Successfully installed wheelbroken-0.1" in str(res), str(res) def test_cleanup_after_failed_wheel(script, with_wheel): - res = script.pip_install_local('wheelbrokenafter', expect_stderr=True) + res = script.pip_install_local("wheelbrokenafter", expect_stderr=True) # One of the effects of not cleaning up is broken scripts: script_py = script.bin_path / "script.py" assert script_py.exists(), script_py - shebang = open(script_py, 'r').readline().strip() - assert shebang != '#!python', shebang + shebang = open(script_py, "r").readline().strip() + assert shebang != "#!python", shebang # OK, assert that we *said* we were cleaning up: assert "Running setup.py clean for wheelbrokenafter" in str(res), str(res) @@ -1254,20 +1254,20 @@ def test_cleanup_after_failed_wheel(script, with_wheel): def test_install_builds_wheels(script, data, with_wheel): # We need to use a subprocess to get the right value on Windows. res = script.run( - 'python', - '-c', + "python", + "-c", ( - 'from pip._internal.utils import appdirs; ' + "from pip._internal.utils import appdirs; " 'print(appdirs.user_cache_dir("pip"))' ), ) - wheels_cache = os.path.join(res.stdout.rstrip('\n'), 'wheels') + wheels_cache = os.path.join(res.stdout.rstrip("\n"), "wheels") # NB This incidentally tests a local tree + tarball inputs # see test_install_editable_from_git_autobuild_wheel for editable # vcs coverage. 
- to_install = data.packages.joinpath('requires_wheelbroken_upper') + to_install = data.packages.joinpath("requires_wheelbroken_upper") res = script.pip( - 'install', '--no-index', '-f', data.find_links, to_install, expect_stderr=True + "install", "--no-index", "-f", data.find_links, to_install, expect_stderr=True ) expected = ( "Successfully installed requires-wheelbroken-upper-0" @@ -1297,12 +1297,12 @@ def test_install_builds_wheels(script, data, with_wheel): def test_install_no_binary_disables_building_wheels(script, data, with_wheel): - to_install = data.packages.joinpath('requires_wheelbroken_upper') + to_install = data.packages.joinpath("requires_wheelbroken_upper") res = script.pip( - 'install', - '--no-index', - '--no-binary=upper', - '-f', + "install", + "--no-index", + "--no-binary=upper", + "-f", data.find_links, to_install, expect_stderr=True, @@ -1328,15 +1328,15 @@ def test_install_no_binary_disables_building_wheels(script, data, with_wheel): def test_install_no_binary_disables_cached_wheels(script, data, with_wheel): # Seed the cache - script.pip('install', '--no-index', '-f', data.find_links, 'upper') - script.pip('uninstall', 'upper', '-y') + script.pip("install", "--no-index", "-f", data.find_links, "upper") + script.pip("uninstall", "upper", "-y") res = script.pip( - 'install', - '--no-index', - '--no-binary=:all:', - '-f', + "install", + "--no-index", + "--no-binary=:all:", + "-f", data.find_links, - 'upper', + "upper", expect_stderr=True, ) assert "Successfully installed upper-2.0" in str(res), str(res) @@ -1348,7 +1348,7 @@ def test_install_no_binary_disables_cached_wheels(script, data, with_wheel): def test_install_editable_with_wrong_egg_name(script): script.scratch_path.joinpath("pkga").mkdir() - pkga_path = script.scratch_path / 'pkga' + pkga_path = script.scratch_path / "pkga" pkga_path.joinpath("setup.py").write_text( textwrap.dedent( """ @@ -1358,7 +1358,7 @@ def test_install_editable_with_wrong_egg_name(script): """ ) ) - result = 
script.pip('install', '--editable', 'file://%s#egg=pkgb' % pkga_path) + result = script.pip("install", "--editable", "file://%s#egg=pkgb" % pkga_path) assert ( "Generating metadata for package pkgb produced metadata " "for project name pkga. Fix your #egg=pkgb " @@ -1372,7 +1372,7 @@ def test_install_tar_xz(script, data): import lzma # noqa except ImportError: pytest.skip("No lzma support") - res = script.pip('install', data.packages / 'singlemodule-0.0.1.tar.xz') + res = script.pip("install", data.packages / "singlemodule-0.0.1.tar.xz") assert "Successfully installed singlemodule-0.0.1" in res.stdout, res @@ -1381,7 +1381,7 @@ def test_install_tar_lzma(script, data): import lzma # noqa except ImportError: pytest.skip("No lzma support") - res = script.pip('install', data.packages / 'singlemodule-0.0.1.tar.lzma') + res = script.pip("install", data.packages / "singlemodule-0.0.1.tar.lzma") assert "Successfully installed singlemodule-0.0.1" in res.stdout, res @@ -1389,7 +1389,7 @@ def test_double_install(script): """ Test double install passing with two same version requirements """ - result = script.pip('install', 'pip', 'pip', expect_error=False) + result = script.pip("install", "pip", "pip", expect_error=False) msg = "Double requirement given: pip (already in pip, name='pip')" assert msg not in result.stderr @@ -1398,20 +1398,20 @@ def test_double_install_fail(script): """ Test double install failing with two different version requirements """ - result = script.pip('install', 'pip==*', 'pip==7.1.2', expect_error=True) + result = script.pip("install", "pip==*", "pip==7.1.2", expect_error=True) msg = "Double requirement given: pip==7.1.2 (already in pip==*, name='pip')" assert msg in result.stderr def _get_expected_error_text(): return ("Package 'pkga' requires a different Python: {} not in '<1.0'").format( - '.'.join(map(str, sys.version_info[:3])) + ".".join(map(str, sys.version_info[:3])) ) def test_install_incompatible_python_requires(script): 
script.scratch_path.joinpath("pkga").mkdir() - pkga_path = script.scratch_path / 'pkga' + pkga_path = script.scratch_path / "pkga" pkga_path.joinpath("setup.py").write_text( textwrap.dedent( """ @@ -1422,13 +1422,13 @@ def test_install_incompatible_python_requires(script): """ ) ) - result = script.pip('install', pkga_path, expect_error=True) + result = script.pip("install", pkga_path, expect_error=True) assert _get_expected_error_text() in result.stderr, str(result) def test_install_incompatible_python_requires_editable(script): script.scratch_path.joinpath("pkga").mkdir() - pkga_path = script.scratch_path / 'pkga' + pkga_path = script.scratch_path / "pkga" pkga_path.joinpath("setup.py").write_text( textwrap.dedent( """ @@ -1439,13 +1439,13 @@ def test_install_incompatible_python_requires_editable(script): """ ) ) - result = script.pip('install', '--editable=%s' % pkga_path, expect_error=True) + result = script.pip("install", "--editable=%s" % pkga_path, expect_error=True) assert _get_expected_error_text() in result.stderr, str(result) def test_install_incompatible_python_requires_wheel(script, with_wheel): script.scratch_path.joinpath("pkga").mkdir() - pkga_path = script.scratch_path / 'pkga' + pkga_path = script.scratch_path / "pkga" pkga_path.joinpath("setup.py").write_text( textwrap.dedent( """ @@ -1456,16 +1456,16 @@ def test_install_incompatible_python_requires_wheel(script, with_wheel): """ ) ) - script.run('python', 'setup.py', 'bdist_wheel', '--universal', cwd=pkga_path) + script.run("python", "setup.py", "bdist_wheel", "--universal", cwd=pkga_path) result = script.pip( - 'install', './pkga/dist/pkga-0.1-py2.py3-none-any.whl', expect_error=True + "install", "./pkga/dist/pkga-0.1-py2.py3-none-any.whl", expect_error=True ) assert _get_expected_error_text() in result.stderr, str(result) def test_install_compatible_python_requires(script): script.scratch_path.joinpath("pkga").mkdir() - pkga_path = script.scratch_path / 'pkga' + pkga_path = script.scratch_path 
/ "pkga" pkga_path.joinpath("setup.py").write_text( textwrap.dedent( """ @@ -1476,19 +1476,19 @@ def test_install_compatible_python_requires(script): """ ) ) - res = script.pip('install', pkga_path) + res = script.pip("install", pkga_path) assert "Successfully installed pkga-0.1" in res.stdout, res @pytest.mark.network def test_install_pep508_with_url(script): res = script.pip( - 'install', - '--no-index', - 'packaging@https://files.pythonhosted.org/packages/2f/2b/' - 'c681de3e1dbcd469537aefb15186b800209aa1f299d933d23b48d85c9d56/' - 'packaging-15.3-py2.py3-none-any.whl#sha256=' - 'ce1a869fe039fbf7e217df36c4653d1dbe657778b2d41709593a0003584405f4', + "install", + "--no-index", + "packaging@https://files.pythonhosted.org/packages/2f/2b/" + "c681de3e1dbcd469537aefb15186b800209aa1f299d933d23b48d85c9d56/" + "packaging-15.3-py2.py3-none-any.whl#sha256=" + "ce1a869fe039fbf7e217df36c4653d1dbe657778b2d41709593a0003584405f4", ) assert "Successfully installed packaging-15.3" in str(res), str(res) @@ -1497,24 +1497,24 @@ def test_install_pep508_with_url(script): def test_install_pep508_with_url_in_install_requires(script): pkga_path = create_test_package_with_setup( script, - name='pkga', - version='1.0', + name="pkga", + version="1.0", install_requires=[ - 'packaging@https://files.pythonhosted.org/packages/2f/2b/' - 'c681de3e1dbcd469537aefb15186b800209aa1f299d933d23b48d85c9d56/' - 'packaging-15.3-py2.py3-none-any.whl#sha256=' - 'ce1a869fe039fbf7e217df36c4653d1dbe657778b2d41709593a0003584405f4' + "packaging@https://files.pythonhosted.org/packages/2f/2b/" + "c681de3e1dbcd469537aefb15186b800209aa1f299d933d23b48d85c9d56/" + "packaging-15.3-py2.py3-none-any.whl#sha256=" + "ce1a869fe039fbf7e217df36c4653d1dbe657778b2d41709593a0003584405f4" ], ) - res = script.pip('install', pkga_path) + res = script.pip("install", pkga_path) assert "Successfully installed packaging-15.3" in str(res), str(res) @pytest.mark.network -@pytest.mark.parametrize('index', (PyPI.simple_url, 
TestPyPI.simple_url)) +@pytest.mark.parametrize("index", (PyPI.simple_url, TestPyPI.simple_url)) def test_install_from_test_pypi_with_ext_url_dep_is_blocked(script, index): res = script.pip( - 'install', '--index-url', index, 'pep-508-url-deps', expect_error=True + "install", "--index-url", index, "pep-508-url-deps", expect_error=True ) error_message = ( "Packages installed from PyPI cannot depend on packages " @@ -1555,68 +1555,68 @@ def test_installed_files_recorded_in_deterministic_order(script, data): order, to make installs reproducible. """ to_install = data.packages.joinpath("FSPkg") - result = script.pip('install', to_install, expect_error=False) - fspkg_folder = script.site_packages / 'fspkg' - egg_info = 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion - installed_files_path = script.site_packages / egg_info / 'installed-files.txt' + result = script.pip("install", to_install, expect_error=False) + fspkg_folder = script.site_packages / "fspkg" + egg_info = "FSPkg-0.1.dev0-py%s.egg-info" % pyversion + installed_files_path = script.site_packages / egg_info / "installed-files.txt" assert fspkg_folder in result.files_created, str(result.stdout) assert installed_files_path in result.files_created, str(result) installed_files_path = result.files_created[installed_files_path].full installed_files_lines = [ - p for p in Path(installed_files_path).read_text().split('\n') if p + p for p in Path(installed_files_path).read_text().split("\n") if p ] assert installed_files_lines == sorted(installed_files_lines) def test_install_conflict_results_in_warning(script, data): pkgA_path = create_test_package_with_setup( - script, name='pkgA', version='1.0', install_requires=['pkgb == 1.0'] + script, name="pkgA", version="1.0", install_requires=["pkgb == 1.0"] ) - pkgB_path = create_test_package_with_setup(script, name='pkgB', version='2.0') + pkgB_path = create_test_package_with_setup(script, name="pkgB", version="2.0") # Install pkgA without its dependency - result1 = 
script.pip('install', '--no-index', pkgA_path, '--no-deps') + result1 = script.pip("install", "--no-index", pkgA_path, "--no-deps") assert "Successfully installed pkgA-1.0" in result1.stdout, str(result1) # Then install an incorrect version of the dependency - result2 = script.pip('install', '--no-index', pkgB_path, allow_stderr_error=True) + result2 = script.pip("install", "--no-index", pkgB_path, allow_stderr_error=True) assert "pkga 1.0 has requirement pkgb==1.0" in result2.stderr, str(result2) assert "Successfully installed pkgB-2.0" in result2.stdout, str(result2) def test_install_conflict_warning_can_be_suppressed(script, data): pkgA_path = create_test_package_with_setup( - script, name='pkgA', version='1.0', install_requires=['pkgb == 1.0'] + script, name="pkgA", version="1.0", install_requires=["pkgb == 1.0"] ) - pkgB_path = create_test_package_with_setup(script, name='pkgB', version='2.0') + pkgB_path = create_test_package_with_setup(script, name="pkgB", version="2.0") # Install pkgA without its dependency - result1 = script.pip('install', '--no-index', pkgA_path, '--no-deps') + result1 = script.pip("install", "--no-index", pkgA_path, "--no-deps") assert "Successfully installed pkgA-1.0" in result1.stdout, str(result1) # Then install an incorrect version of the dependency; suppressing warning - result2 = script.pip('install', '--no-index', pkgB_path, '--no-warn-conflicts') + result2 = script.pip("install", "--no-index", pkgB_path, "--no-warn-conflicts") assert "Successfully installed pkgB-2.0" in result2.stdout, str(result2) def test_target_install_ignores_distutils_config_install_prefix(script): - prefix = script.scratch_path / 'prefix' + prefix = script.scratch_path / "prefix" distutils_config = Path( - os.path.expanduser('~'), - 'pydistutils.cfg' if sys.platform == 'win32' else '.pydistutils.cfg', + os.path.expanduser("~"), + "pydistutils.cfg" if sys.platform == "win32" else ".pydistutils.cfg", ) distutils_config.write_text( textwrap.dedent( - ''' + """ 
[install] prefix=%s - ''' + """ % str(prefix) ) ) - target = script.scratch_path / 'target' - result = script.pip_install_local('simplewheel', '-t', target) + target = script.scratch_path / "target" + result = script.pip_install_local("simplewheel", "-t", target) assert ( "Successfully installed simplewheel" in result.stdout and (target - script.base_path) in result.files_created @@ -1627,14 +1627,14 @@ def test_target_install_ignores_distutils_config_install_prefix(script): @pytest.mark.network @pytest.mark.skipif("sys.platform != 'win32'") @pytest.mark.parametrize( - 'pip_name', + "pip_name", [ - 'pip', - 'pip{}'.format(sys.version_info[0]), - 'pip{}.{}'.format(*sys.version_info[:2]), - 'pip.exe', - 'pip{}.exe'.format(sys.version_info[0]), - 'pip{}.{}.exe'.format(*sys.version_info[:2]), + "pip", + "pip{}".format(sys.version_info[0]), + "pip{}.{}".format(*sys.version_info[:2]), + "pip.exe", + "pip{}.exe".format(sys.version_info[0]), + "pip{}.{}.exe".format(*sys.version_info[:2]), ], ) def test_protect_pip_from_modification_on_windows(script, pip_name): @@ -1642,11 +1642,11 @@ def test_protect_pip_from_modification_on_windows(script, pip_name): Test that pip modification command using ``pip install ...`` raises an error on Windows. """ - command = [pip_name, 'install', 'pip != {}'.format(pip_current_version)] + command = [pip_name, "install", "pip != {}".format(pip_current_version)] result = script.run(*command, expect_error=True) - new_command = [sys.executable, '-m', 'pip'] + command[1:] - expected_message = 'To modify pip, please run the following command:\n{}'.format( - ' '.join(new_command) + new_command = [sys.executable, "-m", "pip"] + command[1:] + expected_message = "To modify pip, please run the following command:\n{}".format( + " ".join(new_command) ) assert expected_message in result.stderr, str(result) @@ -1659,15 +1659,15 @@ def test_protect_pip_from_modification_via_deps_on_windows(script): if `pkga` implicitly tries to upgrade pip. 
""" pkga_wheel_path = create_basic_wheel_for_package( - script, 'pkga', '0.1', depends=['pip != {}'.format(pip_current_version)] + script, "pkga", "0.1", depends=["pip != {}".format(pip_current_version)] ) # Make sure pip install pkga raises an error - args = ['install', pkga_wheel_path] + args = ["install", pkga_wheel_path] result = script.pip(*args, expect_error=True, use_module=False) - new_command = [sys.executable, '-m', 'pip'] + args - expected_message = 'To modify pip, please run the following command:\n{}'.format( - ' '.join(new_command) + new_command = [sys.executable, "-m", "pip"] + args + expected_message = "To modify pip, please run the following command:\n{}".format( + " ".join(new_command) ) assert expected_message in result.stderr, str(result) @@ -1681,29 +1681,29 @@ def test_protect_pip_from_modification_via_sub_deps_on_windows(script): """ # Make a wheel for pkga which requires pkgb pkga_wheel_path = create_basic_wheel_for_package( - script, 'pkga', '0.1', depends=['pkgb'] + script, "pkga", "0.1", depends=["pkgb"] ) # Make a wheel for pkgb which requires pip pkgb_wheel_path = create_basic_wheel_for_package( - script, 'pkgb', '0.1', depends=['pip != {}'.format(pip_current_version)] + script, "pkgb", "0.1", depends=["pip != {}".format(pip_current_version)] ) # Make sure pip install pkga raises an error - args = ['install', pkga_wheel_path, '--find-links', pkgb_wheel_path.parent] + args = ["install", pkga_wheel_path, "--find-links", pkgb_wheel_path.parent] result = script.pip(*args, expect_error=True, use_module=False) - new_command = [sys.executable, '-m', 'pip'] + args - expected_message = 'To modify pip, please run the following command:\n{}'.format( - ' '.join(new_command) + new_command = [sys.executable, "-m", "pip"] + args + expected_message = "To modify pip, please run the following command:\n{}".format( + " ".join(new_command) ) assert expected_message in result.stderr, str(result) @pytest.mark.parametrize( - 'install_args, expected_message', 
+ "install_args, expected_message", [ - ([], 'Requirement already satisfied: pip in'), - (['--upgrade'], 'Requirement already up-to-date: pip in'), + ([], "Requirement already satisfied: pip in"), + (["--upgrade"], "Requirement already up-to-date: pip in"), ], ) @pytest.mark.parametrize("use_module", [True, False]) @@ -1713,5 +1713,5 @@ def test_install_pip_does_not_modify_pip_when_satisfied( """ Test it doesn't upgrade the pip if it already satisfies the requirement. """ - result = script.pip_install_local('pip', *install_args, use_module=use_module) + result = script.pip_install_local("pip", *install_args, use_module=use_module) assert expected_message in result.stdout, str(result) diff --git a/tests/functional/test_install_check.py b/tests/functional/test_install_check.py index ec97d049c8a..98179941191 100644 --- a/tests/functional/test_install_check.py +++ b/tests/functional/test_install_check.py @@ -11,25 +11,25 @@ def matches_expected_lines(string, expected_lines, exact=True): def test_check_install_canonicalization(script, deprecated_python): pkga_path = create_test_package_with_setup( script, - name='pkgA', - version='1.0', - install_requires=['normal-missing', 'SPECIAL.missing'], + name="pkgA", + version="1.0", + install_requires=["normal-missing", "SPECIAL.missing"], ) normal_path = create_test_package_with_setup( - script, name='normal-missing', version='0.1' + script, name="normal-missing", version="0.1" ) special_path = create_test_package_with_setup( - script, name='SPECIAL.missing', version='0.1' + script, name="SPECIAL.missing", version="0.1" ) # Let's install pkgA without its dependency - result = script.pip('install', '--no-index', pkga_path, '--no-deps') + result = script.pip("install", "--no-index", pkga_path, "--no-deps") assert "Successfully installed pkgA-1.0" in result.stdout, str(result) # Install the first missing dependency. Only an error for the # second dependency should remain. 
result = script.pip( - 'install', '--no-index', normal_path, '--quiet', allow_stderr_error=True + "install", "--no-index", normal_path, "--quiet", allow_stderr_error=True ) expected_lines = [ "ERROR: pkga 1.0 requires SPECIAL.missing, which is not installed." @@ -43,12 +43,12 @@ def test_check_install_canonicalization(script, deprecated_python): # Install the second missing package and expect that there is no warning # during the installation. This is special as the package name requires # name normalization (as in https://github.com/pypa/pip/issues/5134) - result = script.pip('install', '--no-index', special_path, '--quiet') + result = script.pip("install", "--no-index", special_path, "--quiet") assert matches_expected_lines(result.stderr, [], exact=not deprecated_python) assert result.returncode == 0 # Double check that all errors are resolved in the end - result = script.pip('check') + result = script.pip("check") expected_lines = ["No broken requirements found."] assert matches_expected_lines(result.stdout, expected_lines) assert result.returncode == 0 @@ -57,25 +57,25 @@ def test_check_install_canonicalization(script, deprecated_python): def test_check_install_does_not_warn_for_out_of_graph_issues(script, deprecated_python): pkg_broken_path = create_test_package_with_setup( script, - name='broken', - version='1.0', - install_requires=['missing', 'conflict < 1.0'], + name="broken", + version="1.0", + install_requires=["missing", "conflict < 1.0"], ) pkg_unrelated_path = create_test_package_with_setup( - script, name='unrelated', version='1.0' + script, name="unrelated", version="1.0" ) pkg_conflict_path = create_test_package_with_setup( - script, name='conflict', version='1.0' + script, name="conflict", version="1.0" ) # Install a package without it's dependencies - result = script.pip('install', '--no-index', pkg_broken_path, '--no-deps') + result = script.pip("install", "--no-index", pkg_broken_path, "--no-deps") # Deprecated python versions produce an extra 
warning on stderr assert matches_expected_lines(result.stderr, [], exact=not deprecated_python) # Install conflict package result = script.pip( - 'install', '--no-index', pkg_conflict_path, allow_stderr_error=True + "install", "--no-index", pkg_conflict_path, allow_stderr_error=True ) assert matches_expected_lines( result.stderr, @@ -90,11 +90,11 @@ def test_check_install_does_not_warn_for_out_of_graph_issues(script, deprecated_ ) # Install unrelated package - result = script.pip('install', '--no-index', pkg_unrelated_path, '--quiet') + result = script.pip("install", "--no-index", pkg_unrelated_path, "--quiet") # should not warn about broken's deps when installing unrelated package assert matches_expected_lines(result.stderr, [], exact=not deprecated_python) - result = script.pip('check', expect_error=True) + result = script.pip("check", expect_error=True) expected_lines = [ "broken 1.0 requires missing, which is not installed.", "broken 1.0 has requirement conflict<1.0, but you have conflict 1.0.", diff --git a/tests/functional/test_install_cleanup.py b/tests/functional/test_install_cleanup.py index a42c6c30f31..8a8692d445a 100644 --- a/tests/functional/test_install_cleanup.py +++ b/tests/functional/test_install_cleanup.py @@ -13,7 +13,7 @@ def test_cleanup_after_install(script, data): """ Test clean up after installing a package. 
""" - script.pip('install', '--no-index', '--find-links=%s' % data.find_links, 'simple') + script.pip("install", "--no-index", "--find-links=%s" % data.find_links, "simple") build = script.venv_path / "build" src = script.venv_path / "src" assert not exists(build), "build/ dir still exists: %s" % build @@ -26,15 +26,15 @@ def test_no_clean_option_blocks_cleaning_after_install(script, data): """ Test --no-clean option blocks cleaning after install """ - build = script.base_path / 'pip-build' + build = script.base_path / "pip-build" script.pip( - 'install', - '--no-clean', - '--no-index', - '--build', + "install", + "--no-clean", + "--no-index", + "--build", build, - '--find-links=%s' % data.find_links, - 'simple', + "--find-links=%s" % data.find_links, + "simple", expect_temp=True, ) assert exists(build) @@ -48,13 +48,13 @@ def test_cleanup_after_install_editable_from_hg(script, tmpdir): """ script.pip( - 'install', - '-e', - '%s#egg=ScriptTest' - % local_checkout('hg+https://bitbucket.org/ianb/scripttest', tmpdir), + "install", + "-e", + "%s#egg=ScriptTest" + % local_checkout("hg+https://bitbucket.org/ianb/scripttest", tmpdir), ) - build = script.venv_path / 'build' - src = script.venv_path / 'src' + build = script.venv_path / "build" + src = script.venv_path / "src" assert not exists(build), "build/ dir still exists: %s" % build assert exists(src), "expected src/ dir doesn't exist: %s" % src script.assert_no_temp() @@ -65,9 +65,9 @@ def test_cleanup_after_install_from_local_directory(script, data): Test clean up after installing from a local directory. 
""" to_install = data.packages.joinpath("FSPkg") - script.pip('install', to_install, expect_error=False) - build = script.venv_path / 'build' - src = script.venv_path / 'src' + script.pip("install", to_install, expect_error=False) + build = script.venv_path / "build" + src = script.venv_path / "src" assert not exists(build), "unexpected build/ dir exists: %s" % build assert not exists(src), "unexpected src/ dir exist: %s" % src script.assert_no_temp() @@ -85,10 +85,10 @@ def test_cleanup_req_satisfied_no_name(script, data): # 2) parent-0.1.tar.gz dist = data.packages.joinpath("parent-0.1.tar.gz") - script.pip('install', dist) - script.pip('install', dist) + script.pip("install", dist) + script.pip("install", dist) - build = script.venv_path / 'build' + build = script.venv_path / "build" assert not exists(build), "unexpected build/ dir exists: %s" % build script.assert_no_temp() @@ -99,14 +99,14 @@ def test_cleanup_after_install_exception(script, data): """ # broken==0.2broken fails during install; see packages readme file result = script.pip( - 'install', - '-f', + "install", + "-f", data.find_links, - '--no-index', - 'broken==0.2broken', + "--no-index", + "broken==0.2broken", expect_error=True, ) - build = script.venv_path / 'build' + build = script.venv_path / "build" assert not exists(build), "build/ dir still exists: %s" % result.stdout script.assert_no_temp() @@ -117,14 +117,14 @@ def test_cleanup_after_egg_info_exception(script, data): """ # brokenegginfo fails during egg_info; see packages readme file result = script.pip( - 'install', - '-f', + "install", + "-f", data.find_links, - '--no-index', - 'brokenegginfo==0.1', + "--no-index", + "brokenegginfo==0.1", expect_error=True, ) - build = script.venv_path / 'build' + build = script.venv_path / "build" assert not exists(build), "build/ dir still exists: %s" % result.stdout script.assert_no_temp() @@ -134,18 +134,18 @@ def test_cleanup_prevented_upon_build_dir_exception(script, data): """ Test no cleanup 
occurs after a PreviousBuildDirError """ - build = script.venv_path / 'build' - build_simple = build / 'simple' + build = script.venv_path / "build" + build_simple = build / "simple" os.makedirs(build_simple) write_delete_marker_file(build_simple) build_simple.joinpath("setup.py").write_text("#") result = script.pip( - 'install', - '-f', + "install", + "-f", data.find_links, - '--no-index', - 'simple', - '--build', + "--no-index", + "simple", + "--build", build, expect_error=True, expect_temp=True, diff --git a/tests/functional/test_install_compat.py b/tests/functional/test_install_compat.py index 6d69236899d..ffde5114091 100644 --- a/tests/functional/test_install_compat.py +++ b/tests/functional/test_install_compat.py @@ -21,7 +21,7 @@ def test_debian_egg_name_workaround(script): https://bitbucket.org/ianb/pip/issue/104/pip-uninstall-on-ubuntu-linux """ - result = script.pip('install', 'INITools==0.2') + result = script.pip("install", "INITools==0.2") egg_info = os.path.join( script.site_packages, "INITools-0.2-py%s.egg-info" % pyversion @@ -46,7 +46,7 @@ def test_debian_egg_name_workaround(script): # Try the uninstall and verify that everything is removed. 
result2 = script.pip("uninstall", "INITools", "-y") - assert_all_changes(result, result2, [script.venv / 'build', 'cache']) + assert_all_changes(result, result2, [script.venv / "build", "cache"]) def test_setup_py_with_dos_line_endings(script, data): @@ -56,4 +56,4 @@ def test_setup_py_with_dos_line_endings(script, data): Refs https://github.com/pypa/pip/issues/237 """ to_install = data.packages.joinpath("LineEndings") - script.pip('install', to_install, expect_error=False) + script.pip("install", to_install, expect_error=False) diff --git a/tests/functional/test_install_config.py b/tests/functional/test_install_config.py index 1d368f7ccce..376dac1eba8 100644 --- a/tests/functional/test_install_config.py +++ b/tests/functional/test_install_config.py @@ -10,8 +10,8 @@ def test_options_from_env_vars(script): Test if ConfigOptionParser reads env vars (e.g. not using PyPI here) """ - script.environ['PIP_NO_INDEX'] = '1' - result = script.pip('install', '-vvv', 'INITools', expect_error=True) + script.environ["PIP_NO_INDEX"] = "1" + result = script.pip("install", "-vvv", "INITools", expect_error=True) assert "Ignoring indexes:" in result.stdout, str(result) assert ( "DistributionNotFound: No matching distribution found for INITools" @@ -24,16 +24,16 @@ def test_command_line_options_override_env_vars(script, virtualenv): Test that command line options override environmental variables. 
""" - script.environ['PIP_INDEX_URL'] = 'https://example.com/simple/' - result = script.pip('install', '-vvv', 'INITools', expect_error=True) + script.environ["PIP_INDEX_URL"] = "https://example.com/simple/" + result = script.pip("install", "-vvv", "INITools", expect_error=True) assert "Getting page https://example.com/simple/initools" in result.stdout virtualenv.clear() result = script.pip( - 'install', - '-vvv', - '--index-url', - 'https://download.zope.org/ppix', - 'INITools', + "install", + "-vvv", + "--index-url", + "https://download.zope.org/ppix", + "INITools", expect_error=True, ) assert "example.com" not in result.stdout @@ -46,7 +46,7 @@ def test_env_vars_override_config_file(script, virtualenv): Test that environmental variables override settings in config files. """ - fd, config_file = tempfile.mkstemp('-pip.cfg', 'test-') + fd, config_file = tempfile.mkstemp("-pip.cfg", "test-") try: _test_env_vars_override_config_file(script, virtualenv, config_file) finally: @@ -58,7 +58,7 @@ def test_env_vars_override_config_file(script, virtualenv): def _test_env_vars_override_config_file(script, virtualenv, config_file): # set this to make pip load it - script.environ['PIP_CONFIG_FILE'] = config_file + script.environ["PIP_CONFIG_FILE"] = config_file # It's important that we test this particular config value ('no-index') # because there is/was a bug which only shows up in cases in which # 'config-item' and 'config_item' hash to the same value modulo the size @@ -71,14 +71,14 @@ def _test_env_vars_override_config_file(script, virtualenv, config_file): """ ) ) - result = script.pip('install', '-vvv', 'INITools', expect_error=True) + result = script.pip("install", "-vvv", "INITools", expect_error=True) assert ( "DistributionNotFound: No matching distribution found for INITools" in result.stdout ) - script.environ['PIP_NO_INDEX'] = '0' + script.environ["PIP_NO_INDEX"] = "0" virtualenv.clear() - result = script.pip('install', '-vvv', 'INITools') + result = 
script.pip("install", "-vvv", "INITools") assert "Successfully installed INITools" in result.stdout @@ -89,27 +89,27 @@ def test_command_line_append_flags(script, virtualenv, data): variables. """ - script.environ['PIP_FIND_LINKS'] = 'https://test.pypi.org' + script.environ["PIP_FIND_LINKS"] = "https://test.pypi.org" result = script.pip( - 'install', '-vvv', 'INITools', '--trusted-host', 'test.pypi.org' + "install", "-vvv", "INITools", "--trusted-host", "test.pypi.org" ) assert "Analyzing links from page https://test.pypi.org" in result.stdout, str( result ) virtualenv.clear() result = script.pip( - 'install', - '-vvv', - '--find-links', + "install", + "-vvv", + "--find-links", data.find_links, - 'INITools', - '--trusted-host', - 'test.pypi.org', + "INITools", + "--trusted-host", + "test.pypi.org", ) assert "Analyzing links from page https://test.pypi.org" in result.stdout assert ( - 'Skipping link: not a file: {}'.format(data.find_links) in result.stdout - ), 'stdout: {}'.format(result.stdout) + "Skipping link: not a file: {}".format(data.find_links) in result.stdout + ), "stdout: {}".format(result.stdout) @pytest.mark.network @@ -118,17 +118,17 @@ def test_command_line_appends_correctly(script, data): Test multiple appending options set by environmental variables. 
""" - script.environ['PIP_FIND_LINKS'] = 'https://test.pypi.org %s' % data.find_links + script.environ["PIP_FIND_LINKS"] = "https://test.pypi.org %s" % data.find_links result = script.pip( - 'install', '-vvv', 'INITools', '--trusted-host', 'test.pypi.org' + "install", "-vvv", "INITools", "--trusted-host", "test.pypi.org" ) assert ( "Analyzing links from page https://test.pypi.org" in result.stdout ), result.stdout assert ( - 'Skipping link: not a file: {}'.format(data.find_links) in result.stdout - ), 'stdout: {}'.format(result.stdout) + "Skipping link: not a file: {}".format(data.find_links) in result.stdout + ), "stdout: {}".format(result.stdout) def test_config_file_override_stack(script, virtualenv): @@ -137,7 +137,7 @@ def test_config_file_override_stack(script, virtualenv): local, overriding all with a command line flag). """ - fd, config_file = tempfile.mkstemp('-pip.cfg', 'test-') + fd, config_file = tempfile.mkstemp("-pip.cfg", "test-") try: _test_config_file_override_stack(script, virtualenv, config_file) finally: @@ -149,7 +149,7 @@ def test_config_file_override_stack(script, virtualenv): def _test_config_file_override_stack(script, virtualenv, config_file): # set this to make pip load it - script.environ['PIP_CONFIG_FILE'] = config_file + script.environ["PIP_CONFIG_FILE"] = config_file (script.scratch_path / config_file).write_text( textwrap.dedent( """\ @@ -158,7 +158,7 @@ def _test_config_file_override_stack(script, virtualenv, config_file): """ ) ) - result = script.pip('install', '-vvv', 'INITools', expect_error=True) + result = script.pip("install", "-vvv", "INITools", expect_error=True) assert "Getting page https://download.zope.org/ppix/initools" in result.stdout virtualenv.clear() (script.scratch_path / config_file).write_text( @@ -171,10 +171,10 @@ def _test_config_file_override_stack(script, virtualenv, config_file): """ ) ) - result = script.pip('install', '-vvv', 'INITools', expect_error=True) + result = script.pip("install", "-vvv", 
"INITools", expect_error=True) assert "Getting page https://pypi.gocept.com/initools" in result.stdout result = script.pip( - 'install', '-vvv', '--index-url', 'https://pypi.org/simple/', 'INITools' + "install", "-vvv", "--index-url", "https://pypi.org/simple/", "INITools" ) assert "Getting page http://download.zope.org/ppix/INITools" not in result.stdout assert "Getting page https://pypi.gocept.com/INITools" not in result.stdout @@ -190,9 +190,9 @@ def test_options_from_venv_config(script, virtualenv): conf = "[global]\nno-index = true" ini = virtualenv.location / CONFIG_BASENAME - with open(ini, 'w') as f: + with open(ini, "w") as f: f.write(conf) - result = script.pip('install', '-vvv', 'INITools', expect_error=True) + result = script.pip("install", "-vvv", "INITools", expect_error=True) assert "Ignoring indexes:" in result.stdout, str(result) assert ( "DistributionNotFound: No matching distribution found for INITools" @@ -201,9 +201,9 @@ def test_options_from_venv_config(script, virtualenv): def test_install_no_binary_via_config_disables_cached_wheels(script, data, with_wheel): - config_file = tempfile.NamedTemporaryFile(mode='wt', delete=False) + config_file = tempfile.NamedTemporaryFile(mode="wt", delete=False) try: - script.environ['PIP_CONFIG_FILE'] = config_file.name + script.environ["PIP_CONFIG_FILE"] = config_file.name config_file.write( textwrap.dedent( """\ @@ -214,7 +214,7 @@ def test_install_no_binary_via_config_disables_cached_wheels(script, data, with_ ) config_file.close() res = script.pip( - 'install', '--no-index', '-f', data.find_links, 'upper', expect_stderr=True + "install", "--no-index", "-f", data.find_links, "upper", expect_stderr=True ) finally: os.unlink(config_file.name) diff --git a/tests/functional/test_install_extras.py b/tests/functional/test_install_extras.py index 5ee1939a835..2ec51ddf1e4 100644 --- a/tests/functional/test_install_extras.py +++ b/tests/functional/test_install_extras.py @@ -9,8 +9,8 @@ def 
test_simple_extras_install_from_pypi(script): """ Test installing a package from PyPI using extras dependency Paste[openid]. """ - result = script.pip('install', 'Paste[openid]==1.7.5.1', expect_stderr=True) - initools_folder = script.site_packages / 'openid' + result = script.pip("install", "Paste[openid]==1.7.5.1", expect_stderr=True) + initools_folder = script.site_packages / "openid" assert initools_folder in result.files_created, result.files_created @@ -18,24 +18,24 @@ def test_extras_after_wheel(script, data): """ Test installing a package with extras after installing from a wheel. """ - simple = script.site_packages / 'simple' + simple = script.site_packages / "simple" no_extra = script.pip( - 'install', - '--no-index', - '-f', + "install", + "--no-index", + "-f", data.find_links, - 'requires_simple_extra', + "requires_simple_extra", expect_stderr=True, ) assert simple not in no_extra.files_created, no_extra.files_created extra = script.pip( - 'install', - '--no-index', - '-f', + "install", + "--no-index", + "-f", data.find_links, - 'requires_simple_extra[extra]', + "requires_simple_extra[extra]", expect_stderr=True, ) assert simple in extra.files_created, extra.files_created @@ -46,16 +46,16 @@ def test_no_extras_uninstall(script): """ No extras dependency gets uninstalled when the root package is uninstalled """ - result = script.pip('install', 'Paste[openid]==1.7.5.1', expect_stderr=True) - assert join(script.site_packages, 'paste') in result.files_created, sorted( + result = script.pip("install", "Paste[openid]==1.7.5.1", expect_stderr=True) + assert join(script.site_packages, "paste") in result.files_created, sorted( result.files_created.keys() ) - assert join(script.site_packages, 'openid') in result.files_created, sorted( + assert join(script.site_packages, "openid") in result.files_created, sorted( result.files_created.keys() ) - result2 = script.pip('uninstall', 'Paste', '-y') + result2 = script.pip("uninstall", "Paste", "-y") # openid should not 
be uninstalled - initools_folder = script.site_packages / 'openid' + initools_folder = script.site_packages / "openid" assert initools_folder not in result2.files_deleted, result.files_deleted @@ -67,11 +67,11 @@ def test_nonexistent_extra_warns_user_no_wheel(script, data): This exercises source installs. """ result = script.pip( - 'install', - '--no-binary=:all:', - '--no-index', - '--find-links=' + data.find_links, - 'simple[nonexistent]', + "install", + "--no-binary=:all:", + "--no-index", + "--find-links=" + data.find_links, + "simple[nonexistent]", expect_stderr=True, ) assert "simple 3.0 does not provide the extra 'nonexistent'" in result.stderr, str( @@ -87,10 +87,10 @@ def test_nonexistent_extra_warns_user_with_wheel(script, data): This exercises wheel installs. """ result = script.pip( - 'install', - '--no-index', - '--find-links=' + data.find_links, - 'simplewheel[nonexistent]', + "install", + "--no-index", + "--find-links=" + data.find_links, + "simplewheel[nonexistent]", expect_stderr=True, ) assert "simplewheel 2.0 does not provide the extra 'nonexistent'" in result.stderr @@ -101,10 +101,10 @@ def test_nonexistent_options_listed_in_order(script, data): Warn the user for each extra that doesn't exist. 
""" result = script.pip( - 'install', - '--no-index', - '--find-links=' + data.find_links, - 'simplewheel[nonexistent, nope]', + "install", + "--no-index", + "--find-links=" + data.find_links, + "simplewheel[nonexistent, nope]", expect_stderr=True, ) msg = ( @@ -117,7 +117,7 @@ def test_nonexistent_options_listed_in_order(script, data): def test_install_special_extra(script): # Check that uppercase letters and '-' are dealt with # make a dummy project - pkga_path = script.scratch_path / 'pkga' + pkga_path = script.scratch_path / "pkga" pkga_path.mkdir() pkga_path.joinpath("setup.py").write_text( textwrap.dedent( @@ -132,7 +132,7 @@ def test_install_special_extra(script): ) result = script.pip( - 'install', '--no-index', '%s[Hop_hOp-hoP]' % pkga_path, expect_error=True + "install", "--no-index", "%s[Hop_hOp-hoP]" % pkga_path, expect_error=True ) assert ( "Could not find a version that satisfies the requirement missing_pkg" diff --git a/tests/functional/test_install_force_reinstall.py b/tests/functional/test_install_force_reinstall.py index 964db70a9e2..321da76f215 100644 --- a/tests/functional/test_install_force_reinstall.py +++ b/tests/functional/test_install_force_reinstall.py @@ -2,14 +2,14 @@ def check_installed_version(script, package, expected): - result = script.pip('show', package) + result = script.pip("show", package) lines = result.stdout.splitlines() version = None for line in lines: - if line.startswith('Version: '): + if line.startswith("Version: "): version = line.split()[-1] break - assert version == expected, 'version {} != {}'.format(version, expected) + assert version == expected, "version {} != {}".format(version, expected) def check_force_reinstall(script, specifier, expected): @@ -18,15 +18,15 @@ def check_force_reinstall(script, specifier, expected): specifier: the requirement specifier to force-reinstall. expected: the expected version after force-reinstalling. 
""" - result = script.pip_install_local('simplewheel==1.0') - check_installed_version(script, 'simplewheel', '1.0') + result = script.pip_install_local("simplewheel==1.0") + check_installed_version(script, "simplewheel", "1.0") - result2 = script.pip_install_local('--force-reinstall', specifier) - assert result2.files_updated, 'force-reinstall failed' - check_installed_version(script, 'simplewheel', expected) + result2 = script.pip_install_local("--force-reinstall", specifier) + assert result2.files_updated, "force-reinstall failed" + check_installed_version(script, "simplewheel", expected) - result3 = script.pip('uninstall', 'simplewheel', '-y') - assert_all_changes(result, result3, [script.venv / 'build', 'cache']) + result3 = script.pip("uninstall", "simplewheel", "-y") + assert_all_changes(result, result3, [script.venv / "build", "cache"]) def test_force_reinstall_with_no_version_specifier(script): @@ -34,7 +34,7 @@ def test_force_reinstall_with_no_version_specifier(script): Check --force-reinstall when there is no version specifier and the installed version is not the newest version. """ - check_force_reinstall(script, 'simplewheel', '2.0') + check_force_reinstall(script, "simplewheel", "2.0") def test_force_reinstall_with_same_version_specifier(script): @@ -42,4 +42,4 @@ def test_force_reinstall_with_same_version_specifier(script): Check --force-reinstall when the version specifier equals the installed version and the installed version is not the newest version. 
""" - check_force_reinstall(script, 'simplewheel==1.0', '1.0') + check_force_reinstall(script, "simplewheel==1.0", "1.0") diff --git a/tests/functional/test_install_index.py b/tests/functional/test_install_index.py index 5c231fb0f0d..ac6b5323dd9 100644 --- a/tests/functional/test_install_index.py +++ b/tests/functional/test_install_index.py @@ -9,15 +9,15 @@ def test_find_links_relative_path(script, data): """Test find-links as a relative path.""" result = script.pip( - 'install', - 'parent==0.1', - '--no-index', - '--find-links', - 'packages/', + "install", + "parent==0.1", + "--no-index", + "--find-links", + "packages/", cwd=data.root, ) - egg_info_folder = script.site_packages / 'parent-0.1-py%s.egg-info' % pyversion - initools_folder = script.site_packages / 'parent' + egg_info_folder = script.site_packages / "parent-0.1-py%s.egg-info" % pyversion + initools_folder = script.site_packages / "parent" assert egg_info_folder in result.files_created, str(result) assert initools_folder in result.files_created, str(result) @@ -31,14 +31,14 @@ def test_find_links_requirements_file_relative_path(script, data): --find-links=%s parent==0.1 """ - % data.packages.replace(os.path.sep, '/') + % data.packages.replace(os.path.sep, "/") ) ) result = script.pip( - 'install', '-r', script.scratch_path / "test-req.txt", cwd=data.root + "install", "-r", script.scratch_path / "test-req.txt", cwd=data.root ) - egg_info_folder = script.site_packages / 'parent-0.1-py%s.egg-info' % pyversion - initools_folder = script.site_packages / 'parent' + egg_info_folder = script.site_packages / "parent-0.1-py%s.egg-info" % pyversion + initools_folder = script.site_packages / "parent" assert egg_info_folder in result.files_created, str(result) assert initools_folder in result.files_created, str(result) @@ -48,8 +48,8 @@ def test_install_from_file_index_hash_link(script, data): Test that a pkg can be installed from a file:// index using a link with a hash """ - result = script.pip('install', '-i', 
data.index_url(), 'simple==1.0') - egg_info_folder = script.site_packages / 'simple-1.0-py%s.egg-info' % pyversion + result = script.pip("install", "-i", data.index_url(), "simple==1.0") + egg_info_folder = script.site_packages / "simple-1.0-py%s.egg-info" % pyversion assert egg_info_folder in result.files_created, str(result) @@ -59,9 +59,9 @@ def test_file_index_url_quoting(script, data): """ index_url = data.index_url(urllib_parse.quote("in dex")) result = script.pip( - 'install', '-vvv', '--index-url', index_url, 'simple', expect_error=False + "install", "-vvv", "--index-url", index_url, "simple", expect_error=False ) - assert (script.site_packages / 'simple') in result.files_created, str(result.stdout) + assert (script.site_packages / "simple") in result.files_created, str(result.stdout) assert ( - script.site_packages / 'simple-1.0-py%s.egg-info' % pyversion + script.site_packages / "simple-1.0-py%s.egg-info" % pyversion ) in result.files_created, str(result) diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index 502930c6d96..dc5dfc45a74 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -18,7 +18,7 @@ def test_requirements_file(script): Test installing from a requirements file. 
""" - other_lib_name, other_lib_version = 'anyjson', '0.3' + other_lib_name, other_lib_version = "anyjson", "0.3" script.scratch_path.joinpath("initools-req.txt").write_text( textwrap.dedent( """\ @@ -29,14 +29,14 @@ def test_requirements_file(script): % (other_lib_name, other_lib_version) ) ) - result = script.pip('install', '-r', script.scratch_path / 'initools-req.txt') + result = script.pip("install", "-r", script.scratch_path / "initools-req.txt") assert ( - script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion + script.site_packages / "INITools-0.2-py%s.egg-info" % pyversion in result.files_created ) - assert script.site_packages / 'initools' in result.files_created + assert script.site_packages / "initools" in result.files_created assert result.files_created[script.site_packages / other_lib_name].dir - fn = '%s-%s-py%s.egg-info' % (other_lib_name, other_lib_version, pyversion) + fn = "%s-%s-py%s.egg-info" % (other_lib_name, other_lib_version, pyversion) assert result.files_created[script.site_packages / fn].dir @@ -63,37 +63,37 @@ def test_relative_requirements_file(script, data): URLs, use an egg= definition. """ - egg_info_file = script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion - egg_link_file = script.site_packages / 'FSPkg.egg-link' - package_folder = script.site_packages / 'fspkg' + egg_info_file = script.site_packages / "FSPkg-0.1.dev0-py%s.egg-info" % pyversion + egg_link_file = script.site_packages / "FSPkg.egg-link" + package_folder = script.site_packages / "fspkg" # Compute relative install path to FSPkg from scratch path. 
- full_rel_path = data.packages.joinpath('FSPkg') - script.scratch_path - full_rel_url = 'file:' + full_rel_path + '#egg=FSPkg' + full_rel_path = data.packages.joinpath("FSPkg") - script.scratch_path + full_rel_url = "file:" + full_rel_path + "#egg=FSPkg" embedded_rel_path = script.scratch_path.joinpath(full_rel_path) # For each relative path, install as either editable or not using either # URLs with egg links or not. for req_path in (full_rel_path, full_rel_url, embedded_rel_path): - req_path = req_path.replace(os.path.sep, '/') + req_path = req_path.replace(os.path.sep, "/") # Regular install. - with requirements_file(req_path + '\n', script.scratch_path) as reqs_file: + with requirements_file(req_path + "\n", script.scratch_path) as reqs_file: result = script.pip( - 'install', '-vvv', '-r', reqs_file.name, cwd=script.scratch_path + "install", "-vvv", "-r", reqs_file.name, cwd=script.scratch_path ) assert egg_info_file in result.files_created, str(result) assert package_folder in result.files_created, str(result) - script.pip('uninstall', '-y', 'fspkg') + script.pip("uninstall", "-y", "fspkg") # Editable install. with requirements_file( - '-e ' + req_path + '\n', script.scratch_path + "-e " + req_path + "\n", script.scratch_path ) as reqs_file: result = script.pip( - 'install', '-vvv', '-r', reqs_file.name, cwd=script.scratch_path + "install", "-vvv", "-r", reqs_file.name, cwd=script.scratch_path ) assert egg_link_file in result.files_created, str(result) - script.pip('uninstall', '-y', 'fspkg') + script.pip("uninstall", "-y", "fspkg") @pytest.mark.network @@ -103,7 +103,7 @@ def test_multiple_requirements_files(script, tmpdir): Test installing from multiple nested requirements files. 
""" - other_lib_name, other_lib_version = 'anyjson', '0.3' + other_lib_name, other_lib_version = "anyjson", "0.3" script.scratch_path.joinpath("initools-req.txt").write_text( textwrap.dedent( """ @@ -112,18 +112,18 @@ def test_multiple_requirements_files(script, tmpdir): """ ) % ( - local_checkout('svn+http://svn.colorstudy.com/INITools', tmpdir), + local_checkout("svn+http://svn.colorstudy.com/INITools", tmpdir), other_lib_name, ) ) script.scratch_path.joinpath("%s-req.txt" % other_lib_name).write_text( "%s<=%s" % (other_lib_name, other_lib_version) ) - result = script.pip('install', '-r', script.scratch_path / 'initools-req.txt') + result = script.pip("install", "-r", script.scratch_path / "initools-req.txt") assert result.files_created[script.site_packages / other_lib_name].dir - fn = '%s-%s-py%s.egg-info' % (other_lib_name, other_lib_version, pyversion) + fn = "%s-%s-py%s.egg-info" % (other_lib_name, other_lib_version, pyversion) assert result.files_created[script.site_packages / fn].dir - assert script.venv / 'src' / 'initools' in result.files_created + assert script.venv / "src" / "initools" in result.files_created def test_package_in_constraints_and_dependencies(script, data): @@ -131,30 +131,30 @@ def test_package_in_constraints_and_dependencies(script, data): "TopoRequires2==0.0.1\nTopoRequires==0.0.1" ) result = script.pip( - 'install', - '--no-index', - '-f', + "install", + "--no-index", + "-f", data.find_links, - '-c', - script.scratch_path / 'constraints.txt', - 'TopoRequires2', + "-c", + script.scratch_path / "constraints.txt", + "TopoRequires2", ) - assert 'installed TopoRequires-0.0.1' in result.stdout + assert "installed TopoRequires-0.0.1" in result.stdout def test_multiple_constraints_files(script, data): script.scratch_path.joinpath("outer.txt").write_text("-c inner.txt") script.scratch_path.joinpath("inner.txt").write_text("Upper==1.0") result = script.pip( - 'install', - '--no-index', - '-f', + "install", + "--no-index", + "-f", 
data.find_links, - '-c', - script.scratch_path / 'outer.txt', - 'Upper', + "-c", + script.scratch_path / "outer.txt", + "Upper", ) - assert 'installed Upper-1.0' in result.stdout + assert "installed Upper-1.0" in result.stdout def test_respect_order_in_requirements_file(script, data): @@ -169,23 +169,23 @@ def test_respect_order_in_requirements_file(script, data): ) result = script.pip( - 'install', - '--no-index', - '-f', + "install", + "--no-index", + "-f", data.find_links, - '-r', - script.scratch_path / 'frameworks-req.txt', + "-r", + script.scratch_path / "frameworks-req.txt", ) - downloaded = [line for line in result.stdout.split('\n') if 'Processing' in line] + downloaded = [line for line in result.stdout.split("\n") if "Processing" in line] - assert 'parent' in downloaded[0], ( + assert "parent" in downloaded[0], ( 'First download should be "parent" but was "%s"' % downloaded[0] ) - assert 'child' in downloaded[1], ( + assert "child" in downloaded[1], ( 'Second download should be "child" but was "%s"' % downloaded[1] ) - assert 'simple' in downloaded[2], ( + assert "simple" in downloaded[2], ( 'Third download should be "simple" but was "%s"' % downloaded[2] ) @@ -193,50 +193,50 @@ def test_respect_order_in_requirements_file(script, data): def test_install_local_editable_with_extras(script, data): to_install = data.packages.joinpath("LocalExtras") res = script.pip_install_local( - '-e', to_install + '[bar]', expect_error=False, expect_stderr=True + "-e", to_install + "[bar]", expect_error=False, expect_stderr=True ) - assert script.site_packages / 'easy-install.pth' in res.files_updated, str(res) - assert script.site_packages / 'LocalExtras.egg-link' in res.files_created, str(res) - assert script.site_packages / 'simple' in res.files_created, str(res) + assert script.site_packages / "easy-install.pth" in res.files_updated, str(res) + assert script.site_packages / "LocalExtras.egg-link" in res.files_created, str(res) + assert script.site_packages / "simple" 
in res.files_created, str(res) def test_install_collected_dependencies_first(script): - result = script.pip_install_local('toporequires2') - text = [line for line in result.stdout.split('\n') if 'Installing' in line][0] - assert text.endswith('toporequires2') + result = script.pip_install_local("toporequires2") + text = [line for line in result.stdout.split("\n") if "Installing" in line][0] + assert text.endswith("toporequires2") @pytest.mark.network def test_install_local_editable_with_subdirectory(script): - version_pkg_path = _create_test_package_with_subdirectory(script, 'version_subdir') + version_pkg_path = _create_test_package_with_subdirectory(script, "version_subdir") result = script.pip( - 'install', - '-e', - '%s#egg=version_subpkg&subdirectory=version_subdir' - % ('git+%s' % path_to_url(version_pkg_path),), + "install", + "-e", + "%s#egg=version_subpkg&subdirectory=version_subdir" + % ("git+%s" % path_to_url(version_pkg_path),), ) - result.assert_installed('version-subpkg', sub_dir='version_subdir') + result.assert_installed("version-subpkg", sub_dir="version_subdir") @pytest.mark.network def test_install_local_with_subdirectory(script): - version_pkg_path = _create_test_package_with_subdirectory(script, 'version_subdir') + version_pkg_path = _create_test_package_with_subdirectory(script, "version_subdir") result = script.pip( - 'install', - '%s#egg=version_subpkg&subdirectory=version_subdir' - % ('git+' + path_to_url(version_pkg_path),), + "install", + "%s#egg=version_subpkg&subdirectory=version_subdir" + % ("git+" + path_to_url(version_pkg_path),), ) - result.assert_installed('version_subpkg.py', editable=False) + result.assert_installed("version_subpkg.py", editable=False) def test_wheel_user_with_prefix_in_pydistutils_cfg(script, data, with_wheel): - if os.name == 'posix': + if os.name == "posix": user_filename = ".pydistutils.cfg" else: user_filename = "pydistutils.cfg" - user_cfg = os.path.join(os.path.expanduser('~'), user_filename) + user_cfg = 
os.path.join(os.path.expanduser("~"), user_filename) script.scratch_path.joinpath("bin").mkdir() with open(user_cfg, "w") as cfg: cfg.write( @@ -249,11 +249,11 @@ def test_wheel_user_with_prefix_in_pydistutils_cfg(script, data, with_wheel): ) result = script.pip( - 'install', '--user', '--no-index', '-f', data.find_links, 'requiresupper' + "install", "--user", "--no-index", "-f", data.find_links, "requiresupper" ) # Check that we are really installing a wheel - assert 'Running setup.py install for requiresupper' not in result.stdout - assert 'installed requiresupper' in result.stdout + assert "Running setup.py install for requiresupper" not in result.stdout + assert "installed requiresupper" in result.stdout def test_install_option_in_requirements_file(script, data, virtualenv): @@ -272,87 +272,87 @@ def test_install_option_in_requirements_file(script, data, virtualenv): ) result = script.pip( - 'install', - '--no-index', - '-f', + "install", + "--no-index", + "-f", data.find_links, - '-r', - script.scratch_path / 'reqs.txt', - '--install-option=--home=%s' % script.scratch_path.joinpath("home2"), + "-r", + script.scratch_path / "reqs.txt", + "--install-option=--home=%s" % script.scratch_path.joinpath("home2"), expect_stderr=True, ) - package_dir = script.scratch / 'home1' / 'lib' / 'python' / 'simple' + package_dir = script.scratch / "home1" / "lib" / "python" / "simple" assert package_dir in result.files_created def test_constraints_not_installed_by_default(script, data): script.scratch_path.joinpath("c.txt").write_text("requiresupper") result = script.pip( - 'install', - '--no-index', - '-f', + "install", + "--no-index", + "-f", data.find_links, - '-c', - script.scratch_path / 'c.txt', - 'Upper', + "-c", + script.scratch_path / "c.txt", + "Upper", ) - assert 'requiresupper' not in result.stdout + assert "requiresupper" not in result.stdout def test_constraints_only_causes_error(script, data): script.scratch_path.joinpath("c.txt").write_text("requiresupper") 
result = script.pip( - 'install', - '--no-index', - '-f', + "install", + "--no-index", + "-f", data.find_links, - '-c', - script.scratch_path / 'c.txt', + "-c", + script.scratch_path / "c.txt", expect_error=True, ) - assert 'installed requiresupper' not in result.stdout + assert "installed requiresupper" not in result.stdout def test_constraints_local_editable_install_causes_error(script, data): script.scratch_path.joinpath("constraints.txt").write_text("singlemodule==0.0.0") to_install = data.src.joinpath("singlemodule") result = script.pip( - 'install', - '--no-index', - '-f', + "install", + "--no-index", + "-f", data.find_links, - '-c', - script.scratch_path / 'constraints.txt', - '-e', + "-c", + script.scratch_path / "constraints.txt", + "-e", to_install, expect_error=True, ) - assert 'Could not satisfy constraints for' in result.stderr + assert "Could not satisfy constraints for" in result.stderr @pytest.mark.network def test_constraints_local_editable_install_pep518(script, data): to_install = data.src.joinpath("pep518-3.0") - script.pip('download', 'setuptools', 'wheel', '-d', data.packages) - script.pip('install', '--no-index', '-f', data.find_links, '-e', to_install) + script.pip("download", "setuptools", "wheel", "-d", data.packages) + script.pip("install", "--no-index", "-f", data.find_links, "-e", to_install) def test_constraints_local_install_causes_error(script, data): script.scratch_path.joinpath("constraints.txt").write_text("singlemodule==0.0.0") to_install = data.src.joinpath("singlemodule") result = script.pip( - 'install', - '--no-index', - '-f', + "install", + "--no-index", + "-f", data.find_links, - '-c', - script.scratch_path / 'constraints.txt', + "-c", + script.scratch_path / "constraints.txt", to_install, expect_error=True, ) - assert 'Could not satisfy constraints for' in result.stderr + assert "Could not satisfy constraints for" in result.stderr def test_constraints_constrain_to_local_editable(script, data): @@ -361,15 +361,15 @@ def 
test_constraints_constrain_to_local_editable(script, data): "-e %s#egg=singlemodule" % path_to_url(to_install) ) result = script.pip( - 'install', - '--no-index', - '-f', + "install", + "--no-index", + "-f", data.find_links, - '-c', - script.scratch_path / 'constraints.txt', - 'singlemodule', + "-c", + script.scratch_path / "constraints.txt", + "singlemodule", ) - assert 'Running setup.py develop for singlemodule' in result.stdout + assert "Running setup.py develop for singlemodule" in result.stdout def test_constraints_constrain_to_local(script, data): @@ -378,15 +378,15 @@ def test_constraints_constrain_to_local(script, data): "%s#egg=singlemodule" % path_to_url(to_install) ) result = script.pip( - 'install', - '--no-index', - '-f', + "install", + "--no-index", + "-f", data.find_links, - '-c', - script.scratch_path / 'constraints.txt', - 'singlemodule', + "-c", + script.scratch_path / "constraints.txt", + "singlemodule", ) - assert 'Running setup.py install for singlemodule' in result.stdout + assert "Running setup.py install for singlemodule" in result.stdout def test_constrained_to_url_install_same_url(script, data): @@ -394,15 +394,15 @@ def test_constrained_to_url_install_same_url(script, data): constraints = path_to_url(to_install) + "#egg=singlemodule" script.scratch_path.joinpath("constraints.txt").write_text(constraints) result = script.pip( - 'install', - '--no-index', - '-f', + "install", + "--no-index", + "-f", data.find_links, - '-c', - script.scratch_path / 'constraints.txt', + "-c", + script.scratch_path / "constraints.txt", to_install, ) - assert 'Running setup.py install for singlemodule' in result.stdout, str(result) + assert "Running setup.py install for singlemodule" in result.stdout, str(result) def test_double_install_spurious_hash_mismatch(script, tmpdir, data, with_wheel): @@ -417,25 +417,25 @@ def test_double_install_spurious_hash_mismatch(script, tmpdir, data, with_wheel) """ # Install wheel package, otherwise, it won't try to build 
wheels. with requirements_file( - 'simple==1.0 --hash=sha256:393043e672415891885c9a2a' - '0929b1af95fb866d6ca016b42d2e6ce53619b653', + "simple==1.0 --hash=sha256:393043e672415891885c9a2a" + "0929b1af95fb866d6ca016b42d2e6ce53619b653", tmpdir, ) as reqs_file: # Install a package (and build its wheel): result = script.pip_install_local( - '--find-links', data.find_links, '-r', reqs_file.abspath, expect_error=False + "--find-links", data.find_links, "-r", reqs_file.abspath, expect_error=False ) - assert 'Successfully installed simple-1.0' in str(result) + assert "Successfully installed simple-1.0" in str(result) # Uninstall it: - script.pip('uninstall', '-y', 'simple', expect_error=False) + script.pip("uninstall", "-y", "simple", expect_error=False) # Then install it again. We should not hit a hash mismatch, and the # package should install happily. result = script.pip_install_local( - '--find-links', data.find_links, '-r', reqs_file.abspath, expect_error=False + "--find-links", data.find_links, "-r", reqs_file.abspath, expect_error=False ) - assert 'Successfully installed simple-1.0' in str(result) + assert "Successfully installed simple-1.0" in str(result) def test_install_with_extras_from_constraints(script, data): @@ -444,9 +444,9 @@ def test_install_with_extras_from_constraints(script, data): "%s#egg=LocalExtras[bar]" % path_to_url(to_install) ) result = script.pip_install_local( - '-c', script.scratch_path / 'constraints.txt', 'LocalExtras' + "-c", script.scratch_path / "constraints.txt", "LocalExtras" ) - assert script.site_packages / 'simple' in result.files_created + assert script.site_packages / "simple" in result.files_created def test_install_with_extras_from_install(script, data): @@ -455,9 +455,9 @@ def test_install_with_extras_from_install(script, data): "%s#egg=LocalExtras" % path_to_url(to_install) ) result = script.pip_install_local( - '-c', script.scratch_path / 'constraints.txt', 'LocalExtras[baz]' + "-c", script.scratch_path / "constraints.txt", 
"LocalExtras[baz]" ) - assert script.site_packages / 'singlemodule.py' in result.files_created + assert script.site_packages / "singlemodule.py" in result.files_created def test_install_with_extras_joined(script, data): @@ -466,10 +466,10 @@ def test_install_with_extras_joined(script, data): "%s#egg=LocalExtras[bar]" % path_to_url(to_install) ) result = script.pip_install_local( - '-c', script.scratch_path / 'constraints.txt', 'LocalExtras[baz]' + "-c", script.scratch_path / "constraints.txt", "LocalExtras[baz]" ) - assert script.site_packages / 'simple' in result.files_created - assert script.site_packages / 'singlemodule.py' in result.files_created + assert script.site_packages / "simple" in result.files_created + assert script.site_packages / "singlemodule.py" in result.files_created def test_install_with_extras_editable_joined(script, data): @@ -478,10 +478,10 @@ def test_install_with_extras_editable_joined(script, data): "-e %s#egg=LocalExtras[bar]" % path_to_url(to_install) ) result = script.pip_install_local( - '-c', script.scratch_path / 'constraints.txt', 'LocalExtras[baz]' + "-c", script.scratch_path / "constraints.txt", "LocalExtras[baz]" ) - assert script.site_packages / 'simple' in result.files_created - assert script.site_packages / 'singlemodule.py' in result.files_created + assert script.site_packages / "simple" in result.files_created + assert script.site_packages / "singlemodule.py" in result.files_created def test_install_distribution_full_union(script, data): @@ -489,9 +489,9 @@ def test_install_distribution_full_union(script, data): result = script.pip_install_local( to_install, to_install + "[bar]", to_install + "[baz]" ) - assert 'Running setup.py install for LocalExtras' in result.stdout - assert script.site_packages / 'simple' in result.files_created - assert script.site_packages / 'singlemodule.py' in result.files_created + assert "Running setup.py install for LocalExtras" in result.stdout + assert script.site_packages / "simple" in 
result.files_created + assert script.site_packages / "singlemodule.py" in result.files_created def test_install_distribution_duplicate_extras(script, data): @@ -499,17 +499,17 @@ def test_install_distribution_duplicate_extras(script, data): package_name = to_install + "[bar]" with pytest.raises(AssertionError): result = script.pip_install_local(package_name, package_name) - assert 'Double requirement given: %s' % package_name in result.stderr + assert "Double requirement given: %s" % package_name in result.stderr def test_install_distribution_union_with_constraints(script, data): to_install = data.packages.joinpath("LocalExtras") script.scratch_path.joinpath("constraints.txt").write_text("%s[bar]" % to_install) result = script.pip_install_local( - '-c', script.scratch_path / 'constraints.txt', to_install + '[baz]' + "-c", script.scratch_path / "constraints.txt", to_install + "[baz]" ) - assert 'Running setup.py install for LocalExtras' in result.stdout - assert script.site_packages / 'singlemodule.py' in result.files_created + assert "Running setup.py install for LocalExtras" in result.stdout + assert script.site_packages / "singlemodule.py" in result.files_created def test_install_distribution_union_with_versions(script, data): @@ -534,7 +534,7 @@ def test_install_distribution_union_conflicting_extras(script, data): result = script.pip_install_local( to_install, to_install + "[bar]", expect_error=True ) - assert 'installed' not in result.stdout + assert "installed" not in result.stdout assert "Conflict" in result.stderr @@ -546,12 +546,12 @@ def test_install_unsupported_wheel_link_with_marker(script): """ ) % ( - 'https://github.com/a/b/c/asdf-1.5.2-cp27-none-xyz.whl', + "https://github.com/a/b/c/asdf-1.5.2-cp27-none-xyz.whl", 'sys_platform == "xyz"', ) ) result = script.pip( - 'install', '-r', script.scratch_path / 'with-marker.txt', expect_error=False + "install", "-r", script.scratch_path / "with-marker.txt", expect_error=False ) assert ( @@ -573,9 +573,9 @@ 
def test_install_unsupported_wheel_file(script, data): ) ) result = script.pip( - 'install', - '-r', - script.scratch_path / 'wheel-file.txt', + "install", + "-r", + script.scratch_path / "wheel-file.txt", expect_error=True, expect_stderr=True, ) @@ -608,11 +608,11 @@ def test_install_options_local_to_package(script, data): % home_simple ) ) - result = script.pip('install', '--no-index', '-f', data.find_links, '-r', reqs_file) + result = script.pip("install", "--no-index", "-f", data.find_links, "-r", reqs_file) - simple = test_simple / 'lib' / 'python' / 'simple' - bad = test_simple / 'lib' / 'python' / 'initools' - good = script.site_packages / 'initools' + simple = test_simple / "lib" / "python" / "simple" + bad = test_simple / "lib" / "python" / "initools" + good = script.site_packages / "initools" assert simple in result.files_created assert result.files_created[simple].dir assert bad not in result.files_created diff --git a/tests/functional/test_install_upgrade.py b/tests/functional/test_install_upgrade.py index ff998789e35..6800932785e 100644 --- a/tests/functional/test_install_upgrade.py +++ b/tests/functional/test_install_upgrade.py @@ -13,11 +13,11 @@ def test_no_upgrade_unless_requested(script): No upgrade if not specifically requested. 
""" - script.pip('install', 'INITools==0.1') - result = script.pip('install', 'INITools') + script.pip("install", "INITools==0.1") + result = script.pip("install", "INITools") assert ( not result.files_created - ), 'pip install INITools upgraded when it should not have' + ), "pip install INITools upgraded when it should not have" def test_invalid_upgrade_strategy_causes_error(script): @@ -26,7 +26,7 @@ def test_invalid_upgrade_strategy_causes_error(script): """ result = script.pip_install_local( - '--upgrade', '--upgrade-strategy=bazinga', 'simple', expect_error=True + "--upgrade", "--upgrade-strategy=bazinga", "simple", expect_error=True ) assert result.returncode @@ -38,16 +38,16 @@ def test_only_if_needed_does_not_upgrade_deps_when_satisfied(script): It doesn't upgrade a dependency if it already satisfies the requirements. """ - script.pip_install_local('simple==2.0') + script.pip_install_local("simple==2.0") result = script.pip_install_local( - '--upgrade', '--upgrade-strategy=only-if-needed', 'require_simple' + "--upgrade", "--upgrade-strategy=only-if-needed", "require_simple" ) assert ( - script.site_packages / 'require_simple-1.0-py%s.egg-info' % pyversion + script.site_packages / "require_simple-1.0-py%s.egg-info" % pyversion ) not in result.files_deleted, "should have installed require_simple==1.0" assert ( - script.site_packages / 'simple-2.0-py%s.egg-info' % pyversion + script.site_packages / "simple-2.0-py%s.egg-info" % pyversion ) not in result.files_deleted, "should not have uninstalled simple==2.0" assert ( "Requirement already satisfied, skipping upgrade: simple" in result.stdout @@ -59,20 +59,20 @@ def test_only_if_needed_does_upgrade_deps_when_no_longer_satisfied(script): It does upgrade a dependency if it no longer satisfies the requirements. 
""" - script.pip_install_local('simple==1.0') + script.pip_install_local("simple==1.0") result = script.pip_install_local( - '--upgrade', '--upgrade-strategy=only-if-needed', 'require_simple' + "--upgrade", "--upgrade-strategy=only-if-needed", "require_simple" ) assert ( - script.site_packages / 'require_simple-1.0-py%s.egg-info' % pyversion + script.site_packages / "require_simple-1.0-py%s.egg-info" % pyversion ) not in result.files_deleted, "should have installed require_simple==1.0" assert ( - script.site_packages / 'simple-3.0-py%s.egg-info' % pyversion + script.site_packages / "simple-3.0-py%s.egg-info" % pyversion in result.files_created ), "should have installed simple==3.0" assert ( - script.site_packages / 'simple-1.0-py%s.egg-info' % pyversion + script.site_packages / "simple-1.0-py%s.egg-info" % pyversion in result.files_deleted ), "should have uninstalled simple==1.0" @@ -82,16 +82,16 @@ def test_eager_does_upgrade_dependecies_when_currently_satisfied(script): It does upgrade a dependency even if it already satisfies the requirements. """ - script.pip_install_local('simple==2.0') + script.pip_install_local("simple==2.0") result = script.pip_install_local( - '--upgrade', '--upgrade-strategy=eager', 'require_simple' + "--upgrade", "--upgrade-strategy=eager", "require_simple" ) assert ( - script.site_packages / 'require_simple-1.0-py%s.egg-info' % pyversion + script.site_packages / "require_simple-1.0-py%s.egg-info" % pyversion ) not in result.files_deleted, "should have installed require_simple==1.0" assert ( - script.site_packages / 'simple-2.0-py%s.egg-info' % pyversion + script.site_packages / "simple-2.0-py%s.egg-info" % pyversion ) in result.files_deleted, "should have uninstalled simple==2.0" @@ -100,20 +100,20 @@ def test_eager_does_upgrade_dependecies_when_no_longer_satisfied(script): It does upgrade a dependency if it no longer satisfies the requirements. 
""" - script.pip_install_local('simple==1.0') + script.pip_install_local("simple==1.0") result = script.pip_install_local( - '--upgrade', '--upgrade-strategy=eager', 'require_simple' + "--upgrade", "--upgrade-strategy=eager", "require_simple" ) assert ( - script.site_packages / 'require_simple-1.0-py%s.egg-info' % pyversion + script.site_packages / "require_simple-1.0-py%s.egg-info" % pyversion ) not in result.files_deleted, "should have installed require_simple==1.0" assert ( - script.site_packages / 'simple-3.0-py%s.egg-info' % pyversion + script.site_packages / "simple-3.0-py%s.egg-info" % pyversion in result.files_created ), "should have installed simple==3.0" assert ( - script.site_packages / 'simple-1.0-py%s.egg-info' % pyversion + script.site_packages / "simple-1.0-py%s.egg-info" % pyversion in result.files_deleted ), "should have uninstalled simple==1.0" @@ -124,15 +124,15 @@ def test_upgrade_to_specific_version(script): It does upgrade to specific version requested. """ - script.pip('install', 'INITools==0.1') - result = script.pip('install', 'INITools==0.2') - assert result.files_created, 'pip install with specific version did not upgrade' + script.pip("install", "INITools==0.1") + result = script.pip("install", "INITools==0.2") + assert result.files_created, "pip install with specific version did not upgrade" assert ( - script.site_packages / 'INITools-0.1-py%s.egg-info' % pyversion + script.site_packages / "INITools-0.1-py%s.egg-info" % pyversion in result.files_deleted ) assert ( - script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion + script.site_packages / "INITools-0.2-py%s.egg-info" % pyversion in result.files_created ) @@ -143,11 +143,11 @@ def test_upgrade_if_requested(script): And it does upgrade if requested. 
""" - script.pip('install', 'INITools==0.1') - result = script.pip('install', '--upgrade', 'INITools') - assert result.files_created, 'pip install --upgrade did not upgrade' + script.pip("install", "INITools==0.1") + result = script.pip("install", "--upgrade", "INITools") + assert result.files_created, "pip install --upgrade did not upgrade" assert ( - script.site_packages / 'INITools-0.1-py%s.egg-info' % pyversion + script.site_packages / "INITools-0.1-py%s.egg-info" % pyversion not in result.files_created ) @@ -157,12 +157,12 @@ def test_upgrade_with_newest_already_installed(script, data): If the newest version of a package is already installed, the package should not be reinstalled and the user should be informed. """ - script.pip('install', '-f', data.find_links, '--no-index', 'simple') + script.pip("install", "-f", data.find_links, "--no-index", "simple") result = script.pip( - 'install', '--upgrade', '-f', data.find_links, '--no-index', 'simple' + "install", "--upgrade", "-f", data.find_links, "--no-index", "simple" ) - assert not result.files_created, 'simple upgraded when it should not have' - assert 'already up-to-date' in result.stdout, result.stdout + assert not result.files_created, "simple upgraded when it should not have" + assert "already up-to-date" in result.stdout, result.stdout @pytest.mark.network @@ -171,14 +171,14 @@ def test_upgrade_force_reinstall_newest(script): Force reinstallation of a package even if it is already at its newest version if --force-reinstall is supplied. 
""" - result = script.pip('install', 'INITools') - assert script.site_packages / 'initools' in result.files_created, sorted( + result = script.pip("install", "INITools") + assert script.site_packages / "initools" in result.files_created, sorted( result.files_created.keys() ) - result2 = script.pip('install', '--upgrade', '--force-reinstall', 'INITools') - assert result2.files_updated, 'upgrade to INITools 0.3 failed' - result3 = script.pip('uninstall', 'initools', '-y') - assert_all_changes(result, result3, [script.venv / 'build', 'cache']) + result2 = script.pip("install", "--upgrade", "--force-reinstall", "INITools") + assert result2.files_updated, "upgrade to INITools 0.3 failed" + result3 = script.pip("uninstall", "initools", "-y") + assert_all_changes(result, result3, [script.venv / "build", "cache"]) @pytest.mark.network @@ -187,14 +187,14 @@ def test_uninstall_before_upgrade(script): Automatic uninstall-before-upgrade. """ - result = script.pip('install', 'INITools==0.2') - assert script.site_packages / 'initools' in result.files_created, sorted( + result = script.pip("install", "INITools==0.2") + assert script.site_packages / "initools" in result.files_created, sorted( result.files_created.keys() ) - result2 = script.pip('install', 'INITools==0.3') - assert result2.files_created, 'upgrade to INITools 0.3 failed' - result3 = script.pip('uninstall', 'initools', '-y') - assert_all_changes(result, result3, [script.venv / 'build', 'cache']) + result2 = script.pip("install", "INITools==0.3") + assert result2.files_created, "upgrade to INITools 0.3 failed" + result3 = script.pip("uninstall", "initools", "-y") + assert_all_changes(result, result3, [script.venv / "build", "cache"]) @pytest.mark.network @@ -203,18 +203,18 @@ def test_uninstall_before_upgrade_from_url(script): Automatic uninstall-before-upgrade from URL. 
""" - result = script.pip('install', 'INITools==0.2') - assert script.site_packages / 'initools' in result.files_created, sorted( + result = script.pip("install", "INITools==0.2") + assert script.site_packages / "initools" in result.files_created, sorted( result.files_created.keys() ) result2 = script.pip( - 'install', - 'https://files.pythonhosted.org/packages/source/I/INITools/INITools-' - '0.3.tar.gz', + "install", + "https://files.pythonhosted.org/packages/source/I/INITools/INITools-" + "0.3.tar.gz", ) - assert result2.files_created, 'upgrade to INITools 0.3 failed' - result3 = script.pip('uninstall', 'initools', '-y') - assert_all_changes(result, result3, [script.venv / 'build', 'cache']) + assert result2.files_created, "upgrade to INITools 0.3 failed" + result3 = script.pip("uninstall", "initools", "-y") + assert_all_changes(result, result3, [script.venv / "build", "cache"]) @pytest.mark.network @@ -224,18 +224,18 @@ def test_upgrade_to_same_version_from_url(script): need to uninstall and reinstall if --upgrade is not specified. 
""" - result = script.pip('install', 'INITools==0.3') - assert script.site_packages / 'initools' in result.files_created, sorted( + result = script.pip("install", "INITools==0.3") + assert script.site_packages / "initools" in result.files_created, sorted( result.files_created.keys() ) result2 = script.pip( - 'install', - 'https://files.pythonhosted.org/packages/source/I/INITools/INITools-' - '0.3.tar.gz', + "install", + "https://files.pythonhosted.org/packages/source/I/INITools/INITools-" + "0.3.tar.gz", ) - assert not result2.files_updated, 'INITools 0.3 reinstalled same version' - result3 = script.pip('uninstall', 'initools', '-y') - assert_all_changes(result, result3, [script.venv / 'build', 'cache']) + assert not result2.files_updated, "INITools 0.3 reinstalled same version" + result3 = script.pip("uninstall", "initools", "-y") + assert_all_changes(result, result3, [script.venv / "build", "cache"]) @pytest.mark.network @@ -253,7 +253,7 @@ def test_upgrade_from_reqs_file(script): """ ) ) - install_result = script.pip('install', '-r', script.scratch_path / 'test-req.txt') + install_result = script.pip("install", "-r", script.scratch_path / "test-req.txt") script.scratch_path.joinpath("test-req.txt").write_text( textwrap.dedent( """\ @@ -263,14 +263,14 @@ def test_upgrade_from_reqs_file(script): """ ) ) - script.pip('install', '--upgrade', '-r', script.scratch_path / 'test-req.txt') + script.pip("install", "--upgrade", "-r", script.scratch_path / "test-req.txt") uninstall_result = script.pip( - 'uninstall', '-r', script.scratch_path / 'test-req.txt', '-y' + "uninstall", "-r", script.scratch_path / "test-req.txt", "-y" ) assert_all_changes( install_result, uninstall_result, - [script.venv / 'build', 'cache', script.scratch / 'test-req.txt'], + [script.venv / "build", "cache", script.scratch / "test-req.txt"], ) @@ -280,24 +280,24 @@ def test_uninstall_rollback(script, data): crafted to fail on install). 
""" - result = script.pip('install', '-f', data.find_links, '--no-index', 'broken==0.1') - assert script.site_packages / 'broken.py' in result.files_created, list( + result = script.pip("install", "-f", data.find_links, "--no-index", "broken==0.1") + assert script.site_packages / "broken.py" in result.files_created, list( result.files_created.keys() ) result2 = script.pip( - 'install', - '-f', + "install", + "-f", data.find_links, - '--no-index', - 'broken===0.2broken', + "--no-index", + "broken===0.2broken", expect_error=True, ) assert result2.returncode == 1, str(result2) assert ( - script.run('python', '-c', "import broken; print(broken.VERSION)").stdout - == '0.1\n' + script.run("python", "-c", "import broken; print(broken.VERSION)").stdout + == "0.1\n" ) - assert_all_changes(result.files_after, result2, [script.venv / 'build']) + assert_all_changes(result.files_after, result2, [script.venv / "build"]) @pytest.mark.network @@ -306,15 +306,15 @@ def test_should_not_install_always_from_cache(script): If there is an old cached package, pip should download the newer version Related to issue #175 """ - script.pip('install', 'INITools==0.2') - script.pip('uninstall', '-y', 'INITools') - result = script.pip('install', 'INITools==0.1') + script.pip("install", "INITools==0.2") + script.pip("uninstall", "-y", "INITools") + result = script.pip("install", "INITools==0.1") assert ( - script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion + script.site_packages / "INITools-0.2-py%s.egg-info" % pyversion not in result.files_created ) assert ( - script.site_packages / 'INITools-0.1-py%s.egg-info' % pyversion + script.site_packages / "INITools-0.1-py%s.egg-info" % pyversion in result.files_created ) @@ -324,15 +324,15 @@ def test_install_with_ignoreinstalled_requested(script): """ Test old conflicting package is completely ignored """ - script.pip('install', 'INITools==0.1') - result = script.pip('install', '-I', 'INITools==0.3') - assert result.files_created, 'pip 
install -I did not install' + script.pip("install", "INITools==0.1") + result = script.pip("install", "-I", "INITools==0.3") + assert result.files_created, "pip install -I did not install" # both the old and new metadata should be present. assert os.path.exists( - script.site_packages_path / 'INITools-0.1-py%s.egg-info' % pyversion + script.site_packages_path / "INITools-0.1-py%s.egg-info" % pyversion ) assert os.path.exists( - script.site_packages_path / 'INITools-0.3-py%s.egg-info' % pyversion + script.site_packages_path / "INITools-0.3-py%s.egg-info" % pyversion ) @@ -385,24 +385,24 @@ class TestUpgradeDistributeToSetuptools(object): def prep_ve(self, script, version, pip_src, distribute=False): self.script = script - self.script.pip_install_local('virtualenv==%s' % version) - args = ['virtualenv', self.script.scratch_path / 'VE'] + self.script.pip_install_local("virtualenv==%s" % version) + args = ["virtualenv", self.script.scratch_path / "VE"] if distribute: - args.insert(1, '--distribute') + args.insert(1, "--distribute") if version == "1.9.1" and not distribute: # setuptools 0.6 didn't support PYTHONDONTWRITEBYTECODE del self.script.environ["PYTHONDONTWRITEBYTECODE"] self.script.run(*args) - if sys.platform == 'win32': + if sys.platform == "win32": bindir = "Scripts" else: bindir = "bin" - self.ve_bin = self.script.scratch_path / 'VE' / bindir - self.script.run(self.ve_bin / 'pip', 'uninstall', '-y', 'pip') + self.ve_bin = self.script.scratch_path / "VE" / bindir + self.script.run(self.ve_bin / "pip", "uninstall", "-y", "pip") self.script.run( - self.ve_bin / 'python', - 'setup.py', - 'install', + self.ve_bin / "python", + "setup.py", + "install", cwd=pip_src, expect_stderr=True, ) diff --git a/tests/functional/test_install_user.py b/tests/functional/test_install_user.py index 9cba4ef79a4..0ac8a245e1f 100644 --- a/tests/functional/test_install_user.py +++ b/tests/functional/test_install_user.py @@ -32,15 +32,15 @@ def 
test_reset_env_system_site_packages_usersite(self, script): """ Check user site works as expected. """ - script.pip('install', '--user', 'INITools==0.2') + script.pip("install", "--user", "INITools==0.2") result = script.run( - 'python', - '-c', + "python", + "-c", "import pkg_resources; print(pkg_resources.get_distribution" "('initools').project_name)", ) project_name = result.stdout.strip() - assert 'INITools' == project_name, project_name + assert "INITools" == project_name, project_name @pytest.mark.network @pytest.mark.svn @@ -50,13 +50,13 @@ def test_install_subversion_usersite_editable_with_distribute(self, script, tmpd distribute """ result = script.pip( - 'install', - '--user', - '-e', - '%s#egg=initools' - % local_checkout('svn+http://svn.colorstudy.com/INITools', tmpdir), + "install", + "--user", + "-e", + "%s#egg=initools" + % local_checkout("svn+http://svn.colorstudy.com/INITools", tmpdir), ) - result.assert_installed('INITools', use_user_site=True) + result.assert_installed("INITools", use_user_site=True) def test_install_from_current_directory_into_usersite( self, script, data, with_wheel @@ -66,13 +66,13 @@ def test_install_from_current_directory_into_usersite( """ run_from = data.packages.joinpath("FSPkg") result = script.pip( - 'install', '-vvv', '--user', curdir, cwd=run_from, expect_error=False + "install", "-vvv", "--user", curdir, cwd=run_from, expect_error=False ) - fspkg_folder = script.user_site / 'fspkg' + fspkg_folder = script.user_site / "fspkg" assert fspkg_folder in result.files_created, result.stdout - dist_info_folder = script.user_site / 'FSPkg-0.1.dev0.dist-info' + dist_info_folder = script.user_site / "FSPkg-0.1.dev0.dist-info" assert dist_info_folder in result.files_created @pytest.mark.incompatible_with_test_venv @@ -85,7 +85,7 @@ def test_install_user_venv_nositepkgs_fails(self, virtualenv, script, data): virtualenv.user_site_packages = False run_from = data.packages.joinpath("FSPkg") result = script.pip( - 'install', '--user', 
curdir, cwd=run_from, expect_error=True + "install", "--user", curdir, cwd=run_from, expect_error=True ) assert ( "Can not perform a '--user' install. User site-packages are not " @@ -98,18 +98,18 @@ def test_install_user_conflict_in_usersite(self, script): Test user install with conflict in usersite updates usersite. """ - script.pip('install', '--user', 'INITools==0.3', '--no-binary=:all:') + script.pip("install", "--user", "INITools==0.3", "--no-binary=:all:") - result2 = script.pip('install', '--user', 'INITools==0.1', '--no-binary=:all:') + result2 = script.pip("install", "--user", "INITools==0.1", "--no-binary=:all:") # usersite has 0.1 - egg_info_folder = script.user_site / 'INITools-0.1-py%s.egg-info' % pyversion + egg_info_folder = script.user_site / "INITools-0.1-py%s.egg-info" % pyversion initools_v3_file = ( # file only in 0.3 script.base_path / script.user_site - / 'initools' - / 'configparser.py' + / "initools" + / "configparser.py" ) assert egg_info_folder in result2.files_created, str(result2) assert not isfile(initools_v3_file), initools_v3_file @@ -122,13 +122,13 @@ def test_install_user_conflict_in_globalsite(self, virtualenv, script): """ _patch_dist_in_site_packages(virtualenv) - script.pip('install', 'INITools==0.2', '--no-binary=:all:') + script.pip("install", "INITools==0.2", "--no-binary=:all:") - result2 = script.pip('install', '--user', 'INITools==0.1', '--no-binary=:all:') + result2 = script.pip("install", "--user", "INITools==0.1", "--no-binary=:all:") # usersite has 0.1 - egg_info_folder = script.user_site / 'INITools-0.1-py%s.egg-info' % pyversion - initools_folder = script.user_site / 'initools' + egg_info_folder = script.user_site / "INITools-0.1-py%s.egg-info" % pyversion + initools_folder = script.user_site / "initools" assert egg_info_folder in result2.files_created, str(result2) assert initools_folder in result2.files_created, str(result2) @@ -136,10 +136,10 @@ def test_install_user_conflict_in_globalsite(self, virtualenv, 
script): egg_info_folder = ( script.base_path / script.site_packages - / 'INITools-0.2-py%s.egg-info' + / "INITools-0.2-py%s.egg-info" % pyversion ) - initools_folder = script.base_path / script.site_packages / 'initools' + initools_folder = script.base_path / script.site_packages / "initools" assert isdir(egg_info_folder) assert isdir(initools_folder) @@ -151,14 +151,14 @@ def test_upgrade_user_conflict_in_globalsite(self, virtualenv, script): """ _patch_dist_in_site_packages(virtualenv) - script.pip('install', 'INITools==0.2', '--no-binary=:all:') + script.pip("install", "INITools==0.2", "--no-binary=:all:") result2 = script.pip( - 'install', '--user', '--upgrade', 'INITools', '--no-binary=:all:' + "install", "--user", "--upgrade", "INITools", "--no-binary=:all:" ) # usersite has 0.3.1 - egg_info_folder = script.user_site / 'INITools-0.3.1-py%s.egg-info' % pyversion - initools_folder = script.user_site / 'initools' + egg_info_folder = script.user_site / "INITools-0.3.1-py%s.egg-info" % pyversion + initools_folder = script.user_site / "initools" assert egg_info_folder in result2.files_created, str(result2) assert initools_folder in result2.files_created, str(result2) @@ -166,10 +166,10 @@ def test_upgrade_user_conflict_in_globalsite(self, virtualenv, script): egg_info_folder = ( script.base_path / script.site_packages - / 'INITools-0.2-py%s.egg-info' + / "INITools-0.2-py%s.egg-info" % pyversion ) - initools_folder = script.base_path / script.site_packages / 'initools' + initools_folder = script.base_path / script.site_packages / "initools" assert isdir(egg_info_folder), result2.stdout assert isdir(initools_folder) @@ -181,19 +181,19 @@ def test_install_user_conflict_in_globalsite_and_usersite(self, virtualenv, scri """ _patch_dist_in_site_packages(virtualenv) - script.pip('install', 'INITools==0.2', '--no-binary=:all:') - script.pip('install', '--user', 'INITools==0.3', '--no-binary=:all:') + script.pip("install", "INITools==0.2", "--no-binary=:all:") + 
script.pip("install", "--user", "INITools==0.3", "--no-binary=:all:") - result3 = script.pip('install', '--user', 'INITools==0.1', '--no-binary=:all:') + result3 = script.pip("install", "--user", "INITools==0.1", "--no-binary=:all:") # usersite has 0.1 - egg_info_folder = script.user_site / 'INITools-0.1-py%s.egg-info' % pyversion + egg_info_folder = script.user_site / "INITools-0.1-py%s.egg-info" % pyversion initools_v3_file = ( # file only in 0.3 script.base_path / script.user_site - / 'initools' - / 'configparser.py' + / "initools" + / "configparser.py" ) assert egg_info_folder in result3.files_created, str(result3) assert not isfile(initools_v3_file), initools_v3_file @@ -202,10 +202,10 @@ def test_install_user_conflict_in_globalsite_and_usersite(self, virtualenv, scri egg_info_folder = ( script.base_path / script.site_packages - / 'INITools-0.2-py%s.egg-info' + / "INITools-0.2-py%s.egg-info" % pyversion ) - initools_folder = script.base_path / script.site_packages / 'initools' + initools_folder = script.base_path / script.site_packages / "initools" assert isdir(egg_info_folder) assert isdir(initools_folder) @@ -216,17 +216,17 @@ def test_install_user_in_global_virtualenv_with_conflict_fails(self, script): site fails. 
""" - script.pip('install', 'INITools==0.2') + script.pip("install", "INITools==0.2") - result2 = script.pip('install', '--user', 'INITools==0.1', expect_error=True) + result2 = script.pip("install", "--user", "INITools==0.1", expect_error=True) resultp = script.run( - 'python', - '-c', + "python", + "-c", "import pkg_resources; print(pkg_resources.get_distribution" "('initools').location)", ) dist_location = resultp.stdout.strip() assert ( "Will not install to the user site because it will lack sys.path " - "precedence to %s in %s" % ('INITools', dist_location) in result2.stderr + "precedence to %s in %s" % ("INITools", dist_location) in result2.stderr ) diff --git a/tests/functional/test_install_vcs_git.py b/tests/functional/test_install_vcs_git.py index 920933d0c45..f074e8d993c 100644 --- a/tests/functional/test_install_vcs_git.py +++ b/tests/functional/test_install_vcs_git.py @@ -18,7 +18,7 @@ def _get_editable_repo_dir(script, package_name): """ Return the repository directory for an editable install. """ - return script.venv_path / 'src' / package_name + return script.venv_path / "src" / package_name def _get_editable_branch(script, package_name): @@ -26,7 +26,7 @@ def _get_editable_branch(script, package_name): Return the current branch of an editable install. """ repo_dir = _get_editable_repo_dir(script, package_name) - result = script.run('git', 'rev-parse', '--abbrev-ref', 'HEAD', cwd=repo_dir) + result = script.run("git", "rev-parse", "--abbrev-ref", "HEAD", cwd=repo_dir) return result.stdout.strip() @@ -36,7 +36,7 @@ def _get_branch_remote(script, package_name, branch): """ repo_dir = _get_editable_repo_dir(script, package_name) result = script.run( - 'git', 'config', 'branch.{}.remote'.format(branch), cwd=repo_dir + "git", "config", "branch.{}.remote".format(branch), cwd=repo_dir ) return result.stdout.strip() @@ -54,13 +54,13 @@ def _github_checkout(url_path, temp_dir, rev=None, egg=None, scheme=None): scheme: the scheme without the "git+" prefix. 
Defaults to "https". """ if scheme is None: - scheme = 'https' - url = 'git+{}://github.com/{}'.format(scheme, url_path) + scheme = "https" + url = "git+{}://github.com/{}".format(scheme, url_path) local_url = local_checkout(url, temp_dir) if rev is not None: - local_url += '@{}'.format(rev) + local_url += "@{}".format(rev) if egg is not None: - local_url += '#egg={}'.format(egg) + local_url += "#egg={}".format(egg) return local_url @@ -75,8 +75,8 @@ def _make_version_pkg_url(path, rev=None): rev: an optional revision to install like a branch name, tag, or SHA. """ file_url = _test_path_to_file_url(path) - url_rev = '' if rev is None else '@{}'.format(rev) - url = 'git+{}{}#egg=version_pkg'.format(file_url, url_rev) + url_rev = "" if rev is None else "@{}".format(rev) + url = "git+{}{}#egg=version_pkg".format(file_url, url_rev) return url @@ -92,7 +92,7 @@ def _install_version_pkg_only(script, path, rev=None, expect_stderr=False): rev: an optional revision to install like a branch name or tag. """ version_pkg_url = _make_version_pkg_url(path, rev=rev) - script.pip('install', '-e', version_pkg_url, expect_stderr=expect_stderr) + script.pip("install", "-e", version_pkg_url, expect_stderr=expect_stderr) def _install_version_pkg(script, path, rev=None, expect_stderr=False): @@ -106,7 +106,7 @@ def _install_version_pkg(script, path, rev=None, expect_stderr=False): rev: an optional revision to install like a branch name or tag. 
""" _install_version_pkg_only(script, path, rev=rev, expect_stderr=expect_stderr) - result = script.run('version_pkg') + result = script.run("version_pkg") version = result.stdout.strip() return version @@ -123,11 +123,11 @@ def test_git_install_again_after_changes(script): """ version_pkg_path = _create_test_package(script) version = _install_version_pkg(script, version_pkg_path) - assert version == '0.1' + assert version == "0.1" _change_test_package_version(script, version_pkg_path) version = _install_version_pkg(script, version_pkg_path) - assert version == 'some different version' + assert version == "some different version" def test_git_install_branch_again_after_branch_changes(script): @@ -136,12 +136,12 @@ def test_git_install_branch_again_after_branch_changes(script): repository. """ version_pkg_path = _create_test_package(script) - version = _install_version_pkg(script, version_pkg_path, rev='master') - assert version == '0.1' + version = _install_version_pkg(script, version_pkg_path, rev="master") + assert version == "0.1" _change_test_package_version(script, version_pkg_path) - version = _install_version_pkg(script, version_pkg_path, rev='master') - assert version == 'some different version' + version = _install_version_pkg(script, version_pkg_path, rev="master") + assert version == "some different version" @pytest.mark.network @@ -149,10 +149,10 @@ def test_install_editable_from_git_with_https(script, tmpdir): """ Test cloning from Git with https. 
""" - url_path = 'pypa/pip-test-package.git' - local_url = _github_checkout(url_path, tmpdir, egg='pip-test-package') - result = script.pip('install', '-e', local_url) - result.assert_installed('pip-test-package', with_files=['.git']) + url_path = "pypa/pip-test-package.git" + local_url = _github_checkout(url_path, tmpdir, egg="pip-test-package") + result = script.pip("install", "-e", local_url) + result.assert_installed("pip-test-package", with_files=[".git"]) @pytest.mark.network @@ -161,14 +161,14 @@ def test_install_noneditable_git(script, tmpdir): Test installing from a non-editable git URL with a given tag. """ result = script.pip( - 'install', - 'git+https://github.com/pypa/pip-test-package.git' - '@0.1.1#egg=pip-test-package', + "install", + "git+https://github.com/pypa/pip-test-package.git" + "@0.1.1#egg=pip-test-package", ) egg_info_folder = ( - script.site_packages / 'pip_test_package-0.1.1-py%s.egg-info' % pyversion + script.site_packages / "pip_test_package-0.1.1-py%s.egg-info" % pyversion ) - result.assert_installed('piptestpackage', without_egg_link=True, editable=False) + result.assert_installed("piptestpackage", without_egg_link=True, editable=False) assert egg_info_folder in result.files_created, str(result) @@ -178,9 +178,9 @@ def test_git_with_sha1_revisions(script): """ version_pkg_path = _create_test_package(script) _change_test_package_version(script, version_pkg_path) - sha1 = script.run('git', 'rev-parse', 'HEAD~1', cwd=version_pkg_path).stdout.strip() + sha1 = script.run("git", "rev-parse", "HEAD~1", cwd=version_pkg_path).stdout.strip() version = _install_version_pkg(script, version_pkg_path, rev=sha1) - assert '0.1' == version + assert "0.1" == version def test_git_with_short_sha1_revisions(script): @@ -190,10 +190,10 @@ def test_git_with_short_sha1_revisions(script): version_pkg_path = _create_test_package(script) _change_test_package_version(script, version_pkg_path) sha1 = script.run( - 'git', 'rev-parse', 'HEAD~1', 
cwd=version_pkg_path + "git", "rev-parse", "HEAD~1", cwd=version_pkg_path ).stdout.strip()[:7] version = _install_version_pkg(script, version_pkg_path, rev=sha1) - assert '0.1' == version + assert "0.1" == version def test_git_with_branch_name_as_revision(script): @@ -201,11 +201,11 @@ def test_git_with_branch_name_as_revision(script): Git backend should be able to install from branch names """ version_pkg_path = _create_test_package(script) - branch = 'test_branch' - script.run('git', 'checkout', '-b', branch, cwd=version_pkg_path) + branch = "test_branch" + script.run("git", "checkout", "-b", branch, cwd=version_pkg_path) _change_test_package_version(script, version_pkg_path) version = _install_version_pkg(script, version_pkg_path, rev=branch) - assert 'some different version' == version + assert "some different version" == version def test_git_with_tag_name_as_revision(script): @@ -213,17 +213,17 @@ def test_git_with_tag_name_as_revision(script): Git backend should be able to install from tag names """ version_pkg_path = _create_test_package(script) - script.run('git', 'tag', 'test_tag', cwd=version_pkg_path) + script.run("git", "tag", "test_tag", cwd=version_pkg_path) _change_test_package_version(script, version_pkg_path) - version = _install_version_pkg(script, version_pkg_path, rev='test_tag') - assert '0.1' == version + version = _install_version_pkg(script, version_pkg_path, rev="test_tag") + assert "0.1" == version def _add_ref(script, path, ref): """ Add a new ref to a repository at the given path. """ - script.run('git', 'update-ref', ref, 'HEAD', cwd=path) + script.run("git", "update-ref", ref, "HEAD", cwd=path) def test_git_install_ref(script): @@ -231,11 +231,11 @@ def test_git_install_ref(script): The Git backend should be able to install a ref with the first install. 
""" version_pkg_path = _create_test_package(script) - _add_ref(script, version_pkg_path, 'refs/foo/bar') + _add_ref(script, version_pkg_path, "refs/foo/bar") _change_test_package_version(script, version_pkg_path) - version = _install_version_pkg(script, version_pkg_path, rev='refs/foo/bar') - assert '0.1' == version + version = _install_version_pkg(script, version_pkg_path, rev="refs/foo/bar") + assert "0.1" == version def test_git_install_then_install_ref(script): @@ -244,15 +244,15 @@ def test_git_install_then_install_ref(script): already been installed. """ version_pkg_path = _create_test_package(script) - _add_ref(script, version_pkg_path, 'refs/foo/bar') + _add_ref(script, version_pkg_path, "refs/foo/bar") _change_test_package_version(script, version_pkg_path) version = _install_version_pkg(script, version_pkg_path) - assert 'some different version' == version + assert "some different version" == version # Now install the ref. - version = _install_version_pkg(script, version_pkg_path, rev='refs/foo/bar') - assert '0.1' == version + version = _install_version_pkg(script, version_pkg_path, rev="refs/foo/bar") + assert "0.1" == version @pytest.mark.network @@ -260,16 +260,16 @@ def test_git_with_tag_name_and_update(script, tmpdir): """ Test cloning a git repository and updating to a different version. 
""" - url_path = 'pypa/pip-test-package.git' + url_path = "pypa/pip-test-package.git" base_local_url = _github_checkout(url_path, tmpdir) - local_url = '{}#egg=pip-test-package'.format(base_local_url) - result = script.pip('install', '-e', local_url) - result.assert_installed('pip-test-package', with_files=['.git']) + local_url = "{}#egg=pip-test-package".format(base_local_url) + result = script.pip("install", "-e", local_url) + result.assert_installed("pip-test-package", with_files=[".git"]) - new_local_url = '{}@0.1.2#egg=pip-test-package'.format(base_local_url) - result = script.pip('install', '--global-option=--version', '-e', new_local_url) - assert '0.1.2' in result.stdout + new_local_url = "{}@0.1.2#egg=pip-test-package".format(base_local_url) + result = script.pip("install", "--global-option=--version", "-e", new_local_url) + assert "0.1.2" in result.stdout @pytest.mark.network @@ -278,11 +278,11 @@ def test_git_branch_should_not_be_changed(script, tmpdir): Editable installations should not change branch related to issue #32 and #161 """ - url_path = 'pypa/pip-test-package.git' - local_url = _github_checkout(url_path, tmpdir, egg='pip-test-package') - script.pip('install', '-e', local_url) - branch = _get_editable_branch(script, 'pip-test-package') - assert 'master' == branch + url_path = "pypa/pip-test-package.git" + local_url = _github_checkout(url_path, tmpdir, egg="pip-test-package") + script.pip("install", "-e", local_url) + branch = _get_editable_branch(script, "pip-test-package") + assert "master" == branch @pytest.mark.network @@ -290,10 +290,10 @@ def test_git_with_non_editable_unpacking(script, tmpdir): """ Test cloning a git repository from a non-editable URL with a given tag. 
""" - url_path = 'pypa/pip-test-package.git' - local_url = _github_checkout(url_path, tmpdir, rev='0.1.2', egg='pip-test-package') - result = script.pip('install', '--global-option=--version', local_url) - assert '0.1.2' in result.stdout + url_path = "pypa/pip-test-package.git" + local_url = _github_checkout(url_path, tmpdir, rev="0.1.2", egg="pip-test-package") + result = script.pip("install", "--global-option=--version", local_url) + assert "0.1.2" in result.stdout @pytest.mark.network @@ -302,10 +302,10 @@ def test_git_with_editable_where_egg_contains_dev_string(script, tmpdir): Test cloning a git repository from an editable url which contains "dev" string """ - url_path = 'dcramer/django-devserver.git' - local_url = _github_checkout(url_path, tmpdir, egg='django-devserver', scheme='git') - result = script.pip('install', '-e', local_url) - result.assert_installed('django-devserver', with_files=['.git']) + url_path = "dcramer/django-devserver.git" + local_url = _github_checkout(url_path, tmpdir, egg="django-devserver", scheme="git") + result = script.pip("install", "-e", local_url) + result.assert_installed("django-devserver", with_files=[".git"]) @pytest.mark.network @@ -314,10 +314,10 @@ def test_git_with_non_editable_where_egg_contains_dev_string(script, tmpdir): Test cloning a git repository from a non-editable url which contains "dev" string """ - url_path = 'dcramer/django-devserver.git' - local_url = _github_checkout(url_path, tmpdir, egg='django-devserver', scheme='git') - result = script.pip('install', local_url) - devserver_folder = script.site_packages / 'devserver' + url_path = "dcramer/django-devserver.git" + local_url = _github_checkout(url_path, tmpdir, egg="django-devserver", scheme="git") + result = script.pip("install", local_url) + devserver_folder = script.site_packages / "devserver" assert devserver_folder in result.files_created, str(result) @@ -326,13 +326,13 @@ def test_git_with_ambiguous_revs(script): Test git with two "names" 
(tag/branch) pointing to the same commit """ version_pkg_path = _create_test_package(script) - version_pkg_url = _make_version_pkg_url(version_pkg_path, rev='0.1') - script.run('git', 'tag', '0.1', cwd=version_pkg_path) - result = script.pip('install', '-e', version_pkg_url) - assert 'Could not find a tag or branch' not in result.stdout + version_pkg_url = _make_version_pkg_url(version_pkg_path, rev="0.1") + script.run("git", "tag", "0.1", cwd=version_pkg_path) + result = script.pip("install", "-e", version_pkg_url) + assert "Could not find a tag or branch" not in result.stdout # it is 'version-pkg' instead of 'version_pkg' because # egg-link name is version-pkg.egg-link because it is a single .py module - result.assert_installed('version-pkg', with_files=['.git']) + result.assert_installed("version-pkg", with_files=[".git"]) def test_editable__no_revision(script): @@ -342,11 +342,11 @@ def test_editable__no_revision(script): version_pkg_path = _create_test_package(script) _install_version_pkg_only(script, version_pkg_path) - branch = _get_editable_branch(script, 'version-pkg') - assert branch == 'master' + branch = _get_editable_branch(script, "version-pkg") + assert branch == "master" - remote = _get_branch_remote(script, 'version-pkg', 'master') - assert remote == 'origin' + remote = _get_branch_remote(script, "version-pkg", "master") + assert remote == "origin" def test_editable__branch_with_sha_same_as_default(script): @@ -356,14 +356,14 @@ def test_editable__branch_with_sha_same_as_default(script): """ version_pkg_path = _create_test_package(script) # Create a second branch with the same SHA. 
- script.run('git', 'branch', 'develop', cwd=version_pkg_path) - _install_version_pkg_only(script, version_pkg_path, rev='develop') + script.run("git", "branch", "develop", cwd=version_pkg_path) + _install_version_pkg_only(script, version_pkg_path, rev="develop") - branch = _get_editable_branch(script, 'version-pkg') - assert branch == 'develop' + branch = _get_editable_branch(script, "version-pkg") + assert branch == "develop" - remote = _get_branch_remote(script, 'version-pkg', 'develop') - assert remote == 'origin' + remote = _get_branch_remote(script, "version-pkg", "develop") + assert remote == "origin" def test_editable__branch_with_sha_different_from_default(script): @@ -373,18 +373,18 @@ def test_editable__branch_with_sha_different_from_default(script): """ version_pkg_path = _create_test_package(script) # Create a second branch. - script.run('git', 'branch', 'develop', cwd=version_pkg_path) + script.run("git", "branch", "develop", cwd=version_pkg_path) # Add another commit to the master branch to give it a different sha. _change_test_package_version(script, version_pkg_path) - version = _install_version_pkg(script, version_pkg_path, rev='develop') - assert version == '0.1' + version = _install_version_pkg(script, version_pkg_path, rev="develop") + assert version == "0.1" - branch = _get_editable_branch(script, 'version-pkg') - assert branch == 'develop' + branch = _get_editable_branch(script, "version-pkg") + assert branch == "develop" - remote = _get_branch_remote(script, 'version-pkg', 'develop') - assert remote == 'origin' + remote = _get_branch_remote(script, "version-pkg", "develop") + assert remote == "origin" def test_editable__non_master_default_branch(script): @@ -395,11 +395,11 @@ def test_editable__non_master_default_branch(script): version_pkg_path = _create_test_package(script) # Change the default branch of the remote repo to a name that is # alphabetically after "master". 
- script.run('git', 'checkout', '-b', 'release', cwd=version_pkg_path) + script.run("git", "checkout", "-b", "release", cwd=version_pkg_path) _install_version_pkg_only(script, version_pkg_path) - branch = _get_editable_branch(script, 'version-pkg') - assert branch == 'release' + branch = _get_editable_branch(script, "version-pkg") + assert branch == "release" def test_reinstalling_works_with_editable_non_master_branch(script): @@ -410,14 +410,14 @@ def test_reinstalling_works_with_editable_non_master_branch(script): version_pkg_path = _create_test_package(script) # Switch the default branch to something other than 'master' - script.run('git', 'branch', '-m', 'foobar', cwd=version_pkg_path) + script.run("git", "branch", "-m", "foobar", cwd=version_pkg_path) version = _install_version_pkg(script, version_pkg_path) - assert '0.1' == version + assert "0.1" == version _change_test_package_version(script, version_pkg_path) version = _install_version_pkg(script, version_pkg_path) - assert 'some different version' == version + assert "some different version" == version # TODO(pnasrat) fix all helpers to do right things with paths on windows. @@ -427,26 +427,26 @@ def test_check_submodule_addition(script): Submodules are pulled in on install and updated on upgrade. 
""" module_path, submodule_path = _create_test_package_with_submodule( - script, rel_path='testpkg/static' + script, rel_path="testpkg/static" ) install_result = script.pip( - 'install', '-e', 'git+' + module_path + '#egg=version_pkg' + "install", "-e", "git+" + module_path + "#egg=version_pkg" ) assert ( - script.venv / 'src/version-pkg/testpkg/static/testfile' + script.venv / "src/version-pkg/testpkg/static/testfile" in install_result.files_created ) _change_test_package_submodule(script, submodule_path) - _pull_in_submodule_changes_to_module(script, module_path, rel_path='testpkg/static') + _pull_in_submodule_changes_to_module(script, module_path, rel_path="testpkg/static") # expect error because git may write to stderr update_result = script.pip( - 'install', '-e', 'git+' + module_path + '#egg=version_pkg', '--upgrade' + "install", "-e", "git+" + module_path + "#egg=version_pkg", "--upgrade" ) assert ( - script.venv / 'src/version-pkg/testpkg/static/testfile2' + script.venv / "src/version-pkg/testpkg/static/testfile2" in update_result.files_created ) diff --git a/tests/functional/test_install_wheel.py b/tests/functional/test_install_wheel.py index 07fd1f4e7a1..c7cffa5c5a9 100644 --- a/tests/functional/test_install_wheel.py +++ b/tests/functional/test_install_wheel.py @@ -14,15 +14,15 @@ def test_install_from_future_wheel_version(script, data): from tests.lib import TestFailure package = data.packages.joinpath("futurewheel-3.0-py2.py3-none-any.whl") - result = script.pip('install', package, '--no-index', expect_error=True) + result = script.pip("install", package, "--no-index", expect_error=True) with pytest.raises(TestFailure): - result.assert_installed('futurewheel', without_egg_link=True, editable=False) + result.assert_installed("futurewheel", without_egg_link=True, editable=False) package = data.packages.joinpath("futurewheel-1.9-py2.py3-none-any.whl") result = script.pip( - 'install', package, '--no-index', expect_error=False, expect_stderr=True + 
"install", package, "--no-index", expect_error=False, expect_stderr=True ) - result.assert_installed('futurewheel', without_egg_link=True, editable=False) + result.assert_installed("futurewheel", without_egg_link=True, editable=False) def test_install_from_broken_wheel(script, data): @@ -32,9 +32,9 @@ def test_install_from_broken_wheel(script, data): from tests.lib import TestFailure package = data.packages.joinpath("brokenwheel-1.0-py2.py3-none-any.whl") - result = script.pip('install', package, '--no-index', expect_error=True) + result = script.pip("install", package, "--no-index", expect_error=True) with pytest.raises(TestFailure): - result.assert_installed('futurewheel', without_egg_link=True, editable=False) + result.assert_installed("futurewheel", without_egg_link=True, editable=False) def test_basic_install_from_wheel(script, data): @@ -42,19 +42,19 @@ def test_basic_install_from_wheel(script, data): Test installing from a wheel (that has a script) """ result = script.pip( - 'install', - 'has.script==1.0', - '--no-index', - '--find-links=' + data.find_links, + "install", + "has.script==1.0", + "--no-index", + "--find-links=" + data.find_links, expect_error=False, ) - dist_info_folder = script.site_packages / 'has.script-1.0.dist-info' + dist_info_folder = script.site_packages / "has.script-1.0.dist-info" assert dist_info_folder in result.files_created, ( dist_info_folder, result.files_created, result.stdout, ) - script_file = script.bin / 'script.py' + script_file = script.bin / "script.py" assert script_file in result.files_created @@ -63,19 +63,19 @@ def test_basic_install_from_wheel_with_extras(script, data): Test installing from a wheel with extras. 
""" result = script.pip( - 'install', - 'complex-dist[simple]', - '--no-index', - '--find-links=' + data.find_links, + "install", + "complex-dist[simple]", + "--no-index", + "--find-links=" + data.find_links, expect_error=False, ) - dist_info_folder = script.site_packages / 'complex_dist-0.1.dist-info' + dist_info_folder = script.site_packages / "complex_dist-0.1.dist-info" assert dist_info_folder in result.files_created, ( dist_info_folder, result.files_created, result.stdout, ) - dist_info_folder = script.site_packages / 'simple.dist-0.1.dist-info' + dist_info_folder = script.site_packages / "simple.dist-0.1.dist-info" assert dist_info_folder in result.files_created, ( dist_info_folder, result.files_created, @@ -88,23 +88,23 @@ def test_basic_install_from_wheel_file(script, data): Test installing directly from a wheel file. """ package = data.packages.joinpath("simple.dist-0.1-py2.py3-none-any.whl") - result = script.pip('install', package, '--no-index', expect_error=False) - dist_info_folder = script.site_packages / 'simple.dist-0.1.dist-info' + result = script.pip("install", package, "--no-index", expect_error=False) + dist_info_folder = script.site_packages / "simple.dist-0.1.dist-info" assert dist_info_folder in result.files_created, ( dist_info_folder, result.files_created, result.stdout, ) - installer = dist_info_folder / 'INSTALLER' + installer = dist_info_folder / "INSTALLER" assert installer in result.files_created, ( dist_info_folder, result.files_created, result.stdout, ) - with open(script.base_path / installer, 'rb') as installer_file: + with open(script.base_path / installer, "rb") as installer_file: installer_details = installer_file.read() - assert installer_details == b'pip\n' - installer_temp = dist_info_folder / 'INSTALLER.pip' + assert installer_details == b"pip\n" + installer_temp = dist_info_folder / "INSTALLER.pip" assert installer_temp not in result.files_created, ( dist_info_folder, result.files_created, @@ -117,8 +117,8 @@ def 
test_install_from_wheel_with_headers(script, data): Test installing from a wheel file with headers """ package = data.packages.joinpath("headers.dist-0.1-py2.py3-none-any.whl") - result = script.pip('install', package, '--no-index', expect_error=False) - dist_info_folder = script.site_packages / 'headers.dist-0.1.dist-info' + result = script.pip("install", package, "--no-index", expect_error=False) + dist_info_folder = script.site_packages / "headers.dist-0.1.dist-info" assert dist_info_folder in result.files_created, ( dist_info_folder, result.files_created, @@ -130,16 +130,16 @@ def test_install_wheel_with_target(script, data, with_wheel): """ Test installing a wheel using pip install --target """ - target_dir = script.scratch_path / 'target' + target_dir = script.scratch_path / "target" result = script.pip( - 'install', - 'simple.dist==0.1', - '-t', + "install", + "simple.dist==0.1", + "-t", target_dir, - '--no-index', - '--find-links=' + data.find_links, + "--no-index", + "--find-links=" + data.find_links, ) - assert Path('scratch') / 'target' / 'simpledist' in result.files_created, str( + assert Path("scratch") / "target" / "simpledist" in result.files_created, str( result ) @@ -163,22 +163,22 @@ def test_install_wheel_with_target_and_data_files(script, data, with_wheel): ] ) """ - target_dir = script.scratch_path / 'prjwithdatafile' + target_dir = script.scratch_path / "prjwithdatafile" package = data.packages.joinpath("prjwithdatafile-1.0-py2.py3-none-any.whl") result = script.pip( - 'install', package, '-t', target_dir, '--no-index', expect_error=False + "install", package, "-t", target_dir, "--no-index", expect_error=False ) assert ( - Path('scratch') / 'prjwithdatafile' / 'packages1' / 'README.txt' + Path("scratch") / "prjwithdatafile" / "packages1" / "README.txt" in result.files_created ), str(result) assert ( - Path('scratch') / 'prjwithdatafile' / 'packages2' / 'README.txt' + Path("scratch") / "prjwithdatafile" / "packages2" / "README.txt" in 
result.files_created ), str(result) assert ( - Path('scratch') / 'prjwithdatafile' / 'lib' / 'python' + Path("scratch") / "prjwithdatafile" / "lib" / "python" not in result.files_created ), str(result) @@ -187,32 +187,32 @@ def test_install_wheel_with_root(script, data): """ Test installing a wheel using pip install --root """ - root_dir = script.scratch_path / 'root' + root_dir = script.scratch_path / "root" result = script.pip( - 'install', - 'simple.dist==0.1', - '--root', + "install", + "simple.dist==0.1", + "--root", root_dir, - '--no-index', - '--find-links=' + data.find_links, + "--no-index", + "--find-links=" + data.find_links, ) - assert Path('scratch') / 'root' in result.files_created + assert Path("scratch") / "root" in result.files_created def test_install_wheel_with_prefix(script, data): """ Test installing a wheel using pip install --prefix """ - prefix_dir = script.scratch_path / 'prefix' + prefix_dir = script.scratch_path / "prefix" result = script.pip( - 'install', - 'simple.dist==0.1', - '--prefix', + "install", + "simple.dist==0.1", + "--prefix", prefix_dir, - '--no-index', - '--find-links=' + data.find_links, + "--no-index", + "--find-links=" + data.find_links, ) - lib = distutils.sysconfig.get_python_lib(prefix=Path('scratch') / 'prefix') + lib = distutils.sysconfig.get_python_lib(prefix=Path("scratch") / "prefix") assert lib in result.files_created, str(result) @@ -223,9 +223,9 @@ def test_install_from_wheel_installs_deps(script, data): # 'requires_source' depends on the 'source' project package = data.packages.joinpath("requires_source-1.0-py2.py3-none-any.whl") result = script.pip( - 'install', '--no-index', '--find-links', data.find_links, package + "install", "--no-index", "--find-links", data.find_links, package ) - result.assert_installed('source', editable=False) + result.assert_installed("source", editable=False) def test_install_from_wheel_no_deps(script, data): @@ -235,24 +235,24 @@ def test_install_from_wheel_no_deps(script, data): 
# 'requires_source' depends on the 'source' project package = data.packages.joinpath("requires_source-1.0-py2.py3-none-any.whl") result = script.pip( - 'install', '--no-index', '--find-links', data.find_links, '--no-deps', package + "install", "--no-index", "--find-links", data.find_links, "--no-deps", package ) - pkg_folder = script.site_packages / 'source' + pkg_folder = script.site_packages / "source" assert pkg_folder not in result.files_created def test_wheel_record_lines_in_deterministic_order(script, data): to_install = data.packages.joinpath("simplewheel-1.0-py2.py3-none-any.whl") - result = script.pip('install', to_install) + result = script.pip("install", to_install) - dist_info_folder = script.site_packages / 'simplewheel-1.0.dist-info' - record_path = dist_info_folder / 'RECORD' + dist_info_folder = script.site_packages / "simplewheel-1.0.dist-info" + record_path = dist_info_folder / "RECORD" assert dist_info_folder in result.files_created, str(result) assert record_path in result.files_created, str(result) record_path = result.files_created[record_path].full - record_lines = [p for p in Path(record_path).read_text().split('\n') if p] + record_lines = [p for p in Path(record_path).read_text().split("\n") if p] assert record_lines == sorted(record_lines) @@ -261,15 +261,15 @@ def test_install_user_wheel(script, data, with_wheel): Test user install from wheel (that has a script) """ result = script.pip( - 'install', - 'has.script==1.0', - '--user', - '--no-index', - '--find-links=' + data.find_links, + "install", + "has.script==1.0", + "--user", + "--no-index", + "--find-links=" + data.find_links, ) - egg_info_folder = script.user_site / 'has.script-1.0.dist-info' + egg_info_folder = script.user_site / "has.script-1.0.dist-info" assert egg_info_folder in result.files_created, str(result) - script_file = script.user_bin / 'script.py' + script_file = script.user_bin / "script.py" assert script_file in result.files_created, str(result) @@ -278,16 +278,16 @@ 
def test_install_from_wheel_gen_entrypoint(script, data): Test installing scripts (entry points are generated) """ result = script.pip( - 'install', - 'script.wheel1a==0.1', - '--no-index', - '--find-links=' + data.find_links, + "install", + "script.wheel1a==0.1", + "--no-index", + "--find-links=" + data.find_links, expect_error=False, ) - if os.name == 'nt': - wrapper_file = script.bin / 't1.exe' + if os.name == "nt": + wrapper_file = script.bin / "t1.exe" else: - wrapper_file = script.bin / 't1' + wrapper_file = script.bin / "t1" assert wrapper_file in result.files_created if os.name != "nt": @@ -299,17 +299,17 @@ def test_install_from_wheel_gen_uppercase_entrypoint(script, data): Test installing scripts with uppercase letters in entry point names """ result = script.pip( - 'install', - 'console-scripts-uppercase==1.0', - '--no-index', - '--find-links=' + data.find_links, + "install", + "console-scripts-uppercase==1.0", + "--no-index", + "--find-links=" + data.find_links, expect_error=False, ) - if os.name == 'nt': + if os.name == "nt": # Case probably doesn't make any difference on NT - wrapper_file = script.bin / 'cmdName.exe' + wrapper_file = script.bin / "cmdName.exe" else: - wrapper_file = script.bin / 'cmdName' + wrapper_file = script.bin / "cmdName" assert wrapper_file in result.files_created if os.name != "nt": @@ -321,15 +321,15 @@ def test_install_from_wheel_with_legacy(script, data): Test installing scripts (legacy scripts are preserved) """ result = script.pip( - 'install', - 'script.wheel2a==0.1', - '--no-index', - '--find-links=' + data.find_links, + "install", + "script.wheel2a==0.1", + "--no-index", + "--find-links=" + data.find_links, expect_error=False, ) - legacy_file1 = script.bin / 'testscript1.bat' - legacy_file2 = script.bin / 'testscript2' + legacy_file1 = script.bin / "testscript1.bat" + legacy_file2 = script.bin / "testscript2" assert legacy_file1 in result.files_created assert legacy_file2 in result.files_created @@ -341,17 +341,17 @@ 
def test_install_from_wheel_no_setuptools_entrypoint(script, data): the wheel are skipped. """ result = script.pip( - 'install', - 'script.wheel1==0.1', - '--no-index', - '--find-links=' + data.find_links, + "install", + "script.wheel1==0.1", + "--no-index", + "--find-links=" + data.find_links, expect_error=False, ) - if os.name == 'nt': - wrapper_file = script.bin / 't1.exe' + if os.name == "nt": + wrapper_file = script.bin / "t1.exe" else: - wrapper_file = script.bin / 't1' - wrapper_helper = script.bin / 't1-script.py' + wrapper_file = script.bin / "t1" + wrapper_helper = script.bin / "t1-script.py" # The wheel has t1.exe and t1-script.py. We will be generating t1 or # t1.exe depending on the platform. So we check that the correct wrapper @@ -368,16 +368,16 @@ def test_skipping_setuptools_doesnt_skip_legacy(script, data): setuptools wrappers) """ result = script.pip( - 'install', - 'script.wheel2==0.1', - '--no-index', - '--find-links=' + data.find_links, + "install", + "script.wheel2==0.1", + "--no-index", + "--find-links=" + data.find_links, expect_error=False, ) - legacy_file1 = script.bin / 'testscript1.bat' - legacy_file2 = script.bin / 'testscript2' - wrapper_helper = script.bin / 't1-script.py' + legacy_file1 = script.bin / "testscript1.bat" + legacy_file2 = script.bin / "testscript2" + wrapper_helper = script.bin / "t1-script.py" assert legacy_file1 in result.files_created assert legacy_file2 in result.files_created @@ -389,16 +389,16 @@ def test_install_from_wheel_gui_entrypoint(script, data): Test installing scripts (gui entry points are generated) """ result = script.pip( - 'install', - 'script.wheel3==0.1', - '--no-index', - '--find-links=' + data.find_links, + "install", + "script.wheel3==0.1", + "--no-index", + "--find-links=" + data.find_links, expect_error=False, ) - if os.name == 'nt': - wrapper_file = script.bin / 't1.exe' + if os.name == "nt": + wrapper_file = script.bin / "t1.exe" else: - wrapper_file = script.bin / 't1' + wrapper_file = 
script.bin / "t1" assert wrapper_file in result.files_created @@ -449,25 +449,25 @@ def test_wheel_no_compiles_pyc(script, data): def test_install_from_wheel_uninstalls_old_version(script, data): # regression test for https://github.com/pypa/pip/issues/1825 package = data.packages.joinpath("simplewheel-1.0-py2.py3-none-any.whl") - result = script.pip('install', package, '--no-index') + result = script.pip("install", package, "--no-index") package = data.packages.joinpath("simplewheel-2.0-py2.py3-none-any.whl") - result = script.pip('install', package, '--no-index', expect_error=False) - dist_info_folder = script.site_packages / 'simplewheel-2.0.dist-info' + result = script.pip("install", package, "--no-index", expect_error=False) + dist_info_folder = script.site_packages / "simplewheel-2.0.dist-info" assert dist_info_folder in result.files_created - dist_info_folder = script.site_packages / 'simplewheel-1.0.dist-info' + dist_info_folder = script.site_packages / "simplewheel-1.0.dist-info" assert dist_info_folder not in result.files_created def test_wheel_compile_syntax_error(script, data): package = data.packages.joinpath("compilewheel-1.0-py2.py3-none-any.whl") - result = script.pip('install', '--compile', package, '--no-index') - assert 'yield from' not in result.stdout - assert 'SyntaxError: ' not in result.stdout + result = script.pip("install", "--compile", package, "--no-index") + assert "yield from" not in result.stdout + assert "SyntaxError: " not in result.stdout def test_wheel_install_with_no_cache_dir(script, tmpdir, data): """Check wheel installations work, even with no cache. 
""" package = data.packages.joinpath("simple.dist-0.1-py2.py3-none-any.whl") - result = script.pip('install', '--no-cache-dir', '--no-index', package) - result.assert_installed('simpledist', editable=False) + result = script.pip("install", "--no-cache-dir", "--no-index", package) + result.assert_installed("simpledist", editable=False) diff --git a/tests/functional/test_list.py b/tests/functional/test_list.py index 04c20c2ebc0..411a4322b5c 100644 --- a/tests/functional/test_list.py +++ b/tests/functional/test_list.py @@ -10,11 +10,11 @@ def test_basic_list(script, data): """ script.pip( - 'install', '-f', data.find_links, '--no-index', 'simple==1.0', 'simple2==3.0' + "install", "-f", data.find_links, "--no-index", "simple==1.0", "simple2==3.0" ) - result = script.pip('list') - assert 'simple 1.0' in result.stdout, str(result) - assert 'simple2 3.0' in result.stdout, str(result) + result = script.pip("list") + assert "simple 1.0" in result.stdout, str(result) + assert "simple2 3.0" in result.stdout, str(result) def test_verbose_flag(script, data): @@ -22,15 +22,15 @@ def test_verbose_flag(script, data): Test the list command with the '-v' option """ script.pip( - 'install', '-f', data.find_links, '--no-index', 'simple==1.0', 'simple2==3.0' + "install", "-f", data.find_links, "--no-index", "simple==1.0", "simple2==3.0" ) - result = script.pip('list', '-v', '--format=columns') - assert 'Package' in result.stdout, str(result) - assert 'Version' in result.stdout, str(result) - assert 'Location' in result.stdout, str(result) - assert 'Installer' in result.stdout, str(result) - assert 'simple 1.0' in result.stdout, str(result) - assert 'simple2 3.0' in result.stdout, str(result) + result = script.pip("list", "-v", "--format=columns") + assert "Package" in result.stdout, str(result) + assert "Version" in result.stdout, str(result) + assert "Location" in result.stdout, str(result) + assert "Installer" in result.stdout, str(result) + assert "simple 1.0" in result.stdout, 
str(result) + assert "simple2 3.0" in result.stdout, str(result) def test_columns_flag(script, data): @@ -38,14 +38,14 @@ def test_columns_flag(script, data): Test the list command with the '--format=columns' option """ script.pip( - 'install', '-f', data.find_links, '--no-index', 'simple==1.0', 'simple2==3.0' + "install", "-f", data.find_links, "--no-index", "simple==1.0", "simple2==3.0" ) - result = script.pip('list', '--format=columns') - assert 'Package' in result.stdout, str(result) - assert 'Version' in result.stdout, str(result) - assert 'simple (1.0)' not in result.stdout, str(result) - assert 'simple 1.0' in result.stdout, str(result) - assert 'simple2 3.0' in result.stdout, str(result) + result = script.pip("list", "--format=columns") + assert "Package" in result.stdout, str(result) + assert "Version" in result.stdout, str(result) + assert "simple (1.0)" not in result.stdout, str(result) + assert "simple 1.0" in result.stdout, str(result) + assert "simple2 3.0" in result.stdout, str(result) def test_format_priority(script, data): @@ -53,23 +53,23 @@ def test_format_priority(script, data): Test that latest format has priority over previous ones. 
""" script.pip( - 'install', '-f', data.find_links, '--no-index', 'simple==1.0', 'simple2==3.0' + "install", "-f", data.find_links, "--no-index", "simple==1.0", "simple2==3.0" ) result = script.pip( - 'list', '--format=columns', '--format=freeze', expect_stderr=True + "list", "--format=columns", "--format=freeze", expect_stderr=True ) - assert 'simple==1.0' in result.stdout, str(result) - assert 'simple2==3.0' in result.stdout, str(result) - assert 'simple 1.0' not in result.stdout, str(result) - assert 'simple2 3.0' not in result.stdout, str(result) + assert "simple==1.0" in result.stdout, str(result) + assert "simple2==3.0" in result.stdout, str(result) + assert "simple 1.0" not in result.stdout, str(result) + assert "simple2 3.0" not in result.stdout, str(result) - result = script.pip('list', '--format=freeze', '--format=columns') - assert 'Package' in result.stdout, str(result) - assert 'Version' in result.stdout, str(result) - assert 'simple==1.0' not in result.stdout, str(result) - assert 'simple2==3.0' not in result.stdout, str(result) - assert 'simple 1.0' in result.stdout, str(result) - assert 'simple2 3.0' in result.stdout, str(result) + result = script.pip("list", "--format=freeze", "--format=columns") + assert "Package" in result.stdout, str(result) + assert "Version" in result.stdout, str(result) + assert "simple==1.0" not in result.stdout, str(result) + assert "simple2==3.0" not in result.stdout, str(result) + assert "simple 1.0" in result.stdout, str(result) + assert "simple2 3.0" in result.stdout, str(result) def test_local_flag(script, data): @@ -77,8 +77,8 @@ def test_local_flag(script, data): Test the behavior of --local flag in the list command """ - script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') - result = script.pip('list', '--local', '--format=json') + script.pip("install", "-f", data.find_links, "--no-index", "simple==1.0") + result = script.pip("list", "--local", "--format=json") assert {"name": "simple", 
"version": "1.0"} in json.loads(result.stdout) @@ -87,12 +87,12 @@ def test_local_columns_flag(script, data): Test the behavior of --local --format=columns flags in the list command """ - script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') - result = script.pip('list', '--local', '--format=columns') - assert 'Package' in result.stdout - assert 'Version' in result.stdout - assert 'simple (1.0)' not in result.stdout - assert 'simple 1.0' in result.stdout, str(result) + script.pip("install", "-f", data.find_links, "--no-index", "simple==1.0") + result = script.pip("list", "--local", "--format=columns") + assert "Package" in result.stdout + assert "Version" in result.stdout + assert "simple (1.0)" not in result.stdout + assert "simple 1.0" in result.stdout, str(result) @pytest.mark.network @@ -101,10 +101,10 @@ def test_user_flag(script, data): Test the behavior of --user flag in the list command """ - script.pip('download', 'setuptools', 'wheel', '-d', data.packages) - script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') - script.pip('install', '-f', data.find_links, '--no-index', '--user', 'simple2==2.0') - result = script.pip('list', '--user', '--format=json') + script.pip("download", "setuptools", "wheel", "-d", data.packages) + script.pip("install", "-f", data.find_links, "--no-index", "simple==1.0") + script.pip("install", "-f", data.find_links, "--no-index", "--user", "simple2==2.0") + result = script.pip("list", "--user", "--format=json") assert {"name": "simple", "version": "1.0"} not in json.loads(result.stdout) assert {"name": "simple2", "version": "2.0"} in json.loads(result.stdout) @@ -115,14 +115,14 @@ def test_user_columns_flag(script, data): Test the behavior of --user --format=columns flags in the list command """ - script.pip('download', 'setuptools', 'wheel', '-d', data.packages) - script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') - script.pip('install', '-f', data.find_links, 
'--no-index', '--user', 'simple2==2.0') - result = script.pip('list', '--user', '--format=columns') - assert 'Package' in result.stdout - assert 'Version' in result.stdout - assert 'simple2 (2.0)' not in result.stdout - assert 'simple2 2.0' in result.stdout, str(result) + script.pip("download", "setuptools", "wheel", "-d", data.packages) + script.pip("install", "-f", data.find_links, "--no-index", "simple==1.0") + script.pip("install", "-f", data.find_links, "--no-index", "--user", "simple2==2.0") + result = script.pip("list", "--user", "--format=columns") + assert "Package" in result.stdout + assert "Version" in result.stdout + assert "simple2 (2.0)" not in result.stdout + assert "simple2 2.0" in result.stdout, str(result) @pytest.mark.network @@ -132,15 +132,15 @@ def test_uptodate_flag(script, data): """ script.pip( - 'install', '-f', data.find_links, '--no-index', 'simple==1.0', 'simple2==3.0' + "install", "-f", data.find_links, "--no-index", "simple==1.0", "simple2==3.0" ) script.pip( - 'install', - '-e', - 'git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package', + "install", + "-e", + "git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package", ) result = script.pip( - 'list', '-f', data.find_links, '--no-index', '--uptodate', '--format=json' + "list", "-f", data.find_links, "--no-index", "--uptodate", "--format=json" ) assert {"name": "simple", "version": "1.0"} not in json.loads( result.stdout @@ -158,22 +158,22 @@ def test_uptodate_columns_flag(script, data): """ script.pip( - 'install', '-f', data.find_links, '--no-index', 'simple==1.0', 'simple2==3.0' + "install", "-f", data.find_links, "--no-index", "simple==1.0", "simple2==3.0" ) script.pip( - 'install', - '-e', - 'git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package', + "install", + "-e", + "git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package", ) result = script.pip( - 'list', '-f', data.find_links, '--no-index', '--uptodate', 
'--format=columns' + "list", "-f", data.find_links, "--no-index", "--uptodate", "--format=columns" ) - assert 'Package' in result.stdout - assert 'Version' in result.stdout - assert 'Location' in result.stdout # editables included - assert 'pip-test-package (0.1.1,' not in result.stdout - assert 'pip-test-package 0.1.1' in result.stdout, str(result) - assert 'simple2 3.0' in result.stdout, str(result) + assert "Package" in result.stdout + assert "Version" in result.stdout + assert "Location" in result.stdout # editables included + assert "pip-test-package (0.1.1," not in result.stdout + assert "pip-test-package 0.1.1" in result.stdout, str(result) + assert "simple2 3.0" in result.stdout, str(result) @pytest.mark.network @@ -183,21 +183,21 @@ def test_outdated_flag(script, data): """ script.pip( - 'install', - '-f', + "install", + "-f", data.find_links, - '--no-index', - 'simple==1.0', - 'simple2==3.0', - 'simplewheel==1.0', + "--no-index", + "simple==1.0", + "simple2==3.0", + "simplewheel==1.0", ) script.pip( - 'install', - '-e', - 'git+https://github.com/pypa/pip-test-package.git@0.1#egg=pip-test-package', + "install", + "-e", + "git+https://github.com/pypa/pip-test-package.git@0.1#egg=pip-test-package", ) result = script.pip( - 'list', '-f', data.find_links, '--no-index', '--outdated', '--format=json' + "list", "-f", data.find_links, "--no-index", "--outdated", "--format=json" ) assert { "name": "simple", @@ -224,31 +224,31 @@ def test_outdated_columns_flag(script, data): """ script.pip( - 'install', - '-f', + "install", + "-f", data.find_links, - '--no-index', - 'simple==1.0', - 'simple2==3.0', - 'simplewheel==1.0', + "--no-index", + "simple==1.0", + "simple2==3.0", + "simplewheel==1.0", ) script.pip( - 'install', - '-e', - 'git+https://github.com/pypa/pip-test-package.git@0.1#egg=pip-test-package', + "install", + "-e", + "git+https://github.com/pypa/pip-test-package.git@0.1#egg=pip-test-package", ) result = script.pip( - 'list', '-f', data.find_links, 
'--no-index', '--outdated', '--format=columns' + "list", "-f", data.find_links, "--no-index", "--outdated", "--format=columns" ) - assert 'Package' in result.stdout - assert 'Version' in result.stdout - assert 'Latest' in result.stdout - assert 'Type' in result.stdout - assert 'simple (1.0) - Latest: 3.0 [sdist]' not in result.stdout - assert 'simplewheel (1.0) - Latest: 2.0 [wheel]' not in result.stdout - assert 'simple 1.0 3.0 sdist' in result.stdout, str(result) - assert 'simplewheel 1.0 2.0 wheel' in result.stdout, str(result) - assert 'simple2' not in result.stdout, str(result) # 3.0 is latest + assert "Package" in result.stdout + assert "Version" in result.stdout + assert "Latest" in result.stdout + assert "Type" in result.stdout + assert "simple (1.0) - Latest: 3.0 [sdist]" not in result.stdout + assert "simplewheel (1.0) - Latest: 2.0 [wheel]" not in result.stdout + assert "simple 1.0 3.0 sdist" in result.stdout, str(result) + assert "simplewheel 1.0 2.0 wheel" in result.stdout, str(result) + assert "simple2" not in result.stdout, str(result) # 3.0 is latest @pytest.mark.network @@ -256,16 +256,16 @@ def test_editables_flag(script, data): """ Test the behavior of --editables flag in the list command """ - script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') + script.pip("install", "-f", data.find_links, "--no-index", "simple==1.0") result = script.pip( - 'install', - '-e', - 'git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package', + "install", + "-e", + "git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package", ) - result = script.pip('list', '--editable', '--format=json') - result2 = script.pip('list', '--editable') + result = script.pip("list", "--editable", "--format=json") + result2 = script.pip("list", "--editable") assert {"name": "simple", "version": "1.0"} not in json.loads(result.stdout) - assert os.path.join('src', 'pip-test-package') in result2.stdout + assert os.path.join("src", 
"pip-test-package") in result2.stdout @pytest.mark.network @@ -273,13 +273,13 @@ def test_exclude_editable_flag(script, data): """ Test the behavior of --editables flag in the list command """ - script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') + script.pip("install", "-f", data.find_links, "--no-index", "simple==1.0") result = script.pip( - 'install', - '-e', - 'git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package', + "install", + "-e", + "git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package", ) - result = script.pip('list', '--exclude-editable', '--format=json') + result = script.pip("list", "--exclude-editable", "--format=json") assert {"name": "simple", "version": "1.0"} in json.loads(result.stdout) assert "pip-test-package" not in {p["name"] for p in json.loads(result.stdout)} @@ -289,17 +289,17 @@ def test_editables_columns_flag(script, data): """ Test the behavior of --editables flag in the list command """ - script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') + script.pip("install", "-f", data.find_links, "--no-index", "simple==1.0") result = script.pip( - 'install', - '-e', - 'git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package', + "install", + "-e", + "git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package", ) - result = script.pip('list', '--editable', '--format=columns') - assert 'Package' in result.stdout - assert 'Version' in result.stdout - assert 'Location' in result.stdout - assert os.path.join('src', 'pip-test-package') in result.stdout, str(result) + result = script.pip("list", "--editable", "--format=columns") + assert "Package" in result.stdout + assert "Version" in result.stdout + assert "Location" in result.stdout + assert os.path.join("src", "pip-test-package") in result.stdout, str(result) @pytest.mark.network @@ -307,17 +307,17 @@ def test_uptodate_editables_flag(script, data): """ test the behavior of --editable --uptodate flag 
in the list command """ - script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') + script.pip("install", "-f", data.find_links, "--no-index", "simple==1.0") result = script.pip( - 'install', - '-e', - 'git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package', + "install", + "-e", + "git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package", ) result = script.pip( - 'list', '-f', data.find_links, '--no-index', '--editable', '--uptodate' + "list", "-f", data.find_links, "--no-index", "--editable", "--uptodate" ) - assert 'simple' not in result.stdout - assert os.path.join('src', 'pip-test-package') in result.stdout, str(result) + assert "simple" not in result.stdout + assert os.path.join("src", "pip-test-package") in result.stdout, str(result) @pytest.mark.network @@ -326,25 +326,25 @@ def test_uptodate_editables_columns_flag(script, data): test the behavior of --editable --uptodate --format=columns flag in the list command """ - script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') + script.pip("install", "-f", data.find_links, "--no-index", "simple==1.0") result = script.pip( - 'install', - '-e', - 'git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package', + "install", + "-e", + "git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package", ) result = script.pip( - 'list', - '-f', + "list", + "-f", data.find_links, - '--no-index', - '--editable', - '--uptodate', - '--format=columns', + "--no-index", + "--editable", + "--uptodate", + "--format=columns", ) - assert 'Package' in result.stdout - assert 'Version' in result.stdout - assert 'Location' in result.stdout - assert os.path.join('src', 'pip-test-package') in result.stdout, str(result) + assert "Package" in result.stdout + assert "Version" in result.stdout + assert "Location" in result.stdout + assert os.path.join("src", "pip-test-package") in result.stdout, str(result) @pytest.mark.network @@ -352,17 +352,17 @@ def 
test_outdated_editables_flag(script, data): """ test the behavior of --editable --outdated flag in the list command """ - script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') + script.pip("install", "-f", data.find_links, "--no-index", "simple==1.0") result = script.pip( - 'install', - '-e', - 'git+https://github.com/pypa/pip-test-package.git@0.1#egg=pip-test-package', + "install", + "-e", + "git+https://github.com/pypa/pip-test-package.git@0.1#egg=pip-test-package", ) result = script.pip( - 'list', '-f', data.find_links, '--no-index', '--editable', '--outdated' + "list", "-f", data.find_links, "--no-index", "--editable", "--outdated" ) - assert 'simple' not in result.stdout - assert os.path.join('src', 'pip-test-package') in result.stdout + assert "simple" not in result.stdout + assert os.path.join("src", "pip-test-package") in result.stdout @pytest.mark.network @@ -370,25 +370,25 @@ def test_outdated_editables_columns_flag(script, data): """ test the behavior of --editable --outdated flag in the list command """ - script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') + script.pip("install", "-f", data.find_links, "--no-index", "simple==1.0") result = script.pip( - 'install', - '-e', - 'git+https://github.com/pypa/pip-test-package.git@0.1#egg=pip-test-package', + "install", + "-e", + "git+https://github.com/pypa/pip-test-package.git@0.1#egg=pip-test-package", ) result = script.pip( - 'list', - '-f', + "list", + "-f", data.find_links, - '--no-index', - '--editable', - '--outdated', - '--format=columns', + "--no-index", + "--editable", + "--outdated", + "--format=columns", ) - assert 'Package' in result.stdout - assert 'Version' in result.stdout - assert 'Location' in result.stdout - assert os.path.join('src', 'pip-test-package') in result.stdout, str(result) + assert "Package" in result.stdout + assert "Version" in result.stdout + assert "Location" in result.stdout + assert os.path.join("src", "pip-test-package") in 
result.stdout, str(result) def test_outdated_not_required_flag(script, data): @@ -396,44 +396,44 @@ def test_outdated_not_required_flag(script, data): test the behavior of --outdated --not-required flag in the list command """ script.pip( - 'install', - '-f', + "install", + "-f", data.find_links, - '--no-index', - 'simple==2.0', - 'require_simple==1.0', + "--no-index", + "simple==2.0", + "require_simple==1.0", ) result = script.pip( - 'list', - '-f', + "list", + "-f", data.find_links, - '--no-index', - '--outdated', - '--not-required', - '--format=json', + "--no-index", + "--outdated", + "--not-required", + "--format=json", ) assert [] == json.loads(result.stdout) def test_outdated_pre(script, data): - script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') + script.pip("install", "-f", data.find_links, "--no-index", "simple==1.0") # Let's build a fake wheelhouse script.scratch_path.joinpath("wheelhouse").mkdir() - wheelhouse_path = script.scratch_path / 'wheelhouse' - wheelhouse_path.joinpath('simple-1.1-py2.py3-none-any.whl').write_text('') - wheelhouse_path.joinpath('simple-2.0.dev0-py2.py3-none-any.whl').write_text('') + wheelhouse_path = script.scratch_path / "wheelhouse" + wheelhouse_path.joinpath("simple-1.1-py2.py3-none-any.whl").write_text("") + wheelhouse_path.joinpath("simple-2.0.dev0-py2.py3-none-any.whl").write_text("") result = script.pip( - 'list', '--no-index', '--find-links', wheelhouse_path, '--format=json' + "list", "--no-index", "--find-links", wheelhouse_path, "--format=json" ) assert {"name": "simple", "version": "1.0"} in json.loads(result.stdout) result = script.pip( - 'list', - '--no-index', - '--find-links', + "list", + "--no-index", + "--find-links", wheelhouse_path, - '--outdated', - '--format=json', + "--outdated", + "--format=json", ) assert { "name": "simple", @@ -442,13 +442,13 @@ def test_outdated_pre(script, data): "latest_filetype": "wheel", } in json.loads(result.stdout) result_pre = script.pip( - 'list', - 
'--no-index', - '--find-links', + "list", + "--no-index", + "--find-links", wheelhouse_path, - '--outdated', - '--pre', - '--format=json', + "--outdated", + "--pre", + "--format=json", ) assert { "name": "simple", @@ -460,67 +460,67 @@ def test_outdated_pre(script, data): def test_outdated_formats(script, data): """ Test of different outdated formats """ - script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') + script.pip("install", "-f", data.find_links, "--no-index", "simple==1.0") # Let's build a fake wheelhouse script.scratch_path.joinpath("wheelhouse").mkdir() - wheelhouse_path = script.scratch_path / 'wheelhouse' - wheelhouse_path.joinpath('simple-1.1-py2.py3-none-any.whl').write_text('') + wheelhouse_path = script.scratch_path / "wheelhouse" + wheelhouse_path.joinpath("simple-1.1-py2.py3-none-any.whl").write_text("") result = script.pip( - 'list', '--no-index', '--find-links', wheelhouse_path, '--format=freeze' + "list", "--no-index", "--find-links", wheelhouse_path, "--format=freeze" ) - assert 'simple==1.0' in result.stdout + assert "simple==1.0" in result.stdout # Check columns result = script.pip( - 'list', - '--no-index', - '--find-links', + "list", + "--no-index", + "--find-links", wheelhouse_path, - '--outdated', - '--format=columns', + "--outdated", + "--format=columns", ) - assert 'Package Version Latest Type' in result.stdout - assert 'simple 1.0 1.1 wheel' in result.stdout + assert "Package Version Latest Type" in result.stdout + assert "simple 1.0 1.1 wheel" in result.stdout # Check freeze result = script.pip( - 'list', - '--no-index', - '--find-links', + "list", + "--no-index", + "--find-links", wheelhouse_path, - '--outdated', - '--format=freeze', + "--outdated", + "--format=freeze", ) - assert 'simple==1.0' in result.stdout + assert "simple==1.0" in result.stdout # Check json result = script.pip( - 'list', - '--no-index', - '--find-links', + "list", + "--no-index", + "--find-links", wheelhouse_path, - '--outdated', - 
'--format=json', + "--outdated", + "--format=json", ) data = json.loads(result.stdout) assert data == [ { - 'name': 'simple', - 'version': '1.0', - 'latest_version': '1.1', - 'latest_filetype': 'wheel', + "name": "simple", + "version": "1.0", + "latest_version": "1.1", + "latest_filetype": "wheel", } ] def test_not_required_flag(script, data): - script.pip('install', '-f', data.find_links, '--no-index', 'TopoRequires4') - result = script.pip('list', '--not-required', expect_stderr=True) - assert 'TopoRequires4 ' in result.stdout, str(result) - assert 'TopoRequires ' not in result.stdout - assert 'TopoRequires2 ' not in result.stdout - assert 'TopoRequires3 ' not in result.stdout + script.pip("install", "-f", data.find_links, "--no-index", "TopoRequires4") + result = script.pip("list", "--not-required", expect_stderr=True) + assert "TopoRequires4 " in result.stdout, str(result) + assert "TopoRequires " not in result.stdout + assert "TopoRequires2 " not in result.stdout + assert "TopoRequires3 " not in result.stdout def test_list_freeze(script, data): @@ -529,11 +529,11 @@ def test_list_freeze(script, data): """ script.pip( - 'install', '-f', data.find_links, '--no-index', 'simple==1.0', 'simple2==3.0' + "install", "-f", data.find_links, "--no-index", "simple==1.0", "simple2==3.0" ) - result = script.pip('list', '--format=freeze') - assert 'simple==1.0' in result.stdout, str(result) - assert 'simple2==3.0' in result.stdout, str(result) + result = script.pip("list", "--format=freeze") + assert "simple==1.0" in result.stdout, str(result) + assert "simple2==3.0" in result.stdout, str(result) def test_list_json(script, data): @@ -542,26 +542,26 @@ def test_list_json(script, data): """ script.pip( - 'install', '-f', data.find_links, '--no-index', 'simple==1.0', 'simple2==3.0' + "install", "-f", data.find_links, "--no-index", "simple==1.0", "simple2==3.0" ) - result = script.pip('list', '--format=json') + result = script.pip("list", "--format=json") data = 
json.loads(result.stdout) - assert {'name': 'simple', 'version': '1.0'} in data - assert {'name': 'simple2', 'version': '3.0'} in data + assert {"name": "simple", "version": "1.0"} in data + assert {"name": "simple2", "version": "3.0"} in data def test_list_path(tmpdir, script, data): """ Test list with --path. """ - result = script.pip('list', '--path', tmpdir, '--format=json') + result = script.pip("list", "--path", tmpdir, "--format=json") json_result = json.loads(result.stdout) - assert {'name': 'simple', 'version': '2.0'} not in json_result + assert {"name": "simple", "version": "2.0"} not in json_result - script.pip_install_local('--target', tmpdir, 'simple==2.0') - result = script.pip('list', '--path', tmpdir, '--format=json') + script.pip_install_local("--target", tmpdir, "simple==2.0") + result = script.pip("list", "--path", tmpdir, "--format=json") json_result = json.loads(result.stdout) - assert {'name': 'simple', 'version': '2.0'} in json_result + assert {"name": "simple", "version": "2.0"} in json_result def test_list_path_exclude_user(tmpdir, script, data): @@ -569,16 +569,16 @@ def test_list_path_exclude_user(tmpdir, script, data): Test list with --path and make sure packages from --user are not picked up. 
""" - script.pip_install_local('--user', 'simple2') - script.pip_install_local('--target', tmpdir, 'simple==1.0') + script.pip_install_local("--user", "simple2") + script.pip_install_local("--target", tmpdir, "simple==1.0") - result = script.pip('list', '--user', '--format=json') + result = script.pip("list", "--user", "--format=json") json_result = json.loads(result.stdout) - assert {'name': 'simple2', 'version': '3.0'} in json_result + assert {"name": "simple2", "version": "3.0"} in json_result - result = script.pip('list', '--path', tmpdir, '--format=json') + result = script.pip("list", "--path", tmpdir, "--format=json") json_result = json.loads(result.stdout) - assert {'name': 'simple', 'version': '1.0'} in json_result + assert {"name": "simple", "version": "1.0"} in json_result def test_list_path_multiple(tmpdir, script, data): @@ -590,14 +590,14 @@ def test_list_path_multiple(tmpdir, script, data): path2 = tmpdir / "path2" os.mkdir(path2) - script.pip_install_local('--target', path1, 'simple==2.0') - script.pip_install_local('--target', path2, 'simple2==3.0') + script.pip_install_local("--target", path1, "simple==2.0") + script.pip_install_local("--target", path2, "simple2==3.0") - result = script.pip('list', '--path', path1, '--format=json') + result = script.pip("list", "--path", path1, "--format=json") json_result = json.loads(result.stdout) - assert {'name': 'simple', 'version': '2.0'} in json_result + assert {"name": "simple", "version": "2.0"} in json_result - result = script.pip('list', '--path', path1, '--path', path2, '--format=json') + result = script.pip("list", "--path", path1, "--path", path2, "--format=json") json_result = json.loads(result.stdout) - assert {'name': 'simple', 'version': '2.0'} in json_result - assert {'name': 'simple2', 'version': '3.0'} in json_result + assert {"name": "simple", "version": "2.0"} in json_result + assert {"name": "simple2", "version": "3.0"} in json_result diff --git a/tests/functional/test_no_color.py 
b/tests/functional/test_no_color.py index 995896834e8..2f6ae6b3615 100644 --- a/tests/functional/test_no_color.py +++ b/tests/functional/test_no_color.py @@ -19,7 +19,7 @@ def test_no_color(script): # # This test will stay until someone has the time to rewrite it. command = ( - 'script --flush --quiet --return /tmp/pip-test-no-color.txt ' + "script --flush --quiet --return /tmp/pip-test-no-color.txt " '--command "pip uninstall {} noSuchPackage"' ) diff --git a/tests/functional/test_pep517.py b/tests/functional/test_pep517.py index 206260787e3..19ab9c82892 100644 --- a/tests/functional/test_pep517.py +++ b/tests/functional/test_pep517.py @@ -7,12 +7,12 @@ def make_project(tmpdir, requires=[], backend=None): - project_dir = (tmpdir / 'project').mkdir() - buildsys = {'requires': requires} + project_dir = (tmpdir / "project").mkdir() + buildsys = {"requires": requires} if backend: - buildsys['build-backend'] = backend - data = pytoml.dumps({'build-system': buildsys}) - project_dir.joinpath('pyproject.toml').write_text(data) + buildsys["build-backend"] = backend + data = pytoml.dumps({"build-system": buildsys}) + project_dir.joinpath("pyproject.toml").write_text(data) return project_dir @@ -23,10 +23,10 @@ def test_backend(tmpdir, data): req.load_pyproject_toml() env = BuildEnvironment() finder = make_test_finder(find_links=[data.backends]) - env.install_requirements(finder, ["dummy_backend"], 'normal', "Installing") + env.install_requirements(finder, ["dummy_backend"], "normal", "Installing") conflicting, missing = env.check_requirements(["dummy_backend"]) assert not conflicting and not missing - assert hasattr(req.pep517_backend, 'build_wheel') + assert hasattr(req.pep517_backend, "build_wheel") with env: assert req.pep517_backend.build_wheel("dir") == "Backend called" @@ -34,52 +34,52 @@ def test_backend(tmpdir, data): def test_pep517_install(script, tmpdir, data): """Check we can build with a custom backend""" project_dir = make_project( - tmpdir, 
requires=['test_backend'], backend="test_backend" + tmpdir, requires=["test_backend"], backend="test_backend" ) - result = script.pip('install', '--no-index', '-f', data.backends, project_dir) - result.assert_installed('project', editable=False) + result = script.pip("install", "--no-index", "-f", data.backends, project_dir) + result.assert_installed("project", editable=False) def test_pep517_install_with_reqs(script, tmpdir, data): """Backend generated requirements are installed in the build env""" project_dir = make_project( - tmpdir, requires=['test_backend'], backend="test_backend" + tmpdir, requires=["test_backend"], backend="test_backend" ) project_dir.joinpath("backend_reqs.txt").write_text("simplewheel") result = script.pip( - 'install', '--no-index', '-f', data.backends, '-f', data.packages, project_dir + "install", "--no-index", "-f", data.backends, "-f", data.packages, project_dir ) - result.assert_installed('project', editable=False) + result.assert_installed("project", editable=False) def test_no_use_pep517_without_setup_py(script, tmpdir, data): """Using --no-use-pep517 requires setup.py""" project_dir = make_project( - tmpdir, requires=['test_backend'], backend="test_backend" + tmpdir, requires=["test_backend"], backend="test_backend" ) result = script.pip( - 'install', - '--no-index', - '--no-use-pep517', - '-f', + "install", + "--no-index", + "--no-use-pep517", + "-f", data.backends, project_dir, expect_error=True, ) - assert 'project does not have a setup.py' in result.stderr + assert "project does not have a setup.py" in result.stderr def test_conflicting_pep517_backend_requirements(script, tmpdir, data): project_dir = make_project( - tmpdir, requires=['test_backend', 'simplewheel==1.0'], backend="test_backend" + tmpdir, requires=["test_backend", "simplewheel==1.0"], backend="test_backend" ) project_dir.joinpath("backend_reqs.txt").write_text("simplewheel==2.0") result = script.pip( - 'install', - '--no-index', - '-f', + "install", + 
"--no-index", + "-f", data.backends, - '-f', + "-f", data.packages, project_dir, expect_error=True, @@ -87,9 +87,9 @@ def test_conflicting_pep517_backend_requirements(script, tmpdir, data): assert ( result.returncode != 0 and ( - 'Some build dependencies for %s conflict with the backend ' - 'dependencies: simplewheel==1.0 is incompatible with ' - 'simplewheel==2.0.' % path_to_url(project_dir) + "Some build dependencies for %s conflict with the backend " + "dependencies: simplewheel==1.0 is incompatible with " + "simplewheel==2.0." % path_to_url(project_dir) ) in result.stderr ), str(result) @@ -97,58 +97,58 @@ def test_conflicting_pep517_backend_requirements(script, tmpdir, data): def test_pep517_backend_requirements_already_satisfied(script, tmpdir, data): project_dir = make_project( - tmpdir, requires=['test_backend', 'simplewheel==1.0'], backend="test_backend" + tmpdir, requires=["test_backend", "simplewheel==1.0"], backend="test_backend" ) project_dir.joinpath("backend_reqs.txt").write_text("simplewheel") result = script.pip( - 'install', '--no-index', '-f', data.backends, '-f', data.packages, project_dir + "install", "--no-index", "-f", data.backends, "-f", data.packages, project_dir ) - assert 'Installing backend dependencies:' not in result.stdout + assert "Installing backend dependencies:" not in result.stdout def test_pep517_install_with_no_cache_dir(script, tmpdir, data): """Check builds with a custom backends work, even with no cache. 
""" project_dir = make_project( - tmpdir, requires=['test_backend'], backend="test_backend" + tmpdir, requires=["test_backend"], backend="test_backend" ) result = script.pip( - 'install', '--no-cache-dir', '--no-index', '-f', data.backends, project_dir + "install", "--no-cache-dir", "--no-index", "-f", data.backends, project_dir ) - result.assert_installed('project', editable=False) + result.assert_installed("project", editable=False) def make_pyproject_with_setup(tmpdir, build_system=True, set_backend=True): - project_dir = (tmpdir / 'project').mkdir() - setup_script = 'from setuptools import setup\n' + project_dir = (tmpdir / "project").mkdir() + setup_script = "from setuptools import setup\n" expect_script_dir_on_path = True if build_system: - buildsys = {'requires': ['setuptools', 'wheel']} + buildsys = {"requires": ["setuptools", "wheel"]} if set_backend: - buildsys['build-backend'] = 'setuptools.build_meta' + buildsys["build-backend"] = "setuptools.build_meta" expect_script_dir_on_path = False - project_data = pytoml.dumps({'build-system': buildsys}) + project_data = pytoml.dumps({"build-system": buildsys}) else: - project_data = '' + project_data = "" if expect_script_dir_on_path: - setup_script += 'from pep517_test import __version__\n' + setup_script += "from pep517_test import __version__\n" else: setup_script += ( - 'try:\n' - ' import pep517_test\n' - 'except ImportError:\n' - ' pass\n' - 'else:\n' + "try:\n" + " import pep517_test\n" + "except ImportError:\n" + " pass\n" + "else:\n" ' raise RuntimeError("Source dir incorrectly on sys.path")\n' ) setup_script += 'setup(name="pep517_test", version="0.1", packages=["pep517_test"])' - project_dir.joinpath('pyproject.toml').write_text(project_data) - project_dir.joinpath('setup.py').write_text(setup_script) + project_dir.joinpath("pyproject.toml").write_text(project_data) + project_dir.joinpath("setup.py").write_text(setup_script) package_dir = (project_dir / "pep517_test").mkdir() - 
package_dir.joinpath('__init__.py').write_text('__version__ = "0.1"') + package_dir.joinpath("__init__.py").write_text('__version__ = "0.1"') return project_dir, "pep517_test" @@ -157,7 +157,7 @@ def test_no_build_system_section(script, tmpdir, data, common_wheels): """ project_dir, name = make_pyproject_with_setup(tmpdir, build_system=False) result = script.pip( - 'install', '--no-cache-dir', '--no-index', '-f', common_wheels, project_dir + "install", "--no-cache-dir", "--no-index", "-f", common_wheels, project_dir ) result.assert_installed(name, editable=False) @@ -167,7 +167,7 @@ def test_no_build_backend_entry(script, tmpdir, data, common_wheels): """ project_dir, name = make_pyproject_with_setup(tmpdir, set_backend=False) result = script.pip( - 'install', '--no-cache-dir', '--no-index', '-f', common_wheels, project_dir + "install", "--no-cache-dir", "--no-index", "-f", common_wheels, project_dir ) result.assert_installed(name, editable=False) @@ -177,7 +177,7 @@ def test_explicit_setuptools_backend(script, tmpdir, data, common_wheels): """ project_dir, name = make_pyproject_with_setup(tmpdir) result = script.pip( - 'install', '--no-cache-dir', '--no-index', '-f', common_wheels, project_dir + "install", "--no-cache-dir", "--no-index", "-f", common_wheels, project_dir ) result.assert_installed(name, editable=False) @@ -187,15 +187,15 @@ def test_pep517_and_build_options(script, tmpdir, data, common_wheels): """Backend generated requirements are installed in the build env""" project_dir, name = make_pyproject_with_setup(tmpdir) result = script.pip( - 'wheel', - '--wheel-dir', + "wheel", + "--wheel-dir", tmpdir, - '--build-option', - 'foo', - '-f', + "--build-option", + "foo", + "-f", common_wheels, project_dir, expect_error=True, ) - assert 'Cannot build wheel' in result.stderr - assert 'when --build-options is present' in result.stderr + assert "Cannot build wheel" in result.stderr + assert "when --build-options is present" in result.stderr diff --git 
a/tests/functional/test_search.py b/tests/functional/test_search.py index 505f0d7beef..5abf6dce78e 100644 --- a/tests/functional/test_search.py +++ b/tests/functional/test_search.py @@ -5,14 +5,10 @@ from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS from pip._internal.commands import create_command -from pip._internal.commands.search import ( - highest_version, - print_results, - transform_hits, -) +from pip._internal.commands.search import highest_version, print_results, transform_hits from tests.lib import pyversion -if pyversion >= '3': +if pyversion >= "3": VERBOSE_FALSE = False else: VERBOSE_FALSE = 0 @@ -23,8 +19,8 @@ def test_version_compare(): Test version comparison. """ - assert highest_version(['1.0', '2.0', '0.1']) == '2.0' - assert highest_version(['1.0a1', '1.0']) == '1.0' + assert highest_version(["1.0", "2.0", "0.1"]) == "2.0" + assert highest_version(["1.0a1", "1.0"]) == "1.0" def test_pypi_xml_transformation(): @@ -33,18 +29,18 @@ def test_pypi_xml_transformation(): """ pypi_hits = [ - {'name': 'foo', 'summary': 'foo summary', 'version': '1.0'}, - {'name': 'foo', 'summary': 'foo summary v2', 'version': '2.0'}, + {"name": "foo", "summary": "foo summary", "version": "1.0"}, + {"name": "foo", "summary": "foo summary v2", "version": "2.0"}, { - '_pypi_ordering': 50, - 'name': 'bar', - 'summary': 'bar summary', - 'version': '1.0', + "_pypi_ordering": 50, + "name": "bar", + "summary": "bar summary", + "version": "1.0", }, ] expected = [ - {'versions': ['1.0', '2.0'], 'name': 'foo', 'summary': 'foo summary v2'}, - {'versions': ['1.0'], 'name': 'bar', 'summary': 'bar summary'}, + {"versions": ["1.0", "2.0"], "name": "foo", "summary": "foo summary v2"}, + {"versions": ["1.0"], "name": "bar", "summary": "bar summary"}, ] assert transform_hits(pypi_hits) == expected @@ -55,10 +51,8 @@ def test_basic_search(script): End to end test of search command. 
""" - output = script.pip('search', 'pip') - assert ( - 'The PyPA recommended tool for installing Python packages.' in output.stdout - ) + output = script.pip("search", "pip") + assert "The PyPA recommended tool for installing Python packages." in output.stdout @pytest.mark.network @@ -75,19 +69,17 @@ def test_multiple_search(script): Test searching for multiple packages at once. """ - output = script.pip('search', 'pip', 'INITools') - assert ( - 'The PyPA recommended tool for installing Python packages.' in output.stdout - ) - assert 'Tools for parsing and using INI-style files' in output.stdout + output = script.pip("search", "pip", "INITools") + assert "The PyPA recommended tool for installing Python packages." in output.stdout + assert "Tools for parsing and using INI-style files" in output.stdout def test_search_missing_argument(script): """ Test missing required argument for search """ - result = script.pip('search', expect_error=True) - assert 'ERROR: Missing required argument (search query).' in result.stderr + result = script.pip("search", expect_error=True) + assert "ERROR: Missing required argument (search query)." 
in result.stderr @pytest.mark.network @@ -95,7 +87,7 @@ def test_run_method_should_return_success_when_find_packages(): """ Test SearchCommand.run for found package """ - command = create_command('search') + command = create_command("search") cmdline = "--index=https://pypi.org/pypi pip" with command.main_context(): options, args = command.parse_args(cmdline.split()) @@ -108,7 +100,7 @@ def test_run_method_should_return_no_matches_found_when_does_not_find_pkgs(): """ Test SearchCommand.run for no matches """ - command = create_command('search') + command = create_command("search") cmdline = "--index=https://pypi.org/pypi nonexistentpackage" with command.main_context(): options, args = command.parse_args(cmdline.split()) @@ -121,7 +113,7 @@ def test_search_should_exit_status_code_zero_when_find_packages(script): """ Test search exit status code for package found """ - result = script.pip('search', 'pip') + result = script.pip("search", "pip") assert result.returncode == SUCCESS @@ -130,7 +122,7 @@ def test_search_exit_status_code_when_finds_no_package(script): """ Test search exit status code for no matches """ - result = script.pip('search', 'nonexistentpackage', expect_error=True) + result = script.pip("search", "nonexistentpackage", expect_error=True) assert result.returncode == NO_MATCHES_FOUND, result.returncode @@ -140,9 +132,9 @@ def test_latest_prerelease_install_message(caplog, monkeypatch): """ hits = [ { - 'name': 'ni', - 'summary': 'For knights who say Ni!', - 'versions': ['1.0.0', '1.0.1a'], + "name": "ni", + "summary": "For knights who say Ni!", + "versions": ["1.0.0", "1.0.1a"], } ] @@ -157,7 +149,7 @@ def test_latest_prerelease_install_message(caplog, monkeypatch): message = caplog.records[-1].getMessage() assert 'pre-release; install with "pip install --pre"' in message - assert get_dist.calls == [pretend.call('ni')] + assert get_dist.calls == [pretend.call("ni")] def test_search_print_results_should_contain_latest_versions(caplog): @@ -166,14 
+158,14 @@ def test_search_print_results_should_contain_latest_versions(caplog): """ hits = [ { - 'name': 'testlib1', - 'summary': 'Test library 1.', - 'versions': ['1.0.5', '1.0.3'], + "name": "testlib1", + "summary": "Test library 1.", + "versions": ["1.0.5", "1.0.3"], }, { - 'name': 'testlib2', - 'summary': 'Test library 1.', - 'versions': ['2.0.1', '2.0.3'], + "name": "testlib2", + "summary": "Test library 1.", + "versions": ["2.0.1", "2.0.3"], }, ] @@ -181,5 +173,5 @@ def test_search_print_results_should_contain_latest_versions(caplog): print_results(hits) log_messages = sorted([r.getMessage() for r in caplog.records]) - assert log_messages[0].startswith('testlib1 (1.0.5)') - assert log_messages[1].startswith('testlib2 (2.0.3)') + assert log_messages[0].startswith("testlib1 (1.0.5)") + assert log_messages[1].startswith("testlib2 (2.0.3)") diff --git a/tests/functional/test_show.py b/tests/functional/test_show.py index d93a5606d9e..b6fa0f44d4c 100644 --- a/tests/functional/test_show.py +++ b/tests/functional/test_show.py @@ -11,13 +11,13 @@ def test_basic_show(script): """ Test end to end test for show command. """ - result = script.pip('show', 'pip') + result = script.pip("show", "pip") lines = result.stdout.splitlines() assert len(lines) == 10 - assert 'Name: pip' in lines - assert 'Version: %s' % __version__ in lines - assert any(line.startswith('Location: ') for line in lines) - assert 'Requires: ' in lines + assert "Name: pip" in lines + assert "Version: %s" % __version__ in lines + assert any(line.startswith("Location: ") for line in lines) + assert "Requires: " in lines def test_show_with_files_not_found(script, data): @@ -25,29 +25,29 @@ def test_show_with_files_not_found(script, data): Test for show command with installed files listing enabled and installed-files.txt not found. 
""" - editable = data.packages.joinpath('SetupPyUTF8') - script.pip('install', '-e', editable) - result = script.pip('show', '-f', 'SetupPyUTF8') + editable = data.packages.joinpath("SetupPyUTF8") + script.pip("install", "-e", editable) + result = script.pip("show", "-f", "SetupPyUTF8") lines = result.stdout.splitlines() assert len(lines) == 12 - assert 'Name: SetupPyUTF8' in lines - assert 'Version: 0.0.0' in lines - assert any(line.startswith('Location: ') for line in lines) - assert 'Requires: ' in lines - assert 'Files:' in lines - assert 'Cannot locate installed-files.txt' in lines + assert "Name: SetupPyUTF8" in lines + assert "Version: 0.0.0" in lines + assert any(line.startswith("Location: ") for line in lines) + assert "Requires: " in lines + assert "Files:" in lines + assert "Cannot locate installed-files.txt" in lines def test_show_with_files_from_wheel(script, data): """ Test that a wheel's files can be listed """ - wheel_file = data.packages.joinpath('simple.dist-0.1-py2.py3-none-any.whl') - script.pip('install', '--no-index', wheel_file) - result = script.pip('show', '-f', 'simple.dist') + wheel_file = data.packages.joinpath("simple.dist-0.1-py2.py3-none-any.whl") + script.pip("install", "--no-index", wheel_file) + result = script.pip("show", "-f", "simple.dist") lines = result.stdout.splitlines() - assert 'Name: simple.dist' in lines - assert 'Cannot locate installed-files.txt' not in lines[6], lines[6] + assert "Name: simple.dist" in lines + assert "Cannot locate installed-files.txt" not in lines[6], lines[6] assert re.search(r"Files:\n( .+\n)+", result.stdout) @@ -56,10 +56,10 @@ def test_show_with_all_files(script): """ Test listing all files in the show command. 
""" - script.pip('install', 'initools==0.2') - result = script.pip('show', '--files', 'initools') + script.pip("install", "initools==0.2") + result = script.pip("show", "--files", "initools") lines = result.stdout.splitlines() - assert 'Cannot locate installed-files.txt' not in lines[6], lines[6] + assert "Cannot locate installed-files.txt" not in lines[6], lines[6] assert re.search(r"Files:\n( .+\n)+", result.stdout) @@ -67,15 +67,15 @@ def test_missing_argument(script): """ Test show command with no arguments. """ - result = script.pip('show', expect_error=True) - assert 'ERROR: Please provide a package name or names.' in result.stderr + result = script.pip("show", expect_error=True) + assert "ERROR: Please provide a package name or names." in result.stderr def test_find_package_not_found(): """ Test trying to get info about a nonexistent package. """ - result = search_packages_info(['abcd3']) + result = search_packages_info(["abcd3"]) assert len(list(result)) == 0 @@ -88,8 +88,8 @@ def test_report_single_not_found(script): # Also, the following should report an error as there are no results # to print. Consequently, there is no need to pass # allow_stderr_warning=True since this is implied by expect_error=True. - result = script.pip('show', 'Abcd-3', expect_error=True) - assert 'WARNING: Package(s) not found: Abcd-3' in result.stderr + result = script.pip("show", "Abcd-3", expect_error=True) + assert "WARNING: Package(s) not found: Abcd-3" in result.stderr assert not result.stdout.splitlines() @@ -98,11 +98,11 @@ def test_report_mixed_not_found(script): Test passing a mixture of found and not-found names. """ # We test passing non-canonicalized names. 
- result = script.pip('show', 'Abcd3', 'A-B-C', 'pip', allow_stderr_warning=True) - assert 'WARNING: Package(s) not found: A-B-C, Abcd3' in result.stderr + result = script.pip("show", "Abcd3", "A-B-C", "pip", allow_stderr_warning=True) + assert "WARNING: Package(s) not found: A-B-C, Abcd3" in result.stderr lines = result.stdout.splitlines() assert len(lines) == 10 - assert 'Name: pip' in lines + assert "Name: pip" in lines def test_search_any_case(): @@ -110,9 +110,9 @@ def test_search_any_case(): Search for a package in any case. """ - result = list(search_packages_info(['PIP'])) + result = list(search_packages_info(["PIP"])) assert len(result) == 1 - assert result[0]['name'] == 'pip' + assert result[0]["name"] == "pip" def test_more_than_one_package(): @@ -120,7 +120,7 @@ def test_more_than_one_package(): Search for more than one package. """ - result = list(search_packages_info(['Pip', 'pytest', 'Virtualenv'])) + result = list(search_packages_info(["Pip", "pytest", "Virtualenv"])) assert len(result) == 3 @@ -128,9 +128,9 @@ def test_show_verbose_with_classifiers(script): """ Test that classifiers can be listed """ - result = script.pip('show', 'pip', '--verbose') + result = script.pip("show", "pip", "--verbose") lines = result.stdout.splitlines() - assert 'Name: pip' in lines + assert "Name: pip" in lines assert re.search(r"Classifiers:\n( .+\n)+", result.stdout) assert "Intended Audience :: Developers" in result.stdout @@ -139,45 +139,45 @@ def test_show_verbose_installer(script, data): """ Test that the installer is shown (this currently needs a wheel install) """ - wheel_file = data.packages.joinpath('simple.dist-0.1-py2.py3-none-any.whl') - script.pip('install', '--no-index', wheel_file) - result = script.pip('show', '--verbose', 'simple.dist') + wheel_file = data.packages.joinpath("simple.dist-0.1-py2.py3-none-any.whl") + script.pip("install", "--no-index", wheel_file) + result = script.pip("show", "--verbose", "simple.dist") lines = 
result.stdout.splitlines() - assert 'Name: simple.dist' in lines - assert 'Installer: pip' in lines + assert "Name: simple.dist" in lines + assert "Installer: pip" in lines def test_show_verbose(script): """ Test end to end test for verbose show command. """ - result = script.pip('show', '--verbose', 'pip') + result = script.pip("show", "--verbose", "pip") lines = result.stdout.splitlines() - assert any(line.startswith('Metadata-Version: ') for line in lines) - assert any(line.startswith('Installer: ') for line in lines) - assert 'Entry-points:' in lines - assert 'Classifiers:' in lines + assert any(line.startswith("Metadata-Version: ") for line in lines) + assert any(line.startswith("Installer: ") for line in lines) + assert "Entry-points:" in lines + assert "Classifiers:" in lines def test_all_fields(script): """ Test that all the fields are present """ - result = script.pip('show', 'pip') + result = script.pip("show", "pip") lines = result.stdout.splitlines() expected = { - 'Name', - 'Version', - 'Summary', - 'Home-page', - 'Author', - 'Author-email', - 'License', - 'Location', - 'Requires', - 'Required-by', + "Name", + "Version", + "Summary", + "Home-page", + "Author", + "Author-email", + "License", + "Location", + "Requires", + "Required-by", } - actual = {re.sub(':.*$', '', line) for line in lines} + actual = {re.sub(":.*$", "", line) for line in lines} assert actual == expected @@ -185,7 +185,7 @@ def test_pip_show_is_short(script): """ Test that pip show stays short """ - result = script.pip('show', 'pip') + result = script.pip("show", "pip") lines = result.stdout.splitlines() assert len(lines) <= 10 @@ -194,17 +194,17 @@ def test_pip_show_divider(script, data): """ Expect a divider between packages """ - script.pip('install', 'pip-test-package', '--no-index', '-f', data.packages) - result = script.pip('show', 'pip', 'pip-test-package') + script.pip("install", "pip-test-package", "--no-index", "-f", data.packages) + result = script.pip("show", "pip", 
"pip-test-package") lines = result.stdout.splitlines() assert "---" in lines def test_package_name_is_canonicalized(script, data): - script.pip('install', 'pip-test-package', '--no-index', '-f', data.packages) + script.pip("install", "pip-test-package", "--no-index", "-f", data.packages) - dash_show_result = script.pip('show', 'pip-test-package') - underscore_upper_show_result = script.pip('show', 'pip-test_Package') + dash_show_result = script.pip("show", "pip-test-package") + underscore_upper_show_result = script.pip("show", "pip-test_Package") assert underscore_upper_show_result.returncode == 0 assert underscore_upper_show_result.stdout == dash_show_result.stdout @@ -214,14 +214,14 @@ def test_show_required_by_packages_basic(script, data): """ Test that installed packages that depend on this package are shown """ - editable_path = os.path.join(data.src, 'requires_simple') - script.pip('install', '--no-index', '-f', data.find_links, editable_path) + editable_path = os.path.join(data.src, "requires_simple") + script.pip("install", "--no-index", "-f", data.find_links, editable_path) - result = script.pip('show', 'simple') + result = script.pip("show", "simple") lines = result.stdout.splitlines() - assert 'Name: simple' in lines - assert 'Required-by: requires-simple' in lines + assert "Name: simple" in lines + assert "Required-by: requires-simple" in lines def test_show_required_by_packages_capitalized(script, data): @@ -229,14 +229,14 @@ def test_show_required_by_packages_capitalized(script, data): Test that the installed packages which depend on a package are shown where the package has a capital letter """ - editable_path = os.path.join(data.src, 'requires_capitalized') - script.pip('install', '--no-index', '-f', data.find_links, editable_path) + editable_path = os.path.join(data.src, "requires_capitalized") + script.pip("install", "--no-index", "-f", data.find_links, editable_path) - result = script.pip('show', 'simple') + result = script.pip("show", "simple") 
lines = result.stdout.splitlines() - assert 'Name: simple' in lines - assert 'Required-by: Requires-Capitalized' in lines + assert "Name: simple" in lines + assert "Required-by: Requires-Capitalized" in lines def test_show_required_by_packages_requiring_capitalized(script, data): @@ -245,14 +245,14 @@ def test_show_required_by_packages_requiring_capitalized(script, data): where the package has a name with a mix of lower and upper case letters """ - required_package_path = os.path.join(data.src, 'requires_capitalized') - script.pip('install', '--no-index', '-f', data.find_links, required_package_path) - editable_path = os.path.join(data.src, 'requires_requires_capitalized') - script.pip('install', '--no-index', '-f', data.find_links, editable_path) + required_package_path = os.path.join(data.src, "requires_capitalized") + script.pip("install", "--no-index", "-f", data.find_links, required_package_path) + editable_path = os.path.join(data.src, "requires_requires_capitalized") + script.pip("install", "--no-index", "-f", data.find_links, editable_path) - result = script.pip('show', 'Requires_Capitalized') + result = script.pip("show", "Requires_Capitalized") lines = result.stdout.splitlines() print(lines) - assert 'Name: Requires-Capitalized' in lines - assert 'Required-by: requires-requires-capitalized' in lines + assert "Name: Requires-Capitalized" in lines + assert "Required-by: requires-requires-capitalized" in lines diff --git a/tests/functional/test_uninstall.py b/tests/functional/test_uninstall.py index ca861da0919..21298aa0468 100644 --- a/tests/functional/test_uninstall.py +++ b/tests/functional/test_uninstall.py @@ -23,15 +23,15 @@ def test_basic_uninstall(script): Test basic install and uninstall. 
""" - result = script.pip('install', 'INITools==0.2') - assert join(script.site_packages, 'initools') in result.files_created, sorted( + result = script.pip("install", "INITools==0.2") + assert join(script.site_packages, "initools") in result.files_created, sorted( result.files_created.keys() ) # the import forces the generation of __pycache__ if the version of python # supports it - script.run('python', '-c', "import initools") - result2 = script.pip('uninstall', 'INITools', '-y') - assert_all_changes(result, result2, [script.venv / 'build', 'cache']) + script.run("python", "-c", "import initools") + result2 = script.pip("uninstall", "INITools", "-y") + assert_all_changes(result, result2, [script.venv / "build", "cache"]) def test_basic_uninstall_distutils(script): @@ -40,7 +40,7 @@ def test_basic_uninstall_distutils(script): """ script.scratch_path.joinpath("distutils_install").mkdir() - pkg_path = script.scratch_path / 'distutils_install' + pkg_path = script.scratch_path / "distutils_install" pkg_path.joinpath("setup.py").write_text( textwrap.dedent( """ @@ -52,11 +52,11 @@ def test_basic_uninstall_distutils(script): """ ) ) - result = script.run('python', pkg_path / 'setup.py', 'install') - result = script.pip('list', '--format=json') + result = script.run("python", pkg_path / "setup.py", "install") + result = script.pip("list", "--format=json") assert {"name": "distutils-install", "version": "0.1"} in json.loads(result.stdout) result = script.pip( - 'uninstall', 'distutils_install', '-y', expect_stderr=True, expect_error=True + "uninstall", "distutils_install", "-y", expect_stderr=True, expect_error=True ) assert ( "Cannot uninstall 'distutils-install'. It is a distutils installed " @@ -71,13 +71,13 @@ def test_basic_uninstall_with_scripts(script): Uninstall an easy_installed package with scripts. 
""" - result = script.easy_install('PyLogo', expect_stderr=True) - easy_install_pth = script.site_packages / 'easy-install.pth' - pylogo = sys.platform == 'win32' and 'pylogo' or 'PyLogo' + result = script.easy_install("PyLogo", expect_stderr=True) + easy_install_pth = script.site_packages / "easy-install.pth" + pylogo = sys.platform == "win32" and "pylogo" or "PyLogo" assert pylogo in result.files_updated[easy_install_pth].bytes - result2 = script.pip('uninstall', 'pylogo', '-y') + result2 = script.pip("uninstall", "pylogo", "-y") assert_all_changes( - result, result2, [script.venv / 'build', 'cache', easy_install_pth] + result, result2, [script.venv / "build", "cache", easy_install_pth] ) @@ -87,15 +87,15 @@ def test_uninstall_easy_install_after_import(script): Uninstall an easy_installed package after it's been imported """ - result = script.easy_install('--always-unzip', 'INITools==0.2', expect_stderr=True) + result = script.easy_install("--always-unzip", "INITools==0.2", expect_stderr=True) # the import forces the generation of __pycache__ if the version of python # supports it - script.run('python', '-c', "import initools") - result2 = script.pip('uninstall', 'INITools', '-y') + script.run("python", "-c", "import initools") + result2 = script.pip("uninstall", "INITools", "-y") assert_all_changes( result, result2, - [script.venv / 'build', 'cache', script.site_packages / 'easy-install.pth'], + [script.venv / "build", "cache", script.site_packages / "easy-install.pth"], ) @@ -106,25 +106,25 @@ def test_uninstall_trailing_newline(script): lacks a trailing newline """ - script.easy_install('INITools==0.2', expect_stderr=True) - script.easy_install('PyLogo', expect_stderr=True) - easy_install_pth = script.site_packages_path / 'easy-install.pth' + script.easy_install("INITools==0.2", expect_stderr=True) + script.easy_install("PyLogo", expect_stderr=True) + easy_install_pth = script.site_packages_path / "easy-install.pth" # trim trailing newline from 
easy-install.pth with open(easy_install_pth) as f: pth_before = f.read() - with open(easy_install_pth, 'w') as f: + with open(easy_install_pth, "w") as f: f.write(pth_before.rstrip()) # uninstall initools - script.pip('uninstall', 'INITools', '-y') + script.pip("uninstall", "INITools", "-y") with open(easy_install_pth) as f: pth_after = f.read() # verify that only initools is removed before_without_initools = [ - line for line in pth_before.splitlines() if 'initools' not in line.lower() + line for line in pth_before.splitlines() if "initools" not in line.lower() ] lines_after = pth_after.splitlines() @@ -138,15 +138,15 @@ def test_basic_uninstall_namespace_package(script): the namespace and everything in it. """ - result = script.pip('install', 'pd.requires==0.0.3') - assert join(script.site_packages, 'pd') in result.files_created, sorted( + result = script.pip("install", "pd.requires==0.0.3") + assert join(script.site_packages, "pd") in result.files_created, sorted( result.files_created.keys() ) - result2 = script.pip('uninstall', 'pd.find', '-y') - assert join(script.site_packages, 'pd') not in result2.files_deleted, sorted( + result2 = script.pip("uninstall", "pd.find", "-y") + assert join(script.site_packages, "pd") not in result2.files_deleted, sorted( result2.files_deleted.keys() ) - assert join(script.site_packages, 'pd', 'find') in result2.files_deleted, sorted( + assert join(script.site_packages, "pd", "find") in result2.files_deleted, sorted( result2.files_deleted.keys() ) @@ -162,30 +162,30 @@ def test_uninstall_overlapping_package(script, data): parent_pkg = data.packages.joinpath("parent-0.1.tar.gz") child_pkg = data.packages.joinpath("child-0.1.tar.gz") - result1 = script.pip('install', parent_pkg, expect_error=False) - assert join(script.site_packages, 'parent') in result1.files_created, sorted( + result1 = script.pip("install", parent_pkg, expect_error=False) + assert join(script.site_packages, "parent") in result1.files_created, sorted( 
result1.files_created.keys() ) - result2 = script.pip('install', child_pkg, expect_error=False) - assert join(script.site_packages, 'child') in result2.files_created, sorted( + result2 = script.pip("install", child_pkg, expect_error=False) + assert join(script.site_packages, "child") in result2.files_created, sorted( result2.files_created.keys() ) assert ( - normpath(join(script.site_packages, 'parent/plugins/child_plugin.py')) + normpath(join(script.site_packages, "parent/plugins/child_plugin.py")) in result2.files_created ), sorted(result2.files_created.keys()) # The import forces the generation of __pycache__ if the version of python # supports it - script.run('python', '-c', "import parent.plugins.child_plugin, child") - result3 = script.pip('uninstall', '-y', 'child', expect_error=False) - assert join(script.site_packages, 'child') in result3.files_deleted, sorted( + script.run("python", "-c", "import parent.plugins.child_plugin, child") + result3 = script.pip("uninstall", "-y", "child", expect_error=False) + assert join(script.site_packages, "child") in result3.files_deleted, sorted( result3.files_created.keys() ) assert ( - normpath(join(script.site_packages, 'parent/plugins/child_plugin.py')) + normpath(join(script.site_packages, "parent/plugins/child_plugin.py")) in result3.files_deleted ), sorted(result3.files_deleted.keys()) - assert join(script.site_packages, 'parent') not in result3.files_deleted, sorted( + assert join(script.site_packages, "parent") not in result3.files_deleted, sorted( result3.files_deleted.keys() ) # Additional check: uninstalling 'child' should return things to the @@ -201,11 +201,11 @@ def test_uninstall_entry_point(script, console_scripts): Test uninstall package with two or more entry points in the same section, whose name contain a colon. 
""" - pkg_name = 'ep_install' + pkg_name = "ep_install" pkg_path = create_test_package_with_setup( script, name=pkg_name, - version='0.1', + version="0.1", entry_points={ "console_scripts": [console_scripts], "pip_test.ep": [ @@ -214,16 +214,16 @@ def test_uninstall_entry_point(script, console_scripts): ], }, ) - script_name = script.bin_path.joinpath(console_scripts.split('=')[0].strip()) - if sys.platform == 'win32': - script_name += '.exe' - result = script.pip('install', pkg_path) + script_name = script.bin_path.joinpath(console_scripts.split("=")[0].strip()) + if sys.platform == "win32": + script_name += ".exe" + result = script.pip("install", pkg_path) assert script_name.exists() - result = script.pip('list', '--format=json') + result = script.pip("list", "--format=json") assert {"name": "ep-install", "version": "0.1"} in json.loads(result.stdout) - script.pip('uninstall', 'ep_install', '-y') + script.pip("uninstall", "ep_install", "-y") assert not script_name.exists() - result2 = script.pip('list', '--format=json') + result2 = script.pip("list", "--format=json") assert {"name": "ep-install", "version": "0.1"} not in json.loads(result2.stdout) @@ -235,15 +235,15 @@ def test_uninstall_gui_scripts(script): pkg_path = create_test_package_with_setup( script, name=pkg_name, - version='0.1', + version="0.1", entry_points={"gui_scripts": ["test_ = distutils_install"]}, ) - script_name = script.bin_path.joinpath('test_') - if sys.platform == 'win32': - script_name += '.exe' - script.pip('install', pkg_path) + script_name = script.bin_path.joinpath("test_") + if sys.platform == "win32": + script_name += ".exe" + script.pip("install", pkg_path) assert script_name.exists() - script.pip('uninstall', pkg_name, '-y') + script.pip("uninstall", pkg_name, "-y") assert not script_name.exists() @@ -253,14 +253,14 @@ def test_uninstall_console_scripts(script): Test uninstalling a package with more files (console_script entry points, extra directories). 
""" - args = ['install'] - args.append('discover') + args = ["install"] + args.append("discover") result = script.pip(*args) - assert script.bin / 'discover' + script.exe in result.files_created, sorted( + assert script.bin / "discover" + script.exe in result.files_created, sorted( result.files_created.keys() ) - result2 = script.pip('uninstall', 'discover', '-y') - assert_all_changes(result, result2, [script.venv / 'build', 'cache']) + result2 = script.pip("uninstall", "discover", "-y") + assert_all_changes(result, result2, [script.venv / "build", "cache"]) @pytest.mark.network @@ -268,15 +268,15 @@ def test_uninstall_easy_installed_console_scripts(script): """ Test uninstalling package with console_scripts that is easy_installed. """ - result = script.easy_install('discover') - assert script.bin / 'discover' + script.exe in result.files_created, sorted( + result = script.easy_install("discover") + assert script.bin / "discover" + script.exe in result.files_created, sorted( result.files_created.keys() ) - result2 = script.pip('uninstall', 'discover', '-y') + result2 = script.pip("uninstall", "discover", "-y") assert_all_changes( result, result2, - [script.venv / 'build', 'cache', script.site_packages / 'easy-install.pth'], + [script.venv / "build", "cache", script.site_packages / "easy-install.pth"], ) @@ -286,21 +286,21 @@ def test_uninstall_editable_from_svn(script, tmpdir): Test uninstalling an editable installation from svn. 
""" result = script.pip( - 'install', - '-e', - '%s#egg=initools' - % (local_checkout('svn+http://svn.colorstudy.com/INITools', tmpdir)), + "install", + "-e", + "%s#egg=initools" + % (local_checkout("svn+http://svn.colorstudy.com/INITools", tmpdir)), ) - result.assert_installed('INITools') - result2 = script.pip('uninstall', '-y', 'initools') - assert script.venv / 'src' / 'initools' in result2.files_after + result.assert_installed("INITools") + result2 = script.pip("uninstall", "-y", "initools") + assert script.venv / "src" / "initools" in result2.files_after assert_all_changes( result, result2, [ - script.venv / 'src', - script.venv / 'build', - script.site_packages / 'easy-install.pth', + script.venv / "src", + script.venv / "build", + script.site_packages / "easy-install.pth", ], ) @@ -312,7 +312,7 @@ def test_uninstall_editable_with_source_outside_venv(script, tmpdir): """ try: temp = mkdtemp() - temp_pkg_dir = join(temp, 'pip-test-package') + temp_pkg_dir = join(temp, "pip-test-package") _test_uninstall_editable_with_source_outside_venv(script, tmpdir, temp_pkg_dir) finally: rmtree(temp) @@ -320,21 +320,21 @@ def test_uninstall_editable_with_source_outside_venv(script, tmpdir): def _test_uninstall_editable_with_source_outside_venv(script, tmpdir, temp_pkg_dir): result = script.run( - 'git', - 'clone', - local_repo('git+git://github.com/pypa/pip-test-package', tmpdir), + "git", + "clone", + local_repo("git+git://github.com/pypa/pip-test-package", tmpdir), temp_pkg_dir, expect_stderr=True, ) - result2 = script.pip('install', '-e', temp_pkg_dir) + result2 = script.pip("install", "-e", temp_pkg_dir) assert ( - join(script.site_packages, 'pip-test-package.egg-link') in result2.files_created + join(script.site_packages, "pip-test-package.egg-link") in result2.files_created ), list(result2.files_created.keys()) - result3 = script.pip('uninstall', '-y', 'pip-test-package') + result3 = script.pip("uninstall", "-y", "pip-test-package") assert_all_changes( result, 
result3, - [script.venv / 'build', script.site_packages / 'easy-install.pth'], + [script.venv / "build", script.site_packages / "easy-install.pth"], ) @@ -345,7 +345,7 @@ def test_uninstall_from_reqs_file(script, tmpdir): Test uninstall from a requirements file. """ - local_svn_url = local_checkout('svn+http://svn.colorstudy.com/INITools', tmpdir) + local_svn_url = local_checkout("svn+http://svn.colorstudy.com/INITools", tmpdir) script.scratch_path.joinpath("test-req.txt").write_text( textwrap.dedent( """ @@ -356,7 +356,7 @@ def test_uninstall_from_reqs_file(script, tmpdir): ) % local_svn_url ) - result = script.pip('install', '-r', 'test-req.txt') + result = script.pip("install", "-r", "test-req.txt") script.scratch_path.joinpath("test-req.txt").write_text( textwrap.dedent( """ @@ -372,15 +372,15 @@ def test_uninstall_from_reqs_file(script, tmpdir): ) % local_svn_url ) - result2 = script.pip('uninstall', '-r', 'test-req.txt', '-y') + result2 = script.pip("uninstall", "-r", "test-req.txt", "-y") assert_all_changes( result, result2, [ - script.venv / 'build', - script.venv / 'src', - script.scratch / 'test-req.txt', - script.site_packages / 'easy-install.pth', + script.venv / "build", + script.venv / "src", + script.scratch / "test-req.txt", + script.site_packages / "easy-install.pth", ], ) @@ -395,7 +395,7 @@ def test_uninstallpathset_no_paths(caplog): caplog.set_level(logging.INFO) - test_dist = get_distribution('pip') + test_dist = get_distribution("pip") uninstall_set = UninstallPathSet(test_dist) uninstall_set.remove() # with no files added to set @@ -428,10 +428,10 @@ def test_uninstall_wheel(script, data): Test uninstalling a wheel """ package = data.packages.joinpath("simple.dist-0.1-py2.py3-none-any.whl") - result = script.pip('install', package, '--no-index') - dist_info_folder = script.site_packages / 'simple.dist-0.1.dist-info' + result = script.pip("install", package, "--no-index") + dist_info_folder = script.site_packages / "simple.dist-0.1.dist-info" 
assert dist_info_folder in result.files_created - result2 = script.pip('uninstall', 'simple.dist', '-y') + result2 = script.pip("uninstall", "simple.dist", "-y") assert_all_changes(result, result2, []) @@ -442,17 +442,17 @@ def test_uninstall_with_symlink(script, data, tmpdir): https://github.com/pypa/pip/issues/6892 """ package = data.packages.joinpath("simple.dist-0.1-py2.py3-none-any.whl") - script.pip('install', package, '--no-index') + script.pip("install", package, "--no-index") symlink_target = tmpdir / "target" symlink_target.mkdir() symlink_source = script.site_packages / "symlink" (script.base_path / symlink_source).symlink_to(symlink_target) st_mode = symlink_target.stat().st_mode - distinfo_path = script.site_packages_path / 'simple.dist-0.1.dist-info' - record_path = distinfo_path / 'RECORD' + distinfo_path = script.site_packages_path / "simple.dist-0.1.dist-info" + record_path = distinfo_path / "RECORD" with open(record_path, "a") as f: f.write("symlink,,\n") - uninstall_result = script.pip('uninstall', 'simple.dist', '-y') + uninstall_result = script.pip("uninstall", "simple.dist", "-y") assert symlink_source in uninstall_result.files_deleted assert symlink_target.stat().st_mode == st_mode @@ -460,20 +460,20 @@ def test_uninstall_with_symlink(script, data, tmpdir): def test_uninstall_setuptools_develop_install(script, data): """Try uninstall after setup.py develop followed of setup.py install""" pkg_path = data.packages.joinpath("FSPkg") - script.run('python', 'setup.py', 'develop', expect_stderr=True, cwd=pkg_path) - script.run('python', 'setup.py', 'install', expect_stderr=True, cwd=pkg_path) - list_result = script.pip('list', '--format=json') + script.run("python", "setup.py", "develop", expect_stderr=True, cwd=pkg_path) + script.run("python", "setup.py", "install", expect_stderr=True, cwd=pkg_path) + list_result = script.pip("list", "--format=json") assert {"name": os.path.normcase("FSPkg"), "version": "0.1.dev0"} in json.loads( 
list_result.stdout ), str(list_result) # Uninstall both develop and install - uninstall = script.pip('uninstall', 'FSPkg', '-y') - assert any(filename.endswith('.egg') for filename in uninstall.files_deleted.keys()) - uninstall2 = script.pip('uninstall', 'FSPkg', '-y') + uninstall = script.pip("uninstall", "FSPkg", "-y") + assert any(filename.endswith(".egg") for filename in uninstall.files_deleted.keys()) + uninstall2 = script.pip("uninstall", "FSPkg", "-y") assert ( - join(script.site_packages, 'FSPkg.egg-link') in uninstall2.files_deleted + join(script.site_packages, "FSPkg.egg-link") in uninstall2.files_deleted ), list(uninstall2.files_deleted.keys()) - list_result2 = script.pip('list', '--format=json') + list_result2 = script.pip("list", "--format=json") assert "FSPkg" not in {p["name"] for p in json.loads(list_result2.stdout)} @@ -482,40 +482,40 @@ def test_uninstall_editable_and_pip_install(script, data): # SETUPTOOLS_SYS_PATH_TECHNIQUE=raw removes the assumption that `-e` # installs are always higher priority than regular installs. # This becomes the default behavior in setuptools 25. 
- script.environ['SETUPTOOLS_SYS_PATH_TECHNIQUE'] = 'raw' + script.environ["SETUPTOOLS_SYS_PATH_TECHNIQUE"] = "raw" pkg_path = data.packages.joinpath("FSPkg") - script.pip('install', '-e', '.', expect_stderr=True, cwd=pkg_path) + script.pip("install", "-e", ".", expect_stderr=True, cwd=pkg_path) # ensure both are installed with --ignore-installed: - script.pip('install', '--ignore-installed', '.', expect_stderr=True, cwd=pkg_path) - list_result = script.pip('list', '--format=json') + script.pip("install", "--ignore-installed", ".", expect_stderr=True, cwd=pkg_path) + list_result = script.pip("list", "--format=json") assert {"name": "FSPkg", "version": "0.1.dev0"} in json.loads(list_result.stdout) # Uninstall both develop and install - uninstall = script.pip('uninstall', 'FSPkg', '-y') + uninstall = script.pip("uninstall", "FSPkg", "-y") assert not any( - filename.endswith('.egg-link') for filename in uninstall.files_deleted.keys() + filename.endswith(".egg-link") for filename in uninstall.files_deleted.keys() ) - uninstall2 = script.pip('uninstall', 'FSPkg', '-y') + uninstall2 = script.pip("uninstall", "FSPkg", "-y") assert ( - join(script.site_packages, 'FSPkg.egg-link') in uninstall2.files_deleted + join(script.site_packages, "FSPkg.egg-link") in uninstall2.files_deleted ), list(uninstall2.files_deleted.keys()) - list_result2 = script.pip('list', '--format=json') + list_result2 = script.pip("list", "--format=json") assert "FSPkg" not in {p["name"] for p in json.loads(list_result2.stdout)} def test_uninstall_ignores_missing_packages(script, data): """Uninstall of a non existent package prints a warning and exits cleanly """ - result = script.pip('uninstall', '-y', 'non-existent-pkg', expect_stderr=True) + result = script.pip("uninstall", "-y", "non-existent-pkg", expect_stderr=True) assert "Skipping non-existent-pkg as it is not installed." 
in result.stderr assert result.returncode == 0, "Expected clean exit" def test_uninstall_ignores_missing_packages_and_uninstalls_rest(script, data): - script.pip_install_local('simple') + script.pip_install_local("simple") result = script.pip( - 'uninstall', '-y', 'non-existent-pkg', 'simple', expect_stderr=True + "uninstall", "-y", "non-existent-pkg", "simple", expect_stderr=True ) assert "Skipping non-existent-pkg as it is not installed." in result.stderr diff --git a/tests/functional/test_uninstall_user.py b/tests/functional/test_uninstall_user.py index 0e1a52d115c..88d8ab4d538 100644 --- a/tests/functional/test_uninstall_user.py +++ b/tests/functional/test_uninstall_user.py @@ -15,9 +15,9 @@ def test_uninstall_from_usersite(self, script): """ Test uninstall from usersite """ - result1 = script.pip('install', '--user', 'INITools==0.3') - result2 = script.pip('uninstall', '-y', 'INITools') - assert_all_changes(result1, result2, [script.venv / 'build', 'cache']) + result1 = script.pip("install", "--user", "INITools==0.3") + result2 = script.pip("uninstall", "-y", "INITools") + assert_all_changes(result1, result2, [script.venv / "build", "cache"]) def test_uninstall_from_usersite_with_dist_in_global_site(self, virtualenv, script): """ @@ -25,25 +25,25 @@ def test_uninstall_from_usersite_with_dist_in_global_site(self, virtualenv, scri """ _patch_dist_in_site_packages(virtualenv) - script.pip_install_local('pip-test-package==0.1', '--no-binary=:all:') + script.pip_install_local("pip-test-package==0.1", "--no-binary=:all:") result2 = script.pip_install_local( - '--user', 'pip-test-package==0.1.1', '--no-binary=:all:' + "--user", "pip-test-package==0.1.1", "--no-binary=:all:" ) - result3 = script.pip('uninstall', '-vy', 'pip-test-package') + result3 = script.pip("uninstall", "-vy", "pip-test-package") # uninstall console is mentioning user scripts, but not global scripts assert normcase(script.user_bin_path) in result3.stdout, str(result3) assert 
normcase(script.bin_path) not in result3.stdout, str(result3) # uninstall worked - assert_all_changes(result2, result3, [script.venv / 'build', 'cache']) + assert_all_changes(result2, result3, [script.venv / "build", "cache"]) # site still has 0.2 (can't look in result1; have to check) egg_info_folder = ( script.base_path / script.site_packages - / 'pip_test_package-0.1-py%s.egg-info' + / "pip_test_package-0.1-py%s.egg-info" % pyversion ) assert isdir(egg_info_folder) @@ -56,16 +56,16 @@ def test_uninstall_editable_from_usersite(self, script, data): # install to_install = data.packages.joinpath("FSPkg") - result1 = script.pip('install', '--user', '-e', to_install, expect_error=False) - egg_link = script.user_site / 'FSPkg.egg-link' + result1 = script.pip("install", "--user", "-e", to_install, expect_error=False) + egg_link = script.user_site / "FSPkg.egg-link" assert egg_link in result1.files_created, str(result1.stdout) # uninstall - result2 = script.pip('uninstall', '-y', 'FSPkg') + result2 = script.pip("uninstall", "-y", "FSPkg") assert not isfile(script.base_path / egg_link) assert_all_changes( result1, result2, - [script.venv / 'build', 'cache', script.user_site / 'easy-install.pth'], + [script.venv / "build", "cache", script.user_site / "easy-install.pth"], ) diff --git a/tests/functional/test_vcs_bazaar.py b/tests/functional/test_vcs_bazaar.py index 7d92aab30ea..de7ddd1106f 100644 --- a/tests/functional/test_vcs_bazaar.py +++ b/tests/functional/test_vcs_bazaar.py @@ -18,7 +18,7 @@ @pytest.mark.skipif( - 'TRAVIS' not in os.environ, reason='Bazaar is only required under Travis' + "TRAVIS" not in os.environ, reason="Bazaar is only required under Travis" ) def test_ensure_bzr_available(): """Make sure that bzr is available when running in Travis.""" @@ -28,45 +28,45 @@ def test_ensure_bzr_available(): @need_bzr def test_export(script, tmpdir): """Test that a Bazaar branch can be exported.""" - source_dir = tmpdir / 'test-source' + source_dir = tmpdir / 
"test-source" source_dir.mkdir() - create_file(source_dir / 'test_file', 'something') + create_file(source_dir / "test_file", "something") - _vcs_add(script, str(source_dir), vcs='bazaar') + _vcs_add(script, str(source_dir), vcs="bazaar") - export_dir = str(tmpdir / 'export') - url = hide_url('bzr+' + _test_path_to_file_url(source_dir)) + export_dir = str(tmpdir / "export") + url = hide_url("bzr+" + _test_path_to_file_url(source_dir)) Bazaar().export(export_dir, url=url) - assert os.listdir(export_dir) == ['test_file'] + assert os.listdir(export_dir) == ["test_file"] @need_bzr def test_export_rev(script, tmpdir): """Test that a Bazaar branch can be exported, specifying a rev.""" - source_dir = tmpdir / 'test-source' + source_dir = tmpdir / "test-source" source_dir.mkdir() # Create a single file that is changed by two revisions. - create_file(source_dir / 'test_file', 'something initial') - _vcs_add(script, str(source_dir), vcs='bazaar') + create_file(source_dir / "test_file", "something initial") + _vcs_add(script, str(source_dir), vcs="bazaar") - create_file(source_dir / 'test_file', 'something new') + create_file(source_dir / "test_file", "something new") script.run( - 'bzr', - 'commit', - '-q', - '--author', - 'pip ', - '-m', - 'change test file', + "bzr", + "commit", + "-q", + "--author", + "pip ", + "-m", + "change test file", cwd=source_dir, ) - export_dir = tmpdir / 'export' - url = hide_url('bzr+' + _test_path_to_file_url(source_dir) + '@1') + export_dir = tmpdir / "export" + url = hide_url("bzr+" + _test_path_to_file_url(source_dir) + "@1") Bazaar().export(str(export_dir), url=url) - with open(export_dir / 'test_file', 'r') as f: - assert f.read() == 'something initial' + with open(export_dir / "test_file", "r") as f: + assert f.read() == "something initial" diff --git a/tests/functional/test_vcs_git.py b/tests/functional/test_vcs_git.py index 09b69940300..31319c37107 100644 --- a/tests/functional/test_vcs_git.py +++ b/tests/functional/test_vcs_git.py @@ 
-12,22 +12,22 @@ def get_head_sha(script, dest): """Return the HEAD sha.""" - result = script.run('git', 'rev-parse', 'HEAD', cwd=dest) + result = script.run("git", "rev-parse", "HEAD", cwd=dest) sha = result.stdout.strip() return sha def checkout_ref(script, repo_dir, ref): - script.run('git', 'checkout', ref, cwd=repo_dir) + script.run("git", "checkout", ref, cwd=repo_dir) def checkout_new_branch(script, repo_dir, branch): - script.run('git', 'checkout', '-b', branch, cwd=repo_dir) + script.run("git", "checkout", "-b", branch, cwd=repo_dir) def do_commit(script, dest): - _git_commit(script, dest, message='test commit', args=['--allow-empty']) + _git_commit(script, dest, message="test commit", args=["--allow-empty"]) return get_head_sha(script, dest) @@ -49,43 +49,43 @@ def test_git_dir_ignored(tmpdir): """ Test that a GIT_DIR environment variable is ignored. """ - repo_path = tmpdir / 'test-repo' + repo_path = tmpdir / "test-repo" repo_path.mkdir() repo_dir = str(repo_path) - env = {'GIT_DIR': 'foo'} + env = {"GIT_DIR": "foo"} # If GIT_DIR is not ignored, then os.listdir() will return ['foo']. - Git.run_command(['init', repo_dir], cwd=repo_dir, extra_environ=env) - assert os.listdir(repo_dir) == ['.git'] + Git.run_command(["init", repo_dir], cwd=repo_dir, extra_environ=env) + assert os.listdir(repo_dir) == [".git"] def test_git_work_tree_ignored(tmpdir): """ Test that a GIT_WORK_TREE environment variable is ignored. """ - repo_path = tmpdir / 'test-repo' + repo_path = tmpdir / "test-repo" repo_path.mkdir() repo_dir = str(repo_path) - Git.run_command(['init', repo_dir], cwd=repo_dir) + Git.run_command(["init", repo_dir], cwd=repo_dir) # Choose a directory relative to the cwd that does not exist. # If GIT_WORK_TREE is not ignored, then the command will error out # with: "fatal: This operation must be run in a work tree". 
- env = {'GIT_WORK_TREE': 'foo'} - Git.run_command(['status', repo_dir], extra_environ=env, cwd=repo_dir) + env = {"GIT_WORK_TREE": "foo"} + Git.run_command(["status", repo_dir], extra_environ=env, cwd=repo_dir) def test_get_remote_url(script, tmpdir): - source_dir = tmpdir / 'source' + source_dir = tmpdir / "source" source_dir.mkdir() source_url = _test_path_to_file_url(source_dir) source_dir = str(source_dir) - script.run('git', 'init', cwd=source_dir) + script.run("git", "init", cwd=source_dir) do_commit(script, source_dir) - repo_dir = str(tmpdir / 'repo') - script.run('git', 'clone', source_url, repo_dir) + repo_dir = str(tmpdir / "repo") + script.run("git", "clone", source_url, repo_dir) remote_url = Git.get_remote_url(repo_dir) assert remote_url == source_url @@ -95,11 +95,11 @@ def test_get_remote_url__no_remote(script, tmpdir): """ Test a repo with no remote. """ - repo_dir = tmpdir / 'temp-repo' + repo_dir = tmpdir / "temp-repo" repo_dir.mkdir() repo_dir = str(repo_dir) - script.run('git', 'init', cwd=repo_dir) + script.run("git", "init", cwd=repo_dir) with pytest.raises(RemoteNotFoundError): Git.get_remote_url(repo_dir) @@ -108,15 +108,15 @@ def test_get_remote_url__no_remote(script, tmpdir): def test_get_current_branch(script): repo_dir = str(script.scratch_path) - script.run('git', 'init', cwd=repo_dir) + script.run("git", "init", cwd=repo_dir) sha = do_commit(script, repo_dir) - assert Git.get_current_branch(repo_dir) == 'master' + assert Git.get_current_branch(repo_dir) == "master" # Switch to a branch with the same SHA as "master" but whose name # is alphabetically after. - checkout_new_branch(script, repo_dir, 'release') - assert Git.get_current_branch(repo_dir) == 'release' + checkout_new_branch(script, repo_dir, "release") + assert Git.get_current_branch(repo_dir) == "release" # Also test the detached HEAD case. 
checkout_ref(script, repo_dir, sha) @@ -129,23 +129,23 @@ def test_get_current_branch__branch_and_tag_same_name(script, tmpdir): and tag have the same name. """ repo_dir = str(tmpdir) - script.run('git', 'init', cwd=repo_dir) + script.run("git", "init", cwd=repo_dir) do_commit(script, repo_dir) - checkout_new_branch(script, repo_dir, 'dev') + checkout_new_branch(script, repo_dir, "dev") # Create a tag with the same name as the branch. - script.run('git', 'tag', 'dev', cwd=repo_dir) + script.run("git", "tag", "dev", cwd=repo_dir) - assert Git.get_current_branch(repo_dir) == 'dev' + assert Git.get_current_branch(repo_dir) == "dev" # Now try with the tag checked out. - checkout_ref(script, repo_dir, 'refs/tags/dev') + checkout_ref(script, repo_dir, "refs/tags/dev") assert Git.get_current_branch(repo_dir) is None def test_get_revision_sha(script): repo_dir = str(script.scratch_path) - script.run('git', 'init', cwd=repo_dir) + script.run("git", "init", cwd=repo_dir) shas = add_commits(script, repo_dir, count=3) tag_sha = shas[0] @@ -153,46 +153,46 @@ def test_get_revision_sha(script): head_sha = shas[2] assert head_sha == shas[-1] - origin_ref = 'refs/remotes/origin/origin-branch' - generic_ref = 'refs/generic-ref' + origin_ref = "refs/remotes/origin/origin-branch" + generic_ref = "refs/generic-ref" - script.run('git', 'branch', 'local-branch', head_sha, cwd=repo_dir) - script.run('git', 'tag', 'v1.0', tag_sha, cwd=repo_dir) - script.run('git', 'update-ref', origin_ref, origin_sha, cwd=repo_dir) + script.run("git", "branch", "local-branch", head_sha, cwd=repo_dir) + script.run("git", "tag", "v1.0", tag_sha, cwd=repo_dir) + script.run("git", "update-ref", origin_ref, origin_sha, cwd=repo_dir) script.run( - 'git', - 'update-ref', - 'refs/remotes/upstream/upstream-branch', + "git", + "update-ref", + "refs/remotes/upstream/upstream-branch", head_sha, cwd=repo_dir, ) - script.run('git', 'update-ref', generic_ref, head_sha, cwd=repo_dir) + script.run("git", "update-ref", 
generic_ref, head_sha, cwd=repo_dir) # Test two tags pointing to the same sha. - script.run('git', 'tag', 'v2.0', tag_sha, cwd=repo_dir) + script.run("git", "tag", "v2.0", tag_sha, cwd=repo_dir) # Test tags sharing the same suffix as another tag, both before and # after the suffix alphabetically. - script.run('git', 'tag', 'aaa/v1.0', head_sha, cwd=repo_dir) - script.run('git', 'tag', 'zzz/v1.0', head_sha, cwd=repo_dir) + script.run("git", "tag", "aaa/v1.0", head_sha, cwd=repo_dir) + script.run("git", "tag", "zzz/v1.0", head_sha, cwd=repo_dir) - check_rev(repo_dir, 'v1.0', (tag_sha, False)) - check_rev(repo_dir, 'v2.0', (tag_sha, False)) - check_rev(repo_dir, 'origin-branch', (origin_sha, True)) + check_rev(repo_dir, "v1.0", (tag_sha, False)) + check_rev(repo_dir, "v2.0", (tag_sha, False)) + check_rev(repo_dir, "origin-branch", (origin_sha, True)) ignored_names = [ # Local branches should be ignored. - 'local-branch', + "local-branch", # Non-origin remote branches should be ignored. - 'upstream-branch', + "upstream-branch", # Generic refs should be ignored. - 'generic-ref', + "generic-ref", # Fully spelled-out refs should be ignored. origin_ref, generic_ref, # Test passing a valid commit hash. tag_sha, # Test passing a non-existent name. - 'does-not-exist', + "does-not-exist", ] for name in ignored_names: check_rev(repo_dir, name, (None, False)) @@ -203,12 +203,12 @@ def test_is_commit_id_equal(script): Test Git.is_commit_id_equal(). 
""" version_pkg_path = _create_test_package(script) - script.run('git', 'branch', 'branch0.1', cwd=version_pkg_path) - commit = script.run('git', 'rev-parse', 'HEAD', cwd=version_pkg_path).stdout.strip() + script.run("git", "branch", "branch0.1", cwd=version_pkg_path) + commit = script.run("git", "rev-parse", "HEAD", cwd=version_pkg_path).stdout.strip() assert Git.is_commit_id_equal(version_pkg_path, commit) assert not Git.is_commit_id_equal(version_pkg_path, commit[:7]) - assert not Git.is_commit_id_equal(version_pkg_path, 'branch0.1') - assert not Git.is_commit_id_equal(version_pkg_path, 'abc123') + assert not Git.is_commit_id_equal(version_pkg_path, "branch0.1") + assert not Git.is_commit_id_equal(version_pkg_path, "abc123") # Also check passing a None value. assert not Git.is_commit_id_equal(version_pkg_path, None) diff --git a/tests/functional/test_warning.py b/tests/functional/test_warning.py index 6bf8b91bb4b..776fd74ca13 100644 --- a/tests/functional/test_warning.py +++ b/tests/functional/test_warning.py @@ -2,10 +2,10 @@ def test_environ(script, tmpdir): - demo = tmpdir.joinpath('warnings_demo.py') + demo = tmpdir.joinpath("warnings_demo.py") demo.write_text( textwrap.dedent( - ''' + """ from logging import basicConfig from pip._internal.utils import deprecation @@ -13,15 +13,15 @@ def test_environ(script, tmpdir): basicConfig() deprecation.deprecated("deprecated!", replacement=None, gone_in=None) - ''' + """ ) ) - result = script.run('python', demo, expect_stderr=True) - expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n' + result = script.run("python", demo, expect_stderr=True) + expected = "WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n" assert result.stderr == expected # $PYTHONWARNINGS was added in python2.7 - script.environ['PYTHONWARNINGS'] = 'ignore' - result = script.run('python', demo) - assert result.stderr == '' + script.environ["PYTHONWARNINGS"] = "ignore" + result = script.run("python", demo) + assert 
result.stderr == "" diff --git a/tests/functional/test_wheel.py b/tests/functional/test_wheel.py index 34966e23312..dce6e720a4e 100644 --- a/tests/functional/test_wheel.py +++ b/tests/functional/test_wheel.py @@ -16,8 +16,8 @@ def auto_with_wheel(with_wheel): def add_files_to_dist_directory(folder): - (folder / 'dist').mkdir(parents=True) - (folder / 'dist' / 'a_name-0.0.1.tar.gz').write_text("hello") + (folder / "dist").mkdir(parents=True) + (folder / "dist" / "a_name-0.0.1.tar.gz").write_text("hello") # Not adding a wheel file since that confuses setuptools' backend. # (folder / 'dist' / 'a_name-0.0.1-py2.py3-none-any.whl').write_text( # "hello" @@ -28,7 +28,7 @@ def test_wheel_exit_status_code_when_no_requirements(script): """ Test wheel exit status code when no requirements specified """ - result = script.pip('wheel', expect_error=True) + result = script.pip("wheel", expect_error=True) assert "You must give at least one requirement to wheel" in result.stderr assert result.returncode == ERROR @@ -38,15 +38,15 @@ def test_wheel_exit_status_code_when_blank_requirements_file(script): Test wheel exit status code when blank requirements file specified """ script.scratch_path.joinpath("blank.txt").write_text("\n") - script.pip('wheel', '-r', 'blank.txt') + script.pip("wheel", "-r", "blank.txt") def test_pip_wheel_success(script, data): """ Test 'pip wheel' success. 
""" - result = script.pip('wheel', '--no-index', '-f', data.find_links, 'simple==3.0') - wheel_file_name = 'simple-3.0-py%s-none-any.whl' % pyversion[0] + result = script.pip("wheel", "--no-index", "-f", data.find_links, "simple==3.0") + wheel_file_name = "simple-3.0-py%s-none-any.whl" % pyversion[0] wheel_file_path = script.scratch / wheel_file_name assert re.search( r"Created wheel for simple: " @@ -62,24 +62,24 @@ def test_basic_pip_wheel_downloads_wheels(script, data): """ Test 'pip wheel' downloads wheels """ - result = script.pip('wheel', '--no-index', '-f', data.find_links, 'simple.dist') - wheel_file_name = 'simple.dist-0.1-py2.py3-none-any.whl' + result = script.pip("wheel", "--no-index", "-f", data.find_links, "simple.dist") + wheel_file_name = "simple.dist-0.1-py2.py3-none-any.whl" wheel_file_path = script.scratch / wheel_file_name assert wheel_file_path in result.files_created, result.stdout assert "Saved" in result.stdout, result.stdout def test_pip_wheel_builds_when_no_binary_set(script, data): - data.packages.joinpath('simple-3.0-py2.py3-none-any.whl').touch() + data.packages.joinpath("simple-3.0-py2.py3-none-any.whl").touch() # Check that the wheel package is ignored res = script.pip( - 'wheel', - '--no-index', - '--no-binary', - ':all:', - '-f', + "wheel", + "--no-index", + "--no-binary", + ":all:", + "-f", data.find_links, - 'simple==3.0', + "simple==3.0", ) assert "Building wheel for simple" in str(res), str(res) @@ -88,11 +88,11 @@ def test_pip_wheel_builds_editable_deps(script, data): """ Test 'pip wheel' finds and builds dependencies of editables """ - editable_path = os.path.join(data.src, 'requires_simple') + editable_path = os.path.join(data.src, "requires_simple") result = script.pip( - 'wheel', '--no-index', '-f', data.find_links, '-e', editable_path + "wheel", "--no-index", "-f", data.find_links, "-e", editable_path ) - wheel_file_name = 'simple-1.0-py%s-none-any.whl' % pyversion[0] + wheel_file_name = "simple-1.0-py%s-none-any.whl" % 
pyversion[0] wheel_file_path = script.scratch / wheel_file_name assert wheel_file_path in result.files_created, result.stdout @@ -101,11 +101,11 @@ def test_pip_wheel_builds_editable(script, data): """ Test 'pip wheel' builds an editable package """ - editable_path = os.path.join(data.src, 'simplewheel-1.0') + editable_path = os.path.join(data.src, "simplewheel-1.0") result = script.pip( - 'wheel', '--no-index', '-f', data.find_links, '-e', editable_path + "wheel", "--no-index", "-f", data.find_links, "-e", editable_path ) - wheel_file_name = 'simplewheel-1.0-py%s-none-any.whl' % pyversion[0] + wheel_file_name = "simplewheel-1.0-py%s-none-any.whl" % pyversion[0] wheel_file_path = script.scratch / wheel_file_name assert wheel_file_path in result.files_created, result.stdout @@ -115,14 +115,14 @@ def test_pip_wheel_fail(script, data): Test 'pip wheel' failure. """ result = script.pip( - 'wheel', - '--no-index', - '-f', + "wheel", + "--no-index", + "-f", data.find_links, - 'wheelbroken==0.1', + "wheelbroken==0.1", expect_error=True, ) - wheel_file_name = 'wheelbroken-0.1-py%s-none-any.whl' % pyversion[0] + wheel_file_name = "wheelbroken-0.1-py%s-none-any.whl" % pyversion[0] wheel_file_path = script.scratch / wheel_file_name assert wheel_file_path not in result.files_created, ( wheel_file_path, @@ -137,18 +137,18 @@ def test_no_clean_option_blocks_cleaning_after_wheel(script, data): """ Test --no-clean option blocks cleaning after wheel build """ - build = script.venv_path / 'build' + build = script.venv_path / "build" result = script.pip( - 'wheel', - '--no-clean', - '--no-index', - '--build', + "wheel", + "--no-clean", + "--no-index", + "--build", build, - '--find-links=%s' % data.find_links, - 'simple', + "--find-links=%s" % data.find_links, + "simple", expect_temp=True, ) - build = build / 'simple' + build = build / "simple" assert exists(build), "build/simple should still exist %s" % str(result) @@ -158,8 +158,8 @@ def test_pip_wheel_source_deps(script, data): of 
wheels """ # 'requires_source' is a wheel that depends on the 'source' project - result = script.pip('wheel', '--no-index', '-f', data.find_links, 'requires_source') - wheel_file_name = 'source-1.0-py%s-none-any.whl' % pyversion[0] + result = script.pip("wheel", "--no-index", "-f", data.find_links, "requires_source") + wheel_file_name = "source-1.0-py%s-none-any.whl" % pyversion[0] wheel_file_path = script.scratch / wheel_file_name assert wheel_file_path in result.files_created, result.stdout assert "Successfully built source" in result.stdout, result.stdout @@ -172,19 +172,19 @@ def test_pip_wheel_fail_cause_of_previous_build_dir(script, data): """ # Given that I have a previous build dir of the `simple` package - build = script.venv_path / 'build' / 'simple' + build = script.venv_path / "build" / "simple" os.makedirs(build) - write_delete_marker_file(script.venv_path / 'build' / 'simple') - build.joinpath('setup.py').write_text('#') + write_delete_marker_file(script.venv_path / "build" / "simple") + build.joinpath("setup.py").write_text("#") # When I call pip trying to install things again result = script.pip( - 'wheel', - '--no-index', - '--find-links=%s' % data.find_links, - '--build', - script.venv_path / 'build', - 'simple==3.0', + "wheel", + "--no-index", + "--find-links=%s" % data.find_links, + "--build", + script.venv_path / "build", + "simple==3.0", expect_error=True, expect_temp=True, ) @@ -197,15 +197,15 @@ def test_wheel_package_with_latin1_setup(script, data): """Create a wheel from a package with latin-1 encoded setup.py.""" pkg_to_wheel = data.packages.joinpath("SetupPyLatin1") - result = script.pip('wheel', pkg_to_wheel) - assert 'Successfully built SetupPyUTF8' in result.stdout + result = script.pip("wheel", pkg_to_wheel) + assert "Successfully built SetupPyUTF8" in result.stdout def test_pip_wheel_with_pep518_build_reqs(script, data, common_wheels): result = script.pip( - 'wheel', '--no-index', '-f', data.find_links, '-f', common_wheels, 
'pep518==3.0' + "wheel", "--no-index", "-f", data.find_links, "-f", common_wheels, "pep518==3.0" ) - wheel_file_name = 'pep518-3.0-py%s-none-any.whl' % pyversion[0] + wheel_file_name = "pep518-3.0-py%s-none-any.whl" % pyversion[0] wheel_file_path = script.scratch / wheel_file_name assert wheel_file_path in result.files_created, result.stdout assert "Successfully built pep518" in result.stdout, result.stdout @@ -213,16 +213,16 @@ def test_pip_wheel_with_pep518_build_reqs(script, data, common_wheels): def test_pip_wheel_with_pep518_build_reqs_no_isolation(script, data): - script.pip_install_local('simplewheel==2.0') + script.pip_install_local("simplewheel==2.0") result = script.pip( - 'wheel', - '--no-index', - '-f', + "wheel", + "--no-index", + "-f", data.find_links, - '--no-build-isolation', - 'pep518==3.0', + "--no-build-isolation", + "pep518==3.0", ) - wheel_file_name = 'pep518-3.0-py%s-none-any.whl' % pyversion[0] + wheel_file_name = "pep518-3.0-py%s-none-any.whl" % pyversion[0] wheel_file_path = script.scratch / wheel_file_name assert wheel_file_path in result.files_created, result.stdout assert "Successfully built pep518" in result.stdout, result.stdout @@ -230,11 +230,11 @@ def test_pip_wheel_with_pep518_build_reqs_no_isolation(script, data): def test_pip_wheel_with_user_set_in_config(script, data, common_wheels): - config_file = script.scratch_path / 'pip.conf' - script.environ['PIP_CONFIG_FILE'] = str(config_file) + config_file = script.scratch_path / "pip.conf" + script.environ["PIP_CONFIG_FILE"] = str(config_file) config_file.write_text("[install]\nuser = true") result = script.pip( - 'wheel', data.src / 'withpyproject', '--no-index', '-f', common_wheels + "wheel", data.src / "withpyproject", "--no-index", "-f", common_wheels ) assert "Successfully built withpyproject" in result.stdout, result.stdout @@ -243,13 +243,13 @@ def test_pip_wheel_with_user_set_in_config(script, data, common_wheels): def 
test_pep517_wheels_are_not_confused_with_other_files(script, tmpdir, data): """Check correct wheels are copied. (#6196) """ - pkg_to_wheel = data.src / 'withpyproject' + pkg_to_wheel = data.src / "withpyproject" add_files_to_dist_directory(pkg_to_wheel) - result = script.pip('wheel', pkg_to_wheel, '-w', script.scratch_path) + result = script.pip("wheel", pkg_to_wheel, "-w", script.scratch_path) assert "Installing build dependencies" in result.stdout, result.stdout - wheel_file_name = 'withpyproject-0.0.1-py%s-none-any.whl' % pyversion[0] + wheel_file_name = "withpyproject-0.0.1-py%s-none-any.whl" % pyversion[0] wheel_file_path = script.scratch / wheel_file_name assert wheel_file_path in result.files_created, result.stdout @@ -257,12 +257,12 @@ def test_pep517_wheels_are_not_confused_with_other_files(script, tmpdir, data): def test_legacy_wheels_are_not_confused_with_other_files(script, tmpdir, data): """Check correct wheels are copied. (#6196) """ - pkg_to_wheel = data.src / 'simplewheel-1.0' + pkg_to_wheel = data.src / "simplewheel-1.0" add_files_to_dist_directory(pkg_to_wheel) - result = script.pip('wheel', pkg_to_wheel, '-w', script.scratch_path) + result = script.pip("wheel", pkg_to_wheel, "-w", script.scratch_path) assert "Installing build dependencies" not in result.stdout, result.stdout - wheel_file_name = 'simplewheel-1.0-py%s-none-any.whl' % pyversion[0] + wheel_file_name = "simplewheel-1.0-py%s-none-any.whl" % pyversion[0] wheel_file_path = script.scratch / wheel_file_name assert wheel_file_path in result.files_created, result.stdout diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index e47ea0b16bc..aeaad426c11 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -50,12 +50,12 @@ def path_to_url(path): path = os.path.normpath(os.path.abspath(path)) drive, path = os.path.splitdrive(path) filepath = path.split(os.path.sep) - url = '/'.join(filepath) + url = "/".join(filepath) if drive: # Note: match urllib.request.pathname2url's # 
behavior: uppercase the drive letter. - return 'file:///' + drive.upper() + url - return 'file://' + url + return "file:///" + drive.upper() + url + return "file://" + url def _test_path_to_file_url(path): @@ -65,7 +65,7 @@ def _test_path_to_file_url(path): Args: path: a tests.lib.path.Path object. """ - return 'file://' + path.abspath.replace('\\', '/') + return "file://" + path.abspath.replace("\\", "/") def create_file(path, contents=None): @@ -227,25 +227,25 @@ def __init__(self, impl, verbose=False): if verbose: print(self.stdout) if self.stderr: - print('======= stderr ========') + print("======= stderr ========") print(self.stderr) - print('=======================') + print("=======================") def __getattr__(self, attr): return getattr(self._impl, attr) - if sys.platform == 'win32': + if sys.platform == "win32": @property def stdout(self): - return self._impl.stdout.replace('\r\n', '\n') + return self._impl.stdout.replace("\r\n", "\n") @property def stderr(self): - return self._impl.stderr.replace('\r\n', '\n') + return self._impl.stderr.replace("\r\n", "\n") def __str__(self): - return str(self._impl).replace('\r\n', '\n') + return str(self._impl).replace("\r\n", "\n") else: # Python doesn't automatically forward __str__ through __getattr__ @@ -266,7 +266,7 @@ def assert_installed( e = self.test_env if editable: - pkg_dir = e.venv / 'src' / pkg_name.lower() + pkg_dir = e.venv / "src" / pkg_name.lower() # If package was installed in a sub directory if sub_dir: pkg_dir = pkg_dir / sub_dir @@ -275,64 +275,64 @@ def assert_installed( pkg_dir = e.site_packages / pkg_name if use_user_site: - egg_link_path = e.user_site / pkg_name + '.egg-link' + egg_link_path = e.user_site / pkg_name + ".egg-link" else: - egg_link_path = e.site_packages / pkg_name + '.egg-link' + egg_link_path = e.site_packages / pkg_name + ".egg-link" if without_egg_link: if egg_link_path in self.files_created: raise TestFailure( - 'unexpected egg link file created: %r\n%s' % 
(egg_link_path, self) + "unexpected egg link file created: %r\n%s" % (egg_link_path, self) ) else: if egg_link_path not in self.files_created: raise TestFailure( - 'expected egg link file missing: %r\n%s' % (egg_link_path, self) + "expected egg link file missing: %r\n%s" % (egg_link_path, self) ) egg_link_file = self.files_created[egg_link_path] - egg_link_contents = egg_link_file.bytes.replace(os.linesep, '\n') + egg_link_contents = egg_link_file.bytes.replace(os.linesep, "\n") # FIXME: I don't understand why there's a trailing . here if not ( - egg_link_contents.endswith('\n.') + egg_link_contents.endswith("\n.") and egg_link_contents[:-2].endswith(pkg_dir) ): raise TestFailure( textwrap.dedent( - u'''\ + u"""\ Incorrect egg_link file %r Expected ending: %r ------- Actual contents ------- %s - -------------------------------''' - % (egg_link_file, pkg_dir + '\n.', repr(egg_link_contents)) + -------------------------------""" + % (egg_link_file, pkg_dir + "\n.", repr(egg_link_contents)) ) ) if use_user_site: - pth_file = e.user_site / 'easy-install.pth' + pth_file = e.user_site / "easy-install.pth" else: - pth_file = e.site_packages / 'easy-install.pth' + pth_file = e.site_packages / "easy-install.pth" if (pth_file in self.files_updated) == without_egg_link: raise TestFailure( - '%r unexpectedly %supdated by install' - % (pth_file, (not without_egg_link and 'not ' or '')) + "%r unexpectedly %supdated by install" + % (pth_file, (not without_egg_link and "not " or "")) ) if (pkg_dir in self.files_created) == (curdir in without_files): raise TestFailure( textwrap.dedent( - '''\ + """\ expected package directory %r %sto be created actually created: %s - ''' + """ ) % ( pkg_dir, - (curdir in without_files and 'not ' or ''), + (curdir in without_files and "not " or ""), sorted(self.files_created.keys()), ) ) @@ -341,14 +341,14 @@ def assert_installed( normalized_path = os.path.normpath(pkg_dir / f) if normalized_path not in self.files_created: raise TestFailure( - 
'Package directory %r missing expected content %r' % (pkg_dir, f) + "Package directory %r missing expected content %r" % (pkg_dir, f) ) for f in without_files: normalized_path = os.path.normpath(pkg_dir / f) if normalized_path in self.files_created: raise TestFailure( - 'Package directory %r has unexpected content %f' % (pkg_dir, f) + "Package directory %r has unexpected content %f" % (pkg_dir, f) ) @@ -385,29 +385,29 @@ def _check_stderr(stderr, allow_stderr_warning, allow_stderr_error): # sent directly to stderr and so bypass any configured log formatter. # The "--- Logging error ---" string is used in Python 3.4+, and # "Logged from file " is used in Python 2. - if line.startswith('--- Logging error ---') or line.startswith( - 'Logged from file ' + if line.startswith("--- Logging error ---") or line.startswith( + "Logged from file " ): - reason = 'stderr has a logging error, which is never allowed' + reason = "stderr has a logging error, which is never allowed" msg = make_check_stderr_message(stderr, line=line, reason=reason) raise RuntimeError(msg) if allow_stderr_error: continue - if line.startswith('ERROR: '): + if line.startswith("ERROR: "): reason = ( - 'stderr has an unexpected error ' - '(pass allow_stderr_error=True to permit this)' + "stderr has an unexpected error " + "(pass allow_stderr_error=True to permit this)" ) msg = make_check_stderr_message(stderr, line=line, reason=reason) raise RuntimeError(msg) if allow_stderr_warning: continue - if line.startswith('WARNING: ') or line.startswith(DEPRECATION_MSG_PREFIX): + if line.startswith("WARNING: ") or line.startswith(DEPRECATION_MSG_PREFIX): reason = ( - 'stderr has an unexpected warning ' - '(pass allow_stderr_warning=True to permit this)' + "stderr has an unexpected warning " + "(pass allow_stderr_warning=True to permit this)" ) msg = make_check_stderr_message(stderr, line=line, reason=reason) raise RuntimeError(msg) @@ -427,7 +427,7 @@ class PipTestEnvironment(TestFileEnvironment): # a name of the 
form xxxx_path and relative paths have a name that # does not end in '_path'. - exe = sys.platform == 'win32' and '.exe' or '' + exe = sys.platform == "win32" and ".exe" or "" verbose = False def __init__(self, base_path, *args, **kwargs): @@ -445,14 +445,14 @@ def __init__(self, base_path, *args, **kwargs): self.user_site_path = self.venv_path.joinpath( "user", site.USER_SITE[len(site.USER_BASE) + 1 :] ) - if sys.platform == 'win32': + if sys.platform == "win32": if sys.version_info >= (3, 5): scripts_base = Path( - os.path.normpath(self.user_site_path.joinpath('..')) + os.path.normpath(self.user_site_path.joinpath("..")) ) else: scripts_base = self.user_base_path - self.user_bin_path = scripts_base.joinpath('Scripts') + self.user_bin_path = scripts_base.joinpath("Scripts") else: self.user_bin_path = self.user_base_path.joinpath( self.bin_path - self.venv_path @@ -479,7 +479,7 @@ def __init__(self, base_path, *args, **kwargs): # Whether all pip invocations should expect stderr # (useful for Python version deprecation) - self.pip_expect_warning = kwargs.pop('pip_expect_warning', None) + self.pip_expect_warning = kwargs.pop("pip_expect_warning", None) # Call the TestFileEnvironment __init__ super(PipTestEnvironment, self).__init__(base_path, *args, **kwargs) @@ -510,7 +510,7 @@ def __init__(self, base_path, *args, **kwargs): self.user_site_path.joinpath("easy-install.pth").touch() def _ignore_file(self, fn): - if fn.endswith('__pycache__') or fn.endswith(".pyc"): + if fn.endswith("__pycache__") or fn.endswith(".pyc"): result = True else: result = super(PipTestEnvironment, self)._ignore_file(fn) @@ -521,7 +521,7 @@ def _find_traverse(self, path, result): # results because of venv `lib64 -> lib/` symlink on Linux. 
full = os.path.join(self.base_path, path) if os.path.isdir(full) and os.path.islink(full): - if not self.temp_path or path != 'tmp': + if not self.temp_path or path != "tmp": result[path] = FoundDir(self.base_path, path) else: super(PipTestEnvironment, self)._find_traverse(path, result) @@ -543,44 +543,44 @@ def run(self, *args, **kw): compatibility. """ if self.verbose: - print('>> running %s %s' % (args, kw)) + print(">> running %s %s" % (args, kw)) - cwd = kw.pop('cwd', None) - run_from = kw.pop('run_from', None) + cwd = kw.pop("cwd", None) + run_from = kw.pop("run_from", None) assert not cwd or not run_from, "Don't use run_from; it's going away" cwd = cwd or run_from or self.cwd - if sys.platform == 'win32': + if sys.platform == "win32": # Partial fix for ScriptTest.run using `shell=True` on Windows. - args = [str(a).replace('^', '^^').replace('&', '^&') for a in args] + args = [str(a).replace("^", "^^").replace("&", "^&") for a in args] # Remove `allow_stderr_error` and `allow_stderr_warning` before # calling run() because PipTestEnvironment doesn't support them. - allow_stderr_error = kw.pop('allow_stderr_error', None) - allow_stderr_warning = kw.pop('allow_stderr_warning', None) + allow_stderr_error = kw.pop("allow_stderr_error", None) + allow_stderr_warning = kw.pop("allow_stderr_warning", None) # Propagate default values. - expect_error = kw.get('expect_error') + expect_error = kw.get("expect_error") if expect_error: # Then default to allowing logged errors. if allow_stderr_error is not None and not allow_stderr_error: raise RuntimeError( - 'cannot pass allow_stderr_error=False with expect_error=True' + "cannot pass allow_stderr_error=False with expect_error=True" ) allow_stderr_error = True - elif kw.get('expect_stderr'): + elif kw.get("expect_stderr"): # Then default to allowing logged warnings. 
if allow_stderr_warning is not None and not allow_stderr_warning: raise RuntimeError( - 'cannot pass allow_stderr_warning=False with expect_stderr=True' + "cannot pass allow_stderr_warning=False with expect_stderr=True" ) allow_stderr_warning = True if allow_stderr_error: if allow_stderr_warning is not None and not allow_stderr_warning: raise RuntimeError( - 'cannot pass allow_stderr_warning=False with ' - 'allow_stderr_error=True' + "cannot pass allow_stderr_warning=False with " + "allow_stderr_error=True" ) # Default values if not set. @@ -591,7 +591,7 @@ def run(self, *args, **kw): # Pass expect_stderr=True to allow any stderr. We do this because # we do our checking of stderr further on in check_stderr(). - kw['expect_stderr'] = True + kw["expect_stderr"] = True result = super(PipTestEnvironment, self).run(cwd=cwd, *args, **kw) if expect_error: @@ -610,12 +610,12 @@ def run(self, *args, **kw): def pip(self, *args, **kwargs): __tracebackhide__ = True if self.pip_expect_warning: - kwargs['allow_stderr_warning'] = True - if kwargs.pop('use_module', True): - exe = 'python' - args = ('-m', 'pip') + args + kwargs["allow_stderr_warning"] = True + if kwargs.pop("use_module", True): + exe = "python" + args = ("-m", "pip") + args else: - exe = 'pip' + exe = "pip" return self.run(exe, *args, **kwargs) def pip_install_local(self, *args, **kwargs): @@ -629,8 +629,8 @@ def pip_install_local(self, *args, **kwargs): ) def easy_install(self, *args, **kwargs): - args = ('-m', 'easy_install') + args - return self.run('python', *args, **kwargs) + args = ("-m", "easy_install") + args + return self.run("python", *args, **kwargs) # FIXME ScriptTest does something similar, but only within a single @@ -706,8 +706,8 @@ def assert_all_changes(start_state, end_state, expected_changes): diff = diff_states(start_files, end_files, ignore=expected_changes) if list(diff.values()) != [{}, {}, {}]: raise TestFailure( - 'Unexpected changes:\n' - + '\n'.join([k + ': ' + ', '.join(v.keys()) for k, 
v in diff.items()]) + "Unexpected changes:\n" + + "\n".join([k + ": " + ", ".join(v.keys()) for k, v in diff.items()]) ) # Don't throw away this potentially useful information @@ -719,9 +719,9 @@ def _create_main_file(dir_path, name=None, output=None): Create a module with a main() function that prints the given output. """ if name is None: - name = 'version_pkg' + name = "version_pkg" if output is None: - output = '0.1' + output = "0.1" text = textwrap.dedent( """\ def main(): @@ -730,7 +730,7 @@ def main(): output ) ) - filename = '{}.py'.format(name) + filename = "{}.py".format(name) dir_path.joinpath(filename).write_text(text) @@ -745,69 +745,69 @@ def _git_commit(env_or_script, repo_dir, message=None, args=None, expect_stderr= args: optional additional options to pass to git-commit. """ if message is None: - message = 'test commit' + message = "test commit" if args is None: args = [] - new_args = ['git', 'commit', '-q', '--author', 'pip '] + new_args = ["git", "commit", "-q", "--author", "pip "] new_args.extend(args) - new_args.extend(['-m', message]) + new_args.extend(["-m", message]) env_or_script.run(*new_args, cwd=repo_dir, expect_stderr=expect_stderr) -def _vcs_add(script, version_pkg_path, vcs='git'): - if vcs == 'git': - script.run('git', 'init', cwd=version_pkg_path) - script.run('git', 'add', '.', cwd=version_pkg_path) - _git_commit(script, version_pkg_path, message='initial version') - elif vcs == 'hg': - script.run('hg', 'init', cwd=version_pkg_path) - script.run('hg', 'add', '.', cwd=version_pkg_path) +def _vcs_add(script, version_pkg_path, vcs="git"): + if vcs == "git": + script.run("git", "init", cwd=version_pkg_path) + script.run("git", "add", ".", cwd=version_pkg_path) + _git_commit(script, version_pkg_path, message="initial version") + elif vcs == "hg": + script.run("hg", "init", cwd=version_pkg_path) + script.run("hg", "add", ".", cwd=version_pkg_path) script.run( - 'hg', - 'commit', - '-q', - '--user', - 'pip ', - '-m', - 'initial version', 
+ "hg", + "commit", + "-q", + "--user", + "pip ", + "-m", + "initial version", cwd=version_pkg_path, ) - elif vcs == 'svn': + elif vcs == "svn": repo_url = _create_svn_repo(script, version_pkg_path) script.run( - 'svn', 'checkout', repo_url, 'pip-test-package', cwd=script.scratch_path + "svn", "checkout", repo_url, "pip-test-package", cwd=script.scratch_path ) - checkout_path = script.scratch_path / 'pip-test-package' + checkout_path = script.scratch_path / "pip-test-package" # svn internally stores windows drives as uppercase; we'll match that. - checkout_path = checkout_path.replace('c:', 'C:') + checkout_path = checkout_path.replace("c:", "C:") version_pkg_path = checkout_path - elif vcs == 'bazaar': - script.run('bzr', 'init', cwd=version_pkg_path) - script.run('bzr', 'add', '.', cwd=version_pkg_path) + elif vcs == "bazaar": + script.run("bzr", "init", cwd=version_pkg_path) + script.run("bzr", "add", ".", cwd=version_pkg_path) script.run( - 'bzr', 'whoami', 'pip ', cwd=version_pkg_path + "bzr", "whoami", "pip ", cwd=version_pkg_path ) script.run( - 'bzr', - 'commit', - '-q', - '--author', - 'pip ', - '-m', - 'initial version', + "bzr", + "commit", + "-q", + "--author", + "pip ", + "-m", + "initial version", cwd=version_pkg_path, ) else: - raise ValueError('Unknown vcs: %r' % vcs) + raise ValueError("Unknown vcs: %r" % vcs) return version_pkg_path def _create_test_package_with_subdirectory(script, subdirectory): script.scratch_path.joinpath("version_pkg").mkdir() - version_pkg_path = script.scratch_path / 'version_pkg' + version_pkg_path = script.scratch_path / "version_pkg" _create_main_file(version_pkg_path, name="version_pkg", output="0.1") version_pkg_path.joinpath("setup.py").write_text( textwrap.dedent( @@ -826,7 +826,7 @@ def _create_test_package_with_subdirectory(script, subdirectory): subdirectory_path.mkdir() _create_main_file(subdirectory_path, name="version_subpkg", output="0.1") - subdirectory_path.joinpath('setup.py').write_text( + 
subdirectory_path.joinpath("setup.py").write_text( textwrap.dedent( """ from setuptools import setup, find_packages @@ -839,23 +839,23 @@ def _create_test_package_with_subdirectory(script, subdirectory): ) ) - script.run('git', 'init', cwd=version_pkg_path) - script.run('git', 'add', '.', cwd=version_pkg_path) - _git_commit(script, version_pkg_path, message='initial version') + script.run("git", "init", cwd=version_pkg_path) + script.run("git", "add", ".", cwd=version_pkg_path) + _git_commit(script, version_pkg_path, message="initial version") return version_pkg_path -def _create_test_package_with_srcdir(script, name='version_pkg', vcs='git'): +def _create_test_package_with_srcdir(script, name="version_pkg", vcs="git"): script.scratch_path.joinpath(name).mkdir() version_pkg_path = script.scratch_path / name - subdir_path = version_pkg_path.joinpath('subdir') + subdir_path = version_pkg_path.joinpath("subdir") subdir_path.mkdir() - src_path = subdir_path.joinpath('src') + src_path = subdir_path.joinpath("src") src_path.mkdir() - pkg_path = src_path.joinpath('pkg') + pkg_path = src_path.joinpath("pkg") pkg_path.mkdir() - pkg_path.joinpath('__init__.py').write_text('') + pkg_path.joinpath("__init__.py").write_text("") subdir_path.joinpath("setup.py").write_text( textwrap.dedent( """ @@ -874,10 +874,10 @@ def _create_test_package_with_srcdir(script, name='version_pkg', vcs='git'): return _vcs_add(script, version_pkg_path, vcs) -def _create_test_package(script, name='version_pkg', vcs='git'): +def _create_test_package(script, name="version_pkg", vcs="git"): script.scratch_path.joinpath(name).mkdir() version_pkg_path = script.scratch_path / name - _create_main_file(version_pkg_path, name=name, output='0.1') + _create_main_file(version_pkg_path, name=name, output="0.1") version_pkg_path.joinpath("setup.py").write_text( textwrap.dedent( """ @@ -898,15 +898,15 @@ def _create_test_package(script, name='version_pkg', vcs='git'): def _create_svn_repo(script, version_pkg_path): 
- repo_url = path_to_url(script.scratch_path / 'pip-test-package-repo' / 'trunk') - script.run('svnadmin', 'create', 'pip-test-package-repo', cwd=script.scratch_path) + repo_url = path_to_url(script.scratch_path / "pip-test-package-repo" / "trunk") + script.run("svnadmin", "create", "pip-test-package-repo", cwd=script.scratch_path) script.run( - 'svn', - 'import', + "svn", + "import", version_pkg_path, repo_url, - '-m', - 'Initial import of pip-test-package', + "-m", + "Initial import of pip-test-package", cwd=script.scratch_path, ) return repo_url @@ -914,14 +914,14 @@ def _create_svn_repo(script, version_pkg_path): def _change_test_package_version(script, version_pkg_path): _create_main_file( - version_pkg_path, name='version_pkg', output='some different version' + version_pkg_path, name="version_pkg", output="some different version" ) # Pass -a to stage the change to the main file. _git_commit( script, version_pkg_path, - message='messed version', - args=['-a'], + message="messed version", + args=["-a"], expect_stderr=True, ) @@ -948,15 +948,15 @@ def requirements_file(contents, tmpdir): :param tmpdir: A Path to the folder in which to create the file """ - path = tmpdir / 'reqs.txt' + path = tmpdir / "reqs.txt" path.write_text(contents) yield path path.unlink() def create_test_package_with_setup(script, **setup_kwargs): - assert 'name' in setup_kwargs, setup_kwargs - pkg_path = script.scratch_path / setup_kwargs['name'] + assert "name" in setup_kwargs, setup_kwargs + pkg_path = script.scratch_path / setup_kwargs["name"] pkg_path.mkdir() pkg_path.joinpath("setup.py").write_text( textwrap.dedent( @@ -1025,7 +1025,7 @@ def hello(): ["Requires-Dist: {}".format(pkg) for pkg in depends] + ["Provides-Extra: {}".format(pkg) for pkg in extras.keys()] + [ - "Requires-Dist: {}; extra == \"{}\"".format(pkg, extra) + 'Requires-Dist: {}; extra == "{}"'.format(pkg, extra) for extra in extras for pkg in extras[extra] ] @@ -1047,7 +1047,7 @@ def hello(): 
path.write_text(files[fname]) retval = script.scratch_path / archive_name - generated = shutil.make_archive(retval, 'zip', script.temp_path) + generated = shutil.make_archive(retval, "zip", script.temp_path) shutil.move(generated, retval) shutil.rmtree(script.temp_path) @@ -1061,7 +1061,7 @@ def wrapper(fn): try: subprocess.check_output(check_cmd) except OSError: - return pytest.mark.skip(reason='%s is not available' % name)(fn) + return pytest.mark.skip(reason="%s is not available" % name)(fn) return fn return wrapper @@ -1069,7 +1069,7 @@ def wrapper(fn): def is_bzr_installed(): try: - subprocess.check_output(('bzr', 'version', '--short')) + subprocess.check_output(("bzr", "version", "--short")) except OSError: return False return True @@ -1077,15 +1077,15 @@ def is_bzr_installed(): def is_svn_installed(): try: - subprocess.check_output(('svn', '--version')) + subprocess.check_output(("svn", "--version")) except OSError: return False return True def need_bzr(fn): - return pytest.mark.bzr(need_executable('Bazaar', ('bzr', 'version', '--short'))(fn)) + return pytest.mark.bzr(need_executable("Bazaar", ("bzr", "version", "--short"))(fn)) def need_mercurial(fn): - return pytest.mark.mercurial(need_executable('Mercurial', ('hg', 'version'))(fn)) + return pytest.mark.mercurial(need_executable("Mercurial", ("hg", "version"))(fn)) diff --git a/tests/lib/git_submodule_helpers.py b/tests/lib/git_submodule_helpers.py index 4b209851ba6..01f92fc90f4 100644 --- a/tests/lib/git_submodule_helpers.py +++ b/tests/lib/git_submodule_helpers.py @@ -7,11 +7,11 @@ def _create_test_package_submodule(env): env.scratch_path.joinpath("version_pkg_submodule").mkdir() - submodule_path = env.scratch_path / 'version_pkg_submodule' - env.run('touch', 'testfile', cwd=submodule_path) - env.run('git', 'init', cwd=submodule_path) - env.run('git', 'add', '.', cwd=submodule_path) - _git_commit(env, submodule_path, message='initial version / submodule') + submodule_path = env.scratch_path / 
"version_pkg_submodule" + env.run("touch", "testfile", cwd=submodule_path) + env.run("git", "init", cwd=submodule_path) + env.run("git", "add", ".", cwd=submodule_path) + _git_commit(env, submodule_path, message="initial version / submodule") return submodule_path @@ -19,8 +19,8 @@ def _create_test_package_submodule(env): def _change_test_package_submodule(env, submodule_path): submodule_path.joinpath("testfile").write_text("this is a changed file") submodule_path.joinpath("testfile2").write_text("this is an added file") - env.run('git', 'add', '.', cwd=submodule_path) - _git_commit(env, submodule_path, message='submodule change') + env.run("git", "add", ".", cwd=submodule_path) + _git_commit(env, submodule_path, message="submodule change") def _pull_in_submodule_changes_to_module(env, module_path, rel_path): @@ -29,9 +29,9 @@ def _pull_in_submodule_changes_to_module(env, module_path, rel_path): rel_path: the location of the submodule relative to the superproject. """ submodule_path = module_path / rel_path - env.run('git', 'pull', '-q', 'origin', 'master', cwd=submodule_path) + env.run("git", "pull", "-q", "origin", "master", cwd=submodule_path) # Pass -a to stage the submodule changes that were just pulled in. - _git_commit(env, module_path, message='submodule change', args=['-a']) + _git_commit(env, module_path, message="submodule change", args=["-a"]) def _create_test_package_with_submodule(env, rel_path): @@ -40,30 +40,30 @@ def _create_test_package_with_submodule(env, rel_path): rel_path: the location of the submodule relative to the superproject. 
""" env.scratch_path.joinpath("version_pkg").mkdir() - version_pkg_path = env.scratch_path / 'version_pkg' + version_pkg_path = env.scratch_path / "version_pkg" version_pkg_path.joinpath("testpkg").mkdir() - pkg_path = version_pkg_path / 'testpkg' + pkg_path = version_pkg_path / "testpkg" pkg_path.joinpath("__init__.py").write_text("# hello there") _create_main_file(pkg_path, name="version_pkg", output="0.1") version_pkg_path.joinpath("setup.py").write_text( textwrap.dedent( - '''\ + """\ from setuptools import setup, find_packages setup(name='version_pkg', version='0.1', packages=find_packages(), ) - ''' + """ ) ) - env.run('git', 'init', cwd=version_pkg_path) - env.run('git', 'add', '.', cwd=version_pkg_path) - _git_commit(env, version_pkg_path, message='initial version') + env.run("git", "init", cwd=version_pkg_path) + env.run("git", "add", ".", cwd=version_pkg_path) + _git_commit(env, version_pkg_path, message="initial version") submodule_path = _create_test_package_submodule(env) - env.run('git', 'submodule', 'add', submodule_path, rel_path, cwd=version_pkg_path) - _git_commit(env, version_pkg_path, message='initial version w submodule') + env.run("git", "submodule", "add", submodule_path, rel_path, cwd=version_pkg_path) + _git_commit(env, version_pkg_path, message="initial version w submodule") return version_pkg_path, submodule_path diff --git a/tests/lib/local_repos.py b/tests/lib/local_repos.py index bd4d23ec97b..002919312d1 100644 --- a/tests/lib/local_repos.py +++ b/tests/lib/local_repos.py @@ -19,16 +19,16 @@ def _create_svn_initools_repo(initools_dir): Create the SVN INITools repo. 
""" directory = os.path.dirname(initools_dir) - subprocess.check_call('svnadmin create INITools'.split(), cwd=directory) + subprocess.check_call("svnadmin create INITools".split(), cwd=directory) filename, _ = urllib_request.urlretrieve( - 'http://bitbucket.org/hltbra/pip-initools-dump/raw/8b55c908a320/' - 'INITools_modified.dump' + "http://bitbucket.org/hltbra/pip-initools-dump/raw/8b55c908a320/" + "INITools_modified.dump" ) - devnull = open(os.devnull, 'w') + devnull = open(os.devnull, "w") dump = open(filename) subprocess.check_call( - ['svnadmin', 'load', initools_dir], stdin=dump, stdout=devnull + ["svnadmin", "load", initools_dir], stdin=dump, stdout=devnull ) dump.close() devnull.close() @@ -45,27 +45,27 @@ def local_checkout( temp directory Path object unique to each test function invocation, created as a sub directory of the base temp directory. """ - assert '+' in remote_repo - vcs_name = remote_repo.split('+', 1)[0] + assert "+" in remote_repo + vcs_name = remote_repo.split("+", 1)[0] repository_name = os.path.basename(remote_repo) - directory = temp_path.joinpath('cache') + directory = temp_path.joinpath("cache") repo_url_path = os.path.join(directory, repository_name) assert not os.path.exists(repo_url_path) if not os.path.exists(directory): os.mkdir(directory) - if vcs_name == 'svn': - assert repository_name == 'INITools' + if vcs_name == "svn": + assert repository_name == "INITools" _create_svn_initools_repo(repo_url_path) - repo_url_path = os.path.join(repo_url_path, 'trunk') + repo_url_path = os.path.join(repo_url_path, "trunk") else: vcs_backend = vcs.get_backend(vcs_name) vcs_backend.obtain(repo_url_path, url=hide_url(remote_repo)) - return '{}+{}'.format(vcs_name, path_to_url(repo_url_path)) + return "{}+{}".format(vcs_name, path_to_url(repo_url_path)) def local_repo(remote_repo, temp_path): - return local_checkout(remote_repo, temp_path).split('+', 1)[1] + return local_checkout(remote_repo, temp_path).split("+", 1)[1] diff --git 
a/tests/lib/options_helpers.py b/tests/lib/options_helpers.py index c7fb318a927..ca218a164a5 100644 --- a/tests/lib/options_helpers.py +++ b/tests/lib/options_helpers.py @@ -19,10 +19,10 @@ def main(self, args): class AddFakeCommandMixin(object): def setup(self): self.environ_before = os.environ.copy() - commands_dict['fake'] = CommandInfo( - 'tests.lib.options_helpers', 'FakeCommand', 'fake summary' + commands_dict["fake"] = CommandInfo( + "tests.lib.options_helpers", "FakeCommand", "fake summary" ) def teardown(self): reset_os_environ(self.environ_before) - commands_dict.pop('fake') + commands_dict.pop("fake") diff --git a/tests/lib/path.py b/tests/lib/path.py index 6339aca6c61..b1f6952d062 100644 --- a/tests/lib/path.py +++ b/tests/lib/path.py @@ -192,7 +192,7 @@ def joinpath(self, *parts): # TODO: Remove after removing inheritance from str. def join(self, *parts): - raise RuntimeError('Path.join is invalid, use joinpath instead.') + raise RuntimeError("Path.join is invalid, use joinpath instead.") def read_text(self): with open(self, "r") as fp: diff --git a/tests/lib/test_lib.py b/tests/lib/test_lib.py index a190a466a01..2c92e634f83 100644 --- a/tests/lib/test_lib.py +++ b/tests/lib/test_lib.py @@ -20,7 +20,7 @@ def assert_error_startswith(exc_type, expected_start): with pytest.raises(exc_type) as err: yield - assert str(err.value).startswith(expected_start), 'full message: {}'.format( + assert str(err.value).startswith(expected_start), "full message: {}".format( err.value ) @@ -33,7 +33,7 @@ def test_tmp_dir_exists_in_env(script): # need these tests to ensure the assert_no_temp feature of scripttest is # working script.assert_no_temp() # this fails if env.tmp_path doesn't exist - assert script.environ['TMPDIR'] == script.temp_path + assert script.environ["TMPDIR"] == script.temp_path assert isdir(script.temp_path) @@ -43,16 +43,16 @@ def test_correct_pip_version(script): """ # output is like: # pip PIPVERSION from PIPDIRECTORY (python PYVERSION) - result = 
script.pip('--version') + result = script.pip("--version") # compare the directory tree of the invoked pip with that of this source # distribution pip_folder_outputed = re.match( - r'pip \d+(\.[\d]+)+(\.?(b|rc|dev|pre|post)\d+)? from (.*) ' - r'\(python \d(.[\d])+\)$', + r"pip \d+(\.[\d]+)+(\.?(b|rc|dev|pre|post)\d+)? from (.*) " + r"\(python \d(.[\d])+\)$", result.stdout, ).group(4) - pip_folder = join(SRC_DIR, 'src', 'pip') + pip_folder = join(SRC_DIR, "src", "pip") diffs = filecmp.dircmp(pip_folder, pip_folder_outputed) @@ -63,9 +63,9 @@ def test_correct_pip_version(script): mismatch_py = [ x for x in diffs.left_only + diffs.right_only + diffs.diff_files - if x.endswith('.py') + if x.endswith(".py") ] - assert not mismatch_py, 'mismatched source files in %r and %r: %r' % ( + assert not mismatch_py, "mismatched source files in %r and %r: %r" % ( pip_folder, pip_folder_outputed, mismatch_py, @@ -86,9 +86,9 @@ def run_stderr_with_prefix(self, script, prefix, **kwargs): """ Call run() that prints stderr with the given prefix. """ - text = '{}: hello, world\\n'.format(prefix) + text = "{}: hello, world\\n".format(prefix) command = 'import sys; sys.stderr.write("{}")'.format(text) - args = [sys.executable, '-c', command] + args = [sys.executable, "-c", command] script.run(*args, **kwargs) def run_with_log_command(self, script, sub_string, **kwargs): @@ -100,10 +100,10 @@ def run_with_log_command(self, script, sub_string, **kwargs): "import logging; logging.basicConfig(level='INFO'); " "logging.getLogger().info('sub: {}', 'foo')" ).format(sub_string) - args = [sys.executable, '-c', command] + args = [sys.executable, "-c", command] script.run(*args, **kwargs) - @pytest.mark.parametrize('prefix', ('DEBUG', 'INFO', 'FOO')) + @pytest.mark.parametrize("prefix", ("DEBUG", "INFO", "FOO")) def test_run__allowed_stderr(self, script, prefix): """ Test calling run() with allowed stderr. 
@@ -116,14 +116,14 @@ def test_run__allow_stderr_warning(self, script): Test passing allow_stderr_warning=True. """ # Check that no error happens. - self.run_stderr_with_prefix(script, 'WARNING', allow_stderr_warning=True) + self.run_stderr_with_prefix(script, "WARNING", allow_stderr_warning=True) # Check that an error still happens with ERROR. - expected_start = 'stderr has an unexpected error' + expected_start = "stderr has an unexpected error" with assert_error_startswith(RuntimeError, expected_start): - self.run_stderr_with_prefix(script, 'ERROR', allow_stderr_warning=True) + self.run_stderr_with_prefix(script, "ERROR", allow_stderr_warning=True) - @pytest.mark.parametrize('prefix', ('DEPRECATION', 'WARNING', 'ERROR')) + @pytest.mark.parametrize("prefix", ("DEPRECATION", "WARNING", "ERROR")) def test_run__allow_stderr_error(self, script, prefix): """ Test passing allow_stderr_error=True. @@ -132,11 +132,11 @@ def test_run__allow_stderr_error(self, script, prefix): self.run_stderr_with_prefix(script, prefix, allow_stderr_error=True) @pytest.mark.parametrize( - 'prefix, expected_start', + "prefix, expected_start", ( - ('DEPRECATION', 'stderr has an unexpected warning'), - ('WARNING', 'stderr has an unexpected warning'), - ('ERROR', 'stderr has an unexpected error'), + ("DEPRECATION", "stderr has an unexpected warning"), + ("WARNING", "stderr has an unexpected warning"), + ("ERROR", "stderr has an unexpected error"), ), ) def test_run__unexpected_stderr(self, script, prefix, expected_start): @@ -151,54 +151,54 @@ def test_run__logging_error(self, script): Test calling run() with an unexpected logging error. """ # Pass a good substitution string. 
- self.run_with_log_command(script, sub_string='%r') + self.run_with_log_command(script, sub_string="%r") - expected_start = 'stderr has a logging error, which is never allowed' + expected_start = "stderr has a logging error, which is never allowed" with assert_error_startswith(RuntimeError, expected_start): # Pass a bad substitution string. Also, pass # allow_stderr_error=True to check that the RuntimeError occurs # even under the stricter test condition of when we are allowing # other types of errors. self.run_with_log_command( - script, sub_string='{!r}', allow_stderr_error=True + script, sub_string="{!r}", allow_stderr_error=True ) def test_run__allow_stderr_error_false_error_with_expect_error(self, script): """ Test passing allow_stderr_error=False with expect_error=True. """ - expected_start = 'cannot pass allow_stderr_error=False with expect_error=True' + expected_start = "cannot pass allow_stderr_error=False with expect_error=True" with assert_error_startswith(RuntimeError, expected_start): - script.run('python', allow_stderr_error=False, expect_error=True) + script.run("python", allow_stderr_error=False, expect_error=True) def test_run__allow_stderr_warning_false_error_with_expect_stderr(self, script): """ Test passing allow_stderr_warning=False with expect_stderr=True. """ expected_start = ( - 'cannot pass allow_stderr_warning=False with expect_stderr=True' + "cannot pass allow_stderr_warning=False with expect_stderr=True" ) with assert_error_startswith(RuntimeError, expected_start): - script.run('python', allow_stderr_warning=False, expect_stderr=True) + script.run("python", allow_stderr_warning=False, expect_stderr=True) - @pytest.mark.parametrize('arg_name', ('expect_error', 'allow_stderr_error')) + @pytest.mark.parametrize("arg_name", ("expect_error", "allow_stderr_error")) def test_run__allow_stderr_warning_false_error(self, script, arg_name): """ Test passing allow_stderr_warning=False when it is not allowed. 
""" - kwargs = {'allow_stderr_warning': False, arg_name: True} + kwargs = {"allow_stderr_warning": False, arg_name: True} expected_start = ( - 'cannot pass allow_stderr_warning=False with allow_stderr_error=True' + "cannot pass allow_stderr_warning=False with allow_stderr_error=True" ) with assert_error_startswith(RuntimeError, expected_start): - script.run('python', **kwargs) + script.run("python", **kwargs) def test_run__expect_error_fails_when_zero_returncode(self, script): - expected_start = 'Script passed unexpectedly' + expected_start = "Script passed unexpectedly" with assert_error_startswith(AssertionError, expected_start): - script.run('python', expect_error=True) + script.run("python", expect_error=True) def test_run__no_expect_error_fails_when_nonzero_returncode(self, script): - expected_start = 'Script returned code: 1' + expected_start = "Script returned code: 1" with assert_error_startswith(AssertionError, expected_start): - script.run('python', '-c', 'import sys; sys.exit(1)') + script.run("python", "-c", "import sys; sys.exit(1)") diff --git a/tests/lib/venv.py b/tests/lib/venv.py index a9899d8c3d0..477b5d98edd 100644 --- a/tests/lib/venv.py +++ b/tests/lib/venv.py @@ -22,9 +22,9 @@ class VirtualEnvironment(object): def __init__(self, location, template=None, venv_type=None): assert template is None or venv_type is None - assert venv_type in (None, 'virtualenv', 'venv') + assert venv_type in (None, "virtualenv", "venv") self.location = Path(location) - self._venv_type = venv_type or template._venv_type or 'virtualenv' + self._venv_type = venv_type or template._venv_type or "virtualenv" self._user_site_packages = False self._template = template self._sitecustomize = None @@ -34,12 +34,12 @@ def __init__(self, location, template=None, venv_type=None): def _update_paths(self): home, lib, inc, bin = _virtualenv.path_locations(self.location) self.bin = Path(bin) - self.site = Path(lib) / 'site-packages' + self.site = Path(lib) / "site-packages" # 
Workaround for https://github.com/pypa/virtualenv/issues/306 if hasattr(sys, "pypy_version_info"): - version_fmt = '{0}' if six.PY3 else '{0}.{1}' + version_fmt = "{0}" if six.PY3 else "{0}.{1}" version_dir = version_fmt.format(*sys.version_info) - self.lib = Path(home, 'lib-python', version_dir) + self.lib = Path(home, "lib-python", version_dir) else: self.lib = Path(lib) @@ -52,7 +52,7 @@ def _create(self, clear=False): if self._template: # On Windows, calling `_virtualenv.path_locations(target)` # will have created the `target` directory... - if sys.platform == 'win32' and self.location.exists(): + if sys.platform == "win32" and self.location.exists(): self.location.rmdir() # Clone virtual environment from template. shutil.copytree(self._template.location, self.location, symlinks=True) @@ -60,12 +60,12 @@ def _create(self, clear=False): self._user_site_packages = self._template.user_site_packages else: # Create a new virtual environment. - if self._venv_type == 'virtualenv': + if self._venv_type == "virtualenv": _virtualenv.create_environment( self.location, no_pip=True, no_wheel=True, no_setuptools=True ) self._fix_virtualenv_site_module() - elif self._venv_type == 'venv': + elif self._venv_type == "venv": builder = _venv.EnvBuilder() context = builder.ensure_directories(self.location) builder.create_configuration(context) @@ -76,44 +76,44 @@ def _create(self, clear=False): def _fix_virtualenv_site_module(self): # Patch `site.py` so user site work as expected. - site_py = self.lib / 'site.py' + site_py = self.lib / "site.py" with open(site_py) as fp: site_contents = fp.read() for pattern, replace in ( ( # Ensure enabling user site does not result in adding # the real site-packages' directory to `sys.path`. 
- ('\ndef virtual_addsitepackages(known_paths):\n'), + ("\ndef virtual_addsitepackages(known_paths):\n"), ( - '\ndef virtual_addsitepackages(known_paths):\n' - ' return known_paths\n' + "\ndef virtual_addsitepackages(known_paths):\n" + " return known_paths\n" ), ), ( # Fix sites ordering: user site must be added before system. ( - '\n paths_in_sys = addsitepackages(paths_in_sys)' - '\n paths_in_sys = addusersitepackages(paths_in_sys)\n' + "\n paths_in_sys = addsitepackages(paths_in_sys)" + "\n paths_in_sys = addusersitepackages(paths_in_sys)\n" ), ( - '\n paths_in_sys = addusersitepackages(paths_in_sys)' - '\n paths_in_sys = addsitepackages(paths_in_sys)\n' + "\n paths_in_sys = addusersitepackages(paths_in_sys)" + "\n paths_in_sys = addsitepackages(paths_in_sys)\n" ), ), ): assert pattern in site_contents site_contents = site_contents.replace(pattern, replace) - with open(site_py, 'w') as fp: + with open(site_py, "w") as fp: fp.write(site_contents) # Make sure bytecode is up-to-date too. assert compileall.compile_file(str(site_py), quiet=1, force=True) def _customize_site(self): - contents = '' - if self._venv_type == 'venv': + contents = "" + if self._venv_type == "venv": # Enable user site (before system). contents += textwrap.dedent( - ''' + """ import os, site, sys if not os.environ.get('PYTHONNOUSERSITE', False): @@ -137,10 +137,10 @@ def _customize_site(self): # Third, add back system-sites related paths. for path in site.getsitepackages(): site.addsitedir(path) - ''' + """ ).strip() if self._sitecustomize is not None: - contents += '\n' + self._sitecustomize + contents += "\n" + self._sitecustomize sitecustomize = self.site / "sitecustomize.py" sitecustomize.write_text(contents) # Make sure bytecode is up-to-date too. 
@@ -170,11 +170,11 @@ def user_site_packages(self): @user_site_packages.setter def user_site_packages(self, value): self._user_site_packages = value - if self._venv_type == 'virtualenv': + if self._venv_type == "virtualenv": marker = self.lib / "no-global-site-packages.txt" if self._user_site_packages: marker.unlink() else: marker.touch() - elif self._venv_type == 'venv': + elif self._venv_type == "venv": self._customize_site() diff --git a/tests/scripts/test_all_pip.py b/tests/scripts/test_all_pip.py index 9a3c11b172a..e26836a98ef 100644 --- a/tests/scripts/test_all_pip.py +++ b/tests/scripts/test_all_pip.py @@ -10,15 +10,15 @@ src_folder = dirname(dirname(abspath(__file__))) -if sys.platform == 'win32': - bin_dir = 'Scripts' +if sys.platform == "win32": + bin_dir = "Scripts" else: - bin_dir = 'bin' + bin_dir = "bin" def all_projects(): - data = urllib_request.urlopen('http://pypi.org/simple/').read() - projects = [m.group(1) for m in re.finditer(r'(.+)', data)] + data = urllib_request.urlopen("http://pypi.org/simple/").read() + projects = [m.group(1) for m in re.finditer(r"(.+)", data)] return projects @@ -26,64 +26,64 @@ def main(args=None): if args is None: args = sys.argv[1:] if not args: - print('Usage: test_all_pip.py ') + print("Usage: test_all_pip.py ") sys.exit(1) output = os.path.abspath(args[0]) if not os.path.exists(output): - print('Creating %s' % output) + print("Creating %s" % output) os.makedirs(output) - pending_fn = os.path.join(output, 'pending.txt') + pending_fn = os.path.join(output, "pending.txt") if not os.path.exists(pending_fn): - print('Downloading pending list') + print("Downloading pending list") projects = all_projects() - print('Found %s projects' % len(projects)) - with open(pending_fn, 'w') as f: + print("Found %s projects" % len(projects)) + with open(pending_fn, "w") as f: for name in projects: - f.write(name + '\n') - print('Starting testing...') + f.write(name + "\n") + print("Starting testing...") while 
os.stat(pending_fn).st_size: _test_packages(output, pending_fn) - print('Finished all pending!') + print("Finished all pending!") def _test_packages(output, pending_fn): package = get_last_item(pending_fn) - print('Testing package %s' % package) + print("Testing package %s" % package) dest_dir = os.path.join(output, package) - print('Creating virtualenv in %s' % dest_dir) + print("Creating virtualenv in %s" % dest_dir) create_venv(dest_dir) - print('Uninstalling actual pip') + print("Uninstalling actual pip") code = subprocess.check_call( - [os.path.join(dest_dir, bin_dir, 'pip'), 'uninstall', '-y', 'pip'] + [os.path.join(dest_dir, bin_dir, "pip"), "uninstall", "-y", "pip"] ) - assert not code, 'pip uninstallation failed' - print('Installing development pip') + assert not code, "pip uninstallation failed" + print("Installing development pip") code = subprocess.check_call( - [os.path.join(dest_dir, bin_dir, 'python'), 'setup.py', 'install'], + [os.path.join(dest_dir, bin_dir, "python"), "setup.py", "install"], cwd=src_folder, ) - assert not code, 'pip installation failed' - print('Trying installation of %s' % dest_dir) + assert not code, "pip installation failed" + print("Trying installation of %s" % dest_dir) code = subprocess.check_call( - [os.path.join(dest_dir, bin_dir, 'pip'), 'install', package] + [os.path.join(dest_dir, bin_dir, "pip"), "install", package] ) if code: - print('Installation of %s failed' % package) - print('Now checking easy_install...') + print("Installation of %s failed" % package) + print("Now checking easy_install...") create_venv(dest_dir) code = subprocess.check_call( - [os.path.join(dest_dir, bin_dir, 'easy_install'), package] + [os.path.join(dest_dir, bin_dir, "easy_install"), package] ) if code: - print('easy_install also failed') - add_package(os.path.join(output, 'easy-failure.txt'), package) + print("easy_install also failed") + add_package(os.path.join(output, "easy-failure.txt"), package) else: - print('easy_install succeeded') - 
add_package(os.path.join(output, 'failure.txt'), package) + print("easy_install succeeded") + add_package(os.path.join(output, "failure.txt"), package) pop_last_item(pending_fn, package) else: - print('Installation of %s succeeded' % package) - add_package(os.path.join(output, 'success.txt'), package) + print("Installation of %s succeeded" % package) + add_package(os.path.join(output, "success.txt"), package) pop_last_item(pending_fn, package) rmtree(dest_dir) @@ -91,35 +91,35 @@ def _test_packages(output, pending_fn): def create_venv(dest_dir): if os.path.exists(dest_dir): rmtree(dest_dir) - print('Creating virtualenv in %s' % dest_dir) - code = subprocess.check_call(['virtualenv', '--no-site-packages', dest_dir]) + print("Creating virtualenv in %s" % dest_dir) + code = subprocess.check_call(["virtualenv", "--no-site-packages", dest_dir]) assert not code, "virtualenv failed" def get_last_item(fn): - f = open(fn, 'r') + f = open(fn, "r") lines = f.readlines() f.close() return lines[-1].strip() def pop_last_item(fn, line=None): - f = open(fn, 'r') + f = open(fn, "r") lines = f.readlines() f.close() if line: assert lines[-1].strip() == line.strip() lines.pop() - f = open(fn, 'w') + f = open(fn, "w") f.writelines(lines) f.close() def add_package(filename, package): - f = open(filename, 'a') - f.write(package + '\n') + f = open(filename, "a") + f.write(package + "\n") f.close() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/tests/unit/test_appdirs.py b/tests/unit/test_appdirs.py index 49d97531e01..c5d6a1fdf13 100644 --- a/tests/unit/test_appdirs.py +++ b/tests/unit/test_appdirs.py @@ -61,7 +61,7 @@ def test_user_cache_dir_linux_home_slash(self, monkeypatch): assert appdirs.user_cache_dir("pip") == "/.cache/pip" def test_user_cache_dir_unicode(self, monkeypatch): - if sys.platform != 'win32': + if sys.platform != "win32": return def my_get_win_folder(csidl_name): @@ -72,7 +72,7 @@ def my_get_win_folder(csidl_name): # Do not use the 
isinstance expression directly in the # assert statement, as the Unicode characters in the result # cause pytest to fail with an internal error on Python 2.7 - result_is_str = isinstance(appdirs.user_cache_dir('test'), str) + result_is_str = isinstance(appdirs.user_cache_dir("test"), str) assert result_is_str, "user_cache_dir did not return a str" # Test against regression #3463 @@ -108,20 +108,20 @@ def test_site_config_dirs_linux(self, monkeypatch): monkeypatch.delenv("XDG_CONFIG_DIRS", raising=False) monkeypatch.setattr(sys, "platform", "linux2") - assert appdirs.site_config_dirs("pip") == ['/etc/xdg/pip', '/etc'] + assert appdirs.site_config_dirs("pip") == ["/etc/xdg/pip", "/etc"] def test_site_config_dirs_linux_override(self, monkeypatch): monkeypatch.setattr(appdirs, "WINDOWS", False) monkeypatch.setattr(os, "path", posixpath) - monkeypatch.setattr(os, "pathsep", ':') + monkeypatch.setattr(os, "pathsep", ":") monkeypatch.setenv("XDG_CONFIG_DIRS", "/spam:/etc:/etc/xdg") monkeypatch.setattr(sys, "platform", "linux2") assert appdirs.site_config_dirs("pip") == [ - '/spam/pip', - '/etc/pip', - '/etc/xdg/pip', - '/etc', + "/spam/pip", + "/etc/pip", + "/etc/xdg/pip", + "/etc", ] @@ -159,7 +159,7 @@ def test_user_data_dir_osx(self, monkeypatch): monkeypatch.setenv("HOME", "/home/test") monkeypatch.setattr(sys, "platform", "darwin") - if os.path.isdir('/home/test/Library/Application Support/'): + if os.path.isdir("/home/test/Library/Application Support/"): assert ( appdirs.user_data_dir("pip") == "/home/test/Library/Application Support/pip" @@ -232,7 +232,7 @@ def test_user_config_dir_osx(self, monkeypatch): monkeypatch.setenv("HOME", "/home/test") monkeypatch.setattr(sys, "platform", "darwin") - if os.path.isdir('/home/test/Library/Application Support/'): + if os.path.isdir("/home/test/Library/Application Support/"): assert ( appdirs.user_data_dir("pip") == "/home/test/Library/Application Support/pip" diff --git a/tests/unit/test_base_command.py 
b/tests/unit/test_base_command.py index 058cfcd84f7..987b4ca96cd 100644 --- a/tests/unit/test_base_command.py +++ b/tests/unit/test_base_command.py @@ -10,7 +10,7 @@ class FakeCommand(Command): - _name = 'fake' + _name = "fake" def __init__(self, run_func=None, error=False): if error: @@ -32,7 +32,7 @@ def run(self, options, args): class FakeCommandWithUnicode(FakeCommand): - _name = 'fake_unicode' + _name = "fake_unicode" def run(self, options, args): logging.getLogger("pip.tests").info(b"bytes here \xE9") @@ -61,19 +61,19 @@ def test_raise_broken_stdout(self, capsys): """ stderr = self.call_main(capsys, []) - assert stderr.rstrip() == 'ERROR: Pipe to stdout was broken' + assert stderr.rstrip() == "ERROR: Pipe to stdout was broken" def test_raise_broken_stdout__debug_logging(self, capsys): """ Test raising BrokenStdoutLoggingError with debug logging enabled. """ - stderr = self.call_main(capsys, ['-v']) + stderr = self.call_main(capsys, ["-v"]) - assert 'ERROR: Pipe to stdout was broken' in stderr - assert 'Traceback (most recent call last):' in stderr + assert "ERROR: Pipe to stdout was broken" in stderr + assert "Traceback (most recent call last):" in stderr -@patch('pip._internal.cli.req_command.Command.handle_pip_version_check') +@patch("pip._internal.cli.req_command.Command.handle_pip_version_check") def test_handle_pip_version_check_called(mock_handle_version_check): """ Check that Command.handle_pip_version_check() is called. @@ -92,18 +92,18 @@ class Test_base_command_logging(object): def setup(self): self.old_time = time.time time.time = lambda: 1547704837.040001 - self.old_tz = os.environ.get('TZ') - os.environ['TZ'] = 'UTC' + self.old_tz = os.environ.get("TZ") + os.environ["TZ"] = "UTC" # time.tzset() is not implemented on some platforms (notably, Windows). 
- if hasattr(time, 'tzset'): + if hasattr(time, "tzset"): time.tzset() def teardown(self): if self.old_tz: - os.environ['TZ'] = self.old_tz + os.environ["TZ"] = self.old_tz else: - del os.environ['TZ'] - if 'tzset' in dir(time): + del os.environ["TZ"] + if "tzset" in dir(time): time.tzset() time.time = self.old_time @@ -112,35 +112,35 @@ def test_log_command_success(self, tmpdir): Test the --log option logs when command succeeds """ cmd = FakeCommand() - log_path = tmpdir.joinpath('log') - cmd.main(['fake', '--log', log_path]) + log_path = tmpdir.joinpath("log") + cmd.main(["fake", "--log", log_path]) with open(log_path) as f: - assert f.read().rstrip() == '2019-01-17T06:00:37,040 fake' + assert f.read().rstrip() == "2019-01-17T06:00:37,040 fake" def test_log_command_error(self, tmpdir): """ Test the --log option logs when command fails """ cmd = FakeCommand(error=True) - log_path = tmpdir.joinpath('log') - cmd.main(['fake', '--log', log_path]) + log_path = tmpdir.joinpath("log") + cmd.main(["fake", "--log", log_path]) with open(log_path) as f: - assert f.read().startswith('2019-01-17T06:00:37,040 fake') + assert f.read().startswith("2019-01-17T06:00:37,040 fake") def test_log_file_command_error(self, tmpdir): """ Test the --log-file option logs (when there's an error). 
""" cmd = FakeCommand(error=True) - log_file_path = tmpdir.joinpath('log_file') - cmd.main(['fake', '--log-file', log_file_path]) + log_file_path = tmpdir.joinpath("log_file") + cmd.main(["fake", "--log-file", log_file_path]) with open(log_file_path) as f: - assert f.read().startswith('2019-01-17T06:00:37,040 fake') + assert f.read().startswith("2019-01-17T06:00:37,040 fake") def test_unicode_messages(self, tmpdir): """ Tests that logging bytestrings and unicode objects don't break logging """ cmd = FakeCommandWithUnicode() - log_path = tmpdir.joinpath('log') - cmd.main(['fake_unicode', '--log', log_path]) + log_path = tmpdir.joinpath("log") + cmd.main(["fake_unicode", "--log", log_path]) diff --git a/tests/unit/test_build_env.py b/tests/unit/test_build_env.py index 360a3792545..b16a51aad6c 100644 --- a/tests/unit/test_build_env.py +++ b/tests/unit/test_build_env.py @@ -7,14 +7,14 @@ def indent(text, prefix): - return '\n'.join((prefix if line else '') + line for line in text.split('\n')) + return "\n".join((prefix if line else "") + line for line in text.split("\n")) def run_with_build_env(script, setup_script_contents, test_script_contents=None): - build_env_script = script.scratch_path / 'build_env.py' + build_env_script = script.scratch_path / "build_env.py" build_env_script.write_text( dedent( - ''' + """ from __future__ import print_function import subprocess import sys @@ -42,23 +42,23 @@ def run_with_build_env(script, setup_script_contents, test_script_contents=None) build_env = BuildEnvironment() try: - ''' + """ % str(script.scratch_path) ) - + indent(dedent(setup_script_contents), ' ') + + indent(dedent(setup_script_contents), " ") + dedent( - ''' + """ if len(sys.argv) > 1: with build_env: subprocess.check_call((sys.executable, sys.argv[1])) finally: build_env.cleanup() - ''' + """ ) ) - args = ['python', build_env_script] + args = ["python", build_env_script] if test_script_contents is not None: - test_script = script.scratch_path / 'test.py' + 
test_script = script.scratch_path / "test.py" test_script.write_text(dedent(test_script_contents)) args.append(test_script) return script.run(*args) @@ -66,41 +66,41 @@ def run_with_build_env(script, setup_script_contents, test_script_contents=None) def test_build_env_allow_empty_requirements_install(): build_env = BuildEnvironment() - for prefix in ('normal', 'overlay'): + for prefix in ("normal", "overlay"): build_env.install_requirements(None, [], prefix, None) def test_build_env_allow_only_one_install(script): - create_basic_wheel_for_package(script, 'foo', '1.0') - create_basic_wheel_for_package(script, 'bar', '1.0') + create_basic_wheel_for_package(script, "foo", "1.0") + create_basic_wheel_for_package(script, "bar", "1.0") finder = make_test_finder(find_links=[script.scratch_path]) build_env = BuildEnvironment() - for prefix in ('normal', 'overlay'): + for prefix in ("normal", "overlay"): build_env.install_requirements( - finder, ['foo'], prefix, 'installing foo in %s' % prefix + finder, ["foo"], prefix, "installing foo in %s" % prefix ) with pytest.raises(AssertionError): build_env.install_requirements( - finder, ['bar'], prefix, 'installing bar in %s' % prefix + finder, ["bar"], prefix, "installing bar in %s" % prefix ) with pytest.raises(AssertionError): build_env.install_requirements( - finder, [], prefix, 'installing in %s' % prefix + finder, [], prefix, "installing in %s" % prefix ) def test_build_env_requirements_check(script): - create_basic_wheel_for_package(script, 'foo', '2.0') - create_basic_wheel_for_package(script, 'bar', '1.0') - create_basic_wheel_for_package(script, 'bar', '3.0') - create_basic_wheel_for_package(script, 'other', '0.5') + create_basic_wheel_for_package(script, "foo", "2.0") + create_basic_wheel_for_package(script, "bar", "1.0") + create_basic_wheel_for_package(script, "bar", "3.0") + create_basic_wheel_for_package(script, "other", "0.5") - script.pip_install_local('-f', script.scratch_path, 'foo', 'bar', 'other') + 
script.pip_install_local("-f", script.scratch_path, "foo", "bar", "other") run_with_build_env( script, - ''' + """ r = build_env.check_requirements(['foo', 'bar', 'other']) assert r == (set(), {'foo', 'bar', 'other'}), repr(r) @@ -109,12 +109,12 @@ def test_build_env_requirements_check(script): r = build_env.check_requirements(['foo>3.0', 'bar>=2.5']) assert r == (set(), {'foo>3.0', 'bar>=2.5'}), repr(r) - ''', + """, ) run_with_build_env( script, - ''' + """ build_env.install_requirements(finder, ['foo', 'bar==3.0'], 'normal', 'installing foo in normal') @@ -126,12 +126,12 @@ def test_build_env_requirements_check(script): r = build_env.check_requirements(['foo>3.0', 'bar>=2.5']) assert r == ({('foo==2.0', 'foo>3.0')}, set()), repr(r) - ''', + """, ) run_with_build_env( script, - ''' + """ build_env.install_requirements(finder, ['foo', 'bar==3.0'], 'normal', 'installing foo in normal') build_env.install_requirements(finder, ['bar==1.0'], 'overlay', @@ -146,55 +146,55 @@ def test_build_env_requirements_check(script): r = build_env.check_requirements(['foo>3.0', 'bar>=2.5']) assert r == ({('bar==1.0', 'bar>=2.5'), ('foo==2.0', 'foo>3.0')}, \ set()), repr(r) - ''', + """, ) def test_build_env_overlay_prefix_has_priority(script): - create_basic_wheel_for_package(script, 'pkg', '2.0') - create_basic_wheel_for_package(script, 'pkg', '4.3') + create_basic_wheel_for_package(script, "pkg", "2.0") + create_basic_wheel_for_package(script, "pkg", "4.3") result = run_with_build_env( script, - ''' + """ build_env.install_requirements(finder, ['pkg==2.0'], 'overlay', 'installing pkg==2.0 in overlay') build_env.install_requirements(finder, ['pkg==4.3'], 'normal', 'installing pkg==4.3 in normal') - ''', - ''' + """, + """ from __future__ import print_function print(__import__('pkg').__version__) - ''', + """, ) - assert result.stdout.strip() == '2.0', str(result) + assert result.stdout.strip() == "2.0", str(result) def test_build_env_isolation(script): # Create dummy `pkg` wheel. 
- pkg_whl = create_basic_wheel_for_package(script, 'pkg', '1.0') + pkg_whl = create_basic_wheel_for_package(script, "pkg", "1.0") # Install it to site packages. script.pip_install_local(pkg_whl) # And a copy in the user site. - script.pip_install_local('--ignore-installed', '--user', pkg_whl) + script.pip_install_local("--ignore-installed", "--user", pkg_whl) # And to another directory available through a .pth file. - target = script.scratch_path / 'pth_install' - script.pip_install_local('-t', target, pkg_whl) - (script.site_packages_path / 'build_requires.pth').write_text(str(target) + '\n') + target = script.scratch_path / "pth_install" + script.pip_install_local("-t", target, pkg_whl) + (script.site_packages_path / "build_requires.pth").write_text(str(target) + "\n") # And finally to yet another directory available through PYTHONPATH. - target = script.scratch_path / 'pypath_install' - script.pip_install_local('-t', target, pkg_whl) + target = script.scratch_path / "pypath_install" + script.pip_install_local("-t", target, pkg_whl) script.environ["PYTHONPATH"] = target run_with_build_env( script, - '', - r''' + "", + r""" from __future__ import print_function from distutils.sysconfig import get_python_lib import sys @@ -211,5 +211,5 @@ def test_build_env_isolation(script): })), file=sys.stderr) print('sys.path:\n ' + '\n '.join(sys.path), file=sys.stderr) sys.exit(1) - ''', + """, ) diff --git a/tests/unit/test_cmdoptions.py b/tests/unit/test_cmdoptions.py index e08f00c3818..9bb63007d63 100644 --- a/tests/unit/test_cmdoptions.py +++ b/tests/unit/test_cmdoptions.py @@ -4,24 +4,24 @@ @pytest.mark.parametrize( - 'value, expected', + "value, expected", [ - ('', (None, None)), - ('2', ((2,), None)), - ('3', ((3,), None)), - ('3.7', ((3, 7), None)), - ('3.7.3', ((3, 7, 3), None)), + ("", (None, None)), + ("2", ((2,), None)), + ("3", ((3,), None)), + ("3.7", ((3, 7), None)), + ("3.7.3", ((3, 7, 3), None)), # Test strings without dots of length bigger than 1. 
- ('34', ((3, 4), None)), + ("34", ((3, 4), None)), # Test a 2-digit minor version. - ('310', ((3, 10), None)), + ("310", ((3, 10), None)), # Test some values that fail to parse. - ('ab', ((), 'each version part must be an integer')), - ('3a', ((), 'each version part must be an integer')), - ('3.7.a', ((), 'each version part must be an integer')), - ('3.7.3.1', ((), 'at most three version parts are allowed')), + ("ab", ((), "each version part must be an integer")), + ("3a", ((), "each version part must be an integer")), + ("3.7.a", ((), "each version part must be an integer")), + ("3.7.3.1", ((), "at most three version parts are allowed")), ], ) def test_convert_python_version(value, expected): actual = _convert_python_version(value) - assert actual == expected, 'actual: {!r}'.format(actual) + assert actual == expected, "actual: {!r}".format(actual) diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index d391a01f90f..8824f878726 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -144,7 +144,7 @@ def test_get_html_response_dont_log_clear_text_password(caplog): assert len(caplog.records) == 1 record = caplog.records[0] - assert record.levelname == 'DEBUG' + assert record.levelname == "DEBUG" assert record.message.splitlines() == [ "Getting page https://user:****@example.com/simple/" ] @@ -155,15 +155,13 @@ def test_get_html_response_dont_log_clear_text_password(caplog): [ (b"", "https://example.com/", "https://example.com/"), ( - b"" - b"" - b"", + b"" b'' b"", "https://example.com/", "https://foo.example.com/", ), ( b"" - b"" + b'' b"", "https://example.com/", "https://foo.example.com/", @@ -245,22 +243,22 @@ def test_clean_link(url, clean_url): @pytest.mark.parametrize( - 'anchor_html, expected', + "anchor_html, expected", [ # Test not present. ('', None), # Test present with no value. - ('', ''), + ('', ""), # Test the empty string. - ('', ''), + ('', ""), # Test a non-empty string. 
- ('', 'error'), + ('', "error"), # Test a value with an escaped character. - ('', 'version < 1'), + ('', "version < 1"), # Test a yanked reason with a non-ascii character. ( u'', - u'curlyquote \u2018', + u"curlyquote \u2018", ), ], ) @@ -269,10 +267,10 @@ def test_parse_links__yanked_reason(anchor_html, expected): # Mark this as a unicode string for Python 2 since anchor_html # can contain non-ascii. u'' - '{}' + "{}" ).format(anchor_html) - html_bytes = html.encode('utf-8') - page = HTMLPage(html_bytes, encoding=None, url='https://example.com/simple/') + html_bytes = html.encode("utf-8") + page = HTMLPage(html_bytes, encoding=None, url="https://example.com/simple/") links = list(parse_links(page)) link, = links actual = link.yanked_reason @@ -281,33 +279,33 @@ def test_parse_links__yanked_reason(anchor_html, expected): def test_request_http_error(caplog): caplog.set_level(logging.DEBUG) - link = Link('http://localhost') + link = Link("http://localhost") session = Mock(PipSession) session.get.return_value = resp = Mock() - resp.raise_for_status.side_effect = requests.HTTPError('Http error') + resp.raise_for_status.side_effect = requests.HTTPError("Http error") assert _get_html_page(link, session=session) is None - assert 'Could not fetch URL http://localhost: Http error - skipping' in caplog.text + assert "Could not fetch URL http://localhost: Http error - skipping" in caplog.text def test_request_retries(caplog): caplog.set_level(logging.DEBUG) - link = Link('http://localhost') + link = Link("http://localhost") session = Mock(PipSession) - session.get.side_effect = requests.exceptions.RetryError('Retry error') + session.get.side_effect = requests.exceptions.RetryError("Retry error") assert _get_html_page(link, session=session) is None - assert 'Could not fetch URL http://localhost: Retry error - skipping' in caplog.text + assert "Could not fetch URL http://localhost: Retry error - skipping" in caplog.text def test_make_html_page(): - headers = {'Content-Type': 
'text/html; charset=UTF-8'} + headers = {"Content-Type": "text/html; charset=UTF-8"} response = pretend.stub( - content=b'', url='https://example.com/index.html', headers=headers + content=b"", url="https://example.com/index.html", headers=headers ) actual = _make_html_page(response) - assert actual.content == b'' - assert actual.encoding == 'UTF-8' - assert actual.url == 'https://example.com/index.html' + assert actual.content == b"" + assert actual.encoding == "UTF-8" + assert actual.url == "https://example.com/index.html" @pytest.mark.parametrize( @@ -347,7 +345,7 @@ def make_fake_html_response(url): """ ) - content = html.encode('utf-8') + content = html.encode("utf-8") return pretend.stub(content=content, url=url, headers={}) @@ -365,7 +363,7 @@ def test_get_html_page_directory_append_index(tmpdir): actual = _get_html_page(Link(dir_url), session=session) assert mock_func.mock_calls == [ mock.call(expected_url, session=session) - ], 'actual calls: {}'.format(mock_func.mock_calls) + ], "actual calls: {}".format(mock_func.mock_calls) assert actual.content == fake_response.content assert actual.encoding is None @@ -375,12 +373,12 @@ def test_get_html_page_directory_append_index(tmpdir): def test_remove_duplicate_links(): links = [ # We choose Links that will test that ordering is preserved. - Link('https://example.com/2'), - Link('https://example.com/1'), - Link('https://example.com/2'), + Link("https://example.com/2"), + Link("https://example.com/1"), + Link("https://example.com/2"), ] actual = _remove_duplicate_links(links) - assert actual == [Link('https://example.com/2'), Link('https://example.com/1')] + assert actual == [Link("https://example.com/2"), Link("https://example.com/1")] def test_group_locations__file_expand_dir(data): @@ -407,7 +405,7 @@ def test_group_locations__non_existing_path(): """ Test that a non-existing path is ignored. 
""" - files, urls = group_locations([os.path.join('this', 'doesnt', 'exist')]) + files, urls = group_locations([os.path.join("this", "doesnt", "exist")]) assert not urls and not files, "nothing should have been found" @@ -419,15 +417,15 @@ def check_links_include(links, names): for name in names: assert any( link.url.endswith(name) for link in links - ), 'name {!r} not among links: {}'.format(name, links) + ), "name {!r} not among links: {}".format(name, links) class TestLinkCollector(object): - @patch('pip._internal.collector._get_html_response') + @patch("pip._internal.collector._get_html_response") def test_collect_links(self, mock_get_html_response, caplog, data): caplog.set_level(logging.DEBUG) - expected_url = 'https://pypi.org/simple/twine/' + expected_url = "https://pypi.org/simple/twine/" fake_page = make_fake_html_response(expected_url) mock_get_html_response.return_value = fake_page @@ -438,7 +436,7 @@ def test_collect_links(self, mock_get_html_response, caplog, data): # is skipped. index_urls=[PyPI.simple_url, PyPI.simple_url], ) - actual = link_collector.collect_links('twine') + actual = link_collector.collect_links("twine") mock_get_html_response.assert_called_once_with( expected_url, session=link_collector.session @@ -446,17 +444,17 @@ def test_collect_links(self, mock_get_html_response, caplog, data): # Spot-check the CollectedLinks return value. 
assert len(actual.files) > 20 - check_links_include(actual.files, names=['simple-1.0.tar.gz']) + check_links_include(actual.files, names=["simple-1.0.tar.gz"]) assert len(actual.find_links) == 1 - check_links_include(actual.find_links, names=['packages']) + check_links_include(actual.find_links, names=["packages"]) actual_pages = actual.pages assert list(actual_pages) == [expected_url] actual_page_links = actual_pages[expected_url] assert len(actual_page_links) == 1 assert actual_page_links[0].url == ( - 'https://pypi.org/abc-1.0.tar.gz#md5=000000000' + "https://pypi.org/abc-1.0.tar.gz#md5=000000000" ) expected_message = dedent( @@ -465,5 +463,5 @@ def test_collect_links(self, mock_get_html_response, caplog, data): * https://pypi.org/simple/twine/""" ) assert caplog.record_tuples == [ - ('pip._internal.collector', logging.DEBUG, expected_message) + ("pip._internal.collector", logging.DEBUG, expected_message) ] diff --git a/tests/unit/test_command_install.py b/tests/unit/test_command_install.py index 37566e574d5..f2079c4db1e 100644 --- a/tests/unit/test_command_install.py +++ b/tests/unit/test_command_install.py @@ -24,32 +24,32 @@ def build(reqs, **kwargs): return (builder.build.mock_calls, build_failures) - @patch('pip._internal.commands.install.is_wheel_installed') + @patch("pip._internal.commands.install.is_wheel_installed") def test_build_wheels__wheel_installed(self, is_wheel_installed): is_wheel_installed.return_value = True mock_calls, build_failures = self.check_build_wheels( - pep517_requirements=['a', 'b'], legacy_requirements=['c', 'd'] + pep517_requirements=["a", "b"], legacy_requirements=["c", "d"] ) # Legacy requirements were built. assert mock_calls == [ - call(['a', 'b'], should_unpack=True), - call(['c', 'd'], should_unpack=True), + call(["a", "b"], should_unpack=True), + call(["c", "d"], should_unpack=True), ] # Legacy build failures are not included in the return value. 
- assert build_failures == ['a'] + assert build_failures == ["a"] - @patch('pip._internal.commands.install.is_wheel_installed') + @patch("pip._internal.commands.install.is_wheel_installed") def test_build_wheels__wheel_not_installed(self, is_wheel_installed): is_wheel_installed.return_value = False mock_calls, build_failures = self.check_build_wheels( - pep517_requirements=['a', 'b'], legacy_requirements=['c', 'd'] + pep517_requirements=["a", "b"], legacy_requirements=["c", "d"] ) # Legacy requirements were not built. - assert mock_calls == [call(['a', 'b'], should_unpack=True)] + assert mock_calls == [call(["a", "b"], should_unpack=True)] - assert build_failures == ['a'] + assert build_failures == ["a"] diff --git a/tests/unit/test_commands.py b/tests/unit/test_commands.py index 3d42312cc03..8f2dadd54ba 100644 --- a/tests/unit/test_commands.py +++ b/tests/unit/test_commands.py @@ -10,7 +10,7 @@ # These are the expected names of the commands whose classes inherit from # IndexGroupCommand. -EXPECTED_INDEX_GROUP_COMMANDS = ['download', 'install', 'list', 'wheel'] +EXPECTED_INDEX_GROUP_COMMANDS = ["download", "install", "list", "wheel"] def check_commands(pred, expected): @@ -19,7 +19,7 @@ def check_commands(pred, expected): """ commands = [create_command(name) for name in sorted(commands_dict)] actual = [command.name for command in commands if pred(command)] - assert actual == expected, 'actual: {}'.format(actual) + assert actual == expected, "actual: {}".format(actual) def test_commands_dict__order(): @@ -29,11 +29,11 @@ def test_commands_dict__order(): names = list(commands_dict) # A spot-check is sufficient to check that commands_dict encodes an # ordering. 
- assert names[0] == 'install' - assert names[-1] == 'help' + assert names[0] == "install" + assert names[-1] == "help" -@pytest.mark.parametrize('name', list(commands_dict)) +@pytest.mark.parametrize("name", list(commands_dict)) def test_create_command(name): """Test creating an instance of each available command.""" command = create_command(name) @@ -49,7 +49,7 @@ def test_session_commands(): def is_session_command(command): return isinstance(command, SessionCommandMixin) - expected = ['download', 'install', 'list', 'search', 'uninstall', 'wheel'] + expected = ["download", "install", "list", "search", "uninstall", "wheel"] check_commands(is_session_command, expected) @@ -66,14 +66,14 @@ def is_index_group_command(command): # Also check that the commands inheriting from IndexGroupCommand are # exactly the commands with the --no-index option. def has_option_no_index(command): - return command.parser.has_option('--no-index') + return command.parser.has_option("--no-index") check_commands(has_option_no_index, EXPECTED_INDEX_GROUP_COMMANDS) -@pytest.mark.parametrize('command_name', EXPECTED_INDEX_GROUP_COMMANDS) +@pytest.mark.parametrize("command_name", EXPECTED_INDEX_GROUP_COMMANDS) @pytest.mark.parametrize( - 'disable_pip_version_check, no_index, expected_called', + "disable_pip_version_check, no_index, expected_called", [ # pip_version_check() is only called when both # disable_pip_version_check and no_index are False. 
@@ -83,7 +83,7 @@ def has_option_no_index(command): (True, True, False), ], ) -@patch('pip._internal.cli.req_command.pip_version_check') +@patch("pip._internal.cli.req_command.pip_version_check") def test_index_group_handle_pip_version_check( mock_version_check, command_name, @@ -115,4 +115,4 @@ def test_requirement_commands(): def is_requirement_command(command): return isinstance(command, RequirementCommand) - check_commands(is_requirement_command, ['download', 'install', 'wheel']) + check_commands(is_requirement_command, ["download", "install", "wheel"]) diff --git a/tests/unit/test_compat.py b/tests/unit/test_compat.py index 853dab1c307..8d33b6d7fef 100644 --- a/tests/unit/test_compat.py +++ b/tests/unit/test_compat.py @@ -33,7 +33,7 @@ def test_get_path_uid_without_NOFOLLOW(monkeypatch): def test_get_path_uid_symlink(tmpdir): f = tmpdir.mkdir("symlink").joinpath("somefile") f.write_text("content") - fs = f + '_link' + fs = f + "_link" os.symlink(f, fs) with pytest.raises(OSError): get_path_uid(fs) @@ -45,73 +45,73 @@ def test_get_path_uid_symlink_without_NOFOLLOW(tmpdir, monkeypatch): monkeypatch.delattr("os.O_NOFOLLOW") f = tmpdir.mkdir("symlink").joinpath("somefile") f.write_text("content") - fs = f + '_link' + fs = f + "_link" os.symlink(f, fs) with pytest.raises(OSError): get_path_uid(fs) @pytest.mark.parametrize( - 'data, expected', + "data, expected", [ - ('abc', u'abc'), + ("abc", u"abc"), # Test text (unicode in Python 2) input. - (u'abc', u'abc'), + (u"abc", u"abc"), # Test text input with non-ascii characters. - (u'déf', u'déf'), + (u"déf", u"déf"), ], ) def test_str_to_display(data, expected): actual = str_to_display(data) assert actual == expected, ( # Show the encoding for easier troubleshooting. - 'encoding: {!r}'.format(locale.getpreferredencoding()) + "encoding: {!r}".format(locale.getpreferredencoding()) ) @pytest.mark.parametrize( - 'data, encoding, expected', + "data, encoding, expected", [ # Test str input with non-ascii characters. 
- ('déf', 'utf-8', u'déf'), + ("déf", "utf-8", u"déf"), # Test bytes input with non-ascii characters: - (u'déf'.encode('utf-8'), 'utf-8', u'déf'), + (u"déf".encode("utf-8"), "utf-8", u"déf"), # Test a Windows encoding. - (u'déf'.encode('cp1252'), 'cp1252', u'déf'), + (u"déf".encode("cp1252"), "cp1252", u"déf"), # Test a Windows encoding with incompatibly encoded text. - (u'déf'.encode('utf-8'), 'cp1252', u'déf'), + (u"déf".encode("utf-8"), "cp1252", u"déf"), ], ) def test_str_to_display__encoding(monkeypatch, data, encoding, expected): - monkeypatch.setattr(locale, 'getpreferredencoding', lambda: encoding) + monkeypatch.setattr(locale, "getpreferredencoding", lambda: encoding) actual = str_to_display(data) assert actual == expected, ( # Show the encoding for easier troubleshooting. - 'encoding: {!r}'.format(locale.getpreferredencoding()) + "encoding: {!r}".format(locale.getpreferredencoding()) ) def test_str_to_display__decode_error(monkeypatch, caplog): - monkeypatch.setattr(locale, 'getpreferredencoding', lambda: 'utf-8') + monkeypatch.setattr(locale, "getpreferredencoding", lambda: "utf-8") # Encode with an incompatible encoding. - data = u'ab'.encode('utf-16') + data = u"ab".encode("utf-16") actual = str_to_display(data) - assert actual == u'\\xff\\xfea\x00b\x00', ( + assert actual == u"\\xff\\xfea\x00b\x00", ( # Show the encoding for easier troubleshooting. 
- 'encoding: {!r}'.format(locale.getpreferredencoding()) + "encoding: {!r}".format(locale.getpreferredencoding()) ) assert len(caplog.records) == 1 record = caplog.records[0] - assert record.levelname == 'WARNING' - assert record.message == ('Bytes object does not appear to be encoded as utf-8') + assert record.levelname == "WARNING" + assert record.message == ("Bytes object does not appear to be encoded as utf-8") def test_console_to_str(monkeypatch): some_bytes = b"a\xE9\xC3\xE9b" - encodings = ('ascii', 'utf-8', 'iso-8859-1', 'iso-8859-5', 'koi8_r', 'cp850') + encodings = ("ascii", "utf-8", "iso-8859-1", "iso-8859-5", "koi8_r", "cp850") for e in encodings: - monkeypatch.setattr(locale, 'getpreferredencoding', lambda: e) + monkeypatch.setattr(locale, "getpreferredencoding", lambda: e) result = console_to_str(some_bytes) assert result.startswith("a") assert result.endswith("b") @@ -123,14 +123,14 @@ def test_console_to_str_warning(monkeypatch): def check_warning(msg, *args, **kwargs): assert msg.startswith("Subprocess output does not appear to be encoded as") - monkeypatch.setattr(locale, 'getpreferredencoding', lambda: 'utf-8') - monkeypatch.setattr(pip_compat.logger, 'warning', check_warning) + monkeypatch.setattr(locale, "getpreferredencoding", lambda: "utf-8") + monkeypatch.setattr(pip_compat.logger, "warning", check_warning) console_to_str(some_bytes) def test_to_native_str_type(): some_bytes = b"test\xE9 et approuv\xC3\xE9" - some_unicode = b"test\xE9 et approuv\xE9".decode('iso-8859-15') + some_unicode = b"test\xE9 et approuv\xE9".decode("iso-8859-15") assert isinstance(native_str(some_bytes, True), str) assert isinstance(native_str(some_unicode, True), str) diff --git a/tests/unit/test_download.py b/tests/unit/test_download.py index 459a9629e89..87665c1aff3 100644 --- a/tests/unit/test_download.py +++ b/tests/unit/test_download.py @@ -26,11 +26,7 @@ from pip._internal.utils.hashes import Hashes from pip._internal.utils.urls import path_to_url from 
tests.lib import create_file -from tests.lib.filesystem import ( - get_filelist, - make_socket_file, - make_unreadable_file, -) +from tests.lib.filesystem import get_filelist, make_socket_file, make_unreadable_file from tests.lib.path import Path @@ -54,11 +50,11 @@ def _fake_session_get(*args, **kwargs): try: unpack_http_url(link, temp_dir, download_dir=None, session=session) assert set(os.listdir(temp_dir)) == { - 'PKG-INFO', - 'setup.cfg', - 'setup.py', - 'simple', - 'simple.egg-info', + "PKG-INFO", + "setup.cfg", + "setup.py", + "simple", + "simple.egg-info", } finally: rmtree(temp_dir) @@ -75,14 +71,14 @@ def test_user_agent(): @pytest.mark.parametrize( - 'name, expected_like_ci', + "name, expected_like_ci", [ - ('BUILD_BUILDID', True), - ('BUILD_ID', True), - ('CI', True), - ('PIP_IS_CI', True), + ("BUILD_BUILDID", True), + ("BUILD_ID", True), + ("CI", True), + ("PIP_IS_CI", True), # Test a prefix substring of one of the variable names we use. - ('BUILD', False), + ("BUILD", False), ], ) def test_user_agent__ci(monkeypatch, name, expected_like_ci): @@ -99,7 +95,7 @@ def test_user_agent__ci(monkeypatch, name, expected_like_ci): assert '"ci":null' in user_agent assert '"ci":true' not in user_agent - monkeypatch.setenv(name, 'true') + monkeypatch.setenv(name, "true") user_agent = get_user_agent() assert ('"ci":true' in user_agent) == expected_like_ci assert ('"ci":null' in user_agent) == (not expected_like_ci) @@ -160,44 +156,44 @@ def register_hook(self, event_name, callback): self.hooks.setdefault(event_name, []).append(callback) -@patch('pip._internal.download.unpack_file') +@patch("pip._internal.download.unpack_file") def test_unpack_http_url_bad_downloaded_checksum(mock_unpack_file): """ If already-downloaded file has bad checksum, re-download. 
""" - base_url = 'http://www.example.com/somepackage.tgz' - contents = b'downloaded' - download_hash = hashlib.new('sha1', contents) - link = Link(base_url + '#sha1=' + download_hash.hexdigest()) + base_url = "http://www.example.com/somepackage.tgz" + contents = b"downloaded" + download_hash = hashlib.new("sha1", contents) + link = Link(base_url + "#sha1=" + download_hash.hexdigest()) session = Mock() session.get = Mock() response = session.get.return_value = MockResponse(contents) - response.headers = {'content-type': 'application/x-tar'} + response.headers = {"content-type": "application/x-tar"} response.url = base_url download_dir = mkdtemp() try: - downloaded_file = os.path.join(download_dir, 'somepackage.tgz') - create_file(downloaded_file, 'some contents') + downloaded_file = os.path.join(download_dir, "somepackage.tgz") + create_file(downloaded_file, "some contents") unpack_http_url( link, - 'location', + "location", download_dir=download_dir, session=session, - hashes=Hashes({'sha1': [download_hash.hexdigest()]}), + hashes=Hashes({"sha1": [download_hash.hexdigest()]}), ) # despite existence of downloaded file with bad hash, downloaded again session.get.assert_called_once_with( - 'http://www.example.com/somepackage.tgz', + "http://www.example.com/somepackage.tgz", headers={"Accept-Encoding": "identity"}, stream=True, ) # cached file is replaced with newly downloaded file with open(downloaded_file) as fh: - assert fh.read() == 'downloaded' + assert fh.read() == "downloaded" finally: rmtree(download_dir) @@ -206,12 +202,12 @@ def test_unpack_http_url_bad_downloaded_checksum(mock_unpack_file): @pytest.mark.parametrize( "filename, expected", [ - ('dir/file', 'file'), - ('../file', 'file'), - ('../../file', 'file'), - ('../', ''), - ('../..', '..'), - ('/', ''), + ("dir/file", "file"), + ("../file", "file"), + ("../../file", "file"), + ("../", ""), + ("../..", ".."), + ("/", ""), ], ) def test_sanitize_content_filename(filename, expected): @@ -224,12 +220,12 @@ 
def test_sanitize_content_filename(filename, expected): @pytest.mark.parametrize( "filename, win_expected, non_win_expected", [ - ('dir\\file', 'file', 'dir\\file'), - ('..\\file', 'file', '..\\file'), - ('..\\..\\file', 'file', '..\\..\\file'), - ('..\\', '', '..\\'), - ('..\\..', '..', '..\\..'), - ('\\', '', '\\'), + ("dir\\file", "file", "dir\\file"), + ("..\\file", "file", "..\\file"), + ("..\\..\\file", "file", "..\\..\\file"), + ("..\\", "", "..\\"), + ("..\\..", "..", "..\\.."), + ("\\", "", "\\"), ], ) def test_sanitize_content_filename__platform_dependent( @@ -238,7 +234,7 @@ def test_sanitize_content_filename__platform_dependent( """ Test inputs where the result is different for Windows and non-Windows. """ - if sys.platform == 'win32': + if sys.platform == "win32": expected = win_expected else: expected = non_win_expected @@ -247,7 +243,7 @@ def test_sanitize_content_filename__platform_dependent( @pytest.mark.parametrize( "content_disposition, default_filename, expected", - [('attachment;filename="../file"', 'df', 'file')], + [('attachment;filename="../file"', "df", "file")], ) def test_parse_content_disposition(content_disposition, default_filename, expected): actual = parse_content_disposition(content_disposition, default_filename) @@ -259,8 +255,8 @@ def test_download_http_url__no_directory_traversal(tmpdir): Test that directory traversal doesn't happen on download when the Content-Disposition header contains a filename with a ".." path part. """ - mock_url = 'http://www.example.com/whatever.tgz' - contents = b'downloaded' + mock_url = "http://www.example.com/whatever.tgz" + contents = b"downloaded" link = Link(mock_url) session = Mock() @@ -269,19 +265,19 @@ def test_download_http_url__no_directory_traversal(tmpdir): resp.headers = { # Set the content-type to a random value to prevent # mimetypes.guess_extension from guessing the extension. 
- 'content-type': 'random', - 'content-disposition': 'attachment;filename="../out_dir_file"', + "content-type": "random", + "content-disposition": 'attachment;filename="../out_dir_file"', } session.get.return_value = resp - download_dir = tmpdir.joinpath('download') + download_dir = tmpdir.joinpath("download") os.mkdir(download_dir) file_path, content_type = _download_http_url( - link, session, download_dir, hashes=None, progress_bar='on' + link, session, download_dir, hashes=None, progress_bar="on" ) # The file should be downloaded to download_dir. actual = os.listdir(download_dir) - assert actual == ['out_dir_file'] + assert actual == ["out_dir_file"] @pytest.fixture @@ -319,7 +315,7 @@ def test_copy_source_tree_with_socket(clean_project, tmpdir, caplog): # Warning should have been logged. assert len(caplog.records) == 1 record = caplog.records[0] - assert record.levelname == 'WARNING' + assert record.levelname == "WARNING" assert socket_path in record.message @@ -363,8 +359,8 @@ def test_copy_source_tree_with_unreadable_dir_fails(clean_project, tmpdir): class Test_unpack_file_url(object): def prep(self, tmpdir, data): - self.build_dir = tmpdir.joinpath('build') - self.download_dir = tmpdir.joinpath('download') + self.build_dir = tmpdir.joinpath("build") + self.download_dir = tmpdir.joinpath("download") os.mkdir(self.build_dir) os.mkdir(self.download_dir) self.dist_file = "simple-1.0.tar.gz" @@ -377,13 +373,13 @@ def prep(self, tmpdir, data): def test_unpack_file_url_no_download(self, tmpdir, data): self.prep(tmpdir, data) unpack_file_url(self.dist_url, self.build_dir) - assert os.path.isdir(os.path.join(self.build_dir, 'simple')) + assert os.path.isdir(os.path.join(self.build_dir, "simple")) assert not os.path.isfile(os.path.join(self.download_dir, self.dist_file)) def test_unpack_file_url_and_download(self, tmpdir, data): self.prep(tmpdir, data) unpack_file_url(self.dist_url, self.build_dir, download_dir=self.download_dir) - assert 
os.path.isdir(os.path.join(self.build_dir, 'simple')) + assert os.path.isdir(os.path.join(self.build_dir, "simple")) assert os.path.isfile(os.path.join(self.download_dir, self.dist_file)) def test_unpack_file_url_download_already_exists(self, tmpdir, data, monkeypatch): @@ -392,12 +388,12 @@ def test_unpack_file_url_download_already_exists(self, tmpdir, data, monkeypatch # so we can tell it didn't get overwritten dest_file = os.path.join(self.download_dir, self.dist_file) copy(self.dist_path2, dest_file) - with open(self.dist_path2, 'rb') as f: + with open(self.dist_path2, "rb") as f: dist_path2_md5 = hashlib.md5(f.read()).hexdigest() unpack_file_url(self.dist_url, self.build_dir, download_dir=self.download_dir) # our hash should be the same, i.e. not overwritten by simple-1.0 hash - with open(dest_file, 'rb') as f: + with open(dest_file, "rb") as f: assert dist_path2_md5 == hashlib.md5(f.read()).hexdigest() def test_unpack_file_url_bad_hash(self, tmpdir, data, monkeypatch): @@ -405,10 +401,10 @@ def test_unpack_file_url_bad_hash(self, tmpdir, data, monkeypatch): Test when the file url hash fragment is wrong """ self.prep(tmpdir, data) - url = '{}#md5=bogus'.format(self.dist_url.url) + url = "{}#md5=bogus".format(self.dist_url.url) dist_url = Link(url) with pytest.raises(HashMismatch): - unpack_file_url(dist_url, self.build_dir, hashes=Hashes({'md5': ['bogus']})) + unpack_file_url(dist_url, self.build_dir, hashes=Hashes({"md5": ["bogus"]})) def test_unpack_file_url_download_bad_hash(self, tmpdir, data, monkeypatch): """ @@ -422,25 +418,25 @@ def test_unpack_file_url_download_bad_hash(self, tmpdir, data, monkeypatch): dest_file = os.path.join(self.download_dir, self.dist_file) copy(self.dist_path2, dest_file) - with open(self.dist_path, 'rb') as f: + with open(self.dist_path, "rb") as f: dist_path_md5 = hashlib.md5(f.read()).hexdigest() - with open(dest_file, 'rb') as f: + with open(dest_file, "rb") as f: dist_path2_md5 = hashlib.md5(f.read()).hexdigest() assert 
dist_path_md5 != dist_path2_md5 - url = '{}#md5={}'.format(self.dist_url.url, dist_path_md5) + url = "{}#md5={}".format(self.dist_url.url, dist_path_md5) dist_url = Link(url) unpack_file_url( dist_url, self.build_dir, download_dir=self.download_dir, - hashes=Hashes({'md5': [dist_path_md5]}), + hashes=Hashes({"md5": [dist_path_md5]}), ) # confirm hash is for simple1-1.0 # the previous bad download has been removed - with open(dest_file, 'rb') as f: + with open(dest_file, "rb") as f: assert hashlib.md5(f.read()).hexdigest() == dist_path_md5 def test_unpack_file_url_thats_a_dir(self, tmpdir, data): @@ -448,17 +444,17 @@ def test_unpack_file_url_thats_a_dir(self, tmpdir, data): dist_path = data.packages.joinpath("FSPkg") dist_url = Link(path_to_url(dist_path)) unpack_file_url(dist_url, self.build_dir, download_dir=self.download_dir) - assert os.path.isdir(os.path.join(self.build_dir, 'fspkg')) + assert os.path.isdir(os.path.join(self.build_dir, "fspkg")) -@pytest.mark.parametrize('exclude_dir', ['.nox', '.tox']) +@pytest.mark.parametrize("exclude_dir", [".nox", ".tox"]) def test_unpack_file_url_excludes_expected_dirs(tmpdir, exclude_dir): - src_dir = tmpdir / 'src' - dst_dir = tmpdir / 'dst' - src_included_file = src_dir.joinpath('file.txt') + src_dir = tmpdir / "src" + dst_dir = tmpdir / "dst" + src_included_file = src_dir.joinpath("file.txt") src_excluded_dir = src_dir.joinpath(exclude_dir) - src_excluded_file = src_dir.joinpath(exclude_dir, 'file.txt') - src_included_dir = src_dir.joinpath('subdir', exclude_dir) + src_excluded_file = src_dir.joinpath(exclude_dir, "file.txt") + src_included_dir = src_dir.joinpath("subdir", exclude_dir) # set up source directory src_excluded_dir.mkdir(parents=True) @@ -466,10 +462,10 @@ def test_unpack_file_url_excludes_expected_dirs(tmpdir, exclude_dir): src_included_file.touch() src_excluded_file.touch() - dst_included_file = dst_dir.joinpath('file.txt') + dst_included_file = dst_dir.joinpath("file.txt") dst_excluded_dir = 
dst_dir.joinpath(exclude_dir) - dst_excluded_file = dst_dir.joinpath(exclude_dir, 'file.txt') - dst_included_dir = dst_dir.joinpath('subdir', exclude_dir) + dst_excluded_file = dst_dir.joinpath(exclude_dir, "file.txt") + dst_included_dir = dst_dir.joinpath("subdir", exclude_dir) src_link = Link(path_to_url(src_dir)) unpack_file_url(src_link, dst_dir, download_dir=None) @@ -513,46 +509,46 @@ def test_insecure_host_adapter(self, tmpdir): def test_add_trusted_host(self): # Leave a gap to test how the ordering is affected. - trusted_hosts = ['host1', 'host3'] + trusted_hosts = ["host1", "host3"] session = PipSession(trusted_hosts=trusted_hosts) insecure_adapter = session._insecure_adapter - prefix2 = 'https://host2/' - prefix3 = 'https://host3/' - prefix3_wildcard = 'https://host3:' + prefix2 = "https://host2/" + prefix3 = "https://host3/" + prefix3_wildcard = "https://host3:" # Confirm some initial conditions as a baseline. - assert session.pip_trusted_origins == [('host1', None), ('host3', None)] + assert session.pip_trusted_origins == [("host1", None), ("host3", None)] assert session.adapters[prefix3] is insecure_adapter assert session.adapters[prefix3_wildcard] is insecure_adapter assert prefix2 not in session.adapters # Test adding a new host. - session.add_trusted_host('host2') + session.add_trusted_host("host2") assert session.pip_trusted_origins == [ - ('host1', None), - ('host3', None), - ('host2', None), + ("host1", None), + ("host3", None), + ("host2", None), ] # Check that prefix3 is still present. assert session.adapters[prefix3] is insecure_adapter assert session.adapters[prefix2] is insecure_adapter # Test that adding the same host doesn't create a duplicate. 
- session.add_trusted_host('host3') + session.add_trusted_host("host3") assert session.pip_trusted_origins == [ - ('host1', None), - ('host3', None), - ('host2', None), - ], 'actual: {}'.format(session.pip_trusted_origins) + ("host1", None), + ("host3", None), + ("host2", None), + ], "actual: {}".format(session.pip_trusted_origins) - session.add_trusted_host('host4:8080') - prefix4 = 'https://host4:8080/' + session.add_trusted_host("host4:8080") + prefix4 = "https://host4:8080/" assert session.pip_trusted_origins == [ - ('host1', None), - ('host3', None), - ('host2', None), - ('host4', 8080), + ("host1", None), + ("host3", None), + ("host2", None), + ("host4", 8080), ] assert session.adapters[prefix4] is insecure_adapter @@ -560,36 +556,36 @@ def test_add_trusted_host__logging(self, caplog): """ Test logging when add_trusted_host() is called. """ - trusted_hosts = ['host0', 'host1'] + trusted_hosts = ["host0", "host1"] session = PipSession(trusted_hosts=trusted_hosts) with caplog.at_level(logging.INFO): # Test adding an existing host. - session.add_trusted_host('host1', source='somewhere') - session.add_trusted_host('host2') + session.add_trusted_host("host1", source="somewhere") + session.add_trusted_host("host2") # Test calling add_trusted_host() on the same host twice. - session.add_trusted_host('host2') + session.add_trusted_host("host2") actual = [(r.levelname, r.message) for r in caplog.records] # Observe that "host0" isn't included in the logs. 
expected = [ - ('INFO', "adding trusted host: 'host1' (from somewhere)"), - ('INFO', "adding trusted host: 'host2'"), - ('INFO', "adding trusted host: 'host2'"), + ("INFO", "adding trusted host: 'host1' (from somewhere)"), + ("INFO", "adding trusted host: 'host2'"), + ("INFO", "adding trusted host: 'host2'"), ] assert actual == expected def test_iter_secure_origins(self): - trusted_hosts = ['host1', 'host2', 'host3:8080'] + trusted_hosts = ["host1", "host2", "host3:8080"] session = PipSession(trusted_hosts=trusted_hosts) actual = list(session.iter_secure_origins()) assert len(actual) == 9 # Spot-check that SECURE_ORIGINS is included. - assert actual[0] == ('https', '*', '*') + assert actual[0] == ("https", "*", "*") assert actual[-3:] == [ - ('*', 'host1', '*'), - ('*', 'host2', '*'), - ('*', 'host3', 8080), + ("*", "host1", "*"), + ("*", "host2", "*"), + ("*", "host3", 8080), ] def test_iter_secure_origins__trusted_hosts_empty(self): @@ -601,10 +597,10 @@ def test_iter_secure_origins__trusted_hosts_empty(self): actual = list(session.iter_secure_origins()) assert len(actual) == 6 # Spot-check that SECURE_ORIGINS is included. 
- assert actual[0] == ('https', '*', '*') + assert actual[0] == ("https", "*", "*") @pytest.mark.parametrize( - 'location, trusted, expected', + "location, trusted, expected", [ ("http://pypi.org/something", [], False), ("https://pypi.org/something", [], True), @@ -644,5 +640,5 @@ def warning(self, *args, **kwargs): assert len(log_records) == 1 actual_level, actual_message = log_records[0] - assert actual_level == 'WARNING' - assert 'is not a trusted or secure host' in actual_message + assert actual_level == "WARNING" + assert "is not a trusted or secure host" in actual_message diff --git a/tests/unit/test_finder.py b/tests/unit/test_finder.py index 3f28c189972..5dcd19ceb83 100644 --- a/tests/unit/test_finder.py +++ b/tests/unit/test_finder.py @@ -8,10 +8,7 @@ import pip._internal.pep425tags import pip._internal.wheel -from pip._internal.exceptions import ( - BestVersionAlreadyInstalled, - DistributionNotFound, -) +from pip._internal.exceptions import BestVersionAlreadyInstalled, DistributionNotFound from pip._internal.index import ( CandidateEvaluator, InstallationCandidate, @@ -60,8 +57,8 @@ def test_no_partial_name_match(data): def test_tilde(): """Finder can accept a path with ~ in it and will normalize it.""" - with patch('pip._internal.collector.os.path.exists', return_value=True): - finder = make_test_finder(find_links=['~/python-pkgs']) + with patch("pip._internal.collector.os.path.exists", return_value=True): + finder = make_test_finder(find_links=["~/python-pkgs"]) req = install_req_from_line("gmpy") with pytest.raises(DistributionNotFound): finder.find_requirement(req, False) @@ -79,7 +76,7 @@ def test_duplicates_sort_ok(data): def test_finder_detects_latest_find_links(data): """Test PackageFinder detects latest using find-links""" - req = install_req_from_line('simple', None) + req = install_req_from_line("simple", None) finder = make_test_finder(find_links=[data.find_links]) link = finder.find_requirement(req, False) assert 
link.url.endswith("simple-3.0.tar.gz") @@ -87,7 +84,7 @@ def test_finder_detects_latest_find_links(data): def test_incorrect_case_file_index(data): """Test PackageFinder detects latest using wrong case""" - req = install_req_from_line('dinner', None) + req = install_req_from_line("dinner", None) finder = make_test_finder(index_urls=[data.find_links3]) link = finder.find_requirement(req, False) assert link.url.endswith("Dinner-2.0.tar.gz") @@ -96,7 +93,7 @@ def test_incorrect_case_file_index(data): @pytest.mark.network def test_finder_detects_latest_already_satisfied_find_links(data): """Test PackageFinder detects latest already satisfied using find-links""" - req = install_req_from_line('simple', None) + req = install_req_from_line("simple", None) # the latest simple in local pkgs is 3.0 latest_version = "3.0" satisfied_by = Mock( @@ -114,7 +111,7 @@ def test_finder_detects_latest_already_satisfied_find_links(data): @pytest.mark.network def test_finder_detects_latest_already_satisfied_pypi_links(): """Test PackageFinder detects latest already satisfied using pypi links""" - req = install_req_from_line('initools', None) + req = install_req_from_line("initools", None) # the latest initools on PyPI is 0.3.1 latest_version = "0.3.1" satisfied_by = Mock( @@ -142,7 +139,7 @@ def test_skip_invalid_wheel_link(self, caplog, data): with pytest.raises(DistributionNotFound): finder.find_requirement(req, True) - assert 'Skipping link: invalid wheel filename:' in caplog.text + assert "Skipping link: invalid wheel filename:" in caplog.text def test_not_find_wheel_not_supported(self, data, monkeypatch): """ @@ -166,7 +163,7 @@ def test_find_wheel_supported(self, data, monkeypatch): monkeypatch.setattr( pip._internal.pep425tags, "get_supported", - lambda **kw: [('py2', 'none', 'any')], + lambda **kw: [("py2", "none", "any")], ) req = install_req_from_line("simple.dist") @@ -189,7 +186,7 @@ def test_existing_over_wheel_priority(self, data): Test existing install has priority over 
wheels. `test_link_sorting` also covers this at a lower level """ - req = install_req_from_line('priority', None) + req = install_req_from_line("priority", None) latest_version = "1.0" satisfied_by = Mock( location="/path", @@ -207,22 +204,22 @@ def test_link_sorting(self): Test link sorting """ links = [ - InstallationCandidate("simple", "2.0", Link('simple-2.0.tar.gz')), + InstallationCandidate("simple", "2.0", Link("simple-2.0.tar.gz")), InstallationCandidate( - "simple", "1.0", Link('simple-1.0-pyT-none-TEST.whl') + "simple", "1.0", Link("simple-1.0-pyT-none-TEST.whl") ), - InstallationCandidate("simple", '1.0', Link('simple-1.0-pyT-TEST-any.whl')), - InstallationCandidate("simple", '1.0', Link('simple-1.0-pyT-none-any.whl')), - InstallationCandidate("simple", '1.0', Link('simple-1.0.tar.gz')), + InstallationCandidate("simple", "1.0", Link("simple-1.0-pyT-TEST-any.whl")), + InstallationCandidate("simple", "1.0", Link("simple-1.0-pyT-none-any.whl")), + InstallationCandidate("simple", "1.0", Link("simple-1.0.tar.gz")), ] valid_tags = [ - ('pyT', 'none', 'TEST'), - ('pyT', 'TEST', 'any'), - ('pyT', 'none', 'any'), + ("pyT", "none", "TEST"), + ("pyT", "TEST", "any"), + ("pyT", "none", "any"), ] specifier = SpecifierSet() evaluator = CandidateEvaluator( - 'my-project', supported_tags=valid_tags, specifier=specifier + "my-project", supported_tags=valid_tags, specifier=specifier ) sort_key = evaluator._sort_key results = sorted(links, key=sort_key, reverse=True) @@ -243,7 +240,7 @@ def test_link_sorting_wheels_with_build_tags(self): "simplewheel", "1.0", Link("simplewheel-1.0-py2.py3-none-any.whl") ), ] - candidate_evaluator = CandidateEvaluator.create('my-project') + candidate_evaluator = CandidateEvaluator.create("my-project") sort_key = candidate_evaluator._sort_key results = sorted(links, key=sort_key, reverse=True) results2 = sorted(reversed(links), key=sort_key, reverse=True) @@ -252,15 +249,15 @@ def test_link_sorting_wheels_with_build_tags(self): def 
test_finder_priority_file_over_page(data): """Test PackageFinder prefers file links over equivalent page links""" - req = install_req_from_line('gmpy==1.15', None) + req = install_req_from_line("gmpy==1.15", None) finder = make_test_finder( find_links=[data.find_links], index_urls=["http://pypi.org/simple/"] ) all_versions = finder.find_all_candidates(req.name) # 1 file InstallationCandidate followed by all https ones - assert all_versions[0].link.scheme == 'file' + assert all_versions[0].link.scheme == "file" assert all( - version.link.scheme == 'https' for version in all_versions[1:] + version.link.scheme == "https" for version in all_versions[1:] ), all_versions link = finder.find_requirement(req, False) @@ -269,27 +266,27 @@ def test_finder_priority_file_over_page(data): def test_finder_priority_nonegg_over_eggfragments(): """Test PackageFinder prefers non-egg links over "#egg=" links""" - req = install_req_from_line('bar==1.0', None) - links = ['http://foo/bar.py#egg=bar-1.0', 'http://foo/bar-1.0.tar.gz'] + req = install_req_from_line("bar==1.0", None) + links = ["http://foo/bar.py#egg=bar-1.0", "http://foo/bar-1.0.tar.gz"] finder = make_no_network_finder(links) all_versions = finder.find_all_candidates(req.name) - assert all_versions[0].link.url.endswith('tar.gz') - assert all_versions[1].link.url.endswith('#egg=bar-1.0') + assert all_versions[0].link.url.endswith("tar.gz") + assert all_versions[1].link.url.endswith("#egg=bar-1.0") link = finder.find_requirement(req, False) - assert link.url.endswith('tar.gz') + assert link.url.endswith("tar.gz") links.reverse() finder = make_no_network_finder(links) all_versions = finder.find_all_candidates(req.name) - assert all_versions[0].link.url.endswith('tar.gz') - assert all_versions[1].link.url.endswith('#egg=bar-1.0') + assert all_versions[0].link.url.endswith("tar.gz") + assert all_versions[1].link.url.endswith("#egg=bar-1.0") link = finder.find_requirement(req, False) - assert link.url.endswith('tar.gz') + assert 
link.url.endswith("tar.gz") def test_finder_only_installs_stable_releases(data): @@ -333,11 +330,11 @@ def test_finder_only_installs_data_require(data): finder = make_test_finder(index_urls=[data.index_url("datarequire")]) links = finder.find_all_candidates("fakepackage") - expected = ['1.0.0', '9.9.9'] + expected = ["1.0.0", "9.9.9"] if (2, 7) < sys.version_info < (3,): - expected.append('2.7.0') + expected.append("2.7.0") elif sys.version_info > (3, 3): - expected.append('3.3.0') + expected.append("3.3.0") assert {str(v.version) for v in links} == set(expected) @@ -407,43 +404,43 @@ class TestLinkEvaluator(object): def make_test_link_evaluator(self, formats): target_python = TargetPython() return LinkEvaluator( - project_name='pytest', - canonical_name='pytest', + project_name="pytest", + canonical_name="pytest", formats=formats, target_python=target_python, allow_yanked=True, ) @pytest.mark.parametrize( - 'url, expected_version', + "url, expected_version", [ - ('http:/yo/pytest-1.0.tar.gz', '1.0'), - ('http:/yo/pytest-1.0-py2.py3-none-any.whl', '1.0'), + ("http:/yo/pytest-1.0.tar.gz", "1.0"), + ("http:/yo/pytest-1.0-py2.py3-none-any.whl", "1.0"), ], ) def test_evaluate_link__match(self, url, expected_version): """Test that 'pytest' archives match for 'pytest'""" link = Link(url) - evaluator = self.make_test_link_evaluator(formats=['source', 'binary']) + evaluator = self.make_test_link_evaluator(formats=["source", "binary"]) actual = evaluator.evaluate_link(link) assert actual == (True, expected_version) @pytest.mark.parametrize( - 'url, expected_msg', + "url, expected_msg", [ # TODO: Uncomment this test case when #1217 is fixed. 
# 'http:/yo/pytest-xdist-1.0.tar.gz', - ('http:/yo/pytest2-1.0.tar.gz', 'Missing project version for pytest'), + ("http:/yo/pytest2-1.0.tar.gz", "Missing project version for pytest"), ( - 'http:/yo/pytest_xdist-1.0-py2.py3-none-any.whl', - 'wrong project name (not pytest)', + "http:/yo/pytest_xdist-1.0-py2.py3-none-any.whl", + "wrong project name (not pytest)", ), ], ) def test_evaluate_link__substring_fails(self, url, expected_msg): """Test that 'pytest archives won't match for 'pytest'.""" link = Link(url) - evaluator = self.make_test_link_evaluator(formats=['source', 'binary']) + evaluator = self.make_test_link_evaluator(formats=["source", "binary"]) actual = evaluator.evaluate_link(link) assert actual == (False, expected_msg) @@ -451,25 +448,25 @@ def test_evaluate_link__substring_fails(self, url, expected_msg): def test_find_all_candidates_nothing(): """Find nothing without anything""" finder = make_test_finder() - assert not finder.find_all_candidates('pip') + assert not finder.find_all_candidates("pip") def test_find_all_candidates_find_links(data): finder = make_test_finder(find_links=[data.find_links]) - versions = finder.find_all_candidates('simple') - assert [str(v.version) for v in versions] == ['3.0', '2.0', '1.0'] + versions = finder.find_all_candidates("simple") + assert [str(v.version) for v in versions] == ["3.0", "2.0", "1.0"] def test_find_all_candidates_index(data): - finder = make_test_finder(index_urls=[data.index_url('simple')]) - versions = finder.find_all_candidates('simple') - assert [str(v.version) for v in versions] == ['1.0'] + finder = make_test_finder(index_urls=[data.index_url("simple")]) + versions = finder.find_all_candidates("simple") + assert [str(v.version) for v in versions] == ["1.0"] def test_find_all_candidates_find_links_and_index(data): finder = make_test_finder( - find_links=[data.find_links], index_urls=[data.index_url('simple')] + find_links=[data.find_links], index_urls=[data.index_url("simple")] ) - versions = 
finder.find_all_candidates('simple') + versions = finder.find_all_candidates("simple") # first the find-links versions then the page versions - assert [str(v.version) for v in versions] == ['3.0', '2.0', '1.0', '1.0'] + assert [str(v.version) for v in versions] == ["3.0", "2.0", "1.0", "1.0"] diff --git a/tests/unit/test_format_control.py b/tests/unit/test_format_control.py index 6dcf77dfb70..841337a426f 100644 --- a/tests/unit/test_format_control.py +++ b/tests/unit/test_format_control.py @@ -7,7 +7,7 @@ class SimpleCommand(Command): def __init__(self): - super(SimpleCommand, self).__init__('fake', 'fake summary') + super(SimpleCommand, self).__init__("fake", "fake summary") self.cmd_opts.add_option(cmdoptions.no_binary()) self.cmd_opts.add_option(cmdoptions.only_binary()) @@ -17,36 +17,36 @@ def run(self, options, args): def test_no_binary_overrides(): cmd = SimpleCommand() - cmd.main(['fake', '--only-binary=:all:', '--no-binary=fred']) - format_control = FormatControl({'fred'}, {':all:'}) + cmd.main(["fake", "--only-binary=:all:", "--no-binary=fred"]) + format_control = FormatControl({"fred"}, {":all:"}) assert cmd.options.format_control == format_control def test_only_binary_overrides(): cmd = SimpleCommand() - cmd.main(['fake', '--no-binary=:all:', '--only-binary=fred']) - format_control = FormatControl({':all:'}, {'fred'}) + cmd.main(["fake", "--no-binary=:all:", "--only-binary=fred"]) + format_control = FormatControl({":all:"}, {"fred"}) assert cmd.options.format_control == format_control def test_none_resets(): cmd = SimpleCommand() - cmd.main(['fake', '--no-binary=:all:', '--no-binary=:none:']) + cmd.main(["fake", "--no-binary=:all:", "--no-binary=:none:"]) format_control = FormatControl(set(), set()) assert cmd.options.format_control == format_control def test_none_preserves_other_side(): cmd = SimpleCommand() - cmd.main(['fake', '--no-binary=:all:', '--only-binary=fred', '--no-binary=:none:']) - format_control = FormatControl(set(), {'fred'}) + 
cmd.main(["fake", "--no-binary=:all:", "--only-binary=fred", "--no-binary=:none:"]) + format_control = FormatControl(set(), {"fred"}) assert cmd.options.format_control == format_control def test_comma_separated_values(): cmd = SimpleCommand() - cmd.main(['fake', '--no-binary=1,2,3']) - format_control = FormatControl({'1', '2', '3'}, set()) + cmd.main(["fake", "--no-binary=1,2,3"]) + format_control = FormatControl({"1", "2", "3"}, set()) assert cmd.options.format_control == format_control diff --git a/tests/unit/test_index.py b/tests/unit/test_index.py index 8b8fb6cf4b9..ee4adbe78c5 100644 --- a/tests/unit/test_index.py +++ b/tests/unit/test_index.py @@ -27,29 +27,29 @@ def make_mock_candidate(version, yanked_reason=None, hex_digest=None): - url = 'https://example.com/pkg-{}.tar.gz'.format(version) + url = "https://example.com/pkg-{}.tar.gz".format(version) if hex_digest is not None: assert len(hex_digest) == 64 - url += '#sha256={}'.format(hex_digest) + url += "#sha256={}".format(hex_digest) link = Link(url, yanked_reason=yanked_reason) - candidate = InstallationCandidate('mypackage', version, link) + candidate = InstallationCandidate("mypackage", version, link) return candidate @pytest.mark.parametrize( - 'requires_python, expected', + "requires_python, expected", [ - ('== 3.6.4', False), - ('== 3.6.5', True), + ("== 3.6.4", False), + ("== 3.6.5", True), # Test an invalid Requires-Python value. 
- ('invalid', True), + ("invalid", True), ], ) def test_check_link_requires_python(requires_python, expected): version_info = (3, 6, 5) - link = Link('https://example.com', requires_python=requires_python) + link = Link("https://example.com", requires_python=requires_python) actual = _check_link_requires_python(link, version_info) assert actual == expected @@ -62,13 +62,13 @@ def check_caplog(caplog, expected_level, expected_message): @pytest.mark.parametrize( - 'ignore_requires_python, expected', + "ignore_requires_python, expected", [ ( None, ( False, - 'DEBUG', + "DEBUG", "Link requires a different Python (3.6.5 not in: '== 3.6.4'): " "https://example.com", ), @@ -77,7 +77,7 @@ def check_caplog(caplog, expected_level, expected_message): True, ( True, - 'DEBUG', + "DEBUG", "Ignoring failed Requires-Python check (3.6.5 not in: '== 3.6.4') " "for link: https://example.com", ), @@ -91,7 +91,7 @@ def test_check_link_requires_python__incompatible_python( Test an incompatible Python. """ expected_return, expected_level, expected_message = expected - link = Link('https://example.com', requires_python='== 3.6.4') + link = Link("https://example.com", requires_python="== 3.6.4") caplog.set_level(logging.DEBUG) actual = _check_link_requires_python( link, version_info=(3, 6, 5), ignore_requires_python=ignore_requires_python @@ -105,7 +105,7 @@ def test_check_link_requires_python__invalid_requires(caplog): """ Test the log message for an invalid Requires-Python. 
""" - link = Link('https://example.com', requires_python='invalid') + link = Link("https://example.com", requires_python="invalid") caplog.set_level(logging.DEBUG) actual = _check_link_requires_python(link, version_info=(3, 6, 5)) assert actual @@ -113,62 +113,62 @@ def test_check_link_requires_python__invalid_requires(caplog): expected_message = ( "Ignoring invalid Requires-Python ('invalid') for link: https://example.com" ) - check_caplog(caplog, 'DEBUG', expected_message) + check_caplog(caplog, "DEBUG", expected_message) class TestLinkEvaluator: @pytest.mark.parametrize( - 'py_version_info,ignore_requires_python,expected', + "py_version_info,ignore_requires_python,expected", [ - ((3, 6, 5), None, (True, '1.12')), + ((3, 6, 5), None, (True, "1.12")), # Test an incompatible Python. ((3, 6, 4), None, (False, None)), # Test an incompatible Python with ignore_requires_python=True. - ((3, 6, 4), True, (True, '1.12')), + ((3, 6, 4), True, (True, "1.12")), ], ) def test_evaluate_link(self, py_version_info, ignore_requires_python, expected): target_python = TargetPython(py_version_info=py_version_info) evaluator = LinkEvaluator( - project_name='twine', - canonical_name='twine', - formats={'source'}, + project_name="twine", + canonical_name="twine", + formats={"source"}, target_python=target_python, allow_yanked=True, ignore_requires_python=ignore_requires_python, ) - link = Link('https://example.com/#egg=twine-1.12', requires_python='== 3.6.5') + link = Link("https://example.com/#egg=twine-1.12", requires_python="== 3.6.5") actual = evaluator.evaluate_link(link) assert actual == expected @pytest.mark.parametrize( - 'yanked_reason, allow_yanked, expected', + "yanked_reason, allow_yanked, expected", [ - (None, True, (True, '1.12')), - (None, False, (True, '1.12')), - ('', True, (True, '1.12')), - ('', False, (False, 'yanked for reason: ')), - ('bad metadata', True, (True, '1.12')), - ('bad metadata', False, (False, 'yanked for reason: bad metadata')), + (None, True, (True, 
"1.12")), + (None, False, (True, "1.12")), + ("", True, (True, "1.12")), + ("", False, (False, "yanked for reason: ")), + ("bad metadata", True, (True, "1.12")), + ("bad metadata", False, (False, "yanked for reason: bad metadata")), # Test a unicode string with a non-ascii character. - (u'curly quote: \u2018', True, (True, '1.12')), + (u"curly quote: \u2018", True, (True, "1.12")), ( - u'curly quote: \u2018', + u"curly quote: \u2018", False, - (False, u'yanked for reason: curly quote: \u2018'), + (False, u"yanked for reason: curly quote: \u2018"), ), ], ) def test_evaluate_link__allow_yanked(self, yanked_reason, allow_yanked, expected): target_python = TargetPython(py_version_info=(3, 6, 4)) evaluator = LinkEvaluator( - project_name='twine', - canonical_name='twine', - formats={'source'}, + project_name="twine", + canonical_name="twine", + formats={"source"}, target_python=target_python, allow_yanked=allow_yanked, ) - link = Link('https://example.com/#egg=twine-1.12', yanked_reason=yanked_reason) + link = Link("https://example.com/#egg=twine-1.12", yanked_reason=yanked_reason) actual = evaluator.evaluate_link(link) assert actual == expected @@ -180,37 +180,37 @@ def test_evaluate_link__incompatible_wheel(self): # Set the valid tags to an empty list to make sure nothing matches. 
target_python._valid_tags = [] evaluator = LinkEvaluator( - project_name='sample', - canonical_name='sample', - formats={'binary'}, + project_name="sample", + canonical_name="sample", + formats={"binary"}, target_python=target_python, allow_yanked=True, ) - link = Link('https://example.com/sample-1.0-py2.py3-none-any.whl') + link = Link("https://example.com/sample-1.0-py2.py3-none-any.whl") actual = evaluator.evaluate_link(link) expected = (False, "none of the wheel's tags match: py2-none-any, py3-none-any") assert actual == expected @pytest.mark.parametrize( - 'hex_digest, expected_versions', + "hex_digest, expected_versions", [ - (None, ['1.0', '1.1', '1.2']), - (64 * 'a', ['1.0', '1.1']), - (64 * 'b', ['1.0', '1.2']), - (64 * 'c', ['1.0', '1.1', '1.2']), + (None, ["1.0", "1.1", "1.2"]), + (64 * "a", ["1.0", "1.1"]), + (64 * "b", ["1.0", "1.2"]), + (64 * "c", ["1.0", "1.1", "1.2"]), ], ) def test_filter_unallowed_hashes(hex_digest, expected_versions): candidates = [ - make_mock_candidate('1.0'), - make_mock_candidate('1.1', hex_digest=(64 * 'a')), - make_mock_candidate('1.2', hex_digest=(64 * 'b')), + make_mock_candidate("1.0"), + make_mock_candidate("1.1", hex_digest=(64 * "a")), + make_mock_candidate("1.2", hex_digest=(64 * "b")), ] - hashes_data = {'sha256': [hex_digest]} + hashes_data = {"sha256": [hex_digest]} hashes = Hashes(hashes_data) actual = filter_unallowed_hashes( - candidates, hashes=hashes, project_name='my-project' + candidates, hashes=hashes, project_name="my-project" ) actual_versions = [str(candidate.version) for candidate in actual] @@ -222,9 +222,9 @@ def test_filter_unallowed_hashes(hex_digest, expected_versions): def test_filter_unallowed_hashes__no_hashes(caplog): caplog.set_level(logging.DEBUG) - candidates = [make_mock_candidate('1.0'), make_mock_candidate('1.1')] + candidates = [make_mock_candidate("1.0"), make_mock_candidate("1.1")] actual = filter_unallowed_hashes( - candidates, hashes=Hashes(), project_name='my-project' + candidates, 
hashes=Hashes(), project_name="my-project" ) # Check that the return value is a copy. @@ -235,7 +235,7 @@ def test_filter_unallowed_hashes__no_hashes(caplog): "Given no hashes to check 2 links for project 'my-project': " "discarding no candidates" ) - check_caplog(caplog, 'DEBUG', expected_message) + check_caplog(caplog, "DEBUG", expected_message) def test_filter_unallowed_hashes__log_message_with_match(caplog): @@ -244,17 +244,17 @@ def test_filter_unallowed_hashes__log_message_with_match(caplog): # Test 1 match, 2 non-matches, 3 no hashes so all 3 values will be # different. candidates = [ - make_mock_candidate('1.0'), - make_mock_candidate('1.1'), - make_mock_candidate('1.2'), - make_mock_candidate('1.3', hex_digest=(64 * 'a')), - make_mock_candidate('1.4', hex_digest=(64 * 'b')), - make_mock_candidate('1.5', hex_digest=(64 * 'c')), + make_mock_candidate("1.0"), + make_mock_candidate("1.1"), + make_mock_candidate("1.2"), + make_mock_candidate("1.3", hex_digest=(64 * "a")), + make_mock_candidate("1.4", hex_digest=(64 * "b")), + make_mock_candidate("1.5", hex_digest=(64 * "c")), ] - hashes_data = {'sha256': [64 * 'a', 64 * 'd']} + hashes_data = {"sha256": [64 * "a", 64 * "d"]} hashes = Hashes(hashes_data) actual = filter_unallowed_hashes( - candidates, hashes=hashes, project_name='my-project' + candidates, hashes=hashes, project_name="my-project" ) assert len(actual) == 4 @@ -266,21 +266,21 @@ def test_filter_unallowed_hashes__log_message_with_match(caplog): " https://example.com/pkg-1.5.tar.gz#sha256=" "cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" ) - check_caplog(caplog, 'DEBUG', expected_message) + check_caplog(caplog, "DEBUG", expected_message) def test_filter_unallowed_hashes__log_message_with_no_match(caplog): caplog.set_level(logging.DEBUG) candidates = [ - make_mock_candidate('1.0'), - make_mock_candidate('1.1', hex_digest=(64 * 'b')), - make_mock_candidate('1.2', hex_digest=(64 * 'c')), + make_mock_candidate("1.0"), + 
make_mock_candidate("1.1", hex_digest=(64 * "b")), + make_mock_candidate("1.2", hex_digest=(64 * "c")), ] - hashes_data = {'sha256': [64 * 'a', 64 * 'd']} + hashes_data = {"sha256": [64 * "a", 64 * "d"]} hashes = Hashes(hashes_data) actual = filter_unallowed_hashes( - candidates, hashes=hashes, project_name='my-project' + candidates, hashes=hashes, project_name="my-project" ) assert len(actual) == 3 @@ -288,20 +288,20 @@ def test_filter_unallowed_hashes__log_message_with_no_match(caplog): "Checked 3 links for project 'my-project' against 2 hashes " "(0 matches, 1 no digest): discarding no candidates" ) - check_caplog(caplog, 'DEBUG', expected_message) + check_caplog(caplog, "DEBUG", expected_message) class TestCandidateEvaluator: @pytest.mark.parametrize( - 'allow_all_prereleases, prefer_binary', + "allow_all_prereleases, prefer_binary", [(False, False), (False, True), (True, False), (True, True)], ) def test_create(self, allow_all_prereleases, prefer_binary): target_python = TargetPython() - target_python._valid_tags = [('py36', 'none', 'any')] + target_python._valid_tags = [("py36", "none", "any")] specifier = SpecifierSet() evaluator = CandidateEvaluator.create( - project_name='my-project', + project_name="my-project", target_python=target_python, allow_all_prereleases=allow_all_prereleases, prefer_binary=prefer_binary, @@ -310,13 +310,13 @@ def test_create(self, allow_all_prereleases, prefer_binary): assert evaluator._allow_all_prereleases == allow_all_prereleases assert evaluator._prefer_binary == prefer_binary assert evaluator._specifier is specifier - assert evaluator._supported_tags == [('py36', 'none', 'any')] + assert evaluator._supported_tags == [("py36", "none", "any")] def test_create__target_python_none(self): """ Test passing target_python=None. 
""" - evaluator = CandidateEvaluator.create('my-project') + evaluator = CandidateEvaluator.create("my-project") expected_tags = get_supported() assert evaluator._supported_tags == expected_tags @@ -324,28 +324,28 @@ def test_create__specifier_none(self): """ Test passing specifier=None. """ - evaluator = CandidateEvaluator.create('my-project') + evaluator = CandidateEvaluator.create("my-project") expected_specifier = SpecifierSet() assert evaluator._specifier == expected_specifier def test_get_applicable_candidates(self): - specifier = SpecifierSet('<= 1.11') - versions = ['1.10', '1.11', '1.12'] + specifier = SpecifierSet("<= 1.11") + versions = ["1.10", "1.11", "1.12"] candidates = [make_mock_candidate(version) for version in versions] - evaluator = CandidateEvaluator.create('my-project', specifier=specifier) + evaluator = CandidateEvaluator.create("my-project", specifier=specifier) actual = evaluator.get_applicable_candidates(candidates) expected_applicable = candidates[:2] - assert [str(c.version) for c in expected_applicable] == ['1.10', '1.11'] + assert [str(c.version) for c in expected_applicable] == ["1.10", "1.11"] assert actual == expected_applicable @pytest.mark.parametrize( - 'specifier, expected_versions', + "specifier, expected_versions", [ # Test no version constraint. - (SpecifierSet(), ['1.0', '1.2']), + (SpecifierSet(), ["1.0", "1.2"]), # Test a version constraint that excludes the candidate whose # hash matches. Then the non-allowed hash is a candidate. - (SpecifierSet('<= 1.1'), ['1.0', '1.1']), + (SpecifierSet("<= 1.1"), ["1.0", "1.1"]), ], ) def test_get_applicable_candidates__hashes(self, specifier, expected_versions): @@ -353,29 +353,29 @@ def test_get_applicable_candidates__hashes(self, specifier, expected_versions): Test a non-None hashes value. 
""" candidates = [ - make_mock_candidate('1.0'), - make_mock_candidate('1.1', hex_digest=(64 * 'a')), - make_mock_candidate('1.2', hex_digest=(64 * 'b')), + make_mock_candidate("1.0"), + make_mock_candidate("1.1", hex_digest=(64 * "a")), + make_mock_candidate("1.2", hex_digest=(64 * "b")), ] - hashes_data = {'sha256': [64 * 'b']} + hashes_data = {"sha256": [64 * "b"]} hashes = Hashes(hashes_data) evaluator = CandidateEvaluator.create( - 'my-project', specifier=specifier, hashes=hashes + "my-project", specifier=specifier, hashes=hashes ) actual = evaluator.get_applicable_candidates(candidates) actual_versions = [str(c.version) for c in actual] assert actual_versions == expected_versions def test_compute_best_candidate(self): - specifier = SpecifierSet('<= 1.11') - versions = ['1.10', '1.11', '1.12'] + specifier = SpecifierSet("<= 1.11") + versions = ["1.10", "1.11", "1.12"] candidates = [make_mock_candidate(version) for version in versions] - evaluator = CandidateEvaluator.create('my-project', specifier=specifier) + evaluator = CandidateEvaluator.create("my-project", specifier=specifier) result = evaluator.compute_best_candidate(candidates) assert result._candidates == candidates expected_applicable = candidates[:2] - assert [str(c.version) for c in expected_applicable] == ['1.10', '1.11'] + assert [str(c.version) for c in expected_applicable] == ["1.10", "1.11"] assert result._applicable_candidates == expected_applicable assert result.best_candidate is expected_applicable[1] @@ -384,10 +384,10 @@ def test_compute_best_candidate__none_best(self): """ Test returning a None best candidate. 
""" - specifier = SpecifierSet('<= 1.10') - versions = ['1.11', '1.12'] + specifier = SpecifierSet("<= 1.10") + versions = ["1.11", "1.12"] candidates = [make_mock_candidate(version) for version in versions] - evaluator = CandidateEvaluator.create('my-project', specifier=specifier) + evaluator = CandidateEvaluator.create("my-project", specifier=specifier) result = evaluator.compute_best_candidate(candidates) assert result._candidates == candidates @@ -395,44 +395,44 @@ def test_compute_best_candidate__none_best(self): assert result.best_candidate is None @pytest.mark.parametrize( - 'hex_digest, expected', + "hex_digest, expected", [ # Test a link with no hash. (None, 0), # Test a link with an allowed hash. - (64 * 'a', 1), + (64 * "a", 1), # Test a link with a hash that isn't allowed. - (64 * 'b', 0), + (64 * "b", 0), ], ) def test_sort_key__hash(self, hex_digest, expected): """ Test the effect of the link's hash on _sort_key()'s return value. """ - candidate = make_mock_candidate('1.0', hex_digest=hex_digest) - hashes_data = {'sha256': [64 * 'a']} + candidate = make_mock_candidate("1.0", hex_digest=hex_digest) + hashes_data = {"sha256": [64 * "a"]} hashes = Hashes(hashes_data) - evaluator = CandidateEvaluator.create('my-project', hashes=hashes) + evaluator = CandidateEvaluator.create("my-project", hashes=hashes) sort_value = evaluator._sort_key(candidate) # The hash is reflected in the first element of the tuple. actual = sort_value[0] assert actual == expected @pytest.mark.parametrize( - 'yanked_reason, expected', + "yanked_reason, expected", [ # Test a non-yanked file. (None, 0), # Test a yanked file (has a lower value than non-yanked). - ('bad metadata', -1), + ("bad metadata", -1), ], ) def test_sort_key__is_yanked(self, yanked_reason, expected): """ Test the effect of is_yanked on _sort_key()'s return value. 
""" - candidate = make_mock_candidate('1.0', yanked_reason=yanked_reason) - evaluator = CandidateEvaluator.create('my-project') + candidate = make_mock_candidate("1.0", yanked_reason=yanked_reason) + evaluator = CandidateEvaluator.create("my-project") sort_value = evaluator._sort_key(candidate) # Yanked / non-yanked is reflected in the second element of the tuple. actual = sort_value[1] @@ -442,7 +442,7 @@ def test_sort_best_candidate__no_candidates(self): """ Test passing an empty list. """ - evaluator = CandidateEvaluator.create('my-project') + evaluator = CandidateEvaluator.create("my-project") actual = evaluator.sort_best_candidate([]) assert actual is None @@ -451,35 +451,35 @@ def test_sort_best_candidate__all_yanked(self, caplog): Test all candidates yanked. """ candidates = [ - make_mock_candidate('1.0', yanked_reason='bad metadata #1'), + make_mock_candidate("1.0", yanked_reason="bad metadata #1"), # Put the best candidate in the middle, to test sorting. - make_mock_candidate('3.0', yanked_reason='bad metadata #3'), - make_mock_candidate('2.0', yanked_reason='bad metadata #2'), + make_mock_candidate("3.0", yanked_reason="bad metadata #3"), + make_mock_candidate("2.0", yanked_reason="bad metadata #2"), ] expected_best = candidates[1] - evaluator = CandidateEvaluator.create('my-project') + evaluator = CandidateEvaluator.create("my-project") actual = evaluator.sort_best_candidate(candidates) assert actual is expected_best - assert str(actual.version) == '3.0' + assert str(actual.version) == "3.0" # Check the log messages. 
assert len(caplog.records) == 1 record = caplog.records[0] - assert record.levelname == 'WARNING' + assert record.levelname == "WARNING" assert record.message == ( - 'The candidate selected for download or install is a yanked ' + "The candidate selected for download or install is a yanked " "version: 'mypackage' candidate " - '(version 3.0 at https://example.com/pkg-3.0.tar.gz)\n' - 'Reason for being yanked: bad metadata #3' + "(version 3.0 at https://example.com/pkg-3.0.tar.gz)\n" + "Reason for being yanked: bad metadata #3" ) @pytest.mark.parametrize( - 'yanked_reason, expected_reason', + "yanked_reason, expected_reason", [ # Test no reason given. - ('', ''), + ("", ""), # Test a unicode string with a non-ascii character. - (u'curly quote: \u2018', u'curly quote: \u2018'), + (u"curly quote: \u2018", u"curly quote: \u2018"), ], ) def test_sort_best_candidate__yanked_reason( @@ -488,19 +488,19 @@ def test_sort_best_candidate__yanked_reason( """ Test the log message with various reason strings. 
""" - candidates = [make_mock_candidate('1.0', yanked_reason=yanked_reason)] - evaluator = CandidateEvaluator.create('my-project') + candidates = [make_mock_candidate("1.0", yanked_reason=yanked_reason)] + evaluator = CandidateEvaluator.create("my-project") actual = evaluator.sort_best_candidate(candidates) - assert str(actual.version) == '1.0' + assert str(actual.version) == "1.0" assert len(caplog.records) == 1 record = caplog.records[0] - assert record.levelname == 'WARNING' + assert record.levelname == "WARNING" expected_message = ( - 'The candidate selected for download or install is a yanked ' + "The candidate selected for download or install is a yanked " "version: 'mypackage' candidate " - '(version 1.0 at https://example.com/pkg-1.0.tar.gz)\n' - 'Reason for being yanked: ' + "(version 1.0 at https://example.com/pkg-1.0.tar.gz)\n" + "Reason for being yanked: " ) + expected_reason assert record.message == expected_message @@ -510,17 +510,17 @@ def test_sort_best_candidate__best_yanked_but_not_all(self, caplog): """ caplog.set_level(logging.INFO) candidates = [ - make_mock_candidate('4.0', yanked_reason='bad metadata #4'), + make_mock_candidate("4.0", yanked_reason="bad metadata #4"), # Put the best candidate in the middle, to test sorting. - make_mock_candidate('2.0'), - make_mock_candidate('3.0', yanked_reason='bad metadata #3'), - make_mock_candidate('1.0'), + make_mock_candidate("2.0"), + make_mock_candidate("3.0", yanked_reason="bad metadata #3"), + make_mock_candidate("1.0"), ] expected_best = candidates[1] - evaluator = CandidateEvaluator.create('my-project') + evaluator = CandidateEvaluator.create("my-project") actual = evaluator.sort_best_candidate(candidates) assert actual is expected_best - assert str(actual.version) == '2.0' + assert str(actual.version) == "2.0" # Check the log messages. 
assert len(caplog.records) == 0 @@ -528,7 +528,7 @@ def test_sort_best_candidate__best_yanked_but_not_all(self, caplog): class TestPackageFinder: @pytest.mark.parametrize( - 'allow_all_prereleases, prefer_binary', + "allow_all_prereleases, prefer_binary", [(False, False), (False, True), (True, False), (True, True)], ) def test_create__candidate_prefs(self, allow_all_prereleases, prefer_binary): @@ -600,7 +600,7 @@ def test_create__target_python_none(self): assert actual_target_python._given_py_version_info is None assert actual_target_python.py_version_info == CURRENT_PY_VERSION_INFO - @pytest.mark.parametrize('allow_yanked', [False, True]) + @pytest.mark.parametrize("allow_yanked", [False, True]) def test_create__allow_yanked(self, allow_yanked): """ Test that the _allow_yanked attribute is set correctly. @@ -614,7 +614,7 @@ def test_create__allow_yanked(self, allow_yanked): ) assert finder._allow_yanked == allow_yanked - @pytest.mark.parametrize('ignore_requires_python', [False, True]) + @pytest.mark.parametrize("ignore_requires_python", [False, True]) def test_create__ignore_requires_python(self, ignore_requires_python): """ Test that the _ignore_requires_python attribute is set correctly. @@ -637,7 +637,7 @@ def test_create__format_control(self): link_collector = LinkCollector( session=PipSession(), search_scope=SearchScope([], []) ) - format_control = FormatControl(set(), {':all:'}) + format_control = FormatControl(set(), {":all:"}) selection_prefs = SelectionPreferences( allow_yanked=True, format_control=format_control ) @@ -647,18 +647,18 @@ def test_create__format_control(self): actual_format_control = finder.format_control assert actual_format_control is format_control # Check that the attributes weren't reset. 
- assert actual_format_control.only_binary == {':all:'} + assert actual_format_control.only_binary == {":all:"} @pytest.mark.parametrize( - 'allow_yanked, ignore_requires_python, only_binary, expected_formats', + "allow_yanked, ignore_requires_python, only_binary, expected_formats", [ - (False, False, {}, frozenset({'binary', 'source'})), + (False, False, {}, frozenset({"binary", "source"})), # Test allow_yanked=True. - (True, False, {}, frozenset({'binary', 'source'})), + (True, False, {}, frozenset({"binary", "source"})), # Test ignore_requires_python=True. - (False, True, {}, frozenset({'binary', 'source'})), + (False, True, {}, frozenset({"binary", "source"})), # Test a non-trivial only_binary. - (False, False, {'twine'}, frozenset({'binary'})), + (False, False, {"twine"}, frozenset({"binary"})), ], ) def test_make_link_evaluator( @@ -681,10 +681,10 @@ def test_make_link_evaluator( ) # Pass a project_name that will be different from canonical_name. - link_evaluator = finder.make_link_evaluator('Twine') + link_evaluator = finder.make_link_evaluator("Twine") - assert link_evaluator.project_name == 'Twine' - assert link_evaluator._canonical_name == 'twine' + assert link_evaluator.project_name == "Twine" + assert link_evaluator._canonical_name == "twine" assert link_evaluator._allow_yanked == allow_yanked assert link_evaluator._ignore_requires_python == ignore_requires_python assert link_evaluator._formats == expected_formats @@ -698,12 +698,12 @@ def test_make_link_evaluator( assert actual_target_python.py_version_info == (3, 7, 0) @pytest.mark.parametrize( - 'allow_all_prereleases, prefer_binary', + "allow_all_prereleases, prefer_binary", [(False, False), (False, True), (True, False), (True, True)], ) def test_make_candidate_evaluator(self, allow_all_prereleases, prefer_binary): target_python = TargetPython() - target_python._valid_tags = [('py36', 'none', 'any')] + target_python._valid_tags = [("py36", "none", "any")] candidate_prefs = CandidatePreferences( 
prefer_binary=prefer_binary, allow_all_prereleases=allow_all_prereleases ) @@ -719,16 +719,16 @@ def test_make_candidate_evaluator(self, allow_all_prereleases, prefer_binary): specifier = SpecifierSet() # Pass hashes to check that _hashes is set. - hashes = Hashes({'sha256': [64 * 'a']}) + hashes = Hashes({"sha256": [64 * "a"]}) evaluator = finder.make_candidate_evaluator( - 'my-project', specifier=specifier, hashes=hashes + "my-project", specifier=specifier, hashes=hashes ) assert evaluator._allow_all_prereleases == allow_all_prereleases assert evaluator._hashes == hashes assert evaluator._prefer_binary == prefer_binary - assert evaluator._project_name == 'my-project' + assert evaluator._project_name == "my-project" assert evaluator._specifier is specifier - assert evaluator._supported_tags == [('py36', 'none', 'any')] + assert evaluator._supported_tags == [("py36", "none", "any")] @pytest.mark.parametrize( diff --git a/tests/unit/test_legacy_resolve.py b/tests/unit/test_legacy_resolve.py index 048d7772350..ce032b8297c 100644 --- a/tests/unit/test_legacy_resolve.py +++ b/tests/unit/test_legacy_resolve.py @@ -3,10 +3,7 @@ import pytest from pip._vendor import pkg_resources -from pip._internal.exceptions import ( - NoneMetadataError, - UnsupportedPythonVersion, -) +from pip._internal.exceptions import NoneMetadataError, UnsupportedPythonVersion from pip._internal.legacy_resolve import _check_dist_requires_python from pip._internal.utils.packaging import get_requires_python @@ -22,14 +19,14 @@ def __init__(self, metadata, metadata_name=None): (can be "METADATA" or "PKG-INFO"). Defaults to "METADATA". 
""" if metadata_name is None: - metadata_name = 'METADATA' + metadata_name = "METADATA" - self.project_name = 'my-project' + self.project_name = "my-project" self.metadata_name = metadata_name self.metadata = metadata def __str__(self): - return ''.format(self.project_name) + return "".format(self.project_name) def has_metadata(self, name): return name == self.metadata_name @@ -40,9 +37,9 @@ def get_metadata(self, name): def make_fake_dist(requires_python=None, metadata_name=None): - metadata = 'Name: test\n' + metadata = "Name: test\n" if requires_python is not None: - metadata += 'Requires-Python:{}'.format(requires_python) + metadata += "Requires-Python:{}".format(requires_python) return FakeDist(metadata, metadata_name=metadata_name) @@ -58,7 +55,7 @@ def test_compatible(self, caplog): Test a Python version compatible with the dist's Requires-Python. """ caplog.set_level(logging.DEBUG) - dist = make_fake_dist('== 3.6.5') + dist = make_fake_dist("== 3.6.5") _check_dist_requires_python( dist, version_info=(3, 6, 5), ignore_requires_python=False @@ -69,7 +66,7 @@ def test_incompatible(self): """ Test a Python version incompatible with the dist's Requires-Python. """ - dist = make_fake_dist('== 3.6.4') + dist = make_fake_dist("== 3.6.4") with pytest.raises(UnsupportedPythonVersion) as exc: _check_dist_requires_python( dist, version_info=(3, 6, 5), ignore_requires_python=False @@ -85,13 +82,13 @@ def test_incompatible_with_ignore_requires(self, caplog): while passing ignore_requires_python=True. 
""" caplog.set_level(logging.DEBUG) - dist = make_fake_dist('== 3.6.4') + dist = make_fake_dist("== 3.6.4") _check_dist_requires_python( dist, version_info=(3, 6, 5), ignore_requires_python=True ) assert len(caplog.records) == 1 record = caplog.records[0] - assert record.levelname == 'DEBUG' + assert record.levelname == "DEBUG" assert record.message == ( "Ignoring failed Requires-Python check for package 'my-project': " "3.6.5 not in '== 3.6.4'" @@ -118,19 +115,19 @@ def test_invalid_requires_python(self, caplog): Test a dist with an invalid Requires-Python. """ caplog.set_level(logging.DEBUG) - dist = make_fake_dist('invalid') + dist = make_fake_dist("invalid") _check_dist_requires_python( dist, version_info=(3, 6, 5), ignore_requires_python=False ) assert len(caplog.records) == 1 record = caplog.records[0] - assert record.levelname == 'WARNING' + assert record.levelname == "WARNING" assert record.message == ( "Package 'my-project' has an invalid Requires-Python: " "Invalid specifier: 'invalid'" ) - @pytest.mark.parametrize('metadata_name', ['METADATA', 'PKG-INFO']) + @pytest.mark.parametrize("metadata_name", ["METADATA", "PKG-INFO"]) def test_empty_metadata_error(self, caplog, metadata_name): """ Test dist.has_metadata() returning True and dist.get_metadata() diff --git a/tests/unit/test_link.py b/tests/unit/test_link.py index ef278277ab7..cf32bc5b2d2 100644 --- a/tests/unit/test_link.py +++ b/tests/unit/test_link.py @@ -6,11 +6,11 @@ class TestLink: @pytest.mark.parametrize( - 'url, expected', + "url, expected", [ ( - 'https://user:password@example.com/path/page.html', - '', + "https://user:password@example.com/path/page.html", + "", ) ], ) @@ -19,24 +19,24 @@ def test_repr(self, url, expected): assert repr(link) == expected @pytest.mark.parametrize( - 'url, expected', + "url, expected", [ - ('http://yo/wheel.whl', 'wheel.whl'), - ('http://yo/wheel', 'wheel'), - ('https://example.com/path/page.html', 'page.html'), + ("http://yo/wheel.whl", "wheel.whl"), + 
("http://yo/wheel", "wheel"), + ("https://example.com/path/page.html", "page.html"), # Test a quoted character. - ('https://example.com/path/page%231.html', 'page#1.html'), + ("https://example.com/path/page%231.html", "page#1.html"), ( - 'http://yo/myproject-1.0%2Bfoobar.0-py2.py3-none-any.whl', - 'myproject-1.0+foobar.0-py2.py3-none-any.whl', + "http://yo/myproject-1.0%2Bfoobar.0-py2.py3-none-any.whl", + "myproject-1.0+foobar.0-py2.py3-none-any.whl", ), # Test a path that ends in a slash. - ('https://example.com/path/', 'path'), - ('https://example.com/path//', 'path'), + ("https://example.com/path/", "path"), + ("https://example.com/path//", "path"), # Test a url with no filename. - ('https://example.com/', 'example.com'), + ("https://example.com/", "example.com"), # Test a url with no filename and with auth information. - ('https://user:password@example.com/', 'example.com'), + ("https://user:password@example.com/", "example.com"), ], ) def test_filename(self, url, expected): @@ -44,94 +44,94 @@ def test_filename(self, url, expected): assert link.filename == expected def test_splitext(self): - assert ('wheel', '.whl') == Link('http://yo/wheel.whl').splitext() + assert ("wheel", ".whl") == Link("http://yo/wheel.whl").splitext() def test_no_ext(self): - assert '' == Link('http://yo/wheel').ext + assert "" == Link("http://yo/wheel").ext def test_ext(self): - assert '.whl' == Link('http://yo/wheel.whl').ext + assert ".whl" == Link("http://yo/wheel.whl").ext def test_ext_fragment(self): - assert '.whl' == Link('http://yo/wheel.whl#frag').ext + assert ".whl" == Link("http://yo/wheel.whl#frag").ext def test_ext_query(self): - assert '.whl' == Link('http://yo/wheel.whl?a=b').ext + assert ".whl" == Link("http://yo/wheel.whl?a=b").ext def test_is_wheel(self): - assert Link('http://yo/wheel.whl').is_wheel + assert Link("http://yo/wheel.whl").is_wheel def test_is_wheel_false(self): - assert not Link('http://yo/not_a_wheel').is_wheel + assert not 
Link("http://yo/not_a_wheel").is_wheel def test_fragments(self): - url = 'git+https://example.com/package#egg=eggname' - assert 'eggname' == Link(url).egg_fragment + url = "git+https://example.com/package#egg=eggname" + assert "eggname" == Link(url).egg_fragment assert None is Link(url).subdirectory_fragment - url = 'git+https://example.com/package#egg=eggname&subdirectory=subdir' - assert 'eggname' == Link(url).egg_fragment - assert 'subdir' == Link(url).subdirectory_fragment - url = 'git+https://example.com/package#subdirectory=subdir&egg=eggname' - assert 'eggname' == Link(url).egg_fragment - assert 'subdir' == Link(url).subdirectory_fragment + url = "git+https://example.com/package#egg=eggname&subdirectory=subdir" + assert "eggname" == Link(url).egg_fragment + assert "subdir" == Link(url).subdirectory_fragment + url = "git+https://example.com/package#subdirectory=subdir&egg=eggname" + assert "eggname" == Link(url).egg_fragment + assert "subdir" == Link(url).subdirectory_fragment @pytest.mark.parametrize( - 'yanked_reason, expected', - [(None, False), ('', True), ('there was a mistake', True)], + "yanked_reason, expected", + [(None, False), ("", True), ("there was a mistake", True)], ) def test_is_yanked(self, yanked_reason, expected): - link = Link('https://example.com/wheel.whl', yanked_reason=yanked_reason) + link = Link("https://example.com/wheel.whl", yanked_reason=yanked_reason) assert link.is_yanked == expected @pytest.mark.parametrize( - 'hash_name, hex_digest, expected', + "hash_name, hex_digest, expected", [ # Test a value that matches but with the wrong hash_name. - ('sha384', 128 * 'a', False), + ("sha384", 128 * "a", False), # Test matching values, including values other than the first. - ('sha512', 128 * 'a', True), - ('sha512', 128 * 'b', True), + ("sha512", 128 * "a", True), + ("sha512", 128 * "b", True), # Test a matching hash_name with a value that doesn't match. 
- ('sha512', 128 * 'c', False), + ("sha512", 128 * "c", False), # Test a link without a hash value. - ('sha512', '', False), + ("sha512", "", False), ], ) def test_is_hash_allowed(self, hash_name, hex_digest, expected): - url = 'https://example.com/wheel.whl#{hash_name}={hex_digest}'.format( + url = "https://example.com/wheel.whl#{hash_name}={hex_digest}".format( hash_name=hash_name, hex_digest=hex_digest ) link = Link(url) - hashes_data = {'sha512': [128 * 'a', 128 * 'b']} + hashes_data = {"sha512": [128 * "a", 128 * "b"]} hashes = Hashes(hashes_data) assert link.is_hash_allowed(hashes) == expected def test_is_hash_allowed__no_hash(self): - link = Link('https://example.com/wheel.whl') - hashes_data = {'sha512': [128 * 'a']} + link = Link("https://example.com/wheel.whl") + hashes_data = {"sha512": [128 * "a"]} hashes = Hashes(hashes_data) assert not link.is_hash_allowed(hashes) @pytest.mark.parametrize( - 'hashes, expected', + "hashes, expected", [ (None, False), # Also test a success case to show the test is correct. 
- (Hashes({'sha512': [128 * 'a']}), True), + (Hashes({"sha512": [128 * "a"]}), True), ], ) def test_is_hash_allowed__none_hashes(self, hashes, expected): - url = 'https://example.com/wheel.whl#sha512={}'.format(128 * 'a') + url = "https://example.com/wheel.whl#sha512={}".format(128 * "a") link = Link(url) assert link.is_hash_allowed(hashes) == expected @pytest.mark.parametrize( - 'url, expected', + "url, expected", [ - ('git+https://github.com/org/repo', True), - ('bzr+http://bzr.myproject.org/MyProject/trunk/#egg=MyProject', True), - ('https://example.com/some.whl', False), - ('file://home/foo/some.whl', False), + ("git+https://github.com/org/repo", True), + ("bzr+http://bzr.myproject.org/MyProject/trunk/#egg=MyProject", True), + ("https://example.com/some.whl", False), + ("file://home/foo/some.whl", False), ], ) def test_is_vcs(self, url, expected): diff --git a/tests/unit/test_locations.py b/tests/unit/test_locations.py index 103a7565be5..4ec4173af18 100644 --- a/tests/unit/test_locations.py +++ b/tests/unit/test_locations.py @@ -13,7 +13,7 @@ from pip._internal.locations import distutils_scheme -if sys.platform == 'win32': +if sys.platform == "win32": pwd = Mock() else: import pwd @@ -34,7 +34,7 @@ def patch(self): """ first store and then patch python methods pythons """ self.tempfile_gettempdir = tempfile.gettempdir self.old_os_fstat = os.fstat - if sys.platform != 'win32': + if sys.platform != "win32": # os.geteuid and pwd.getpwuid are not implemented on windows self.old_os_geteuid = os.geteuid self.old_pwd_getpwuid = pwd.getpwuid @@ -46,14 +46,14 @@ def patch(self): os.geteuid = lambda: self.st_uid os.fstat = lambda fd: self.get_mock_fstat(fd) - if sys.platform != 'win32': + if sys.platform != "win32": pwd.getpwuid = lambda uid: self.get_mock_getpwuid(uid) def revert_patch(self): """ revert the patches to python methods """ tempfile.gettempdir = self.tempfile_gettempdir getpass.getuser = self.old_getpass_getuser - if sys.platform != 'win32': + if 
sys.platform != "win32": # os.geteuid and pwd.getpwuid are not implemented on windows os.geteuid = self.old_os_geteuid pwd.getpwuid = self.old_pwd_getpwuid @@ -81,7 +81,7 @@ def test_root_modifies_appropriately(self, monkeypatch): # This deals with nt/posix path differences # root is c:\somewhere\else or /somewhere/else root = os.path.normcase( - os.path.abspath(os.path.join(os.path.sep, 'somewhere', 'else')) + os.path.abspath(os.path.join(os.path.sep, "somewhere", "else")) ) norm_scheme = distutils_scheme("example") root_scheme = distutils_scheme("example", root=root) @@ -95,16 +95,16 @@ def test_root_modifies_appropriately(self, monkeypatch): def test_distutils_config_file_read(self, tmpdir, monkeypatch): # This deals with nt/posix path differences install_scripts = os.path.normcase( - os.path.abspath(os.path.join(os.path.sep, 'somewhere', 'else')) + os.path.abspath(os.path.join(os.path.sep, "somewhere", "else")) ) f = tmpdir.mkdir("config").joinpath("setup.cfg") f.write_text("[install]\ninstall-scripts=" + install_scripts) from distutils.dist import Distribution # patch the function that returns what config files are present - monkeypatch.setattr(Distribution, 'find_config_files', lambda self: [f]) - scheme = distutils_scheme('example') - assert scheme['scripts'] == install_scripts + monkeypatch.setattr(Distribution, "find_config_files", lambda self: [f]) + scheme = distutils_scheme("example") + assert scheme["scripts"] == install_scripts @pytest.mark.incompatible_with_venv # when we request install-lib, we should install everything (.py & @@ -113,14 +113,14 @@ def test_distutils_config_file_read(self, tmpdir, monkeypatch): def test_install_lib_takes_precedence(self, tmpdir, monkeypatch): # This deals with nt/posix path differences install_lib = os.path.normcase( - os.path.abspath(os.path.join(os.path.sep, 'somewhere', 'else')) + os.path.abspath(os.path.join(os.path.sep, "somewhere", "else")) ) f = tmpdir.mkdir("config").joinpath("setup.cfg") 
f.write_text("[install]\ninstall-lib=" + install_lib) from distutils.dist import Distribution # patch the function that returns what config files are present - monkeypatch.setattr(Distribution, 'find_config_files', lambda self: [f]) - scheme = distutils_scheme('example') - assert scheme['platlib'] == install_lib + os.path.sep - assert scheme['purelib'] == install_lib + os.path.sep + monkeypatch.setattr(Distribution, "find_config_files", lambda self: [f]) + scheme = distutils_scheme("example") + assert scheme["platlib"] == install_lib + os.path.sep + assert scheme["purelib"] == install_lib + os.path.sep diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index 371ec5db469..b59ebf6a08c 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -25,7 +25,7 @@ def _make_broken_pipe_error(): if PY2: # This is one way a broken pipe error can show up in Python 2 # (a non-Windows example in this case). - return IOError(errno.EPIPE, 'Broken pipe') + return IOError(errno.EPIPE, "Broken pipe") return BrokenPipeError() # noqa: F821 @@ -36,18 +36,18 @@ class TestIndentingFormatter(object): """ def setup(self): - self.old_tz = os.environ.get('TZ') - os.environ['TZ'] = 'UTC' + self.old_tz = os.environ.get("TZ") + os.environ["TZ"] = "UTC" # time.tzset() is not implemented on some platforms (notably, Windows). 
- if hasattr(time, 'tzset'): + if hasattr(time, "tzset"): time.tzset() def teardown(self): if self.old_tz: - os.environ['TZ'] = self.old_tz + os.environ["TZ"] = self.old_tz else: - del os.environ['TZ'] - if 'tzset' in dir(time): + del os.environ["TZ"] + if "tzset" in dir(time): time.tzset() def make_record(self, msg, level_name): @@ -64,13 +64,13 @@ def make_record(self, msg, level_name): return record @pytest.mark.parametrize( - 'level_name, expected', + "level_name, expected", [ - ('DEBUG', 'hello\nworld'), - ('INFO', 'hello\nworld'), - ('WARNING', 'WARNING: hello\nworld'), - ('ERROR', 'ERROR: hello\nworld'), - ('CRITICAL', 'ERROR: hello\nworld'), + ("DEBUG", "hello\nworld"), + ("INFO", "hello\nworld"), + ("WARNING", "WARNING: hello\nworld"), + ("ERROR", "ERROR: hello\nworld"), + ("CRITICAL", "ERROR: hello\nworld"), ], ) def test_format(self, level_name, expected): @@ -78,32 +78,32 @@ def test_format(self, level_name, expected): Args: level_name: a logging level name (e.g. "WARNING"). 
""" - record = self.make_record('hello\nworld', level_name=level_name) + record = self.make_record("hello\nworld", level_name=level_name) f = IndentingFormatter(fmt="%(message)s") assert f.format(record) == expected @pytest.mark.parametrize( - 'level_name, expected', + "level_name, expected", [ - ('INFO', '2019-01-17T06:00:37,040 hello\n2019-01-17T06:00:37,040 world'), + ("INFO", "2019-01-17T06:00:37,040 hello\n2019-01-17T06:00:37,040 world"), ( - 'WARNING', - '2019-01-17T06:00:37,040 WARNING: hello\n' - '2019-01-17T06:00:37,040 world', + "WARNING", + "2019-01-17T06:00:37,040 WARNING: hello\n" + "2019-01-17T06:00:37,040 world", ), ], ) def test_format_with_timestamp(self, level_name, expected): - record = self.make_record('hello\nworld', level_name=level_name) + record = self.make_record("hello\nworld", level_name=level_name) f = IndentingFormatter(fmt="%(message)s", add_timestamp=True) assert f.format(record) == expected @pytest.mark.parametrize( - 'level_name, expected', + "level_name, expected", [ - ('WARNING', 'DEPRECATION: hello\nworld'), - ('ERROR', 'DEPRECATION: hello\nworld'), - ('CRITICAL', 'DEPRECATION: hello\nworld'), + ("WARNING", "DEPRECATION: hello\nworld"), + ("ERROR", "DEPRECATION: hello\nworld"), + ("CRITICAL", "DEPRECATION: hello\nworld"), ], ) def test_format_deprecated(self, level_name, expected): @@ -111,14 +111,14 @@ def test_format_deprecated(self, level_name, expected): Test that logged deprecation warnings coming from deprecated() don't get another prefix. 
""" - record = self.make_record('DEPRECATION: hello\nworld', level_name=level_name) + record = self.make_record("DEPRECATION: hello\nworld", level_name=level_name) f = IndentingFormatter(fmt="%(message)s") assert f.format(record) == expected class TestColorizedStreamHandler(object): def _make_log_record(self): - attrs = {'msg': 'my error'} + attrs = {"msg": "my error"} record = logging.makeLogRecord(attrs) return record @@ -133,21 +133,21 @@ def test_broken_pipe_in_stderr_flush(self): with captured_stderr() as stderr: handler = ColorizedStreamHandler(stream=stderr) - with patch('sys.stderr.flush') as mock_flush: + with patch("sys.stderr.flush") as mock_flush: mock_flush.side_effect = _make_broken_pipe_error() # The emit() call raises no exception. handler.emit(record) err_text = stderr.getvalue() - assert err_text.startswith('my error') + assert err_text.startswith("my error") # Check that the logging framework tried to log the exception. if PY2: - assert 'IOError: [Errno 32] Broken pipe' in err_text - assert 'Logged from file' in err_text + assert "IOError: [Errno 32] Broken pipe" in err_text + assert "Logged from file" in err_text else: - assert 'Logging error' in err_text - assert 'BrokenPipeError' in err_text + assert "Logging error" in err_text + assert "BrokenPipeError" in err_text assert "Message: 'my error'" in err_text def test_broken_pipe_in_stdout_write(self): @@ -160,7 +160,7 @@ def test_broken_pipe_in_stdout_write(self): with captured_stdout() as stdout: handler = ColorizedStreamHandler(stream=stdout) - with patch('sys.stdout.write') as mock_write: + with patch("sys.stdout.write") as mock_write: mock_write.side_effect = _make_broken_pipe_error() with pytest.raises(BrokenStdoutLoggingError): handler.emit(record) @@ -175,7 +175,7 @@ def test_broken_pipe_in_stdout_flush(self): with captured_stdout() as stdout: handler = ColorizedStreamHandler(stream=stdout) - with patch('sys.stdout.flush') as mock_flush: + with patch("sys.stdout.flush") as mock_flush: 
mock_flush.side_effect = _make_broken_pipe_error() with pytest.raises(BrokenStdoutLoggingError): handler.emit(record) @@ -184,4 +184,4 @@ def test_broken_pipe_in_stdout_flush(self): # Sanity check that the log record was written, since flush() happens # after write(). - assert output.startswith('my error') + assert output.startswith("my error") diff --git a/tests/unit/test_networking_auth.py b/tests/unit/test_networking_auth.py index 9aaebbd2ba5..6303f4c31d3 100644 --- a/tests/unit/test_networking_auth.py +++ b/tests/unit/test_networking_auth.py @@ -37,16 +37,16 @@ def test_get_credentials_parses_correctly(input_url, url, username, password): (username is None and password is None) or # Credentials were found and "cached" appropriately - auth.passwords['example.com'] == (username, password) + auth.passwords["example.com"] == (username, password) ) def test_get_credentials_uses_cached_credentials(): auth = MultiDomainBasicAuth() - auth.passwords['example.com'] = ('user', 'pass') + auth.passwords["example.com"] = ("user", "pass") got = auth._get_url_and_credentials("http://foo:bar@example.com/path") - expected = ('http://example.com/path', 'user', 'pass') + expected = ("http://example.com/path", "user", "pass") assert got == expected @@ -57,7 +57,7 @@ def test_get_index_url_credentials(): ) # Check resolution of indexes - assert get("http://example.com/path/path2") == ('foo', 'bar') + assert get("http://example.com/path/path2") == ("foo", "bar") assert get("http://example.com/path3/path2") == (None, None) @@ -81,7 +81,7 @@ def set_password(self, system, username, password): @pytest.mark.parametrize( - 'url, expect', + "url, expect", ( ("http://example.com/path1", (None, None)), # path1 URLs will be resolved by netloc @@ -94,7 +94,7 @@ def set_password(self, system, username, password): ) def test_keyring_get_password(monkeypatch, url, expect): keyring = KeyringModuleV1() - monkeypatch.setattr('pip._internal.network.auth.keyring', keyring) + 
monkeypatch.setattr("pip._internal.network.auth.keyring", keyring) auth = MultiDomainBasicAuth(index_urls=["http://example.com/path2"]) actual = auth._get_new_credentials(url, allow_netrc=False, allow_keyring=True) @@ -103,21 +103,21 @@ def test_keyring_get_password(monkeypatch, url, expect): def test_keyring_get_password_after_prompt(monkeypatch): keyring = KeyringModuleV1() - monkeypatch.setattr('pip._internal.network.auth.keyring', keyring) + monkeypatch.setattr("pip._internal.network.auth.keyring", keyring) auth = MultiDomainBasicAuth() def ask_input(prompt): assert prompt == "User for example.com: " return "user" - monkeypatch.setattr('pip._internal.network.auth.ask_input', ask_input) + monkeypatch.setattr("pip._internal.network.auth.ask_input", ask_input) actual = auth._prompt_for_password("example.com") assert actual == ("user", "user!netloc", False) def test_keyring_get_password_username_in_index(monkeypatch): keyring = KeyringModuleV1() - monkeypatch.setattr('pip._internal.network.auth.keyring', keyring) + monkeypatch.setattr("pip._internal.network.auth.keyring", keyring) auth = MultiDomainBasicAuth(index_urls=["http://user@example.com/path2"]) get = functools.partial( auth._get_new_credentials, allow_netrc=False, allow_keyring=True @@ -137,10 +137,10 @@ def test_keyring_get_password_username_in_index(monkeypatch): ) def test_keyring_set_password(monkeypatch, response_status, creds, expect_save): keyring = KeyringModuleV1() - monkeypatch.setattr('pip._internal.network.auth.keyring', keyring) + monkeypatch.setattr("pip._internal.network.auth.keyring", keyring) auth = MultiDomainBasicAuth(prompting=True) - monkeypatch.setattr(auth, '_get_url_and_credentials', lambda u: (u, None, None)) - monkeypatch.setattr(auth, '_prompt_for_password', lambda *a: creds) + monkeypatch.setattr(auth, "_get_url_and_credentials", lambda u: (u, None, None)) + monkeypatch.setattr(auth, "_prompt_for_password", lambda *a: creds) if creds[2]: # when _prompt_for_password indicates to 
save, we should save def should_save_password_to_keyring(*a): @@ -153,7 +153,7 @@ def should_save_password_to_keyring(*a): assert False, "_should_save_password_to_keyring should not be " + "called" monkeypatch.setattr( - auth, '_should_save_password_to_keyring', should_save_password_to_keyring + auth, "_should_save_password_to_keyring", should_save_password_to_keyring ) req = MockRequest("https://example.com") @@ -202,7 +202,7 @@ def get_credential(self, system, username): @pytest.mark.parametrize( - 'url, expect', + "url, expect", ( ("http://example.com/path1", ("username", "netloc")), ("http://example.com/path2/path3", ("username", "url")), @@ -210,7 +210,7 @@ def get_credential(self, system, username): ), ) def test_keyring_get_credential(monkeypatch, url, expect): - monkeypatch.setattr(pip._internal.network.auth, 'keyring', KeyringModuleV2()) + monkeypatch.setattr(pip._internal.network.auth, "keyring", KeyringModuleV2()) auth = MultiDomainBasicAuth(index_urls=["http://example.com/path2"]) assert ( diff --git a/tests/unit/test_options.py b/tests/unit/test_options.py index 4b7a7980a6b..2387a144fbf 100644 --- a/tests/unit/test_options.py +++ b/tests/unit/test_options.py @@ -46,7 +46,7 @@ def assert_option_error(capsys, expected): def assert_is_default_cache_dir(value): # This path looks different on different platforms, but the path always # has the substring "pip". 
- assert 'pip' in value + assert "pip" in value class TestOptionPrecedence(AddFakeCommandMixin): @@ -57,44 +57,44 @@ class TestOptionPrecedence(AddFakeCommandMixin): """ def get_config_section(self, section): - config = {'global': [('timeout', '-3')], 'fake': [('timeout', '-2')]} + config = {"global": [("timeout", "-3")], "fake": [("timeout", "-2")]} return config[section] def get_config_section_global(self, section): - config = {'global': [('timeout', '-3')], 'fake': []} + config = {"global": [("timeout", "-3")], "fake": []} return config[section] def test_env_override_default_int(self): """ Test that environment variable overrides an int option default. """ - os.environ['PIP_TIMEOUT'] = '-1' - options, args = main(['fake']) + os.environ["PIP_TIMEOUT"] = "-1" + options, args = main(["fake"]) assert options.timeout == -1 def test_env_override_default_append(self): """ Test that environment variable overrides an append option default. """ - os.environ['PIP_FIND_LINKS'] = 'F1' - options, args = main(['fake']) - assert options.find_links == ['F1'] + os.environ["PIP_FIND_LINKS"] = "F1" + options, args = main(["fake"]) + assert options.find_links == ["F1"] - os.environ['PIP_FIND_LINKS'] = 'F1 F2' - options, args = main(['fake']) - assert options.find_links == ['F1', 'F2'] + os.environ["PIP_FIND_LINKS"] = "F1 F2" + options, args = main(["fake"]) + assert options.find_links == ["F1", "F2"] def test_env_override_default_choice(self): """ Test that environment variable overrides a choice option default. 
""" - os.environ['PIP_EXISTS_ACTION'] = 'w' - options, args = main(['fake']) - assert options.exists_action == ['w'] + os.environ["PIP_EXISTS_ACTION"] = "w" + options, args = main(["fake"]) + assert options.exists_action == ["w"] - os.environ['PIP_EXISTS_ACTION'] = 's w' - options, args = main(['fake']) - assert options.exists_action == ['s', 'w'] + os.environ["PIP_EXISTS_ACTION"] = "s w" + options, args = main(["fake"]) + assert options.exists_action == ["s", "w"] def test_env_alias_override_default(self): """ @@ -102,35 +102,35 @@ def test_env_alias_override_default(self): using the env variable, "PIP_" works for all cases. (e.g. PIP_LOG_FILE and PIP_LOCAL_LOG should all work) """ - os.environ['PIP_LOG_FILE'] = 'override.log' - options, args = main(['fake']) - assert options.log == 'override.log' - os.environ['PIP_LOCAL_LOG'] = 'override.log' - options, args = main(['fake']) - assert options.log == 'override.log' + os.environ["PIP_LOG_FILE"] = "override.log" + options, args = main(["fake"]) + assert options.log == "override.log" + os.environ["PIP_LOCAL_LOG"] = "override.log" + options, args = main(["fake"]) + assert options.log == "override.log" def test_cli_override_environment(self): """ Test the cli overrides and environment variable """ - os.environ['PIP_TIMEOUT'] = '-1' - options, args = main(['fake', '--timeout', '-2']) + os.environ["PIP_TIMEOUT"] = "-1" + options, args = main(["fake", "--timeout", "-2"]) assert options.timeout == -2 @pytest.mark.parametrize( - 'pip_no_cache_dir', + "pip_no_cache_dir", [ # Enabling --no-cache-dir means no cache directory. - '1', - 'true', - 'on', - 'yes', + "1", + "true", + "on", + "yes", # For historical / backwards compatibility reasons, we also disable # the cache directory if provided a value that translates to 0. 
- '0', - 'false', - 'off', - 'no', + "0", + "false", + "off", + "no", ], ) def test_cache_dir__PIP_NO_CACHE_DIR(self, pip_no_cache_dir): @@ -138,28 +138,28 @@ def test_cache_dir__PIP_NO_CACHE_DIR(self, pip_no_cache_dir): Test setting the PIP_NO_CACHE_DIR environment variable without passing any command-line flags. """ - os.environ['PIP_NO_CACHE_DIR'] = pip_no_cache_dir - options, args = main(['fake']) + os.environ["PIP_NO_CACHE_DIR"] = pip_no_cache_dir + options, args = main(["fake"]) assert options.cache_dir is False - @pytest.mark.parametrize('pip_no_cache_dir', ['yes', 'no']) + @pytest.mark.parametrize("pip_no_cache_dir", ["yes", "no"]) def test_cache_dir__PIP_NO_CACHE_DIR__with_cache_dir(self, pip_no_cache_dir): """ Test setting PIP_NO_CACHE_DIR while also passing an explicit --cache-dir value. """ - os.environ['PIP_NO_CACHE_DIR'] = pip_no_cache_dir - options, args = main(['--cache-dir', '/cache/dir', 'fake']) + os.environ["PIP_NO_CACHE_DIR"] = pip_no_cache_dir + options, args = main(["--cache-dir", "/cache/dir", "fake"]) # The command-line flag takes precedence. - assert options.cache_dir == '/cache/dir' + assert options.cache_dir == "/cache/dir" - @pytest.mark.parametrize('pip_no_cache_dir', ['yes', 'no']) + @pytest.mark.parametrize("pip_no_cache_dir", ["yes", "no"]) def test_cache_dir__PIP_NO_CACHE_DIR__with_no_cache_dir(self, pip_no_cache_dir): """ Test setting PIP_NO_CACHE_DIR while also passing --no-cache-dir. """ - os.environ['PIP_NO_CACHE_DIR'] = pip_no_cache_dir - options, args = main(['--no-cache-dir', 'fake']) + os.environ["PIP_NO_CACHE_DIR"] = pip_no_cache_dir + options, args = main(["--no-cache-dir", "fake"]) # The command-line flag should take precedence (which has the same # value in this case). assert options.cache_dir is False @@ -169,10 +169,10 @@ def test_cache_dir__PIP_NO_CACHE_DIR_invalid__with_no_cache_dir(self, capsys): Test setting PIP_NO_CACHE_DIR to an invalid value while also passing --no-cache-dir. 
""" - os.environ['PIP_NO_CACHE_DIR'] = 'maybe' + os.environ["PIP_NO_CACHE_DIR"] = "maybe" expected_err = "--no-cache-dir error: invalid truth value 'maybe'" with assert_option_error(capsys, expected=expected_err): - main(['--no-cache-dir', 'fake']) + main(["--no-cache-dir", "fake"]) class TestUsePEP517Options(object): @@ -184,7 +184,7 @@ class TestUsePEP517Options(object): def parse_args(self, args): # We use DownloadCommand since that is one of the few Command # classes with the use_pep517 options. - command = create_command('download') + command = create_command("download") options, args = command.parse_args(args) return options @@ -200,21 +200,21 @@ def test_use_pep517(self): """ Test passing --use-pep517. """ - options = self.parse_args(['--use-pep517']) + options = self.parse_args(["--use-pep517"]) assert options.use_pep517 is True def test_no_use_pep517(self): """ Test passing --no-use-pep517. """ - options = self.parse_args(['--no-use-pep517']) + options = self.parse_args(["--no-use-pep517"]) assert options.use_pep517 is False def test_PIP_USE_PEP517_true(self): """ Test setting PIP_USE_PEP517 to "true". """ - with temp_environment_variable('PIP_USE_PEP517', 'true'): + with temp_environment_variable("PIP_USE_PEP517", "true"): options = self.parse_args([]) # This is an int rather than a boolean because strtobool() in pip's # configuration code returns an int. @@ -224,7 +224,7 @@ def test_PIP_USE_PEP517_false(self): """ Test setting PIP_USE_PEP517 to "false". """ - with temp_environment_variable('PIP_USE_PEP517', 'false'): + with temp_environment_variable("PIP_USE_PEP517", "false"): options = self.parse_args([]) # This is an int rather than a boolean because strtobool() in pip's # configuration code returns an int. @@ -234,16 +234,16 @@ def test_use_pep517_and_PIP_USE_PEP517_false(self): """ Test passing --use-pep517 and setting PIP_USE_PEP517 to "false". 
""" - with temp_environment_variable('PIP_USE_PEP517', 'false'): - options = self.parse_args(['--use-pep517']) + with temp_environment_variable("PIP_USE_PEP517", "false"): + options = self.parse_args(["--use-pep517"]) assert options.use_pep517 is True def test_no_use_pep517_and_PIP_USE_PEP517_true(self): """ Test passing --no-use-pep517 and setting PIP_USE_PEP517 to "true". """ - with temp_environment_variable('PIP_USE_PEP517', 'true'): - options = self.parse_args(['--no-use-pep517']) + with temp_environment_variable("PIP_USE_PEP517", "true"): + options = self.parse_args(["--no-use-pep517"]) assert options.use_pep517 is False def test_PIP_NO_USE_PEP517(self, capsys): @@ -251,29 +251,29 @@ def test_PIP_NO_USE_PEP517(self, capsys): Test setting PIP_NO_USE_PEP517, which isn't allowed. """ expected_err = ( - '--no-use-pep517 error: A value was passed for --no-use-pep517,\n' + "--no-use-pep517 error: A value was passed for --no-use-pep517,\n" ) - with temp_environment_variable('PIP_NO_USE_PEP517', 'true'): + with temp_environment_variable("PIP_NO_USE_PEP517", "true"): with assert_option_error(capsys, expected=expected_err): self.parse_args([]) class TestOptionsInterspersed(AddFakeCommandMixin): def test_general_option_after_subcommand(self): - options, args = main(['fake', '--timeout', '-1']) + options, args = main(["fake", "--timeout", "-1"]) assert options.timeout == -1 def test_option_after_subcommand_arg(self): - options, args = main(['fake', 'arg', '--timeout', '-1']) + options, args = main(["fake", "arg", "--timeout", "-1"]) assert options.timeout == -1 def test_additive_before_after_subcommand(self): - options, args = main(['-v', 'fake', '-v']) + options, args = main(["-v", "fake", "-v"]) assert options.verbose == 2 def test_subcommand_option_before_subcommand_fails(self): with pytest.raises(SystemExit): - main(['--find-links', 'F1', 'fake']) + main(["--find-links", "F1", "fake"]) class TestGeneralOptions(AddFakeCommandMixin): @@ -282,100 +282,100 @@ class 
TestGeneralOptions(AddFakeCommandMixin): # extra processing they receive, and the number of bugs we've had def test_cache_dir__default(self): - options, args = main(['fake']) + options, args = main(["fake"]) # With no options the default cache dir should be used. assert_is_default_cache_dir(options.cache_dir) def test_cache_dir__provided(self): - options, args = main(['--cache-dir', '/cache/dir', 'fake']) - assert options.cache_dir == '/cache/dir' + options, args = main(["--cache-dir", "/cache/dir", "fake"]) + assert options.cache_dir == "/cache/dir" def test_no_cache_dir__provided(self): - options, args = main(['--no-cache-dir', 'fake']) + options, args = main(["--no-cache-dir", "fake"]) assert options.cache_dir is False def test_require_virtualenv(self): - options1, args1 = main(['--require-virtualenv', 'fake']) - options2, args2 = main(['fake', '--require-virtualenv']) + options1, args1 = main(["--require-virtualenv", "fake"]) + options2, args2 = main(["fake", "--require-virtualenv"]) assert options1.require_venv assert options2.require_venv def test_verbose(self): - options1, args1 = main(['--verbose', 'fake']) - options2, args2 = main(['fake', '--verbose']) + options1, args1 = main(["--verbose", "fake"]) + options2, args2 = main(["fake", "--verbose"]) assert options1.verbose == options2.verbose == 1 def test_quiet(self): - options1, args1 = main(['--quiet', 'fake']) - options2, args2 = main(['fake', '--quiet']) + options1, args1 = main(["--quiet", "fake"]) + options2, args2 = main(["fake", "--quiet"]) assert options1.quiet == options2.quiet == 1 - options3, args3 = main(['--quiet', '--quiet', 'fake']) - options4, args4 = main(['fake', '--quiet', '--quiet']) + options3, args3 = main(["--quiet", "--quiet", "fake"]) + options4, args4 = main(["fake", "--quiet", "--quiet"]) assert options3.quiet == options4.quiet == 2 - options5, args5 = main(['--quiet', '--quiet', '--quiet', 'fake']) - options6, args6 = main(['fake', '--quiet', '--quiet', '--quiet']) + options5, 
args5 = main(["--quiet", "--quiet", "--quiet", "fake"]) + options6, args6 = main(["fake", "--quiet", "--quiet", "--quiet"]) assert options5.quiet == options6.quiet == 3 def test_log(self): - options1, args1 = main(['--log', 'path', 'fake']) - options2, args2 = main(['fake', '--log', 'path']) - assert options1.log == options2.log == 'path' + options1, args1 = main(["--log", "path", "fake"]) + options2, args2 = main(["fake", "--log", "path"]) + assert options1.log == options2.log == "path" def test_local_log(self): - options1, args1 = main(['--local-log', 'path', 'fake']) - options2, args2 = main(['fake', '--local-log', 'path']) - assert options1.log == options2.log == 'path' + options1, args1 = main(["--local-log", "path", "fake"]) + options2, args2 = main(["fake", "--local-log", "path"]) + assert options1.log == options2.log == "path" def test_no_input(self): - options1, args1 = main(['--no-input', 'fake']) - options2, args2 = main(['fake', '--no-input']) + options1, args1 = main(["--no-input", "fake"]) + options2, args2 = main(["fake", "--no-input"]) assert options1.no_input assert options2.no_input def test_proxy(self): - options1, args1 = main(['--proxy', 'path', 'fake']) - options2, args2 = main(['fake', '--proxy', 'path']) - assert options1.proxy == options2.proxy == 'path' + options1, args1 = main(["--proxy", "path", "fake"]) + options2, args2 = main(["fake", "--proxy", "path"]) + assert options1.proxy == options2.proxy == "path" def test_retries(self): - options1, args1 = main(['--retries', '-1', 'fake']) - options2, args2 = main(['fake', '--retries', '-1']) + options1, args1 = main(["--retries", "-1", "fake"]) + options2, args2 = main(["fake", "--retries", "-1"]) assert options1.retries == options2.retries == -1 def test_timeout(self): - options1, args1 = main(['--timeout', '-1', 'fake']) - options2, args2 = main(['fake', '--timeout', '-1']) + options1, args1 = main(["--timeout", "-1", "fake"]) + options2, args2 = main(["fake", "--timeout", "-1"]) assert 
options1.timeout == options2.timeout == -1 def test_skip_requirements_regex(self): - options1, args1 = main(['--skip-requirements-regex', 'path', 'fake']) - options2, args2 = main(['fake', '--skip-requirements-regex', 'path']) - assert options1.skip_requirements_regex == 'path' - assert options2.skip_requirements_regex == 'path' + options1, args1 = main(["--skip-requirements-regex", "path", "fake"]) + options2, args2 = main(["fake", "--skip-requirements-regex", "path"]) + assert options1.skip_requirements_regex == "path" + assert options2.skip_requirements_regex == "path" def test_exists_action(self): - options1, args1 = main(['--exists-action', 'w', 'fake']) - options2, args2 = main(['fake', '--exists-action', 'w']) - assert options1.exists_action == options2.exists_action == ['w'] + options1, args1 = main(["--exists-action", "w", "fake"]) + options2, args2 = main(["fake", "--exists-action", "w"]) + assert options1.exists_action == options2.exists_action == ["w"] def test_cert(self): - options1, args1 = main(['--cert', 'path', 'fake']) - options2, args2 = main(['fake', '--cert', 'path']) - assert options1.cert == options2.cert == 'path' + options1, args1 = main(["--cert", "path", "fake"]) + options2, args2 = main(["fake", "--cert", "path"]) + assert options1.cert == options2.cert == "path" def test_client_cert(self): - options1, args1 = main(['--client-cert', 'path', 'fake']) - options2, args2 = main(['fake', '--client-cert', 'path']) - assert options1.client_cert == options2.client_cert == 'path' + options1, args1 = main(["--client-cert", "path", "fake"]) + options2, args2 = main(["fake", "--client-cert", "path"]) + assert options1.client_cert == options2.client_cert == "path" class TestOptionsConfigFiles(object): def test_venv_config_file_found(self, monkeypatch): # strict limit on the global config files list monkeypatch.setattr( - pip._internal.utils.appdirs, 'site_config_dirs', lambda _: ['/a/place'] + pip._internal.utils.appdirs, "site_config_dirs", lambda 
_: ["/a/place"] ) cp = pip._internal.configuration.Configuration(isolated=False) @@ -399,7 +399,7 @@ def test_venv_config_file_found(self, monkeypatch): ), ) def test_config_file_options(self, monkeypatch, args, expect): - cmd = create_command('config') + cmd = create_command("config") # Replace a handler with a no-op to avoid side effects monkeypatch.setattr(cmd, "get_name", lambda *a: None) @@ -411,7 +411,7 @@ def test_config_file_options(self, monkeypatch, args, expect): assert expect == cmd._determine_file(options, need_value=False) def test_config_file_venv_option(self, monkeypatch): - cmd = create_command('config') + cmd = create_command("config") # Replace a handler with a no-op to avoid side effects monkeypatch.setattr(cmd, "get_name", lambda *a: None) diff --git a/tests/unit/test_packaging.py b/tests/unit/test_packaging.py index 5956a48273d..2386083dbd1 100644 --- a/tests/unit/test_packaging.py +++ b/tests/unit/test_packaging.py @@ -5,10 +5,10 @@ @pytest.mark.parametrize( - 'version_info, requires_python, expected', + "version_info, requires_python, expected", [ - ((3, 6, 5), '== 3.6.4', False), - ((3, 6, 5), '== 3.6.5', True), + ((3, 6, 5), "== 3.6.4", False), + ((3, 6, 5), "== 3.6.5", True), ((3, 6, 5), None, True), ], ) @@ -22,4 +22,4 @@ def test_check_requires_python__invalid(): Test an invalid Requires-Python value. """ with pytest.raises(specifiers.InvalidSpecifier): - check_requires_python('invalid', (3, 6, 5)) + check_requires_python("invalid", (3, 6, 5)) diff --git a/tests/unit/test_pep425tags.py b/tests/unit/test_pep425tags.py index 7a9e278b7d3..039f1fd0f3e 100644 --- a/tests/unit/test_pep425tags.py +++ b/tests/unit/test_pep425tags.py @@ -7,16 +7,16 @@ @pytest.mark.parametrize( - 'version_info, expected', + "version_info, expected", [ - ((2,), '2'), - ((2, 8), '28'), - ((3,), '3'), - ((3, 6), '36'), + ((2,), "2"), + ((2, 8), "28"), + ((3,), "3"), + ((3, 6), "36"), # Test a tuple of length 3. 
- ((3, 6, 5), '36'), + ((3, 6, 5), "36"), # Test a 2-digit minor version. - ((3, 10), '310'), + ((3, 10), "310"), ], ) def test_version_info_to_nodot(version_info, expected): @@ -46,7 +46,7 @@ def abi_tag_unicode(self, flags, config_vars): """ import pip._internal.pep425tags - config_vars.update({'SOABI': None}) + config_vars.update({"SOABI": None}) base = ( pip._internal.pep425tags.get_abbr_impl() + pip._internal.pep425tags.get_impl_ver() @@ -54,28 +54,28 @@ def abi_tag_unicode(self, flags, config_vars): if sys.version_info >= (3, 8): # Python 3.8 removes the m flag, so don't look for it. - flags = flags.replace('m', '') + flags = flags.replace("m", "") if sys.version_info < (3, 3): - config_vars.update({'Py_UNICODE_SIZE': 2}) + config_vars.update({"Py_UNICODE_SIZE": 2}) mock_gcf = self.mock_get_config_var(**config_vars) - with patch('pip._internal.pep425tags.sysconfig.get_config_var', mock_gcf): + with patch("pip._internal.pep425tags.sysconfig.get_config_var", mock_gcf): abi_tag = pip._internal.pep425tags.get_abi_tag() assert abi_tag == base + flags - config_vars.update({'Py_UNICODE_SIZE': 4}) + config_vars.update({"Py_UNICODE_SIZE": 4}) mock_gcf = self.mock_get_config_var(**config_vars) - with patch('pip._internal.pep425tags.sysconfig.get_config_var', mock_gcf): + with patch("pip._internal.pep425tags.sysconfig.get_config_var", mock_gcf): abi_tag = pip._internal.pep425tags.get_abi_tag() - assert abi_tag == base + flags + 'u' + assert abi_tag == base + flags + "u" else: # On Python >= 3.3, UCS-4 is essentially permanently enabled, and # Py_UNICODE_SIZE is None. SOABI on these builds does not include # the 'u' so manual SOABI detection should not do so either. 
- config_vars.update({'Py_UNICODE_SIZE': None}) + config_vars.update({"Py_UNICODE_SIZE": None}) mock_gcf = self.mock_get_config_var(**config_vars) - with patch('pip._internal.pep425tags.sysconfig.get_config_var', mock_gcf): + with patch("pip._internal.pep425tags.sysconfig.get_config_var", mock_gcf): abi_tag = pip._internal.pep425tags.get_abi_tag() assert abi_tag == base + flags @@ -90,7 +90,7 @@ def test_broken_sysconfig(self): def raises_ioerror(var): raise IOError("I have the wrong path!") - with patch('pip._internal.pep425tags.sysconfig.get_config_var', raises_ioerror): + with patch("pip._internal.pep425tags.sysconfig.get_config_var", raises_ioerror): assert len(pip._internal.pep425tags.get_supported()) def test_no_hyphen_tag(self): @@ -99,43 +99,43 @@ def test_no_hyphen_tag(self): """ import pip._internal.pep425tags - mock_gcf = self.mock_get_config_var(SOABI='cpython-35m-darwin') + mock_gcf = self.mock_get_config_var(SOABI="cpython-35m-darwin") - with patch('pip._internal.pep425tags.sysconfig.get_config_var', mock_gcf): + with patch("pip._internal.pep425tags.sysconfig.get_config_var", mock_gcf): supported = pip._internal.pep425tags.get_supported() for (py, abi, plat) in supported: - assert '-' not in py - assert '-' not in abi - assert '-' not in plat + assert "-" not in py + assert "-" not in abi + assert "-" not in plat def test_manual_abi_noflags(self): """ Test that no flags are set on a non-PyDebug, non-Pymalloc ABI tag. """ - self.abi_tag_unicode('', {'Py_DEBUG': False, 'WITH_PYMALLOC': False}) + self.abi_tag_unicode("", {"Py_DEBUG": False, "WITH_PYMALLOC": False}) def test_manual_abi_d_flag(self): """ Test that the `d` flag is set on a PyDebug, non-Pymalloc ABI tag. """ - self.abi_tag_unicode('d', {'Py_DEBUG': True, 'WITH_PYMALLOC': False}) + self.abi_tag_unicode("d", {"Py_DEBUG": True, "WITH_PYMALLOC": False}) def test_manual_abi_m_flag(self): """ Test that the `m` flag is set on a non-PyDebug, Pymalloc ABI tag. 
""" - self.abi_tag_unicode('m', {'Py_DEBUG': False, 'WITH_PYMALLOC': True}) + self.abi_tag_unicode("m", {"Py_DEBUG": False, "WITH_PYMALLOC": True}) def test_manual_abi_dm_flags(self): """ Test that the `dm` flags are set on a PyDebug, Pymalloc ABI tag. """ - self.abi_tag_unicode('dm', {'Py_DEBUG': True, 'WITH_PYMALLOC': True}) + self.abi_tag_unicode("dm", {"Py_DEBUG": True, "WITH_PYMALLOC": True}) @pytest.mark.parametrize( - 'is_manylinux_compatible', + "is_manylinux_compatible", [pep425tags.is_manylinux1_compatible, pep425tags.is_manylinux2010_compatible], ) class TestManylinuxTags(object): @@ -144,25 +144,25 @@ class TestManylinuxTags(object): ...) """ - @patch('pip._internal.pep425tags.get_platform', lambda: 'linux_x86_64') - @patch('pip._internal.utils.glibc.have_compatible_glibc', lambda major, minor: True) + @patch("pip._internal.pep425tags.get_platform", lambda: "linux_x86_64") + @patch("pip._internal.utils.glibc.have_compatible_glibc", lambda major, minor: True) def test_manylinux_compatible_on_linux_x86_64(self, is_manylinux_compatible): """ Test that manylinuxes are enabled on linux_x86_64 """ assert is_manylinux_compatible() - @patch('pip._internal.pep425tags.get_platform', lambda: 'linux_i686') - @patch('pip._internal.utils.glibc.have_compatible_glibc', lambda major, minor: True) + @patch("pip._internal.pep425tags.get_platform", lambda: "linux_i686") + @patch("pip._internal.utils.glibc.have_compatible_glibc", lambda major, minor: True) def test_manylinux1_compatible_on_linux_i686(self, is_manylinux_compatible): """ Test that manylinux1 is enabled on linux_i686 """ assert is_manylinux_compatible() - @patch('pip._internal.pep425tags.get_platform', lambda: 'linux_x86_64') + @patch("pip._internal.pep425tags.get_platform", lambda: "linux_x86_64") @patch( - 'pip._internal.utils.glibc.have_compatible_glibc', lambda major, minor: False + "pip._internal.utils.glibc.have_compatible_glibc", lambda major, minor: False ) def test_manylinux1_2(self, 
is_manylinux_compatible): """ @@ -170,8 +170,8 @@ def test_manylinux1_2(self, is_manylinux_compatible): """ assert not is_manylinux_compatible() - @patch('pip._internal.pep425tags.get_platform', lambda: 'arm6vl') - @patch('pip._internal.utils.glibc.have_compatible_glibc', lambda major, minor: True) + @patch("pip._internal.pep425tags.get_platform", lambda: "arm6vl") + @patch("pip._internal.utils.glibc.have_compatible_glibc", lambda major, minor: True) def test_manylinux1_3(self, is_manylinux_compatible): """ Test that manylinux1 is disabled on arm6vl @@ -180,10 +180,10 @@ def test_manylinux1_3(self, is_manylinux_compatible): class TestManylinux1Tags(object): - @patch('pip._internal.pep425tags.is_manylinux2010_compatible', lambda: False) - @patch('pip._internal.pep425tags.get_platform', lambda: 'linux_x86_64') - @patch('pip._internal.utils.glibc.have_compatible_glibc', lambda major, minor: True) - @patch('sys.platform', 'linux2') + @patch("pip._internal.pep425tags.is_manylinux2010_compatible", lambda: False) + @patch("pip._internal.pep425tags.get_platform", lambda: "linux_x86_64") + @patch("pip._internal.utils.glibc.have_compatible_glibc", lambda major, minor: True) + @patch("sys.platform", "linux2") def test_manylinux1_tag_is_first(self): """ Test that the more specific tag manylinux1 comes first. 
@@ -193,19 +193,19 @@ def test_manylinux1_tag_is_first(self): groups.setdefault((pyimpl, abi), []).append(arch) for arches in groups.values(): - if arches == ['any']: + if arches == ["any"]: continue # Expect the most specific arch first: if len(arches) == 3: - assert arches == ['manylinux1_x86_64', 'linux_x86_64', 'any'] + assert arches == ["manylinux1_x86_64", "linux_x86_64", "any"] else: - assert arches == ['manylinux1_x86_64', 'linux_x86_64'] + assert arches == ["manylinux1_x86_64", "linux_x86_64"] class TestManylinux2010Tags(object): - @patch('pip._internal.pep425tags.get_platform', lambda: 'linux_x86_64') - @patch('pip._internal.utils.glibc.have_compatible_glibc', lambda major, minor: True) - @patch('sys.platform', 'linux2') + @patch("pip._internal.pep425tags.get_platform", lambda: "linux_x86_64") + @patch("pip._internal.utils.glibc.have_compatible_glibc", lambda major, minor: True) + @patch("sys.platform", "linux2") def test_manylinux2010_tag_is_first(self): """ Test that the more specific tag manylinux2010 comes first. 
@@ -215,21 +215,21 @@ def test_manylinux2010_tag_is_first(self): groups.setdefault((pyimpl, abi), []).append(arch) for arches in groups.values(): - if arches == ['any']: + if arches == ["any"]: continue # Expect the most specific arch first: if len(arches) == 4: assert arches == [ - 'manylinux2010_x86_64', - 'manylinux1_x86_64', - 'linux_x86_64', - 'any', + "manylinux2010_x86_64", + "manylinux1_x86_64", + "linux_x86_64", + "any", ] else: assert arches == [ - 'manylinux2010_x86_64', - 'manylinux1_x86_64', - 'linux_x86_64', + "manylinux2010_x86_64", + "manylinux1_x86_64", + "linux_x86_64", ] @pytest.mark.parametrize( @@ -249,6 +249,6 @@ def test_manylinux2010_implies_manylinux1(self, manylinux2010, manylinux1): groups.setdefault((pyimpl, abi), []).append(arch) for arches in groups.values(): - if arches == ['any']: + if arches == ["any"]: continue assert arches[:2] == [manylinux2010, manylinux1] diff --git a/tests/unit/test_pep517.py b/tests/unit/test_pep517.py index 0bfff537513..073d2af134c 100644 --- a/tests/unit/test_pep517.py +++ b/tests/unit/test_pep517.py @@ -5,7 +5,7 @@ @pytest.mark.parametrize( - ('source', 'expected'), + ("source", "expected"), [ ("pep517_setup_and_pyproject", True), ("pep517_setup_only", False), @@ -23,7 +23,7 @@ def test_use_pep517(data, source, expected): @pytest.mark.parametrize( - ('source', 'msg'), + ("source", "msg"), [ ("pep517_setup_and_pyproject", "specifies a build backend"), ("pep517_pyproject_only", "does not have a setup.py"), diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py index 78afa3db996..bbfb56e3108 100644 --- a/tests/unit/test_req.py +++ b/tests/unit/test_req.py @@ -40,7 +40,7 @@ ) -def get_processed_req_from_line(line, fname='file', lineno=1): +def get_processed_req_from_line(line, fname="file", lineno=1): req = list(process_line(line, fname, lineno))[0] req.is_direct = True return req @@ -57,8 +57,8 @@ def teardown(self): def _basic_resolver(self, finder): preparer = RequirementPreparer( - 
build_dir=os.path.join(self.tempdir, 'build'), - src_dir=os.path.join(self.tempdir, 'src'), + build_dir=os.path.join(self.tempdir, "build"), + src_dir=os.path.join(self.tempdir, "src"), download_dir=None, wheel_download_dir=None, progress_bar="on", @@ -87,12 +87,12 @@ def _basic_resolver(self, finder): def test_no_reuse_existing_build_dir(self, data): """Test prepare_files raise exception with previous build dir""" - build_dir = os.path.join(self.tempdir, 'build', 'simple') + build_dir = os.path.join(self.tempdir, "build", "simple") os.makedirs(build_dir) - with open(os.path.join(build_dir, "setup.py"), 'w'): + with open(os.path.join(build_dir, "setup.py"), "w"): pass reqset = RequirementSet() - req = install_req_from_line('simple') + req = install_req_from_line("simple") req.is_direct = True reqset.add_requirement(req) finder = make_test_finder(find_links=[data.find_links]) @@ -100,7 +100,7 @@ def test_no_reuse_existing_build_dir(self, data): assert_raises_regexp( PreviousBuildDirError, r"pip can't proceed with [\s\S]*%s[\s\S]*%s" - % (req, build_dir.replace('\\', '\\\\')), + % (req, build_dir.replace("\\", "\\\\")), resolver.resolve, reqset, ) @@ -120,9 +120,9 @@ def test_environment_marker_extras(self, data): resolver.resolve(reqset) # This is hacky but does test both case in py2 and py3 if sys.version_info[:2] == (2, 7): - assert reqset.has_requirement('simple') + assert reqset.has_requirement("simple") else: - assert not reqset.has_requirement('simple') + assert not reqset.has_requirement("simple") @pytest.mark.network def test_missing_hash_checking(self): @@ -132,43 +132,43 @@ def test_missing_hash_checking(self): reqset = RequirementSet() # No flags here. 
This tests that detection of later flags nonetheless # requires earlier packages to have hashes: - reqset.add_requirement(get_processed_req_from_line('blessings==1.0', lineno=1)) + reqset.add_requirement(get_processed_req_from_line("blessings==1.0", lineno=1)) # This flag activates --require-hashes mode: reqset.add_requirement( get_processed_req_from_line( - 'tracefront==0.1 --hash=sha256:somehash', lineno=2 + "tracefront==0.1 --hash=sha256:somehash", lineno=2 ) ) # This hash should be accepted because it came from the reqs file, not # from the internet: reqset.add_requirement( get_processed_req_from_line( - 'https://files.pythonhosted.org/packages/source/m/more-itertools/' - 'more-itertools-1.0.tar.gz#md5=b21850c3cfa7efbb70fd662ab5413bdd', + "https://files.pythonhosted.org/packages/source/m/more-itertools/" + "more-itertools-1.0.tar.gz#md5=b21850c3cfa7efbb70fd662ab5413bdd", lineno=3, ) ) # The error text should list this as a URL and not `peep==3.1.1`: reqset.add_requirement( get_processed_req_from_line( - 'https://files.pythonhosted.org/' - 'packages/source/p/peep/peep-3.1.1.tar.gz', + "https://files.pythonhosted.org/" + "packages/source/p/peep/peep-3.1.1.tar.gz", lineno=4, ) ) - finder = make_test_finder(index_urls=['https://pypi.org/simple/']) + finder = make_test_finder(index_urls=["https://pypi.org/simple/"]) resolver = self._basic_resolver(finder) assert_raises_regexp( HashErrors, - r'Hashes are required in --require-hashes mode, but they are ' - r'missing .*\n' - r' https://files\.pythonhosted\.org/packages/source/p/peep/peep' - r'-3\.1\.1\.tar\.gz --hash=sha256:[0-9a-f]+\n' - r' blessings==1.0 --hash=sha256:[0-9a-f]+\n' - r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n' - r' tracefront==0.1 .*:\n' - r' Expected sha256 somehash\n' - r' Got [0-9a-f]+$', + r"Hashes are required in --require-hashes mode, but they are " + r"missing .*\n" + r" https://files\.pythonhosted\.org/packages/source/p/peep/peep" + r"-3\.1\.1\.tar\.gz --hash=sha256:[0-9a-f]+\n" + r" 
blessings==1.0 --hash=sha256:[0-9a-f]+\n" + r"THESE PACKAGES DO NOT MATCH THE HASHES.*\n" + r" tracefront==0.1 .*:\n" + r" Expected sha256 somehash\n" + r" Got [0-9a-f]+$", resolver.resolve, reqset, ) @@ -178,17 +178,17 @@ def test_missing_hash_with_require_hashes(self, data): are missing. """ reqset = RequirementSet(require_hashes=True) - reqset.add_requirement(get_processed_req_from_line('simple==1.0', lineno=1)) + reqset.add_requirement(get_processed_req_from_line("simple==1.0", lineno=1)) finder = make_test_finder(find_links=[data.find_links]) resolver = self._basic_resolver(finder) assert_raises_regexp( HashErrors, - r'Hashes are required in --require-hashes mode, but they are ' - r'missing .*\n' - r' simple==1.0 --hash=sha256:393043e672415891885c9a2a0929b1af95' - r'fb866d6ca016b42d2e6ce53619b653$', + r"Hashes are required in --require-hashes mode, but they are " + r"missing .*\n" + r" simple==1.0 --hash=sha256:393043e672415891885c9a2a0929b1af95" + r"fb866d6ca016b42d2e6ce53619b653$", resolver.resolve, reqset, ) @@ -200,9 +200,9 @@ def test_missing_hash_with_require_hashes_in_reqs_file(self, data, tmpdir): req_set = RequirementSet(require_hashes=False) finder = make_test_finder(find_links=[data.find_links]) session = finder._link_collector.session - command = create_command('install') - with requirements_file('--require-hashes', tmpdir) as reqs_file: - options, args = command.parse_args(['-r', reqs_file]) + command = create_command("install") + with requirements_file("--require-hashes", tmpdir) as reqs_file: + options, args = command.parse_args(["-r", reqs_file]) command.populate_requirement_set( req_set, args, options, finder, session, wheel_cache=None ) @@ -219,18 +219,18 @@ def test_unsupported_hashes(self, data): reqset = RequirementSet(require_hashes=True) reqset.add_requirement( get_processed_req_from_line( - 'git+git://github.com/pypa/pip-test-package --hash=sha256:123', lineno=1 + "git+git://github.com/pypa/pip-test-package --hash=sha256:123", lineno=1 
) ) - dir_path = data.packages.joinpath('FSPkg') + dir_path = data.packages.joinpath("FSPkg") reqset.add_requirement( - get_processed_req_from_line('file://%s' % (dir_path,), lineno=2) + get_processed_req_from_line("file://%s" % (dir_path,), lineno=2) ) finder = make_test_finder(find_links=[data.find_links]) resolver = self._basic_resolver(finder) sep = os.path.sep - if sep == '\\': - sep = '\\\\' # This needs to be escaped for the regex + if sep == "\\": + sep = "\\\\" # This needs to be escaped for the regex assert_raises_regexp( HashErrors, r"Can't verify hashes for these requirements because we don't " @@ -253,16 +253,16 @@ def test_unpinned_hash_checking(self, data): # Test that there must be exactly 1 specifier: reqset.add_requirement( get_processed_req_from_line( - 'simple --hash=sha256:a90427ae31f5d1d0d7ec06ee97d9fcf2d0fc9a786985' - '250c1c83fd68df5911dd', + "simple --hash=sha256:a90427ae31f5d1d0d7ec06ee97d9fcf2d0fc9a786985" + "250c1c83fd68df5911dd", lineno=1, ) ) # Test that the operator must be ==: reqset.add_requirement( get_processed_req_from_line( - 'simple2>1.0 --hash=sha256:3ad45e1e9aa48b4462af0' - '123f6a7e44a9115db1ef945d4d92c123dfe21815a06', + "simple2>1.0 --hash=sha256:3ad45e1e9aa48b4462af0" + "123f6a7e44a9115db1ef945d4d92c123dfe21815a06", lineno=2, ) ) @@ -271,29 +271,29 @@ def test_unpinned_hash_checking(self, data): assert_raises_regexp( HashErrors, # Make sure all failing requirements are listed: - r'versions pinned with ==. These do not:\n' - r' simple .* \(from -r file \(line 1\)\)\n' - r' simple2>1.0 .* \(from -r file \(line 2\)\)', + r"versions pinned with ==. 
These do not:\n" + r" simple .* \(from -r file \(line 1\)\)\n" + r" simple2>1.0 .* \(from -r file \(line 2\)\)", resolver.resolve, reqset, ) def test_hash_mismatch(self, data): """A hash mismatch should raise an error.""" - file_url = path_to_url((data.packages / 'simple-1.0.tar.gz').abspath) + file_url = path_to_url((data.packages / "simple-1.0.tar.gz").abspath) reqset = RequirementSet(require_hashes=True) reqset.add_requirement( - get_processed_req_from_line('%s --hash=sha256:badbad' % file_url, lineno=1) + get_processed_req_from_line("%s --hash=sha256:badbad" % file_url, lineno=1) ) finder = make_test_finder(find_links=[data.find_links]) resolver = self._basic_resolver(finder) assert_raises_regexp( HashErrors, - r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n' - r' file:///.*/data/packages/simple-1\.0\.tar\.gz .*:\n' - r' Expected sha256 badbad\n' - r' Got 393043e672415891885c9a2a0929b1af95fb866d' - r'6ca016b42d2e6ce53619b653$', + r"THESE PACKAGES DO NOT MATCH THE HASHES.*\n" + r" file:///.*/data/packages/simple-1\.0\.tar\.gz .*:\n" + r" Expected sha256 badbad\n" + r" Got 393043e672415891885c9a2a0929b1af95fb866d" + r"6ca016b42d2e6ce53619b653$", resolver.resolve, reqset, ) @@ -306,17 +306,17 @@ def test_unhashed_deps_on_require_hashes(self, data): resolver = self._basic_resolver(finder) reqset.add_requirement( get_processed_req_from_line( - 'TopoRequires2==0.0.1 ' # requires TopoRequires - '--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd' - 'e3591d14f7896bdbefcf48543720c970', + "TopoRequires2==0.0.1 " # requires TopoRequires + "--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd" + "e3591d14f7896bdbefcf48543720c970", lineno=1, ) ) assert_raises_regexp( HashErrors, - r'In --require-hashes mode, all requirements must have their ' - r'versions pinned.*\n' - r' TopoRequires from .*$', + r"In --require-hashes mode, all requirements must have their " + r"versions pinned.*\n" + r" TopoRequires from .*$", resolver.resolve, reqset, ) @@ -333,17 +333,17 @@ def 
test_hashed_deps_on_require_hashes(self): reqset = RequirementSet() reqset.add_requirement( get_processed_req_from_line( - 'TopoRequires2==0.0.1 ' # requires TopoRequires - '--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd' - 'e3591d14f7896bdbefcf48543720c970', + "TopoRequires2==0.0.1 " # requires TopoRequires + "--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd" + "e3591d14f7896bdbefcf48543720c970", lineno=1, ) ) reqset.add_requirement( get_processed_req_from_line( - 'TopoRequires==0.0.1 ' - '--hash=sha256:d6dd1e22e60df512fdcf3640ced3039b3b02a56ab2cee81ebcb' - '3d0a6d4e8bfa6', + "TopoRequires==0.0.1 " + "--hash=sha256:d6dd1e22e60df512fdcf3640ced3039b3b02a56ab2cee81ebcb" + "3d0a6d4e8bfa6", lineno=2, ) ) @@ -358,32 +358,32 @@ def teardown(self): def test_url_with_query(self): """InstallRequirement should strip the fragment, but not the query.""" - url = 'http://foo.com/?p=bar.git;a=snapshot;h=v0.1;sf=tgz' - fragment = '#egg=bar' + url = "http://foo.com/?p=bar.git;a=snapshot;h=v0.1;sf=tgz" + fragment = "#egg=bar" req = install_req_from_line(url + fragment) assert req.link.url == url + fragment, req.link def test_pep440_wheel_link_requirement(self): - url = 'https://whatever.com/test-0.4-py2.py3-bogus-any.whl' - line = 'test @ https://whatever.com/test-0.4-py2.py3-bogus-any.whl' + url = "https://whatever.com/test-0.4-py2.py3-bogus-any.whl" + line = "test @ https://whatever.com/test-0.4-py2.py3-bogus-any.whl" req = install_req_from_line(line) - parts = str(req.req).split('@', 1) + parts = str(req.req).split("@", 1) assert len(parts) == 2 - assert parts[0].strip() == 'test' + assert parts[0].strip() == "test" assert parts[1].strip() == url def test_pep440_url_link_requirement(self): - url = 'git+http://foo.com@ref#egg=foo' - line = 'foo @ git+http://foo.com@ref#egg=foo' + url = "git+http://foo.com@ref#egg=foo" + line = "foo @ git+http://foo.com@ref#egg=foo" req = install_req_from_line(line) - parts = str(req.req).split('@', 1) + parts = str(req.req).split("@", 1) assert 
len(parts) == 2 - assert parts[0].strip() == 'foo' + assert parts[0].strip() == "foo" assert parts[1].strip() == url def test_url_with_authentication_link_requirement(self): - url = 'https://what@whatever.com/test-0.4-py2.py3-bogus-any.whl' - line = 'https://what@whatever.com/test-0.4-py2.py3-bogus-any.whl' + url = "https://what@whatever.com/test-0.4-py2.py3-bogus-any.whl" + line = "https://what@whatever.com/test-0.4-py2.py3-bogus-any.whl" req = install_req_from_line(line) assert req.link is not None assert req.link.is_wheel @@ -393,7 +393,7 @@ def test_url_with_authentication_link_requirement(self): def test_unsupported_wheel_link_requirement_raises(self): reqset = RequirementSet() req = install_req_from_line( - 'https://whatever.com/peppercorn-0.4-py2.py3-bogus-any.whl' + "https://whatever.com/peppercorn-0.4-py2.py3-bogus-any.whl" ) assert req.link is not None assert req.link.is_wheel @@ -405,7 +405,7 @@ def test_unsupported_wheel_link_requirement_raises(self): def test_unsupported_wheel_local_file_requirement_raises(self, data): reqset = RequirementSet() req = install_req_from_line( - data.packages.joinpath('simple.dist-0.1-py1-none-invalid.whl') + data.packages.joinpath("simple.dist-0.1-py1-none-invalid.whl") ) assert req.link is not None assert req.link.is_wheel @@ -415,53 +415,53 @@ def test_unsupported_wheel_local_file_requirement_raises(self, data): reqset.add_requirement(req) def test_installed_version_not_installed(self): - req = install_req_from_line('simple-0.1-py2.py3-none-any.whl') + req = install_req_from_line("simple-0.1-py2.py3-none-any.whl") assert req.installed_version is None def test_str(self): - req = install_req_from_line('simple==0.1') - assert str(req) == 'simple==0.1' + req = install_req_from_line("simple==0.1") + assert str(req) == "simple==0.1" def test_repr(self): - req = install_req_from_line('simple==0.1') - assert repr(req) == ('') + req = install_req_from_line("simple==0.1") + assert repr(req) == ("") def 
test_invalid_wheel_requirement_raises(self): with pytest.raises(InvalidWheelFilename): - install_req_from_line('invalid.whl') + install_req_from_line("invalid.whl") def test_wheel_requirement_sets_req_attribute(self): - req = install_req_from_line('simple-0.1-py2.py3-none-any.whl') + req = install_req_from_line("simple-0.1-py2.py3-none-any.whl") assert isinstance(req.req, Requirement) - assert str(req.req) == 'simple==0.1' + assert str(req.req) == "simple==0.1" def test_url_preserved_line_req(self): """Confirm the url is preserved in a non-editable requirement""" - url = 'git+http://foo.com@ref#egg=foo' + url = "git+http://foo.com@ref#egg=foo" req = install_req_from_line(url) assert req.link.url == url def test_url_preserved_editable_req(self): """Confirm the url is preserved in a editable requirement""" - url = 'git+http://foo.com@ref#egg=foo' + url = "git+http://foo.com@ref#egg=foo" req = install_req_from_editable(url) assert req.link.url == url @pytest.mark.parametrize( - 'path', + "path", ( - '/path/to/foo.egg-info'.replace('/', os.path.sep), + "/path/to/foo.egg-info".replace("/", os.path.sep), # Tests issue fixed by https://github.com/pypa/pip/pull/2530 - '/path/to/foo.egg-info/'.replace('/', os.path.sep), + "/path/to/foo.egg-info/".replace("/", os.path.sep), ), ) def test_get_dist(self, path): - req = install_req_from_line('foo') + req = install_req_from_line("foo") req._egg_info_path = path dist = req.get_dist() assert isinstance(dist, pkg_resources.Distribution) - assert dist.project_name == 'foo' - assert dist.location == '/path/to'.replace('/', os.path.sep) + assert dist.project_name == "foo" + assert dist.location == "/path/to".replace("/", os.path.sep) def test_markers(self): for line in ( @@ -473,27 +473,27 @@ def test_markers(self): 'mock3;python_version >= "3"', ): req = install_req_from_line(line) - assert req.req.name == 'mock3' - assert str(req.req.specifier) == '' + assert req.req.name == "mock3" + assert str(req.req.specifier) == "" assert 
str(req.markers) == 'python_version >= "3"' def test_markers_semicolon(self): # check that the markers can contain a semicolon req = install_req_from_line('semicolon; os_name == "a; b"') - assert req.req.name == 'semicolon' - assert str(req.req.specifier) == '' + assert req.req.name == "semicolon" + assert str(req.req.specifier) == "" assert str(req.markers) == 'os_name == "a; b"' def test_markers_url(self): # test "URL; markers" syntax - url = 'http://foo.com/?p=bar.git;a=snapshot;h=v0.1;sf=tgz' + url = "http://foo.com/?p=bar.git;a=snapshot;h=v0.1;sf=tgz" line = '%s; python_version >= "3"' % url req = install_req_from_line(line) assert req.link.url == url, req.url assert str(req.markers) == 'python_version >= "3"' # without space, markers are part of the URL - url = 'http://foo.com/?p=bar.git;a=snapshot;h=v0.1;sf=tgz' + url = "http://foo.com/?p=bar.git;a=snapshot;h=v0.1;sf=tgz" line = '%s;python_version >= "3"' % url req = install_req_from_line(line) assert req.link.url == line, req.url @@ -501,91 +501,91 @@ def test_markers_url(self): def test_markers_match_from_line(self): # match - for markers in ('python_version >= "1.0"', 'sys_platform == %r' % sys.platform): - line = 'name; ' + markers + for markers in ('python_version >= "1.0"', "sys_platform == %r" % sys.platform): + line = "name; " + markers req = install_req_from_line(line) assert str(req.markers) == str(Marker(markers)) assert req.match_markers() # don't match - for markers in ('python_version >= "5.0"', 'sys_platform != %r' % sys.platform): - line = 'name; ' + markers + for markers in ('python_version >= "5.0"', "sys_platform != %r" % sys.platform): + line = "name; " + markers req = install_req_from_line(line) assert str(req.markers) == str(Marker(markers)) assert not req.match_markers() def test_markers_match(self): # match - for markers in ('python_version >= "1.0"', 'sys_platform == %r' % sys.platform): - line = 'name; ' + markers - req = install_req_from_line(line, comes_from='') + for markers in 
('python_version >= "1.0"', "sys_platform == %r" % sys.platform): + line = "name; " + markers + req = install_req_from_line(line, comes_from="") assert str(req.markers) == str(Marker(markers)) assert req.match_markers() # don't match - for markers in ('python_version >= "5.0"', 'sys_platform != %r' % sys.platform): - line = 'name; ' + markers - req = install_req_from_line(line, comes_from='') + for markers in ('python_version >= "5.0"', "sys_platform != %r" % sys.platform): + line = "name; " + markers + req = install_req_from_line(line, comes_from="") assert str(req.markers) == str(Marker(markers)) assert not req.match_markers() def test_extras_for_line_path_requirement(self): - line = 'SomeProject[ex1,ex2]' - filename = 'filename' - comes_from = '-r %s (line %s)' % (filename, 1) + line = "SomeProject[ex1,ex2]" + filename = "filename" + comes_from = "-r %s (line %s)" % (filename, 1) req = install_req_from_line(line, comes_from=comes_from) assert len(req.extras) == 2 - assert req.extras == {'ex1', 'ex2'} + assert req.extras == {"ex1", "ex2"} def test_extras_for_line_url_requirement(self): - line = 'git+https://url#egg=SomeProject[ex1,ex2]' - filename = 'filename' - comes_from = '-r %s (line %s)' % (filename, 1) + line = "git+https://url#egg=SomeProject[ex1,ex2]" + filename = "filename" + comes_from = "-r %s (line %s)" % (filename, 1) req = install_req_from_line(line, comes_from=comes_from) assert len(req.extras) == 2 - assert req.extras == {'ex1', 'ex2'} + assert req.extras == {"ex1", "ex2"} def test_extras_for_editable_path_requirement(self): - url = '.[ex1,ex2]' - filename = 'filename' - comes_from = '-r %s (line %s)' % (filename, 1) + url = ".[ex1,ex2]" + filename = "filename" + comes_from = "-r %s (line %s)" % (filename, 1) req = install_req_from_editable(url, comes_from=comes_from) assert len(req.extras) == 2 - assert req.extras == {'ex1', 'ex2'} + assert req.extras == {"ex1", "ex2"} def test_extras_for_editable_url_requirement(self): - url = 
'git+https://url#egg=SomeProject[ex1,ex2]' - filename = 'filename' - comes_from = '-r %s (line %s)' % (filename, 1) + url = "git+https://url#egg=SomeProject[ex1,ex2]" + filename = "filename" + comes_from = "-r %s (line %s)" % (filename, 1) req = install_req_from_editable(url, comes_from=comes_from) assert len(req.extras) == 2 - assert req.extras == {'ex1', 'ex2'} + assert req.extras == {"ex1", "ex2"} def test_unexisting_path(self): with pytest.raises(InstallationError) as e: - install_req_from_line(os.path.join('this', 'path', 'does', 'not', 'exist')) + install_req_from_line(os.path.join("this", "path", "does", "not", "exist")) err_msg = e.value.args[0] assert "Invalid requirement" in err_msg assert "It looks like a path." in err_msg def test_single_equal_sign(self): with pytest.raises(InstallationError) as e: - install_req_from_line('toto=42') + install_req_from_line("toto=42") err_msg = e.value.args[0] assert "Invalid requirement" in err_msg assert "= is not a valid operator. Did you mean == ?" 
in err_msg def test_unidentifiable_name(self): - test_name = '-' + test_name = "-" with pytest.raises(InstallationError) as e: install_req_from_line(test_name) err_msg = e.value.args[0] assert "Invalid requirement: '{}'".format(test_name) == err_msg def test_requirement_file(self): - req_file_path = os.path.join(self.tempdir, 'test.txt') - with open(req_file_path, 'w') as req_file: - req_file.write('pip\nsetuptools') + req_file_path = os.path.join(self.tempdir, "test.txt") + with open(req_file_path, "w") as req_file: + req_file.write("pip\nsetuptools") with pytest.raises(InstallationError) as e: install_req_from_line(req_file_path) err_msg = e.value.args[0] @@ -595,46 +595,46 @@ def test_requirement_file(self): assert "If that is the case, use the '-r' flag to install" in err_msg -@patch('pip._internal.req.req_install.os.path.abspath') -@patch('pip._internal.req.req_install.os.path.exists') -@patch('pip._internal.req.req_install.os.path.isdir') +@patch("pip._internal.req.req_install.os.path.abspath") +@patch("pip._internal.req.req_install.os.path.exists") +@patch("pip._internal.req.req_install.os.path.isdir") def test_parse_editable_local(isdir_mock, exists_mock, abspath_mock): exists_mock.return_value = isdir_mock.return_value = True # mocks needed to support path operations on windows tests abspath_mock.return_value = "/some/path" - assert parse_editable('.') == (None, 'file:///some/path', None) + assert parse_editable(".") == (None, "file:///some/path", None) abspath_mock.return_value = "/some/path/foo" - assert parse_editable('foo') == (None, 'file:///some/path/foo', None) + assert parse_editable("foo") == (None, "file:///some/path/foo", None) def test_parse_editable_explicit_vcs(): - assert parse_editable('svn+https://foo#egg=foo') == ( - 'foo', - 'svn+https://foo#egg=foo', + assert parse_editable("svn+https://foo#egg=foo") == ( + "foo", + "svn+https://foo#egg=foo", None, ) def test_parse_editable_vcs_extras(): - assert 
parse_editable('svn+https://foo#egg=foo[extras]') == ( - 'foo[extras]', - 'svn+https://foo#egg=foo[extras]', + assert parse_editable("svn+https://foo#egg=foo[extras]") == ( + "foo[extras]", + "svn+https://foo#egg=foo[extras]", None, ) -@patch('pip._internal.req.req_install.os.path.abspath') -@patch('pip._internal.req.req_install.os.path.exists') -@patch('pip._internal.req.req_install.os.path.isdir') +@patch("pip._internal.req.req_install.os.path.abspath") +@patch("pip._internal.req.req_install.os.path.exists") +@patch("pip._internal.req.req_install.os.path.isdir") def test_parse_editable_local_extras(isdir_mock, exists_mock, abspath_mock): exists_mock.return_value = isdir_mock.return_value = True abspath_mock.return_value = "/some/path" - assert parse_editable('.[extras]') == (None, 'file://' + "/some/path", {'extras'}) + assert parse_editable(".[extras]") == (None, "file://" + "/some/path", {"extras"}) abspath_mock.return_value = "/some/path/foo" - assert parse_editable('foo[bar,baz]') == ( + assert parse_editable("foo[bar,baz]") == ( None, - 'file:///some/path/foo', - {'bar', 'baz'}, + "file:///some/path/foo", + {"bar", "baz"}, ) @@ -648,38 +648,38 @@ def test_exclusive_environment_markers(): req_set = RequirementSet() req_set.add_requirement(eq36) req_set.add_requirement(ne36) - assert req_set.has_requirement('Django') + assert req_set.has_requirement("Django") def test_mismatched_versions(caplog, tmpdir): - original_source = os.path.join(DATA_DIR, 'src', 'simplewheel-1.0') - source_dir = os.path.join(tmpdir, 'simplewheel') + original_source = os.path.join(DATA_DIR, "src", "simplewheel-1.0") + source_dir = os.path.join(tmpdir, "simplewheel") shutil.copytree(original_source, source_dir) req = InstallRequirement( - req=Requirement('simplewheel==2.0'), comes_from=None, source_dir=source_dir + req=Requirement("simplewheel==2.0"), comes_from=None, source_dir=source_dir ) req.prepare_metadata() req.assert_source_matches_version() assert caplog.records[-1].message == ( 
- 'Requested simplewheel==2.0, but installing version 1.0' + "Requested simplewheel==2.0, but installing version 1.0" ) @pytest.mark.parametrize( - 'args, expected', + "args, expected", [ # Test UNIX-like paths - (('/path/to/installable'), True), + (("/path/to/installable"), True), # Test relative paths - (('./path/to/installable'), True), + (("./path/to/installable"), True), # Test current path - (('.'), True), + (("."), True), # Test url paths - (('https://whatever.com/test-0.4-py2.py3-bogus-any.whl'), True), + (("https://whatever.com/test-0.4-py2.py3-bogus-any.whl"), True), # Test pep440 paths - (('test @ https://whatever.com/test-0.4-py2.py3-bogus-any.whl'), True), + (("test @ https://whatever.com/test-0.4-py2.py3-bogus-any.whl"), True), # Test wheel - (('simple-0.1-py2.py3-none-any.whl'), False), + (("simple-0.1-py2.py3-none-any.whl"), False), ], ) def test_looks_like_path(args, expected): @@ -687,16 +687,16 @@ def test_looks_like_path(args, expected): @pytest.mark.skipif( - not sys.platform.startswith("win"), reason='Test only available on Windows' + not sys.platform.startswith("win"), reason="Test only available on Windows" ) @pytest.mark.parametrize( - 'args, expected', + "args, expected", [ # Test relative paths - (('.\\path\\to\\installable'), True), - (('relative\\path'), True), + ((".\\path\\to\\installable"), True), + (("relative\\path"), True), # Test absolute paths - (('C:\\absolute\\path'), True), + (("C:\\absolute\\path"), True), ], ) def test_looks_like_path_win(args, expected): @@ -704,13 +704,13 @@ def test_looks_like_path_win(args, expected): @pytest.mark.parametrize( - 'args, mock_returns, expected', + "args, mock_returns, expected", [ # Test pep440 urls ( ( - '/path/to/foo @ git+http://foo.com@ref#egg=foo', - 'foo @ git+http://foo.com@ref#egg=foo', + "/path/to/foo @ git+http://foo.com@ref#egg=foo", + "foo @ git+http://foo.com@ref#egg=foo", ), (False, False), None, @@ -718,8 +718,8 @@ def test_looks_like_path_win(args, expected): # Test pep440 
urls without spaces ( ( - '/path/to/foo@git+http://foo.com@ref#egg=foo', - 'foo @ git+http://foo.com@ref#egg=foo', + "/path/to/foo@git+http://foo.com@ref#egg=foo", + "foo @ git+http://foo.com@ref#egg=foo", ), (False, False), None, @@ -727,51 +727,51 @@ def test_looks_like_path_win(args, expected): # Test pep440 wheel ( ( - '/path/to/test @ https://whatever.com/test-0.4-py2.py3-bogus-any.whl', - 'test @ https://whatever.com/test-0.4-py2.py3-bogus-any.whl', + "/path/to/test @ https://whatever.com/test-0.4-py2.py3-bogus-any.whl", + "test @ https://whatever.com/test-0.4-py2.py3-bogus-any.whl", ), (False, False), None, ), # Test name is not a file - (('/path/to/simple==0.1', 'simple==0.1'), (False, False), None), + (("/path/to/simple==0.1", "simple==0.1"), (False, False), None), ], ) -@patch('pip._internal.req.req_install.os.path.isdir') -@patch('pip._internal.req.req_install.os.path.isfile') +@patch("pip._internal.req.req_install.os.path.isdir") +@patch("pip._internal.req.req_install.os.path.isfile") def test_get_url_from_path(isdir_mock, isfile_mock, args, mock_returns, expected): isdir_mock.return_value = mock_returns[0] isfile_mock.return_value = mock_returns[1] assert _get_url_from_path(*args) is expected -@patch('pip._internal.req.req_install.os.path.isdir') -@patch('pip._internal.req.req_install.os.path.isfile') +@patch("pip._internal.req.req_install.os.path.isdir") +@patch("pip._internal.req.req_install.os.path.isfile") def test_get_url_from_path__archive_file(isdir_mock, isfile_mock): isdir_mock.return_value = False isfile_mock.return_value = True - name = 'simple-0.1-py2.py3-none-any.whl' - path = os.path.join('/path/to/' + name) + name = "simple-0.1-py2.py3-none-any.whl" + path = os.path.join("/path/to/" + name) url = path_to_url(path) assert _get_url_from_path(path, name) == url -@patch('pip._internal.req.req_install.os.path.isdir') -@patch('pip._internal.req.req_install.os.path.isfile') +@patch("pip._internal.req.req_install.os.path.isdir") 
+@patch("pip._internal.req.req_install.os.path.isfile") def test_get_url_from_path__installable_dir(isdir_mock, isfile_mock): isdir_mock.return_value = True isfile_mock.return_value = True - name = 'some/setuptools/project' - path = os.path.join('/path/to/' + name) + name = "some/setuptools/project" + path = os.path.join("/path/to/" + name) url = path_to_url(path) assert _get_url_from_path(path, name) == url -@patch('pip._internal.req.req_install.os.path.isdir') +@patch("pip._internal.req.req_install.os.path.isdir") def test_get_url_from_path__installable_error(isdir_mock): isdir_mock.return_value = True - name = 'some/setuptools/project' - path = os.path.join('/path/to/' + name) + name = "some/setuptools/project" + path = os.path.join("/path/to/" + name) with pytest.raises(InstallationError) as e: _get_url_from_path(path, name) err_msg = e.value.args[0] diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py index 768d0f7ec61..d85010c1fbf 100644 --- a/tests/unit/test_req_file.py +++ b/tests/unit/test_req_file.py @@ -9,10 +9,7 @@ import pip._internal.index from pip._internal.download import PipSession -from pip._internal.exceptions import ( - InstallationError, - RequirementsFileParseError, -) +from pip._internal.exceptions import InstallationError, RequirementsFileParseError from pip._internal.models.format_control import FormatControl from pip._internal.req.constructors import ( install_req_from_editable, @@ -44,7 +41,7 @@ def finder(session): def options(session): return stub( isolated_mode=False, - index_url='default_url', + index_url="default_url", skip_requirements_regex=False, format_control=FormatControl(set(), set()), ) @@ -62,7 +59,7 @@ def test_comments_and_joins_case1(self): """ ) result = preprocess(content, None) - assert list(result) == [(1, 'req1'), (3, 'req2')] + assert list(result) == [(1, "req1"), (3, "req2")] def test_comments_and_joins_case2(self): content = textwrap.dedent( @@ -72,7 +69,7 @@ def 
test_comments_and_joins_case2(self): """ ) result = preprocess(content, None) - assert list(result) == [(1, 'req1')] + assert list(result) == [(1, "req1")] def test_comments_and_joins_case3(self): content = textwrap.dedent( @@ -83,7 +80,7 @@ def test_comments_and_joins_case3(self): """ ) result = preprocess(content, None) - assert list(result) == [(1, 'req1'), (3, 'req2')] + assert list(result) == [(1, "req1"), (3, "req2")] def test_skip_regex_after_joining_case1(self, options): content = textwrap.dedent( @@ -93,9 +90,9 @@ def test_skip_regex_after_joining_case1(self, options): line2 """ ) - options.skip_requirements_regex = 'pattern' + options.skip_requirements_regex = "pattern" result = preprocess(content, options) - assert list(result) == [(3, 'line2')] + assert list(result) == [(3, "line2")] def test_skip_regex_after_joining_case2(self, options): content = textwrap.dedent( @@ -105,28 +102,28 @@ def test_skip_regex_after_joining_case2(self, options): line3 """ ) - options.skip_requirements_regex = 'pattern' + options.skip_requirements_regex = "pattern" result = preprocess(content, options) - assert list(result) == [(3, 'line3')] + assert list(result) == [(3, "line3")] class TestIgnoreComments(object): """tests for `ignore_comment`""" def test_ignore_line(self): - lines = [(1, ''), (2, 'req1'), (3, 'req2')] + lines = [(1, ""), (2, "req1"), (3, "req2")] result = ignore_comments(lines) - assert list(result) == [(2, 'req1'), (3, 'req2')] + assert list(result) == [(2, "req1"), (3, "req2")] def test_ignore_comment(self): - lines = [(1, 'req1'), (2, '# comment'), (3, 'req2')] + lines = [(1, "req1"), (2, "# comment"), (3, "req2")] result = ignore_comments(lines) - assert list(result) == [(1, 'req1'), (3, 'req2')] + assert list(result) == [(1, "req1"), (3, "req2")] def test_strip_comment(self): - lines = [(1, 'req1'), (2, 'req # comment'), (3, 'req2')] + lines = [(1, "req1"), (2, "req # comment"), (3, "req2")] result = ignore_comments(lines) - assert list(result) == [(1, 
'req1'), (2, 'req'), (3, 'req2')] + assert list(result) == [(1, "req1"), (2, "req"), (3, "req2")] class TestJoinLines(object): @@ -135,27 +132,27 @@ class TestJoinLines(object): def test_join_lines(self): lines = enumerate( [ - 'line 1', - 'line 2:1 \\', - 'line 2:2', - 'line 3:1 \\', - 'line 3:2 \\', - 'line 3:3', - 'line 4', + "line 1", + "line 2:1 \\", + "line 2:2", + "line 3:1 \\", + "line 3:2 \\", + "line 3:3", + "line 4", ], start=1, ) expect = [ - (1, 'line 1'), - (2, 'line 2:1 line 2:2'), - (4, 'line 3:1 line 3:2 line 3:3'), - (7, 'line 4'), + (1, "line 1"), + (2, "line 2:1 line 2:2"), + (4, "line 3:1 line 3:2 line 3:3"), + (7, "line 4"), ] assert expect == list(join_lines(lines)) def test_last_line_with_escape(self): - lines = enumerate(['line 1', 'line 2 \\'], start=1) - expect = [(1, 'line 1'), (2, 'line 2 ')] + lines = enumerate(["line 1", "line 2 \\"], start=1) + expect = [(1, "line 1"), (2, "line 2 ")] assert expect == list(join_lines(lines)) @@ -163,23 +160,23 @@ class TestSkipRegex(object): """tests for `skip_reqex``""" def test_skip_regex_pattern_match(self): - options = stub(skip_requirements_regex='.*Bad.*') - line = '--extra-index-url Bad' + options = stub(skip_requirements_regex=".*Bad.*") + line = "--extra-index-url Bad" assert [] == list(skip_regex(enumerate([line]), options)) def test_skip_regex_pattern_not_match(self): - options = stub(skip_requirements_regex='.*Bad.*') - line = '--extra-index-url Good' + options = stub(skip_requirements_regex=".*Bad.*") + line = "--extra-index-url Good" assert [(0, line)] == list(skip_regex(enumerate([line]), options)) def test_skip_regex_no_options(self): options = None - line = '--extra-index-url Good' + line = "--extra-index-url Good" assert [(0, line)] == list(skip_regex(enumerate([line]), options)) def test_skip_regex_no_skip_option(self): options = stub(skip_requirements_regex=None) - line = '--extra-index-url Good' + line = "--extra-index-url Good" assert [(0, line)] == 
list(skip_regex(enumerate([line]), options)) @@ -191,16 +188,16 @@ def test_parser_error(self): list(process_line("--bogus", "file", 1)) def test_parser_offending_line(self): - line = 'pkg==1.0.0 --hash=somehash' + line = "pkg==1.0.0 --hash=somehash" with pytest.raises(RequirementsFileParseError) as err: - list(process_line(line, 'file', 1)) + list(process_line(line, "file", 1)) assert line in str(err.value) def test_parser_non_offending_line(self): try: - list(process_line('pkg==1.0.0 --hash=sha256:somehash', 'file', 1)) + list(process_line("pkg==1.0.0 --hash=sha256:somehash", "file", 1)) except RequirementsFileParseError: - pytest.fail('Reported offending line where it should not.') + pytest.fail("Reported offending line where it should not.") def test_only_one_req_per_line(self): # pkg_resources raises the ValueError @@ -213,70 +210,70 @@ def test_error_message(self): line number, and hint). """ iterator = process_line( - 'my-package=1.0', filename='path/requirements.txt', line_number=3 + "my-package=1.0", filename="path/requirements.txt", line_number=3 ) with pytest.raises(InstallationError) as exc: list(iterator) expected = ( "Invalid requirement: 'my-package=1.0' " - '(from line 3 of path/requirements.txt)\n' - 'Hint: = is not a valid operator. Did you mean == ?' + "(from line 3 of path/requirements.txt)\n" + "Hint: = is not a valid operator. Did you mean == ?" 
) assert str(exc.value) == expected def test_yield_line_requirement(self): - line = 'SomeProject' - filename = 'filename' - comes_from = '-r %s (line %s)' % (filename, 1) + line = "SomeProject" + filename = "filename" + comes_from = "-r %s (line %s)" % (filename, 1) req = install_req_from_line(line, comes_from=comes_from) assert repr(list(process_line(line, filename, 1))[0]) == repr(req) def test_yield_pep440_line_requirement(self): - line = 'SomeProject @ https://url/SomeProject-py2-py3-none-any.whl' - filename = 'filename' - comes_from = '-r %s (line %s)' % (filename, 1) + line = "SomeProject @ https://url/SomeProject-py2-py3-none-any.whl" + filename = "filename" + comes_from = "-r %s (line %s)" % (filename, 1) req = install_req_from_line(line, comes_from=comes_from) assert repr(list(process_line(line, filename, 1))[0]) == repr(req) def test_yield_line_constraint(self): - line = 'SomeProject' - filename = 'filename' - comes_from = '-c %s (line %s)' % (filename, 1) + line = "SomeProject" + filename = "filename" + comes_from = "-c %s (line %s)" % (filename, 1) req = install_req_from_line(line, comes_from=comes_from, constraint=True) found_req = list(process_line(line, filename, 1, constraint=True))[0] assert repr(found_req) == repr(req) assert found_req.constraint is True def test_yield_line_requirement_with_spaces_in_specifier(self): - line = 'SomeProject >= 2' - filename = 'filename' - comes_from = '-r %s (line %s)' % (filename, 1) + line = "SomeProject >= 2" + filename = "filename" + comes_from = "-r %s (line %s)" % (filename, 1) req = install_req_from_line(line, comes_from=comes_from) assert repr(list(process_line(line, filename, 1))[0]) == repr(req) - assert str(req.req.specifier) == '>=2' + assert str(req.req.specifier) == ">=2" def test_yield_editable_requirement(self): - url = 'git+https://url#egg=SomeProject' - line = '-e %s' % url - filename = 'filename' - comes_from = '-r %s (line %s)' % (filename, 1) + url = "git+https://url#egg=SomeProject" + line = 
"-e %s" % url + filename = "filename" + comes_from = "-r %s (line %s)" % (filename, 1) req = install_req_from_editable(url, comes_from=comes_from) assert repr(list(process_line(line, filename, 1))[0]) == repr(req) def test_yield_editable_constraint(self): - url = 'git+https://url#egg=SomeProject' - line = '-e %s' % url - filename = 'filename' - comes_from = '-c %s (line %s)' % (filename, 1) + url = "git+https://url#egg=SomeProject" + line = "-e %s" % url + filename = "filename" + comes_from = "-c %s (line %s)" % (filename, 1) req = install_req_from_editable(url, comes_from=comes_from, constraint=True) found_req = list(process_line(line, filename, 1, constraint=True))[0] assert repr(found_req) == repr(req) assert found_req.constraint is True def test_nested_requirements_file(self, monkeypatch): - line = '-r another_file' - req = install_req_from_line('SomeProject') + line = "-r another_file" + req = install_req_from_line("SomeProject") import pip._internal.req.req_file def stub_parse_requirements( @@ -287,14 +284,14 @@ def stub_parse_requirements( parse_requirements_stub = stub(call=stub_parse_requirements) monkeypatch.setattr( pip._internal.req.req_file, - 'parse_requirements', + "parse_requirements", parse_requirements_stub.call, ) - assert list(process_line(line, 'filename', 1)) == [(req, False)] + assert list(process_line(line, "filename", 1)) == [(req, False)] def test_nested_constraints_file(self, monkeypatch): - line = '-c another_file' - req = install_req_from_line('SomeProject') + line = "-c another_file" + req = install_req_from_line("SomeProject") import pip._internal.req.req_file def stub_parse_requirements( @@ -305,21 +302,21 @@ def stub_parse_requirements( parse_requirements_stub = stub(call=stub_parse_requirements) monkeypatch.setattr( pip._internal.req.req_file, - 'parse_requirements', + "parse_requirements", parse_requirements_stub.call, ) - assert list(process_line(line, 'filename', 1)) == [(req, True)] + assert list(process_line(line, "filename", 
1)) == [(req, True)] def test_options_on_a_requirement_line(self): line = ( - 'SomeProject --install-option=yo1 --install-option yo2 ' + "SomeProject --install-option=yo1 --install-option yo2 " '--global-option="yo3" --global-option "yo4"' ) - filename = 'filename' + filename = "filename" req = list(process_line(line, filename, 1))[0] assert req.options == { - 'global_options': ['yo3', 'yo4'], - 'install_options': ['yo1', 'yo2'], + "global_options": ["yo3", "yo4"], + "install_options": ["yo1", "yo2"], } def test_hash_options(self): @@ -329,33 +326,33 @@ def test_hash_options(self): """ line = ( - 'SomeProject --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b1' - '61e5c1fa7425e73043362938b9824 ' - '--hash=sha384:59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c' - '3553bcdb9c666fa90125a3c79f90397bdf5f6a13de828684f ' - '--hash=sha256:486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8' - 'e5a6c65260e9cb8a7' + "SomeProject --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b1" + "61e5c1fa7425e73043362938b9824 " + "--hash=sha384:59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c" + "3553bcdb9c666fa90125a3c79f90397bdf5f6a13de828684f " + "--hash=sha256:486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8" + "e5a6c65260e9cb8a7" ) - filename = 'filename' + filename = "filename" req = list(process_line(line, filename, 1))[0] assert req.options == { - 'hashes': { - 'sha256': [ - '2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e730433' - '62938b9824', - '486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8e5a6c65' - '260e9cb8a7', + "hashes": { + "sha256": [ + "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e730433" + "62938b9824", + "486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8e5a6c65" + "260e9cb8a7", ], - 'sha384': [ - '59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c3553bcd' - 'b9c666fa90125a3c79f90397bdf5f6a13de828684f' + "sha384": [ + "59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c3553bcd" + "b9c666fa90125a3c79f90397bdf5f6a13de828684f" ], } } def test_set_isolated(self, options): - line = 
'SomeProject' - filename = 'filename' + line = "SomeProject" + filename = "filename" options.isolated_mode = True result = process_line(line, filename, 1, options=options) assert list(result)[0].isolated @@ -366,15 +363,15 @@ def test_set_finder_no_index(self, finder): def test_set_finder_index_url(self, finder): list(process_line("--index-url=url", "file", 1, finder=finder)) - assert finder.index_urls == ['url'] + assert finder.index_urls == ["url"] def test_set_finder_find_links(self, finder): list(process_line("--find-links=url", "file", 1, finder=finder)) - assert finder.find_links == ['url'] + assert finder.find_links == ["url"] def test_set_finder_extra_index_urls(self, finder): list(process_line("--extra-index-url=url", "file", 1, finder=finder)) - assert finder.index_urls == ['url'] + assert finder.index_urls == ["url"] def test_set_finder_trusted_host(self, caplog, session, finder): with caplog.at_level(logging.INFO): @@ -387,14 +384,14 @@ def test_set_finder_trusted_host(self, caplog, session, finder): session=session, ) ) - assert list(finder.trusted_hosts) == ['host1', 'host2:8080'] + assert list(finder.trusted_hosts) == ["host1", "host2:8080"] session = finder._link_collector.session - assert session.adapters['https://host1/'] is session._insecure_adapter - assert session.adapters['https://host2:8080/'] is session._insecure_adapter + assert session.adapters["https://host1/"] is session._insecure_adapter + assert session.adapters["https://host2:8080/"] is session._insecure_adapter # Test the log message. 
actual = [(r.levelname, r.message) for r in caplog.records] - expected = ('INFO', "adding trusted host: 'host1' (from line 1 of file.txt)") + expected = ("INFO", "adding trusted host: 'host1' (from line 1 of file.txt)") assert expected in actual def test_noop_always_unzip(self, finder): @@ -411,10 +408,10 @@ def test_relative_local_find_links(self, finder, monkeypatch): """ # Make sure the test also passes on windows req_file = os.path.normcase( - os.path.abspath(os.path.normpath('/path/req_file.txt')) + os.path.abspath(os.path.normpath("/path/req_file.txt")) ) nested_link = os.path.normcase( - os.path.abspath(os.path.normpath('/path/rel_path')) + os.path.abspath(os.path.normpath("/path/rel_path")) ) exists_ = os.path.exists @@ -424,7 +421,7 @@ def exists(path): else: exists_(path) - monkeypatch.setattr(os.path, 'exists', exists) + monkeypatch.setattr(os.path, "exists", exists) list(process_line("--find-links=rel_path", req_file, 1, finder=finder)) assert finder.find_links == [nested_link] @@ -432,7 +429,7 @@ def test_relative_http_nested_req_files(self, finder, monkeypatch): """ Test a relative nested req file path is joined with the req file url """ - req_file = 'http://me.com/me/req_file.txt' + req_file = "http://me.com/me/req_file.txt" def parse(*args, **kwargs): return iter([]) @@ -440,17 +437,17 @@ def parse(*args, **kwargs): mock_parse = Mock() mock_parse.side_effect = parse monkeypatch.setattr( - pip._internal.req.req_file, 'parse_requirements', mock_parse + pip._internal.req.req_file, "parse_requirements", mock_parse ) list(process_line("-r reqs.txt", req_file, 1, finder=finder)) call = mock_parse.mock_calls[0] - assert call[1][0] == 'http://me.com/me/reqs.txt' + assert call[1][0] == "http://me.com/me/reqs.txt" def test_relative_local_nested_req_files(self, finder, monkeypatch): """ Test a relative nested req file path is joined with the req file dir """ - req_file = os.path.normpath('/path/req_file.txt') + req_file = os.path.normpath("/path/req_file.txt") 
def parse(*args, **kwargs): return iter([]) @@ -458,17 +455,17 @@ def parse(*args, **kwargs): mock_parse = Mock() mock_parse.side_effect = parse monkeypatch.setattr( - pip._internal.req.req_file, 'parse_requirements', mock_parse + pip._internal.req.req_file, "parse_requirements", mock_parse ) list(process_line("-r reqs.txt", req_file, 1, finder=finder)) call = mock_parse.mock_calls[0] - assert call[1][0] == os.path.normpath('/path/reqs.txt') + assert call[1][0] == os.path.normpath("/path/reqs.txt") def test_absolute_local_nested_req_files(self, finder, monkeypatch): """ Test an absolute nested req file path """ - req_file = '/path/req_file.txt' + req_file = "/path/req_file.txt" def parse(*args, **kwargs): return iter([]) @@ -476,17 +473,17 @@ def parse(*args, **kwargs): mock_parse = Mock() mock_parse.side_effect = parse monkeypatch.setattr( - pip._internal.req.req_file, 'parse_requirements', mock_parse + pip._internal.req.req_file, "parse_requirements", mock_parse ) list(process_line("-r /other/reqs.txt", req_file, 1, finder=finder)) call = mock_parse.mock_calls[0] - assert call[1][0] == '/other/reqs.txt' + assert call[1][0] == "/other/reqs.txt" def test_absolute_http_nested_req_file_in_local(self, finder, monkeypatch): """ Test a nested req file url in a local req file """ - req_file = '/path/req_file.txt' + req_file = "/path/req_file.txt" def parse(*args, **kwargs): return iter([]) @@ -494,27 +491,27 @@ def parse(*args, **kwargs): mock_parse = Mock() mock_parse.side_effect = parse monkeypatch.setattr( - pip._internal.req.req_file, 'parse_requirements', mock_parse + pip._internal.req.req_file, "parse_requirements", mock_parse ) list(process_line("-r http://me.com/me/reqs.txt", req_file, 1, finder=finder)) call = mock_parse.mock_calls[0] - assert call[1][0] == 'http://me.com/me/reqs.txt' + assert call[1][0] == "http://me.com/me/reqs.txt" class TestBreakOptionsArgs(object): def test_no_args(self): - assert ('', '--option') == break_args_options('--option') + assert 
("", "--option") == break_args_options("--option") def test_no_options(self): - assert ('arg arg', '') == break_args_options('arg arg') + assert ("arg arg", "") == break_args_options("arg arg") def test_args_short_options(self): - result = break_args_options('arg arg -s') - assert ('arg arg', '-s') == result + result = break_args_options("arg arg -s") + assert ("arg arg", "-s") == result def test_args_long_options(self): - result = break_args_options('arg arg --long') - assert ('arg arg', '--long') == result + result = break_args_options("arg arg --long") + assert ("arg arg", "--long") == result class TestOptionVariants(object): @@ -523,23 +520,23 @@ class TestOptionVariants(object): def test_variant1(self, finder): list(process_line("-i url", "file", 1, finder=finder)) - assert finder.index_urls == ['url'] + assert finder.index_urls == ["url"] def test_variant2(self, finder): list(process_line("-i 'url'", "file", 1, finder=finder)) - assert finder.index_urls == ['url'] + assert finder.index_urls == ["url"] def test_variant3(self, finder): list(process_line("--index-url=url", "file", 1, finder=finder)) - assert finder.index_urls == ['url'] + assert finder.index_urls == ["url"] def test_variant4(self, finder): list(process_line("--index-url url", "file", 1, finder=finder)) - assert finder.index_urls == ['url'] + assert finder.index_urls == ["url"] def test_variant5(self, finder): list(process_line("--index-url='url'", "file", 1, finder=finder)) - assert finder.index_urls == ['url'] + assert finder.index_urls == ["url"] class TestParseRequirements(object): @@ -553,9 +550,9 @@ def test_remote_reqs_parse(self): # this requirements file just contains a comment previously this has # failed in py3: https://github.com/pypa/pip/issues/760 for req in parse_requirements( - 'https://raw.githubusercontent.com/pypa/' - 'pip-test-package/master/' - 'tests/req_just_comment.txt', + "https://raw.githubusercontent.com/pypa/" + "pip-test-package/master/" + 
"tests/req_just_comment.txt", session=PipSession(), ): pass @@ -574,10 +571,10 @@ def test_multiple_appending_options(self, tmpdir, finder, options): ) ) - assert finder.index_urls == ['url1', 'url2'] + assert finder.index_urls == ["url1", "url2"] def test_skip_regex(self, tmpdir, finder, options): - options.skip_requirements_regex = '.*Bad.*' + options.skip_requirements_regex = ".*Bad.*" with open(tmpdir.joinpath("req1.txt"), "w") as fp: fp.write("--extra-index-url Bad \n") fp.write("--extra-index-url Good ") @@ -591,54 +588,54 @@ def test_skip_regex(self, tmpdir, finder, options): ) ) - assert finder.index_urls == ['Good'] + assert finder.index_urls == ["Good"] def test_expand_existing_env_variables(self, tmpdir, finder): - template = 'https://%s:x-oauth-basic@github.com/user/%s/archive/master.zip' + template = "https://%s:x-oauth-basic@github.com/user/%s/archive/master.zip" - env_vars = (('GITHUB_TOKEN', 'notarealtoken'), ('DO_12_FACTOR', 'awwyeah')) + env_vars = (("GITHUB_TOKEN", "notarealtoken"), ("DO_12_FACTOR", "awwyeah")) - with open(tmpdir.joinpath('req1.txt'), 'w') as fp: - fp.write(template % tuple(['${%s}' % k for k, _ in env_vars])) + with open(tmpdir.joinpath("req1.txt"), "w") as fp: + fp.write(template % tuple(["${%s}" % k for k, _ in env_vars])) - with patch('pip._internal.req.req_file.os.getenv') as getenv: + with patch("pip._internal.req.req_file.os.getenv") as getenv: getenv.side_effect = lambda n: dict(env_vars)[n] reqs = list( parse_requirements( - tmpdir.joinpath('req1.txt'), finder=finder, session=PipSession() + tmpdir.joinpath("req1.txt"), finder=finder, session=PipSession() ) ) - assert len(reqs) == 1, 'parsing requirement file with env variable failed' + assert len(reqs) == 1, "parsing requirement file with env variable failed" expected_url = template % tuple([v for _, v in env_vars]) assert ( reqs[0].link.url == expected_url - ), 'variable expansion in req file failed' + ), "variable expansion in req file failed" def 
test_expand_missing_env_variables(self, tmpdir, finder): req_url = ( - 'https://${NON_EXISTENT_VARIABLE}:$WRONG_FORMAT@' - '%WINDOWS_FORMAT%github.com/user/repo/archive/master.zip' + "https://${NON_EXISTENT_VARIABLE}:$WRONG_FORMAT@" + "%WINDOWS_FORMAT%github.com/user/repo/archive/master.zip" ) - with open(tmpdir.joinpath('req1.txt'), 'w') as fp: + with open(tmpdir.joinpath("req1.txt"), "w") as fp: fp.write(req_url) - with patch('pip._internal.req.req_file.os.getenv') as getenv: - getenv.return_value = '' + with patch("pip._internal.req.req_file.os.getenv") as getenv: + getenv.return_value = "" reqs = list( parse_requirements( - tmpdir.joinpath('req1.txt'), finder=finder, session=PipSession() + tmpdir.joinpath("req1.txt"), finder=finder, session=PipSession() ) ) - assert len(reqs) == 1, 'parsing requirement file with env variable failed' + assert len(reqs) == 1, "parsing requirement file with env variable failed" assert ( reqs[0].link.url == req_url - ), 'ignoring invalid env variable in req file failed' + ), "ignoring invalid env variable in req file failed" def test_join_lines(self, tmpdir, finder): with open(tmpdir.joinpath("req1.txt"), "w") as fp: @@ -650,7 +647,7 @@ def test_join_lines(self, tmpdir, finder): ) ) - assert finder.index_urls == ['url1', 'url2'] + assert finder.index_urls == ["url1", "url2"] def test_req_file_parse_no_only_binary(self, data, finder): list( @@ -660,7 +657,7 @@ def test_req_file_parse_no_only_binary(self, data, finder): session=PipSession(), ) ) - expected = FormatControl({'fred'}, {'wilma'}) + expected = FormatControl({"fred"}, {"wilma"}) assert finder.format_control == expected def test_req_file_parse_comment_start_of_line(self, tmpdir, finder): @@ -728,14 +725,14 @@ def test_req_file_no_finder(self, tmpdir): parse_requirements(tmpdir.joinpath("req.txt"), session=PipSession()) def test_install_requirements_with_options(self, tmpdir, finder, session, options): - global_option = '--dry-run' - install_option = '--prefix=/opt' + 
global_option = "--dry-run" + install_option = "--prefix=/opt" - content = ''' + content = """ --only-binary :all: INITools==2.0 --global-option="{global_option}" \ --install-option "{install_option}" - '''.format( + """.format( global_option=global_option, install_option=install_option ) @@ -747,7 +744,7 @@ def test_install_requirements_with_options(self, tmpdir, finder, session, option ) req.source_dir = os.curdir - with patch.object(subprocess, 'Popen') as popen: + with patch.object(subprocess, "Popen") as popen: popen.return_value.stdout.readline.return_value = b"" try: req.install([]) @@ -759,8 +756,8 @@ def test_install_requirements_with_options(self, tmpdir, finder, session, option assert ( 0 < args.index(global_option) - < args.index('install') + < args.index("install") < args.index(install_option) ) - assert options.format_control.no_binary == {':all:'} + assert options.format_control.no_binary == {":all:"} assert options.format_control.only_binary == set() diff --git a/tests/unit/test_req_install.py b/tests/unit/test_req_install.py index 49ffaa96b6a..65d2068d7c2 100644 --- a/tests/unit/test_req_install.py +++ b/tests/unit/test_req_install.py @@ -19,7 +19,7 @@ def test_tmp_build_directory(self): # when req is None, we can produce a temporary directory # Make sure we're handling it correctly with real path. 
requirement = InstallRequirement(None, None) - tmp_dir = tempfile.mkdtemp('-build', 'pip-') + tmp_dir = tempfile.mkdtemp("-build", "pip-") tmp_build_dir = requirement.ensure_build_location(tmp_dir) assert os.path.dirname(tmp_build_dir) == os.path.realpath( os.path.dirname(tmp_dir) @@ -38,8 +38,8 @@ def test_forward_slash_results_in_a_link(self, tmpdir): # Just create a file for letting the logic work setup_py_path = install_dir / "setup.py" os.makedirs(str(install_dir)) - with open(setup_py_path, 'w') as f: - f.write('') + with open(setup_py_path, "w") as f: + f.write("") requirement = install_req_from_line( str(install_dir).replace(os.sep, os.altsep or os.sep) diff --git a/tests/unit/test_req_uninstall.py b/tests/unit/test_req_uninstall.py index 7ce9922379e..95e223a8390 100644 --- a/tests/unit/test_req_uninstall.py +++ b/tests/unit/test_req_uninstall.py @@ -26,22 +26,22 @@ def mock_is_local(path): def test_uninstallation_paths(): class dist(object): def get_metadata_lines(self, record): - return ['file.py,,', 'file.pyc,,', 'file.so,,', 'nopyc.py'] + return ["file.py,,", "file.pyc,,", "file.so,,", "nopyc.py"] - location = '' + location = "" d = dist() paths = list(uninstallation_paths(d)) expected = [ - 'file.py', - 'file.pyc', - 'file.pyo', - 'file.so', - 'nopyc.py', - 'nopyc.pyc', - 'nopyc.pyo', + "file.py", + "file.pyc", + "file.pyo", + "file.so", + "nopyc.py", + "nopyc.pyc", + "nopyc.pyo", ] assert paths == expected @@ -124,11 +124,11 @@ def in_tmpdir(paths): class TestUninstallPathSet(object): def test_add(self, tmpdir, monkeypatch): - monkeypatch.setattr(pip._internal.req.req_uninstall, 'is_local', mock_is_local) + monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local) # Fix case for windows tests - file_extant = os.path.normcase(os.path.join(tmpdir, 'foo')) - file_nonexistent = os.path.normcase(os.path.join(tmpdir, 'nonexistent')) - with open(file_extant, 'w'): + file_extant = os.path.normcase(os.path.join(tmpdir, "foo")) + 
file_nonexistent = os.path.normcase(os.path.join(tmpdir, "nonexistent")) + with open(file_extant, "w"): pass ups = UninstallPathSet(dist=Mock()) @@ -140,20 +140,20 @@ def test_add(self, tmpdir, monkeypatch): assert ups.paths == {file_extant} def test_add_pth(self, tmpdir, monkeypatch): - monkeypatch.setattr(pip._internal.req.req_uninstall, 'is_local', mock_is_local) + monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local) # Fix case for windows tests tmpdir = os.path.normcase(tmpdir) - on_windows = sys.platform == 'win32' - pth_file = os.path.join(tmpdir, 'foo.pth') - relative = '../../example' + on_windows = sys.platform == "win32" + pth_file = os.path.join(tmpdir, "foo.pth") + relative = "../../example" if on_windows: - share = '\\\\example\\share\\' - share_com = '\\\\example.com\\share\\' + share = "\\\\example\\share\\" + share_com = "\\\\example.com\\share\\" # Create a .pth file for testing - with open(pth_file, 'w') as f: - f.writelines([tmpdir, '\n', relative, '\n']) + with open(pth_file, "w") as f: + f.writelines([tmpdir, "\n", relative, "\n"]) if on_windows: - f.writelines([share, '\n', share_com, '\n']) + f.writelines([share, "\n", share_com, "\n"]) # Add paths to be removed pth = UninstallPthEntries(pth_file) pth.add(tmpdir) @@ -170,11 +170,11 @@ def test_add_pth(self, tmpdir, monkeypatch): @pytest.mark.skipif("sys.platform == 'win32'") def test_add_symlink(self, tmpdir, monkeypatch): - monkeypatch.setattr(pip._internal.req.req_uninstall, 'is_local', mock_is_local) - f = os.path.join(tmpdir, 'foo') - with open(f, 'w'): + monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local) + f = os.path.join(tmpdir, "foo") + with open(f, "w"): pass - foo_link = os.path.join(tmpdir, 'foo_link') + foo_link = os.path.join(tmpdir, "foo_link") os.symlink(f, foo_link) ups = UninstallPathSet(dist=Mock()) @@ -182,31 +182,31 @@ def test_add_symlink(self, tmpdir, monkeypatch): assert ups.paths == {foo_link} def 
test_compact_shorter_path(self, monkeypatch): - monkeypatch.setattr(pip._internal.req.req_uninstall, 'is_local', mock_is_local) - monkeypatch.setattr('os.path.exists', lambda p: True) + monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local) + monkeypatch.setattr("os.path.exists", lambda p: True) # This deals with nt/posix path differences short_path = os.path.normcase( - os.path.abspath(os.path.join(os.path.sep, 'path')) + os.path.abspath(os.path.join(os.path.sep, "path")) ) ups = UninstallPathSet(dist=Mock()) ups.add(short_path) - ups.add(os.path.join(short_path, 'longer')) + ups.add(os.path.join(short_path, "longer")) assert compact(ups.paths) == {short_path} @pytest.mark.skipif("sys.platform == 'win32'") def test_detect_symlink_dirs(self, monkeypatch, tmpdir): - monkeypatch.setattr(pip._internal.req.req_uninstall, 'is_local', mock_is_local) + monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local) # construct 2 paths: # tmpdir/dir/file # tmpdir/dirlink/file (where dirlink is a link to dir) - d = tmpdir.joinpath('dir') + d = tmpdir.joinpath("dir") d.mkdir() - dlink = tmpdir.joinpath('dirlink') + dlink = tmpdir.joinpath("dirlink") os.symlink(d, dlink) - d.joinpath('file').touch() - path1 = str(d.joinpath('file')) - path2 = str(dlink.joinpath('file')) + d.joinpath("file").touch() + path1 = str(d.joinpath("file")) + path2 = str(dlink.joinpath("file")) ups = UninstallPathSet(dist=Mock()) ups.add(path1) @@ -249,7 +249,7 @@ def test_compress_for_rename(self, monkeypatch): ] ] - monkeypatch.setattr('os.walk', self.mock_walk) + monkeypatch.setattr("os.walk", self.mock_walk) actual_paths = compress_for_rename(paths) assert set(expected_paths) == set(actual_paths) @@ -268,7 +268,7 @@ def make_stash(cls, tmpdir, paths): pathset = StashedUninstallPathSet() - paths = [os.path.join(tmpdir, *p.split('/')) for p in paths] + paths = [os.path.join(tmpdir, *p.split("/")) for p in paths] stashed_paths = [(p, pathset.stash(p)) for p in 
paths] return pathset, stashed_paths diff --git a/tests/unit/test_search_scope.py b/tests/unit/test_search_scope.py index 2d6dd44d967..b3c63b87829 100644 --- a/tests/unit/test_search_scope.py +++ b/tests/unit/test_search_scope.py @@ -9,24 +9,24 @@ def test_get_formatted_locations_basic_auth(self): is not included in formatted output. """ index_urls = [ - 'https://pypi.org/simple', - 'https://repo-user:repo-pass@repo.domain.com', + "https://pypi.org/simple", + "https://repo-user:repo-pass@repo.domain.com", ] - find_links = ['https://links-user:links-pass@page.domain.com'] + find_links = ["https://links-user:links-pass@page.domain.com"] search_scope = SearchScope(find_links=find_links, index_urls=index_urls) result = search_scope.get_formatted_locations() - assert 'repo-user:****@repo.domain.com' in result - assert 'repo-pass' not in result - assert 'links-user:****@page.domain.com' in result - assert 'links-pass' not in result + assert "repo-user:****@repo.domain.com" in result + assert "repo-pass" not in result + assert "links-user:****@page.domain.com" in result + assert "links-pass" not in result def test_get_index_urls_locations(self): """Check that the canonical name is on all indexes""" search_scope = SearchScope( - find_links=[], index_urls=['file://index1/', 'file://index2'] + find_links=[], index_urls=["file://index1/", "file://index2"] ) actual = search_scope.get_index_urls_locations( - install_req_from_line('Complex_Name').name + install_req_from_line("Complex_Name").name ) - assert actual == ['file://index1/complex-name/', 'file://index2/complex-name/'] + assert actual == ["file://index1/complex-name/", "file://index2/complex-name/"] diff --git a/tests/unit/test_target_python.py b/tests/unit/test_target_python.py index c69eac90ecc..07a92a178f7 100644 --- a/tests/unit/test_target_python.py +++ b/tests/unit/test_target_python.py @@ -7,15 +7,15 @@ class TestTargetPython: @pytest.mark.parametrize( - 'py_version_info, expected', + "py_version_info, expected", 
[ - ((), ((0, 0, 0), '0.0')), - ((2,), ((2, 0, 0), '2.0')), - ((3,), ((3, 0, 0), '3.0')), - ((3, 7), ((3, 7, 0), '3.7')), - ((3, 7, 3), ((3, 7, 3), '3.7')), + ((), ((0, 0, 0), "0.0")), + ((2,), ((2, 0, 0), "2.0")), + ((3,), ((3, 0, 0), "3.0")), + ((3, 7), ((3, 7, 0), "3.7")), + ((3, 7, 3), ((3, 7, 3), "3.7")), # Check a minor version with two digits. - ((3, 10, 1), ((3, 10, 1), '3.10')), + ((3, 10, 1), ((3, 10, 1), "3.10")), ], ) def test_init__py_version_info(self, py_version_info, expected): @@ -44,20 +44,20 @@ def test_init__py_version_info_none(self): assert target_python.py_version == pyversion @pytest.mark.parametrize( - 'kwargs, expected', + "kwargs, expected", [ - ({}, ''), + ({}, ""), (dict(py_version_info=(3, 6)), "version_info='3.6'"), ( - dict(platform='darwin', py_version_info=(3, 6)), + dict(platform="darwin", py_version_info=(3, 6)), "platform='darwin' version_info='3.6'", ), ( dict( - platform='darwin', + platform="darwin", py_version_info=(3, 6), - abi='cp36m', - implementation='cp', + abi="cp36m", + implementation="cp", ), ( "platform='darwin' version_info='3.6' abi='cp36m' " @@ -72,38 +72,38 @@ def test_format_given(self, kwargs, expected): assert actual == expected @pytest.mark.parametrize( - 'py_version_info, expected_versions', + "py_version_info, expected_versions", [ - ((), ['']), - ((2,), ['2']), - ((3,), ['3']), - ((3, 7), ['37']), - ((3, 7, 3), ['37']), + ((), [""]), + ((2,), ["2"]), + ((3,), ["3"]), + ((3, 7), ["37"]), + ((3, 7, 3), ["37"]), # Check a minor version with two digits. - ((3, 10, 1), ['310']), + ((3, 10, 1), ["310"]), # Check that versions=None is passed to get_tags(). 
(None, None), ], ) - @patch('pip._internal.models.target_python.get_supported') + @patch("pip._internal.models.target_python.get_supported") def test_get_tags(self, mock_get_supported, py_version_info, expected_versions): - mock_get_supported.return_value = ['tag-1', 'tag-2'] + mock_get_supported.return_value = ["tag-1", "tag-2"] target_python = TargetPython(py_version_info=py_version_info) actual = target_python.get_tags() - assert actual == ['tag-1', 'tag-2'] + assert actual == ["tag-1", "tag-2"] - actual = mock_get_supported.call_args[1]['versions'] + actual = mock_get_supported.call_args[1]["versions"] assert actual == expected_versions # Check that the value was cached. - assert target_python._valid_tags == ['tag-1', 'tag-2'] + assert target_python._valid_tags == ["tag-1", "tag-2"] def test_get_tags__uses_cached_value(self): """ Test that get_tags() uses the cached value. """ target_python = TargetPython(py_version_info=None) - target_python._valid_tags = ['tag-1', 'tag-2'] + target_python._valid_tags = ["tag-1", "tag-2"] actual = target_python.get_tags() - assert actual == ['tag-1', 'tag-2'] + assert actual == ["tag-1", "tag-2"] diff --git a/tests/unit/test_unit_outdated.py b/tests/unit/test_unit_outdated.py index 002f37fded4..f1de5cfebbc 100644 --- a/tests/unit/test_unit_outdated.py +++ b/tests/unit/test_unit_outdated.py @@ -22,15 +22,15 @@ @pytest.mark.parametrize( - 'find_links, no_index, suppress_no_index, expected', + "find_links, no_index, suppress_no_index, expected", [ - (['link1'], False, False, (['link1'], ['default_url', 'url1', 'url2'])), - (['link1'], False, True, (['link1'], ['default_url', 'url1', 'url2'])), - (['link1'], True, False, (['link1'], [])), + (["link1"], False, False, (["link1"], ["default_url", "url1", "url2"])), + (["link1"], False, True, (["link1"], ["default_url", "url1", "url2"])), + (["link1"], True, False, (["link1"], [])), # Passing suppress_no_index=True suppresses no_index=True. 
- (['link1'], True, True, (['link1'], ['default_url', 'url1', 'url2'])), + (["link1"], True, True, (["link1"], ["default_url", "url1", "url2"])), # Test options.find_links=False. - (False, False, False, ([], ['default_url', 'url1', 'url2'])), + (False, False, False, ([], ["default_url", "url1", "url2"])), ], ) def test_make_link_collector(find_links, no_index, suppress_no_index, expected): @@ -41,8 +41,8 @@ def test_make_link_collector(find_links, no_index, suppress_no_index, expected): session = PipSession() options = pretend.stub( find_links=find_links, - index_url='default_url', - extra_index_urls=['url1', 'url2'], + index_url="default_url", + extra_index_urls=["url1", "url2"], no_index=no_index, ) link_collector = make_link_collector( @@ -56,14 +56,14 @@ def test_make_link_collector(find_links, no_index, suppress_no_index, expected): assert search_scope.index_urls == expected_index_urls -@patch('pip._internal.utils.misc.expanduser') +@patch("pip._internal.utils.misc.expanduser") def test_make_link_collector__find_links_expansion(mock_expanduser, tmpdir): """ Test "~" expansion in --find-links paths. """ # This is a mock version of expanduser() that expands "~" to the tmpdir. def expand_path(path): - if path.startswith('~/'): + if path.startswith("~/"): path = os.path.join(tmpdir, path[2:]) return path @@ -71,14 +71,14 @@ def expand_path(path): session = PipSession() options = pretend.stub( - find_links=['~/temp1', '~/temp2'], - index_url='default_url', + find_links=["~/temp1", "~/temp2"], + index_url="default_url", extra_index_urls=[], no_index=False, ) # Only create temp2 and not temp1 to test that "~" expansion only occurs # when the directory exists. - temp2_dir = os.path.join(tmpdir, 'temp2') + temp2_dir = os.path.join(tmpdir, "temp2") os.mkdir(temp2_dir) link_collector = make_link_collector(session, options=options) @@ -86,8 +86,8 @@ def expand_path(path): search_scope = link_collector.search_scope # Only ~/temp2 gets expanded. 
Also, the path is normalized when expanded. expected_temp2_dir = os.path.normcase(temp2_dir) - assert search_scope.find_links == ['~/temp1', expected_temp2_dir] - assert search_scope.index_urls == ['default_url'] + assert search_scope.find_links == ["~/temp1", expected_temp2_dir] + assert search_scope.index_urls == ["default_url"] class MockBestCandidateResult(object): @@ -97,12 +97,12 @@ def __init__(self, best): class MockPackageFinder(object): - BASE_URL = 'https://pypi.org/simple/pip-{0}.tar.gz' - PIP_PROJECT_NAME = 'pip' + BASE_URL = "https://pypi.org/simple/pip-{0}.tar.gz" + PIP_PROJECT_NAME = "pip" INSTALLATION_CANDIDATES = [ - InstallationCandidate(PIP_PROJECT_NAME, '6.9.0', BASE_URL.format('6.9.0')), - InstallationCandidate(PIP_PROJECT_NAME, '3.3.1', BASE_URL.format('3.3.1')), - InstallationCandidate(PIP_PROJECT_NAME, '1.0', BASE_URL.format('1.0')), + InstallationCandidate(PIP_PROJECT_NAME, "6.9.0", BASE_URL.format("6.9.0")), + InstallationCandidate(PIP_PROJECT_NAME, "3.3.1", BASE_URL.format("3.3.1")), + InstallationCandidate(PIP_PROJECT_NAME, "1.0", BASE_URL.format("1.0")), ] @classmethod @@ -118,45 +118,45 @@ def __init__(self, installer): self.installer = installer def has_metadata(self, name): - return name == 'INSTALLER' + return name == "INSTALLER" def get_metadata_lines(self, name): if self.has_metadata(name): yield self.installer else: - raise NotImplementedError('nope') + raise NotImplementedError("nope") def _options(): - ''' Some default options that we pass to outdated.pip_version_check ''' + """ Some default options that we pass to outdated.pip_version_check """ return pretend.stub( find_links=[], - index_url='default_url', + index_url="default_url", extra_index_urls=[], no_index=False, pre=False, - cache_dir='', + cache_dir="", ) @pytest.mark.parametrize( [ - 'stored_time', - 'installed_ver', - 'new_ver', - 'installer', - 'check_if_upgrade_required', - 'check_warn_logs', + "stored_time", + "installed_ver", + "new_ver", + "installer", + 
"check_if_upgrade_required", + "check_warn_logs", ], [ # Test we return None when installed version is None - ('1970-01-01T10:00:00Z', None, '1.0', 'pip', False, False), + ("1970-01-01T10:00:00Z", None, "1.0", "pip", False, False), # Need an upgrade - upgrade warning should print - ('1970-01-01T10:00:00Z', '1.0', '6.9.0', 'pip', True, True), + ("1970-01-01T10:00:00Z", "1.0", "6.9.0", "pip", True, True), # Upgrade available, pip installed via rpm - warning should not print - ('1970-01-01T10:00:00Z', '1.0', '6.9.0', 'rpm', True, False), + ("1970-01-01T10:00:00Z", "1.0", "6.9.0", "rpm", True, False), # No upgrade - upgrade warning should not print - ('1970-01-9T10:00:00Z', '6.9.0', '6.9.0', 'pip', False, False), + ("1970-01-9T10:00:00Z", "6.9.0", "6.9.0", "pip", False, False), ], ) def test_pip_version_check( @@ -168,21 +168,21 @@ def test_pip_version_check( check_if_upgrade_required, check_warn_logs, ): - monkeypatch.setattr(outdated, 'get_installed_version', lambda name: installed_ver) - monkeypatch.setattr(outdated, 'PackageFinder', MockPackageFinder) - monkeypatch.setattr(logger, 'warning', pretend.call_recorder(lambda *a, **kw: None)) + monkeypatch.setattr(outdated, "get_installed_version", lambda name: installed_ver) + monkeypatch.setattr(outdated, "PackageFinder", MockPackageFinder) + monkeypatch.setattr(logger, "warning", pretend.call_recorder(lambda *a, **kw: None)) monkeypatch.setattr( - logger, 'debug', pretend.call_recorder(lambda s, exc_info=None: None) + logger, "debug", pretend.call_recorder(lambda s, exc_info=None: None) ) monkeypatch.setattr( - pkg_resources, 'get_distribution', lambda name: MockDistribution(installer) + pkg_resources, "get_distribution", lambda name: MockDistribution(installer) ) fake_state = pretend.stub( - state={"last_check": stored_time, 'pypi_version': installed_ver}, + state={"last_check": stored_time, "pypi_version": installed_ver}, save=pretend.call_recorder(lambda v, t: None), ) - monkeypatch.setattr(outdated, 
'SelfCheckState', lambda **kw: fake_state) + monkeypatch.setattr(outdated, "SelfCheckState", lambda **kw: fake_state) with freezegun.freeze_time( "1970-01-09 10:00:00", diff --git a/tests/unit/test_urls.py b/tests/unit/test_urls.py index 5b65e395a75..026e54cafd6 100644 --- a/tests/unit/test_urls.py +++ b/tests/unit/test_urls.py @@ -10,10 +10,10 @@ @pytest.mark.parametrize( "url,expected", [ - ('http://localhost:8080/', 'http'), - ('file:c:/path/to/file', 'file'), - ('file:/dev/null', 'file'), - ('', None), + ("http://localhost:8080/", "http"), + ("file:c:/path/to/file", "file"), + ("file:/dev/null", "file"), + ("", None), ], ) def test_get_url_scheme(url, expected): @@ -22,35 +22,35 @@ def test_get_url_scheme(url, expected): @pytest.mark.skipif("sys.platform == 'win32'") def test_path_to_url_unix(): - assert path_to_url('/tmp/file') == 'file:///tmp/file' - path = os.path.join(os.getcwd(), 'file') - assert path_to_url('file') == 'file://' + urllib_request.pathname2url(path) + assert path_to_url("/tmp/file") == "file:///tmp/file" + path = os.path.join(os.getcwd(), "file") + assert path_to_url("file") == "file://" + urllib_request.pathname2url(path) @pytest.mark.skipif("sys.platform != 'win32'") def test_path_to_url_win(): - assert path_to_url('c:/tmp/file') == 'file:///C:/tmp/file' - assert path_to_url('c:\\tmp\\file') == 'file:///C:/tmp/file' - assert path_to_url(r'\\unc\as\path') == 'file://unc/as/path' - path = os.path.join(os.getcwd(), 'file') - assert path_to_url('file') == 'file:' + urllib_request.pathname2url(path) + assert path_to_url("c:/tmp/file") == "file:///C:/tmp/file" + assert path_to_url("c:\\tmp\\file") == "file:///C:/tmp/file" + assert path_to_url(r"\\unc\as\path") == "file://unc/as/path" + path = os.path.join(os.getcwd(), "file") + assert path_to_url("file") == "file:" + urllib_request.pathname2url(path) @pytest.mark.parametrize( "url,win_expected,non_win_expected", [ - ('file:tmp', 'tmp', 'tmp'), - ('file:c:/path/to/file', r'C:\path\to\file', 
'c:/path/to/file'), - ('file:/path/to/file', r'\path\to\file', '/path/to/file'), - ('file://localhost/tmp/file', r'\tmp\file', '/tmp/file'), - ('file://localhost/c:/tmp/file', r'C:\tmp\file', '/c:/tmp/file'), - ('file://somehost/tmp/file', r'\\somehost\tmp\file', None), - ('file:///tmp/file', r'\tmp\file', '/tmp/file'), - ('file:///c:/tmp/file', r'C:\tmp\file', '/c:/tmp/file'), + ("file:tmp", "tmp", "tmp"), + ("file:c:/path/to/file", r"C:\path\to\file", "c:/path/to/file"), + ("file:/path/to/file", r"\path\to\file", "/path/to/file"), + ("file://localhost/tmp/file", r"\tmp\file", "/tmp/file"), + ("file://localhost/c:/tmp/file", r"C:\tmp\file", "/c:/tmp/file"), + ("file://somehost/tmp/file", r"\\somehost\tmp\file", None), + ("file:///tmp/file", r"\tmp\file", "/tmp/file"), + ("file:///c:/tmp/file", r"C:\tmp\file", "/c:/tmp/file"), ], ) def test_url_to_path(url, win_expected, non_win_expected): - if sys.platform == 'win32': + if sys.platform == "win32": expected_path = win_expected else: expected_path = non_win_expected @@ -64,8 +64,8 @@ def test_url_to_path(url, win_expected, non_win_expected): @pytest.mark.skipif("sys.platform != 'win32'") def test_url_to_path_path_to_url_symmetry_win(): - path = r'C:\tmp\file' + path = r"C:\tmp\file" assert url_to_path(path_to_url(path)) == path - unc_path = r'\\unc\share\path' + unc_path = r"\\unc\share\path" assert url_to_path(path_to_url(unc_path)) == unc_path diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index bc051c24a0a..afb0bab6e2c 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -19,11 +19,7 @@ import pytest from mock import Mock, patch -from pip._internal.exceptions import ( - HashMismatch, - HashMissing, - InstallationError, -) +from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError from pip._internal.utils.deprecation import PipDeprecationWarning, deprecated from pip._internal.utils.encoding import BOMS, auto_decode from pip._internal.utils.glibc import ( 
@@ -67,21 +63,21 @@ class Tests_EgglinkPath: def setup(self): - project = 'foo' + project = "foo" self.mock_dist = Mock(project_name=project) - self.site_packages = 'SITE_PACKAGES' - self.user_site = 'USER_SITE' - self.user_site_egglink = os.path.join(self.user_site, '%s.egg-link' % project) + self.site_packages = "SITE_PACKAGES" + self.user_site = "USER_SITE" + self.user_site_egglink = os.path.join(self.user_site, "%s.egg-link" % project) self.site_packages_egglink = os.path.join( - self.site_packages, '%s.egg-link' % project + self.site_packages, "%s.egg-link" % project ) # patches from pip._internal.utils import misc as utils self.old_site_packages = utils.site_packages - self.mock_site_packages = utils.site_packages = 'SITE_PACKAGES' + self.mock_site_packages = utils.site_packages = "SITE_PACKAGES" self.old_running_under_virtualenv = utils.running_under_virtualenv self.mock_running_under_virtualenv = utils.running_under_virtualenv = Mock() self.old_virtualenv_no_global = utils.virtualenv_no_global @@ -195,9 +191,9 @@ def test_noegglink_in_sitepkgs_venv_global(self): assert egg_link_path(self.mock_dist) is None -@patch('pip._internal.utils.misc.dist_in_usersite') -@patch('pip._internal.utils.misc.dist_is_local') -@patch('pip._internal.utils.misc.dist_is_editable') +@patch("pip._internal.utils.misc.dist_in_usersite") +@patch("pip._internal.utils.misc.dist_is_local") +@patch("pip._internal.utils.misc.dist_is_editable") class Tests_get_installed_distributions: """test util.get_installed_distributions""" @@ -209,26 +205,26 @@ class Tests_get_installed_distributions: ] workingset_stdlib = [ - Mock(test_name='normal', key='argparse'), - Mock(test_name='normal', key='wsgiref'), + Mock(test_name="normal", key="argparse"), + Mock(test_name="normal", key="wsgiref"), ] workingset_freeze = [ - Mock(test_name='normal', key='pip'), - Mock(test_name='normal', key='setuptools'), - Mock(test_name='normal', key='distribute'), + Mock(test_name="normal", key="pip"), + 
Mock(test_name="normal", key="setuptools"), + Mock(test_name="normal", key="distribute"), ] def dist_is_editable(self, dist): return dist.test_name == "editable" def dist_is_local(self, dist): - return dist.test_name != "global" and dist.test_name != 'user' + return dist.test_name != "global" and dist.test_name != "user" def dist_in_usersite(self, dist): return dist.test_name == "user" - @patch('pip._vendor.pkg_resources.working_set', workingset) + @patch("pip._vendor.pkg_resources.working_set", workingset) def test_editables_only( self, mock_dist_is_editable, mock_dist_is_local, mock_dist_in_usersite ): @@ -239,7 +235,7 @@ def test_editables_only( assert len(dists) == 1, dists assert dists[0].test_name == "editable" - @patch('pip._vendor.pkg_resources.working_set', workingset) + @patch("pip._vendor.pkg_resources.working_set", workingset) def test_exclude_editables( self, mock_dist_is_editable, mock_dist_is_local, mock_dist_in_usersite ): @@ -250,7 +246,7 @@ def test_exclude_editables( assert len(dists) == 1 assert dists[0].test_name == "normal" - @patch('pip._vendor.pkg_resources.working_set', workingset) + @patch("pip._vendor.pkg_resources.working_set", workingset) def test_include_globals( self, mock_dist_is_editable, mock_dist_is_local, mock_dist_in_usersite ): @@ -260,7 +256,7 @@ def test_include_globals( dists = get_installed_distributions(local_only=False) assert len(dists) == 4 - @patch('pip._vendor.pkg_resources.working_set', workingset) + @patch("pip._vendor.pkg_resources.working_set", workingset) def test_user_only( self, mock_dist_is_editable, mock_dist_is_local, mock_dist_in_usersite ): @@ -271,7 +267,7 @@ def test_user_only( assert len(dists) == 1 assert dists[0].test_name == "user" - @patch('pip._vendor.pkg_resources.working_set', workingset_stdlib) + @patch("pip._vendor.pkg_resources.working_set", workingset_stdlib) def test_gte_py27_excludes( self, mock_dist_is_editable, mock_dist_is_local, mock_dist_in_usersite ): @@ -281,14 +277,14 @@ def 
test_gte_py27_excludes( dists = get_installed_distributions() assert len(dists) == 0 - @patch('pip._vendor.pkg_resources.working_set', workingset_freeze) + @patch("pip._vendor.pkg_resources.working_set", workingset_freeze) def test_freeze_excludes( self, mock_dist_is_editable, mock_dist_is_local, mock_dist_in_usersite ): mock_dist_is_editable.side_effect = self.dist_is_editable mock_dist_is_local.side_effect = self.dist_is_local mock_dist_in_usersite.side_effect = self.dist_in_usersite - dists = get_installed_distributions(skip=('setuptools', 'pip', 'distribute')) + dists = get_installed_distributions(skip=("setuptools", "pip", "distribute")) assert len(dists) == 0 @@ -296,7 +292,7 @@ def test_rmtree_errorhandler_nonexistent_directory(tmpdir): """ Test rmtree_errorhandler ignores the given non-existing directory. """ - nonexistent_path = str(tmpdir / 'foo') + nonexistent_path = str(tmpdir / "foo") mock_func = Mock() rmtree_errorhandler(mock_func, nonexistent_path, None) mock_func.assert_not_called() @@ -307,7 +303,7 @@ def test_rmtree_errorhandler_readonly_directory(tmpdir): Test rmtree_errorhandler makes the given read-only directory writable. """ # Create read only directory - path = str((tmpdir / 'subdir').mkdir()) + path = str((tmpdir / "subdir").mkdir()) os.chmod(path, stat.S_IREAD) # Make sure mock_func is called with the given path @@ -325,16 +321,16 @@ def test_rmtree_errorhandler_reraises_error(tmpdir): by the given unreadable directory. 
""" # Create directory without read permission - path = str((tmpdir / 'subdir').mkdir()) + path = str((tmpdir / "subdir").mkdir()) os.chmod(path, stat.S_IWRITE) mock_func = Mock() try: - raise RuntimeError('test message') + raise RuntimeError("test message") except RuntimeError: # Make sure the handler reraises an exception - with pytest.raises(RuntimeError, match='test message'): + with pytest.raises(RuntimeError, match="test message"): rmtree_errorhandler(mock_func, path, None) mock_func.assert_not_called() @@ -345,7 +341,7 @@ def test_rmtree_skips_nonexistent_directory(): Test wrapped rmtree doesn't raise an error by the given nonexistent directory. """ - rmtree.__wrapped__('nonexistent-subdir') + rmtree.__wrapped__("nonexistent-subdir") class Failer: @@ -362,41 +358,41 @@ def test_rmtree_retries(tmpdir, monkeypatch): """ Test pip._internal.utils.rmtree will retry failures """ - monkeypatch.setattr(shutil, 'rmtree', Failer(duration=1).call) - rmtree('foo') + monkeypatch.setattr(shutil, "rmtree", Failer(duration=1).call) + rmtree("foo") def test_rmtree_retries_for_3sec(tmpdir, monkeypatch): """ Test pip._internal.utils.rmtree will retry failures for no more than 3 sec """ - monkeypatch.setattr(shutil, 'rmtree', Failer(duration=5).call) + monkeypatch.setattr(shutil, "rmtree", Failer(duration=5).call) with pytest.raises(OSError): - rmtree('foo') + rmtree("foo") @pytest.mark.parametrize( - 'path, fs_encoding, expected', + "path, fs_encoding, expected", [ (None, None, None), # Test passing a text (unicode) string. - (u'/path/déf', None, u'/path/déf'), + (u"/path/déf", None, u"/path/déf"), # Test a bytes object with a non-ascii character. - (u'/path/déf'.encode('utf-8'), 'utf-8', u'/path/déf'), + (u"/path/déf".encode("utf-8"), "utf-8", u"/path/déf"), # Test a bytes object with a character that can't be decoded. 
- (u'/path/déf'.encode('utf-8'), 'ascii', u"b'/path/d\\xc3\\xa9f'"), + (u"/path/déf".encode("utf-8"), "ascii", u"b'/path/d\\xc3\\xa9f'"), ( - u'/path/déf'.encode('utf-16'), - 'utf-8', + u"/path/déf".encode("utf-16"), + "utf-8", u"b'\\xff\\xfe/\\x00p\\x00a\\x00t\\x00h\\x00/" "\\x00d\\x00\\xe9\\x00f\\x00'", ), ], ) def test_path_to_display(monkeypatch, path, fs_encoding, expected): - monkeypatch.setattr(sys, 'getfilesystemencoding', lambda: fs_encoding) + monkeypatch.setattr(sys, "getfilesystemencoding", lambda: fs_encoding) actual = path_to_display(path) - assert actual == expected, 'actual: {!r}'.format(actual) + assert actual == expected, "actual: {!r}".format(actual) class Test_normalize_path(object): @@ -410,27 +406,27 @@ def test_resolve_symlinks(self, tmpdir): orig_working_dir = os.getcwd() os.chdir(tmpdir) try: - d = os.path.join('foo', 'bar') - f = os.path.join(d, 'file1') + d = os.path.join("foo", "bar") + f = os.path.join(d, "file1") os.makedirs(d) - with open(f, 'w'): # Create the file + with open(f, "w"): # Create the file pass - os.symlink(d, 'dir_link') - os.symlink(f, 'file_link') + os.symlink(d, "dir_link") + os.symlink(f, "file_link") assert normalize_path( - 'dir_link/file1', resolve_symlinks=True + "dir_link/file1", resolve_symlinks=True ) == os.path.join(tmpdir, f) assert normalize_path( - 'dir_link/file1', resolve_symlinks=False - ) == os.path.join(tmpdir, 'dir_link', 'file1') + "dir_link/file1", resolve_symlinks=False + ) == os.path.join(tmpdir, "dir_link", "file1") - assert normalize_path('file_link', resolve_symlinks=True) == os.path.join( + assert normalize_path("file_link", resolve_symlinks=True) == os.path.join( tmpdir, f ) - assert normalize_path('file_link', resolve_symlinks=False) == os.path.join( - tmpdir, 'file_link' + assert normalize_path("file_link", resolve_symlinks=False) == os.path.join( + tmpdir, "file_link" ) finally: os.chdir(orig_working_dir) @@ -440,19 +436,19 @@ class TestHashes(object): """Tests for 
pip._internal.utils.hashes""" @pytest.mark.parametrize( - 'hash_name, hex_digest, expected', + "hash_name, hex_digest, expected", [ # Test a value that matches but with the wrong hash_name. - ('sha384', 128 * 'a', False), + ("sha384", 128 * "a", False), # Test matching values, including values other than the first. - ('sha512', 128 * 'a', True), - ('sha512', 128 * 'b', True), + ("sha512", 128 * "a", True), + ("sha512", 128 * "b", True), # Test a matching hash_name with a value that doesn't match. - ('sha512', 128 * 'c', False), + ("sha512", 128 * "c", False), ], ) def test_is_hash_allowed(self, hash_name, hex_digest, expected): - hashes_data = {'sha512': [128 * 'a', 128 * 'b']} + hashes_data = {"sha512": [128 * "a", 128 * "b"]} hashes = Hashes(hashes_data) assert hashes.is_hash_allowed(hash_name, hex_digest) == expected @@ -462,42 +458,42 @@ def test_success(self, tmpdir): Test check_against_path because it calls everything else. """ - file = tmpdir / 'to_hash' - file.write_text('hello') + file = tmpdir / "to_hash" + file.write_text("hello") hashes = Hashes( { - 'sha256': [ - '2cf24dba5fb0a30e26e83b2ac5b9e29e' - '1b161e5c1fa7425e73043362938b9824' + "sha256": [ + "2cf24dba5fb0a30e26e83b2ac5b9e29e" + "1b161e5c1fa7425e73043362938b9824" ], - 'sha224': ['wrongwrong'], - 'md5': ['5d41402abc4b2a76b9719d911017c592'], + "sha224": ["wrongwrong"], + "md5": ["5d41402abc4b2a76b9719d911017c592"], } ) hashes.check_against_path(file) def test_failure(self): """Hashes should raise HashMismatch when no hashes match.""" - hashes = Hashes({'sha256': ['wrongwrong']}) + hashes = Hashes({"sha256": ["wrongwrong"]}) with pytest.raises(HashMismatch): - hashes.check_against_file(BytesIO(b'hello')) + hashes.check_against_file(BytesIO(b"hello")) def test_missing_hashes(self): """MissingHashes should raise HashMissing when any check is done.""" with pytest.raises(HashMissing): - MissingHashes().check_against_file(BytesIO(b'hello')) + MissingHashes().check_against_file(BytesIO(b"hello")) def 
test_unknown_hash(self): """Hashes should raise InstallationError when it encounters an unknown hash.""" - hashes = Hashes({'badbad': ['dummy']}) + hashes = Hashes({"badbad": ["dummy"]}) with pytest.raises(InstallationError): - hashes.check_against_file(BytesIO(b'hello')) + hashes.check_against_file(BytesIO(b"hello")) def test_non_zero(self): """Test that truthiness tests tell whether any known-good hashes exist.""" - assert Hashes({'sha256': 'dummy'}) + assert Hashes({"sha256": "dummy"}) assert not Hashes() assert not Hashes({}) @@ -507,41 +503,41 @@ class TestEncoding(object): def test_auto_decode_utf_16_le(self): data = ( - b'\xff\xfeD\x00j\x00a\x00n\x00g\x00o\x00=\x00' - b'=\x001\x00.\x004\x00.\x002\x00' + b"\xff\xfeD\x00j\x00a\x00n\x00g\x00o\x00=\x00" + b"=\x001\x00.\x004\x00.\x002\x00" ) assert data.startswith(codecs.BOM_UTF16_LE) assert auto_decode(data) == "Django==1.4.2" def test_auto_decode_utf_16_be(self): data = ( - b'\xfe\xff\x00D\x00j\x00a\x00n\x00g\x00o\x00=' - b'\x00=\x001\x00.\x004\x00.\x002' + b"\xfe\xff\x00D\x00j\x00a\x00n\x00g\x00o\x00=" + b"\x00=\x001\x00.\x004\x00.\x002" ) assert data.startswith(codecs.BOM_UTF16_BE) assert auto_decode(data) == "Django==1.4.2" def test_auto_decode_no_bom(self): - assert auto_decode(b'foobar') == u'foobar' + assert auto_decode(b"foobar") == u"foobar" def test_auto_decode_pep263_headers(self): - latin1_req = u'# coding=latin1\n# Pas trop de café' - assert auto_decode(latin1_req.encode('latin1')) == latin1_req + latin1_req = u"# coding=latin1\n# Pas trop de café" + assert auto_decode(latin1_req.encode("latin1")) == latin1_req def test_auto_decode_no_preferred_encoding(self): om, em = Mock(), Mock() - om.return_value = 'ascii' + om.return_value = "ascii" em.return_value = None - data = u'data' - with patch('sys.getdefaultencoding', om): - with patch('locale.getpreferredencoding', em): + data = u"data" + with patch("sys.getdefaultencoding", om): + with patch("locale.getpreferredencoding", em): ret = 
auto_decode(data.encode(sys.getdefaultencoding())) assert ret == data - @pytest.mark.parametrize('encoding', [encoding for bom, encoding in BOMS]) + @pytest.mark.parametrize("encoding", [encoding for bom, encoding in BOMS]) def test_all_encodings_are_valid(self, encoding): # we really only care that there is no LookupError - assert ''.encode(encoding).decode(encoding) == '' + assert "".encode(encoding).decode(encoding) == "" def raises(error): @@ -608,7 +604,7 @@ def test_glibc_version_string_ctypes_missing(self, monkeypatch): @pytest.mark.parametrize( - 'version_info, expected', + "version_info, expected", [ ((), (0, 0, 0)), ((3,), (3, 0, 0)), @@ -626,29 +622,29 @@ class TestGetProg(object): @pytest.mark.parametrize( ("argv", "executable", "expected"), [ - ('/usr/bin/pip', '', 'pip'), - ('-c', '/usr/bin/python', '/usr/bin/python -m pip'), - ('__main__.py', '/usr/bin/python', '/usr/bin/python -m pip'), - ('/usr/bin/pip3', '', 'pip3'), + ("/usr/bin/pip", "", "pip"), + ("-c", "/usr/bin/python", "/usr/bin/python -m pip"), + ("__main__.py", "/usr/bin/python", "/usr/bin/python -m pip"), + ("/usr/bin/pip3", "", "pip3"), ], ) def test_get_prog(self, monkeypatch, argv, executable, expected): - monkeypatch.setattr('pip._internal.utils.misc.sys.argv', [argv]) - monkeypatch.setattr('pip._internal.utils.misc.sys.executable', executable) + monkeypatch.setattr("pip._internal.utils.misc.sys.argv", [argv]) + monkeypatch.setattr("pip._internal.utils.misc.sys.executable", executable) assert get_prog() == expected @pytest.mark.parametrize( - 'args, expected', + "args, expected", [ - (['pip', 'list'], 'pip list'), + (["pip", "list"], "pip list"), ( - ['foo', 'space space', 'new\nline', 'double"quote', "single'quote"], + ["foo", "space space", "new\nline", 'double"quote', "single'quote"], """foo 'space space' 'new\nline' 'double"quote' 'single'"'"'quote'""", ), # Test HiddenText arguments. 
( - make_command(hide_value('secret1'), 'foo', hide_value('secret2')), + make_command(hide_value("secret1"), "foo", hide_value("secret2")), "'****' foo '****'", ), ], @@ -659,9 +655,9 @@ def test_format_command_args(args, expected): def test_make_subprocess_output_error(): - cmd_args = ['test', 'has space'] - cwd = '/path/to/cwd' - lines = ['line1\n', 'line2\n', 'line3\n'] + cmd_args = ["test", "has space"] + cwd = "/path/to/cwd" + lines = ["line1\n", "line2\n", "line3\n"] actual = make_subprocess_output_error( cmd_args=cmd_args, cwd=cwd, lines=lines, exit_status=3 ) @@ -676,24 +672,24 @@ def test_make_subprocess_output_error(): line3 ----------------------------------------""" ) - assert actual == expected, 'actual: {}'.format(actual) + assert actual == expected, "actual: {}".format(actual) def test_make_subprocess_output_error__non_ascii_command_arg(monkeypatch): """ Test a command argument with a non-ascii character. """ - cmd_args = ['foo', 'déf'] + cmd_args = ["foo", "déf"] if sys.version_info[0] == 2: # Check in Python 2 that the str (bytes object) with the non-ascii # character has the encoding we expect. (This comes from the source # code encoding at the top of the file.) - assert cmd_args[1].decode('utf-8') == u'déf' + assert cmd_args[1].decode("utf-8") == u"déf" # We need to monkeypatch so the encoding will be correct on Windows. 
- monkeypatch.setattr(locale, 'getpreferredencoding', lambda: 'utf-8') + monkeypatch.setattr(locale, "getpreferredencoding", lambda: "utf-8") actual = make_subprocess_output_error( - cmd_args=cmd_args, cwd='/path/to/cwd', lines=[], exit_status=1 + cmd_args=cmd_args, cwd="/path/to/cwd", lines=[], exit_status=1 ) expected = dedent( u"""\ @@ -703,7 +699,7 @@ def test_make_subprocess_output_error__non_ascii_command_arg(monkeypatch): Complete output (0 lines): ----------------------------------------""" ) - assert actual == expected, u'actual: {}'.format(actual) + assert actual == expected, u"actual: {}".format(actual) @pytest.mark.skipif("sys.version_info < (3,)") @@ -711,8 +707,8 @@ def test_make_subprocess_output_error__non_ascii_cwd_python_3(monkeypatch): """ Test a str (text) cwd with a non-ascii character in Python 3. """ - cmd_args = ['test'] - cwd = '/path/to/cwd/déf' + cmd_args = ["test"] + cwd = "/path/to/cwd/déf" actual = make_subprocess_output_error( cmd_args=cmd_args, cwd=cwd, lines=[], exit_status=1 ) @@ -724,15 +720,15 @@ def test_make_subprocess_output_error__non_ascii_cwd_python_3(monkeypatch): Complete output (0 lines): ----------------------------------------""" ) - assert actual == expected, 'actual: {}'.format(actual) + assert actual == expected, "actual: {}".format(actual) @pytest.mark.parametrize( - 'encoding', + "encoding", [ - 'utf-8', + "utf-8", # Test a Windows encoding. - 'cp1252', + "cp1252", ], ) @pytest.mark.skipif("sys.version_info >= (3,)") @@ -740,9 +736,9 @@ def test_make_subprocess_output_error__non_ascii_cwd_python_2(monkeypatch, encod """ Test a str (bytes object) cwd with a non-ascii character in Python 2. 
""" - cmd_args = ['test'] - cwd = u'/path/to/cwd/déf'.encode(encoding) - monkeypatch.setattr(sys, 'getfilesystemencoding', lambda: encoding) + cmd_args = ["test"] + cwd = u"/path/to/cwd/déf".encode(encoding) + monkeypatch.setattr(sys, "getfilesystemencoding", lambda: encoding) actual = make_subprocess_output_error( cmd_args=cmd_args, cwd=cwd, lines=[], exit_status=1 ) @@ -754,7 +750,7 @@ def test_make_subprocess_output_error__non_ascii_cwd_python_2(monkeypatch, encod Complete output (0 lines): ----------------------------------------""" ) - assert actual == expected, u'actual: {}'.format(actual) + assert actual == expected, u"actual: {}".format(actual) # This test is mainly important for checking unicode in Python 2. @@ -762,9 +758,9 @@ def test_make_subprocess_output_error__non_ascii_line(): """ Test a line with a non-ascii character. """ - lines = [u'curly-quote: \u2018\n'] + lines = [u"curly-quote: \u2018\n"] actual = make_subprocess_output_error( - cmd_args=['test'], cwd='/path/to/cwd', lines=lines, exit_status=1 + cmd_args=["test"], cwd="/path/to/cwd", lines=lines, exit_status=1 ) expected = dedent( u"""\ @@ -775,7 +771,7 @@ def test_make_subprocess_output_error__non_ascii_line(): curly-quote: \u2018 ----------------------------------------""" ) - assert actual == expected, u'actual: {}'.format(actual) + assert actual == expected, u"actual: {}".format(actual) class FakeSpinner(SpinnerInterface): @@ -824,11 +820,11 @@ def check_result( # Confirm that stdout and stderr haven't been written to. captured = capfd.readouterr() - assert (captured.out, captured.err) == ('', '') + assert (captured.out, captured.err) == ("", "") records = caplog.record_tuples if len(records) != len(expected_records): - raise RuntimeError('{} != {}'.format(records, expected_records)) + raise RuntimeError("{} != {}".format(records, expected_records)) for record, expected_record in zip(records, expected_records): # Check the logger_name and log level parts exactly. 
@@ -849,7 +845,7 @@ def prepare_call(self, caplog, log_level, command=None): caplog.set_level(log_level) spinner = FakeSpinner() - args = [sys.executable, '-c', command] + args = [sys.executable, "-c", command] return (args, spinner) @@ -862,11 +858,11 @@ def test_debug_logging(self, capfd, caplog): result = call_subprocess(args, spinner=spinner) expected = ( - ['Hello', 'world'], + ["Hello", "world"], [ - ('pip.subprocessor', DEBUG, 'Running command '), - ('pip.subprocessor', DEBUG, 'Hello'), - ('pip.subprocessor', DEBUG, 'world'), + ("pip.subprocessor", DEBUG, "Running command "), + ("pip.subprocessor", DEBUG, "Hello"), + ("pip.subprocessor", DEBUG, "world"), ], ) # The spinner shouldn't spin in this case since the subprocess @@ -889,7 +885,7 @@ def test_info_logging(self, capfd, caplog): args, spinner = self.prepare_call(caplog, log_level) result = call_subprocess(args, spinner=spinner) - expected = (['Hello', 'world'], []) + expected = (["Hello", "world"], []) # The spinner should spin twice in this case since the subprocess # output isn't being written to the console. 
self.check_result( @@ -899,7 +895,7 @@ def test_info_logging(self, capfd, caplog): spinner, result, expected, - expected_spinner=(2, 'done'), + expected_spinner=(2, "done"), ) def test_info_logging__subprocess_error(self, capfd, caplog): @@ -915,10 +911,10 @@ def test_info_logging__subprocess_error(self, capfd, caplog): call_subprocess(args, spinner=spinner) result = None exc_message = str(exc.value) - assert exc_message.startswith('Command errored out with exit status 1: ') - assert exc_message.endswith('Check the logs for full command output.') + assert exc_message.startswith("Command errored out with exit status 1: ") + assert exc_message.endswith("Check the logs for full command output.") - expected = (None, [('pip.subprocessor', ERROR, 'Complete output (3 lines):\n')]) + expected = (None, [("pip.subprocessor", ERROR, "Complete output (3 lines):\n")]) # The spinner should spin three times in this case since the # subprocess output isn't being written to the console. self.check_result( @@ -928,7 +924,7 @@ def test_info_logging__subprocess_error(self, capfd, caplog): spinner, result, expected, - expected_spinner=(3, 'error'), + expected_spinner=(3, "error"), ) # Do some further checking on the captured log records to confirm @@ -946,18 +942,18 @@ def test_info_logging__subprocess_error(self, capfd, caplog): # exact match. command_line = actual.pop(1) assert actual == [ - ' cwd: None', - '----------------------------------------', - 'Command errored out with exit status 1:', - 'Complete output (3 lines):', - 'Hello', - 'fail', - 'world', - ], 'lines: {}'.format( + " cwd: None", + "----------------------------------------", + "Command errored out with exit status 1:", + "Complete output (3 lines):", + "Hello", + "fail", + "world", + ], "lines: {}".format( actual ) # Show the full output on failure. 
- assert command_line.startswith(' command: ') + assert command_line.startswith(" command: ") assert command_line.endswith('print("world"); exit("fail")\'') def test_info_logging_with_show_stdout_true(self, capfd, caplog): @@ -969,11 +965,11 @@ def test_info_logging_with_show_stdout_true(self, capfd, caplog): result = call_subprocess(args, spinner=spinner, show_stdout=True) expected = ( - ['Hello', 'world'], + ["Hello", "world"], [ - ('pip.subprocessor', INFO, 'Running command '), - ('pip.subprocessor', INFO, 'Hello'), - ('pip.subprocessor', INFO, 'world'), + ("pip.subprocessor", INFO, "Running command "), + ("pip.subprocessor", INFO, "Hello"), + ("pip.subprocessor", INFO, "world"), ], ) # The spinner shouldn't spin in this case since the subprocess @@ -989,12 +985,12 @@ def test_info_logging_with_show_stdout_true(self, capfd, caplog): ) @pytest.mark.parametrize( - ('exit_status', 'show_stdout', 'extra_ok_returncodes', 'log_level', 'expected'), + ("exit_status", "show_stdout", "extra_ok_returncodes", "log_level", "expected"), [ # The spinner should show here because show_stdout=False means # the subprocess should get logged at DEBUG level, but the passed # log level is only INFO. - (0, False, None, INFO, (None, 'done', 2)), + (0, False, None, INFO, (None, "done", 2)), # Test some cases where the spinner should not be shown. (0, False, None, DEBUG, (None, None, 0)), # Test show_stdout=True. @@ -1003,11 +999,11 @@ def test_info_logging_with_show_stdout_true(self, capfd, caplog): # The spinner should show here because show_stdout=True means # the subprocess should get logged at INFO level, but the passed # log level is only WARNING. - (0, True, None, WARNING, (None, 'done', 2)), + (0, True, None, WARNING, (None, "done", 2)), # Test a non-zero exit status. - (3, False, None, INFO, (InstallationError, 'error', 2)), + (3, False, None, INFO, (InstallationError, "error", 2)), # Test a non-zero exit status also in extra_ok_returncodes. 
- (3, False, (3,), INFO, (None, 'done', 2)), + (3, False, (3,), INFO, (None, "done", 2)), ], ) def test_spinner_finish( @@ -1046,22 +1042,22 @@ def test_spinner_finish( def test_closes_stdin(self): with pytest.raises(InstallationError): - call_subprocess([sys.executable, '-c', 'input()'], show_stdout=True) + call_subprocess([sys.executable, "-c", "input()"], show_stdout=True) @pytest.mark.parametrize( - 'host_port, expected_netloc', + "host_port, expected_netloc", [ # Test domain name. - (('example.com', None), 'example.com'), - (('example.com', 5000), 'example.com:5000'), + (("example.com", None), "example.com"), + (("example.com", 5000), "example.com:5000"), # Test IPv4 address. - (('127.0.0.1', None), '127.0.0.1'), - (('127.0.0.1', 5000), '127.0.0.1:5000'), + (("127.0.0.1", None), "127.0.0.1"), + (("127.0.0.1", 5000), "127.0.0.1:5000"), # Test bare IPv6 address. - (('2001:db6::1', None), '2001:db6::1'), + (("2001:db6::1", None), "2001:db6::1"), # Test IPv6 with port. - (('2001:db6::1', 5000), '[2001:db6::1]:5000'), + (("2001:db6::1", 5000), "[2001:db6::1]:5000"), ], ) def test_build_netloc(host_port, expected_netloc): @@ -1069,23 +1065,23 @@ def test_build_netloc(host_port, expected_netloc): @pytest.mark.parametrize( - 'netloc, expected_url, expected_host_port', + "netloc, expected_url, expected_host_port", [ # Test domain name. - ('example.com', 'https://example.com', ('example.com', None)), - ('example.com:5000', 'https://example.com:5000', ('example.com', 5000)), + ("example.com", "https://example.com", ("example.com", None)), + ("example.com:5000", "https://example.com:5000", ("example.com", 5000)), # Test IPv4 address. - ('127.0.0.1', 'https://127.0.0.1', ('127.0.0.1', None)), - ('127.0.0.1:5000', 'https://127.0.0.1:5000', ('127.0.0.1', 5000)), + ("127.0.0.1", "https://127.0.0.1", ("127.0.0.1", None)), + ("127.0.0.1:5000", "https://127.0.0.1:5000", ("127.0.0.1", 5000)), # Test bare IPv6 address. 
- ('2001:db6::1', 'https://[2001:db6::1]', ('2001:db6::1', None)), + ("2001:db6::1", "https://[2001:db6::1]", ("2001:db6::1", None)), # Test IPv6 with port. - ('[2001:db6::1]:5000', 'https://[2001:db6::1]:5000', ('2001:db6::1', 5000)), + ("[2001:db6::1]:5000", "https://[2001:db6::1]:5000", ("2001:db6::1", 5000)), # Test netloc with auth. ( - 'user:password@localhost:5000', - 'https://user:password@localhost:5000', - ('localhost', 5000), + "user:password@localhost:5000", + "https://user:password@localhost:5000", + ("localhost", 5000), ), ], ) @@ -1097,22 +1093,22 @@ def test_build_url_from_netloc_and_parse_netloc( @pytest.mark.parametrize( - 'netloc, expected', + "netloc, expected", [ # Test a basic case. - ('example.com', ('example.com', (None, None))), + ("example.com", ("example.com", (None, None))), # Test with username and no password. - ('user@example.com', ('example.com', ('user', None))), + ("user@example.com", ("example.com", ("user", None))), # Test with username and password. - ('user:pass@example.com', ('example.com', ('user', 'pass'))), + ("user:pass@example.com", ("example.com", ("user", "pass"))), # Test with username and empty password. - ('user:@example.com', ('example.com', ('user', ''))), + ("user:@example.com", ("example.com", ("user", ""))), # Test the password containing an @ symbol. - ('user:pass@word@example.com', ('example.com', ('user', 'pass@word'))), + ("user:pass@word@example.com", ("example.com", ("user", "pass@word"))), # Test the password containing a : symbol. - ('user:pass:word@example.com', ('example.com', ('user', 'pass:word'))), + ("user:pass:word@example.com", ("example.com", ("user", "pass:word"))), # Test URL-encoded reserved characters. 
- ('user%3Aname:%23%40%5E@example.com', ('example.com', ('user:name', '#@^'))), + ("user%3Aname:%23%40%5E@example.com", ("example.com", ("user:name", "#@^"))), ], ) def test_split_auth_from_netloc(netloc, expected): @@ -1121,42 +1117,42 @@ def test_split_auth_from_netloc(netloc, expected): @pytest.mark.parametrize( - 'url, expected', + "url, expected", [ # Test a basic case. ( - 'http://example.com/path#anchor', - ('http://example.com/path#anchor', 'example.com', (None, None)), + "http://example.com/path#anchor", + ("http://example.com/path#anchor", "example.com", (None, None)), ), # Test with username and no password. ( - 'http://user@example.com/path#anchor', - ('http://example.com/path#anchor', 'example.com', ('user', None)), + "http://user@example.com/path#anchor", + ("http://example.com/path#anchor", "example.com", ("user", None)), ), # Test with username and password. ( - 'http://user:pass@example.com/path#anchor', - ('http://example.com/path#anchor', 'example.com', ('user', 'pass')), + "http://user:pass@example.com/path#anchor", + ("http://example.com/path#anchor", "example.com", ("user", "pass")), ), # Test with username and empty password. ( - 'http://user:@example.com/path#anchor', - ('http://example.com/path#anchor', 'example.com', ('user', '')), + "http://user:@example.com/path#anchor", + ("http://example.com/path#anchor", "example.com", ("user", "")), ), # Test the password containing an @ symbol. ( - 'http://user:pass@word@example.com/path#anchor', - ('http://example.com/path#anchor', 'example.com', ('user', 'pass@word')), + "http://user:pass@word@example.com/path#anchor", + ("http://example.com/path#anchor", "example.com", ("user", "pass@word")), ), # Test the password containing a : symbol. 
( - 'http://user:pass:word@example.com/path#anchor', - ('http://example.com/path#anchor', 'example.com', ('user', 'pass:word')), + "http://user:pass:word@example.com/path#anchor", + ("http://example.com/path#anchor", "example.com", ("user", "pass:word")), ), # Test URL-encoded reserved characters. ( - 'http://user%3Aname:%23%40%5E@example.com/path#anchor', - ('http://example.com/path#anchor', 'example.com', ('user:name', '#@^')), + "http://user%3Aname:%23%40%5E@example.com/path#anchor", + ("http://example.com/path#anchor", "example.com", ("user:name", "#@^")), ), ], ) @@ -1166,22 +1162,22 @@ def test_split_auth_netloc_from_url(url, expected): @pytest.mark.parametrize( - 'netloc, expected', + "netloc, expected", [ # Test a basic case. - ('example.com', 'example.com'), + ("example.com", "example.com"), # Test with username and no password. - ('accesstoken@example.com', '****@example.com'), + ("accesstoken@example.com", "****@example.com"), # Test with username and password. - ('user:pass@example.com', 'user:****@example.com'), + ("user:pass@example.com", "user:****@example.com"), # Test with username and empty password. - ('user:@example.com', 'user:****@example.com'), + ("user:@example.com", "user:****@example.com"), # Test the password containing an @ symbol. - ('user:pass@word@example.com', 'user:****@example.com'), + ("user:pass@word@example.com", "user:****@example.com"), # Test the password containing a : symbol. - ('user:pass:word@example.com', 'user:****@example.com'), + ("user:pass:word@example.com", "user:****@example.com"), # Test URL-encoded reserved characters. 
- ('user%3Aname:%23%40%5E@example.com', 'user%3Aname:****@example.com'), + ("user%3Aname:%23%40%5E@example.com", "user%3Aname:****@example.com"), ], ) def test_redact_netloc(netloc, expected): @@ -1190,27 +1186,27 @@ def test_redact_netloc(netloc, expected): @pytest.mark.parametrize( - 'auth_url, expected_url', + "auth_url, expected_url", [ ( - 'https://user:pass@domain.tld/project/tags/v0.2', - 'https://domain.tld/project/tags/v0.2', + "https://user:pass@domain.tld/project/tags/v0.2", + "https://domain.tld/project/tags/v0.2", ), ( - 'https://domain.tld/project/tags/v0.2', - 'https://domain.tld/project/tags/v0.2', + "https://domain.tld/project/tags/v0.2", + "https://domain.tld/project/tags/v0.2", ), ( - 'https://user:pass@domain.tld/svn/project/trunk@8181', - 'https://domain.tld/svn/project/trunk@8181', + "https://user:pass@domain.tld/svn/project/trunk@8181", + "https://domain.tld/svn/project/trunk@8181", ), ( - 'https://domain.tld/project/trunk@8181', - 'https://domain.tld/project/trunk@8181', + "https://domain.tld/project/trunk@8181", + "https://domain.tld/project/trunk@8181", ), - ('git+https://pypi.org/something', 'git+https://pypi.org/something'), - ('git+https://user:pass@pypi.org/something', 'git+https://pypi.org/something'), - ('git+ssh://git@pypi.org/something', 'git+ssh://pypi.org/something'), + ("git+https://pypi.org/something", "git+https://pypi.org/something"), + ("git+https://user:pass@pypi.org/something", "git+https://pypi.org/something"), + ("git+ssh://git@pypi.org/something", "git+ssh://pypi.org/something"), ], ) def test_remove_auth_from_url(auth_url, expected_url): @@ -1219,16 +1215,16 @@ def test_remove_auth_from_url(auth_url, expected_url): @pytest.mark.parametrize( - 'auth_url, expected_url', + "auth_url, expected_url", [ - ('https://accesstoken@example.com/abc', 'https://****@example.com/abc'), - ('https://user:password@example.com', 'https://user:****@example.com'), - ('https://user:@example.com', 'https://user:****@example.com'), - 
('https://example.com', 'https://example.com'), + ("https://accesstoken@example.com/abc", "https://****@example.com/abc"), + ("https://user:password@example.com", "https://user:****@example.com"), + ("https://user:@example.com", "https://user:****@example.com"), + ("https://example.com", "https://example.com"), # Test URL-encoded reserved characters. ( - 'https://user%3Aname:%23%40%5E@example.com', - 'https://user%3Aname:****@example.com', + "https://user%3Aname:%23%40%5E@example.com", + "https://user%3Aname:****@example.com", ), ], ) @@ -1242,17 +1238,17 @@ def test_basic(self): """ Test str(), repr(), and attribute access. """ - hidden = HiddenText('my-secret', redacted='######') + hidden = HiddenText("my-secret", redacted="######") assert repr(hidden) == "" - assert str(hidden) == '######' - assert hidden.redacted == '######' - assert hidden.secret == 'my-secret' + assert str(hidden) == "######" + assert hidden.redacted == "######" + assert hidden.secret == "my-secret" def test_equality_with_str(self): """ Test equality (and inequality) with str objects. """ - hidden = HiddenText('secret', redacted='****') + hidden = HiddenText("secret", redacted="****") # Test that the object doesn't compare equal to either its original # or redacted forms. @@ -1267,8 +1263,8 @@ def test_equality_same_secret(self): Test equality with an object having the same secret. """ # Choose different redactions for the two objects. - hidden1 = HiddenText('secret', redacted='****') - hidden2 = HiddenText('secret', redacted='####') + hidden1 = HiddenText("secret", redacted="****") + hidden2 = HiddenText("secret", redacted="####") assert hidden1 == hidden2 # Also test __ne__. This assertion fails in Python 2 without @@ -1279,8 +1275,8 @@ def test_equality_different_secret(self): """ Test equality with an object having a different secret. 
""" - hidden1 = HiddenText('secret-1', redacted='****') - hidden2 = HiddenText('secret-2', redacted='****') + hidden1 = HiddenText("secret-1", redacted="****") + hidden2 = HiddenText("secret-2", redacted="****") assert hidden1 != hidden2 # Also test __eq__. @@ -1288,19 +1284,19 @@ def test_equality_different_secret(self): def test_hide_value(): - hidden = hide_value('my-secret') + hidden = hide_value("my-secret") assert repr(hidden) == "" - assert str(hidden) == '****' - assert hidden.redacted == '****' - assert hidden.secret == 'my-secret' + assert str(hidden) == "****" + assert hidden.redacted == "****" + assert hidden.secret == "my-secret" def test_hide_url(): - hidden_url = hide_url('https://user:password@example.com') + hidden_url = hide_url("https://user:password@example.com") assert repr(hidden_url) == "" - assert str(hidden_url) == 'https://user:****@example.com' - assert hidden_url.redacted == 'https://user:****@example.com' - assert hidden_url.secret == 'https://user:password@example.com' + assert str(hidden_url) == "https://user:****@example.com" + assert hidden_url.redacted == "https://user:****@example.com" + assert hidden_url.secret == "https://user:password@example.com" @pytest.fixture() @@ -1380,23 +1376,23 @@ def test_deprecated_message_reads_well(): def test_make_setuptools_shim_args(): # Test all arguments at once, including the overall ordering. args = make_setuptools_shim_args( - '/dir/path/setup.py', - global_options=['--some', '--option'], + "/dir/path/setup.py", + global_options=["--some", "--option"], no_user_config=True, unbuffered_output=True, ) - assert args[1:3] == ['-u', '-c'] + assert args[1:3] == ["-u", "-c"] # Spot-check key aspects of the command string. 
assert "sys.argv[0] = '/dir/path/setup.py'" in args[3] assert "__file__='/dir/path/setup.py'" in args[3] - assert args[4:] == ['--some', '--option', '--no-user-cfg'] + assert args[4:] == ["--some", "--option", "--no-user-cfg"] -@pytest.mark.parametrize('global_options', [None, [], ['--some', '--option']]) +@pytest.mark.parametrize("global_options", [None, [], ["--some", "--option"]]) def test_make_setuptools_shim_args__global_options(global_options): args = make_setuptools_shim_args( - '/dir/path/setup.py', global_options=global_options + "/dir/path/setup.py", global_options=global_options ) if global_options: @@ -1407,17 +1403,17 @@ def test_make_setuptools_shim_args__global_options(global_options): assert len(args) == 3 -@pytest.mark.parametrize('no_user_config', [False, True]) +@pytest.mark.parametrize("no_user_config", [False, True]) def test_make_setuptools_shim_args__no_user_config(no_user_config): args = make_setuptools_shim_args( - '/dir/path/setup.py', no_user_config=no_user_config + "/dir/path/setup.py", no_user_config=no_user_config ) - assert ('--no-user-cfg' in args) == no_user_config + assert ("--no-user-cfg" in args) == no_user_config -@pytest.mark.parametrize('unbuffered_output', [False, True]) +@pytest.mark.parametrize("unbuffered_output", [False, True]) def test_make_setuptools_shim_args__unbuffered_output(unbuffered_output): args = make_setuptools_shim_args( - '/dir/path/setup.py', unbuffered_output=unbuffered_output + "/dir/path/setup.py", unbuffered_output=unbuffered_output ) - assert ('-u' in args) == unbuffered_output + assert ("-u" in args) == unbuffered_output diff --git a/tests/unit/test_utils_temp_dir.py b/tests/unit/test_utils_temp_dir.py index 66127b654bd..120cf7b6887 100644 --- a/tests/unit/test_utils_temp_dir.py +++ b/tests/unit/test_utils_temp_dir.py @@ -115,8 +115,8 @@ def names(): assert len(same_len) > 10 # Check the first group are correct - expected_names = ['~' + name[1:]] - expected_names.extend('~' + c + name[2:] for c in 
chars) + expected_names = ["~" + name[1:]] + expected_names.extend("~" + c + name[2:] for c in chars) for x, y in zip(some_names, expected_names): assert x == y diff --git a/tests/unit/test_utils_unpacking.py b/tests/unit/test_utils_unpacking.py index 82318298fca..aba337ce195 100644 --- a/tests/unit/test_utils_unpacking.py +++ b/tests/unit/test_utils_unpacking.py @@ -44,25 +44,25 @@ def confirm_files(self): # expectations based on 022 umask set above and the unpack logic that # sets execute permissions, not preservation for fname, expected_mode, test, expected_contents in [ - ('file.txt', 0o644, os.path.isfile, b'file\n'), + ("file.txt", 0o644, os.path.isfile, b"file\n"), # We don't test the "symlink.txt" contents for now. - ('symlink.txt', 0o644, os.path.isfile, None), - ('script_owner.sh', 0o755, os.path.isfile, b'file\n'), - ('script_group.sh', 0o755, os.path.isfile, b'file\n'), - ('script_world.sh', 0o755, os.path.isfile, b'file\n'), - ('dir', 0o755, os.path.isdir, None), - (os.path.join('dir', 'dirfile'), 0o644, os.path.isfile, b''), + ("symlink.txt", 0o644, os.path.isfile, None), + ("script_owner.sh", 0o755, os.path.isfile, b"file\n"), + ("script_group.sh", 0o755, os.path.isfile, b"file\n"), + ("script_world.sh", 0o755, os.path.isfile, b"file\n"), + ("dir", 0o755, os.path.isdir, None), + (os.path.join("dir", "dirfile"), 0o644, os.path.isfile, b""), ]: path = os.path.join(self.tempdir, fname) - if path.endswith('symlink.txt') and sys.platform == 'win32': + if path.endswith("symlink.txt") and sys.platform == "win32": # no symlinks created on windows continue assert test(path), path if expected_contents is not None: - with open(path, mode='rb') as f: + with open(path, mode="rb") as f: contents = f.read() - assert contents == expected_contents, 'fname: {}'.format(fname) - if sys.platform == 'win32': + assert contents == expected_contents, "fname: {}".format(fname) + if sys.platform == "win32": # the permissions tests below don't apply in windows # due to os.chmod 
being a noop continue @@ -80,7 +80,7 @@ def test_unpack_tgz(self, data): untar_file(test_file, self.tempdir) self.confirm_files() # Check the timestamp of an extracted file - file_txt_path = os.path.join(self.tempdir, 'file.txt') + file_txt_path = os.path.join(self.tempdir, "file.txt") mtime = time.gmtime(os.stat(file_txt_path).st_mtime) assert mtime[0:6] == (2013, 8, 16, 5, 13, 37), mtime diff --git a/tests/unit/test_utils_virtualenv.py b/tests/unit/test_utils_virtualenv.py index 8b9e54afbeb..b37393def7c 100644 --- a/tests/unit/test_utils_virtualenv.py +++ b/tests/unit/test_utils_virtualenv.py @@ -45,10 +45,10 @@ def test_running_under_virtualenv(monkeypatch, real_prefix, base_prefix, expecte def test_virtualenv_no_global( monkeypatch, tmpdir, running_under_virtualenv, no_global_file, expected ): - monkeypatch.setattr(site, '__file__', tmpdir / 'site.py') + monkeypatch.setattr(site, "__file__", tmpdir / "site.py") monkeypatch.setattr( - virtualenv, 'running_under_virtualenv', lambda: running_under_virtualenv + virtualenv, "running_under_virtualenv", lambda: running_under_virtualenv ) if no_global_file: - (tmpdir / 'no-global-site-packages.txt').touch() + (tmpdir / "no-global-site-packages.txt").touch() assert virtualenv.virtualenv_no_global() == expected diff --git a/tests/unit/test_vcs.py b/tests/unit/test_vcs.py index 81bfca662ec..b4b71ce081f 100644 --- a/tests/unit/test_vcs.py +++ b/tests/unit/test_vcs.py @@ -15,14 +15,14 @@ from pip._internal.vcs.versioncontrol import RevOptions, VersionControl from tests.lib import is_svn_installed, pyversion -if pyversion >= '3': +if pyversion >= "3": VERBOSE_FALSE = False else: VERBOSE_FALSE = 0 @pytest.mark.skipif( - 'TRAVIS' not in os.environ, reason='Subversion is only required under Travis' + "TRAVIS" not in os.environ, reason="Subversion is only required under Travis" ) def test_ensure_svn_available(): """Make sure that svn is available when running in Travis.""" @@ -30,27 +30,27 @@ def test_ensure_svn_available(): 
@pytest.mark.parametrize( - 'args, expected', + "args, expected", [ # Test without subdir. ( - ('git+https://example.com/pkg', 'dev', 'myproj'), - 'git+https://example.com/pkg@dev#egg=myproj', + ("git+https://example.com/pkg", "dev", "myproj"), + "git+https://example.com/pkg@dev#egg=myproj", ), # Test with subdir. ( - ('git+https://example.com/pkg', 'dev', 'myproj', 'sub/dir'), - 'git+https://example.com/pkg@dev#egg=myproj&subdirectory=sub/dir', + ("git+https://example.com/pkg", "dev", "myproj", "sub/dir"), + "git+https://example.com/pkg@dev#egg=myproj&subdirectory=sub/dir", ), # Test with None subdir. ( - ('git+https://example.com/pkg', 'dev', 'myproj', None), - 'git+https://example.com/pkg@dev#egg=myproj', + ("git+https://example.com/pkg", "dev", "myproj", None), + "git+https://example.com/pkg@dev#egg=myproj", ), # Test an unescaped project name. ( - ('git+https://example.com/pkg', 'dev', 'zope-interface'), - 'git+https://example.com/pkg@dev#egg=zope_interface', + ("git+https://example.com/pkg", "dev", "zope-interface"), + "git+https://example.com/pkg@dev#egg=zope_interface", ), ], ) @@ -60,24 +60,24 @@ def test_make_vcs_requirement_url(args, expected): def test_rev_options_repr(): - rev_options = RevOptions(Git, 'develop') + rev_options = RevOptions(Git, "develop") assert repr(rev_options) == "" @pytest.mark.parametrize( - ('vc_class', 'expected1', 'expected2', 'kwargs'), + ("vc_class", "expected1", "expected2", "kwargs"), [ # First check VCS-specific RevOptions behavior. - (Bazaar, [], ['-r', '123'], {}), - (Git, ['HEAD'], ['123'], {}), - (Mercurial, [], ['123'], {}), - (Subversion, [], ['-r', '123'], {}), + (Bazaar, [], ["-r", "123"], {}), + (Git, ["HEAD"], ["123"], {}), + (Mercurial, [], ["123"], {}), + (Subversion, [], ["-r", "123"], {}), # Test extra_args. For this, test using a single VersionControl class. 
( Git, - ['HEAD', 'opt1', 'opt2'], - ['123', 'opt1', 'opt2'], - dict(extra_args=['opt1', 'opt2']), + ["HEAD", "opt1", "opt2"], + ["123", "opt1", "opt2"], + dict(extra_args=["opt1", "opt2"]), ), ], ) @@ -86,7 +86,7 @@ def test_rev_options_to_args(vc_class, expected1, expected2, kwargs): Test RevOptions.to_args(). """ assert RevOptions(vc_class, **kwargs).to_args() == expected1 - assert RevOptions(vc_class, '123', **kwargs).to_args() == expected2 + assert RevOptions(vc_class, "123", **kwargs).to_args() == expected2 def test_rev_options_to_display(): @@ -96,10 +96,10 @@ def test_rev_options_to_display(): # The choice of VersionControl class doesn't matter here since # the implementation is the same for all of them. rev_options = RevOptions(Git) - assert rev_options.to_display() == '' + assert rev_options.to_display() == "" - rev_options = RevOptions(Git, 'master') - assert rev_options.to_display() == ' (to revision master)' + rev_options = RevOptions(Git, "master") + assert rev_options.to_display() == " (to revision master)" def test_rev_options_make_new(): @@ -108,25 +108,25 @@ def test_rev_options_make_new(): """ # The choice of VersionControl class doesn't matter here since # the implementation is the same for all of them. - rev_options = RevOptions(Git, 'master', extra_args=['foo', 'bar']) - new_options = rev_options.make_new('develop') + rev_options = RevOptions(Git, "master", extra_args=["foo", "bar"]) + new_options = rev_options.make_new("develop") assert new_options is not rev_options - assert new_options.extra_args == ['foo', 'bar'] - assert new_options.rev == 'develop' + assert new_options.extra_args == ["foo", "bar"] + assert new_options.rev == "develop" assert new_options.vc_class is Git @pytest.mark.parametrize( - 'sha, expected', + "sha, expected", [ - ((40 * 'a'), True), - ((40 * 'A'), True), + ((40 * "a"), True), + ((40 * "A"), True), # Test a string containing all valid characters. 
- ((18 * 'a' + '0123456789abcdefABCDEF'), True), - ((40 * 'g'), False), - ((39 * 'a'), False), - ((41 * 'a'), False), + ((18 * "a" + "0123456789abcdefABCDEF"), True), + ((40 * "g"), False), + ((39 * "a"), False), + ((41 * "a"), False), ], ) def test_looks_like_hash(sha, expected): @@ -134,13 +134,13 @@ def test_looks_like_hash(sha, expected): @pytest.mark.parametrize( - 'vcs_cls, remote_url, expected', + "vcs_cls, remote_url, expected", [ # Git is one of the subclasses using the base class implementation. - (Git, 'git://example.com/MyProject', False), - (Git, 'http://example.com/MyProject', True), + (Git, "git://example.com/MyProject", False), + (Git, "http://example.com/MyProject", True), # Subversion is the only subclass overriding the base class implementation. - (Subversion, 'svn://example.com/MyProject', True), + (Subversion, "svn://example.com/MyProject", True), ], ) def test_should_add_vcs_url_prefix(vcs_cls, remote_url, expected): @@ -148,99 +148,99 @@ def test_should_add_vcs_url_prefix(vcs_cls, remote_url, expected): assert actual == expected -@patch('pip._internal.vcs.git.Git.get_remote_url') -@patch('pip._internal.vcs.git.Git.get_revision') -@patch('pip._internal.vcs.git.Git.get_subdirectory') +@patch("pip._internal.vcs.git.Git.get_remote_url") +@patch("pip._internal.vcs.git.Git.get_revision") +@patch("pip._internal.vcs.git.Git.get_subdirectory") @pytest.mark.network def test_git_get_src_requirements( mock_get_subdirectory, mock_get_revision, mock_get_remote_url ): - git_url = 'https://github.com/pypa/pip-test-package' - sha = '5547fa909e83df8bd743d3978d6667497983a4b7' + git_url = "https://github.com/pypa/pip-test-package" + sha = "5547fa909e83df8bd743d3978d6667497983a4b7" mock_get_remote_url.return_value = git_url mock_get_revision.return_value = sha mock_get_subdirectory.return_value = None - ret = Git.get_src_requirement('.', 'pip-test-package') + ret = Git.get_src_requirement(".", "pip-test-package") assert ret == ( - 
'git+https://github.com/pypa/pip-test-package' - '@5547fa909e83df8bd743d3978d6667497983a4b7#egg=pip_test_package' + "git+https://github.com/pypa/pip-test-package" + "@5547fa909e83df8bd743d3978d6667497983a4b7#egg=pip_test_package" ) -@patch('pip._internal.vcs.git.Git.get_revision_sha') +@patch("pip._internal.vcs.git.Git.get_revision_sha") def test_git_resolve_revision_rev_exists(get_sha_mock): - get_sha_mock.return_value = ('123456', False) - url = 'git+https://git.example.com' - rev_options = Git.make_rev_options('develop') + get_sha_mock.return_value = ("123456", False) + url = "git+https://git.example.com" + rev_options = Git.make_rev_options("develop") - new_options = Git.resolve_revision('.', url, rev_options) - assert new_options.rev == '123456' + new_options = Git.resolve_revision(".", url, rev_options) + assert new_options.rev == "123456" -@patch('pip._internal.vcs.git.Git.get_revision_sha') +@patch("pip._internal.vcs.git.Git.get_revision_sha") def test_git_resolve_revision_rev_not_found(get_sha_mock): get_sha_mock.return_value = (None, False) - url = 'git+https://git.example.com' - rev_options = Git.make_rev_options('develop') + url = "git+https://git.example.com" + rev_options = Git.make_rev_options("develop") - new_options = Git.resolve_revision('.', url, rev_options) - assert new_options.rev == 'develop' + new_options = Git.resolve_revision(".", url, rev_options) + assert new_options.rev == "develop" -@patch('pip._internal.vcs.git.Git.get_revision_sha') +@patch("pip._internal.vcs.git.Git.get_revision_sha") def test_git_resolve_revision_not_found_warning(get_sha_mock, caplog): get_sha_mock.return_value = (None, False) - url = 'git+https://git.example.com' - sha = 40 * 'a' + url = "git+https://git.example.com" + sha = 40 * "a" rev_options = Git.make_rev_options(sha) - new_options = Git.resolve_revision('.', url, rev_options) + new_options = Git.resolve_revision(".", url, rev_options) assert new_options.rev == sha rev_options = Git.make_rev_options(sha[:6]) 
- new_options = Git.resolve_revision('.', url, rev_options) - assert new_options.rev == 'aaaaaa' + new_options = Git.resolve_revision(".", url, rev_options) + assert new_options.rev == "aaaaaa" # Check that a warning got logged only for the abbreviated hash. messages = [r.getMessage() for r in caplog.records] - messages = [msg for msg in messages if msg.startswith('Did not find ')] + messages = [msg for msg in messages if msg.startswith("Did not find ")] assert messages == [ "Did not find branch or tag 'aaaaaa', assuming revision or ref." ] @pytest.mark.parametrize( - 'rev_name,result', + "rev_name,result", ( - ('5547fa909e83df8bd743d3978d6667497983a4b7', True), - ('5547fa909', False), - ('5678', False), - ('abc123', False), - ('foo', False), + ("5547fa909e83df8bd743d3978d6667497983a4b7", True), + ("5547fa909", False), + ("5678", False), + ("abc123", False), + ("foo", False), (None, False), ), ) -@patch('pip._internal.vcs.git.Git.get_revision') +@patch("pip._internal.vcs.git.Git.get_revision") def test_git_is_commit_id_equal(mock_get_revision, rev_name, result): """ Test Git.is_commit_id_equal(). """ - mock_get_revision.return_value = '5547fa909e83df8bd743d3978d6667497983a4b7' - assert Git.is_commit_id_equal('/path', rev_name) is result + mock_get_revision.return_value = "5547fa909e83df8bd743d3978d6667497983a4b7" + assert Git.is_commit_id_equal("/path", rev_name) is result # The non-SVN backends all use the same get_netloc_and_auth(), so only test # Git as a representative. @pytest.mark.parametrize( - 'args, expected', + "args, expected", [ # Test a basic case. - (('example.com', 'https'), ('example.com', (None, None))), + (("example.com", "https"), ("example.com", (None, None))), # Test with username and password. 
- (('user:pass@example.com', 'https'), ('user:pass@example.com', (None, None))), + (("user:pass@example.com", "https"), ("user:pass@example.com", (None, None))), ], ) def test_git__get_netloc_and_auth(args, expected): @@ -253,21 +253,21 @@ def test_git__get_netloc_and_auth(args, expected): @pytest.mark.parametrize( - 'args, expected', + "args, expected", [ # Test https. - (('example.com', 'https'), ('example.com', (None, None))), + (("example.com", "https"), ("example.com", (None, None))), # Test https with username and no password. - (('user@example.com', 'https'), ('example.com', ('user', None))), + (("user@example.com", "https"), ("example.com", ("user", None))), # Test https with username and password. - (('user:pass@example.com', 'https'), ('example.com', ('user', 'pass'))), + (("user:pass@example.com", "https"), ("example.com", ("user", "pass"))), # Test https with URL-encoded reserved characters. ( - ('user%3Aname:%23%40%5E@example.com', 'https'), - ('example.com', ('user:name', '#@^')), + ("user%3Aname:%23%40%5E@example.com", "https"), + ("example.com", ("user:name", "#@^")), ), # Test ssh with username and password. - (('user:pass@example.com', 'ssh'), ('user:pass@example.com', (None, None))), + (("user:pass@example.com", "ssh"), ("user:pass@example.com", (None, None))), ], ) def test_subversion__get_netloc_and_auth(args, expected): @@ -286,25 +286,25 @@ def test_git__get_url_rev__idempotent(): Also check that it doesn't change self.url. 
""" - url = 'git+git@git.example.com:MyProject#egg=MyProject' + url = "git+git@git.example.com:MyProject#egg=MyProject" result1 = Git.get_url_rev_and_auth(url) result2 = Git.get_url_rev_and_auth(url) - expected = ('git@git.example.com:MyProject', None, (None, None)) + expected = ("git@git.example.com:MyProject", None, (None, None)) assert result1 == expected assert result2 == expected @pytest.mark.parametrize( - 'url, expected', + "url, expected", [ ( - 'svn+https://svn.example.com/MyProject', - ('https://svn.example.com/MyProject', None, (None, None)), + "svn+https://svn.example.com/MyProject", + ("https://svn.example.com/MyProject", None, (None, None)), ), # Test a "+" in the path portion. ( - 'svn+https://svn.example.com/My+Project', - ('https://svn.example.com/My+Project', None, (None, None)), + "svn+https://svn.example.com/My+Project", + ("https://svn.example.com/My+Project", None, (None, None)), ), ], ) @@ -317,11 +317,11 @@ def test_version_control__get_url_rev_and_auth(url, expected): @pytest.mark.parametrize( - 'url', + "url", [ - 'https://svn.example.com/MyProject', + "https://svn.example.com/MyProject", # Test a URL containing a "+" (but not in the scheme). - 'https://svn.example.com/My+Project', + "https://svn.example.com/My+Project", ], ) def test_version_control__get_url_rev_and_auth__missing_plus(url): @@ -332,38 +332,38 @@ def test_version_control__get_url_rev_and_auth__missing_plus(url): with pytest.raises(ValueError) as excinfo: VersionControl.get_url_rev_and_auth(url) - assert 'malformed VCS url' in str(excinfo.value) + assert "malformed VCS url" in str(excinfo.value) @pytest.mark.parametrize( - 'url, expected', + "url, expected", [ # Test http. ( - 'bzr+http://bzr.myproject.org/MyProject/trunk/#egg=MyProject', - 'http://bzr.myproject.org/MyProject/trunk/', + "bzr+http://bzr.myproject.org/MyProject/trunk/#egg=MyProject", + "http://bzr.myproject.org/MyProject/trunk/", ), # Test https. 
( - 'bzr+https://bzr.myproject.org/MyProject/trunk/#egg=MyProject', - 'https://bzr.myproject.org/MyProject/trunk/', + "bzr+https://bzr.myproject.org/MyProject/trunk/#egg=MyProject", + "https://bzr.myproject.org/MyProject/trunk/", ), # Test ftp. ( - 'bzr+ftp://bzr.myproject.org/MyProject/trunk/#egg=MyProject', - 'ftp://bzr.myproject.org/MyProject/trunk/', + "bzr+ftp://bzr.myproject.org/MyProject/trunk/#egg=MyProject", + "ftp://bzr.myproject.org/MyProject/trunk/", ), # Test sftp. ( - 'bzr+sftp://bzr.myproject.org/MyProject/trunk/#egg=MyProject', - 'sftp://bzr.myproject.org/MyProject/trunk/', + "bzr+sftp://bzr.myproject.org/MyProject/trunk/#egg=MyProject", + "sftp://bzr.myproject.org/MyProject/trunk/", ), # Test launchpad. - ('bzr+lp:MyLaunchpadProject#egg=MyLaunchpadProject', 'lp:MyLaunchpadProject'), + ("bzr+lp:MyLaunchpadProject#egg=MyLaunchpadProject", "lp:MyLaunchpadProject"), # Test ssh (special handling). ( - 'bzr+ssh://bzr.myproject.org/MyProject/trunk/#egg=MyProject', - 'bzr+ssh://bzr.myproject.org/MyProject/trunk/', + "bzr+ssh://bzr.myproject.org/MyProject/trunk/#egg=MyProject", + "bzr+ssh://bzr.myproject.org/MyProject/trunk/", ), ], ) @@ -376,27 +376,27 @@ def test_bazaar__get_url_rev_and_auth(url, expected): @pytest.mark.parametrize( - 'url, expected', + "url, expected", [ # Test an https URL. ( - 'svn+https://svn.example.com/MyProject#egg=MyProject', - ('https://svn.example.com/MyProject', None, (None, None)), + "svn+https://svn.example.com/MyProject#egg=MyProject", + ("https://svn.example.com/MyProject", None, (None, None)), ), # Test an https URL with a username and password. ( - 'svn+https://user:pass@svn.example.com/MyProject#egg=MyProject', - ('https://svn.example.com/MyProject', None, ('user', 'pass')), + "svn+https://user:pass@svn.example.com/MyProject#egg=MyProject", + ("https://svn.example.com/MyProject", None, ("user", "pass")), ), # Test an ssh URL. 
( - 'svn+ssh://svn.example.com/MyProject#egg=MyProject', - ('svn+ssh://svn.example.com/MyProject', None, (None, None)), + "svn+ssh://svn.example.com/MyProject#egg=MyProject", + ("svn+ssh://svn.example.com/MyProject", None, (None, None)), ), # Test an ssh URL with a username. ( - 'svn+ssh://user@svn.example.com/MyProject#egg=MyProject', - ('svn+ssh://user@svn.example.com/MyProject', None, (None, None)), + "svn+ssh://user@svn.example.com/MyProject#egg=MyProject", + ("svn+ssh://user@svn.example.com/MyProject", None, (None, None)), ), ], ) @@ -411,8 +411,8 @@ def test_subversion__get_url_rev_and_auth(url, expected): # The non-SVN backends all use the same make_rev_args(), so only test # Git as a representative. @pytest.mark.parametrize( - 'username, password, expected', - [(None, None, []), ('user', None, []), ('user', hide_value('pass'), [])], + "username, password, expected", + [(None, None, []), ("user", None, []), ("user", hide_value("pass"), [])], ) def test_git__make_rev_args(username, password, expected): """ @@ -423,14 +423,14 @@ def test_git__make_rev_args(username, password, expected): @pytest.mark.parametrize( - 'username, password, expected', + "username, password, expected", [ (None, None, []), - ('user', None, ['--username', 'user']), + ("user", None, ["--username", "user"]), ( - 'user', - hide_value('pass'), - ['--username', 'user', '--password', hide_value('pass')], + "user", + hide_value("pass"), + ["--username", "user", "--password", hide_value("pass")], ), ], ) @@ -446,23 +446,23 @@ def test_subversion__get_url_rev_options(): """ Test Subversion.get_url_rev_options(). 
""" - secret_url = 'svn+https://user:pass@svn.example.com/MyProject@v1.0#egg=MyProject' + secret_url = "svn+https://user:pass@svn.example.com/MyProject@v1.0#egg=MyProject" hidden_url = hide_url(secret_url) url, rev_options = Subversion().get_url_rev_options(hidden_url) - assert url == hide_url('https://svn.example.com/MyProject') - assert rev_options.rev == 'v1.0' + assert url == hide_url("https://svn.example.com/MyProject") + assert rev_options.rev == "v1.0" assert rev_options.extra_args == ( - ['--username', 'user', '--password', hide_value('pass')] + ["--username", "user", "--password", hide_value("pass")] ) def test_get_git_version(): git_version = Git().get_git_version() - assert git_version >= parse_version('1.0.0') + assert git_version >= parse_version("1.0.0") @pytest.mark.parametrize( - 'use_interactive,is_atty,expected', + "use_interactive,is_atty,expected", [ (None, False, False), (None, True, True), @@ -472,7 +472,7 @@ def test_get_git_version(): (True, True, True), ], ) -@patch('sys.stdin.isatty') +@patch("sys.stdin.isatty") def test_subversion__init_use_interactive( mock_isatty, use_interactive, is_atty, expected ): @@ -498,24 +498,24 @@ def test_subversion__call_vcs_version(): @pytest.mark.parametrize( - 'svn_output, expected_version', + "svn_output, expected_version", [ ( - 'svn, version 1.10.3 (r1842928)\n' - ' compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0', + "svn, version 1.10.3 (r1842928)\n" + " compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0", (1, 10, 3), ), - ('svn, version 1.9.7 (r1800392)', (1, 9, 7)), - ('svn, version 1.9.7a1 (r1800392)', ()), - ('svn, version 1.9 (r1800392)', (1, 9)), - ('svn, version .9.7 (r1800392)', ()), - ('svn version 1.9.7 (r1800392)', ()), - ('svn 1.9.7', ()), - ('svn, version . 
.', ()), - ('', ()), + ("svn, version 1.9.7 (r1800392)", (1, 9, 7)), + ("svn, version 1.9.7a1 (r1800392)", ()), + ("svn, version 1.9 (r1800392)", (1, 9)), + ("svn, version .9.7 (r1800392)", ()), + ("svn version 1.9.7 (r1800392)", ()), + ("svn 1.9.7", ()), + ("svn, version . .", ()), + ("", ()), ], ) -@patch('pip._internal.vcs.subversion.Subversion.run_command') +@patch("pip._internal.vcs.subversion.Subversion.run_command") def test_subversion__call_vcs_version_patched( mock_run_command, svn_output, expected_version ): @@ -527,7 +527,7 @@ def test_subversion__call_vcs_version_patched( assert version == expected_version -@patch('pip._internal.vcs.subversion.Subversion.run_command') +@patch("pip._internal.vcs.subversion.Subversion.run_command") def test_subversion__call_vcs_version_svn_not_installed(mock_run_command): """ Test Subversion.call_vcs_version() when svn is not installed. @@ -537,7 +537,7 @@ def test_subversion__call_vcs_version_svn_not_installed(mock_run_command): Subversion().call_vcs_version() -@pytest.mark.parametrize('version', [(), (1,), (1, 8), (1, 8, 0)]) +@pytest.mark.parametrize("version", [(), (1,), (1, 8), (1, 8, 0)]) def test_subversion__get_vcs_version_cached(version): """ Test Subversion.get_vcs_version() with previously cached result. 
@@ -547,8 +547,8 @@ def test_subversion__get_vcs_version_cached(version): assert svn.get_vcs_version() == version -@pytest.mark.parametrize('vcs_version', [(), (1, 7), (1, 8, 0)]) -@patch('pip._internal.vcs.subversion.Subversion.call_vcs_version') +@pytest.mark.parametrize("vcs_version", [(), (1, 7), (1, 8, 0)]) +@patch("pip._internal.vcs.subversion.Subversion.call_vcs_version") def test_subversion__get_vcs_version_call_vcs(mock_call_vcs, vcs_version): """ Test Subversion.get_vcs_version() with mocked output from @@ -563,14 +563,14 @@ def test_subversion__get_vcs_version_call_vcs(mock_call_vcs, vcs_version): @pytest.mark.parametrize( - 'use_interactive,vcs_version,expected_options', + "use_interactive,vcs_version,expected_options", [ - (False, (), ['--non-interactive']), - (False, (1, 7, 0), ['--non-interactive']), - (False, (1, 8, 0), ['--non-interactive']), + (False, (), ["--non-interactive"]), + (False, (1, 7, 0), ["--non-interactive"]), + (False, (1, 8, 0), ["--non-interactive"]), (True, (), []), (True, (1, 7, 0), []), - (True, (1, 8, 0), ['--force-interactive']), + (True, (1, 8, 0), ["--force-interactive"]), ], ) def test_subversion__get_remote_call_options( @@ -586,17 +586,17 @@ def test_subversion__get_remote_call_options( class TestSubversionArgs(TestCase): def setUp(self): - patcher = patch('pip._internal.vcs.versioncontrol.call_subprocess') + patcher = patch("pip._internal.vcs.versioncontrol.call_subprocess") self.addCleanup(patcher.stop) self.call_subprocess_mock = patcher.start() # Test Data. - self.url = 'svn+http://username:password@svn.example.com/' + self.url = "svn+http://username:password@svn.example.com/" # use_interactive is set to False to test that remote call options are # properly added. 
self.svn = Subversion(use_interactive=False) self.rev_options = RevOptions(Subversion) - self.dest = '/tmp/test' + self.dest = "/tmp/test" def assert_call_args(self, args): assert self.call_subprocess_mock.call_args[0][0] == args @@ -605,16 +605,16 @@ def test_obtain(self): self.svn.obtain(self.dest, hide_url(self.url)) self.assert_call_args( [ - 'svn', - 'checkout', - '-q', - '--non-interactive', - '--username', - 'username', - '--password', - hide_value('password'), - hide_url('http://svn.example.com/'), - '/tmp/test', + "svn", + "checkout", + "-q", + "--non-interactive", + "--username", + "username", + "--password", + hide_value("password"), + hide_url("http://svn.example.com/"), + "/tmp/test", ] ) @@ -622,15 +622,15 @@ def test_export(self): self.svn.export(self.dest, hide_url(self.url)) self.assert_call_args( [ - 'svn', - 'export', - '--non-interactive', - '--username', - 'username', - '--password', - hide_value('password'), - hide_url('http://svn.example.com/'), - '/tmp/test', + "svn", + "export", + "--non-interactive", + "--username", + "username", + "--password", + hide_value("password"), + hide_url("http://svn.example.com/"), + "/tmp/test", ] ) @@ -638,28 +638,28 @@ def test_fetch_new(self): self.svn.fetch_new(self.dest, hide_url(self.url), self.rev_options) self.assert_call_args( [ - 'svn', - 'checkout', - '-q', - '--non-interactive', - hide_url('svn+http://username:password@svn.example.com/'), - '/tmp/test', + "svn", + "checkout", + "-q", + "--non-interactive", + hide_url("svn+http://username:password@svn.example.com/"), + "/tmp/test", ] ) def test_fetch_new_revision(self): - rev_options = RevOptions(Subversion, '123') + rev_options = RevOptions(Subversion, "123") self.svn.fetch_new(self.dest, hide_url(self.url), rev_options) self.assert_call_args( [ - 'svn', - 'checkout', - '-q', - '--non-interactive', - '-r', - '123', - hide_url('svn+http://username:password@svn.example.com/'), - '/tmp/test', + "svn", + "checkout", + "-q", + "--non-interactive", + 
"-r", + "123", + hide_url("svn+http://username:password@svn.example.com/"), + "/tmp/test", ] ) @@ -667,14 +667,14 @@ def test_switch(self): self.svn.switch(self.dest, hide_url(self.url), self.rev_options) self.assert_call_args( [ - 'svn', - 'switch', - '--non-interactive', - hide_url('svn+http://username:password@svn.example.com/'), - '/tmp/test', + "svn", + "switch", + "--non-interactive", + hide_url("svn+http://username:password@svn.example.com/"), + "/tmp/test", ] ) def test_update(self): self.svn.update(self.dest, hide_url(self.url), self.rev_options) - self.assert_call_args(['svn', 'update', '--non-interactive', '/tmp/test']) + self.assert_call_args(["svn", "update", "--non-interactive", "/tmp/test"]) diff --git a/tests/unit/test_vcs_mercurial.py b/tests/unit/test_vcs_mercurial.py index 630619b8236..6554d8fc701 100644 --- a/tests/unit/test_vcs_mercurial.py +++ b/tests/unit/test_vcs_mercurial.py @@ -15,19 +15,19 @@ def test_mercurial_switch_updates_config_file_when_found(tmpdir): hg = Mercurial() options = hg.make_rev_options() - hg_dir = os.path.join(tmpdir, '.hg') + hg_dir = os.path.join(tmpdir, ".hg") os.mkdir(hg_dir) config = configparser.RawConfigParser() - config.add_section('paths') - config.set('paths', 'default', 'old_url') + config.add_section("paths") + config.set("paths", "default", "old_url") - hgrc_path = os.path.join(hg_dir, 'hgrc') - with open(hgrc_path, 'w') as f: + hgrc_path = os.path.join(hg_dir, "hgrc") + with open(hgrc_path, "w") as f: config.write(f) - hg.switch(tmpdir, hide_url('new_url'), options) + hg.switch(tmpdir, hide_url("new_url"), options) config.read(hgrc_path) - default_path = config.get('paths', 'default') - assert default_path == 'new_url' + default_path = config.get("paths", "default") + assert default_path == "new_url" diff --git a/tests/unit/test_wheel.py b/tests/unit/test_wheel.py index cd4ed4d95e5..26b9e30b9c9 100644 --- a/tests/unit/test_wheel.py +++ b/tests/unit/test_wheel.py @@ -14,10 +14,7 @@ from 
pip._internal.req.req_install import InstallRequirement from pip._internal.utils.compat import WINDOWS from pip._internal.utils.unpacking import unpack_file -from pip._internal.wheel import ( - MissingCallableSuffix, - _raise_for_invalid_entrypoint, -) +from pip._internal.wheel import MissingCallableSuffix, _raise_for_invalid_entrypoint from tests.lib import DATA_DIR, assert_paths_equal @@ -44,18 +41,18 @@ def make_test_install_req(base_name=None): Return an InstallRequirement object for testing purposes. """ if base_name is None: - base_name = 'pendulum-2.0.4' + base_name = "pendulum-2.0.4" - req = Requirement('pendulum') + req = Requirement("pendulum") link_url = ( - 'https://files.pythonhosted.org/packages/aa/{base_name}.tar.gz' - '#sha256=cf535d36c063575d4752af36df928882b2e0e31541b4482c97d637527' - '85f9fcb' + "https://files.pythonhosted.org/packages/aa/{base_name}.tar.gz" + "#sha256=cf535d36c063575d4752af36df928882b2e0e31541b4482c97d637527" + "85f9fcb" ).format(base_name=base_name) link = Link( url=link_url, - comes_from='https://pypi.org/simple/pendulum/', - requires_python='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*', + comes_from="https://pypi.org/simple/pendulum/", + requires_python=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", ) req = InstallRequirement( req=req, @@ -63,17 +60,17 @@ def make_test_install_req(base_name=None): constraint=False, editable=False, link=link, - source_dir='/tmp/pip-install-9py5m2z1/pendulum', + source_dir="/tmp/pip-install-9py5m2z1/pendulum", ) return req @pytest.mark.parametrize( - 'file_tag, expected', + "file_tag, expected", [ - (('py27', 'none', 'any'), 'py27-none-any'), - (('cp33', 'cp32dmu', 'linux_x86_64'), 'cp33-cp32dmu-linux_x86_64'), + (("py27", "none", "any"), "py27-none-any"), + (("cp33", "cp32dmu", "linux_x86_64"), "cp33-cp32dmu-linux_x86_64"), ], ) def test_format_tag(file_tag, expected): @@ -84,14 +81,14 @@ def test_format_tag(file_tag, expected): @pytest.mark.parametrize( "base_name, should_unpack, cache_available, 
expected", [ - ('pendulum-2.0.4', False, False, False), + ("pendulum-2.0.4", False, False, False), # The following cases test should_unpack=True. # Test _contains_egg_info() returning True. - ('pendulum-2.0.4', True, True, False), - ('pendulum-2.0.4', True, False, True), + ("pendulum-2.0.4", True, True, False), + ("pendulum-2.0.4", True, False, True), # Test _contains_egg_info() returning False. - ('pendulum', True, True, True), - ('pendulum', True, False, True), + ("pendulum", True, True, True), + ("pendulum", True, False, True), ], ) def test_should_use_ephemeral_cache__issue_6197( @@ -132,15 +129,15 @@ def test_should_use_ephemeral_cache__disallow_binaries_and_vcs_checkout( Test that disallowing binaries (e.g. from passing --global-option) causes should_use_ephemeral_cache() to return None for VCS checkouts. """ - req = Requirement('pendulum') - link = Link(url='git+https://git.example.com/pendulum.git') + req = Requirement("pendulum") + link = Link(url="git+https://git.example.com/pendulum.git") req = InstallRequirement( req=req, comes_from=None, constraint=False, editable=False, link=link, - source_dir='/tmp/pip-install-9py5m2z1/pendulum', + source_dir="/tmp/pip-install-9py5m2z1/pendulum", ) assert not req.is_wheel assert req.link.is_vcs @@ -161,47 +158,47 @@ def test_format_command_result__INFO(caplog): caplog.set_level(logging.INFO) actual = wheel.format_command_result( # Include an argument with a space to test argument quoting. - command_args=['arg1', 'second arg'], - command_output='output line 1\noutput line 2\n', + command_args=["arg1", "second arg"], + command_output="output line 1\noutput line 2\n", ) assert actual.splitlines() == [ "Command arguments: arg1 'second arg'", - 'Command output: [use --verbose to show]', + "Command output: [use --verbose to show]", ] @pytest.mark.parametrize( - 'command_output', + "command_output", [ # Test trailing newline. - 'output line 1\noutput line 2\n', + "output line 1\noutput line 2\n", # Test no trailing newline. 
- 'output line 1\noutput line 2', + "output line 1\noutput line 2", ], ) def test_format_command_result__DEBUG(caplog, command_output): caplog.set_level(logging.DEBUG) actual = wheel.format_command_result( - command_args=['arg1', 'arg2'], command_output=command_output + command_args=["arg1", "arg2"], command_output=command_output ) assert actual.splitlines() == [ "Command arguments: arg1 arg2", - 'Command output:', - 'output line 1', - 'output line 2', - '----------------------------------------', + "Command output:", + "output line 1", + "output line 2", + "----------------------------------------", ] -@pytest.mark.parametrize('log_level', ['DEBUG', 'INFO']) +@pytest.mark.parametrize("log_level", ["DEBUG", "INFO"]) def test_format_command_result__empty_output(caplog, log_level): caplog.set_level(log_level) actual = wheel.format_command_result( - command_args=['arg1', 'arg2'], command_output='' + command_args=["arg1", "arg2"], command_output="" ) assert actual.splitlines() == [ "Command arguments: arg1 arg2", - 'Command output: None', + "Command output: None", ] @@ -209,17 +206,17 @@ def call_get_legacy_build_wheel_path(caplog, names): req = make_test_install_req() wheel_path = wheel.get_legacy_build_wheel_path( names=names, - temp_dir='/tmp/abcd', + temp_dir="/tmp/abcd", req=req, - command_args=['arg1', 'arg2'], - command_output='output line 1\noutput line 2\n', + command_args=["arg1", "arg2"], + command_output="output line 1\noutput line 2\n", ) return wheel_path def test_get_legacy_build_wheel_path(caplog): - actual = call_get_legacy_build_wheel_path(caplog, names=['name']) - assert_paths_equal(actual, '/tmp/abcd/name') + actual = call_get_legacy_build_wheel_path(caplog, names=["name"]) + assert_paths_equal(actual, "/tmp/abcd/name") assert not caplog.records @@ -229,27 +226,27 @@ def test_get_legacy_build_wheel_path__no_names(caplog): assert actual is None assert len(caplog.records) == 1 record = caplog.records[0] - assert record.levelname == 'WARNING' + assert 
record.levelname == "WARNING" assert record.message.splitlines() == [ "Legacy build of wheel for 'pendulum' created no files.", "Command arguments: arg1 arg2", - 'Command output: [use --verbose to show]', + "Command output: [use --verbose to show]", ] def test_get_legacy_build_wheel_path__multiple_names(caplog): caplog.set_level(logging.INFO) # Deliberately pass the names in non-sorted order. - actual = call_get_legacy_build_wheel_path(caplog, names=['name2', 'name1']) - assert_paths_equal(actual, '/tmp/abcd/name1') + actual = call_get_legacy_build_wheel_path(caplog, names=["name2", "name1"]) + assert_paths_equal(actual, "/tmp/abcd/name1") assert len(caplog.records) == 1 record = caplog.records[0] - assert record.levelname == 'WARNING' + assert record.levelname == "WARNING" assert record.message.splitlines() == [ "Legacy build of wheel for 'pendulum' created more than one file.", "Filenames (choosing first): ['name1', 'name2']", "Command arguments: arg1 arg2", - 'Command output: [use --verbose to show]', + "Command output: [use --verbose to show]", ] @@ -273,7 +270,7 @@ def test_get_entrypoints(tmpdir, console_scripts): ) assert wheel.get_entrypoints(str(entry_points)) == ( - dict([console_scripts.split(' = ')]), + dict([console_scripts.split(" = ")]), {}, ) @@ -291,15 +288,15 @@ def test_raise_for_invalid_entrypoint_fail(entrypoint): @pytest.mark.parametrize( "outrows, expected", [ - ([('', '', 'a'), ('', '', '')], [('', '', ''), ('', '', 'a')]), + ([("", "", "a"), ("", "", "")], [("", "", ""), ("", "", "a")]), ( [ # Include an int to check avoiding the following error: # > TypeError: '<' not supported between instances of 'str' and 'int' - ('', '', 1), - ('', '', ''), + ("", "", 1), + ("", "", ""), ], - [('', '', ''), ('', '', 1)], + [("", "", ""), ("", "", 1)], ), ], ) @@ -309,17 +306,17 @@ def test_sorted_outrows(outrows, expected): def call_get_csv_rows_for_installed(tmpdir, text): - path = tmpdir.joinpath('temp.txt') + path = tmpdir.joinpath("temp.txt") 
path.write_text(text) # Test that an installed file appearing in RECORD has its filename # updated in the new RECORD file. - installed = {'a': 'z'} + installed = {"a": "z"} changed = set() generated = [] - lib_dir = '/lib/dir' + lib_dir = "/lib/dir" - with wheel.open_for_csv(path, 'r') as f: + with wheel.open_for_csv(path, "r") as f: reader = csv.reader(f) outrows = wheel.get_csv_rows_for_installed( reader, @@ -340,7 +337,7 @@ def test_get_csv_rows_for_installed(tmpdir, caplog): ) outrows = call_get_csv_rows_for_installed(tmpdir, text) - expected = [('z', 'b', 'c'), ('d', 'e', 'f')] + expected = [("z", "b", "c"), ("d", "e", "f")] assert outrows == expected # Check there were no warnings. assert len(caplog.records) == 0 @@ -356,7 +353,7 @@ def test_get_csv_rows_for_installed__long_lines(tmpdir, caplog): ) outrows = call_get_csv_rows_for_installed(tmpdir, text) - expected = [('z', 'b', 'c', 'd'), ('e', 'f', 'g'), ('h', 'i', 'j', 'k')] + expected = [("z", "b", "c", "d"), ("e", "f", "g"), ("h", "i", "j", "k")] assert outrows == expected messages = [rec.message for rec in caplog.records] @@ -368,34 +365,34 @@ def test_get_csv_rows_for_installed__long_lines(tmpdir, caplog): def test_wheel_version(tmpdir, data): - future_wheel = 'futurewheel-1.9-py2.py3-none-any.whl' - broken_wheel = 'brokenwheel-1.0-py2.py3-none-any.whl' + future_wheel = "futurewheel-1.9-py2.py3-none-any.whl" + broken_wheel = "brokenwheel-1.0-py2.py3-none-any.whl" future_version = (1, 9) - unpack_file(data.packages.joinpath(future_wheel), tmpdir + 'future') - unpack_file(data.packages.joinpath(broken_wheel), tmpdir + 'broken') + unpack_file(data.packages.joinpath(future_wheel), tmpdir + "future") + unpack_file(data.packages.joinpath(broken_wheel), tmpdir + "broken") - assert wheel.wheel_version(tmpdir + 'future') == future_version - assert not wheel.wheel_version(tmpdir + 'broken') + assert wheel.wheel_version(tmpdir + "future") == future_version + assert not wheel.wheel_version(tmpdir + "broken") def 
test_python_tag(): wheelnames = [ - 'simplewheel-1.0-py2.py3-none-any.whl', - 'simplewheel-1.0-py27-none-any.whl', - 'simplewheel-2.0-1-py2.py3-none-any.whl', + "simplewheel-1.0-py2.py3-none-any.whl", + "simplewheel-1.0-py27-none-any.whl", + "simplewheel-2.0-1-py2.py3-none-any.whl", ] newnames = [ - 'simplewheel-1.0-py37-none-any.whl', - 'simplewheel-1.0-py37-none-any.whl', - 'simplewheel-2.0-1-py37-none-any.whl', + "simplewheel-1.0-py37-none-any.whl", + "simplewheel-1.0-py37-none-any.whl", + "simplewheel-2.0-1-py37-none-any.whl", ] for name, new in zip(wheelnames, newnames): - assert wheel.replace_python_tag(name, 'py37') == new + assert wheel.replace_python_tag(name, "py37") == new def test_check_compatibility(): - name = 'test' + name = "test" vc = wheel.VERSION_COMPATIBLE # Major version is higher - should be incompatible @@ -404,7 +401,7 @@ def test_check_compatibility(): # test raises with correct error with pytest.raises(UnsupportedWheel) as e: wheel.check_compatibility(higher_v, name) - assert 'is not compatible' in str(e) + assert "is not compatible" in str(e) # Should only log.warning - minor version is greater higher_v = (vc[0], vc[1] + 1) @@ -420,127 +417,127 @@ def test_check_compatibility(): class TestWheelFile(object): def test_std_wheel_pattern(self): - w = wheel.Wheel('simple-1.1.1-py2-none-any.whl') - assert w.name == 'simple' - assert w.version == '1.1.1' - assert w.pyversions == ['py2'] - assert w.abis == ['none'] - assert w.plats == ['any'] + w = wheel.Wheel("simple-1.1.1-py2-none-any.whl") + assert w.name == "simple" + assert w.version == "1.1.1" + assert w.pyversions == ["py2"] + assert w.abis == ["none"] + assert w.plats == ["any"] def test_wheel_pattern_multi_values(self): - w = wheel.Wheel('simple-1.1-py2.py3-abi1.abi2-any.whl') - assert w.name == 'simple' - assert w.version == '1.1' - assert w.pyversions == ['py2', 'py3'] - assert w.abis == ['abi1', 'abi2'] - assert w.plats == ['any'] + w = 
wheel.Wheel("simple-1.1-py2.py3-abi1.abi2-any.whl") + assert w.name == "simple" + assert w.version == "1.1" + assert w.pyversions == ["py2", "py3"] + assert w.abis == ["abi1", "abi2"] + assert w.plats == ["any"] def test_wheel_with_build_tag(self): # pip doesn't do anything with build tags, but theoretically, we might # see one, in this case the build tag = '4' - w = wheel.Wheel('simple-1.1-4-py2-none-any.whl') - assert w.name == 'simple' - assert w.version == '1.1' - assert w.pyversions == ['py2'] - assert w.abis == ['none'] - assert w.plats == ['any'] + w = wheel.Wheel("simple-1.1-4-py2-none-any.whl") + assert w.name == "simple" + assert w.version == "1.1" + assert w.pyversions == ["py2"] + assert w.abis == ["none"] + assert w.plats == ["any"] def test_single_digit_version(self): - w = wheel.Wheel('simple-1-py2-none-any.whl') - assert w.version == '1' + w = wheel.Wheel("simple-1-py2-none-any.whl") + assert w.version == "1" def test_non_pep440_version(self): - w = wheel.Wheel('simple-_invalid_-py2-none-any.whl') - assert w.version == '-invalid-' + w = wheel.Wheel("simple-_invalid_-py2-none-any.whl") + assert w.version == "-invalid-" def test_missing_version_raises(self): with pytest.raises(InvalidWheelFilename): - wheel.Wheel('Cython-cp27-none-linux_x86_64.whl') + wheel.Wheel("Cython-cp27-none-linux_x86_64.whl") def test_invalid_filename_raises(self): with pytest.raises(InvalidWheelFilename): - wheel.Wheel('invalid.whl') + wheel.Wheel("invalid.whl") def test_supported_single_version(self): """ Test single-version wheel is known to be supported """ - w = wheel.Wheel('simple-0.1-py2-none-any.whl') - assert w.supported(tags=[('py2', 'none', 'any')]) + w = wheel.Wheel("simple-0.1-py2-none-any.whl") + assert w.supported(tags=[("py2", "none", "any")]) def test_supported_multi_version(self): """ Test multi-version wheel is known to be supported """ - w = wheel.Wheel('simple-0.1-py2.py3-none-any.whl') - assert w.supported(tags=[('py3', 'none', 'any')]) + w = 
wheel.Wheel("simple-0.1-py2.py3-none-any.whl") + assert w.supported(tags=[("py3", "none", "any")]) def test_not_supported_version(self): """ Test unsupported wheel is known to be unsupported """ - w = wheel.Wheel('simple-0.1-py2-none-any.whl') - assert not w.supported(tags=[('py1', 'none', 'any')]) + w = wheel.Wheel("simple-0.1-py2-none-any.whl") + assert not w.supported(tags=[("py1", "none", "any")]) - @patch('sys.platform', 'darwin') - @patch('pip._internal.pep425tags.get_abbr_impl', lambda: 'cp') - @patch('pip._internal.pep425tags.get_platform', lambda: 'macosx_10_9_intel') + @patch("sys.platform", "darwin") + @patch("pip._internal.pep425tags.get_abbr_impl", lambda: "cp") + @patch("pip._internal.pep425tags.get_platform", lambda: "macosx_10_9_intel") def test_supported_osx_version(self): """ Wheels built for macOS 10.6 are supported on 10.9 """ - tags = pep425tags.get_supported(['27'], False) - w = wheel.Wheel('simple-0.1-cp27-none-macosx_10_6_intel.whl') + tags = pep425tags.get_supported(["27"], False) + w = wheel.Wheel("simple-0.1-cp27-none-macosx_10_6_intel.whl") assert w.supported(tags=tags) - w = wheel.Wheel('simple-0.1-cp27-none-macosx_10_9_intel.whl') + w = wheel.Wheel("simple-0.1-cp27-none-macosx_10_9_intel.whl") assert w.supported(tags=tags) - @patch('sys.platform', 'darwin') - @patch('pip._internal.pep425tags.get_abbr_impl', lambda: 'cp') - @patch('pip._internal.pep425tags.get_platform', lambda: 'macosx_10_6_intel') + @patch("sys.platform", "darwin") + @patch("pip._internal.pep425tags.get_abbr_impl", lambda: "cp") + @patch("pip._internal.pep425tags.get_platform", lambda: "macosx_10_6_intel") def test_not_supported_osx_version(self): """ Wheels built for macOS 10.9 are not supported on 10.6 """ - tags = pep425tags.get_supported(['27'], False) - w = wheel.Wheel('simple-0.1-cp27-none-macosx_10_9_intel.whl') + tags = pep425tags.get_supported(["27"], False) + w = wheel.Wheel("simple-0.1-cp27-none-macosx_10_9_intel.whl") assert not w.supported(tags=tags) - 
@patch('sys.platform', 'darwin') - @patch('pip._internal.pep425tags.get_abbr_impl', lambda: 'cp') + @patch("sys.platform", "darwin") + @patch("pip._internal.pep425tags.get_abbr_impl", lambda: "cp") def test_supported_multiarch_darwin(self): """ Multi-arch wheels (intel) are supported on components (i386, x86_64) """ with patch( - 'pip._internal.pep425tags.get_platform', lambda: 'macosx_10_5_universal' + "pip._internal.pep425tags.get_platform", lambda: "macosx_10_5_universal" ): - universal = pep425tags.get_supported(['27'], False) + universal = pep425tags.get_supported(["27"], False) with patch( - 'pip._internal.pep425tags.get_platform', lambda: 'macosx_10_5_intel' + "pip._internal.pep425tags.get_platform", lambda: "macosx_10_5_intel" ): - intel = pep425tags.get_supported(['27'], False) + intel = pep425tags.get_supported(["27"], False) with patch( - 'pip._internal.pep425tags.get_platform', lambda: 'macosx_10_5_x86_64' + "pip._internal.pep425tags.get_platform", lambda: "macosx_10_5_x86_64" ): - x64 = pep425tags.get_supported(['27'], False) - with patch('pip._internal.pep425tags.get_platform', lambda: 'macosx_10_5_i386'): - i386 = pep425tags.get_supported(['27'], False) - with patch('pip._internal.pep425tags.get_platform', lambda: 'macosx_10_5_ppc'): - ppc = pep425tags.get_supported(['27'], False) + x64 = pep425tags.get_supported(["27"], False) + with patch("pip._internal.pep425tags.get_platform", lambda: "macosx_10_5_i386"): + i386 = pep425tags.get_supported(["27"], False) + with patch("pip._internal.pep425tags.get_platform", lambda: "macosx_10_5_ppc"): + ppc = pep425tags.get_supported(["27"], False) with patch( - 'pip._internal.pep425tags.get_platform', lambda: 'macosx_10_5_ppc64' + "pip._internal.pep425tags.get_platform", lambda: "macosx_10_5_ppc64" ): - ppc64 = pep425tags.get_supported(['27'], False) + ppc64 = pep425tags.get_supported(["27"], False) - w = wheel.Wheel('simple-0.1-cp27-none-macosx_10_5_intel.whl') + w = 
wheel.Wheel("simple-0.1-cp27-none-macosx_10_5_intel.whl") assert w.supported(tags=intel) assert w.supported(tags=x64) assert w.supported(tags=i386) assert not w.supported(tags=universal) assert not w.supported(tags=ppc) assert not w.supported(tags=ppc64) - w = wheel.Wheel('simple-0.1-cp27-none-macosx_10_5_universal.whl') + w = wheel.Wheel("simple-0.1-cp27-none-macosx_10_5_universal.whl") assert w.supported(tags=universal) assert w.supported(tags=intel) assert w.supported(tags=x64) @@ -548,25 +545,25 @@ def test_supported_multiarch_darwin(self): assert w.supported(tags=ppc) assert w.supported(tags=ppc64) - @patch('sys.platform', 'darwin') - @patch('pip._internal.pep425tags.get_abbr_impl', lambda: 'cp') + @patch("sys.platform", "darwin") + @patch("pip._internal.pep425tags.get_abbr_impl", lambda: "cp") def test_not_supported_multiarch_darwin(self): """ Single-arch wheels (x86_64) are not supported on multi-arch (intel) """ with patch( - 'pip._internal.pep425tags.get_platform', lambda: 'macosx_10_5_universal' + "pip._internal.pep425tags.get_platform", lambda: "macosx_10_5_universal" ): - universal = pep425tags.get_supported(['27'], False) + universal = pep425tags.get_supported(["27"], False) with patch( - 'pip._internal.pep425tags.get_platform', lambda: 'macosx_10_5_intel' + "pip._internal.pep425tags.get_platform", lambda: "macosx_10_5_intel" ): - intel = pep425tags.get_supported(['27'], False) + intel = pep425tags.get_supported(["27"], False) - w = wheel.Wheel('simple-0.1-cp27-none-macosx_10_5_i386.whl') + w = wheel.Wheel("simple-0.1-cp27-none-macosx_10_5_i386.whl") assert not w.supported(tags=intel) assert not w.supported(tags=universal) - w = wheel.Wheel('simple-0.1-cp27-none-macosx_10_5_x86_64.whl') + w = wheel.Wheel("simple-0.1-cp27-none-macosx_10_5_x86_64.whl") assert not w.supported(tags=intel) assert not w.supported(tags=universal) @@ -574,24 +571,24 @@ def test_support_index_min(self): """ Test results from `support_index_min` """ - tags = [('py2', 'none', 
'TEST'), ('py2', 'TEST', 'any'), ('py2', 'none', 'any')] - w = wheel.Wheel('simple-0.1-py2-none-any.whl') + tags = [("py2", "none", "TEST"), ("py2", "TEST", "any"), ("py2", "none", "any")] + w = wheel.Wheel("simple-0.1-py2-none-any.whl") assert w.support_index_min(tags=tags) == 2 - w = wheel.Wheel('simple-0.1-py2-none-TEST.whl') + w = wheel.Wheel("simple-0.1-py2-none-TEST.whl") assert w.support_index_min(tags=tags) == 0 def test_support_index_min__none_supported(self): """ Test a wheel not supported by the given tags. """ - w = wheel.Wheel('simple-0.1-py2-none-any.whl') + w = wheel.Wheel("simple-0.1-py2-none-any.whl") with pytest.raises(ValueError): w.support_index_min(tags=[]) def test_unpack_wheel_no_flatten(self, tmpdir): - filepath = os.path.join(DATA_DIR, 'packages', 'meta-1.0-py2.py3-none-any.whl') + filepath = os.path.join(DATA_DIR, "packages", "meta-1.0-py2.py3-none-any.whl") unpack_file(filepath, tmpdir) - assert os.path.isdir(os.path.join(tmpdir, 'meta-1.0.dist-info')) + assert os.path.isdir(os.path.join(tmpdir, "meta-1.0.dist-info")) def test_purelib_platlib(self, data): """ @@ -616,8 +613,8 @@ def test_version_underscore_conversion(self): Test that we convert '_' to '-' for versions parsed out of wheel filenames """ - w = wheel.Wheel('simple-0.1_1-py2-none-any.whl') - assert w.version == '0.1-1' + w = wheel.Wheel("simple-0.1_1-py2-none-any.whl") + assert w.version == "0.1-1" class TestMoveWheelFiles(object): @@ -626,33 +623,33 @@ class TestMoveWheelFiles(object): """ def prep(self, data, tmpdir): - self.name = 'sample' - self.wheelpath = data.packages.joinpath('sample-1.2.0-py2.py3-none-any.whl') - self.req = Requirement('sample') - self.src = os.path.join(tmpdir, 'src') - self.dest = os.path.join(tmpdir, 'dest') + self.name = "sample" + self.wheelpath = data.packages.joinpath("sample-1.2.0-py2.py3-none-any.whl") + self.req = Requirement("sample") + self.src = os.path.join(tmpdir, "src") + self.dest = os.path.join(tmpdir, "dest") 
unpack_file(self.wheelpath, self.src) self.scheme = { - 'scripts': os.path.join(self.dest, 'bin'), - 'purelib': os.path.join(self.dest, 'lib'), - 'data': os.path.join(self.dest, 'data'), + "scripts": os.path.join(self.dest, "bin"), + "purelib": os.path.join(self.dest, "lib"), + "data": os.path.join(self.dest, "data"), } - self.src_dist_info = os.path.join(self.src, 'sample-1.2.0.dist-info') + self.src_dist_info = os.path.join(self.src, "sample-1.2.0.dist-info") self.dest_dist_info = os.path.join( - self.scheme['purelib'], 'sample-1.2.0.dist-info' + self.scheme["purelib"], "sample-1.2.0.dist-info" ) def assert_installed(self): # lib - assert os.path.isdir(os.path.join(self.scheme['purelib'], 'sample')) + assert os.path.isdir(os.path.join(self.scheme["purelib"], "sample")) # dist-info - metadata = os.path.join(self.dest_dist_info, 'METADATA') + metadata = os.path.join(self.dest_dist_info, "METADATA") assert os.path.isfile(metadata) # data files - data_file = os.path.join(self.scheme['data'], 'my_data', 'data_file') + data_file = os.path.join(self.scheme["data"], "my_data", "data_file") assert os.path.isfile(data_file) # package data - pkg_data = os.path.join(self.scheme['purelib'], 'sample', 'package_data.dat') + pkg_data = os.path.join(self.scheme["purelib"], "sample", "package_data.dat") assert os.path.isfile(pkg_data) def test_std_install(self, data, tmpdir): @@ -661,15 +658,15 @@ def test_std_install(self, data, tmpdir): self.assert_installed() def test_install_prefix(self, data, tmpdir): - prefix = os.path.join(os.path.sep, 'some', 'path') + prefix = os.path.join(os.path.sep, "some", "path") self.prep(data, tmpdir) wheel.move_wheel_files( self.name, self.req, self.src, root=tmpdir, prefix=prefix ) - bin_dir = 'Scripts' if WINDOWS else 'bin' - assert os.path.exists(os.path.join(tmpdir, 'some', 'path', bin_dir)) - assert os.path.exists(os.path.join(tmpdir, 'some', 'path', 'my_data')) + bin_dir = "Scripts" if WINDOWS else "bin" + assert 
os.path.exists(os.path.join(tmpdir, "some", "path", bin_dir)) + assert os.path.exists(os.path.join(tmpdir, "some", "path", "my_data")) def test_dist_info_contains_empty_dir(self, data, tmpdir): """ @@ -677,17 +674,17 @@ def test_dist_info_contains_empty_dir(self, data, tmpdir): """ # e.g. https://github.com/pypa/pip/issues/1632#issuecomment-38027275 self.prep(data, tmpdir) - src_empty_dir = os.path.join(self.src_dist_info, 'empty_dir', 'empty_dir') + src_empty_dir = os.path.join(self.src_dist_info, "empty_dir", "empty_dir") os.makedirs(src_empty_dir) assert os.path.isdir(src_empty_dir) wheel.move_wheel_files(self.name, self.req, self.src, scheme=self.scheme) self.assert_installed() - assert not os.path.isdir(os.path.join(self.dest_dist_info, 'empty_dir')) + assert not os.path.isdir(os.path.join(self.dest_dist_info, "empty_dir")) class TestWheelBuilder(object): def test_skip_building_wheels(self, caplog): - with patch('pip._internal.wheel.WheelBuilder._build_one') as mock_build_one: + with patch("pip._internal.wheel.WheelBuilder._build_one") as mock_build_one: wheel_req = Mock(is_wheel=True, editable=False, constraint=False) wb = wheel.WheelBuilder(preparer=Mock(), wheel_cache=Mock(cache_dir=None)) with caplog.at_level(logging.INFO): @@ -698,22 +695,22 @@ def test_skip_building_wheels(self, caplog): class TestMessageAboutScriptsNotOnPATH(object): def _template(self, paths, scripts): - with patch.dict('os.environ', {'PATH': os.pathsep.join(paths)}): + with patch.dict("os.environ", {"PATH": os.pathsep.join(paths)}): return wheel.message_about_scripts_not_on_PATH(scripts) def test_no_script(self): - retval = self._template(paths=['/a/b', '/c/d/bin'], scripts=[]) + retval = self._template(paths=["/a/b", "/c/d/bin"], scripts=[]) assert retval is None def test_single_script__single_dir_not_on_PATH(self): - retval = self._template(paths=['/a/b', '/c/d/bin'], scripts=['/c/d/foo']) + retval = self._template(paths=["/a/b", "/c/d/bin"], scripts=["/c/d/foo"]) assert retval is 
not None assert "--no-warn-script-location" in retval assert "foo is installed in '/c/d'" in retval def test_two_script__single_dir_not_on_PATH(self): retval = self._template( - paths=['/a/b', '/c/d/bin'], scripts=['/c/d/foo', '/c/d/baz'] + paths=["/a/b", "/c/d/bin"], scripts=["/c/d/foo", "/c/d/baz"] ) assert retval is not None assert "--no-warn-script-location" in retval @@ -721,8 +718,8 @@ def test_two_script__single_dir_not_on_PATH(self): def test_multi_script__multi_dir_not_on_PATH(self): retval = self._template( - paths=['/a/b', '/c/d/bin'], - scripts=['/c/d/foo', '/c/d/bar', '/c/d/baz', '/a/b/c/spam'], + paths=["/a/b", "/c/d/bin"], + scripts=["/c/d/foo", "/c/d/bar", "/c/d/baz", "/a/b/c/spam"], ) assert retval is not None assert "--no-warn-script-location" in retval @@ -731,8 +728,8 @@ def test_multi_script__multi_dir_not_on_PATH(self): def test_multi_script_all__multi_dir_not_on_PATH(self): retval = self._template( - paths=['/a/b', '/c/d/bin'], - scripts=['/c/d/foo', '/c/d/bar', '/c/d/baz', '/a/b/c/spam', '/a/b/c/eggs'], + paths=["/a/b", "/c/d/bin"], + scripts=["/c/d/foo", "/c/d/bar", "/c/d/baz", "/a/b/c/spam", "/a/b/c/eggs"], ) assert retval is not None assert "--no-warn-script-location" in retval @@ -741,29 +738,29 @@ def test_multi_script_all__multi_dir_not_on_PATH(self): def test_two_script__single_dir_on_PATH(self): retval = self._template( - paths=['/a/b', '/c/d/bin'], scripts=['/a/b/foo', '/a/b/baz'] + paths=["/a/b", "/c/d/bin"], scripts=["/a/b/foo", "/a/b/baz"] ) assert retval is None def test_multi_script__multi_dir_on_PATH(self): retval = self._template( - paths=['/a/b', '/c/d/bin'], - scripts=['/a/b/foo', '/a/b/bar', '/a/b/baz', '/c/d/bin/spam'], + paths=["/a/b", "/c/d/bin"], + scripts=["/a/b/foo", "/a/b/bar", "/a/b/baz", "/c/d/bin/spam"], ) assert retval is None def test_multi_script__single_dir_on_PATH(self): retval = self._template( - paths=['/a/b', '/c/d/bin'], scripts=['/a/b/foo', '/a/b/bar', '/a/b/baz'] + paths=["/a/b", "/c/d/bin"], 
scripts=["/a/b/foo", "/a/b/bar", "/a/b/baz"] ) assert retval is None def test_single_script__single_dir_on_PATH(self): - retval = self._template(paths=['/a/b', '/c/d/bin'], scripts=['/a/b/foo']) + retval = self._template(paths=["/a/b", "/c/d/bin"], scripts=["/a/b/foo"]) assert retval is None def test_PATH_check_case_insensitive_on_windows(self): - retval = self._template(paths=['C:\\A\\b'], scripts=['c:\\a\\b\\c', 'C:/A/b/d']) + retval = self._template(paths=["C:\\A\\b"], scripts=["c:\\a\\b\\c", "C:/A/b/d"]) if WINDOWS: assert retval is None else: @@ -771,19 +768,19 @@ def test_PATH_check_case_insensitive_on_windows(self): def test_trailing_ossep_removal(self): retval = self._template( - paths=[os.path.join('a', 'b', '')], scripts=[os.path.join('a', 'b', 'c')] + paths=[os.path.join("a", "b", "")], scripts=[os.path.join("a", "b", "c")] ) assert retval is None def test_missing_PATH_env_treated_as_empty_PATH_env(self): - scripts = ['a/b/foo'] + scripts = ["a/b/foo"] env = os.environ.copy() - del env['PATH'] - with patch.dict('os.environ', env, clear=True): + del env["PATH"] + with patch.dict("os.environ", env, clear=True): retval_missing = wheel.message_about_scripts_not_on_PATH(scripts) - with patch.dict('os.environ', {'PATH': ''}): + with patch.dict("os.environ", {"PATH": ""}): retval_empty = wheel.message_about_scripts_not_on_PATH(scripts) assert retval_missing == retval_empty @@ -797,10 +794,10 @@ def prep(self, tmpdir): with open(str(self.test_file), "w") as fp: fp.truncate(self.test_file_len) self.test_file_hash = ( - '5647f05ec18958947d32874eeb788fa396a05d0bab7c1b71f112ceb7e9b31eee' + "5647f05ec18958947d32874eeb788fa396a05d0bab7c1b71f112ceb7e9b31eee" ) self.test_file_hash_encoded = ( - 'sha256=VkfwXsGJWJR9ModO63iPo5agXQurfBtx8RLOt-mzHu4' + "sha256=VkfwXsGJWJR9ModO63iPo5agXQurfBtx8RLOt-mzHu4" ) def test_hash_file(self, tmpdir): diff --git a/tools/automation/vendoring/__init__.py b/tools/automation/vendoring/__init__.py index 9ef0b41a0a6..72c88e0e440 100644 --- 
a/tools/automation/vendoring/__init__.py +++ b/tools/automation/vendoring/__init__.py @@ -10,18 +10,18 @@ import invoke import requests -TASK_NAME = 'update' +TASK_NAME = "update" -FILE_WHITE_LIST = ('Makefile', 'vendor.txt', '__init__.py', 'README.rst') +FILE_WHITE_LIST = ("Makefile", "vendor.txt", "__init__.py", "README.rst") # libraries that have directories with different names -LIBRARY_DIRNAMES = {'setuptools': 'pkg_resources', 'msgpack-python': 'msgpack'} +LIBRARY_DIRNAMES = {"setuptools": "pkg_resources", "msgpack-python": "msgpack"} # from time to time, remove the no longer needed ones HARDCODED_LICENSE_URLS = { - 'pytoml': 'https://github.com/avakar/pytoml/raw/master/LICENSE', - 'webencodings': 'https://github.com/SimonSapin/python-webencodings/raw/' - 'master/LICENSE', + "pytoml": "https://github.com/avakar/pytoml/raw/master/LICENSE", + "webencodings": "https://github.com/SimonSapin/python-webencodings/raw/" + "master/LICENSE", } @@ -38,25 +38,25 @@ def remove_all(paths): def log(msg): - print('[vendoring.%s] %s' % (TASK_NAME, msg)) + print("[vendoring.%s] %s" % (TASK_NAME, msg)) def _get_vendor_dir(ctx): - git_root = ctx.run('git rev-parse --show-toplevel', hide=True).stdout - return Path(git_root.strip()) / 'src' / 'pip' / '_vendor' + git_root = ctx.run("git rev-parse --show-toplevel", hide=True).stdout + return Path(git_root.strip()) / "src" / "pip" / "_vendor" def clean_vendor(ctx, vendor_dir): # Old _vendor cleanup - remove_all(vendor_dir.glob('*.pyc')) - log('Cleaning %s' % vendor_dir) + remove_all(vendor_dir.glob("*.pyc")) + log("Cleaning %s" % vendor_dir) for item in vendor_dir.iterdir(): if item.is_dir(): shutil.rmtree(str(item)) elif item.name not in FILE_WHITE_LIST: item.unlink() else: - log('Skipping %s' % item) + log("Skipping %s" % item) def detect_vendored_libs(vendor_dir): @@ -77,59 +77,59 @@ def rewrite_imports(package_dir, vendored_libs): for item in package_dir.iterdir(): if item.is_dir(): rewrite_imports(item, vendored_libs) - elif 
item.name.endswith('.py'): + elif item.name.endswith(".py"): rewrite_file_imports(item, vendored_libs) def rewrite_file_imports(item, vendored_libs): """Rewrite 'import xxx' and 'from xxx import' for vendored_libs""" - text = item.read_text(encoding='utf-8') + text = item.read_text(encoding="utf-8") # Revendor pkg_resources.extern first - text = re.sub(r'pkg_resources\.extern', r'pip._vendor', text) - text = re.sub(r'from \.extern', r'from pip._vendor', text) + text = re.sub(r"pkg_resources\.extern", r"pip._vendor", text) + text = re.sub(r"from \.extern", r"from pip._vendor", text) for lib in vendored_libs: text = re.sub( - r'(\n\s*|^)import %s(\n\s*)' % lib, - r'\1from pip._vendor import %s\2' % lib, + r"(\n\s*|^)import %s(\n\s*)" % lib, + r"\1from pip._vendor import %s\2" % lib, text, ) text = re.sub( - r'(\n\s*|^)from %s(\.|\s+)' % lib, r'\1from pip._vendor.%s\2' % lib, text + r"(\n\s*|^)from %s(\.|\s+)" % lib, r"\1from pip._vendor.%s\2" % lib, text ) - item.write_text(text, encoding='utf-8') + item.write_text(text, encoding="utf-8") def apply_patch(ctx, patch_file_path): - log('Applying patch %s' % patch_file_path.name) - ctx.run('git apply --verbose %s' % patch_file_path) + log("Applying patch %s" % patch_file_path.name) + ctx.run("git apply --verbose %s" % patch_file_path) def vendor(ctx, vendor_dir): - log('Reinstalling vendored libraries') + log("Reinstalling vendored libraries") # We use --no-deps because we want to ensure that all of our dependencies # are added to vendor.txt, this includes all dependencies recursively up # the chain. 
ctx.run( - 'pip install -t {0} -r {0}/vendor.txt --no-compile --no-deps'.format( + "pip install -t {0} -r {0}/vendor.txt --no-compile --no-deps".format( str(vendor_dir) ) ) - remove_all(vendor_dir.glob('*.dist-info')) - remove_all(vendor_dir.glob('*.egg-info')) + remove_all(vendor_dir.glob("*.dist-info")) + remove_all(vendor_dir.glob("*.egg-info")) # Cleanup setuptools unneeded parts - (vendor_dir / 'easy_install.py').unlink() - drop_dir(vendor_dir / 'setuptools') - drop_dir(vendor_dir / 'pkg_resources' / '_vendor') - drop_dir(vendor_dir / 'pkg_resources' / 'extern') + (vendor_dir / "easy_install.py").unlink() + drop_dir(vendor_dir / "setuptools") + drop_dir(vendor_dir / "pkg_resources" / "_vendor") + drop_dir(vendor_dir / "pkg_resources" / "extern") # Drop the bin directory (contains easy_install, distro, chardetect etc.) # Might not appear on all OSes, so ignoring errors - drop_dir(vendor_dir / 'bin', ignore_errors=True) + drop_dir(vendor_dir / "bin", ignore_errors=True) # Drop interpreter and OS specific msgpack libs. # Pip will rely on the python-only fallback instead. 
- remove_all(vendor_dir.glob('msgpack/*.so')) + remove_all(vendor_dir.glob("msgpack/*.so")) # Detect the vendored packages/modules vendored_libs = detect_vendored_libs(vendor_dir) @@ -145,17 +145,17 @@ def vendor(ctx, vendor_dir): # Special cases: apply stored patches log("Apply patches") - patch_dir = Path(__file__).parent / 'patches' - for patch in patch_dir.glob('*.patch'): + patch_dir = Path(__file__).parent / "patches" + for patch in patch_dir.glob("*.patch"): apply_patch(ctx, patch) def download_licenses(ctx, vendor_dir): - log('Downloading licenses') - tmp_dir = vendor_dir / '__tmp__' + log("Downloading licenses") + tmp_dir = vendor_dir / "__tmp__" ctx.run( - 'pip download -r {0}/vendor.txt --no-binary ' - ':all: --no-deps -d {1}'.format(str(vendor_dir), str(tmp_dir)) + "pip download -r {0}/vendor.txt --no-binary " + ":all: --no-deps -d {1}".format(str(vendor_dir), str(tmp_dir)) ) for sdist in tmp_dir.iterdir(): extract_license(vendor_dir, sdist) @@ -163,18 +163,18 @@ def download_licenses(ctx, vendor_dir): def extract_license(vendor_dir, sdist): - if sdist.suffixes[-2] == '.tar': + if sdist.suffixes[-2] == ".tar": ext = sdist.suffixes[-1][1:] - with tarfile.open(sdist, mode='r:{}'.format(ext)) as tar: + with tarfile.open(sdist, mode="r:{}".format(ext)) as tar: found = find_and_extract_license(vendor_dir, tar, tar.getmembers()) - elif sdist.suffixes[-1] == '.zip': + elif sdist.suffixes[-1] == ".zip": with zipfile.ZipFile(sdist) as zip: found = find_and_extract_license(vendor_dir, zip, zip.infolist()) else: - raise NotImplementedError('new sdist type!') + raise NotImplementedError("new sdist type!") if not found: - log('License not found in {}, will download'.format(sdist.name)) + log("License not found in {}, will download".format(sdist.name)) license_fallback(vendor_dir, sdist.name) @@ -185,10 +185,10 @@ def find_and_extract_license(vendor_dir, tar, members): name = member.name except AttributeError: # zipfile name = member.filename - if 'LICENSE' in name 
or 'COPYING' in name: - if '/test' in name: + if "LICENSE" in name or "COPYING" in name: + if "/test" in name: # some testing licenses in html5lib and distlib - log('Ignoring {}'.format(name)) + log("Ignoring {}".format(name)) continue found = True extract_license_member(vendor_dir, tar, member, name) @@ -199,12 +199,12 @@ def license_fallback(vendor_dir, sdist_name): """Hardcoded license URLs. Check when updating if those are still needed""" libname = libname_from_dir(sdist_name) if libname not in HARDCODED_LICENSE_URLS: - raise ValueError('No hardcoded URL for {} license'.format(libname)) + raise ValueError("No hardcoded URL for {} license".format(libname)) url = HARDCODED_LICENSE_URLS[libname] - _, _, name = url.rpartition('/') + _, _, name = url.rpartition("/") dest = license_destination(vendor_dir, libname, name) - log('Downloading {}'.format(url)) + log("Downloading {}".format(url)) r = requests.get(url, allow_redirects=True) r.raise_for_status() dest.write_bytes(r.content) @@ -213,11 +213,11 @@ def license_fallback(vendor_dir, sdist_name): def libname_from_dir(dirname): """Reconstruct the library name without it's version""" parts = [] - for part in dirname.split('-'): + for part in dirname.split("-"): if part[0].isdigit(): break parts.append(part) - return '-'.join(parts) + return "-".join(parts) def license_destination(vendor_dir, libname, filename): @@ -231,7 +231,7 @@ def license_destination(vendor_dir, libname, filename): if libname in LIBRARY_DIRNAMES: return vendor_dir / LIBRARY_DIRNAMES[libname] / filename # fallback to libname.LICENSE (used for nondirs) - return vendor_dir / '{}.{}'.format(libname, filename) + return vendor_dir / "{}.{}".format(libname, filename) def extract_license_member(vendor_dir, tar, member, name): @@ -240,7 +240,7 @@ def extract_license_member(vendor_dir, tar, member, name): libname = libname_from_dir(dirname) dest = license_destination(vendor_dir, libname, mpath.name) dest_relative = dest.relative_to(Path.cwd()) - 
log('Extracting {} into {}'.format(name, dest_relative)) + log("Extracting {} into {}".format(name, dest_relative)) try: fileobj = tar.extractfile(member) dest.write_bytes(fileobj.read()) @@ -281,8 +281,8 @@ def update_stubs(ctx): @invoke.task(name=TASK_NAME, post=[update_stubs]) def main(ctx): vendor_dir = _get_vendor_dir(ctx) - log('Using vendor dir: %s' % vendor_dir) + log("Using vendor dir: %s" % vendor_dir) clean_vendor(ctx, vendor_dir) vendor(ctx, vendor_dir) download_licenses(ctx, vendor_dir) - log('Revendoring complete') + log("Revendoring complete") diff --git a/tools/tox_pip.py b/tools/tox_pip.py index 0ea23903b9e..e51b133a002 100644 --- a/tools/tox_pip.py +++ b/tools/tox_pip.py @@ -4,8 +4,8 @@ import sys from glob import glob -VIRTUAL_ENV = os.environ['VIRTUAL_ENV'] -TOX_PIP_DIR = os.path.join(VIRTUAL_ENV, 'pip') +VIRTUAL_ENV = os.environ["VIRTUAL_ENV"] +TOX_PIP_DIR = os.path.join(VIRTUAL_ENV, "pip") def pip(args): @@ -14,23 +14,23 @@ def pip(args): subprocess.check_call( [ sys.executable, - '-m', - 'pip', - '--disable-pip-version-check', - 'install', - '-t', + "-m", + "pip", + "--disable-pip-version-check", + "install", + "-t", TOX_PIP_DIR, - 'pip', + "pip", ] ) - shutil.rmtree(glob(os.path.join(TOX_PIP_DIR, 'pip-*.dist-info'))[0]) + shutil.rmtree(glob(os.path.join(TOX_PIP_DIR, "pip-*.dist-info"))[0]) # And use that version. - pypath = os.environ.get('PYTHONPATH') + pypath = os.environ.get("PYTHONPATH") pypath = pypath.split(os.pathsep) if pypath is not None else [] pypath.insert(0, TOX_PIP_DIR) - os.environ['PYTHONPATH'] = os.pathsep.join(pypath) - subprocess.check_call([sys.executable, '-m', 'pip'] + args) + os.environ["PYTHONPATH"] = os.pathsep.join(pypath) + subprocess.check_call([sys.executable, "-m", "pip"] + args) -if __name__ == '__main__': +if __name__ == "__main__": pip(sys.argv[1:])