
Commit

Merge branch 'pip-tools-integration'
Also replace all occurrences of "pip-tools" with "prequ" or "Prequ" in
the merged-in content.

* pip-tools-integration:
  Adjust changelog for 1.10.0rc1
  Add Python 3.6 to the tox test matrix
  Remove redundant check for editable
  format and sort unsafe packages in help text
  Generate hashes for all available distributions
  update CHANGELOG
  pass allow_unsafe to write for unsafe dep comment output
  add tests for unsafe dependencies
  fix bug: unsafe dependencies pinned in generated requirements
  Use utils.UNSAFE_PACKAGES as source of truth
  Update contributing doc (jazzband#514)
  Update setup.py
  Add a PR checklist and some precisions on the contributing guidelines.
  Clean up setup.py trove classifiers
  Use set comprehension syntax throughout
  add ability to override compile command in generated req file
  Fix editable unittest for users with space in their path.
suutari-ai committed Jun 8, 2017
2 parents 67c6712 + 75f200e commit 8ad1173
Showing 14 changed files with 195 additions and 26 deletions.
7 changes: 7 additions & 0 deletions .github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,7 @@
Describe the changes here.

##### Contributor checklist

- [ ] Provided the tests for the changes
- [ ] Added the changes to CHANGELOG.md
- [ ] Requested (or received) a review from another contributor
21 changes: 20 additions & 1 deletion prequ/repositories/pypi.py
@@ -9,6 +9,7 @@
from pip.download import unpack_url
from pip.index import PackageFinder
from pip.req.req_set import RequirementSet
from pip.wheel import Wheel

from ..cache import CACHE_DIR
from ..exceptions import NoCandidateFound
@@ -29,6 +30,24 @@
from .._compat import TemporaryDirectory


# Monkey patch pip's Wheel class to support all platform tags. This allows
# Prequ to generate hashes for all available distributions, not only the
# one for the current platform.

def _wheel_supported(self, tags=None):
# Ignore current platform. Support everything.
return True


def _wheel_support_index_min(self, tags=None):
# All wheels are equal priority for sorting.
return 0


Wheel.supported = _wheel_supported
Wheel.support_index_min = _wheel_support_index_min


class PyPIRepository(BaseRepository):
DEFAULT_INDEX_URL = 'https://pypi.python.org/simple'

@@ -154,7 +173,7 @@ def get_hashes(self, ireq):
all of the files for a given requirement. It is not acceptable for an
editable or unpinned requirement to be passed to this function.
"""
if ireq.editable or not is_pinned_requirement(ireq):
if not is_pinned_requirement(ireq):
raise TypeError(
"Expected pinned requirement, not unpinned or editable, got {}".format(ireq))

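For context, a rough sketch of the observable effect of the Wheel monkey patch above (assuming pip 9.x, where pip.wheel.Wheel is constructed from a wheel filename; the filename below is only illustrative):

from pip.wheel import Wheel

import prequ.repositories.pypi  # noqa: F401 -- importing the module applies the patch

# A wheel built for a foreign platform would normally fail Wheel.supported()
# on, say, Linux, so pip would never consider it and its hash would be missing
# from the output. After the patch every distribution is an equal-priority
# candidate, which is what lets get_hashes() cover all of them.
wheel = Wheel('cffi-1.9.1-cp36-cp36m-win_amd64.whl')
print(wheel.supported())          # True on any platform once patched
print(wheel.support_index_min())  # 0 -- all wheels sort with equal priority
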
28 changes: 23 additions & 5 deletions prequ/resolver.py
@@ -6,6 +6,7 @@
import os
from functools import partial
from itertools import chain, count
import os

import click
from pip.req import InstallRequirement
@@ -65,6 +66,7 @@ def __init__(self, constraints, repository, cache=None, prereleases=False, clear
self.prereleases = prereleases
self.clear_caches = clear_caches
self.allow_unsafe = allow_unsafe
self.unsafe_constraints = set()

@property
def constraints(self):
@@ -189,35 +191,46 @@ def _resolve_one_round(self):
"""
# Sort this list for readability of terminal output
constraints = sorted(self.constraints, key=_dep_key)
unsafe_constraints = []
original_constraints = copy.copy(constraints)
if not self.allow_unsafe:
for constraint in original_constraints:
if constraint.name in UNSAFE_PACKAGES:
constraints.remove(constraint)
constraint.req.specifier = None
unsafe_constraints.append(constraint)

log.debug('Current constraints:')
for constraint in constraints:
log.debug(' {}'.format(constraint))

log.debug('')
log.debug('Finding the best candidates:')
best_matches = set(self.get_best_match(ireq) for ireq in constraints)
best_matches = {self.get_best_match(ireq) for ireq in constraints}

# Find the new set of secondary dependencies
log.debug('')
log.debug('Finding secondary dependencies:')

ungrouped = list(self.limiters)
safe_constraints = list(self.limiters)
for best_match in best_matches:
for dep in self._iter_dependencies(best_match):
if self.allow_unsafe or dep.name not in UNSAFE_PACKAGES:
ungrouped.append(dep)
safe_constraints.append(dep)
# Grouping constraints to make clean diff between rounds
theirs = set(
ireq for ireq in self._group_constraints(ungrouped)
ireq for ireq in self._group_constraints(safe_constraints)
if not ireq.constraint)

# NOTE: We need to compare RequirementSummary objects, since
# InstallRequirement does not define equality
diff = {RequirementSummary(t.req) for t in theirs} - {RequirementSummary(t.req) for t in self.their_constraints}
removed = ({RequirementSummary(t.req) for t in self.their_constraints} -
{RequirementSummary(t.req) for t in theirs})
unsafe = ({RequirementSummary(t.req) for t in unsafe_constraints} -
{RequirementSummary(t.req) for t in self.unsafe_constraints})

has_changed = len(diff) > 0 or len(removed) > 0
has_changed = len(diff) > 0 or len(removed) > 0 or len(unsafe) > 0
if has_changed:
log.debug('')
log.debug('New dependencies found in this round:')
@@ -226,9 +239,14 @@ def _resolve_one_round(self):
log.debug('Removed dependencies in this round:')
for removed_dependency in sorted(removed, key=lambda req: key_from_req(req.req)):
log.debug(' removing {}'.format(removed_dependency))
log.debug('Unsafe dependencies in this round:')
for unsafe_dependency in sorted(unsafe, key=lambda req: key_from_req(req.req)):
log.debug(' remembering unsafe {}'.format(unsafe_dependency))

# Store the last round's results in the their_constraints
self.their_constraints = theirs
# Store the last round's unsafe constraints
self.unsafe_constraints = unsafe_constraints
return has_changed, best_matches

def get_best_match(self, ireq):
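
The heart of the resolver change above is the split between safe and unsafe constraints. A standalone sketch of that step, using plain package names instead of InstallRequirement objects (UNSAFE_PACKAGES matching the pip/setuptools/distribute set from utils):

UNSAFE_PACKAGES = {'distribute', 'pip', 'setuptools'}

def split_constraints(names, allow_unsafe=False):
    """Mirror the filtering in _resolve_one_round: unsafe packages are removed
    from resolution but remembered so the writer can still mention them."""
    if allow_unsafe:
        return list(names), []
    safe = [n for n in names if n not in UNSAFE_PACKAGES]
    unsafe = [n for n in names if n in UNSAFE_PACKAGES]
    # The real code also clears the version specifier on each unsafe constraint.
    return safe, unsafe

safe, unsafe = split_constraints(['flask', 'setuptools', 'six'])
print(safe)    # ['flask', 'six']
print(unsafe)  # ['setuptools'] -- surfaced later as a comment in the output file
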
9 changes: 6 additions & 3 deletions prequ/scripts/compile_in.py
@@ -15,7 +15,8 @@
from ..repositories import LocalRequirementsRepository
from ..resolver import Resolver
from ..utils import (
assert_compatible_pip_version, dedup, is_pinned_requirement, key_from_req)
UNSAFE_PACKAGES, assert_compatible_pip_version, dedup,
is_pinned_requirement, key_from_req)
from ..writer import OutputWriter
from ._repo import get_pip_options_and_pypi_repository

@@ -61,7 +62,7 @@ class PipCommand(pip.basecommand.Command):
help=('Output file name. Required if more than one input file is given. '
'Will be derived from input file otherwise.'))
@click.option('--allow-unsafe', is_flag=True, default=False,
help="Pin packages considered unsafe: pip, setuptools & distribute")
help="Pin packages considered unsafe: {}".format(', '.join(sorted(UNSAFE_PACKAGES))))
@click.option('--generate-hashes', is_flag=True, default=False,
help="Generate pip 8 style hashes in the resulting requirements file.")
@click.option('--max-rounds', default=10,
@@ -217,11 +218,13 @@ def cli(verbose, silent, dry_run, pre, rebuild, find_links, index_url,
format_control=repository.finder.format_control,
silent=silent)
writer.write(results=results,
unsafe_requirements=resolver.unsafe_constraints,
reverse_dependencies=reverse_dependencies,
primary_packages={key_from_req(ireq.req) for ireq in constraints if not ireq.constraint},
markers={key_from_req(ireq.req): ireq.markers
for ireq in constraints if ireq.markers},
hashes=hashes)
hashes=hashes,
allow_unsafe=allow_unsafe)

if dry_run:
log.warning('Dry-run, so nothing updated.')
25 changes: 15 additions & 10 deletions prequ/writer.py
@@ -82,44 +82,49 @@ def write_flags(self):
if emitted:
yield ''

def _iter_lines(self, results, reverse_dependencies, primary_packages, markers, hashes):
def _iter_lines(self, results, unsafe_requirements, reverse_dependencies,
primary_packages, markers, hashes, allow_unsafe=False):
for line in self.write_header():
yield line
for line in self.write_flags():
yield line

unsafe_packages = {r for r in results if r.name in UNSAFE_PACKAGES}
unsafe_requirements = {r for r in results if r.name in UNSAFE_PACKAGES} if not unsafe_requirements else unsafe_requirements # noqa
packages = {r for r in results if r.name not in UNSAFE_PACKAGES}

packages = sorted(packages, key=self._sort_key)
unsafe_packages = sorted(unsafe_packages, key=self._sort_key)

for ireq in packages:
line = self._format_requirement(
ireq, reverse_dependencies, primary_packages,
markers.get(ireq.req.name), hashes=hashes)
yield line

if unsafe_packages:
if unsafe_requirements:
unsafe_requirements = sorted(unsafe_requirements, key=self._sort_key)
yield ''
yield comment('# The following packages are considered to be unsafe in a requirements file:')

for ireq in unsafe_packages:

yield self._format_requirement(ireq,
for ireq in unsafe_requirements:
req = self._format_requirement(ireq,
reverse_dependencies,
primary_packages,
marker=markers.get(ireq.req.name),
hashes=hashes)
if not allow_unsafe:
yield comment('# {}'.format(req))
else:
yield req

def write(self, results, reverse_dependencies, primary_packages, markers, hashes):
def write(self, results, unsafe_requirements, reverse_dependencies,
primary_packages, markers, hashes, allow_unsafe=False):
with ExitStack() as stack:
f = None
if not self.dry_run:
f = stack.enter_context(AtomicSaver(self.dst_file))

for line in self._iter_lines(results, reverse_dependencies,
primary_packages, markers, hashes):
for line in self._iter_lines(results, unsafe_requirements, reverse_dependencies,
primary_packages, markers, hashes, allow_unsafe=allow_unsafe):
if not self.silent:
log.info(line)
if f:
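
A minimal sketch of the new allow_unsafe branch in _iter_lines, using plain requirement strings to show how the two modes differ (annotation and comment styling simplified):

def emit_unsafe(unsafe_requirements, allow_unsafe=False):
    """Yield the trailing 'unsafe' section of a requirements file."""
    if not unsafe_requirements:
        return
    yield ''
    yield '# The following packages are considered to be unsafe in a requirements file:'
    for req in sorted(unsafe_requirements):
        # Commented out by default; written as a real pin only with --allow-unsafe.
        yield req if allow_unsafe else '# {}'.format(req)

lines = list(emit_unsafe(['setuptools==34.0.0']))
print(lines[-1])  # "# setuptools==34.0.0" -- commented, so pip will not install it
lines = list(emit_unsafe(['setuptools==34.0.0'], allow_unsafe=True))
print(lines[-1])  # "setuptools==34.0.0" -- an actual pin
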
2 changes: 2 additions & 0 deletions setup.cfg
@@ -18,6 +18,7 @@ classifiers =
Programming Language :: Python :: 3
Programming Language :: Python :: 3.4
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3.6
Intended Audience :: Developers
Intended Audience :: System Administrators
License :: OSI Approved :: BSD License
@@ -29,6 +30,7 @@ include_package_data = True
packages = find:
install_requires =
click>=6
setuptools
six
backports.tempfile ; python_version<"3.0"
contextlib2 ; python_version<"3.0"
11 changes: 11 additions & 0 deletions tests/fixtures/fake-index.json
@@ -7,6 +7,9 @@
"2.0.2": {"": ["vine>=1.1.1"]},
"2.1.4": {"": ["vine>=1.1.3"]}
},
"appdirs": {
"1.4.9": {"": []}
},
"arrow": {
"0.5.0": {"": ["python-dateutil"]},
"0.5.4": {"": ["python-dateutil"]}
@@ -46,6 +49,11 @@
"celery==3.1.18"
]}
},
"fake-prequ-test-with-unsafe-deps": {
"0.1": {"": [
"setuptools==34.0.0"
]}
},
"flask": {
"0.10.1": {"": [
"Jinja2>=2.4",
@@ -109,6 +117,9 @@
"markupsafe": {
"0.23": {"": []}
},
"packaging": {
"16.8": {"": []}
},
"psycopg2": {
"2.5.4": {"": []},
"2.6": {"": []}
2 changes: 1 addition & 1 deletion tests/fixtures/small_fake_package/setup.py
@@ -5,5 +5,5 @@
version=0.1,
install_requires=[
"six==1.10.0",
],
],
)
3 changes: 2 additions & 1 deletion tests/test_cli.py
@@ -5,6 +5,7 @@

import pytest
from click.testing import CliRunner
from six.moves.urllib.request import pathname2url

from prequ.scripts.compile_in import cli

@@ -214,7 +215,7 @@ def test_run_as_module_sync():
def test_editable_package(tmpdir):
"""Prequ can compile an editable """
fake_package_dir = os.path.join(os.path.split(__file__)[0], 'fixtures', 'small_fake_package')
fake_package_dir = fake_package_dir.replace('\\', '/')
fake_package_dir = 'file:' + pathname2url(fake_package_dir)
runner = CliRunner()
with runner.isolated_filesystem():
with open('requirements.in', 'w') as req_in:
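
The test fix above builds a file: URL with pathname2url instead of hand-replacing backslashes, so editable paths containing spaces are percent-encoded correctly. A quick illustration with a hypothetical POSIX path:

from six.moves.urllib.request import pathname2url

path = '/home/user/my fixtures/small_fake_package'  # hypothetical path with a space
print('file:' + pathname2url(path))
# file:/home/user/my%20fixtures/small_fake_package
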
8 changes: 4 additions & 4 deletions tests/test_fake_index.py
@@ -34,21 +34,21 @@ def test_get_dependencies(from_line, repository):

ireq = from_line('Flask==0.10.1')
dependencies = repository.get_dependencies(ireq)
assert (set(str(req) for req in dependencies) ==
assert ({str(req) for req in dependencies} ==
{'Werkzeug>=0.7', 'Jinja2>=2.4', 'itsdangerous>=0.21'})

ireq = from_line('ipython==2.1.0')
dependencies = repository.get_dependencies(ireq)
assert set(str(req) for req in dependencies) == {'gnureadline'}
assert {str(req) for req in dependencies} == {'gnureadline'}

ireq = from_line('ipython[notebook]==2.1.0')
dependencies = repository.get_dependencies(ireq)
assert (set(str(req) for req in dependencies) ==
assert ({str(req) for req in dependencies} ==
{'gnureadline', 'pyzmq>=2.1.11', 'tornado>=3.1', 'jinja2'})

ireq = from_line('ipython[notebook,nbconvert]==2.1.0')
dependencies = repository.get_dependencies(ireq)
assert (set(str(req) for req in dependencies) ==
assert ({str(req) for req in dependencies} ==
{'gnureadline', 'pyzmq>=2.1.11', 'tornado>=3.1', 'jinja2', 'pygments', 'Sphinx>=0.3'})


55 changes: 55 additions & 0 deletions tests/test_repository_pypi.py
@@ -0,0 +1,55 @@
from prequ.repositories.pypi import PyPIRepository
from prequ.scripts._repo import get_pip_command


def test_generate_hashes_all_platforms(from_line):
expected = {
'sha256:04b133ef629ae2bc05f83d0b079a964494a9cd17914943e690c57209b44aae20',
'sha256:0f1b3193c17b93c75e73eeac92f22eec4c98a021d9969b1c347d1944fae0d26b',
'sha256:1fb1cf40c315656f98f4d3acfb1bd031a14a9a69d155e9a180d5f9b52eaf745a',
'sha256:20af85d8e154b50f540bc8d517a0dbf6b1c20b5d06e572afda919d5dafd1d06b',
'sha256:2570f93b42c61013ab4b26e23aa25b640faf5b093ad7dd3504c3a8eadd69bc24',
'sha256:2f4e2872833ee3764dfc168dea566b7dd83b01ac61b377490beba53b5ece57f7',
'sha256:31776a37a67424e7821324b9e03a05aa6378bbc2bccc58fa56402547f82803c6',
'sha256:353421c76545f1d440cacc137abc865f07eab9df0dd3510c0851a2ca04199e90',
'sha256:36d06de7b09b1eba54b1f5f76e2221afef7489cc61294508c5a7308a925a50c6',
'sha256:3f1908d0bcd654f8b7b73204f24336af9f020b707fb8af937e3e2279817cbcd6',
'sha256:5268de3a18f031e9787c919c1b9137ff681ea696e76740b1c6c336a26baaa58a',
'sha256:563e0bd53fda03c151573217b3a49b3abad8813de9dd0632e10090f6190fdaf8',
'sha256:5e1368d13f1774852f9e435260be19ad726bbfb501b80472f61c2dc768a0692a',
'sha256:60881c79eb72cb75bd0a4be5e31c9e431739146c4184a2618cabea3938418984',
'sha256:6120b62a642a40e47eb6c9ff00c02be69158fc7f7c5ff78e42a2c739d1c57cd6',
'sha256:65c223e77f87cb463191ace3398e0a6d84ce4ac575d42eb412a220b099f593d6',
'sha256:6fbf8db55710959344502b58ab937424173ad8b5eb514610bcf56b119caa350a',
'sha256:74aadea668c94eef4ceb09be3d0eae6619e28b4f1ced4e29cd43a05bb2cfd7a4',
'sha256:7be1efa623e1ed91b15b1e62e04c536def1d75785eb930a0b8179ca6b65ed16d',
'sha256:83266cdede210393889471b0c2631e78da9d4692fcca875af7e958ad39b897ee',
'sha256:86c68a3f8246495962446c6f96f6a27f182b91208187b68f1e87ec3dfd29fa32',
'sha256:9163f7743cf9991edaddf9cf886708e288fab38e1b9fec9c41c15c85c8f7f147',
'sha256:97d9f338f91b7927893ea6500b953e4b4b7e47c6272222992bb76221e17056ff',
'sha256:a7930e73a4359b52323d09de6d6860840314aa09346cbcf4def8875e1b07ebc7',
'sha256:ada8a42c493e4934a1a8875c2bc9efcb1b88c09883f70375bfa053ab32d6a118',
'sha256:b0bc2d83cc0ba0e8f0d9eca2ffe07f72f33bec7d84547071e7e875d4cca8272d',
'sha256:b5412a65605c642adf3e1544b59b8537daf5696dedadd2b3cbebc42e24da45ed',
'sha256:ba6b5205fced1625b6d9d55f9ef422f9667c5d95f18f07c0611eb964a3355331',
'sha256:bcaf3d86385daaab0ae51c9c53ebe70a6c1c5dfcb9e311b13517e04773ddf6b6',
'sha256:cfa15570ecec1ea6bee089e86fd4deae6208c96a811344ce246de5e5c9ac824a',
'sha256:d3e3063af1fa6b59e255da9a812891cdaf24b90fbaf653c02797871069b7c4c9',
'sha256:d9cfe26ecea2fec320cd0cac400c9c2435328994d23596ee6df63945fe7292b0',
'sha256:e5ef800ef8ef9ee05ae9a5b7d7d9cf7d6c936b32e312e54823faca3034ee16ab',
'sha256:f1366150acf611d09d37ffefb3559ed3ffeb1713643d3cd10716d6c5da3f83fb',
'sha256:f4eb9747a37120b35f59c8e96265e87b0c432ff010d32fc0772992aa14659502',
'sha256:f8264463cc08cd696ad17e4bf3c80f3344628c04c11ffdc545ddf0798bc17316',
'sha256:f8ba54848dfe280b1be0d6e699544cee4ba10d566f92464538063d9e645aed3e',
'sha256:f93d1edcaea7b6a7a8fbf936f4492a9a0ee0b4cb281efebd5e1dd73e5e432c71',
'sha256:fc8865c7e0ac25ddd71036c2b9a799418b32d9acb40400d345b8791b6e1058cb',
'sha256:fce6b0cb9ade1546178c031393633b09c4793834176496c99a94de0bfa471b27',
'sha256:fde17c52d7ce7d55a9fb263b57ccb5da6439915b5c7105617eb21f636bb1bd9c',
}

pip_command = get_pip_command()
pip_options, _ = pip_command.parse_args([])
session = pip_command._build_session(pip_options)
repository = PyPIRepository(pip_options, session)
ireq = from_line('cffi==1.9.1')
assert repository.get_hashes(ireq) == expected
