From a6e402a14d303b3a9530ebf9c67631f03dd528fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Mon, 23 Apr 2018 11:18:32 +0200 Subject: [PATCH 001/118] Pin to conda-build 3. --- bioconda_utils/bioconda_utils-requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index 110c351208..99eaacd042 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -1,8 +1,8 @@ anaconda-client=1.6.* argh=0.26.* beautifulsoup4=4.6.* -conda=4.3.33 -conda-build=2.1.18 +conda=4.5.1 +conda-build=3.8.1 galaxy-lib>=18.5.5,<18.6.0a0 jinja2=2.10.* jsonschema=2.6.* From 12f128eb398ec430ea287c536eb01b77c7f82d16 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Mon, 23 Apr 2018 11:25:28 +0200 Subject: [PATCH 002/118] Depend on latest conda-forge-pinning package. This will ensure that conda-build 3 is always using the pinnings defined in https://github.com/conda-forge/conda-forge-pinning-feedstock/blob/master/recipe/conda_build_config.yaml By contributing to this list, we can maintain compatibility with conda-forge. --- bioconda_utils/bioconda_utils-requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index 99eaacd042..977cc7bc06 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -21,3 +21,4 @@ colorlog=3.1.* six=1.11.* alabaster=0.7.* git=2.14.* +conda-forge-pinning From 66c1a4c3f3aca4ee7e26b8f466273cabf3835a84 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Mon, 23 Apr 2018 11:36:09 +0200 Subject: [PATCH 003/118] Try downgrading to last minor release of conda. 
--- bioconda_utils/bioconda_utils-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index 977cc7bc06..630bf3e7ce 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -1,7 +1,7 @@ anaconda-client=1.6.* argh=0.26.* beautifulsoup4=4.6.* -conda=4.5.1 +conda=4.4.* conda-build=3.8.1 galaxy-lib>=18.5.5,<18.6.0a0 jinja2=2.10.* From 9d6636c97e7d42d7bccf594df93c70794961c3e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Mon, 23 Apr 2018 11:39:31 +0200 Subject: [PATCH 004/118] Try using conda 4.3. --- bioconda_utils/bioconda_utils-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index 630bf3e7ce..f30d8788e3 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -1,7 +1,7 @@ anaconda-client=1.6.* argh=0.26.* beautifulsoup4=4.6.* -conda=4.4.* +conda=4.3.* conda-build=3.8.1 galaxy-lib>=18.5.5,<18.6.0a0 jinja2=2.10.* From 6ee9510d201611d9e729045860a280352a231af6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Mon, 23 Apr 2018 12:51:55 +0200 Subject: [PATCH 005/118] Try 4.5.0 --- bioconda_utils/bioconda_utils-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index f30d8788e3..94998b4364 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -1,7 +1,7 @@ anaconda-client=1.6.* argh=0.26.* beautifulsoup4=4.6.* -conda=4.3.* +conda=4.5.0 conda-build=3.8.1 galaxy-lib>=18.5.5,<18.6.0a0 jinja2=2.10.* From 63f809f43dbbc2a64a87fc82f729e924451d598d Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Mon, 23 Apr 2018 14:09:38 +0200 Subject: [PATCH 006/118] Handle UnableToParse --- docs/source/generate_docs.py | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/docs/source/generate_docs.py b/docs/source/generate_docs.py index be6fe0e2a7..efb0aa2352 100644 --- a/docs/source/generate_docs.py +++ b/docs/source/generate_docs.py @@ -20,6 +20,7 @@ try: from conda_build.metadata import MetaData + from conda_build.exceptions import UnableToParse except Exception: logging.exception("Failed to import MetaData") raise @@ -198,18 +199,23 @@ def generate_readme(folder, repodata, renderer): versions.append(sf) # Read the meta.yaml file(s) - recipe = op.join(RECIPE_DIR, folder, "meta.yaml") - if op.exists(recipe): - metadata = MetaData(recipe) - if metadata.version() not in versions: - versions.insert(0, metadata.version()) - else: - if versions: - recipe = op.join(RECIPE_DIR, folder, versions[0], "meta.yaml") + try: + recipe = op.join(RECIPE_DIR, folder, "meta.yaml") + if op.exists(recipe): metadata = MetaData(recipe) + if metadata.version() not in versions: + versions.insert(0, metadata.version()) else: - # ignore non-recipe folders - return + if versions: + recipe = op.join(RECIPE_DIR, folder, versions[0], "meta.yaml") + metadata = MetaData(recipe) + else: + # ignore non-recipe folders + return + except UnableToParse as e: + logger.error("Failed to parse recipe {}".format(recipe)) + raise e + name = metadata.name() versions_in_channel = repodata.get_versions(name) From 3664db12a041c1d2816551750d3777b12e687330 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Tue, 24 Apr 2018 09:09:12 +0200 Subject: [PATCH 007/118] Remove galaxy-lib version restriction. 
--- bioconda_utils/bioconda_utils-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index 94998b4364..5fe3a3d615 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -3,7 +3,7 @@ argh=0.26.* beautifulsoup4=4.6.* conda=4.5.0 conda-build=3.8.1 -galaxy-lib>=18.5.5,<18.6.0a0 +galaxy-lib>=18.5.5 jinja2=2.10.* jsonschema=2.6.* networkx=1.11 From 20d58ebf756c62d38e4e147190e3fdd8d905e371 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Tue, 24 Apr 2018 09:17:56 +0200 Subject: [PATCH 008/118] adapt to conda 4.5 --- bioconda_utils/bioconda_utils-requirements.txt | 2 +- bioconda_utils/pypi.py | 2 +- bioconda_utils/utils.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index 5fe3a3d615..de8f66416b 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -1,7 +1,7 @@ anaconda-client=1.6.* argh=0.26.* beautifulsoup4=4.6.* -conda=4.5.0 +conda=4.5.1 conda-build=3.8.1 galaxy-lib>=18.5.5 jinja2=2.10.* diff --git a/bioconda_utils/pypi.py b/bioconda_utils/pypi.py index 8e953a2027..9c9997384e 100644 --- a/bioconda_utils/pypi.py +++ b/bioconda_utils/pypi.py @@ -1,6 +1,6 @@ import os import requests -from conda.version import VersionOrder +from conda.models.version import VersionOrder from . import utils from . 
import linting diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index a8ea9f4c13..43c440076f 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -22,7 +22,7 @@ from conda_build import api from conda_build.metadata import MetaData -from conda.version import VersionOrder +from conda.models.version import VersionOrder import yaml from jinja2 import Environment, PackageLoader from colorlog import ColoredFormatter From 14aafd2e3668d9209c6140dfe649f5bab9f1c621 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Tue, 24 Apr 2018 09:17:56 +0200 Subject: [PATCH 009/118] use get_output_file_paths instead of get_output_file_path --- .circleci/config.yml | 6 ++---- bioconda_utils/bioconda_utils-requirements.txt | 2 +- bioconda_utils/pypi.py | 2 +- bioconda_utils/utils.py | 9 +++++++-- 4 files changed, 11 insertions(+), 8 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 2453fc37f3..e620a27668 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -55,8 +55,7 @@ jobs: - *install_bioconda_utils - run: name: Testing - # --tb=line is important because py.test otherwise exposes tokens in case of errors - command: py.test test/ -v --tb=line + command: py.test test/ -v --tb=native no_output_timeout: 1200 test-macos: <<: *macos @@ -69,8 +68,7 @@ jobs: - *install_bioconda_utils - run: name: Testing - # --tb=line is important because py.test otherwise exposes tokens in case of errors - command: py.test test/ -v -k "not docker" --tb=line + command: py.test test/ -v -k "not docker" --tb=native no_output_timeout: 1200 build-docs: <<: *linux diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index 5fe3a3d615..de8f66416b 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -1,7 +1,7 @@ anaconda-client=1.6.* argh=0.26.* beautifulsoup4=4.6.* -conda=4.5.0 +conda=4.5.1 conda-build=3.8.1 
galaxy-lib>=18.5.5 jinja2=2.10.* diff --git a/bioconda_utils/pypi.py b/bioconda_utils/pypi.py index 8e953a2027..9c9997384e 100644 --- a/bioconda_utils/pypi.py +++ b/bioconda_utils/pypi.py @@ -1,6 +1,6 @@ import os import requests -from conda.version import VersionOrder +from conda.models.version import VersionOrder from . import utils from . import linting diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index a8ea9f4c13..0e554cdddb 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -22,7 +22,7 @@ from conda_build import api from conda_build.metadata import MetaData -from conda.version import VersionOrder +from conda.models.version import VersionOrder import yaml from jinja2 import Environment, PackageLoader from colorlog import ColoredFormatter @@ -595,7 +595,12 @@ def built_package_path(recipe, env=None): set_build_id=False) meta = MetaData(recipe, config=config) meta.parse_again() - path = api.get_output_file_path(meta, config=config) + paths = api.get_output_file_paths(meta, config=config) + assert len(paths) == 1, ('Bug: conda build returns multiple output ' + 'file paths. This is unexpected since we ' + 'configure it with one combination of ' + 'pinned versions.') + path = paths[0] return path From f6b56696320e433f300f733300f1f0d7740e59fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Tue, 24 Apr 2018 11:00:03 +0200 Subject: [PATCH 010/118] Add lint for deprecated numpy spec. 
--- bioconda_utils/lint_functions.py | 13 +++++++++++++ test/test_linting.py | 30 ++++++++++++++++++++++++++++++ test/test_utils.py | 2 ++ 3 files changed, 45 insertions(+) diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index 33cc45fc58..ddbda9d04f 100644 --- a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -299,6 +299,19 @@ def invalid_identifiers(recipe, meta, df): return +def deprecated_numpy_spec(recipe, meta, df): + reqs = meta.get('requirements') + if reqs is None: + return + for deps in reqs.get(section, []): + for d in deps: + if d.startswith("numpy") and d.contains("x.x"): + return { 'deprecated_numpy_spec': True, + 'fix': 'omit x.x as pinning of numpy is now ' + 'handled automatically'} + return + + def _pin(env_var, dep_name): """ Generates a linting function that checks to make sure `dep_name` is pinned diff --git a/test/test_linting.py b/test/test_linting.py index 43f6b85acf..fc8f1e3a40 100644 --- a/test/test_linting.py +++ b/test/test_linting.py @@ -852,3 +852,33 @@ def test_invalid_identifiers(): doi:10.1093/bioinformatics/btr010 '''] ) + + +def test_deprecated_numpy_spec(): + run_lint( + func=lint_functions.deprecated_numpy_spec, + should_pass=[''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + build: + - numpy + run: + - numpy + '''], + should_fail=[''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + build: + - numpy x.x + run: + - numpy x.x + '''] + ) diff --git a/test/test_utils.py b/test/test_utils.py index eee52799d9..5f75f5f611 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -597,6 +597,8 @@ def test_built_package_path(): name: one version: "0.1" requirements: + build: + - python run: - python From 0d7d1ccc55027e70a54fe8b46fef8fb5aee5b045 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Tue, 24 Apr 2018 11:02:50 +0200 Subject: [PATCH 011/118] Document new lint. 
--- docs/source/linting.rst | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/docs/source/linting.rst b/docs/source/linting.rst index 92424c6aae..e463cd4a3f 100644 --- a/docs/source/linting.rst +++ b/docs/source/linting.rst @@ -244,24 +244,31 @@ to:: `invalid_identifiers` ~~~~~~~~~~~~~~~~~~~~~ -Reason for failing: The recipes has an `extra -> identifiers` section with an +Reason for failing: The recipe has an ``extra -> identifiers`` section with an invalid format. Rationale: The identifiers section has to be machine readable. -How to resolve: Ensure that the section is of the following format: +How to resolve: Ensure that the section is of the following format:: -``` -extra: - identifiers: - - doi:10.1093/bioinformatics/bts480 - - biotools:Snakemake -``` + extra: + identifiers: + - doi:10.1093/bioinformatics/bts480 + - biotools:Snakemake In particular, ensure that each identifier starts with a type (`doi`, `biotools`, ...), followed by a colon and the identifier. Whitespace is not allowed. +`deprecated_numpy_spec` +~~~~~~~~~~~~~~~~~~~~~ +Reason for failing: The recipe contains ``numpy x.x`` in build or run requirements. + +Rationale: This kind of version pinning is deprecated, and numpy pinning is now +handled automatically by the system. + +How to resolve: Remove the ``x.x``. + `*_not_pinned` ~~~~~~~~~~~~~~ From 55e1e141463615037e99ba297bfdf8a89619bd33 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Tue, 24 Apr 2018 11:03:24 +0200 Subject: [PATCH 012/118] register lint. 
--- bioconda_utils/lint_functions.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index ddbda9d04f..a412b6a856 100644 --- a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -385,6 +385,7 @@ def pin(recipe, meta, df): should_not_be_noarch, setup_py_install_args, invalid_identifiers, + deprecated_numpy_spec, _pin('CONDA_ZLIB', 'zlib'), _pin('CONDA_GMP', 'gmp'), _pin('CONDA_BOOST', 'boost'), From 82abcf7476b2f7d50214c82f00c566d96d84e46d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Tue, 24 Apr 2018 11:04:59 +0200 Subject: [PATCH 013/118] fix tests --- test/test_utils.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/test_utils.py b/test/test_utils.py index 5f75f5f611..1b6178aa05 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -508,6 +508,8 @@ def test_filter_recipes_existing_package(): name: gffutils version: "0.8.7.1" requirements: + build: + - python run: - python """, from_string=True) From 33848754e3c234a2d00270ad54153ecafc281adf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Tue, 24 Apr 2018 11:19:15 +0200 Subject: [PATCH 014/118] fix lint --- bioconda_utils/lint_functions.py | 6 +++--- test/test_linting.py | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index a412b6a856..e9f83f5017 100644 --- a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -303,9 +303,9 @@ def deprecated_numpy_spec(recipe, meta, df): reqs = meta.get('requirements') if reqs is None: return - for deps in reqs.get(section, []): - for d in deps: - if d.startswith("numpy") and d.contains("x.x"): + for section in ['build', 'run']: + for dep in reqs.get(section, []): + if dep.startswith('numpy') and 'x.x' in dep: return { 'deprecated_numpy_spec': True, 'fix': 'omit x.x as pinning of numpy is now ' 'handled 
automatically'} diff --git a/test/test_linting.py b/test/test_linting.py index fc8f1e3a40..0c15af107e 100644 --- a/test/test_linting.py +++ b/test/test_linting.py @@ -866,6 +866,7 @@ def test_deprecated_numpy_spec(): requirements: build: - numpy + - python run: - numpy '''], From 38aaf263495fba12276eead2e6a18f95e8e2ee91 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Tue, 24 Apr 2018 11:24:14 +0200 Subject: [PATCH 015/118] adapt test cases and remove obsolete ones. --- test/test_utils.py | 29 ++--------------------------- 1 file changed, 2 insertions(+), 27 deletions(-) diff --git a/test/test_utils.py b/test/test_utils.py index 1b6178aa05..5229c18eb7 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -616,8 +616,8 @@ def test_built_package_path(): r.write_recipes() assert os.path.basename( - utils.built_package_path(r.recipe_dirs['one']) - ) == 'one-0.1-py{ver.major}{ver.minor}_0.tar.bz2'.format(ver=sys.version_info) + utils.built_package_path(r.recipe_dirs['one'], env=dict(CONDA_PY=36)) + ) == 'one-0.1-py36_0.tar.bz2' # resetting with a different CONDA_PY passed as env dict assert os.path.basename( @@ -672,31 +672,6 @@ def test_built_package_path2(): ) == 'two-0.1-ncurses9.0_0.tar.bz2' -def test_pkgname_with_numpy_x_x(): - r = Recipes( - """ - one: - meta.yaml: | - package: - name: one - version: "0.1" - requirements: - run: - - python - - numpy x.x - build: - - python - - numpy x.x - - """, from_string=True) - r.write_recipes() - - os.environ['CONDA_NPY'] = '1.9' - assert os.path.basename( - utils.built_package_path(r.recipe_dirs['one'], env=os.environ) - ) == 'one-0.1-np19py{ver.major}{ver.minor}_0.tar.bz2'.format(ver=sys.version_info) - - def test_string_or_float_to_integer_python(): f = utils._string_or_float_to_integer_python assert f(27) == f('27') == f(2.7) == f('2.7') == 27 From 07eb37175f8a52dff0d326938cf412ca76025203 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Wed, 25 Apr 2018 16:56:54 +0200 
Subject: [PATCH 016/118] Remove env_matrix. --- bioconda_utils/bioconductor_skeleton.py | 5 +- bioconda_utils/build.py | 102 ++++--- bioconda_utils/config.schema.yaml | 2 - bioconda_utils/docker_utils.py | 32 +-- bioconda_utils/example_config.yaml | 1 - bioconda_utils/linting.py | 4 +- bioconda_utils/pypi.py | 7 +- bioconda_utils/utils.py | 221 ++++++--------- docs/source/build-system.rst | 5 +- docs/source/faqs.rst | 38 +-- docs/source/guidelines.rst | 7 +- test/helpers.py | 20 -- test/test_bioconductor_skeleton.py | 5 +- test/test_pkg_test.py | 32 ++- test/test_utils.py | 347 ++++++------------------ 15 files changed, 251 insertions(+), 577 deletions(-) diff --git a/bioconda_utils/bioconductor_skeleton.py b/bioconda_utils/bioconductor_skeleton.py index f85e9c233f..ff791ce8fd 100755 --- a/bioconda_utils/bioconductor_skeleton.py +++ b/bioconda_utils/bioconductor_skeleton.py @@ -856,7 +856,6 @@ def write_recipe(package, recipe_dir, config, force=False, bioc_version=None, `seen_dependencies`. Only has an effect if `recursive=True`. """ config = utils.load_config(config) - env = list(utils.EnvMatrix(config['env_matrix']))[0] proj = BioCProjectPage(package, bioc_version, pkg_version) logger.info('Making recipe for: {}'.format(package)) @@ -895,8 +894,8 @@ def write_recipe(package, recipe_dir, config, force=False, bioc_version=None, # *has* changed, then bump the version number. 
meta_file = os.path.join(recipe_dir, 'meta.yaml') if os.path.exists(meta_file): - updated_meta = utils.load_meta(proj.meta_yaml, env) - current_meta = utils.load_meta(meta_file, env) + updated_meta = utils.load_meta(proj.meta_yaml) + current_meta = utils.load_meta(meta_file) # pop off the version and build numbers so we can compare the rest of # the dicts diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index b4b144fbe6..7f6eac0965 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -11,7 +11,7 @@ logger = logging.getLogger(__name__) -BuildResult = namedtuple("BuildResult", ["success", "mulled_image"]) +BuildResult = namedtuple("BuildResult", ["success", "mulled_images"]) def purge(): @@ -28,7 +28,7 @@ def purge(): def build( recipe, recipe_folder, - env, + pkg_paths=None, testonly=False, mulled_test=True, force=False, @@ -44,9 +44,8 @@ def build( recipe : str Path to recipe - env : dict - Environment (typically a single yielded dictionary from EnvMatrix - instance) + pkgs : list + List of packages to build testonly : bool If True, skip building and instead run the test described in the @@ -78,20 +77,20 @@ def build( _docker = docker_builder is not None whitelisted_env = {} - whitelisted_env.update({k: str(v) for k, v in os.environ.items() if utils.allowed_env_var(k, _docker)}) - whitelisted_env.update({k: str(v) for k, v in dict(env).items() if utils.allowed_env_var(k, _docker)}) + whitelisted_env.update({k: str(v) + for k, v in os.environ.items() + if utils.allowed_env_var(k, _docker)}) - logger.info( - "BUILD START %s, env: %s", - recipe, ';'.join(['='.join(map(str, i)) for i in sorted(whitelisted_env.items())]) - ) + logger.info("BUILD START %s", recipe) # --no-build-id is needed for some very long package names that triggers the 89 character limits # this option can be removed as soon as all packages are rebuild with the 255 character limit # Moreover, --no-build-id will block us from using parallel builds in conda-build 2.x # 
build_args = ["--no-build-id"] - build_args = [] + # use global variant config file (contains pinnings) + build_args = ["-e", utils.load_conda_config().exclusive_config_file, + "--skip-existing"] if testonly: build_args.append("--test") else: @@ -107,8 +106,7 @@ def build( CONDA_BUILD_CMD = [utils.bin_for('conda'), 'build'] - pkg_path = utils.built_package_path(recipe, whitelisted_env) - meta = utils.load_meta(recipe, whitelisted_env) + meta = utils.load_metadata(recipe) try: # Note we're not sending the contents of os.environ here. But we do @@ -118,16 +116,17 @@ def build( response = docker_builder.build_recipe( recipe_dir=os.path.abspath(recipe), build_args=' '.join(channel_args + build_args), - pkg=os.path.basename(pkg_path), + pkgs=list(map(os.path.basename, pkg_paths)), env=whitelisted_env, - noarch=bool(utils.get_meta_value(meta, 'build', 'noarch')) + noarch=bool(meta.get_value('build/noarch', default=False)) ) - if not os.path.exists(pkg_path): - logger.error( - "BUILD FAILED: the built package %s " - "cannot be found", pkg_path) - return BuildResult(False, None) + for pkg_path in pkg_paths: + if not os.path.exists(pkg_path): + logger.error( + "BUILD FAILED: the built package %s " + "cannot be found", pkg_path) + return BuildResult(False, None) build_success = True else: @@ -144,14 +143,11 @@ def build( build_success = True - logger.info( - 'BUILD SUCCESS %s, %s', - utils.built_package_path(recipe, whitelisted_env), utils.envstr(whitelisted_env) - ) + logger.info('BUILD SUCCESS %s', + ' '.join(os.path.basename(p) for p in pkg_paths)) except (docker_utils.DockerCalledProcessError, sp.CalledProcessError) as e: - logger.error( - 'BUILD FAILED %s, %s', recipe, utils.envstr(whitelisted_env)) + logger.error('BUILD FAILED %s', recipe) if _raise_error: raise e return BuildResult(False, None) @@ -159,24 +155,24 @@ def build( if not mulled_test: return BuildResult(True, None) - logger.info( - 'TEST START via mulled-build %s, %s', - recipe, 
utils.envstr(whitelisted_env)) + logger.info('TEST START via mulled-build %s', recipe) use_base_image = utils.get_meta_value( meta, 'extra', 'container', 'extended-base') base_image = 'bioconda/extended-base-image' if use_base_image else None - try: - res = pkg_test.test_package(pkg_path, base_image=base_image) + mulled_images = [] + for pkg_path in pkg_paths: + try: + res = pkg_test.test_package(pkg_path, base_image=base_image) - logger.info("TEST SUCCESS %s, %s", recipe, utils.envstr(whitelisted_env)) - mulled_image = pkg_test.get_image_name(pkg_path) - return BuildResult(True, mulled_image) - except sp.CalledProcessError as e: - logger.error('TEST FAILED: %s, %s', recipe, utils.envstr(whitelisted_env)) - return BuildResult(False, None) + logger.info("TEST SUCCESS %s", recipe) + mulled_image = pkg_test.get_image_name(pkg_path) + except sp.CalledProcessError as e: + logger.error('TEST FAILED: %s', recipe) + return BuildResult(False, None) + return BuildResult(True, mulled_images) def build_recipes( @@ -242,7 +238,6 @@ def build_recipes( """ orig_config = config config = utils.load_config(config) - env_matrix = utils.EnvMatrix(config['env_matrix']) blacklist = utils.get_blacklist(config['blacklists'], recipe_folder) if check_channels is None: @@ -272,9 +267,9 @@ def build_recipes( logger.info('Filtering recipes') recipe_targets = dict( utils.filter_recipes( - recipes, env_matrix, check_channels, force=force) + recipes, check_channels, force=force) ) - recipes = set(list(recipe_targets.keys())) + recipes = set(recipe_targets.keys()) dag, name2recipes = utils.get_dag(recipes, config=orig_config, blacklist=blacklist) recipe2name = {} @@ -347,7 +342,7 @@ def build_recipes( skipped_recipes.append(recipe) continue - for target in recipe_targets[recipe]: + for pkg_paths in recipe_targets[recipe]: # If a recipe depends on conda, it means it must be installed in # the root env, which is not compatible with mulled-build tests. 
In @@ -368,7 +363,7 @@ def build_recipes( res = build( recipe=recipe, recipe_folder=recipe_folder, - env=target.env, + pkg_paths=pkg_paths, testonly=testonly, mulled_test=mulled_test and keep_mulled_test, force=force, @@ -380,16 +375,17 @@ def build_recipes( recipe_success &= res.success if not res.success: - failed.append((recipe, target)) + failed.append(recipe) for n in nx.algorithms.descendants(subdag, name): skip_dependent[n].append(recipe) elif not testonly: - # upload build - if anaconda_upload: - if not upload.anaconda_upload(target.pkg, label): - failed_uploads.append(target.pkg) + for pkg in pkg_paths: + # upload build + if anaconda_upload: + if not upload.anaconda_upload(pkg, label): + failed_uploads.append(pkg) if mulled_upload_target and keep_mulled_test: - upload.mulled_upload(res.mulled_image, mulled_upload_target) + upload.mulled_upload(res.mulled_images, mulled_upload_target) # remove traces of the build purge() @@ -398,12 +394,11 @@ def build_recipes( built_recipes.append(recipe) if failed or failed_uploads: - failed_recipes = set(i[0] for i in failed) logger.error( 'BUILD SUMMARY: of %s recipes, ' '%s failed and %s were skipped. 
' 'Details of recipes and environments follow.', - len(recipes), len(failed_recipes), len(skipped_recipes)) + len(recipes), len(failed), len(skipped_recipes)) if len(built_recipes) > 0: logger.error( @@ -411,10 +406,9 @@ def build_recipes( 'the following recipes were built successfully:\n%s', '\n'.join(built_recipes)) - for recipe, target in failed: + for recipe in failed: logger.error( - 'BUILD SUMMARY: FAILED recipe %s, environment %s', - str(target), target.envstring()) + 'BUILD SUMMARY: FAILED recipe %s', recipe) for name, dep in skip_dependent.items(): logger.error( diff --git a/bioconda_utils/config.schema.yaml b/bioconda_utils/config.schema.yaml index 117a4946b7..94326ab19b 100644 --- a/bioconda_utils/config.schema.yaml +++ b/bioconda_utils/config.schema.yaml @@ -1,7 +1,5 @@ type: object properties: - env_matrix: - type: string blacklists: type: array docker_image: diff --git a/bioconda_utils/docker_utils.py b/bioconda_utils/docker_utils.py index f554d679a6..4504f17fde 100644 --- a/bioconda_utils/docker_utils.py +++ b/bioconda_utils/docker_utils.py @@ -95,28 +95,10 @@ # conda-build from building all subdirectories conda build {self.conda_build_args} {self.container_recipe}/meta.yaml 2>&1 -# Identify the output package -OUTPUT_DIR=$(dirname $(conda build {self.container_recipe}/meta.yaml --output 2> /dev/null)) -OUTPUT=$OUTPUT_DIR/{pkg} - - -# Some args to conda-build make it run and exit 0 without creating a package -# (e.g., -h or --skip-existing), so check to see if there's anything to copy -# over first. -if [[ -e $OUTPUT ]]; then - - # Copy over the recipe from where the container built it to the mounted - # conda-bld dir from the host. The arch will be either linux-64 or noarch. - cp $OUTPUT {self.container_staging}/{arch} - - conda index {self.container_staging}/{arch} > /dev/null 2>&1 - - # Ensure permissions are correct on the host. 
- HOST_USER={self.user_info[uid]} - chown $HOST_USER:$HOST_USER {self.container_staging}/{arch}/$(basename $OUTPUT) - chown $HOST_USER:$HOST_USER {self.container_staging}/{arch}/{{repodata.json,repodata.json.bz2,.index.json}} - -fi +cp `conda build {self.container_recipe}/meta.yaml --output` {self.container_staging}/{arch} +# Ensure permissions are correct on the host. +HOST_USER={self.user_info[uid]} +chown $HOST_USER:$HOST_USER {self.container_staging}/{arch}/* """ @@ -418,7 +400,7 @@ def _build_image(self, image_build_dir): shutil.rmtree(build_dir) return p - def build_recipe(self, recipe_dir, build_args, env, pkg, noarch=False): + def build_recipe(self, recipe_dir, build_args, env, noarch=False): """ Build a single recipe. @@ -435,8 +417,6 @@ def build_recipe(self, recipe_dir, build_args, env, pkg, noarch=False): env : dict Environmental variables - pkg : filename of the desired package (e.g. obtained by utils.built_package_path) - noarch: bool Has to be set to true if this is a noarch build @@ -455,7 +435,7 @@ def build_recipe(self, recipe_dir, build_args, env, pkg, noarch=False): build_dir = os.path.realpath(tempfile.mkdtemp()) with open(os.path.join(build_dir, 'build_script.bash'), 'w') as fout: fout.write(self.build_script_template.format( - self=self, pkg=pkg, arch='noarch' if noarch else 'linux-64')) + self=self, arch='noarch' if noarch else 'linux-64')) build_script = fout.name logger.debug('DOCKER: Container build script: \n%s', open(fout.name).read()) diff --git a/bioconda_utils/example_config.yaml b/bioconda_utils/example_config.yaml index d8964d63f3..f69b20d0aa 100644 --- a/bioconda_utils/example_config.yaml +++ b/bioconda_utils/example_config.yaml @@ -1,4 +1,3 @@ -env_matrix: example_env_matrix.yml requirements: requirements.txt blacklists: - r-blacklist diff --git a/bioconda_utils/linting.py b/bioconda_utils/linting.py index d15f76dc7a..7bb041f8aa 100644 --- a/bioconda_utils/linting.py +++ b/bioconda_utils/linting.py @@ -96,12 +96,10 @@ def 
get_meta(recipe, config): # TODO: Currently just uses the first env. Should turn this into # a generator. - env = dict(next(iter(utils.EnvMatrix(cfg['env_matrix'])))) - pth = os.path.join(recipe, 'meta.yaml') jinja_env = jinja2.Environment() content = jinja_env.from_string( - open(pth, 'r', encoding='utf-8').read()).render(env) + open(pth, 'r', encoding='utf-8').read()).render() meta = yaml.round_trip_load(content, preserve_quotes=True) return meta diff --git a/bioconda_utils/pypi.py b/bioconda_utils/pypi.py index 9c9997384e..c17a432a71 100644 --- a/bioconda_utils/pypi.py +++ b/bioconda_utils/pypi.py @@ -5,7 +5,7 @@ from . import linting -def compare_recipe_to_pypi(recipe, env): +def compare_recipe_to_pypi(recipe): """ If it looks like a PyPI package, returns a tuple of (name, current_bioconda_version, latest_version_on_PyPI, needs_update). @@ -16,7 +16,7 @@ def compare_recipe_to_pypi(recipe, env): "python-wget") then a tuple is returned but with a value of None for the latest version on PyPI and None for needs_update. 
""" - meta = utils.load_meta(os.path.join(recipe, 'meta.yaml'), env) + meta = utils.load_meta(os.path.join(recipe, 'meta.yaml')) current = meta['package']['version'] name = meta['package']['name'] @@ -70,10 +70,9 @@ def check_all(recipe_folder, config, packages='*'): # Only consider the latest version we can find here recipes = list(utils.get_latest_recipes(recipe_folder, config, packages)) config = utils.load_config(config) - env = list(utils.EnvMatrix(config['env_matrix']))[0] for recipe in recipes: - result = compare_recipe_to_pypi(recipe, env) + result = compare_recipe_to_pypi(recipe) if not result: continue diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 0e554cdddb..0e89eb34b2 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -19,6 +19,7 @@ from distutils.version import LooseVersion import time import threading +from pathlib import PurePath from conda_build import api from conda_build.metadata import MetaData @@ -166,27 +167,33 @@ def load_all_meta(recipe, config): """ For each environment, yield the rendered meta.yaml. """ - cfg = load_config(config) - env_matrix = EnvMatrix(cfg['env_matrix']) - for env in env_matrix: - yield load_meta(recipe, env) + return api.render(recipe) -def load_meta(recipe, env): +def load_conda_config(): + """ + Load conda config while considering global pinnings from conda-forge. + """ + config = api.Config( + no_download_source=True, + set_build_id=False) + + # get environment root + env_root = PurePath(shutil.which("bioconda-utils")).parents[1] + # set path to pinnings from conda forge package + config.exclusive_config_file = os.path.join(env_root, + "conda_build_config.yaml") + return config + + +def load_metadata(recipe): """ Load metadata for a specific environment. """ - with temp_env(env): - # Disabling set_build_id prevents the creation of uniquely-named work - # directories just for checking the output file. 
- # It needs to be done within the context manager so that it sees the - # os.environ. - config = api.Config( - no_download_source=True, - set_build_id=False) - meta = MetaData(recipe, config=config) - meta.parse_again() - return meta.meta + config = load_conda_config() + meta = MetaData(recipe, config=config) + meta.parse_again() + return meta @contextlib.contextmanager @@ -464,7 +471,6 @@ def toplevel(x): recipe_folder, '').strip(os.path.sep).split(os.path.sep)[0] config = load_config(config) - env = list(EnvMatrix(config['env_matrix']))[0] recipes = sorted(get_recipes(recipe_folder, package), key=toplevel) for package, group in groupby(recipes, key=toplevel): @@ -474,7 +480,7 @@ def toplevel(x): else: def get_version(p): return VersionOrder( - load_meta(os.path.join(p, 'meta.yaml'), env)['package']['version'] + load_meta(os.path.join(p, 'meta.yaml'))['package']['version'] ) sorted_versions = sorted(group, key=get_version) if sorted_versions: @@ -568,62 +574,18 @@ def _string_or_float_to_integer_python(s): return s -def built_package_path(recipe, env=None): +def built_package_paths(recipe): """ Returns the path to which a recipe would be built. Does not necessarily exist; equivalent to `conda build --output recipename` but without the subprocess. """ - if env is None: - env = {} - env = dict(env) - - # Ensure CONDA_PY is an integer (needed by conda-build 2.0.4) - py = env.get('CONDA_PY', None) - env = dict(env) - if py is not None: - env['CONDA_PY'] = _string_or_float_to_integer_python(py) - - with temp_env(env): - # Disabling set_build_id prevents the creation of uniquely-named work - # directories just for checking the output file. - # It needs to be done within the context manager so that it sees the - # os.environ. 
- config = api.Config( - no_download_source=True, - set_build_id=False) - meta = MetaData(recipe, config=config) - meta.parse_again() - paths = api.get_output_file_paths(meta, config=config) - assert len(paths) == 1, ('Bug: conda build returns multiple output ' - 'file paths. This is unexpected since we ' - 'configure it with one combination of ' - 'pinned versions.') - path = paths[0] - return path - - -class Target: - def __init__(self, pkg, env): - """ - Class to represent a package built with a particular environment - (e.g. from EnvMatirix). - """ - self.pkg = pkg - self.env = env - - def __hash__(self): - return self.pkg.__hash__() - - def __eq__(self, other): - return self.pkg == other.pkg - - def __str__(self): - return os.path.basename(self.pkg) - - def envstring(self): - return ';'.join(['='.join([i, str(j)]) for i, j in self.env]) + config = load_conda_config() + meta = load_metadata(recipe) + paths = api.get_output_file_paths(meta, config=config) + assert paths, "bug: empty list of paths returned" + return paths def last_commit_to_master(): @@ -726,86 +688,70 @@ def changed_since_master(recipe_folder): ] -def filter_recipes(recipes, env_matrix, channels=None, force=False): +def filter_recipes(recipes, channels=None, force=False): """ - Generator yielding only those recipes that should be built. + Generator yielding only those (recipe, pkgs) that should be built. Parameters ---------- recipes : iterable Iterable of candidate recipes - env_matrix : str, dict, or EnvMatrix - If str or dict, create an EnvMatrix; if EnvMatrix already use it as-is. - channels : None or list Optional list of channels to check for existing recipes force : bool Build the package even if it is already available in supplied channels. 
""" - if not isinstance(env_matrix, EnvMatrix): - env_matrix = EnvMatrix(env_matrix) - if channels is None: channels = [] - channel_packages = defaultdict(set) + channel_packages = set() for channel in channels: - channel_packages[channel].update(get_channel_packages(channel=channel)) - - def tobuild(recipe, env): - pkg = os.path.basename(built_package_path(recipe, env)) - - in_channels = [ - channel for channel, pkgs in channel_packages.items() - if pkg in pkgs - ] - if in_channels and not force: + channel_packages.update(get_channel_packages(channel=channel)) + + def tobuild(recipe): + # with temp_os, we can fool the MetaData if needed. + platform = os.environ.get('OSTYPE', sys.platform) + if platform.startswith("darwin"): + platform = 'darwin' + elif platform == "linux-gnu": + platform = "linux" + + with temp_os(platform): + meta = load_metadata(recipe) + if meta.skip(): + logger.debug( + 'FILTER: not building %s because ' + 'it defines skip', pkg) + return [] + + # If on CI, handle noarch. + if os.environ.get('CI', None) == 'true': + if meta.get_value('build/noarch'): + if platform != 'linux': + logger.debug('FILTER: only building %s on ' + 'linux because it defines noarch.', + pkg) + return [] + + # get all packages that would be built + pkgs = list(map(os.path.basename, built_package_paths(recipe))) + # check which ones exist already + existing = channel_packages & pkgs + + for pkg in existing: logger.debug( 'FILTER: not building %s because ' - 'it is in channel(s) and it is not forced: %s', pkg, - in_channels) - return False - - # with temp_env, MetaData will see everything in env added to - # os.environ. - with temp_env(env): - - # with temp_os, we can fool the MetaData if needed. 
- platform = os.environ.get('OSTYPE', sys.platform) - if platform.startswith("darwin"): - platform = 'darwin' - elif platform == "linux-gnu": - platform = "linux" - - with temp_os(platform): - meta = MetaData(recipe) - if meta.skip(): - logger.debug( - 'FILTER: not building %s because ' - 'it defines skip for this env', pkg) - return False - - # If on CI, handle noarch. - if os.environ.get('CI', None) == 'true': - if meta.get_value('build/noarch'): - if platform != 'linux': - logger.debug('FILTER: only building %s on ' - 'linux because it defines noarch.', - pkg) - return False - - assert not pkg.endswith("_.tar.bz2"), ( - "rendered path {} does not " - "contain a build number and recipe does not " - "define skip for this environment. " - "This is a conda bug.".format(pkg)) - - logger.debug( - 'FILTER: building %s because it is not in channels and ' - 'does not define skip', pkg) - return True + 'it is in channel(s) and it is not forced.', pkg) + for pkg in pkgs: + assert not pkg.endswith("_.tar.bz2"), ( + "rendered path {} does not " + "contain a build number and recipe does not " + "define skip for this environment. " + "This is a conda bug.".format(pkg)) + # yield all pkgs that do not yet exist + return pkgs - existing logger.debug('recipes: %s', recipes) recipes = list(recipes) @@ -821,13 +767,9 @@ def tobuild(recipe, env): for i, recipe in enumerate(sorted(recipes)): perc = (i + 1) / nrecipes * 100 print(template.format(i + 1, nrecipes, perc, recipe), end='') - targets = set() - for env in env_matrix: - pkg = built_package_path(recipe, env) - if tobuild(recipe, env): - targets.update([Target(pkg, env)]) - if targets: - yield recipe, targets + pkgs = tobuild(recipe) + if pkgs: + yield recipe, pkgs print(end='\r') except sp.CalledProcessError as e: logger.debug(e.stdout) @@ -872,8 +814,7 @@ def validate_config(config): def load_config(path): """ - Parses config file, building paths to relevant blacklists and loading any - specified env_matrix files. 
+ Parses config file, building paths to relevant blacklists Parameters ---------- @@ -897,15 +838,11 @@ def get_list(key): return value default_config = { - 'env_matrix': {'CONDA_PY': 35}, 'blacklists': [], 'channels': [], 'requirements': None, 'upload_channel': 'bioconda' } - if 'env_matrix' in config: - if isinstance(config['env_matrix'], str): - config['env_matrix'] = relpath(config['env_matrix']) if 'blacklists' in config: config['blacklists'] = [relpath(p) for p in get_list('blacklists')] if 'channels' in config: diff --git a/docs/source/build-system.rst b/docs/source/build-system.rst index daf4be9360..4d2cbc4efc 100644 --- a/docs/source/build-system.rst +++ b/docs/source/build-system.rst @@ -88,10 +88,7 @@ Configure the environment run in the container. - At the end of the build, the build script copies the package to the exported conda-bld directory - - ``bioconda-recipes: scripts/env_matrix.yml``: each unique combination of - env vars defined here will create an independent build - - A whitelist of env vars (including those defined in the - ``env_matrix.yml``) is exported. The whitelist is configured in + - A whitelist of env vars is exported. The whitelist is configured in ``bioconda-utils: utils.py``. - Upon successfully building and testing via ``conda-build``, the built package diff --git a/docs/source/faqs.rst b/docs/source/faqs.rst index 2d61d2ad34..812d7ac67c 100644 --- a/docs/source/faqs.rst +++ b/docs/source/faqs.rst @@ -71,38 +71,16 @@ a local test passes while the online test fails. Nevertheless, the local test should capture most problems, such that it is highly encouraged to first run a local test in order to save quota on Circle CI. -How are environmental variables defined and used? +How are dependencies pinned to particular versions? ------------------------------------------------- -In some cases a recipe may need to pin the version of a dependency. 
Jinja2 -templating is used within recipes to use a uniform set of versions for core -packages used by bioconda packages. For example, see this `meta.yaml -`_ -that uses a variable to hold the current GSL (GNU Scientific Library) version -supported by bioconda. - -The currently defined dependencies are defined in `scripts/env_matrix.yml` and -are sent to `conda-build` by setting them as environment variables. More -specifically: - -- `config.yml` indicates an `env_matrix` file in which CONDA_GSL is defined - - `config.yaml` example - `_ pointing to file - - - `env_matrix.yml` example - ` defining CONDA_GSL. - -- When figuring out which recipes need to be built, the filtering step attaches - each unique env to a Target object. For example, one env might be - `CONDA_GSL=1.6; CONDA_PY=27, CONDA_R=3.3.1;` while a different env would be - `CONDA_GSL=1.6; CONDA_PY=35, CONDA_R=3.3.1;`. - -- That env is provided to the build function which is either sent directly to - docker as environment variables, or used to temporarily update os.environ so - that conda-build sees it. - -- These environment variables are then seen by conda-build and used to fill in - the templated variables via jinja2. +In some cases a recipe may need to pin the version of a dependency. +A global set of default versions to pin against is shared with conda-forge and +can be found `here `_. +For new dependencies that are contained in conda-forge and not yet in this list, +please update the list via a pull request. +Local pinnings can be achieved by adding a file ``conda_build_config.yaml`` next +to your ``meta.yaml``. To find out against which version you can pin a package, e.g. x.y.* or x.* please use [ABI-Laboratory](https://abi-laboratory.pro/tracker/). 
diff --git a/docs/source/guidelines.rst b/docs/source/guidelines.rst index e2538ea899..b7656e9c40 100644 --- a/docs/source/guidelines.rst +++ b/docs/source/guidelines.rst @@ -364,10 +364,9 @@ example, bioconda provides an environnmnet variable ``CONDA_BOOST`` that contains the current major version of Boost. You should pin your boost dependency against that version. An example is the `salmon recipe `_. -You find the libraries you can currently pin in `scripts/env\_matrix.yml -`_. -If you need to pin another library, please notify @bioconda/core, and we will -set up a corresponding environment variable. +You find the libraries that are already pinned `here +`_. +If you need to pin another library, please notify @bioconda/core, and we will extend this list. It's not uncommon to have difficulty compiling package into a portable conda package. Since there is no single solution, here are some examples diff --git a/test/helpers.py b/test/helpers.py index c32f288a67..72b04077a0 100644 --- a/test/helpers.py +++ b/test/helpers.py @@ -4,7 +4,6 @@ import os import subprocess as sp from conda_build.metadata import MetaData -from bioconda_utils.utils import built_package_path def ensure_missing(package): @@ -28,25 +27,6 @@ def ensure_missing(package): sp.check_call(['conda', 'index', os.path.dirname(package)]) -def tmp_env_matrix(): - tmp = tempfile.NamedTemporaryFile(delete=False).name - with open(tmp, 'w') as fout: - fout.write(dedent( - """\ - CONDA_PY: - - "27" - - "35" - CONDA_BOOST: "1.60" - CONDA_R: "3.4.1" - CONDA_PERL: "5.22.0" - CONDA_NPY: "110" - CONDA_NCURSES: "5.9" - CONDA_GSL: "1.16" - CONDA_GMP: "5.1" - """)) - return tmp - - class Recipes(object): def __init__(self, data, from_string=False): """ diff --git a/test/test_bioconductor_skeleton.py b/test/test_bioconductor_skeleton.py index d80039987b..3d6b585640 100644 --- a/test/test_bioconductor_skeleton.py +++ b/test/test_bioconductor_skeleton.py @@ -8,9 +8,8 @@ import helpers -env_matrix = 
helpers.tmp_env_matrix() + config = { - 'env_matrix': env_matrix, 'channels': ['bioconda', 'conda-forge', 'defaults'] } @@ -62,9 +61,7 @@ def test_bioc_write_recipe_no_skipping(tmpdir): def test_meta_contents(tmpdir): - env_matrix = helpers.tmp_env_matrix() config = { - 'env_matrix': env_matrix, 'channels': ['bioconda', 'conda-forge', 'defaults'] } bioconductor_skeleton.write_recipe( diff --git a/test/test_pkg_test.py b/test/test_pkg_test.py index 1cc331c4ed..89f69ed17e 100644 --- a/test/test_pkg_test.py +++ b/test/test_pkg_test.py @@ -4,7 +4,7 @@ import pytest -from helpers import Recipes, ensure_missing, tmp_env_matrix +from helpers import Recipes, ensure_missing from bioconda_utils import pkg_test from bioconda_utils import utils from bioconda_utils import build @@ -50,17 +50,17 @@ def _build_pkg(recipe, mulled_test=False): r = Recipes(recipe, from_string=True) r.write_recipes() - env_matrix = list(utils.EnvMatrix(tmp_env_matrix()))[0] recipe = r.recipe_dirs['one'] - built_package = utils.built_package_path(recipe) - ensure_missing(built_package) + built_packages = utils.built_package_paths(recipe) + for pkg in built_packages: + ensure_missing(pkg) build.build( recipe=r.recipe_dirs['one'], recipe_folder='.', - env=env_matrix, + pkg_paths=built_packages, mulled_test=mulled_test, ) - return built_package + return built_packages @pytest.mark.skipif(SKIP_OSX, reason='skipping on osx') @@ -68,8 +68,9 @@ def test_pkg_test(): """ Running a mulled-build test shouldn't cause any errors. """ - built_package = _build_pkg(RECIPE_ONE) - res = pkg_test.test_package(built_package) + built_packages = _build_pkg(RECIPE_ONE) + for pkg in built_packages: + pkg_test.test_package(pkg) @pytest.mark.skipif(SKIP_OSX, reason='skipping on osx') @@ -77,9 +78,10 @@ def test_pkg_test_mulled_build_error(): """ Make sure calling mulled-build with the wrong arg fails correctly. 
""" - built_package = _build_pkg(RECIPE_ONE) + built_packages = _build_pkg(RECIPE_ONE) with pytest.raises(sp.CalledProcessError): - res = pkg_test.test_package(built_package, mulled_args='--wrong-arg') + for pkg in built_packages: + pkg_test.test_packages(pkg, mulled_args='--wrong-arg') @pytest.mark.skipif(SKIP_OSX, reason='skipping on osx') @@ -87,8 +89,9 @@ def test_pkg_test_custom_base_image(): """ Running a mulled-build test with a custom base image. """ - build_package = _build_pkg(RECIPE_CUSTOM_BASE) - res = pkg_test.test_package(build_package, base_image='debian:latest') + built_packages = _build_pkg(RECIPE_CUSTOM_BASE) + for pkg in built_packages: + pkg_test.test_package(pkg, base_image='debian:latest') @pytest.mark.skipif(SKIP_OSX, reason="skipping on osx") @@ -115,5 +118,6 @@ def test_pkg_test_conda_image(): /opt/conda/bin/conda --version > /usr/local/conda-version fi """) - build_package = _build_pkg(recipe) - pkg_test.test_package(build_package, conda_image="continuumio/miniconda3:4.3.11") + built_packages = _build_pkg(recipe) + for pkg in built_packages: + pkg_test.test_package(pkg, conda_image="continuumio/miniconda3:4.3.11") diff --git a/test/test_utils.py b/test/test_utils.py index 5229c18eb7..a54f9a7ea7 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -17,7 +17,7 @@ from bioconda_utils import cli from bioconda_utils import build from bioconda_utils import upload -from helpers import ensure_missing, Recipes, tmp_env_matrix +from helpers import ensure_missing, Recipes from conda_build import api from conda_build.metadata import MetaData @@ -73,10 +73,11 @@ def recipes_fixture(): r.write_recipes() r.pkgs = {} for k, v in r.recipe_dirs.items(): - r.pkgs[k] = utils.built_package_path(v) + r.pkgs[k] = utils.built_package_paths(v) yield r - for v in r.pkgs.values(): - ensure_missing(v) + for pkgs in r.pkgs.values(): + for pkg in pkgs: + ensure_missing(pkg) @pytest.fixture(scope='module', params=PARAMS, ids=IDS) @@ -84,7 +85,6 @@ def 
single_build(request, recipes_fixture): """ Builds the "one" recipe. """ - env_matrix = list(utils.EnvMatrix(tmp_env_matrix()))[0] if request.param: docker_builder = docker_utils.RecipeBuilder(use_host_conda_bld=True) mulled_test = True @@ -94,13 +94,13 @@ def single_build(request, recipes_fixture): build.build( recipe=recipes_fixture.recipe_dirs['one'], recipe_folder='.', + pkg_paths=recipes_fixture.pkgs['one'], docker_builder=docker_builder, - env=env_matrix, mulled_test=mulled_test, ) - built_package = recipes_fixture.pkgs['one'] - yield built_package - ensure_missing(built_package) + yield recipes_fixture.pkgs['one'] + for pkg in recipes_fixture.pkgs['one']: + ensure_missing(pkg) @@ -125,8 +125,9 @@ def multi_build(request, recipes_fixture): ) built_packages = recipes_fixture.pkgs yield built_packages - for v in built_packages.values(): - ensure_missing(v) + for pkgs in built_packages.values(): + for pkg in pkgs: + ensure_missing(pkg) @pytest.fixture(scope='module') @@ -147,16 +148,14 @@ def single_upload(): '''.format(name), from_string=True) r.write_recipes() - env_matrix = list(utils.EnvMatrix(tmp_env_matrix()))[0] build.build( recipe=r.recipe_dirs[name], recipe_folder='.', + pkg_paths=r.pkgs[name], docker_builder=None, - mulled_test=False, - env=env_matrix, + mulled_test=False ) - - pkg = utils.built_package_path(r.recipe_dirs[name]) + pkg = r.pkgs[name][0] upload.anaconda_upload(pkg, label=TEST_LABEL) @@ -206,10 +205,11 @@ def test_docker_builder_build(recipes_fixture): Tests just the build_recipe method of a RecipeBuilder object. 
""" docker_builder = docker_utils.RecipeBuilder(use_host_conda_bld=True) - pkg = os.path.basename(recipes_fixture.pkgs['one']) + pkgs = list(map(os.path.basename, recipes_fixture.pkgs['one'])) docker_builder.build_recipe( - recipes_fixture.recipe_dirs['one'], build_args='', pkg=pkg, env={}) - assert os.path.exists(recipes_fixture.pkgs['one']) + recipes_fixture.recipe_dirs['one'], build_args='', pkgs=pkgs, env={}) + for pkg in pkgs: + assert os.path.exists(pkg) @pytest.mark.skipif(SKIP_DOCKER_TESTS, reason='skipping on osx') @@ -311,31 +311,6 @@ def test_conda_as_dep(): assert build_result -def test_env_matrix(): - contents = { - 'CONDA_PY': [27, 35], - 'CONDA_BOOST': '1.60' - } - - with open(tempfile.NamedTemporaryFile().name, 'w') as fout: - fout.write(yaml.dump(contents, default_flow_style=False)) - - e1 = utils.EnvMatrix(contents) - e2 = utils.EnvMatrix(fout.name) - assert e1.env == e2.env - assert sorted( - [sorted(i) for i in e1]) == sorted([sorted(i) for i in e2]) == [ - [ - ('CONDA_BOOST', '1.60'), - ('CONDA_PY', 27), - ], - [ - ('CONDA_BOOST', '1.60'), - ('CONDA_PY', 35), - ] - ] - - def test_filter_recipes_no_skipping(): """ No recipes have skip so make sure none are filtered out. 
@@ -349,14 +324,10 @@ def test_filter_recipes_no_skipping(): version: "0.1" """, from_string=True) r.write_recipes() - env_matrix = { - 'CONDA_PY': [27, 35], - 'CONDA_BOOST': '1.60' - } recipes = list(r.recipe_dirs.values()) assert len(recipes) == 1 filtered = list( - utils.filter_recipes(recipes, env_matrix, channels=['bioconda'])) + utils.filter_recipes(recipes, channels=['bioconda'])) assert len(filtered) == 1 @@ -375,68 +346,16 @@ def test_filter_recipes_skip_is_true(): skip: true """, from_string=True) r.write_recipes() - env_matrix = {} recipes = list(r.recipe_dirs.values()) filtered = list( - utils.filter_recipes(recipes, env_matrix)) + utils.filter_recipes(recipes)) assert len(filtered) == 0 -def test_filter_recipes_skip_py27(): - """ - When we add build/skip = True # [py27] to recipe, it should not be - filtered out. This is because python version is not encoded in the output - package name, and so one-0.1-0.tar.bz2 will still be created for py35. - """ - r = Recipes( - """ - one: - meta.yaml: | - package: - name: one - version: "0.1" - build: - skip: true # [py27] - """, from_string=True) - r.write_recipes() - env_matrix = { - 'CONDA_PY': [27, 35], - 'CONDA_BOOST': '1.60' - } - recipes = list(r.recipe_dirs.values()) - filtered = list( - utils.filter_recipes(recipes, env_matrix, channels=['bioconda'])) - assert len(filtered) == 1 - - -def test_filter_recipes_skip_py27_in_build_string(): +def test_filter_recipes_skip_not_py27(): """ - When CONDA_PY is in the build string, py27 should be skipped + When all but one Python version is skipped, filtering should do that. 
""" - r = Recipes( - """ - one: - meta.yaml: | - package: - name: one - version: "0.1" - requirements: - build: - - python - run: - - python - """, from_string=True) - r.write_recipes() - env_matrix = { - 'CONDA_PY': [27, 35], - } - recipes = list(r.recipe_dirs.values()) - filtered = list( - utils.filter_recipes(recipes, env_matrix, channels=['bioconda'])) - - # one recipe, two targets - assert len(filtered) == 1 - assert len(filtered[0][1]) == 2 r = Recipes( """ @@ -446,7 +365,7 @@ def test_filter_recipes_skip_py27_in_build_string(): name: one version: "0.1" build: - skip: True # [py27] + skip: True # [not py27] requirements: build: - python @@ -454,47 +373,15 @@ def test_filter_recipes_skip_py27_in_build_string(): - python """, from_string=True) r.write_recipes() - env_matrix = { - 'CONDA_PY': [27, 35], - } recipes = list(r.recipe_dirs.values()) filtered = list( - utils.filter_recipes(recipes, env_matrix, channels=['bioconda'])) + utils.filter_recipes(recipes, channels=['bioconda'])) # one recipe, one target assert len(filtered) == 1 assert len(filtered[0][1]) == 1 -def test_filter_recipes_extra_in_build_string(): - """ - If CONDA_EXTRA is in os.environ, the pkg name should still be identifiable. 
- - This helps test env vars that don't have other defaults like CONDA_PY does - (e.g., CONDA_BOOST in bioconda) - """ - r = Recipes( - """ - one: - meta.yaml: | - package: - name: one - version: "0.1" - build: - number: 0 - string: {{CONDA_EXTRA}}_{{PKG_BUILDNUM}} - """, from_string=True) - r.write_recipes() - recipe = r.recipe_dirs['one'] - - env = { - 'CONDA_EXTRA': 'asdf', - } - pkg = os.path.basename(utils.built_package_path(recipe, env)) - - assert os.path.basename(pkg) == 'one-0.1-asdf_0.tar.bz2' - - def test_filter_recipes_existing_package(): "use a known-to-exist package in bioconda" @@ -515,43 +402,10 @@ def test_filter_recipes_existing_package(): """, from_string=True) r.write_recipes() recipes = list(r.recipe_dirs.values()) - env_matrix = { - 'CONDA_PY': [27, 35], - } pkgs = utils.get_channel_packages('bioconda') pth = utils.built_package_path(recipes[0]) filtered = list( - utils.filter_recipes(recipes, env_matrix, channels=['bioconda'])) - assert len(filtered) == 0 - - -def test_filter_recipes_custom_buildstring(): - "use a known-to-exist package in bioconda" - - # note that we need python as a run requirement in order to get the "pyXY" - # in the build string that matches the existing bioconda built package. 
- r = Recipes( - """ - one: - meta.yaml: | - package: - name: pindel - version: "0.2.5b8" - build: - number: 2 - skip: True # [osx] - string: "htslib{{CONDA_HTSLIB}}_{{PKG_BUILDNUM}}" - requirements: - run: - - python - """, from_string=True) - r.write_recipes() - recipes = list(r.recipe_dirs.values()) - env_matrix = { - 'CONDA_HTSLIB': "1.4", - } - filtered = list( - utils.filter_recipes(recipes, env_matrix, channels=['bioconda'])) + utils.filter_recipes(recipes, channels=['bioconda'])) assert len(filtered) == 0 @@ -573,14 +427,11 @@ def test_filter_recipes_force_existing_package(): """, from_string=True) r.write_recipes() recipes = list(r.recipe_dirs.values()) - env_matrix = { - 'CONDA_PY': [27, 35], - } pkgs = utils.get_channel_packages('bioconda') pth = utils.built_package_path(recipes[0]) filtered = list( utils.filter_recipes( - recipes, env_matrix, channels=['bioconda'], force=True)) + recipes, channels=['bioconda'], force=True)) assert len(filtered) == 1 @@ -600,9 +451,9 @@ def test_built_package_path(): version: "0.1" requirements: build: - - python + - python 3.6 run: - - python + - python 3.6 two: meta.yaml: | @@ -616,60 +467,8 @@ def test_built_package_path(): r.write_recipes() assert os.path.basename( - utils.built_package_path(r.recipe_dirs['one'], env=dict(CONDA_PY=36)) - ) == 'one-0.1-py36_0.tar.bz2' - - # resetting with a different CONDA_PY passed as env dict - assert os.path.basename( - utils.built_package_path(r.recipe_dirs['one'], env=dict(CONDA_PY=27)) - ) == 'one-0.1-py27_0.tar.bz2' - - # resetting CONDA_PY using os.environ - existing_env = dict(os.environ) - try: - os.environ['CONDA_PY'] = '27' - assert os.path.basename( - utils.built_package_path(r.recipe_dirs['one']) - ) == 'one-0.1-py27_0.tar.bz2' - os.environ = existing_env - except: - os.environ = existing_env - raise - - -def test_built_package_path2(): - r = Recipes( - """ - one: - meta.yaml: | - package: - name: one - version: "0.1" - requirements: - run: - - python - - two: - 
meta.yaml: | - package: - name: two - version: "0.1" - build: - number: 0 - string: ncurses{{ CONDA_NCURSES }}_{{ PKG_BUILDNUM }} - """, from_string=True) - r.write_recipes() - - os.environ['CONDA_NCURSES'] = '9.0' - assert os.path.basename( - utils.built_package_path(r.recipe_dirs['two'], env=os.environ) - ) == 'two-0.1-ncurses9.0_0.tar.bz2' - - del os.environ['CONDA_NCURSES'] - assert os.path.basename( - utils.built_package_path( - r.recipe_dirs['two'], env=dict(CONDA_NCURSES='9.0')) - ) == 'two-0.1-ncurses9.0_0.tar.bz2' + utils.built_package_paths(r.recipe_dirs['one']) + ) == ['one-0.1-py36_0.tar.bz2'] def test_string_or_float_to_integer_python(): @@ -690,6 +489,7 @@ def test_rendering_sandboxing(): """, from_string=True) r.write_recipes() + pkg_paths = utils.built_package_path(r.recipe_dirs['one']) env = { # First one is allowed, others are not 'CONDA_ARBITRARY_VAR': 'conda-val-here', @@ -712,7 +512,7 @@ def test_rendering_sandboxing(): res = build.build( recipe=r.recipe_dirs['one'], recipe_folder='.', - env=env, + pkg_paths=pkg_paths, mulled_test=False, _raise_error=True, ) @@ -724,7 +524,7 @@ def test_rendering_sandboxing(): res = build.build( recipe=r.recipe_dirs['one'], recipe_folder='.', - env=env, + pkg_paths=pkg_paths, mulled_test=False ) assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value) @@ -741,21 +541,24 @@ def test_rendering_sandboxing(): """, from_string=True) r.write_recipes() - pkg = utils.built_package_path(r.recipe_dirs['two'], env=env) - ensure_missing(pkg) + pkg_paths = utils.built_package_path(r.recipe_dirs['two']) + for pkg in pkg_paths: + ensure_missing(pkg) + res = build.build( recipe=r.recipe_dirs['two'], recipe_folder='.', - env=env, + pkg_paths=pkg_paths, mulled_test=False ) - t = tarfile.open(pkg) - tmp = tempfile.mkdtemp() - target = 'info/recipe/meta.yaml' - t.extract(target, path=tmp) - contents = yaml.load(open(os.path.join(tmp, target)).read()) - assert contents['extra']['var2'] == 'conda-val-here', contents + for pkg in 
pkg_paths: + t = tarfile.open(pkg) + tmp = tempfile.mkdtemp() + target = 'info/recipe/meta.yaml' + t.extract(target, path=tmp) + contents = yaml.load(open(os.path.join(tmp, target)).read()) + assert contents['extra']['var2'] == 'conda-val-here', contents def test_sandboxed(): @@ -772,6 +575,7 @@ def test_sandboxed(): assert 'GITHUB_TOKEN' not in os.environ assert 'BUILDKITE_TOKEN' not in os.environ + def test_env_sandboxing(): r = Recipes( """ @@ -791,16 +595,19 @@ def test_env_sandboxing(): fi """, from_string=True) r.write_recipes() + pkg_paths = utils.built_package_paths(r.recipe_dirs['one']) - build.build( - recipe=r.recipe_dirs['one'], - recipe_folder='.', - env={'GITHUB_TOKEN': 'token_here'}, - mulled_test=False - ) - pkg = utils.built_package_path(r.recipe_dirs['one']) - assert os.path.exists(pkg) - ensure_missing(pkg) + with utils.temp_env({'GITHUB_TOKEN': 'token_here'}): + build.build( + recipe=r.recipe_dirs['one'], + recipe_folder='.', + pkg_paths=pkg_paths, + mulled_test=False + ) + + for pkg in pkg_paths: + assert os.path.exists(pkg) + ensure_missing(pkg) def test_skip_dependencies(): @@ -834,10 +641,11 @@ def test_skip_dependencies(): r.write_recipes() pkgs = {} for k, v in r.recipe_dirs.items(): - pkgs[k] = utils.built_package_path(v) + pkgs[k] = utils.built_package_paths(v) - for p in pkgs.values(): - ensure_missing(p) + for pkgs in pkgs.values(): + for pkg in pkgs: + ensure_missing(p) build.build_recipes( r.basedir, @@ -847,13 +655,17 @@ def test_skip_dependencies(): force=False, mulled_test=False, ) - assert os.path.exists(pkgs['one']) - assert not os.path.exists(pkgs['two']) - assert not os.path.exists(pkgs['three']) + for pkg in pkgs['one']: + assert os.path.exists(pkg) + for pkg in pkgs['two']: + assert not os.path.exists(pkg) + for pkg in pkgs['three']: + assert not os.path.exists(pkg) # clean up - for p in pkgs.values(): - ensure_missing(p) + for pkgs in pkgs.values(): + for pkg im pkgs: + ensure_missing(pkg) class TestSubdags(object): @@ 
-877,7 +689,7 @@ def test_zero_packages(): Regression test; make sure filter_recipes exits cleanly if no recipes were provided. """ - assert list(utils.filter_recipes([], {'CONDA_PY': [27, 35]})) == [] + assert list(utils.filter_recipes([])) == [] @pytest.mark.skipif(SKIP_DOCKER_TESTS, reason='skipping on osx') @@ -894,17 +706,18 @@ def test_build_empty_extra_container(): # empty """, from_string=True) r.write_recipes() + pkg_paths = utils.built_package_paths(r.recipe_dirs['one']) build_result = build.build( recipe=r.recipe_dirs['one'], recipe_folder='.', - env={}, + pkg_paths=pkg_paths, mulled_test=True, ) assert build_result.success - pkg = utils.built_package_path(r.recipe_dirs['one']) - assert os.path.exists(pkg) - ensure_missing(pkg) + for pkg in pkgs: + assert os.path.exists(pkg) + ensure_missing(pkg) @pytest.mark.skipif(SKIP_DOCKER_TESTS, reason='skipping on osx') @@ -939,10 +752,12 @@ def test_build_container_default_gcc(tmpdir): use_host_conda_bld=True, image_build_dir=image_build_dir, ) + + pkg_paths = utils.built_package_paths(r.recipe_dirs['one']) build_result = build.build( recipe=r.recipe_dirs['one'], recipe_folder='.', - env={}, + pkg_paths=pkg_paths, docker_builder=docker_builder, mulled_test=False, ) From de2e4a24950c9de534635b0f6035844f3a9f0b0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Wed, 25 Apr 2018 17:01:38 +0200 Subject: [PATCH 017/118] fix typo --- test/test_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_utils.py b/test/test_utils.py index a54f9a7ea7..fc9921895c 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -664,7 +664,7 @@ def test_skip_dependencies(): # clean up for pkgs in pkgs.values(): - for pkg im pkgs: + for pkg in pkgs: ensure_missing(pkg) From 601449508cefaa5dc860226950d861f24793f4f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Wed, 25 Apr 2018 20:31:12 +0200 Subject: [PATCH 018/118] fixes and further cleanup --- 
bioconda_utils/bioconductor_skeleton.py | 4 +- bioconda_utils/build.py | 5 +- bioconda_utils/lint_functions.py | 19 +++---- bioconda_utils/pypi.py | 2 +- bioconda_utils/utils.py | 69 ++++++++++--------------- test/test_bioconductor_skeleton.py | 6 +-- test/test_pkg_test.py | 2 +- test/test_utils.py | 28 +++++----- 8 files changed, 54 insertions(+), 81 deletions(-) diff --git a/bioconda_utils/bioconductor_skeleton.py b/bioconda_utils/bioconductor_skeleton.py index ff791ce8fd..155fe911fa 100755 --- a/bioconda_utils/bioconductor_skeleton.py +++ b/bioconda_utils/bioconductor_skeleton.py @@ -894,8 +894,8 @@ def write_recipe(package, recipe_dir, config, force=False, bioc_version=None, # *has* changed, then bump the version number. meta_file = os.path.join(recipe_dir, 'meta.yaml') if os.path.exists(meta_file): - updated_meta = utils.load_meta(proj.meta_yaml) - current_meta = utils.load_meta(meta_file) + updated_meta = utils.load_metadata(proj.meta_yaml).meta + current_meta = utils.load_metadata(meta_file).meta # pop off the version and build numbers so we can compare the rest of # the dicts diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 7f6eac0965..658ecafd7f 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -116,7 +116,6 @@ def build( response = docker_builder.build_recipe( recipe_dir=os.path.abspath(recipe), build_args=' '.join(channel_args + build_args), - pkgs=list(map(os.path.basename, pkg_paths)), env=whitelisted_env, noarch=bool(meta.get_value('build/noarch', default=False)) ) @@ -157,9 +156,7 @@ def build( logger.info('TEST START via mulled-build %s', recipe) - use_base_image = utils.get_meta_value( - meta, - 'extra', 'container', 'extended-base') + use_base_image = meta.get_value('extra/container/extended-base') base_image = 'bioconda/extended-base-image' if use_base_image else None mulled_images = [] diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index e9f83f5017..16e6e3e2aa 100644 --- 
a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -6,8 +6,6 @@ import pandas import numpy as np -from .utils import get_meta_value - def _get_not_none(meta, key, none_subst=dict): """ @@ -115,7 +113,7 @@ def already_in_bioconda(recipe, meta, df): def missing_home(recipe, meta, df): - if not get_meta_value(meta, 'about', 'home'): + if not meta.get_value('about/home'): return { 'missing_home': True, 'fix': 'add about:home', @@ -123,7 +121,7 @@ def missing_home(recipe, meta, df): def missing_summary(recipe, meta, df): - if not get_meta_value(meta, 'about', 'summary'): + if not meta.get_value('about/summary'): return { 'missing_summary': True, 'fix': 'add about:summary', @@ -131,7 +129,7 @@ def missing_summary(recipe, meta, df): def missing_license(recipe, meta, df): - if not get_meta_value(meta, 'about', 'license'): + if not meta.get_value('about/license'): return { 'missing_license': True, 'fix': 'add about:license' @@ -140,7 +138,7 @@ def missing_license(recipe, meta, df): def missing_tests(recipe, meta, df): test_files = ['run_test.py', 'run_test.sh', 'run_test.pl'] - if not get_meta_value(meta, 'test'): + if not meta.get_value('test'): if not any([os.path.exists(os.path.join(recipe, f)) for f in test_files]): return { @@ -158,13 +156,8 @@ def missing_hash(recipe, meta, df): except KeyError: return - if not any(map(partial(get_meta_value, src), - ( - 'md5', - 'sha1', - 'sha256', - ) - )): + if not any(meta.get_value('{}/{}'.format(src, checksum)) + for checksum in ('md5', 'sha1', 'sha256')): return { 'missing_hash': True, 'fix': 'add md5, sha1, or sha256 hash to "source" section', diff --git a/bioconda_utils/pypi.py b/bioconda_utils/pypi.py index c17a432a71..241d649fc5 100644 --- a/bioconda_utils/pypi.py +++ b/bioconda_utils/pypi.py @@ -16,7 +16,7 @@ def compare_recipe_to_pypi(recipe): "python-wget") then a tuple is returned but with a value of None for the latest version on PyPI and None for needs_update. 
""" - meta = utils.load_meta(os.path.join(recipe, 'meta.yaml')) + meta = utils.load_meta(os.path.join(recipe, 'meta.yaml')).meta current = meta['package']['version'] name = meta['package']['name'] diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 0e89eb34b2..df33c6b807 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -105,21 +105,6 @@ def bin_for(name='conda'): return name -def get_meta_value(meta, *keys, default=None): - """ - Return value from metadata. - Given keys can define a path in the document tree. - """ - try: - for key in keys: - if not meta: - raise KeyError(key) - meta = meta[key] - return meta - except KeyError: - return default - - @contextlib.contextmanager def temp_env(env): """ @@ -163,7 +148,7 @@ def sandboxed_env(env): os.environ.update(orig) -def load_all_meta(recipe, config): +def load_all_meta(recipe): """ For each environment, yield the rendered meta.yaml. """ @@ -318,7 +303,7 @@ def __iter__(self): yield env -def get_deps(recipe, config, build=True): +def get_deps(recipe=None, meta=None, build=True): """ Generator of dependencies for a single recipe @@ -333,24 +318,23 @@ def get_deps(recipe, config, build=True): build : bool If True yield build dependencies, if False yield run dependencies. """ - if isinstance(recipe, str): - metadata = load_all_meta(recipe, config) - - # TODO: This function is currently used only for creating DAGs, but it's - # unclear how to manage different dependencies depending on the - # particular environment. For now, just use the first environment. 
- metadata = list(metadata) - metadata = metadata[0] + if recipe is not None: + assert isinstance(recipe, str) + metadata = load_all_meta(recipe) + elif meta is not None: + metadata = [meta] else: - metadata = recipe + raise ValueError("Either meta or recipe has to be specified.") - reqs = metadata.get('requirements', {}) - if build: - deps = reqs.get('build', []) - else: - deps = reqs.get('run', []) - for dep in deps: - yield dep.split()[0] + all_deps = set() + for meta in metadata: + reqs = metadata.get('requirements', {}) + if build: + deps = reqs.get('build', []) + else: + deps = reqs.get('run', []) + all_deps.update(dep.split()[0] for dep in deps) + return all_deps def get_dag(recipes, config, blacklist=None, restrict=True): @@ -385,7 +369,7 @@ def get_dag(recipes, config, blacklist=None, restrict=True): recipes = list(recipes) metadata = [] for recipe in sorted(recipes): - for r in list(load_all_meta(recipe, config)): + for r in list(load_all_meta(recipe)): metadata.append((r, recipe)) if blacklist is None: blacklist = set() @@ -399,24 +383,23 @@ def get_dag(recipes, config, blacklist=None, restrict=True): # Note that this may change once we support conda-build 3. 
name2recipe = defaultdict(set) for meta, recipe in metadata: - name = meta['package']['name'] + name = meta.get_value('package/name') if name not in blacklist: name2recipe[name].update([recipe]) def get_inner_deps(dependencies): for dep in dependencies: - name = dep.split()[0] if name in name2recipe or not restrict: yield name dag = nx.DiGraph() dag.add_nodes_from(meta['package']['name'] for meta, recipe in metadata) for meta, recipe in metadata: - name = meta['package']['name'] + name = meta.get_value('package/name') dag.add_edges_from((dep, name) for dep in set(get_inner_deps(chain( - get_deps(meta, config=config), - get_deps(meta, config=config, + get_deps(meta=meta), + get_deps(meta=meta, build=False))))) return dag, name2recipe @@ -480,7 +463,7 @@ def toplevel(x): else: def get_version(p): return VersionOrder( - load_meta(os.path.join(p, 'meta.yaml'))['package']['version'] + load_metadata(os.path.join(p, 'meta.yaml')).get_value('package/version') ) sorted_versions = sorted(group, key=get_version) if sorted_versions: @@ -723,7 +706,7 @@ def tobuild(recipe): if meta.skip(): logger.debug( 'FILTER: not building %s because ' - 'it defines skip', pkg) + 'it defines skip', recipe) return [] # If on CI, handle noarch. 
@@ -732,13 +715,13 @@ def tobuild(recipe): if platform != 'linux': logger.debug('FILTER: only building %s on ' 'linux because it defines noarch.', - pkg) + recipe) return [] # get all packages that would be built pkgs = list(map(os.path.basename, built_package_paths(recipe))) # check which ones exist already - existing = channel_packages & pkgs + existing = channel_packages.intersection(pkgs) for pkg in existing: logger.debug( diff --git a/test/test_bioconductor_skeleton.py b/test/test_bioconductor_skeleton.py index 3d6b585640..1ea30b9fb6 100644 --- a/test/test_bioconductor_skeleton.py +++ b/test/test_bioconductor_skeleton.py @@ -67,7 +67,7 @@ def test_meta_contents(tmpdir): bioconductor_skeleton.write_recipe( 'edgeR', recipe_dir=str(tmpdir), config=config, recursive=False) - edger_meta = utils.load_meta(str(tmpdir.join('bioconductor-edger')), {}) + edger_meta = utils.load_metadata(str(tmpdir.join('bioconductor-edger'))).meta assert 'r-rcpp' in edger_meta['requirements']['build'] # note that the preprocessing selector is stripped off by yaml parsing, so @@ -141,7 +141,7 @@ def test_bioarchive_exists(): def test_annotation_data(tmpdir): bioconductor_skeleton.write_recipe('AHCytoBands', str(tmpdir), config, recursive=True) - meta = utils.load_meta(str(tmpdir.join('bioconductor-ahcytobands')), {}) + meta = utils.load_metadata(str(tmpdir.join('bioconductor-ahcytobands'))).meta assert 'wget' in meta['requirements']['run'] assert len(meta['source']['url']) == 3 assert not tmpdir.join('bioconductor-ahcytobands', 'build.sh').exists() @@ -151,7 +151,7 @@ def test_annotation_data(tmpdir): def test_experiment_data(tmpdir): bioconductor_skeleton.write_recipe('affydata', str(tmpdir), config, recursive=True) - meta = utils.load_meta(str(tmpdir.join('bioconductor-affydata')), {}) + meta = utils.load_metadata(str(tmpdir.join('bioconductor-affydata'))).meta assert 'wget' in meta['requirements']['run'] assert len(meta['source']['url']) == 3 assert not 
tmpdir.join('bioconductor-affydata', 'build.sh').exists() diff --git a/test/test_pkg_test.py b/test/test_pkg_test.py index 89f69ed17e..a924e629d1 100644 --- a/test/test_pkg_test.py +++ b/test/test_pkg_test.py @@ -81,7 +81,7 @@ def test_pkg_test_mulled_build_error(): built_packages = _build_pkg(RECIPE_ONE) with pytest.raises(sp.CalledProcessError): for pkg in built_packages: - pkg_test.test_packages(pkg, mulled_args='--wrong-arg') + pkg_test.test_package(pkg, mulled_args='--wrong-arg') @pytest.mark.skipif(SKIP_OSX, reason='skipping on osx') diff --git a/test/test_utils.py b/test/test_utils.py index fc9921895c..310b3ebd64 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -147,6 +147,7 @@ def single_upload(): version: "0.1" '''.format(name), from_string=True) r.write_recipes() + r.pkgs[name] = utils.build_package_paths(r.recipes[name]) build.build( recipe=r.recipe_dirs[name], @@ -186,17 +187,20 @@ def test_upload(single_upload): def test_single_build_only(single_build): - assert os.path.exists(single_build) + for pkg in single_build: + assert os.path.exists(pkg) @pytest.mark.skipif(SKIP_DOCKER_TESTS, reason='skipping on osx') def test_single_build_with_post_test(single_build): - pkg_test.test_package(single_build) + for pkg in single_build: + pkg_test.test_package(pkg) def test_multi_build(multi_build): for v in multi_build.values(): - assert os.path.exists(v) + for pkg in v: + assert os.path.exists(pkg) @pytest.mark.skipif(SKIP_DOCKER_TESTS, reason='skipping on osx') @@ -282,9 +286,9 @@ def test_get_deps(): - two """, from_string=True) r.write_recipes() - assert list(utils.get_deps(r.recipe_dirs['two'], config={})) == ['one'] - assert list(utils.get_deps(r.recipe_dirs['three'], config={}, build=True)) == ['one'] - assert list(utils.get_deps(r.recipe_dirs['three'], config={}, build=False)) == ['two'] + assert list(utils.get_deps(r.recipe_dirs['two'])) == ['one'] + assert list(utils.get_deps(r.recipe_dirs['three'], build=True)) == ['one'] + assert 
list(utils.get_deps(r.recipe_dirs['three'], build=False)) == ['two'] def test_conda_as_dep(): @@ -402,8 +406,6 @@ def test_filter_recipes_existing_package(): """, from_string=True) r.write_recipes() recipes = list(r.recipe_dirs.values()) - pkgs = utils.get_channel_packages('bioconda') - pth = utils.built_package_path(recipes[0]) filtered = list( utils.filter_recipes(recipes, channels=['bioconda'])) assert len(filtered) == 0 @@ -427,8 +429,6 @@ def test_filter_recipes_force_existing_package(): """, from_string=True) r.write_recipes() recipes = list(r.recipe_dirs.values()) - pkgs = utils.get_channel_packages('bioconda') - pth = utils.built_package_path(recipes[0]) filtered = list( utils.filter_recipes( recipes, channels=['bioconda'], force=True)) @@ -441,7 +441,7 @@ def test_get_channel_packages(): utils.get_channel_packages('bioconda') -def test_built_package_path(): +def test_built_package_paths(): r = Recipes( """ one: @@ -489,7 +489,7 @@ def test_rendering_sandboxing(): """, from_string=True) r.write_recipes() - pkg_paths = utils.built_package_path(r.recipe_dirs['one']) + pkg_paths = utils.built_package_paths(r.recipe_dirs['one']) env = { # First one is allowed, others are not 'CONDA_ARBITRARY_VAR': 'conda-val-here', @@ -541,7 +541,7 @@ def test_rendering_sandboxing(): """, from_string=True) r.write_recipes() - pkg_paths = utils.built_package_path(r.recipe_dirs['two']) + pkg_paths = utils.built_package_paths(r.recipe_dirs['two']) for pkg in pkg_paths: ensure_missing(pkg) @@ -645,7 +645,7 @@ def test_skip_dependencies(): for pkgs in pkgs.values(): for pkg in pkgs: - ensure_missing(p) + ensure_missing(pkg) build.build_recipes( r.basedir, From 9a4a3d9295084e7fba233f4a5d87e5c44c11a1af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Wed, 25 Apr 2018 21:35:07 +0200 Subject: [PATCH 019/118] fixes --- bioconda_utils/build.py | 3 ++- bioconda_utils/cli.py | 1 - bioconda_utils/linting.py | 30 ++---------------------------- bioconda_utils/utils.py | 
9 ++++++--- test/test_utils.py | 6 +++--- 5 files changed, 13 insertions(+), 36 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 658ecafd7f..3fa3d3bb99 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -156,7 +156,8 @@ def build( logger.info('TEST START via mulled-build %s', recipe) - use_base_image = meta.get_value('extra/container/extended-base') + use_base_image = meta.get_section('extra', {}).get('container', {})\ + .get('extended-base') base_image = 'bioconda/extended-base-image' if use_base_image else None mulled_images = [] diff --git a/bioconda_utils/cli.py b/bioconda_utils/cli.py index 20c354db86..3f797b6d95 100644 --- a/bioconda_utils/cli.py +++ b/bioconda_utils/cli.py @@ -236,7 +236,6 @@ def lint(recipe_folder, config, packages="*", cache=None, list_funcs=False, report = linting.lint( _recipes, - config=config, df=df, exclude=exclude, registry=registry, diff --git a/bioconda_utils/linting.py b/bioconda_utils/linting.py index 7bb041f8aa..055e412ea4 100644 --- a/bioconda_utils/linting.py +++ b/bioconda_utils/linting.py @@ -78,32 +78,6 @@ def not_in_bioconda(x): """ -def get_meta(recipe, config): - """ - Given a package name, find the current meta.yaml file, parse it, and return - the dict. - - Parameters - ---------- - recipe : str - Path to recipe (directory containing the meta.yaml file) - - config : str or dict - Config YAML or dict - """ - cfg = utils.load_config(config) - - # TODO: Currently just uses the first env. Should turn this into - # a generator. 
- - pth = os.path.join(recipe, 'meta.yaml') - jinja_env = jinja2.Environment() - content = jinja_env.from_string( - open(pth, 'r', encoding='utf-8').read()).render() - meta = yaml.round_trip_load(content, preserve_quotes=True) - return meta - - def channel_dataframe(cache=None, channels=['bioconda', 'conda-forge', 'defaults']): """ @@ -149,7 +123,7 @@ def channel_dataframe(cache=None, channels=['bioconda', 'conda-forge', return df -def lint(recipes, config, df, exclude=None, registry=None): +def lint(recipes, df, exclude=None, registry=None): """ Parameters ---------- @@ -219,7 +193,7 @@ def lint(recipes, config, df, exclude=None, registry=None): # functions? I can't think of a reason we'd want to keep an unparseable # YAML. try: - meta = get_meta(recipe, config) + meta = utils.load_metadata(recipe) except ( yaml.scanner.ScannerError, yaml.constructor.ConstructorError ) as e: diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index df33c6b807..76bf9eff7e 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -309,6 +309,9 @@ def get_deps(recipe=None, meta=None, build=True): Only names (not versions) of dependencies are yielded. + If the variant/version matrix yields multiple instances of the metadata, + the union of these dependencies is returned. 
+ Parameters ---------- recipe : str or MetaData @@ -328,7 +331,7 @@ def get_deps(recipe=None, meta=None, build=True): all_deps = set() for meta in metadata: - reqs = metadata.get('requirements', {}) + reqs = meta.get('requirements', {}) if build: deps = reqs.get('build', []) else: @@ -719,9 +722,9 @@ def tobuild(recipe): return [] # get all packages that would be built - pkgs = list(map(os.path.basename, built_package_paths(recipe))) + pkgs = set(map(os.path.basename, built_package_paths(recipe))) # check which ones exist already - existing = channel_packages.intersection(pkgs) + existing = channel_packages & pkgs for pkg in existing: logger.debug( diff --git a/test/test_utils.py b/test/test_utils.py index 310b3ebd64..adcb116042 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -147,7 +147,7 @@ def single_upload(): version: "0.1" '''.format(name), from_string=True) r.write_recipes() - r.pkgs[name] = utils.build_package_paths(r.recipes[name]) + r.pkgs[name] = utils.built_package_paths(r.recipes[name]) build.build( recipe=r.recipe_dirs[name], @@ -211,7 +211,7 @@ def test_docker_builder_build(recipes_fixture): docker_builder = docker_utils.RecipeBuilder(use_host_conda_bld=True) pkgs = list(map(os.path.basename, recipes_fixture.pkgs['one'])) docker_builder.build_recipe( - recipes_fixture.recipe_dirs['one'], build_args='', pkgs=pkgs, env={}) + recipes_fixture.recipe_dirs['one'], build_args='', env={}) for pkg in pkgs: assert os.path.exists(pkg) @@ -467,7 +467,7 @@ def test_built_package_paths(): r.write_recipes() assert os.path.basename( - utils.built_package_paths(r.recipe_dirs['one']) + utils.built_package_paths(r.recipe_dirs['one'])[0] ) == ['one-0.1-py36_0.tar.bz2'] From 312f9829e50df90c4bf790af597bca3840e736fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Wed, 25 Apr 2018 23:31:12 +0200 Subject: [PATCH 020/118] Fixes. 
--- bioconda_utils/build.py | 4 ++-- bioconda_utils/docker_utils.py | 11 +++++++++-- bioconda_utils/utils.py | 14 ++++++-------- test/test_linting.py | 10 +++++----- test/test_utils.py | 4 ++-- 5 files changed, 24 insertions(+), 19 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 3fa3d3bb99..608753b462 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -89,8 +89,7 @@ def build( # build_args = ["--no-build-id"] # use global variant config file (contains pinnings) - build_args = ["-e", utils.load_conda_config().exclusive_config_file, - "--skip-existing"] + build_args = ["--skip-existing"] if testonly: build_args.append("--test") else: @@ -135,6 +134,7 @@ def build( # conda-build from building all subdirectories with utils.sandboxed_env(whitelisted_env): cmd = CONDA_BUILD_CMD + build_args + channel_args + \ + ["-e", utils.load_conda_config().exclusive_config_file] + \ [os.path.join(recipe, 'meta.yaml')] logger.debug('command: %s', cmd) with utils.Progress(): diff --git a/bioconda_utils/docker_utils.py b/bioconda_utils/docker_utils.py index 4504f17fde..aab59fb346 100644 --- a/bioconda_utils/docker_utils.py +++ b/bioconda_utils/docker_utils.py @@ -93,7 +93,7 @@ # The actual building... # we explicitly point to the meta.yaml, in order to keep # conda-build from building all subdirectories -conda build {self.conda_build_args} {self.container_recipe}/meta.yaml 2>&1 +conda build -e {self.container_staging}/conda_build_config.yaml {self.conda_build_args} {self.container_recipe}/meta.yaml 2>&1 cp `conda build {self.container_recipe}/meta.yaml --output` {self.container_staging}/{arch} # Ensure permissions are correct on the host. @@ -198,7 +198,7 @@ def __init__( container_staging : str Directory to which the host's conda-bld dir will be mounted so that - the container can use previously-built packages as depdendencies. + the container can use previously-built packages as dependencies. 
Upon successful building container-built packages will be copied over. Mounted as read-write. @@ -309,6 +309,13 @@ def __init__( self.container_recipe = container_recipe self.container_staging = container_staging + + # Copy the conda build config to the staging directory that is + # visible in the container + shutil.copyfile(utils.load_conda_config().exclusive_config_file, + os.path.join(self.container_staging, + "conda_build_config.yaml")) + self.host_conda_bld = get_host_conda_bld() if use_host_conda_bld: diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 76bf9eff7e..a769ccffff 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -152,7 +152,7 @@ def load_all_meta(recipe): """ For each environment, yield the rendered meta.yaml. """ - return api.render(recipe) + return [meta for (meta, _, _) in api.render(recipe, config=load_conda_config())] def load_conda_config(): @@ -168,6 +168,9 @@ def load_conda_config(): # set path to pinnings from conda forge package config.exclusive_config_file = os.path.join(env_root, "conda_build_config.yaml") + assert os.path.exists(config.exclusive_config_file), ("error: " + "conda_build_config.yaml not found in + "environment root") return config @@ -372,7 +375,7 @@ def get_dag(recipes, config, blacklist=None, restrict=True): recipes = list(recipes) metadata = [] for recipe in sorted(recipes): - for r in list(load_all_meta(recipe)): + for r in load_all_meta(recipe): metadata.append((r, recipe)) if blacklist is None: blacklist = set() @@ -697,6 +700,7 @@ def filter_recipes(recipes, channels=None, force=False): channel_packages.update(get_channel_packages(channel=channel)) def tobuild(recipe): + # check if package is noarch, if so, build only on linux # with temp_os, we can fool the MetaData if needed. 
platform = os.environ.get('OSTYPE', sys.platform) if platform.startswith("darwin"): @@ -706,12 +710,6 @@ def tobuild(recipe): with temp_os(platform): meta = load_metadata(recipe) - if meta.skip(): - logger.debug( - 'FILTER: not building %s because ' - 'it defines skip', recipe) - return [] - # If on CI, handle noarch. if os.environ.get('CI', None) == 'true': if meta.get_value('build/noarch'): diff --git a/test/test_linting.py b/test/test_linting.py index 0c15af107e..299980a00b 100644 --- a/test/test_linting.py +++ b/test/test_linting.py @@ -45,7 +45,7 @@ def _run(contents, expect_pass=True): assert len(r.recipe_dirs) == 1 name = list(r.recipe_dirs.keys())[0] recipe, meta, df = r.recipe_dirs[name], r.recipes[name]['meta.yaml'], should_pass_df - meta = yaml.load(meta) + meta = utils.load_metadata(r.recipe_dirs[name]) if expect_pass: assert func(recipe, meta, df) is None, "lint did not pass" else: @@ -72,7 +72,7 @@ def test_empty_build_section(): # access to contents of possibly empty build section can happen in # `should_be_noarch` and `should_not_be_noarch` registry = [lint_functions.should_be_noarch, lint_functions.should_not_be_noarch] - res = linting.lint(r.recipe_dirs.values(), config={}, df=None, registry=registry) + res = linting.lint(r.recipe_dirs.values(), df=None, registry=registry) assert res is None @@ -88,7 +88,7 @@ def test_lint_skip_in_recipe(): version: "0.1" ''', from_string=True) r.write_recipes() - res = linting.lint(r.recipe_dirs.values(), config={}, df=None, registry=[lint_functions.missing_home]) + res = linting.lint(r.recipe_dirs.values(), df=None, registry=[lint_functions.missing_home]) assert res is not None @@ -105,7 +105,7 @@ def test_lint_skip_in_recipe(): - missing_home ''', from_string=True) r.write_recipes() - res = linting.lint(r.recipe_dirs.values(), config={}, df=None, registry=[lint_functions.missing_home]) + res = linting.lint(r.recipe_dirs.values(), df=None, registry=[lint_functions.missing_home]) assert res is None # should pass; 
minimal recipe needs to skip these lints @@ -123,7 +123,7 @@ def test_lint_skip_in_recipe(): - no_tests ''', from_string=True) r.write_recipes() - res = linting.lint(r.recipe_dirs.values(), config={}, df=None) + res = linting.lint(r.recipe_dirs.values(), df=None) assert res is not None diff --git a/test/test_utils.py b/test/test_utils.py index adcb116042..2a1f01cace 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -147,7 +147,7 @@ def single_upload(): version: "0.1" '''.format(name), from_string=True) r.write_recipes() - r.pkgs[name] = utils.built_package_paths(r.recipes[name]) + r.pkgs[name] = utils.built_package_paths(r.recipe_dirs[name]) build.build( recipe=r.recipe_dirs[name], @@ -209,7 +209,7 @@ def test_docker_builder_build(recipes_fixture): Tests just the build_recipe method of a RecipeBuilder object. """ docker_builder = docker_utils.RecipeBuilder(use_host_conda_bld=True) - pkgs = list(map(os.path.basename, recipes_fixture.pkgs['one'])) + pkgs = recipes_fixture.pkgs['one'] docker_builder.build_recipe( recipes_fixture.recipe_dirs['one'], build_args='', env={}) for pkg in pkgs: From 06fa5717fcdc2fdbb92be19394725966d6c40a2b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Thu, 26 Apr 2018 07:49:11 +0200 Subject: [PATCH 021/118] fix typo --- bioconda_utils/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index a769ccffff..e3487116aa 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -169,7 +169,7 @@ def load_conda_config(): config.exclusive_config_file = os.path.join(env_root, "conda_build_config.yaml") assert os.path.exists(config.exclusive_config_file), ("error: " - "conda_build_config.yaml not found in + "conda_build_config.yaml not found in " "environment root") return config From 742e61d1040cbfe53d7ef0fe9a897172af1b7eb0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Thu, 26 Apr 2018 08:06:09 +0200 
Subject: [PATCH 022/118] fixes --- bioconda_utils/linting.py | 2 +- bioconda_utils/utils.py | 5 +++-- test/test_linting.py | 3 ++- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/bioconda_utils/linting.py b/bioconda_utils/linting.py index 055e412ea4..2e1da7c3de 100644 --- a/bioconda_utils/linting.py +++ b/bioconda_utils/linting.py @@ -210,7 +210,7 @@ def lint(recipes, df, exclude=None, registry=None): skip_for_this_recipe = set(skip_dict[recipe]) # skips defined in meta.yaml - persistent = meta.get('extra', {}).get('skip-lints', []) + persistent = meta.get_section('extra', {}).get('skip-lints', []) skip_for_this_recipe.update(persistent) for func in registry: diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index e3487116aa..a58857bcf7 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -334,7 +334,7 @@ def get_deps(recipe=None, meta=None, build=True): all_deps = set() for meta in metadata: - reqs = meta.get('requirements', {}) + reqs = meta.get_section('requirements', {}) if build: deps = reqs.get('build', []) else: @@ -399,7 +399,8 @@ def get_inner_deps(dependencies): yield name dag = nx.DiGraph() - dag.add_nodes_from(meta['package']['name'] for meta, recipe in metadata) + dag.add_nodes_from(meta.get_value('package/name') + for meta, recipe in metadata) for meta, recipe in metadata: name = meta.get_value('package/name') dag.add_edges_from((dep, name) diff --git a/test/test_linting.py b/test/test_linting.py index 299980a00b..d47b5ee8b4 100644 --- a/test/test_linting.py +++ b/test/test_linting.py @@ -2,7 +2,8 @@ import yaml from helpers import Recipes from bioconda_utils import lint_functions -from bioconda_utils import linting +from bioconda_utils import linting, utils + def run_lint( func, From 2aaf71e76f6e9b2be3911a86bc3811e19aa1a9e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Thu, 26 Apr 2018 08:20:53 +0200 Subject: [PATCH 023/118] fixes --- .ropeproject/config.py | 112 
+++++++++++++++++++++++++++++++ bioconda_utils/build.py | 4 +- bioconda_utils/lint_functions.py | 37 +++++----- bioconda_utils/linting.py | 2 +- bioconda_utils/utils.py | 2 +- test/test_linting.py | 90 ------------------------- test/test_utils.py | 2 +- 7 files changed, 132 insertions(+), 117 deletions(-) create mode 100644 .ropeproject/config.py diff --git a/.ropeproject/config.py b/.ropeproject/config.py new file mode 100644 index 0000000000..0bf7750180 --- /dev/null +++ b/.ropeproject/config.py @@ -0,0 +1,112 @@ +# The default ``config.py`` +# flake8: noqa + + +def set_prefs(prefs): + """This function is called before opening the project""" + + # Specify which files and folders to ignore in the project. + # Changes to ignored resources are not added to the history and + # VCSs. Also they are not returned in `Project.get_files()`. + # Note that ``?`` and ``*`` match all characters but slashes. + # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc' + # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc' + # '.svn': matches 'pkg/.svn' and all of its children + # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o' + # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o' + prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject', + '.hg', '.svn', '_svn', '.git', '.tox'] + + # Specifies which files should be considered python files. It is + # useful when you have scripts inside your project. Only files + # ending with ``.py`` are considered to be python files by + # default. + #prefs['python_files'] = ['*.py'] + + # Custom source folders: By default rope searches the project + # for finding source folders (folders that should be searched + # for finding modules). You can add paths to that list. Note + # that rope guesses project source folders correctly most of the + # time; use this if you have any problems. + # The folders should be relative to project root and use '/' for + # separating folders regardless of the platform rope is running on. 
+ # 'src/my_source_folder' for instance. + #prefs.add('source_folders', 'src') + + # You can extend python path for looking up modules + #prefs.add('python_path', '~/python/') + + # Should rope save object information or not. + prefs['save_objectdb'] = True + prefs['compress_objectdb'] = False + + # If `True`, rope analyzes each module when it is being saved. + prefs['automatic_soa'] = True + # The depth of calls to follow in static object analysis + prefs['soa_followed_calls'] = 0 + + # If `False` when running modules or unit tests "dynamic object + # analysis" is turned off. This makes them much faster. + prefs['perform_doa'] = True + + # Rope can check the validity of its object DB when running. + prefs['validate_objectdb'] = True + + # How many undos to hold? + prefs['max_history_items'] = 32 + + # Shows whether to save history across sessions. + prefs['save_history'] = True + prefs['compress_history'] = False + + # Set the number spaces used for indenting. According to + # :PEP:`8`, it is best to use 4 spaces. Since most of rope's + # unit-tests use 4 spaces it is more reliable, too. + prefs['indent_size'] = 4 + + # Builtin and c-extension modules that are allowed to be imported + # and inspected by rope. + prefs['extension_modules'] = [] + + # Add all standard c-extensions to extension_modules list. + prefs['import_dynload_stdmods'] = True + + # If `True` modules with syntax errors are considered to be empty. + # The default value is `False`; When `False` syntax errors raise + # `rope.base.exceptions.ModuleSyntaxError` exception. + prefs['ignore_syntax_errors'] = False + + # If `True`, rope ignores unresolvable imports. Otherwise, they + # appear in the importing namespace. + prefs['ignore_bad_imports'] = False + + # If `True`, rope will insert new module imports as + # `from import ` by default. 
+ prefs['prefer_module_from_imports'] = False + + # If `True`, rope will transform a comma list of imports into + # multiple separate import statements when organizing + # imports. + prefs['split_imports'] = False + + # If `True`, rope will remove all top-level import statements and + # reinsert them at the top of the module when making changes. + prefs['pull_imports_to_top'] = True + + # If `True`, rope will sort imports alphabetically by module name instead of + # alphabetically by import statement, with from imports after normal + # imports. + prefs['sort_imports_alphabetically'] = False + + # Location of implementation of rope.base.oi.type_hinting.interfaces.ITypeHintingFactory + # In general case, you don't have to change this value, unless you're an rope expert. + # Change this value to inject you own implementations of interfaces + # listed in module rope.base.oi.type_hinting.providers.interfaces + # For example, you can add you own providers for Django Models, or disable the search + # type-hinting in a class hierarchy, etc. + prefs['type_hinting_factory'] = 'rope.base.oi.type_hinting.factory.default_type_hinting_factory' + + +def project_opened(project): + """This function is called after opening the project""" + # Do whatever you like here! 
diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 608753b462..740fb09d54 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -156,8 +156,8 @@ def build( logger.info('TEST START via mulled-build %s', recipe) - use_base_image = meta.get_section('extra', {}).get('container', {})\ - .get('extended-base') + use_base_image = meta.get_section('extra').get('container', {})\ + .get('extended-base') base_image = 'bioconda/extended-base-image' if use_base_image else None mulled_images = [] diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index 16e6e3e2aa..613f006dc5 100644 --- a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -51,7 +51,7 @@ def _get_deps(meta, section=None): get_name = lambda dep: dep.split()[0] - reqs = meta.get('requirements') + reqs = meta.get_section('requirements') if reqs is None: return [] if section is None: @@ -138,7 +138,7 @@ def missing_license(recipe, meta, df): def missing_tests(recipe, meta, df): test_files = ['run_test.py', 'run_test.sh', 'run_test.pl'] - if not meta.get_value('test'): + if not meta.get_section('test'): if not any([os.path.exists(os.path.join(recipe, f)) for f in test_files]): return { @@ -149,11 +149,7 @@ def missing_tests(recipe, meta, df): def missing_hash(recipe, meta, df): # could be a meta-package if no source section or if None - try: - src = meta['source'] - if src is None: - return - except KeyError: + if not meta.get_section('source'): return if not any(meta.get_value('{}/{}'.format(src, checksum)) @@ -165,20 +161,17 @@ def missing_hash(recipe, meta, df): def uses_git_url(recipe, meta, df): - try: - src = meta.get('source', {}) - if src is None: - # metapackage? - return - - if 'git_url' in src: - return { - 'uses_git_url': True, - 'fix': 'use tarballs whenever possible', - } - except KeyError: + src = meta.get_section('source') + if not src: + # metapackage? 
return + if 'git_url' in src: + return { + 'uses_git_url': True, + 'fix': 'use tarballs whenever possible', + } + def uses_perl_threaded(recipe, meta, df): if 'perl-threaded' in _get_deps(meta): @@ -276,7 +269,7 @@ def setup_py_install_args(recipe, meta, df): def invalid_identifiers(recipe, meta, df): try: - identifiers = meta['extra']['identifiers'] + identifiers = meta.get_section('extra').get('identifiers', []) if not isinstance(identifiers, list): return { 'invalid_identifiers': True, 'fix': 'extra:identifiers must hold a list of identifiers' } @@ -293,8 +286,8 @@ def invalid_identifiers(recipe, meta, df): def deprecated_numpy_spec(recipe, meta, df): - reqs = meta.get('requirements') - if reqs is None: + reqs = meta.get_section('requirements') + if not reqs: return for section in ['build', 'run']: for dep in reqs.get(section, []): diff --git a/bioconda_utils/linting.py b/bioconda_utils/linting.py index 2e1da7c3de..b5eec4ba3e 100644 --- a/bioconda_utils/linting.py +++ b/bioconda_utils/linting.py @@ -210,7 +210,7 @@ def lint(recipes, df, exclude=None, registry=None): skip_for_this_recipe = set(skip_dict[recipe]) # skips defined in meta.yaml - persistent = meta.get_section('extra', {}).get('skip-lints', []) + persistent = meta.get_section('extra').get('skip-lints', []) skip_for_this_recipe.update(persistent) for func in registry: diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index a58857bcf7..812b9ce47c 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -334,7 +334,7 @@ def get_deps(recipe=None, meta=None, build=True): all_deps = set() for meta in metadata: - reqs = meta.get_section('requirements', {}) + reqs = meta.get_section('requirements') if build: deps = reqs.get('build', []) else: diff --git a/test/test_linting.py b/test/test_linting.py index d47b5ee8b4..6afc31f4eb 100644 --- a/test/test_linting.py +++ b/test/test_linting.py @@ -626,96 +626,6 @@ def test_should_not_be_noarch(): ] ) -def test_lint_pin(): - run_lint( - 
func=lint_functions._pin('CONDA_ZLIB', 'zlib'), - should_pass=[ - ''' - a: - meta.yaml: | - package: - name: a - version: '0.1' - source: - patches: - - zlib_1.patch - ''', - ''' - a: - meta.yaml: | - package: - name: a - version: '0.1' - requirements: - build: - - zlib {{CONDA_ZLIB}}* - run: - - zlib {{CONDA_ZLIB}}* - ''', - ''' - a: - meta.yaml: | - package: - name: a - version: '0.1' - requirements: - build: - - zlib # build-only requirements are obviously statically linked, hence no pinning needed - run: - - libgcc - ''', - ''' - a: - meta.yaml: | - package: - name: a - version: '0.1' - requirements: - run: - - zlib # run-only requirement doesn't need pinning; build is not influenced by it - '''], - should_fail=[ - ''' - a: - meta.yaml: | - package: - name: a - version: '0.1' - - requirements: - build: - - zlib - run: - - zlib {{CONDA_ZLIB}}* - ''', - ''' - a: - meta.yaml: | - package: - name: a - version: '0.1' - - requirements: - build: - - zlib - - run: # ^-- DO NOT remove the preceding line; it's part of the test case! - - zlib - ''', - ''' - a: - meta.yaml: | - package: - name: a - version: '0.1' - requirements: - build: - - zlib {{CONDA_ZLIB}}* - run: - - zlib - '''], - ) - def test_setup_py_install_args(): run_lint( diff --git a/test/test_utils.py b/test/test_utils.py index 2a1f01cace..186c6fc770 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -468,7 +468,7 @@ def test_built_package_paths(): assert os.path.basename( utils.built_package_paths(r.recipe_dirs['one'])[0] - ) == ['one-0.1-py36_0.tar.bz2'] + ) == 'one-0.1-py36_0.tar.bz2' def test_string_or_float_to_integer_python(): From d888281d2352fe43c4bd7032a5dce0fc8adb855b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Thu, 26 Apr 2018 09:06:21 +0200 Subject: [PATCH 024/118] fixes. 
--- .gitignore | 4 ++-- bioconda_utils/lint_functions.py | 25 +++++++------------------ test/test_linting.py | 6 ++++-- 3 files changed, 13 insertions(+), 22 deletions(-) diff --git a/.gitignore b/.gitignore index bfc9a344da..a9da2d4320 100644 --- a/.gitignore +++ b/.gitignore @@ -7,7 +7,7 @@ test/.coverage .coverage htmlcov/ docs/recipes/ - +.pytest_cache # Mac OS Files -.DS_Store \ No newline at end of file +.DS_Store diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index 613f006dc5..75bd293452 100644 --- a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -7,18 +7,6 @@ import numpy as np -def _get_not_none(meta, key, none_subst=dict): - """ - Return meta[key] if key is in meta and its value is not None, otherwise - return none_subst(). - - Some recipes have an empty build section, so it'll be None and we can't - do a chained get. - """ - ret = meta.get(key) - return ret if (ret is not None) else none_subst() - - def _subset_df(recipe, meta, df): """ Helper function to get the subset of `df` for this recipe. @@ -101,7 +89,7 @@ def already_in_bioconda(recipe, meta, df): Does the package exist in bioconda? """ results = _subset_df(recipe, meta, df) - build_section = _get_not_none(meta, 'build') + build_section = meta.get_section('build') build_number = int(build_section.get('number', 0)) build_results = results[results.build_number == build_number] channels = set(build_results.channel) @@ -149,7 +137,8 @@ def missing_tests(recipe, meta, df): def missing_hash(recipe, meta, df): # could be a meta-package if no source section or if None - if not meta.get_section('source'): + src = meta.get_section('source') + if not src: return if not any(meta.get_value('{}/{}'.format(src, checksum)) @@ -216,7 +205,7 @@ def should_be_noarch(recipe, meta, df): # the python version. 
not _has_preprocessing_selector(recipe) ) and ( - 'noarch' not in _get_not_none(meta, 'build') + 'noarch' not in meta.get_section('build') ): return { 'should_be_noarch': True, @@ -228,9 +217,9 @@ def should_not_be_noarch(recipe, meta, df): deps = _get_deps(meta) if ( ('gcc' in deps) or - _get_not_none(meta, 'build').get('skip', False) + meta.get_section('build').get('skip', False) ) and ( - 'noarch' in _get_not_none(meta, 'build') + 'noarch' in meta.get_section('build') ): return { 'should_not_be_noarch': True, @@ -248,7 +237,7 @@ def setup_py_install_args(recipe, meta, df): 'to setup.py command'), } - script_line = _get_not_none(meta, 'build').get('script', '') + script_line = meta.get_section('build').get('script', '') if ( 'setup.py install' in script_line and '--single-version-externally-managed' not in script_line diff --git a/test/test_linting.py b/test/test_linting.py index 6afc31f4eb..e151ec1fc2 100644 --- a/test/test_linting.py +++ b/test/test_linting.py @@ -333,8 +333,10 @@ def test_uses_git_url(): ''' uses_git_url: meta.yaml: | - name: uses_git_url - version: "0.1"'''], + package: + name: uses_git_url + version: "0.1" + '''], should_fail=''' uses_git_url: meta.yaml: | From 7d976688a5f9e1b811417633a6d003d1a098f45f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Thu, 26 Apr 2018 09:13:09 +0200 Subject: [PATCH 025/118] fix DAG --- bioconda_utils/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 812b9ce47c..531d8f5121 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -395,8 +395,8 @@ def get_dag(recipes, config, blacklist=None, restrict=True): def get_inner_deps(dependencies): for dep in dependencies: - if name in name2recipe or not restrict: - yield name + if dep in name2recipe or not restrict: + yield dep dag = nx.DiGraph() dag.add_nodes_from(meta.get_value('package/name') From be141e0d2577f2c72102882b9536ad881288fcac Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Thu, 26 Apr 2018 11:55:51 +0200 Subject: [PATCH 026/118] Linting now uses fully rendered recipes, and checks all combinations of the variant matrix and both linux and osx. --- bioconda_utils/docker_utils.py | 12 +- bioconda_utils/lint_functions.py | 421 ++++++++++++++----------------- bioconda_utils/linting.py | 11 +- bioconda_utils/utils.py | 40 +-- test/test_linting.py | 43 ++-- test/test_utils.py | 5 +- 6 files changed, 254 insertions(+), 278 deletions(-) diff --git a/bioconda_utils/docker_utils.py b/bioconda_utils/docker_utils.py index aab59fb346..0f7eb5ccf5 100644 --- a/bioconda_utils/docker_utils.py +++ b/bioconda_utils/docker_utils.py @@ -310,12 +310,6 @@ def __init__( self.container_recipe = container_recipe self.container_staging = container_staging - # Copy the conda build config to the staging directory that is - # visible in the container - shutil.copyfile(utils.load_conda_config().exclusive_config_file, - os.path.join(self.container_staging, - "conda_build_config.yaml")) - self.host_conda_bld = get_host_conda_bld() if use_host_conda_bld: @@ -328,6 +322,12 @@ def __init__( os.makedirs(pkg_dir) self.pkg_dir = pkg_dir + # Copy the conda build config to the staging directory that is + # visible in the container + shutil.copyfile(utils.load_conda_config().exclusive_config_file, + os.path.join(self.pkg_dir, + "conda_build_config.yaml")) + self._build_image(image_build_dir) def __del__(self): diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index 75bd293452..44322fd4e4 100644 --- a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -18,8 +18,8 @@ def _subset_df(recipe, meta, df): np.nan, index=[], columns=['channel', 'name', 'version', 'build_number']) - name = meta['package']['name'] - version = meta['package']['version'] + name = meta.get_value('package/name') + version = meta.get_value('package/version') return df[ (df.name == name) & @@ 
-71,123 +71,134 @@ def _has_preprocessing_selector(recipe): return True -def in_other_channels(recipe, meta, df): +def in_other_channels(recipe, metas, df): """ Does the package exist in any other non-bioconda channels? """ - results = _subset_df(recipe, meta, df) - channels = set(results.channel).difference(['bioconda']) - if len(channels): - return { - 'exists_in_channels': channels, - 'fix': 'consider deprecating', - } + for meta in metas: + results = _subset_df(recipe, meta, df) + channels = set(results.channel).difference(['bioconda']) + if len(channels): + return { + 'exists_in_channels': channels, + 'fix': 'consider deprecating', + } -def already_in_bioconda(recipe, meta, df): +def already_in_bioconda(recipe, metas, df): """ Does the package exist in bioconda? """ - results = _subset_df(recipe, meta, df) - build_section = meta.get_section('build') - build_number = int(build_section.get('number', 0)) - build_results = results[results.build_number == build_number] - channels = set(build_results.channel) - if 'bioconda' in channels: - return { - 'already_in_bioconda': True, - 'fix': 'bump version or build number' - } - - -def missing_home(recipe, meta, df): - if not meta.get_value('about/home'): - return { - 'missing_home': True, - 'fix': 'add about:home', - } + for meta in metas: + results = _subset_df(recipe, meta, df) + build_section = meta.get_section('build') + build_number = int(build_section.get('number', 0)) + build_results = results[results.build_number == build_number] + channels = set(build_results.channel) + if 'bioconda' in channels: + return { + 'already_in_bioconda': True, + 'fix': 'bump version or build number' + } -def missing_summary(recipe, meta, df): - if not meta.get_value('about/summary'): - return { - 'missing_summary': True, - 'fix': 'add about:summary', - } +def missing_home(recipe, metas, df): + for meta in metas: + if not meta.get_value('about/home'): + return { + 'missing_home': True, + 'fix': 'add about:home', + } -def 
missing_license(recipe, meta, df): - if not meta.get_value('about/license'): - return { - 'missing_license': True, - 'fix': 'add about:license' - } +def missing_summary(recipe, metas, df): + for meta in metas: + if not meta.get_value('about/summary'): + return { + 'missing_summary': True, + 'fix': 'add about:summary', + } -def missing_tests(recipe, meta, df): - test_files = ['run_test.py', 'run_test.sh', 'run_test.pl'] - if not meta.get_section('test'): - if not any([os.path.exists(os.path.join(recipe, f)) for f in - test_files]): +def missing_license(recipe, metas, df): + for meta in metas: + if not meta.get_value('about/license'): return { - 'no_tests': True, - 'fix': 'add basic tests', + 'missing_license': True, + 'fix': 'add about:license' } -def missing_hash(recipe, meta, df): - # could be a meta-package if no source section or if None - src = meta.get_section('source') - if not src: - return +def missing_tests(recipe, metas, df): + for meta in metas: + test_files = ['run_test.py', 'run_test.sh', 'run_test.pl'] + if not meta.get_section('test'): + if not any([os.path.exists(os.path.join(recipe, f)) for f in + test_files]): + return { + 'no_tests': True, + 'fix': 'add basic tests', + } - if not any(meta.get_value('{}/{}'.format(src, checksum)) - for checksum in ('md5', 'sha1', 'sha256')): - return { - 'missing_hash': True, - 'fix': 'add md5, sha1, or sha256 hash to "source" section', - } +def missing_hash(recipe, metas, df): + for meta in metas: + # could be a meta-package if no source section or if None + src = meta.get_section('source') + if not src: + continue -def uses_git_url(recipe, meta, df): - src = meta.get_section('source') - if not src: - # metapackage? 
- return + if not any(src.get(checksum) + for checksum in ('md5', 'sha1', 'sha256')): + return { + 'missing_hash': True, + 'fix': 'add md5, sha1, or sha256 hash to "source" section', + } - if 'git_url' in src: - return { - 'uses_git_url': True, - 'fix': 'use tarballs whenever possible', - } +def uses_git_url(recipe, metas, df): + for meta in metas: + src = meta.get_section('source') + if not src: + # metapackage? + continue -def uses_perl_threaded(recipe, meta, df): - if 'perl-threaded' in _get_deps(meta): - return { - 'depends_on_perl_threaded': True, - 'fix': 'use "perl" instead of "perl-threaded"', - } + if 'git_url' in src: + return { + 'uses_git_url': True, + 'fix': 'use tarballs whenever possible', + } -def uses_javajdk(recipe, meta, df): - if 'java-jdk' in _get_deps(meta): - return { - 'depends_on_java-jdk': True, - 'fix': 'use "openjdk" instead of "java-jdk"', - } +def uses_perl_threaded(recipe, metas, df): + for meta in metas: + if 'perl-threaded' in _get_deps(meta): + return { + 'depends_on_perl_threaded': True, + 'fix': 'use "perl" instead of "perl-threaded"', + } -def uses_setuptools(recipe, meta, df): - if 'setuptools' in _get_deps(meta, 'run'): - return { - 'depends_on_setuptools': True, - 'fix': ('setuptools might not be a run requirement (unless it uses ' - 'pkg_resources or setuptools console scripts)'), - } +def uses_javajdk(recipe, metas, df): + for meta in metas: + if 'java-jdk' in _get_deps(meta): + return { + 'depends_on_java-jdk': True, + 'fix': 'use "openjdk" instead of "java-jdk"', + } + +def uses_setuptools(recipe, metas, df): + for meta in metas: + if 'setuptools' in _get_deps(meta, 'run'): + return { + 'depends_on_setuptools': True, + 'fix': ('setuptools might not be a run requirement (unless it uses ' + 'pkg_resources or setuptools console scripts)'), + } -def has_windows_bat_file(recipe, meta, df): + +def has_windows_bat_file(recipe, metas, df): if len(glob.glob(os.path.join(recipe, '*.bat'))) > 0: return { 'bat_file': True, @@ 
-195,146 +206,102 @@ def has_windows_bat_file(recipe, meta, df): } -def should_be_noarch(recipe, meta, df): - deps = _get_deps(meta) - if ( - ('gcc' not in deps) and - ('python' in deps) and - # This will also exclude recipes with skip sections - # which is a good thing, because noarch also implies independence of - # the python version. - not _has_preprocessing_selector(recipe) - ) and ( - 'noarch' not in meta.get_section('build') - ): - return { - 'should_be_noarch': True, - 'fix': 'add "build: noarch" section', - } +def should_be_noarch(recipe, metas, df): + for meta in metas: + print(meta.get_value("package/name")) + deps = _get_deps(meta) + if ( + ('gcc' not in deps) and + ('python' in deps) and + # This will also exclude recipes with skip sections + # which is a good thing, because noarch also implies independence of + # the python version. + not _has_preprocessing_selector(recipe) + ) and ( + 'noarch' not in meta.get_section('build') + ): + return { + 'should_be_noarch': True, + 'fix': 'add "build: noarch" section', + } -def should_not_be_noarch(recipe, meta, df): - deps = _get_deps(meta) - if ( - ('gcc' in deps) or - meta.get_section('build').get('skip', False) - ) and ( - 'noarch' in meta.get_section('build') - ): - return { - 'should_not_be_noarch': True, - 'fix': 'remove "build: noarch" section', +def should_not_be_noarch(recipe, metas, df): + for meta in metas: + deps = _get_deps(meta) + if ( + ('gcc' in deps) or + meta.get_section('build').get('skip', False) in ["true", "True"] + ) and ( + 'noarch' in meta.get_section('build') + ): + print("error") + return { + 'should_not_be_noarch': True, + 'fix': 'remove "build: noarch" section', + } + + +def setup_py_install_args(recipe, metas, df): + for meta in metas: + if 'setuptools' not in _get_deps(meta, 'build'): + continue + + err = { + 'needs_setuptools_args': True, + 'fix': ('add "--single-version-externally-managed --record=record.txt" ' + 'to setup.py command'), } + script_line = 
meta.get_section('build').get('script', '') + if ( + 'setup.py install' in script_line and + '--single-version-externally-managed' not in script_line + ): + return err -def setup_py_install_args(recipe, meta, df): - if 'setuptools' not in _get_deps(meta, 'build'): - return - - err = { - 'needs_setuptools_args': True, - 'fix': ('add "--single-version-externally-managed --record=record.txt" ' - 'to setup.py command'), - } - - script_line = meta.get_section('build').get('script', '') - if ( - 'setup.py install' in script_line and - '--single-version-externally-managed' not in script_line - ): - return err - - build_sh = os.path.join(recipe, 'build.sh') - if not os.path.exists(build_sh): - return - - contents = open(build_sh).read() - if ( - 'setup.py install' in contents and - '--single-version-externally-managed' not in contents - ): - return err - - -def invalid_identifiers(recipe, meta, df): - try: - identifiers = meta.get_section('extra').get('identifiers', []) - if not isinstance(identifiers, list): - return { 'invalid_identifiers': True, - 'fix': 'extra:identifiers must hold a list of identifiers' } - if not all(isinstance(i, str) for i in identifiers): - return { 'invalid_identifiers': True, - 'fix': 'each identifier must be a string' } - if not all((':' in i) for i in identifiers): - return { 'invalid_identifiers': True, - 'fix': 'each identifier must be of the form ' - 'type:identifier (e.g., doi:123)' } - except KeyError: - # no identifier section - return - - -def deprecated_numpy_spec(recipe, meta, df): - reqs = meta.get_section('requirements') - if not reqs: - return - for section in ['build', 'run']: - for dep in reqs.get(section, []): - if dep.startswith('numpy') and 'x.x' in dep: - return { 'deprecated_numpy_spec': True, - 'fix': 'omit x.x as pinning of numpy is now ' - 'handled automatically'} - return - - -def _pin(env_var, dep_name): - """ - Generates a linting function that checks to make sure `dep_name` is pinned - to `env_var` using jinja 
templating. - """ - pin_pattern = re.compile(r"\{{\{{\s*{}\s*\}}\}}\*".format(env_var)) - def pin(recipe, meta, df): - # Note that we can't parse the meta.yaml using a normal YAML parser if it - # has jinja templating - in_requirements = False - section = None - not_pinned = set() - pinned = set() - for line in open(os.path.join(recipe, 'meta.yaml')): - line = line.rstrip("\n") - if line.startswith("requirements:"): - in_requirements = True - elif line and not line.startswith(" ") and not line.startswith("#"): - in_requirements = False - section = None - if in_requirements: - dedented_line = line.lstrip(' ') - if dedented_line.startswith("run:"): - section = "run" - elif dedented_line.startswith("build:"): - section = "build" - elif dedented_line.startswith('- {}'.format(dep_name)): - if pin_pattern.search(dedented_line) is None: - not_pinned.add(section) - else: - pinned.add(section) - - # two error cases: 1) run is not pinned but in build - # 2) build is not pinned and run is pinned - # Everything else is ok. E.g., if dependency is not in run, we don't - # need to pin build, because it is statically linked. 
- if (("run" in not_pinned and "build" in pinned.union(not_pinned)) or - ("run" in pinned and "build" in not_pinned)): - err = { - '{}_not_pinned'.format(dep_name): True, - 'fix': ( - 'pin {0} using jinja templating: ' - '{{{{ {1} }}}}*'.format(dep_name, env_var)) - } + build_sh = os.path.join(recipe, 'build.sh') + if not os.path.exists(build_sh): + continue + + contents = open(build_sh).read() + if ( + 'setup.py install' in contents and + '--single-version-externally-managed' not in contents + ): return err - pin.__name__ = "{}_not_pinned".format(dep_name) - return pin + +def invalid_identifiers(recipe, metas, df): + for meta in metas: + try: + identifiers = meta.get_section('extra').get('identifiers', []) + if not isinstance(identifiers, list): + return { 'invalid_identifiers': True, + 'fix': 'extra:identifiers must hold a list of identifiers' } + if not all(isinstance(i, str) for i in identifiers): + return { 'invalid_identifiers': True, + 'fix': 'each identifier must be a string' } + if not all((':' in i) for i in identifiers): + return { 'invalid_identifiers': True, + 'fix': 'each identifier must be of the form ' + 'type:identifier (e.g., doi:123)' } + except KeyError: + # no identifier section + continue + + +def deprecated_numpy_spec(recipe, metas, df): + for meta in metas: + reqs = meta.get_section('requirements') + if not reqs: + continue + for section in ['build', 'run']: + for dep in reqs.get(section, []): + if dep.startswith('numpy') and 'x.x' in dep: + return { 'deprecated_numpy_spec': True, + 'fix': 'omit x.x as pinning of numpy is now ' + 'handled automatically'} registry = ( @@ -360,13 +327,5 @@ def pin(recipe, meta, df): should_not_be_noarch, setup_py_install_args, invalid_identifiers, - deprecated_numpy_spec, - _pin('CONDA_ZLIB', 'zlib'), - _pin('CONDA_GMP', 'gmp'), - _pin('CONDA_BOOST', 'boost'), - _pin('CONDA_GSL', 'gsl'), - _pin('CONDA_HDF5', 'hdf5'), - _pin('CONDA_NCURSES', 'ncurses'), - _pin('CONDA_HTSLIB', 'htslib'), - _pin('CONDA_BZIP2', 
'bzip2'), + deprecated_numpy_spec ) diff --git a/bioconda_utils/linting.py b/bioconda_utils/linting.py index b5eec4ba3e..68d2bf3a1a 100644 --- a/bioconda_utils/linting.py +++ b/bioconda_utils/linting.py @@ -58,9 +58,6 @@ def not_in_bioconda(x): - if version changed, ensure build number is 0 - if version unchanged, ensure build number incremented -- currently we only check a single environment (see the `get_meta` function). - This should probably be converted to a generator function. - - currently we don't pay attention to py27/py3. It would be nice to handle that. @@ -192,8 +189,12 @@ def lint(recipes, df, exclude=None, registry=None): # TODO: do we need a way to skip this the same way we can skip lint # functions? I can't think of a reason we'd want to keep an unparseable # YAML. + metas = [] try: - meta = utils.load_metadata(recipe) + for platform in ["linux", "osx"]: + config = utils.load_conda_config(platform=platform, + trim_skip=False) + metas.extend(utils.load_all_meta(recipe, config=config)) except ( yaml.scanner.ScannerError, yaml.constructor.ConstructorError ) as e: @@ -226,7 +227,7 @@ def lint(recipes, df, exclude=None, registry=None): '%s defines skip lint test %s for recipe %s' % (source, func.__name__, recipe)) continue - result = func(recipe, meta, df) + result = func(recipe, metas, df) if result: hits.append( {'recipe': recipe, diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 531d8f5121..d9b46c0fd2 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -148,14 +148,17 @@ def sandboxed_env(env): os.environ.update(orig) -def load_all_meta(recipe): +def load_all_meta(recipe, config=None): """ For each environment, yield the rendered meta.yaml. 
""" - return [meta for (meta, _, _) in api.render(recipe, config=load_conda_config())] + if config is None: + config = load_conda_config(platform=platform) + return [meta for (meta, _, _) in api.render(recipe, + config=config)] -def load_conda_config(): +def load_conda_config(platform=None, trim_skip=True): """ Load conda config while considering global pinnings from conda-forge. """ @@ -171,14 +174,18 @@ def load_conda_config(): assert os.path.exists(config.exclusive_config_file), ("error: " "conda_build_config.yaml not found in " "environment root") + if platform: + config.platform = platform + config.trim_skip = trim_skip return config -def load_metadata(recipe): +def load_metadata(recipe, config=None): """ Load metadata for a specific environment. """ - config = load_conda_config() + if config is None: + config = load_conda_config() meta = MetaData(recipe, config=config) meta.parse_again() return meta @@ -705,20 +712,21 @@ def tobuild(recipe): # with temp_os, we can fool the MetaData if needed. platform = os.environ.get('OSTYPE', sys.platform) if platform.startswith("darwin"): - platform = 'darwin' + platform = 'osx' elif platform == "linux-gnu": platform = "linux" - with temp_os(platform): - meta = load_metadata(recipe) - # If on CI, handle noarch. - if os.environ.get('CI', None) == 'true': - if meta.get_value('build/noarch'): - if platform != 'linux': - logger.debug('FILTER: only building %s on ' - 'linux because it defines noarch.', - recipe) - return [] + + meta = load_metadata(recipe, + config=load_conda_config(platform=platform)) + # If on CI, handle noarch. 
+ if os.environ.get('CI', None) == 'true': + if meta.get_value('build/noarch'): + if platform != 'linux': + logger.debug('FILTER: only building %s on ' + 'linux because it defines noarch.', + recipe) + return [] # get all packages that would be built pkgs = set(map(os.path.basename, built_package_paths(recipe))) diff --git a/test/test_linting.py b/test/test_linting.py index e151ec1fc2..c859115a34 100644 --- a/test/test_linting.py +++ b/test/test_linting.py @@ -46,11 +46,16 @@ def _run(contents, expect_pass=True): assert len(r.recipe_dirs) == 1 name = list(r.recipe_dirs.keys())[0] recipe, meta, df = r.recipe_dirs[name], r.recipes[name]['meta.yaml'], should_pass_df - meta = utils.load_metadata(r.recipe_dirs[name]) + metas = [] + for platform in ["linux", "osx"]: + config = utils.load_conda_config(platform=platform, + trim_skip=False) + metas.extend(utils.load_all_meta(r.recipe_dirs[name], + config=config)) if expect_pass: - assert func(recipe, meta, df) is None, "lint did not pass" + assert func(recipe, metas, df) is None, "lint did not pass" else: - assert func(recipe, meta, df) is not None, "lint did not fail" + assert func(recipe, metas, df) is not None, "lint did not fail" for contents in should_pass: _run(contents, expect_pass=True) @@ -283,7 +288,7 @@ def test_missing_hash(): missing_hash: meta.yaml: | package: - name: missing_hash + name: md5hash version: "0.1" source: md5: 11111111111111111111111111111111 @@ -292,8 +297,9 @@ def test_missing_hash(): ''' missing_hash: meta.yaml: | - name: missing_hash - version: "0.1" + package: + name: metapackage + version: "0.1" ''', ], should_fail=[ @@ -310,9 +316,10 @@ def test_missing_hash(): missing_hash: meta.yaml: | package: - name: missing_hash + name: empty_hash version: "0.1" source: + fn: "a.txt" sha256: "" ''', ]) @@ -570,31 +577,31 @@ def test_should_not_be_noarch(): func=lint_functions.should_not_be_noarch, should_pass=[ ''' - should_not_be_noarch: + should_be_noarch1: meta.yaml: | package: - name: 
should_not_be_noarch + name: should_be_noarch1 version: "0.1" build: noarch: python ''', ''' - should_not_be_noarch: + should_be_noarch2: meta.yaml: | package: - name: should_not_be_noarch + name: should_be_noarch2 version: "0.1" build: noarch: python - skip: False + skip: false ''', ], should_fail=[ ''' - should_not_be_noarch: + should_not_be_noarch1: meta.yaml: | package: - name: should_not_be_noarch + name: should_not_be_noarch1 version: "0.1" build: noarch: python @@ -603,20 +610,20 @@ def test_should_not_be_noarch(): - gcc ''', ''' - should_not_be_noarch: + should_not_be_noarch2: meta.yaml: | package: - name: should_not_be_noarch + name: should_not_be_noarch2 version: "0.1" build: noarch: python skip: True # [osx] ''', ''' - should_not_be_noarch: + should_not_be_noarch3: meta.yaml: | package: - name: should_not_be_noarch + name: should_not_be_noarch3 version: "0.1" build: noarch: python diff --git a/test/test_utils.py b/test/test_utils.py index 186c6fc770..b688cd4639 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -147,6 +147,7 @@ def single_upload(): version: "0.1" '''.format(name), from_string=True) r.write_recipes() + r.pkgs = {} r.pkgs[name] = utils.built_package_paths(r.recipe_dirs[name]) build.build( @@ -706,12 +707,12 @@ def test_build_empty_extra_container(): # empty """, from_string=True) r.write_recipes() - pkg_paths = utils.built_package_paths(r.recipe_dirs['one']) + pkgs = utils.built_package_paths(r.recipe_dirs['one']) build_result = build.build( recipe=r.recipe_dirs['one'], recipe_folder='.', - pkg_paths=pkg_paths, + pkg_paths=pkgs, mulled_test=True, ) assert build_result.success From 660dc84719e467e349103d9cec4ebddfc3915506 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Thu, 26 Apr 2018 12:22:06 +0200 Subject: [PATCH 027/118] fix conda execution in docker --- bioconda_utils/docker_utils.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/bioconda_utils/docker_utils.py 
b/bioconda_utils/docker_utils.py index 0f7eb5ccf5..2daae009b9 100644 --- a/bioconda_utils/docker_utils.py +++ b/bioconda_utils/docker_utils.py @@ -55,6 +55,9 @@ import re from distutils.version import LooseVersion +import conda +import conda_build + from . import utils import logging @@ -116,6 +119,7 @@ """ FROM bioconda/bioconda-utils-build-env {self.proxies} +RUN /opt/conda/bin/conda install -y conda={conda_ver} conda-build={conda_build_ver} """ class DockerCalledProcessError(sp.CalledProcessError): @@ -357,7 +361,11 @@ def _build_image(self, image_build_dir): ).read()) with open(os.path.join(build_dir, "Dockerfile"), 'w') as fout: - fout.write(self.dockerfile_template.format(self=self)) + fout.write(self.dockerfile_template.format( + self=self, + conda_ver=conda.__version__, + conda_build_ver=conda_build.__version__) + ) logger.debug('Dockerfile:\n' + open(fout.name).read()) @@ -440,9 +448,10 @@ def build_recipe(self, recipe_dir, build_args, env, noarch=False): # Write build script to tempfile build_dir = os.path.realpath(tempfile.mkdtemp()) + script = self.build_script_template.format( + self=self, arch='noarch' if noarch else 'linux-64') with open(os.path.join(build_dir, 'build_script.bash'), 'w') as fout: - fout.write(self.build_script_template.format( - self=self, arch='noarch' if noarch else 'linux-64')) + fout.write(script) build_script = fout.name logger.debug('DOCKER: Container build script: \n%s', open(fout.name).read()) From b3e2edace0dc8d0705961c5a3f725b1328f5ae51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Thu, 26 Apr 2018 12:35:01 +0200 Subject: [PATCH 028/118] fix argument --- bioconda_utils/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index d9b46c0fd2..fe3d6067fd 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -153,7 +153,7 @@ def load_all_meta(recipe, config=None): For each environment, yield the rendered meta.yaml. 
""" if config is None: - config = load_conda_config(platform=platform) + config = load_conda_config() return [meta for (meta, _, _) in api.render(recipe, config=config)] From 7934506dacfd692543244da94781f1aeaeb17af3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Thu, 26 Apr 2018 13:29:12 +0200 Subject: [PATCH 029/118] fixes. --- bioconda_utils/build.py | 97 ++++++++++++++++++++--------------------- bioconda_utils/utils.py | 8 ++-- 2 files changed, 53 insertions(+), 52 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 740fb09d54..1135530d73 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -265,8 +265,7 @@ def build_recipes( logger.info('Filtering recipes') recipe_targets = dict( utils.filter_recipes( - recipes, check_channels, force=force) - ) + recipes, check_channels, force=force)) recipes = set(recipe_targets.keys()) dag, name2recipes = utils.get_dag(recipes, config=orig_config, blacklist=blacklist) @@ -340,53 +339,53 @@ def build_recipes( skipped_recipes.append(recipe) continue - for pkg_paths in recipe_targets[recipe]: - - # If a recipe depends on conda, it means it must be installed in - # the root env, which is not compatible with mulled-build tests. In - # that case, we temporarily disable the mulled-build tests for the - # recipe. 
- deps = [] - deps += utils.get_deps(recipe, orig_config, build=True) - deps += utils.get_deps(recipe, orig_config, build=False) - keep_mulled_test = True - if 'conda' in deps or 'conda-build' in deps: - keep_mulled_test = False - if mulled_test: - logger.info( - 'TEST SKIP: ' - 'skipping mulled-build test for %s because it ' - 'depends on conda or conda-build', recipe) - - res = build( - recipe=recipe, - recipe_folder=recipe_folder, - pkg_paths=pkg_paths, - testonly=testonly, - mulled_test=mulled_test and keep_mulled_test, - force=force, - channels=config['channels'], - docker_builder=docker_builder, - ) - - all_success &= res.success - recipe_success &= res.success - - if not res.success: - failed.append(recipe) - for n in nx.algorithms.descendants(subdag, name): - skip_dependent[n].append(recipe) - elif not testonly: - for pkg in pkg_paths: - # upload build - if anaconda_upload: - if not upload.anaconda_upload(pkg, label): - failed_uploads.append(pkg) - if mulled_upload_target and keep_mulled_test: - upload.mulled_upload(res.mulled_images, mulled_upload_target) - - # remove traces of the build - purge() + pkg_paths = recipe_targets[recipe] + + # If a recipe depends on conda, it means it must be installed in + # the root env, which is not compatible with mulled-build tests. In + # that case, we temporarily disable the mulled-build tests for the + # recipe. 
+ deps = [] + deps += utils.get_deps(recipe, orig_config, build=True) + deps += utils.get_deps(recipe, orig_config, build=False) + keep_mulled_test = True + if 'conda' in deps or 'conda-build' in deps: + keep_mulled_test = False + if mulled_test: + logger.info( + 'TEST SKIP: ' + 'skipping mulled-build test for %s because it ' + 'depends on conda or conda-build', recipe) + + res = build( + recipe=recipe, + recipe_folder=recipe_folder, + pkg_paths=pkg_paths, + testonly=testonly, + mulled_test=mulled_test and keep_mulled_test, + force=force, + channels=config['channels'], + docker_builder=docker_builder, + ) + + all_success &= res.success + recipe_success &= res.success + + if not res.success: + failed.append(recipe) + for n in nx.algorithms.descendants(subdag, name): + skip_dependent[n].append(recipe) + elif not testonly: + for pkg in pkg_paths: + # upload build + if anaconda_upload: + if not upload.anaconda_upload(pkg, label): + failed_uploads.append(pkg) + if mulled_upload_target and keep_mulled_test: + upload.mulled_upload(res.mulled_images, mulled_upload_target) + + # remove traces of the build + purge() if recipe_success: built_recipes.append(recipe) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index fe3d6067fd..e4a250d791 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -729,9 +729,10 @@ def tobuild(recipe): return [] # get all packages that would be built - pkgs = set(map(os.path.basename, built_package_paths(recipe))) + pkg_paths = built_package_paths(recipe) + pkgs = {os.path.basename(p): p for p in pkg_paths} # check which ones exist already - existing = channel_packages & pkgs + existing = [pkg for pkg in pkgs if pkg in channel_packages] for pkg in existing: logger.debug( @@ -744,7 +745,8 @@ def tobuild(recipe): "define skip for this environment. 
" "This is a conda bug.".format(pkg)) # yield all pkgs that do not yet exist - return pkgs - existing + return [pkg_path + for pkg, pkg_path in pkgs.items() if pkg not in existing] logger.debug('recipes: %s', recipes) recipes = list(recipes) From a2aa8e1cd14eeb7b2d00823203d76ac910c1b322 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Mon, 30 Apr 2018 13:31:50 +0200 Subject: [PATCH 030/118] Adapt numpy x.x lint to new multi-metadata strategy. --- bioconda_utils/lint_functions.py | 15 +++++---------- bioconda_utils/linting.py | 5 +++-- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index 44322fd4e4..00711357af 100644 --- a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -292,16 +292,11 @@ def invalid_identifiers(recipe, metas, df): def deprecated_numpy_spec(recipe, metas, df): - for meta in metas: - reqs = meta.get_section('requirements') - if not reqs: - continue - for section in ['build', 'run']: - for dep in reqs.get(section, []): - if dep.startswith('numpy') and 'x.x' in dep: - return { 'deprecated_numpy_spec': True, - 'fix': 'omit x.x as pinning of numpy is now ' - 'handled automatically'} + with open(os.path.join(recipe, "meta.yaml")) as recipe: + if re.search("numpy( )+x\.x", recipe.read()): + return { 'deprecated_numpy_spec': True, + 'fix': 'omit x.x as pinning of numpy is now ' + 'handled automatically'} registry = ( diff --git a/bioconda_utils/linting.py b/bioconda_utils/linting.py index 68d2bf3a1a..568879364c 100644 --- a/bioconda_utils/linting.py +++ b/bioconda_utils/linting.py @@ -211,8 +211,9 @@ def lint(recipes, df, exclude=None, registry=None): skip_for_this_recipe = set(skip_dict[recipe]) # skips defined in meta.yaml - persistent = meta.get_section('extra').get('skip-lints', []) - skip_for_this_recipe.update(persistent) + for meta in metas: + persistent = meta.get_section('extra').get('skip-lints', []) + 
skip_for_this_recipe.update(persistent) for func in registry: if func.__name__ in skip_for_this_recipe: From 26132edc072c6af5190e1bd528e544811550d896 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Mon, 30 Apr 2018 13:40:15 +0200 Subject: [PATCH 031/118] properly render metadata when finding built package paths. --- bioconda_utils/utils.py | 4 +--- test/test_utils.py | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index e4a250d791..d21f63a37e 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -579,9 +579,7 @@ def built_package_paths(recipe): but without the subprocess. """ config = load_conda_config() - meta = load_metadata(recipe) - paths = api.get_output_file_paths(meta, config=config) - assert paths, "bug: empty list of paths returned" + paths = api.get_output_file_paths(recipe, config=config) return paths diff --git a/test/test_utils.py b/test/test_utils.py index b688cd4639..3ebc134545 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -337,9 +337,6 @@ def test_filter_recipes_no_skipping(): def test_filter_recipes_skip_is_true(): - """ - - """ r = Recipes( """ one: @@ -354,6 +351,7 @@ def test_filter_recipes_skip_is_true(): recipes = list(r.recipe_dirs.values()) filtered = list( utils.filter_recipes(recipes)) + print(filtered) assert len(filtered) == 0 From 7aa4a7caf70f1f6479549c0d8716404fbf1815c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Mon, 30 Apr 2018 19:48:07 +0200 Subject: [PATCH 032/118] fixes. 
--- test/test_utils.py | 41 +++++++++++++++++++++-------------------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/test/test_utils.py b/test/test_utils.py index 3ebc134545..63a70d8848 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -488,7 +488,6 @@ def test_rendering_sandboxing(): """, from_string=True) r.write_recipes() - pkg_paths = utils.built_package_paths(r.recipe_dirs['one']) env = { # First one is allowed, others are not 'CONDA_ARBITRARY_VAR': 'conda-val-here', @@ -508,6 +507,7 @@ def test_rendering_sandboxing(): if 'GITHUB_TOKEN' in os.environ: with pytest.raises(sp.CalledProcessError) as excinfo: + pkg_paths = utils.built_package_paths(r.recipe_dirs['one']) res = build.build( recipe=r.recipe_dirs['one'], recipe_folder='.', @@ -515,8 +515,7 @@ def test_rendering_sandboxing(): mulled_test=False, _raise_error=True, ) - assert ("Undefined Jinja2 variables remain (['GITHUB_TOKEN']). " - "Please enable source downloading and try again.") in str(excinfo.value.stdout) + assert ("'GITHUB_TOKEN' is undefined" in str(excinfo.value.stdout)) else: # recipe for "one" should fail because GITHUB_TOKEN is not a jinja var. 
with pytest.raises(SystemExit) as excinfo: @@ -526,7 +525,7 @@ def test_rendering_sandboxing(): pkg_paths=pkg_paths, mulled_test=False ) - assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value) + assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value.stdout) r = Recipes( """ @@ -540,24 +539,26 @@ def test_rendering_sandboxing(): """, from_string=True) r.write_recipes() - pkg_paths = utils.built_package_paths(r.recipe_dirs['two']) - for pkg in pkg_paths: - ensure_missing(pkg) - res = build.build( - recipe=r.recipe_dirs['two'], - recipe_folder='.', - pkg_paths=pkg_paths, - mulled_test=False - ) + with utils.temp_env(env): + pkg_paths = utils.built_package_paths(r.recipe_dirs['two']) + for pkg in pkg_paths: + ensure_missing(pkg) - for pkg in pkg_paths: - t = tarfile.open(pkg) - tmp = tempfile.mkdtemp() - target = 'info/recipe/meta.yaml' - t.extract(target, path=tmp) - contents = yaml.load(open(os.path.join(tmp, target)).read()) - assert contents['extra']['var2'] == 'conda-val-here', contents + res = build.build( + recipe=r.recipe_dirs['two'], + recipe_folder='.', + pkg_paths=pkg_paths, + mulled_test=False + ) + + for pkg in pkg_paths: + t = tarfile.open(pkg) + tmp = tempfile.mkdtemp() + target = 'info/recipe/meta.yaml' + t.extract(target, path=tmp) + contents = yaml.load(open(os.path.join(tmp, target)).read()) + assert contents['extra']['var2'] == 'conda-val-here', contents def test_sandboxed(): From 69b0123920db0b236a099810d004f3ac7d822b03 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Mon, 30 Apr 2018 20:12:54 +0200 Subject: [PATCH 033/118] fixes --- test/test_utils.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/test/test_utils.py b/test/test_utils.py index 63a70d8848..0d9db73f35 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -519,6 +519,7 @@ def test_rendering_sandboxing(): else: # recipe for "one" should fail because GITHUB_TOKEN is not a jinja var. 
with pytest.raises(SystemExit) as excinfo: + pkg_paths = utils.built_package_paths(r.recipe_dirs['one']) res = build.build( recipe=r.recipe_dirs['one'], recipe_folder='.', @@ -643,8 +644,8 @@ def test_skip_dependencies(): for k, v in r.recipe_dirs.items(): pkgs[k] = utils.built_package_paths(v) - for pkgs in pkgs.values(): - for pkg in pkgs: + for _pkgs in pkgs.values(): + for pkg in _pkgs: ensure_missing(pkg) build.build_recipes( @@ -663,8 +664,8 @@ def test_skip_dependencies(): assert not os.path.exists(pkg) # clean up - for pkgs in pkgs.values(): - for pkg in pkgs: + for _pkgs in pkgs.values(): + for pkg in _pkgs: ensure_missing(pkg) From 2102ad041a3b0a005e535c461fddaa865664de5d Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 30 Apr 2018 21:13:57 -0400 Subject: [PATCH 034/118] fix test for rendering sandboxing --- test/test_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_utils.py b/test/test_utils.py index 0d9db73f35..d7a6f69022 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -526,7 +526,7 @@ def test_rendering_sandboxing(): pkg_paths=pkg_paths, mulled_test=False ) - assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value.stdout) + assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value) r = Recipes( """ From ff204febc064680f0c6eb34e95af6006fca76dc5 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 1 May 2018 11:59:28 -0400 Subject: [PATCH 035/118] disable skip-existing --- bioconda_utils/build.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 1135530d73..9104e821c9 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -89,7 +89,8 @@ def build( # build_args = ["--no-build-id"] # use global variant config file (contains pinnings) - build_args = ["--skip-existing"] + # build_args = ["--skip-existing"] + build_args = [] if testonly: build_args.append("--test") else: From 3e1533af3469fb731ddf5913dc5a67a2d3843ed9 Mon 
Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Wed, 2 May 2018 18:14:44 +0200 Subject: [PATCH 036/118] Use same build args for the copying as for building. --- bioconda_utils/build.py | 4 ++-- bioconda_utils/docker_utils.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 9104e821c9..ddc24c853f 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -89,7 +89,7 @@ def build( # build_args = ["--no-build-id"] # use global variant config file (contains pinnings) - # build_args = ["--skip-existing"] + build_args = ["--skip-existing"] build_args = [] if testonly: build_args.append("--test") @@ -357,7 +357,7 @@ def build_recipes( 'TEST SKIP: ' 'skipping mulled-build test for %s because it ' 'depends on conda or conda-build', recipe) - + res = build( recipe=recipe, recipe_folder=recipe_folder, diff --git a/bioconda_utils/docker_utils.py b/bioconda_utils/docker_utils.py index 2daae009b9..7803298765 100644 --- a/bioconda_utils/docker_utils.py +++ b/bioconda_utils/docker_utils.py @@ -98,7 +98,8 @@ # conda-build from building all subdirectories conda build -e {self.container_staging}/conda_build_config.yaml {self.conda_build_args} {self.container_recipe}/meta.yaml 2>&1 -cp `conda build {self.container_recipe}/meta.yaml --output` {self.container_staging}/{arch} +# copy all built packages to the staging area +cp `conda build {self.container_recipe}/meta.yaml {self.conda_build_args} --output` {self.container_staging}/{arch} # Ensure permissions are correct on the host. 
HOST_USER={self.user_info[uid]} chown $HOST_USER:$HOST_USER {self.container_staging}/{arch}/* From 3cd0ff6473ad24337407528a9a591d80de23e42f Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 4 May 2018 17:08:53 -0400 Subject: [PATCH 037/118] add test for conda-forge pinning --- test/test_utils.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/test/test_utils.py b/test/test_utils.py index d7a6f69022..0e0d683eeb 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -763,3 +763,28 @@ def test_build_container_default_gcc(tmpdir): mulled_test=False, ) assert build_result.success + + +def test_conda_forge_pins(caplog): + caplog.set_level(logging.DEBUG) + r = Recipes( + """ + one: + meta.yaml: | + package: + name: one + version: 0.1 + requirements: + run: + - zlib {{ zlib }} + """, from_string=True) + r.write_recipes() + build_result = build.build_recipes( + r.basedir, + config={}, + packages="*", + testonly=False, + force=False, + mulled_test=True, + ) + assert build_result From c6ed11944f7266c82b3745c722b09503fa3ec419 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 4 May 2018 21:14:26 -0400 Subject: [PATCH 038/118] add ability to have separate bioconda pins --- MANIFEST.in | 1 + bioconda_utils/build.py | 1 + bioconda_utils/utils.py | 28 ++++++++----- test/test_utils.py | 89 ++++++++++++++++++++++++++++++++++++++++- 4 files changed, 108 insertions(+), 11 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index 113303f097..603fc794af 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,6 @@ include bioconda_utils/bioconda_startup.sh include bioconda_utils/bioconda_utils-requirements.txt +include bioconda_utils/bioconda_utils-conda_build_config.yaml include bioconda_utils/config.schema.yaml include bioconda_utils/channel_order.txt include bioconda_utils/templates/* diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index ddc24c853f..fdcf9eba11 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -136,6 
+136,7 @@ def build( with utils.sandboxed_env(whitelisted_env): cmd = CONDA_BUILD_CMD + build_args + channel_args + \ ["-e", utils.load_conda_config().exclusive_config_file] + \ + ["-m"] + utils.load_conda_config().variant_config_files + \ [os.path.join(recipe, 'meta.yaml')] logger.debug('command: %s', cmd) with utils.Progress(): diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index d21f63a37e..208e10e45e 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -22,7 +22,6 @@ from pathlib import PurePath from conda_build import api -from conda_build.metadata import MetaData from conda.models.version import VersionOrder import yaml from jinja2 import Environment, PackageLoader @@ -171,6 +170,13 @@ def load_conda_config(platform=None, trim_skip=True): # set path to pinnings from conda forge package config.exclusive_config_file = os.path.join(env_root, "conda_build_config.yaml") + config.variant_config_files = [ + os.path.join( + os.path.dirname(__file__), + 'bioconda_utils-conda_build_config.yaml') + ] + for cfg in config.variant_config_files: + assert os.path.exists(cfg), ('error: {0} does not exist'.format(cfg)) assert os.path.exists(config.exclusive_config_file), ("error: " "conda_build_config.yaml not found in " "environment root") @@ -180,15 +186,17 @@ def load_conda_config(platform=None, trim_skip=True): return config -def load_metadata(recipe, config=None): +def load_first_metadata(recipe, config=None): """ - Load metadata for a specific environment. + Returns just the first of possibly many metadata files. Used for when you + need to do things like check a package name or version number (which are + not expected to change between variants). 
+ + If the recipe will be skipped, then returns None """ - if config is None: - config = load_conda_config() - meta = MetaData(recipe, config=config) - meta.parse_again() - return meta + metas = load_all_meta(recipe, config) + if len(metas) > 0: + return metas[0] @contextlib.contextmanager @@ -477,7 +485,7 @@ def toplevel(x): else: def get_version(p): return VersionOrder( - load_metadata(os.path.join(p, 'meta.yaml')).get_value('package/version') + load_first_metadata(os.path.join(p, 'meta.yaml')).get_value('package/version') ) sorted_versions = sorted(group, key=get_version) if sorted_versions: @@ -715,7 +723,7 @@ def tobuild(recipe): platform = "linux" - meta = load_metadata(recipe, + meta = load_first_metadata(recipe, config=load_conda_config(platform=platform)) # If on CI, handle noarch. if os.environ.get('CI', None) == 'true': diff --git a/test/test_utils.py b/test/test_utils.py index 0e0d683eeb..e7d949c81d 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -10,6 +10,7 @@ import tarfile import logging import shutil +from textwrap import dedent from bioconda_utils import utils from bioconda_utils import pkg_test @@ -28,6 +29,10 @@ # Label that will be used for uploading test packages to anaconda/binstar TEST_LABEL = 'bioconda-utils-test' +# PARAMS and ID are used with pytest.fixture. The end result is that, on Linux, +# any tests that depend on a fixture that uses PARAMS will run twice (once with +# docker, once without). On OSX, only the non-docker runs. 
+ SKIP_DOCKER_TESTS = sys.platform.startswith('darwin') if SKIP_DOCKER_TESTS: @@ -785,6 +790,88 @@ def test_conda_forge_pins(caplog): packages="*", testonly=False, force=False, - mulled_test=True, + mulled_test=False, ) assert build_result + + +def test_bioconda_pins(caplog): + """ + htslib currently only provided by bioconda pinnings + """ + caplog.set_level(logging.DEBUG) + r = Recipes( + """ + one: + meta.yaml: | + package: + name: one + version: 0.1 + requirements: + run: + - htslib {{ htslib }} + """, from_string=True) + r.write_recipes() + build_result = build.build_recipes( + r.basedir, + config={}, + packages="*", + testonly=False, + force=False, + mulled_test=False, + ) + assert build_result + + +def test_load_meta_skipping(): + """ + Ensure that a skipped recipe returns no metadata + """ + r = Recipes( + """ + one: + meta.yaml: | + package: + name: one + version: "0.1" + build: + skip: true + """, from_string=True) + r.write_recipes() + recipe = r.recipe_dirs['one'] + assert utils.load_all_meta(recipe) == [] + + +def test_variants(): + """ + Multiple variants should return multiple metadata + """ + r = Recipes( + """ + one: + meta.yaml: | + package: + name: one + version: "0.1" + requirements: + build: + - mypkg {{ mypkg }} + """, from_string=True) + r.write_recipes() + recipe = r.recipe_dirs['one'] + + # Write a temporary conda_build_config.yaml that we'll point the config + # object to: + tmp = tempfile.NamedTemporaryFile(delete=False).name + with open(tmp, 'w') as fout: + fout.write( + dedent( + """ + mypkg: + - 1.0 + - 2.0 + """)) + config = utils.load_conda_config() + config.exclusive_config_file = tmp + + assert len(utils.load_all_meta(recipe, config)) == 2 From 31bd4840294e546dba5c993c1ad8156b9a451662 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 4 May 2018 21:17:30 -0400 Subject: [PATCH 039/118] pep8 --- bioconda_utils/build.py | 7 ++----- bioconda_utils/utils.py | 27 +++++++++++++-------------- 2 files changed, 15 insertions(+), 19 
deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index fdcf9eba11..528bca29f4 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -113,7 +113,7 @@ def build( # want to add TRAVIS* vars if that behavior is not disabled. if docker_builder is not None: - response = docker_builder.build_recipe( + docker_builder.build_recipe( recipe_dir=os.path.abspath(recipe), build_args=' '.join(channel_args + build_args), env=whitelisted_env, @@ -126,7 +126,6 @@ def build( "BUILD FAILED: the built package %s " "cannot be found", pkg_path) return BuildResult(False, None) - build_success = True else: # Temporarily reset os.environ to avoid leaking env vars to @@ -140,9 +139,7 @@ def build( [os.path.join(recipe, 'meta.yaml')] logger.debug('command: %s', cmd) with utils.Progress(): - p = utils.run(cmd, env=os.environ, mask=False) - - build_success = True + utils.run(cmd, env=os.environ, mask=False) logger.info('BUILD SUCCESS %s', ' '.join(os.path.basename(p) for p in pkg_paths)) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 208e10e45e..4a4cd09c0d 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -40,16 +40,17 @@ 'CRITICAL': 'red', })) + def setup_logger(name, loglevel=None): - l = logging.getLogger(name) - l.propagate = False + logger = logging.getLogger(name) + logger.propagate = False if loglevel: - l.setLevel(getattr(logging, loglevel.upper())) - l.addHandler(log_stream_handler) - return l + logger.setLevel(getattr(logging, loglevel.upper())) + logger.addHandler(log_stream_handler) + return logger -logger = setup_logger(__name__) +logger = setup_logger(__name__) jinja = Environment( loader=PackageLoader('bioconda_utils', 'templates'), @@ -78,7 +79,6 @@ def setup_logger(name, loglevel=None): ] - def get_free_space(): """Return free space in MB on disk""" s = os.statvfs(os.getcwd()) @@ -177,9 +177,8 @@ def load_conda_config(platform=None, trim_skip=True): ] for cfg in config.variant_config_files: 
assert os.path.exists(cfg), ('error: {0} does not exist'.format(cfg)) - assert os.path.exists(config.exclusive_config_file), ("error: " - "conda_build_config.yaml not found in " - "environment root") + assert os.path.exists(config.exclusive_config_file), ( + "error: conda_build_config.yaml not found in environment root") if platform: config.platform = platform config.trim_skip = trim_skip @@ -232,12 +231,13 @@ def run(cmds, env=None, mask=None, **kwargs): except sp.CalledProcessError as e: e.stdout = e.stdout.decode(errors='replace') # mask command arguments + def do_mask(arg): if mask is None: # caller has not considered masking, hide the entire command # for security reasons return '' - elif mask == False: + elif mask is False: # masking has been deactivated return arg for m in mask: @@ -722,9 +722,8 @@ def tobuild(recipe): elif platform == "linux-gnu": platform = "linux" - - meta = load_first_metadata(recipe, - config=load_conda_config(platform=platform)) + meta = load_first_metadata( + recipe, config=load_conda_config(platform=platform)) # If on CI, handle noarch. if os.environ.get('CI', None) == 'true': if meta.get_value('build/noarch'): From 8139e77355860e7f2dca592cd352b8fb5b8c1b59 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 4 May 2018 21:19:12 -0400 Subject: [PATCH 040/118] add some more docs on testing --- docs/source/contribute-a-recipe.rst | 13 +++++++++-- docs/source/faqs.rst | 36 ++++++++++++++++++++++++++++- 2 files changed, 46 insertions(+), 3 deletions(-) diff --git a/docs/source/contribute-a-recipe.rst b/docs/source/contribute-a-recipe.rst index 4c45110196..065f5ab6e5 100644 --- a/docs/source/contribute-a-recipe.rst +++ b/docs/source/contribute-a-recipe.rst @@ -53,6 +53,10 @@ bioconda-utils. The first is probably more straightforward; the second is more stringent, can be used for testing on MacOS, and allows the full customization of the bioconda-utils calls. +.. 
_cci_clent: + +Circle CI client method ++++++++++++++++++++++++ The simplest way to conduct local tests is to :ref:`setup the Circle CI client `. Then run the following commands: @@ -72,8 +76,13 @@ in ``scripts/env.yaml`` exported into the build environment. The latter allows otherwise wouldn't work with a simple ``conda build`` directly from the command line. -However, due to technical limitations of the Circle CI client, the above test -does **not** run the more stringent ``mulled-build`` tests. To do so, use the + +.. _bootstrap: + +"Bootstrap" method +++++++++++++++++++ +Due to technical limitations of the Circle CI client, the above test does +**not** run the more stringent ``mulled-build`` tests. To do so, use the following commands: .. code-block:: bash diff --git a/docs/source/faqs.rst b/docs/source/faqs.rst index 812d7ac67c..d329aa4409 100644 --- a/docs/source/faqs.rst +++ b/docs/source/faqs.rst @@ -124,4 +124,38 @@ macOS builds to the bioconda team. To ensure that CircleCI uses the bioconda team account, please **disable** CircleCI on your fork (look for the big red "Stop Building" button at -https://circleci.com/dashboard under the settings for your fork. +https://circleci.com/dashboard under the settings for your fork). + +Testing ``bioconda-utils`` locally +---------------------------------- + +Follow the instructions at :ref:`bootstrap` to create a separate Miniconda +installation using the ``bootstrap.py`` script in the `bioconda-recipes` repo. + +Then, in the activated environment, install the bioconda-utils test +requirements, from the top-level directory of the ``bioconda-utils`` repo. +While the bootstrap script installs bioconda-utils dependencies, if there are +any changes in ``requirements.txt`` you will want to install them as well. + +The bootstrap script already installed bioconda-utils, but we want to install +it in develop mode so we can make local changes and they will be immediately +picked up. 
So we need to uninstall and then reinstall bioconda-utils. + +Finally, run the tests using ``pytest``. + +In summary: + +.. code-block:: bash + + # activate env + source ~/.config/bioconda/activate + + # install dependencies + conda install --file test-requirements.txt --file bioconda_utils/bioconda_utils-requirements.txt + + # uninstall and then reinstall + pip uninstall bioconda_utils + python setup.py develop + + # run tests + pytest test -vv From 788f6d54e7b13576fa1cc50648a38a679f5039e0 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 4 May 2018 21:19:37 -0400 Subject: [PATCH 041/118] add bioconda conda_build_config.yaml --- .../bioconda_utils-conda_build_config.yaml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 bioconda_utils/bioconda_utils-conda_build_config.yaml diff --git a/bioconda_utils/bioconda_utils-conda_build_config.yaml b/bioconda_utils/bioconda_utils-conda_build_config.yaml new file mode 100644 index 0000000000..ead983a40c --- /dev/null +++ b/bioconda_utils/bioconda_utils-conda_build_config.yaml @@ -0,0 +1,14 @@ +# Additional bioconda-specific pinnings to use in addition to those specified +# in +# https://github.com/conda-forge/conda-forge-pinning-feedstock/blob/master/recipe/conda_build_config.yaml + +pin_run_as_build: + htslib: + max_pin: x.x + bamtools: + max_pin: x.x + +htslib: + - 1.7 +bamtools: + - 2.4.1 From cfa406093855d02ba10e2909c410479fc6ee38b4 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 4 May 2018 21:39:58 -0400 Subject: [PATCH 042/118] add first metadata fix --- bioconda_utils/build.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 528bca29f4..2ab7a51de8 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -106,7 +106,10 @@ def build( CONDA_BUILD_CMD = [utils.bin_for('conda'), 'build'] - meta = utils.load_metadata(recipe) + # Even though there may be variants of the recipe that will be built, we + # will 
only be checking attributes that are independent of variants (pkg + # name, version, noarch, whether or not an extended container was used) + meta = utils.load_first_metadata(recipe) try: # Note we're not sending the contents of os.environ here. But we do From 04543f321c857a3e20c9fca48bb68dafe1d00e50 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 4 May 2018 22:37:50 -0400 Subject: [PATCH 043/118] update tests for load_first_metadata --- test/test_bioconductor_skeleton.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/test_bioconductor_skeleton.py b/test/test_bioconductor_skeleton.py index 1ea30b9fb6..7d619f06b9 100644 --- a/test/test_bioconductor_skeleton.py +++ b/test/test_bioconductor_skeleton.py @@ -67,7 +67,7 @@ def test_meta_contents(tmpdir): bioconductor_skeleton.write_recipe( 'edgeR', recipe_dir=str(tmpdir), config=config, recursive=False) - edger_meta = utils.load_metadata(str(tmpdir.join('bioconductor-edger'))).meta + edger_meta = utils.load_first_metadata(str(tmpdir.join('bioconductor-edger'))).meta assert 'r-rcpp' in edger_meta['requirements']['build'] # note that the preprocessing selector is stripped off by yaml parsing, so @@ -141,7 +141,7 @@ def test_bioarchive_exists(): def test_annotation_data(tmpdir): bioconductor_skeleton.write_recipe('AHCytoBands', str(tmpdir), config, recursive=True) - meta = utils.load_metadata(str(tmpdir.join('bioconductor-ahcytobands'))).meta + meta = utils.load_first_metadata(str(tmpdir.join('bioconductor-ahcytobands'))).meta assert 'wget' in meta['requirements']['run'] assert len(meta['source']['url']) == 3 assert not tmpdir.join('bioconductor-ahcytobands', 'build.sh').exists() @@ -151,7 +151,7 @@ def test_annotation_data(tmpdir): def test_experiment_data(tmpdir): bioconductor_skeleton.write_recipe('affydata', str(tmpdir), config, recursive=True) - meta = utils.load_metadata(str(tmpdir.join('bioconductor-affydata'))).meta + meta = 
utils.load_first_metadata(str(tmpdir.join('bioconductor-affydata'))).meta assert 'wget' in meta['requirements']['run'] assert len(meta['source']['url']) == 3 assert not tmpdir.join('bioconductor-affydata', 'build.sh').exists() From 526e48db59ccb043d307f172f2c028f002b16a59 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 4 May 2018 22:38:46 -0400 Subject: [PATCH 044/118] first pass of cb3 migration docs --- docs/source/cb3.rst | 148 ++++++++++++++++++++++++++++++++++++++++++ docs/source/index.rst | 1 + 2 files changed, 149 insertions(+) create mode 100644 docs/source/cb3.rst diff --git a/docs/source/cb3.rst b/docs/source/cb3.rst new file mode 100644 index 0000000000..ead91f8d0a --- /dev/null +++ b/docs/source/cb3.rst @@ -0,0 +1,148 @@ +Conda build v3 +-------------- + +Conda build version 3 has lots of nice features that will make managing +packages in Bioconda much easier. However there are some changes that you will +need to be aware of, especially if you're used to making recipes using the old +conda-build v2 way. + +This page documents each change and is intended to serve as a reference for the +transition. + +.. _host-section: + +``build:`` section is now ``host:`` section +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +**Summary:** + +- Previously, build-time dependencies were listed in the ``requirements:build:`` section. +- Instead, we should only put **compilers and build tools** in the ``build:`` + section. Other dependencies should go in a new ``host:`` section. + +Due to the improved way compilers are now being handled (see +:ref:`compiler-tools`), the old build section is now split into ``build`` and +``host``. The majority of dependencies in the old ``build:`` should now go into +the new ``host:`` section. If there are no compilers, then there should be no +``build:`` section. + +Before: + +.. code-block:: yaml + + package: + name: example + version: 0.1 + requirements: + build: + - python + run: + - python + +After: + +.. 
code-block:: yaml + + package: + name: example + version: 0.1 + requirements: + host: + - python + run: + - python + +.. seealso:: + + See the `requirements section + `_ + of the conda docs for more info. + + +.. _compiler-tools: + +Compiler tools +~~~~~~~~~~~~~~ +**Summary:** + +- Previously we used ``- gcc #[linux]`` and ``- llvm # [osx]`` for compilers +- Instead, we should use the syntax ``{{ compiler('c') }}``, ``{{ + compiler('cxx') }}``, and/or ``{{ compiler('fortran') }}``. This should go in + the ``build:`` section. + +Anaconda now provides platform-specific compilers that are automatically +determined. The string ``{{ compiler('c') }}`` will resolve to ``gcc`` on +Linux, but ``clang`` on macOS. This should greatly simplify recipes, as we no +longer need to have separate lines for linux and osx. + +This also opens up the future possibility of cross-compiling, allowing us to +build macOS packages from Linux infrastructure. + +Note that previously we typically would also add ``- libgcc #[linux]`` as a run +dependency, but this is now taken care of by the compiler tools. + +.. seealso:: + + - The `compiler tools + _ + section of the conda docs has much more info. + + - The default compiler options are defined by conda-build in the + `variants.DEFAULT_COMPILERS + `_ + variable. + + - More details on "strong" and "weak" exports (using examples of libpng and + libgcc) can be found in the `export runtime requirements + `_ + conda documentation. + +.. _global-pinning: + +Global pinning +~~~~~~~~~~~~~~ + +**Summary:** + +- Previously we pinned packages using the syntax ``- zlib {{ CONDA_ZLIB }}*`` +- Instead, we should now pin packages with the syntax ``- zlib {{ zlib }}``. + +Global pinning is the idea of making sure all recipes use the same versions of +common libraries. For example, many bioinformatics tools have `zlib` as +a dependency. 
The version of `zlib` used when building the package should be the +same as the version used when installing the package into a new environment. +Problems arise when the build-time version does not match the install-time +version. Furthermore, all packages installed into the same environment should +have been built using the same zlib so that they can co-exist. This implies +that we need to specify the `zlib` version in one place and have all recipes +use that version. + +Previously we maintained a global pinning file (see `scripts/env_matrix.yaml +`_), +and in there was the variable ``CONDA_ZLIB`` that was made available to the +recipes as a jinja2 variable. One problem with this is that we did not often +synchronize our pinned versions with conda-forge's pinned versions, and this +disconnect could cause problems. + +Now, conda-build 3 has the concept of "variants", which is a generalized way of +solving this problem. This generally takes the form of a YAML file. We have +adopted the pinned versions used by conda-forge, which they provide in the +``conda-forge-pinning`` conda package. That package unpacks a config YAML into +the conda environment so that we can use that for building all recipes. + +To see what versions of packages are pinned, please consult the +`conda_build_config.yaml +`_ +file used by the package. + +.. seealso:: + + The `build variants + `_ + section of the conda docs has much more information. + + Packages pinned by conda-forge (which we also use) can be found in their + `conda_build_config.yaml + `_ + + Bio-specific packages additionally pinned by bioconda can be found at + ``bioconda_utils-conda_build_config.yaml`` in the bioconda-utils source. 
diff --git a/docs/source/index.rst b/docs/source/index.rst index 1080e86b5e..b423dd39c5 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -166,4 +166,5 @@ Contents: linting faqs build-system + cb3 changes From ed371bda39aa3d2be6d043e063c3900dc4da933c Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 5 May 2018 16:30:43 -0400 Subject: [PATCH 045/118] port bioconductor_skeleton to use cb3 compilers; improve detection --- bioconda_utils/bioconductor_skeleton.py | 73 +++++++++++++++++-------- 1 file changed, 51 insertions(+), 22 deletions(-) diff --git a/bioconda_utils/bioconductor_skeleton.py b/bioconda_utils/bioconductor_skeleton.py index 155fe911fa..d06c99391e 100755 --- a/bioconda_utils/bioconductor_skeleton.py +++ b/bioconda_utils/bioconductor_skeleton.py @@ -35,12 +35,6 @@ "grid", "methods", "parallel", "splines", "stats", "stats4", "tcltk", "tools", "utils"] - -# A list of packages, in recipe name format. If a package depends on something -# in this list, then we will add the gcc/llvm build-deps as appropriate to the -# constructed recipe. 
-GCC_PACKAGES = ['r-rcpp'] - HERE = os.path.abspath(os.path.dirname(__file__)) @@ -594,12 +588,43 @@ def dependencies(self): else: dependency_mapping[prefix + name.lower() + version] = name - if ( - (prefix + name.lower() in GCC_PACKAGES) or - (self.description.get('needscompilation', 'no') == 'yes') or - (self.description.get('linkingto', None) is not None) - ): - self.depends_on_gcc = True + if ( + (self.description.get('needscompilation', 'no') == 'yes') or + (self.description.get('linkingto', None) is not None) + ): + # Modified from conda_build.skeletons.cran + # + with tarfile.open(self.cached_tarball) as tf: + need_f = any([f.name.lower().endswith(('.f', '.f90', '.f77')) for f in tf]) + need_c = True if need_f else \ + any([f.name.lower().endswith('.c') for f in tf]) + need_cxx = any([f.name.lower().endswith(('.cxx', '.cpp', '.cc', '.c++')) + for f in tf]) + need_autotools = any([f.name.lower().endswith('/configure') for f in tf]) + need_make = True if any((need_autotools, need_f, need_cxx, need_c)) else \ + any([f.name.lower().endswith(('/makefile', '/makevars')) + for f in tf]) + else: + need_c = need_cxx = need_f = need_autotools = need_make = False + + for name, version in sorted(versions.items()): + if name in ['Rcpp', 'RcppArmadillo']: + need_cxx = True + + if need_cxx: + need_c = True + + self._cb3_build_reqs = {} + if need_c: + self._cb3_build_reqs['c'] = "{{ compiler('c') }}" + if need_cxx: + self._cb3_build_reqs['cxx'] = "{{ compiler('cxx') }}" + if need_f: + self._cb3_build_reqs['fortran'] = "{{ compiler('fortran') }}" + if need_autotools: + self._cb3_build_reqs['automake'] = 'automake' + if need_make: + self._cb3_build_reqs['make'] = 'make' # Add R itself if not specific_r_version: @@ -721,7 +746,7 @@ def sub_placeholders(x): # object and tries to make a shortcut, causing an error in # decoding unicode. Possible pyaml bug? Anyway, this fixes # it. 
- ('build', DEPENDENCIES[:]), + ('host', DEPENDENCIES[:]), ('run', DEPENDENCIES[:] + additional_run_deps), )), ), @@ -740,21 +765,25 @@ def sub_placeholders(x): )), ), )) - if self.depends_on_gcc: - d['requirements']['build'].append('GCC_PLACEHOLDER') - d['requirements']['build'].append('LLVM_PLACEHOLDER') + + if self._cb3_build_reqs: + d['requirements']['build'] = [] + for k, v in self._cb3_build_reqs.items(): + d['requirements']['build'].append(k + '_' + "PLACEHOLDER") rendered = pyaml.dumps(d, width=1e6).decode('utf-8') - rendered = rendered.replace('GCC_PLACEHOLDER', 'gcc # [linux]') - rendered = rendered.replace('LLVM_PLACEHOLDER', 'llvm # [osx]') rendered = ( '{% set version = "' + self.version + '" %}\n' + '{% set name = "' + self.package + '" %}\n' + '{% set bioc = "' + self.bioc_version + '" %}\n\n' + rendered ) - tmp = tempfile.NamedTemporaryFile(delete=False).name - with open(tmp, 'w') as fout: + + for k, v in self._cb3_build_reqs.items(): + rendered = rendered.replace(k + '_' + "PLACEHOLDER", v) + + tmpdir = tempfile.mkdtemp() + with open(os.path.join(tmpdir, 'meta.yaml'), 'w') as fout: fout.write(rendered) return fout.name @@ -894,8 +923,8 @@ def write_recipe(package, recipe_dir, config, force=False, bioc_version=None, # *has* changed, then bump the version number. 
meta_file = os.path.join(recipe_dir, 'meta.yaml') if os.path.exists(meta_file): - updated_meta = utils.load_metadata(proj.meta_yaml).meta - current_meta = utils.load_metadata(meta_file).meta + updated_meta = utils.load_first_metadata(proj.meta_yaml).meta + current_meta = utils.load_first_metadata(meta_file).meta # pop off the version and build numbers so we can compare the rest of # the dicts From 9473f744020e0c129e76224199441e6c2a833958 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 5 May 2018 16:31:26 -0400 Subject: [PATCH 046/118] update bioconductor_skeleton test --- test/test_bioconductor_skeleton.py | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/test/test_bioconductor_skeleton.py b/test/test_bioconductor_skeleton.py index 7d619f06b9..3dc4ff38f4 100644 --- a/test/test_bioconductor_skeleton.py +++ b/test/test_bioconductor_skeleton.py @@ -1,13 +1,9 @@ -import sys - import pytest from bioconda_utils import bioconductor_skeleton from bioconda_utils import cran_skeleton from bioconda_utils import utils -import helpers - config = { 'channels': ['bioconda', 'conda-forge', 'defaults'] @@ -68,16 +64,11 @@ def test_meta_contents(tmpdir): 'edgeR', recipe_dir=str(tmpdir), config=config, recursive=False) edger_meta = utils.load_first_metadata(str(tmpdir.join('bioconductor-edger'))).meta - assert 'r-rcpp' in edger_meta['requirements']['build'] - - # note that the preprocessing selector is stripped off by yaml parsing, so - # just check for gcc - if sys.platform == 'linux': - assert 'gcc' in edger_meta['requirements']['build'] - elif sys.platform == 'darwin': - assert 'llvm' in edger_meta['requirements']['build'] - else: - raise ValueError('Unhandled platform: {}'.format(sys.platform)) + assert 'r-rcpp' in edger_meta['requirements']['run'] + + # The rendered meta has {{ compiler('c') }} filled in, so we need to check + # for one of those filled-in values. 
+ assert 'toolchain' in edger_meta['requirements']['build'] # bioconductor, bioarchive, and cargoport assert len(edger_meta['source']['url']) == 3 From 02e89ae0c399400560a4177da4bc8d9f6b8f9dc8 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 5 May 2018 16:36:44 -0400 Subject: [PATCH 047/118] more cb3 docs --- docs/source/cb3.rst | 96 ++++++++++++++++++++++++++++++++++++--------- 1 file changed, 78 insertions(+), 18 deletions(-) diff --git a/docs/source/cb3.rst b/docs/source/cb3.rst index ead91f8d0a..6c4b248719 100644 --- a/docs/source/cb3.rst +++ b/docs/source/cb3.rst @@ -11,19 +11,37 @@ transition. .. _host-section: -``build:`` section is now ``host:`` section -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The new ``host`` section +~~~~~~~~~~~~~~~~~~~~~~~~ + **Summary:** - Previously, build-time dependencies were listed in the ``requirements:build:`` section. -- Instead, we should only put **compilers and build tools** in the ``build:`` - section. Other dependencies should go in a new ``host:`` section. +- Conda-build 3 now has an additional ``host:`` section. It is required if + using compilers; otherwise old recipes can remain as-is and can be gradually + ported over. New recipes should use the new ``host:`` section as described + below. Due to the improved way compilers are now being handled (see -:ref:`compiler-tools`), the old build section is now split into ``build`` and -``host``. The majority of dependencies in the old ``build:`` should now go into -the new ``host:`` section. If there are no compilers, then there should be no -``build:`` section. +:ref:`compiler-tools`), the old build section is now split into two sections, +``build`` and ``host``. This change is largely to support cross-compiling, +which we are not doing yet. However if a recipe uses one of the new ``{{ +compiler() }}`` methods described in :ref:`compiler-tools`, the ``host`` +section is **required**. 
+ +Existing recipes that only have a ``build:`` section and do not use the new +compiler tools (the thousands of existing recipes!) should still work for now; +they just won't work if we ever try to cross-compile them. However **new** +recipes created by ``conda skeleton`` have the new ``host`` section, and this +seems to be the way conda is going, so we will gradually port over our recipes. + +The new ``build`` section should have things like compilers, ``git``, +``automake``, ``make``, ``cmake``, and other build tools. If there are no +compilers or other build tools, there should be no ``build:`` section. + +The new ``host`` section should have everything else. + +The ``run`` section remains the same. Before: @@ -66,20 +84,60 @@ Compiler tools - Previously we used ``- gcc #[linux]`` and ``- llvm # [osx]`` for compilers - Instead, we should use the syntax ``{{ compiler('c') }}``, ``{{ - compiler('cxx') }}``, and/or ``{{ compiler('fortran') }}``. This should go in - the ``build:`` section. + compiler('cxx') }}``, and/or ``{{ compiler('fortran') }}``. These should go + in the ``build`` section, and all other build dependencies should go in the + ``host`` section. Anaconda now provides platform-specific compilers that are automatically determined. The string ``{{ compiler('c') }}`` will resolve to ``gcc`` on Linux, but ``clang`` on macOS. This should greatly simplify recipes, as we no longer need to have separate lines for linux and osx. -This also opens up the future possibility of cross-compiling, allowing us to -build macOS packages from Linux infrastructure. - Note that previously we typically would also add ``- libgcc #[linux]`` as a run dependency, but this is now taken care of by the compiler tools. +Conda-build 3 also now has the ability to cross-compile, making it now possible +to compile packages for macOS while running on Linux. 
To support this, recipes +must now make a distinction between dependencies that should be specific to the +building machine and dependencies that should be specific to the running +machine. + +Dependencies specific to the building machine go in ``build``; +dependencies specific to the running machine go in ``host`` (see +:ref:`host-section`). + + +Before: + +.. code-block:: yaml + + package: + name: example + version: 0.1 + requirements: + build: + - python + - gcc # [linux] + - llvm # [osx] + run: + - python + - libgcc # [linux] + +After: + +.. code-block:: yaml + + package: + name: example + version: 0.1 + requirements: + build: + - {{ compiler('c') }} + host: + - python + run: + - python + .. seealso:: - The `compiler tools @@ -96,6 +154,13 @@ dependency, but this is now taken care of by the compiler tools. `_ conda documentation. + +.. warning:: + + These compilers are only available in the ``anaconda`` channel. Until now + we have not had this channel as a dependency, so be sure to add the channel + when setting up bioconda (see :ref:`set-up-channels`). + .. _global-pinning: Global pinning @@ -129,11 +194,6 @@ adopted the pinned versions used by conda-forge, which they provide in the ``conda-forge-pinning`` conda package. That package unpacks a config YAML into the conda environment so that we can use that for building all recipes. -To see what versions of packages are pinned, please consult the -`conda_build_config.yaml -`_ -file used by the package. - .. seealso:: The `build variants From 3f57264d613fa1ad800a744a3b1d97dbeadd41fa Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 5 May 2018 16:37:05 -0400 Subject: [PATCH 048/118] add anaconda to channels --- docs/source/index.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/index.rst b/docs/source/index.rst index b423dd39c5..3017e5e2e1 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -89,6 +89,7 @@ packages compiled against R 3.3.1 might not work. 
:: + conda config --add channels anaconda # NEW as of May 2018 conda config --add channels defaults conda config --add channels conda-forge conda config --add channels bioconda From 4494c4248f37011219f0b314626bcf2faccbb266 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 5 May 2018 16:38:12 -0400 Subject: [PATCH 049/118] add anaconda channel to circleci setup --- .circleci/setup.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.circleci/setup.sh b/.circleci/setup.sh index a83a482703..8e0bd198f3 100755 --- a/.circleci/setup.sh +++ b/.circleci/setup.sh @@ -28,6 +28,9 @@ if [[ ! -d $WORKSPACE/miniconda ]]; then bash miniconda.sh -b -p $WORKSPACE/miniconda # step 2: setup channels + + # Added to support the new compiler packages from Anaconda 5 + conda config --system --add channels anaconda conda config --system --add channels defaults conda config --system --add channels conda-forge conda config --system --add channels bioconda From 2b97c0ba24668e5cdda8afeca73899f710c66614 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 5 May 2018 17:40:30 -0400 Subject: [PATCH 050/118] short-circuit filtering if skipped --- bioconda_utils/utils.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 4a4cd09c0d..53d57f9ff2 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -724,6 +724,11 @@ def tobuild(recipe): meta = load_first_metadata( recipe, config=load_conda_config(platform=platform)) + + # The recipe likely defined skip: True + if meta is None: + return [] + # If on CI, handle noarch. 
if os.environ.get('CI', None) == 'true': if meta.get_value('build/noarch'): From 0969bce632479a132e28d153f602df807bd3c5b8 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 5 May 2018 17:40:49 -0400 Subject: [PATCH 051/118] fix text to only check names --- test/test_bioconductor_skeleton.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/test_bioconductor_skeleton.py b/test/test_bioconductor_skeleton.py index 3dc4ff38f4..4488429307 100644 --- a/test/test_bioconductor_skeleton.py +++ b/test/test_bioconductor_skeleton.py @@ -68,7 +68,8 @@ def test_meta_contents(tmpdir): # The rendered meta has {{ compiler('c') }} filled in, so we need to check # for one of those filled-in values. - assert 'toolchain' in edger_meta['requirements']['build'] + names = [i.split()[0] for i in edger_meta['requirements']['build']] + assert 'toolchain' in names # bioconductor, bioarchive, and cargoport assert len(edger_meta['source']['url']) == 3 From a3441ea295479cdcfe68b928cea55001be463d4e Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 5 May 2018 17:41:01 -0400 Subject: [PATCH 052/118] add test for when CI=true --- test/test_utils.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/test/test_utils.py b/test/test_utils.py index e7d949c81d..8be8c3a4c8 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -360,6 +360,31 @@ def test_filter_recipes_skip_is_true(): assert len(filtered) == 0 +def test_filter_recipes_skip_is_true_with_CI_env_var(): + """ + utils.filter_recipes has a conditional that checks to see if there's + a CI=true env var which in some cases only causes failure when running on + CI. So temporarily fake it here so that local tests catch errors. 
+ """ + with utils.temp_env(dict(CI="true")): + r = Recipes( + """ + one: + meta.yaml: | + package: + name: one + version: "0.1" + build: + skip: true + """, from_string=True) + r.write_recipes() + recipes = list(r.recipe_dirs.values()) + filtered = list( + utils.filter_recipes(recipes)) + print(filtered) + assert len(filtered) == 0 + + def test_filter_recipes_skip_not_py27(): """ When all but one Python version is skipped, filtering should do that. From a7042329f81b31da0ef7f1975c20a9629237413c Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 5 May 2018 21:29:43 -0400 Subject: [PATCH 053/118] add extra checks that built pkgs exist --- test/test_utils.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/test/test_utils.py b/test/test_utils.py index 8be8c3a4c8..ddbac3f68a 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -819,6 +819,11 @@ def test_conda_forge_pins(caplog): ) assert build_result + for k, v in r.recipe_dirs.items(): + for i in utils.build_package_paths(v): + assert os.path.exists(i) + ensure_missing(i) + def test_bioconda_pins(caplog): """ @@ -847,6 +852,11 @@ def test_bioconda_pins(caplog): ) assert build_result + for k, v in r.recipe_dirs.items(): + for i in utils.build_package_paths(v): + assert os.path.exists(i) + ensure_missing(i) + def test_load_meta_skipping(): """ From 2da3a2058cf0837e365fa4d61aabff990cbc87bf Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 5 May 2018 21:30:28 -0400 Subject: [PATCH 054/118] add test for cb3 outputs --- test/test_utils.py | 53 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/test/test_utils.py b/test/test_utils.py index ddbac3f68a..0955a405b0 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -910,3 +910,56 @@ def test_variants(): config.exclusive_config_file = tmp assert len(utils.load_all_meta(recipe, config)) == 2 + + +def test_cb3_outputs(): + r = Recipes( + """ + one: + meta.yaml: | + package: + name: one + version: "0.1" + + 
outputs: + - name: libone + - name: py-one + requirements: + - {{ pin_subpackage('libone', exact=True) }} + - python {{ python }} + + """, from_string=True) + r.write_recipes() + recipe = r.recipe_dirs['one'] + + # Write a temporary conda_build_config.yaml that we'll point the config + # object to: + tmp = tempfile.NamedTemporaryFile(delete=False).name + with open(tmp, 'w') as fout: + fout.write( + dedent( + """ + python: + - 2.7 + - 3.5 + """)) + config = utils.load_conda_config() + config.exclusive_config_file = tmp + + # should make three: one, libone, and py-one + assert len(utils.load_all_meta(recipe, config)) == 3 + + build_result = build.build_recipes( + r.basedir, + config={}, + packages="*", + testonly=False, + force=False, + mulled_test=False, + ) + assert build_result + + for k, v in r.recipe_dirs.items(): + for i in utils.build_package_paths(v): + assert os.path.exists(i) + ensure_missing(i) From 08562bf3fa555d208e08be0e4e33f437a9c50cbd Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 5 May 2018 21:35:10 -0400 Subject: [PATCH 055/118] clean up cb3 output test; typos --- test/test_utils.py | 23 +++-------------------- 1 file changed, 3 insertions(+), 20 deletions(-) diff --git a/test/test_utils.py b/test/test_utils.py index 0955a405b0..96157272c7 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -820,7 +820,7 @@ def test_conda_forge_pins(caplog): assert build_result for k, v in r.recipe_dirs.items(): - for i in utils.build_package_paths(v): + for i in utils.built_package_paths(v): assert os.path.exists(i) ensure_missing(i) @@ -853,7 +853,7 @@ def test_bioconda_pins(caplog): assert build_result for k, v in r.recipe_dirs.items(): - for i in utils.build_package_paths(v): + for i in utils.built_package_paths(v): assert os.path.exists(i) ensure_missing(i) @@ -932,23 +932,6 @@ def test_cb3_outputs(): r.write_recipes() recipe = r.recipe_dirs['one'] - # Write a temporary conda_build_config.yaml that we'll point the config - # object to: - tmp = 
tempfile.NamedTemporaryFile(delete=False).name - with open(tmp, 'w') as fout: - fout.write( - dedent( - """ - python: - - 2.7 - - 3.5 - """)) - config = utils.load_conda_config() - config.exclusive_config_file = tmp - - # should make three: one, libone, and py-one - assert len(utils.load_all_meta(recipe, config)) == 3 - build_result = build.build_recipes( r.basedir, config={}, @@ -960,6 +943,6 @@ def test_cb3_outputs(): assert build_result for k, v in r.recipe_dirs.items(): - for i in utils.build_package_paths(v): + for i in utils.built_package_paths(v): assert os.path.exists(i) ensure_missing(i) From 9a3a9d8018d0a58cc77d2a47003071aa02fab107 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sun, 6 May 2018 15:15:25 +0200 Subject: [PATCH 056/118] utils.Progress: remove unnecessary sleep --- bioconda_utils/utils.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 53d57f9ff2..220e856903 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -18,7 +18,7 @@ import datetime from distutils.version import LooseVersion import time -import threading +from threading import Event, Thread from pathlib import PurePath from conda_build import api @@ -929,14 +929,13 @@ def modified_recipes(git_range, recipe_folder, config_file): class Progress: def __init__(self): - self.thread = threading.Thread(target=self.progress) - self.stop = False + self.thread = Thread(target=self.progress) + self.stop = Event() def progress(self): - while not self.stop: + while not self.stop.wait(60): print(".", end="") sys.stdout.flush() - time.sleep(60) print("") def __enter__(self): @@ -944,5 +943,5 @@ def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): - self.stop = True + self.stop.set() self.thread.join() From 6a86254622486b3fc28955e35abe077354da52bd Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sun, 6 May 2018 15:20:40 +0200 Subject: [PATCH 057/118] pkg_test: use 
--involucro-path to prevent download in galaxy-lib --- bioconda_utils/pkg_test.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/bioconda_utils/pkg_test.py b/bioconda_utils/pkg_test.py index f8aa2ce1ae..4ef77fcbc8 100644 --- a/bioconda_utils/pkg_test.py +++ b/bioconda_utils/pkg_test.py @@ -3,6 +3,7 @@ import tarfile import os import shlex +from shutil import which import logging from . import utils @@ -148,6 +149,13 @@ def test_package( cmd += ['--name-override', name_override] cmd += channel_args cmd += shlex.split(mulled_args) + + # galaxy-lib always downloads involucro, unless it's in cwd or its path is explicitly given. + # TODO: This should go into galaxy-lib. Once it is fixed upstream, remove this here. + involucro_path = which('involucro') + if involucro_path: + cmd += ['--involucro-path', involucro_path] + logger.debug('mulled-build command: %s' % cmd) env = os.environ.copy() From 71f52faf8457c49c87bd5abcd27a9bde87a004fc Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 6 May 2018 20:13:07 -0400 Subject: [PATCH 058/118] remove anaconda channel --- .circleci/setup.sh | 2 -- docs/source/index.rst | 1 - 2 files changed, 3 deletions(-) diff --git a/.circleci/setup.sh b/.circleci/setup.sh index 8e0bd198f3..88f1d20d22 100755 --- a/.circleci/setup.sh +++ b/.circleci/setup.sh @@ -29,8 +29,6 @@ if [[ ! -d $WORKSPACE/miniconda ]]; then # step 2: setup channels - # Added to support the new compiler packages from Anaconda 5 - conda config --system --add channels anaconda conda config --system --add channels defaults conda config --system --add channels conda-forge conda config --system --add channels bioconda diff --git a/docs/source/index.rst b/docs/source/index.rst index 3017e5e2e1..b423dd39c5 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -89,7 +89,6 @@ packages compiled against R 3.3.1 might not work. 
:: - conda config --add channels anaconda # NEW as of May 2018 conda config --add channels defaults conda config --add channels conda-forge conda config --add channels bioconda From 7999eec17fea0f8e4735102888bbd6c598da4ac5 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 6 May 2018 20:13:56 -0400 Subject: [PATCH 059/118] simplify htslib in test --- test/test_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_utils.py b/test/test_utils.py index 96157272c7..d6bb89c1ff 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -839,7 +839,7 @@ def test_bioconda_pins(caplog): version: 0.1 requirements: run: - - htslib {{ htslib }} + - htslib """, from_string=True) r.write_recipes() build_result = build.build_recipes( From 71b6b0cfaa8d41a05ed28455ed2a2b8ee3dcbc61 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 6 May 2018 20:14:45 -0400 Subject: [PATCH 060/118] add compiler test --- test/test_utils.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/test/test_utils.py b/test/test_utils.py index d6bb89c1ff..8fdc7c2674 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -946,3 +946,35 @@ def test_cb3_outputs(): for i in utils.built_package_paths(v): assert os.path.exists(i) ensure_missing(i) + +def test_compiler(): + r = Recipes( + """ + one: + meta.yaml: | + package: + name: one + version: 0.1 + requirements: + build: + - {{ compiler('c') }} + host: + - python + run: + - python + """, from_string=True) + r.write_recipes() + build_result = build.build_recipes( + r.basedir, + config={}, + packages="*", + testonly=False, + force=False, + mulled_test=False, + ) + assert build_result + + for k, v in r.recipe_dirs.items(): + for i in utils.built_package_paths(v): + assert os.path.exists(i) + ensure_missing(i) From ce67dffb944e6e53ea98ea16587ef09962a1f1a8 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 6 May 2018 20:50:57 -0400 Subject: [PATCH 061/118] fix htslib max pin --- 
bioconda_utils/bioconda_utils-conda_build_config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/bioconda_utils-conda_build_config.yaml b/bioconda_utils/bioconda_utils-conda_build_config.yaml index ead983a40c..1201b41d42 100644 --- a/bioconda_utils/bioconda_utils-conda_build_config.yaml +++ b/bioconda_utils/bioconda_utils-conda_build_config.yaml @@ -6,7 +6,7 @@ pin_run_as_build: htslib: max_pin: x.x bamtools: - max_pin: x.x + max_pin: x.x.x htslib: - 1.7 From d6ea7a1fa0c4a1d6acd49cc05514e98afced7f23 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 6 May 2018 21:24:55 -0400 Subject: [PATCH 062/118] update docs to reflect run_exports --- docs/source/cb3.rst | 101 ++++++++++++++++++++++++++++++++++---------- 1 file changed, 79 insertions(+), 22 deletions(-) diff --git a/docs/source/cb3.rst b/docs/source/cb3.rst index 6c4b248719..608750c07d 100644 --- a/docs/source/cb3.rst +++ b/docs/source/cb3.rst @@ -17,9 +17,9 @@ The new ``host`` section **Summary:** - Previously, build-time dependencies were listed in the ``requirements:build:`` section. -- Conda-build 3 now has an additional ``host:`` section. It is required if +- Conda-build 3 now has an additional ``requirements:host:`` section. It is required if using compilers; otherwise old recipes can remain as-is and can be gradually - ported over. New recipes should use the new ``host:`` section as described + ported over. New recipes should use the new ``host`` section as described below. Due to the improved way compilers are now being handled (see @@ -94,7 +94,8 @@ Linux, but ``clang`` on macOS. This should greatly simplify recipes, as we no longer need to have separate lines for linux and osx. Note that previously we typically would also add ``- libgcc #[linux]`` as a run -dependency, but this is now taken care of by the compiler tools. +dependency, but this is now taken care of by the compiler tools (see the global +pinning section below for more on this). 
Conda-build 3 also now has the ability to cross-compile, making it now possible to compile packages for macOS while running on Linux. To support this, recipes @@ -169,30 +170,86 @@ Global pinning **Summary:** - Previously we pinned packages using the syntax ``- zlib {{ CONDA_ZLIB }}*`` -- Instead, we should now pin packages with the syntax ``- zlib {{ zlib }}``. + in both the ``build`` and ``run`` dependencies. +- Instead, we should now just specify package names in the ``host`` section, + e.g., as simply ``zlib``. They are pinned automatically. No need to add them + to run dependencies, as they will be added automatically. Global pinning is the idea of making sure all recipes use the same versions of -common libraries. For example, many bioinformatics tools have `zlib` as -a dependency. The version of `zlib` used when building the package should be the -same as the version used when installing the package into a new environment. -Problems arise when the build-time version does not match the install-time -version. Furthermore, all packages installed into the same environment should -have been built using the same zlib so that they can co-exist. This implies +common libraries. Problems arise when the build-time version does not match +the install-time version. Furthermore, all packages installed into the same +environment should have been built using the same version so that they can +co-exist. For example, many bioinformatics tools have `zlib` as a dependency. +The version of `zlib` used when building the package should be the same as the +version used when installing the package into a new environment. This implies that we need to specify the `zlib` version in one place and have all recipes use that version. -Previously we maintained a global pinning file (see `scripts/env_matrix.yaml -`_), -and in there was the variable ``CONDA_ZLIB`` that was made available to the -recipes as a jinja2 variable. 
One problem with this is that we did not often -synchronize our pinned versions with conda-forge's pinned versions, and this -disconnect could cause problems. +Previously we maintained a global, bioconda-specific pinning file (see +`scripts/env_matrix.yaml +`_). +For ``zlibe``, that file defined the variable ``CONDA_ZLIB`` and that variable +was made available to the recipes as a jinja2 variable. One problem with this +is that we did not often synchronize our pinned versions with conda-forge's +pinned versions, and this disconnect could cause problems. + +There are two major advances in conda-build 3 to address these problems. First +is the concept of "variants". Variants are a generalized way of specifying one +or more specific versions, and they come with many weird and wonderful ways to +specify constraints. Specifying variants generally takes the form of writing +a YAML file. We have adopted the variants defined by conda-forge by installing +their ``conda-forge-pinning`` conda package in our build environment. +Technically, that package unpacks the config YAML into our conda environment so +that it can be used for building all recipes. + +The second major advance in conda-build 3 is the the concept of "run exports". +The idea here is to specify that any time a dependency (``zlib``, in our running example) +is used as a build dependency, it should also be automatically be installed as +a run dependency without having to explicitly add it as such in the recipe. +This specification is done in the ``zlib`` recipe itself (which is hosted by +conda-forge), so in general bioconda collaborators can just add ``zlib`` as +a build dependency. + +Note that we don't have to specify the version of ``zlib`` in the recipe -- it +is pinned in the ``conda_build_config.yaml`` file we share with conda-forge. 
+ +In a similar fashion, the reason that we no longer have to specify ``libgcc`` +as a run dependency (as described above in the compilers section) is that ``{{ +compiler('c') }}`` automatically export ``libgcc`` as a run dependency. + +Before: + +.. code-block:: yaml + + package: + name: example + version: 0.1 + requirements: + build: + - python + - gcc # [linux] + - llvm # [osx] + - zlib {{ CONDA_ZLIB }}* + run: + - python + - libgcc # [linux] + - zlib {{ CONDA_ZLIB }}* + +After: + +.. code-block:: yaml + + package: + name: example + version: 0.1 + requirements: + build: + - {{ compiler('c') }} + host: + - zlib + run: + - python -Now, conda-build 3 has the concept of "variants", which is a generalized way of -solving this problem. This generally takes the form of a YAML file. We have -adopted the pinned versions used by conda-forge, which they provide in the -``conda-forge-pinning`` conda package. That package unpacks a config YAML into -the conda environment so that we can use that for building all recipes. .. seealso:: @@ -200,7 +257,7 @@ the conda environment so that we can use that for building all recipes. `_ section of the conda docs has much more information. - Packages pinned by conda-forge (which we also use) can be found in their + We share the packages pinned by conda-forge, which can be found in their `conda_build_config.yaml `_ From d374363999478083f2445e9f244bf3b7691d456a Mon Sep 17 00:00:00 2001 From: Kyle Beauchamp Date: Tue, 8 May 2018 14:22:07 -0700 Subject: [PATCH 063/118] Update cb3.rst --- docs/source/cb3.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/cb3.rst b/docs/source/cb3.rst index 608750c07d..2eb1d1ebfe 100644 --- a/docs/source/cb3.rst +++ b/docs/source/cb3.rst @@ -188,7 +188,7 @@ use that version. Previously we maintained a global, bioconda-specific pinning file (see `scripts/env_matrix.yaml `_). 
-For ``zlibe``, that file defined the variable ``CONDA_ZLIB`` and that variable +For ``zlib``, that file defined the variable ``CONDA_ZLIB`` and that variable was made available to the recipes as a jinja2 variable. One problem with this is that we did not often synchronize our pinned versions with conda-forge's pinned versions, and this disconnect could cause problems. From c9ec48b1efdffe18cd19c8393e299ddc7f1db541 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 13 May 2018 14:35:16 -0400 Subject: [PATCH 064/118] update docs --- docs/source/cb3.rst | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/docs/source/cb3.rst b/docs/source/cb3.rst index 2eb1d1ebfe..cfbf6bd81e 100644 --- a/docs/source/cb3.rst +++ b/docs/source/cb3.rst @@ -29,18 +29,16 @@ which we are not doing yet. However if a recipe uses one of the new ``{{ compiler() }}`` methods described in :ref:`compiler-tools`, the ``host`` section is **required**. -Existing recipes that only have a ``build:`` section and do not use the new -compiler tools (the thousands of existing recipes!) should still work for now; -they just won't work if we ever try to cross-compile them. However **new** -recipes created by ``conda skeleton`` have the new ``host`` section, and this -seems to be the way conda is going, so we will gradually port over our recipes. - The new ``build`` section should have things like compilers, ``git``, ``automake``, ``make``, ``cmake``, and other build tools. If there are no compilers or other build tools, there should be no ``build:`` section. The new ``host`` section should have everything else. +There are many existing recipes that only have a ``build:`` section. They will +work for now; they just won't work if we ever try to cross-compile them. +However **new** recipes should have the ``host:`` section. + The ``run`` section remains the same. 
Before: @@ -171,9 +169,8 @@ Global pinning - Previously we pinned packages using the syntax ``- zlib {{ CONDA_ZLIB }}*`` in both the ``build`` and ``run`` dependencies. -- Instead, we should now just specify package names in the ``host`` section, - e.g., as simply ``zlib``. They are pinned automatically. No need to add them - to run dependencies, as they will be added automatically. +- Instead, we should now specify only package names in the ``host`` and ``run`` + sections e.g., as simply ``zlib``. They are pinned automatically. Global pinning is the idea of making sure all recipes use the same versions of common libraries. Problems arise when the build-time version does not match @@ -200,7 +197,9 @@ specify constraints. Specifying variants generally takes the form of writing a YAML file. We have adopted the variants defined by conda-forge by installing their ``conda-forge-pinning`` conda package in our build environment. Technically, that package unpacks the config YAML into our conda environment so -that it can be used for building all recipes. +that it can be used for building all recipes. You can see this file at +`conda_build_config.yaml +`_ The second major advance in conda-build 3 is the the concept of "run exports". The idea here is to specify that any time a dependency (``zlib``, in our running example) @@ -211,7 +210,7 @@ conda-forge), so in general bioconda collaborators can just add ``zlib`` as a build dependency. Note that we don't have to specify the version of ``zlib`` in the recipe -- it -is pinned in the ``conda_build_config.yaml`` file we share with conda-forge. +is pinned in that ``conda_build_config.yaml`` file we share with conda-forge. In a similar fashion, the reason that we no longer have to specify ``libgcc`` as a run dependency (as described above in the compilers section) is that ``{{ @@ -246,9 +245,11 @@ After: build: - {{ compiler('c') }} host: + - python - zlib run: - python + - zlib .. 
seealso:: From fd7bfee0ef6d75e004d4d908addaecb540d5e7cb Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 13 May 2018 19:42:21 -0400 Subject: [PATCH 065/118] add lint functions for compilers and fn --- bioconda_utils/lint_functions.py | 81 ++++++++++++++----- test/test_linting.py | 130 +++++++++++++++++++++++++++++++ 2 files changed, 192 insertions(+), 19 deletions(-) diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index 00711357af..18c5c2dfa3 100644 --- a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -34,7 +34,7 @@ def _get_deps(meta, section=None): section : str, list, or None If None, returns all dependencies. Otherwise can be a string or list of - options [build, run, test] to return section-specific dependencies. + options [build, host, run, test] to return section-specific dependencies. """ get_name = lambda dep: dep.split()[0] @@ -43,7 +43,7 @@ def _get_deps(meta, section=None): if reqs is None: return [] if section is None: - sections = ['build', 'run', 'test'] + sections = ['build', 'host', 'run', 'test'] if isinstance(section, str): sections = [section] deps = [] @@ -113,7 +113,7 @@ def missing_home(recipe, metas, df): def missing_summary(recipe, metas, df): for meta in metas: - if not meta.get_value('about/summary'): + if not meta.get_value('about/summary'): return { 'missing_summary': True, 'fix': 'add about:summary', @@ -122,7 +122,7 @@ def missing_summary(recipe, metas, df): def missing_license(recipe, metas, df): for meta in metas: - if not meta.get_value('about/license'): + if not meta.get_value('about/license'): return { 'missing_license': True, 'fix': 'add about:license' @@ -208,7 +208,6 @@ def has_windows_bat_file(recipe, metas, df): def should_be_noarch(recipe, metas, df): for meta in metas: - print(meta.get_value("package/name")) deps = _get_deps(meta) if ( ('gcc' not in deps) and @@ -277,15 +276,15 @@ def invalid_identifiers(recipe, metas, df): try: identifiers = 
meta.get_section('extra').get('identifiers', []) if not isinstance(identifiers, list): - return { 'invalid_identifiers': True, - 'fix': 'extra:identifiers must hold a list of identifiers' } + return {'invalid_identifiers': True, + 'fix': 'extra:identifiers must hold a list of identifiers'} if not all(isinstance(i, str) for i in identifiers): - return { 'invalid_identifiers': True, - 'fix': 'each identifier must be a string' } + return {'invalid_identifiers': True, + 'fix': 'each identifier must be a string'} if not all((':' in i) for i in identifiers): - return { 'invalid_identifiers': True, - 'fix': 'each identifier must be of the form ' - 'type:identifier (e.g., doi:123)' } + return {'invalid_identifiers': True, + 'fix': 'each identifier must be of the form ' + 'type:identifier (e.g., doi:123)'} except KeyError: # no identifier section continue @@ -294,9 +293,52 @@ def invalid_identifiers(recipe, metas, df): def deprecated_numpy_spec(recipe, metas, df): with open(os.path.join(recipe, "meta.yaml")) as recipe: if re.search("numpy( )+x\.x", recipe.read()): - return { 'deprecated_numpy_spec': True, - 'fix': 'omit x.x as pinning of numpy is now ' - 'handled automatically'} + return {'deprecated_numpy_spec': True, + 'fix': 'omit x.x as pinning of numpy is now ' + 'handled automatically'} + + +def should_not_use_fn(recipe, metas, df): + for meta in metas: + source = meta.get_section('source') + if 'fn' in source: + return { + 'should_not_use_fn': True, + 'fix': 'URL should specify path to file, which will be used as the filename' + } + + +def should_use_compilers(recipe, metas, df): + for meta in metas: + deps = _get_deps(meta) + if ( + ('gcc' in deps) or + ('llvm' in deps) or + ('libgfortran' in deps) or + ('libgcc' in deps) + + ): + return { + 'should_use_compilers': True, + 'fix': 'use {{ compiler("c") }} or other new-style compilers', + } + + +def compilers_must_be_in_build(recipe, metas, df): + for meta in metas: + + print(_get_deps(meta, 'run')) + if ( + + 
any(['toolchain' in i for i in _get_deps(meta, 'run')]) or + any(['toolchain' in i for i in _get_deps(meta, 'host')]) + ): + return { + 'compilers_must_be_in_build': True, + 'fix': ( + '{{ compiler("c") }} or other new-style compliers can ' + 'only go in the build: section') + } registry = ( @@ -316,11 +358,12 @@ def deprecated_numpy_spec(recipe, metas, df): # it breaks packages that use pkg_resources or setuptools console scripts! # uses_setuptools, has_windows_bat_file, - - # should_be_noarch, - # + should_be_noarch, should_not_be_noarch, setup_py_install_args, invalid_identifiers, - deprecated_numpy_spec + deprecated_numpy_spec, + should_not_use_fn, + should_use_compilers, + compilers_must_be_in_build, ) diff --git a/test/test_linting.py b/test/test_linting.py index c859115a34..d2e42ad878 100644 --- a/test/test_linting.py +++ b/test/test_linting.py @@ -803,3 +803,133 @@ def test_deprecated_numpy_spec(): - numpy x.x '''] ) + + +def test_should_use_compilers(): + run_lint( + func=lint_functions.should_use_compilers, + should_pass=[''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + host: + - python + run: + - python + ''', + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + build: + - {{ compiler ('c') }} + '''], + should_fail=[''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + build: + - gcc # [linux] + ''', + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + run: + - libgcc # [linux] + ''' + ] + ) + +def test_compilers_must_be_in_build(): + run_lint( + func=lint_functions.compilers_must_be_in_build, + should_pass=[''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + host: + - python + run: + - python + ''', + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + build: + - {{ compiler ('c') }} + '''], + should_fail=[''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + run: + - {{ 
compiler("c") }} + ''', + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + host: + - {{ compiler ('c') }} + ''' + ] + ) + + +def test_should_not_use_fn(): + run_lint( + func=lint_functions.should_not_use_fn, + should_pass=[''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + ''', + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + source: + url: https://bioconda.github.io/index.html + '''], + should_fail=[''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + source: + fn: index.html + url: https://bioconda.github.io/index.html + ''', + ] + ) From 3ec76812e67183f772d08b94b45d9becde2e91fa Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 13 May 2018 19:49:28 -0400 Subject: [PATCH 066/118] add linting docs --- docs/source/linting.rst | 56 +++++++++++++---------------------------- 1 file changed, 17 insertions(+), 39 deletions(-) diff --git a/docs/source/linting.rst b/docs/source/linting.rst index e463cd4a3f..8c3af5ee0c 100644 --- a/docs/source/linting.rst +++ b/docs/source/linting.rst @@ -261,7 +261,7 @@ In particular, ensure that each identifier starts with a type Whitespace is not allowed. `deprecated_numpy_spec` -~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~ Reason for failing: The recipe contains ``numpy x.x`` in build or run requirements. Rationale: This kind of version pinning is deprecated, and numpy pinning is now @@ -269,52 +269,30 @@ handled automatically by the system. How to resolve: Remove the ``x.x``. +`should_not_use_fn` +~~~~~~~~~~~~~~~~~~~ +Reason for failing: Recipe contains a ``fn:`` key in the ``source:`` section -`*_not_pinned` -~~~~~~~~~~~~~~ - -Reason for failing: The recipe has dependencies that need to be pinned to -a specific version all across bioconda. - -Rationale: Sometimes when a core dependency (like ``zlib``, which is used across -many recipes) is updated it breaks backwards compatibility. 
In order to avoid -this, for known-to-be-problematic dependencies we pin to a specific version -across all recipes. - -How to resolve: Change the dependency line as follows. For each dependency -failing the linting, specify a jinja-templated version by converting it to -uppercase, prefixing it with ``CONDA_``, adding double braces, and adding a ``*``. +Rationale: Conda-build 3 no longer requires ``fn:``, and it is redundant with ``url:``. -Examples are much easier to understand: +How to resolve: Remove the ``source: fn:`` key. -- ``zlib`` should become ``zlib {{ CONDA_ZLIB }}*`` -- ``ncurses`` should become ``ncurses {{ CONDA_NCURSES }}*`` -- ``htslib`` should become ``htslib {{ CONDA_HTSLIB }}*`` -- ``boost`` should become ``boost {{ CONDA_BOOST }}*`` -- ... and so on. - -Here is an example in the context of a ``meta.yaml`` file where ``zlib`` needs to be -pinned: +`should_use_compilers` +~~~~~~~~~~~~~~~~~~~~~~ +Reason for failing: The recipe has one of ``gcc``, ``llvm``, ``libgfortran``, or ``libgcc`` as dependencies. -.. code-block:: yaml +Rationale: Conda-build 3 now uses compiler tools, which are more up-to-date and +better-supported. - # this will give a linting error because zlib is not pinned - build: - - zlib - run: - - zlib - - bedtools +How to resolve: Use ``{{ compiler() }}`` variables. See :ref:`compiler-tools` for details. -And here is the fixed version: +`compilers_must_be_in_build` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Reason for failing: A ``{{ compiler() }}`` variable was found, but not in the ``build:`` section. -.. code-block:: yaml +Rationale: The compiler tools must not be in ``host:`` or ``run:`` sections. - # fixed: - build: - - zlib {{ CONDA_ZLIB }}* - run: - - zlib {{ CONDA_ZLIB }}* - - bedtools +How to resolve: Move ``{{ compiler() }}`` variables to the ``build:`` section. 
Developer docs From 80d25c74d42616c327eb50d3327771db444b83d1 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 13 May 2018 19:50:22 -0400 Subject: [PATCH 067/118] bring back the noarch lint docs --- docs/source/linting.rst | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/docs/source/linting.rst b/docs/source/linting.rst index 8c3af5ee0c..b6e3837038 100644 --- a/docs/source/linting.rst +++ b/docs/source/linting.rst @@ -133,20 +133,22 @@ How to resolve: Add a hash in the `source section `_. See :ref:`hashes` for more info. -`should_be_noarch` (currently disabled) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +`should_be_noarch` +~~~~~~~~~~~~~~~~~~ Reason for failing: The package should be labelled as ``noarch``. -Rationale: A ``noarch`` package should be created for pure Python packages, data packages, or -packages that do not require compilation. With this a single ``noarch`` package can be -used across multiple platforms and (in case of Python) Python versions, which saves -on build time and saves on storage space on the bioconda channel. - -How to resolve: For pure Python packages, add ``noarch: python`` to the ``build`` section. -**Don't do this if your Python package has a command line interface**, as these are not -independent of the Python version! -For other generic packages (like a data package), add ``noarch: generic`` to the ``build`` section. -See `here `_ for +Rationale: A ``noarch`` package should be created for pure Python packages, +data packages, or packages that do not require compilation. With this a single +``noarch`` package can be used across multiple platforms and (in case of +Python) Python versions, which saves on build time and saves on storage space +on the bioconda channel. + +How to resolve: For pure Python packages, add ``noarch: python`` to the +``build`` section. **Don't do this if your Python package has a command line +interface**, as these are not independent of the Python version! 
For other +generic packages (like a data package), add ``noarch: generic`` to the +``build`` section. See `here +`_ for more details. `should_not_be_noarch` From 7079bb3ca4ca5bbf49ce25b820c863bc0413999b Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 16 May 2018 15:20:28 +0200 Subject: [PATCH 068/118] render with finalize=False where possible --- bioconda_utils/linting.py | 2 +- bioconda_utils/utils.py | 36 +++++++++++++++++++++++++++--------- 2 files changed, 28 insertions(+), 10 deletions(-) diff --git a/bioconda_utils/linting.py b/bioconda_utils/linting.py index 568879364c..a4e418f55e 100644 --- a/bioconda_utils/linting.py +++ b/bioconda_utils/linting.py @@ -194,7 +194,7 @@ def lint(recipes, df, exclude=None, registry=None): for platform in ["linux", "osx"]: config = utils.load_conda_config(platform=platform, trim_skip=False) - metas.extend(utils.load_all_meta(recipe, config=config)) + metas.extend(utils.load_all_meta(recipe, config=config, finalize=False)) except ( yaml.scanner.ScannerError, yaml.constructor.ConstructorError ) as e: diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 220e856903..66390c1e8e 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -147,14 +147,23 @@ def sandboxed_env(env): os.environ.update(orig) -def load_all_meta(recipe, config=None): +def load_all_meta(recipe, config=None, finalize=True): """ For each environment, yield the rendered meta.yaml. + + Parameters + ---------- + finalize : bool + If True, do a full conda-build render. Determines exact package builds + of build/host dependencies. It involves costly dependency resolution + via conda and also download of those packages (to inspect possible + run_exports). For fast-running tasks like linting, set to False. 
""" if config is None: config = load_conda_config() return [meta for (meta, _, _) in api.render(recipe, - config=config)] + config=config, + finalize=finalize)] def load_conda_config(platform=None, trim_skip=True): @@ -185,15 +194,23 @@ def load_conda_config(platform=None, trim_skip=True): return config -def load_first_metadata(recipe, config=None): +def load_first_metadata(recipe, config=None, finalize=True): """ Returns just the first of possibly many metadata files. Used for when you need to do things like check a package name or version number (which are not expected to change between variants). If the recipe will be skipped, then returns None + + Parameters + ---------- + finalize : bool + If True, do a full conda-build render. Determines exact package builds + of build/host dependencies. It involves costly dependency resolution + via conda and also download of those packages (to inspect possible + run_exports). For fast-running tasks like linting, set to False. """ - metas = load_all_meta(recipe, config) + metas = load_all_meta(recipe, config, finalize=finalize) if len(metas) > 0: return metas[0] @@ -341,7 +358,7 @@ def get_deps(recipe=None, meta=None, build=True): """ if recipe is not None: assert isinstance(recipe, str) - metadata = load_all_meta(recipe) + metadata = load_all_meta(recipe, finalize=False) elif meta is not None: metadata = [meta] else: @@ -390,7 +407,7 @@ def get_dag(recipes, config, blacklist=None, restrict=True): recipes = list(recipes) metadata = [] for recipe in sorted(recipes): - for r in load_all_meta(recipe): + for r in load_all_meta(recipe, finalize=False): metadata.append((r, recipe)) if blacklist is None: blacklist = set() @@ -484,9 +501,10 @@ def toplevel(x): yield group[0] else: def get_version(p): - return VersionOrder( - load_first_metadata(os.path.join(p, 'meta.yaml')).get_value('package/version') - ) + meta_path = os.path.join(p, 'meta.yaml') + meta = load_first_metadata(meta_path, finalize=False) + version = 
meta.get_value('package/version') + return VersionOrder(version) sorted_versions = sorted(group, key=get_version) if sorted_versions: yield sorted_versions[-1] From 0ae67abc85410d25a73814843ce3b8d2726b11dd Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 16 May 2018 15:24:07 +0200 Subject: [PATCH 069/118] rename load_conda_config to load_conda_build_config --- bioconda_utils/build.py | 4 ++-- bioconda_utils/docker_utils.py | 2 +- bioconda_utils/linting.py | 3 +-- bioconda_utils/utils.py | 10 +++++----- 4 files changed, 9 insertions(+), 10 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 2ab7a51de8..a4d8d8b6e8 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -137,8 +137,8 @@ def build( # conda-build from building all subdirectories with utils.sandboxed_env(whitelisted_env): cmd = CONDA_BUILD_CMD + build_args + channel_args + \ - ["-e", utils.load_conda_config().exclusive_config_file] + \ - ["-m"] + utils.load_conda_config().variant_config_files + \ + ["-e", utils.load_conda_build_config().exclusive_config_file] + \ + ["-m"] + utils.load_conda_build_config().variant_config_files + \ [os.path.join(recipe, 'meta.yaml')] logger.debug('command: %s', cmd) with utils.Progress(): diff --git a/bioconda_utils/docker_utils.py b/bioconda_utils/docker_utils.py index 7803298765..26167646bf 100644 --- a/bioconda_utils/docker_utils.py +++ b/bioconda_utils/docker_utils.py @@ -329,7 +329,7 @@ def __init__( # Copy the conda build config to the staging directory that is # visible in the container - shutil.copyfile(utils.load_conda_config().exclusive_config_file, + shutil.copyfile(utils.load_conda_build_config().exclusive_config_file, os.path.join(self.pkg_dir, "conda_build_config.yaml")) diff --git a/bioconda_utils/linting.py b/bioconda_utils/linting.py index a4e418f55e..f47ca686c3 100644 --- a/bioconda_utils/linting.py +++ b/bioconda_utils/linting.py @@ -192,8 +192,7 @@ def lint(recipes, df, exclude=None, registry=None): 
metas = [] try: for platform in ["linux", "osx"]: - config = utils.load_conda_config(platform=platform, - trim_skip=False) + config = utils.load_conda_build_config(platform=platform, trim_skip=False) metas.extend(utils.load_all_meta(recipe, config=config, finalize=False)) except ( yaml.scanner.ScannerError, yaml.constructor.ConstructorError diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 66390c1e8e..06bacb0bbe 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -160,15 +160,15 @@ def load_all_meta(recipe, config=None, finalize=True): run_exports). For fast-running tasks like linting, set to False. """ if config is None: - config = load_conda_config() + config = load_conda_build_config() return [meta for (meta, _, _) in api.render(recipe, config=config, finalize=finalize)] -def load_conda_config(platform=None, trim_skip=True): +def load_conda_build_config(platform=None, trim_skip=True): """ - Load conda config while considering global pinnings from conda-forge. + Load conda build config while considering global pinnings from conda-forge. """ config = api.Config( no_download_source=True, @@ -604,7 +604,7 @@ def built_package_paths(recipe): Does not necessarily exist; equivalent to `conda build --output recipename` but without the subprocess. 
""" - config = load_conda_config() + config = load_conda_build_config() paths = api.get_output_file_paths(recipe, config=config) return paths @@ -741,7 +741,7 @@ def tobuild(recipe): platform = "linux" meta = load_first_metadata( - recipe, config=load_conda_config(platform=platform)) + recipe, config=load_conda_build_config(platform=platform)) # The recipe likely defined skip: True if meta is None: From 884e7571005ed4044618c3eb3dc248608acf37a2 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 16 May 2018 15:25:58 +0200 Subject: [PATCH 070/118] use conda.exports (public API) --- bioconda_utils/pypi.py | 2 +- bioconda_utils/utils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bioconda_utils/pypi.py b/bioconda_utils/pypi.py index 241d649fc5..ba6ed2690d 100644 --- a/bioconda_utils/pypi.py +++ b/bioconda_utils/pypi.py @@ -1,6 +1,6 @@ import os import requests -from conda.models.version import VersionOrder +from conda.exports import VersionOrder from . import utils from . 
import linting diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 06bacb0bbe..4443113dc7 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -22,7 +22,7 @@ from pathlib import PurePath from conda_build import api -from conda.models.version import VersionOrder +from conda.exports import VersionOrder import yaml from jinja2 import Environment, PackageLoader from colorlog import ColoredFormatter From 91639b8e94073d6f34323b981303da3a225a2131 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 16 May 2018 15:39:27 +0200 Subject: [PATCH 071/118] pin conda-forge-pinning, update conda/conda-build pins --- bioconda_utils/bioconda_utils-requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index de8f66416b..da2017683b 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -1,8 +1,8 @@ anaconda-client=1.6.* argh=0.26.* beautifulsoup4=4.6.* -conda=4.5.1 -conda-build=3.8.1 +conda=4.5.3 +conda-build=3.10.2 galaxy-lib>=18.5.5 jinja2=2.10.* jsonschema=2.6.* @@ -21,4 +21,4 @@ colorlog=3.1.* six=1.11.* alabaster=0.7.* git=2.14.* -conda-forge-pinning +conda-forge-pinning=2018.05.07 From 77b4f84c8e2d37bde7b7a6c21f03fee13101e60d Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 16 May 2018 16:01:06 +0200 Subject: [PATCH 072/118] lint: handle if source is a list --- bioconda_utils/lint_functions.py | 54 +++++++++++++++++++------------- test/test_linting.py | 49 ++++++++++++++++++++++++++--- 2 files changed, 78 insertions(+), 25 deletions(-) diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index 18c5c2dfa3..b722515012 100644 --- a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -144,30 +144,36 @@ def missing_tests(recipe, metas, df): def missing_hash(recipe, metas, df): for meta in metas: # 
could be a meta-package if no source section or if None - src = meta.get_section('source') - if not src: + sources = meta.get_section('source') + if not sources: continue + if isinstance(sources, dict): + sources = [sources] - if not any(src.get(checksum) - for checksum in ('md5', 'sha1', 'sha256')): - return { - 'missing_hash': True, - 'fix': 'add md5, sha1, or sha256 hash to "source" section', - } + for source in sources: + if not any(source.get(checksum) + for checksum in ('md5', 'sha1', 'sha256')): + return { + 'missing_hash': True, + 'fix': 'add md5, sha1, or sha256 hash to "source" section', + } def uses_git_url(recipe, metas, df): for meta in metas: - src = meta.get_section('source') - if not src: + sources = meta.get_section('source') + if not sources: # metapackage? continue + if isinstance(sources, dict): + sources = [sources] - if 'git_url' in src: - return { - 'uses_git_url': True, - 'fix': 'use tarballs whenever possible', - } + for source in sources: + if 'git_url' in source: + return { + 'uses_git_url': True, + 'fix': 'use tarballs whenever possible', + } def uses_perl_threaded(recipe, metas, df): @@ -300,12 +306,18 @@ def deprecated_numpy_spec(recipe, metas, df): def should_not_use_fn(recipe, metas, df): for meta in metas: - source = meta.get_section('source') - if 'fn' in source: - return { - 'should_not_use_fn': True, - 'fix': 'URL should specify path to file, which will be used as the filename' - } + sources = meta.get_section('source') + if not sources: + continue + if isinstance(sources, dict): + sources = [sources] + + for source in sources: + if 'fn' in source: + return { + 'should_not_use_fn': True, + 'fix': 'URL should specify path to file, which will be used as the filename' + } def should_use_compilers(recipe, metas, df): diff --git a/test/test_linting.py b/test/test_linting.py index d2e42ad878..8f81aa1603 100644 --- a/test/test_linting.py +++ b/test/test_linting.py @@ -293,6 +293,15 @@ def test_missing_hash(): source: md5: 
11111111111111111111111111111111 ''', + ''' + missing_hash: + meta.yaml: | + package: + name: md5hash_list + version: "0.1" + source: + - md5: 11111111111111111111111111111111 + ''', # Should pass when source section is missing ''' missing_hash: @@ -322,13 +331,24 @@ def test_missing_hash(): fn: "a.txt" sha256: "" ''', + ''' + missing_hash: + meta.yaml: | + package: + name: missing_hash_list + version: "0.1" + source: + - fn: "a.txt" + - md5: 11111111111111111111111111111111 + ''', ]) def test_uses_git_url(): run_lint( func=lint_functions.uses_git_url, - should_pass=[''' + should_pass=[ + ''' uses_git_url: meta.yaml: | package: @@ -343,8 +363,19 @@ def test_uses_git_url(): package: name: uses_git_url version: "0.1" - '''], - should_fail=''' + ''', + ''' + uses_git_url: + meta.yaml: | + package: + name: uses_git_url_list + version: "0.1" + source: + - fn: "a.txt" + ''', + ], + should_fail=[ + ''' uses_git_url: meta.yaml: | package: @@ -352,7 +383,17 @@ def test_uses_git_url(): version: "0.1" source: git_url: https://github.com/bioconda/bioconda.git - ''') + ''', + ''' + uses_git_url: + meta.yaml: | + package: + name: uses_git_url_list + version: "0.1" + source: + - git_url: https://github.com/bioconda/bioconda.git + ''', + ]) def test_uses_perl_threaded(): From 70026837dcbfc30b0a315087c173702bdbb33dd3 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 16 May 2018 16:48:51 +0200 Subject: [PATCH 073/118] adjust tests --- test/test_linting.py | 6 ++---- test/test_utils.py | 2 +- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/test/test_linting.py b/test/test_linting.py index 8f81aa1603..ff5f315c5d 100644 --- a/test/test_linting.py +++ b/test/test_linting.py @@ -48,10 +48,8 @@ def _run(contents, expect_pass=True): recipe, meta, df = r.recipe_dirs[name], r.recipes[name]['meta.yaml'], should_pass_df metas = [] for platform in ["linux", "osx"]: - config = utils.load_conda_config(platform=platform, - trim_skip=False) - 
metas.extend(utils.load_all_meta(r.recipe_dirs[name], - config=config)) + config = utils.load_conda_build_config(platform=platform, trim_skip=False) + metas.extend(utils.load_all_meta(r.recipe_dirs[name], config=config, finalize=False)) if expect_pass: assert func(recipe, metas, df) is None, "lint did not pass" else: diff --git a/test/test_utils.py b/test/test_utils.py index 8fdc7c2674..773a068723 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -906,7 +906,7 @@ def test_variants(): - 1.0 - 2.0 """)) - config = utils.load_conda_config() + config = utils.load_conda_build_config() config.exclusive_config_file = tmp assert len(utils.load_all_meta(recipe, config)) == 2 From a089bd0423764ed6178a94b0a5ac5780ff7db8dc Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 16 May 2018 17:29:58 +0200 Subject: [PATCH 074/118] show test durations --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e620a27668..428191ee20 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -55,7 +55,7 @@ jobs: - *install_bioconda_utils - run: name: Testing - command: py.test test/ -v --tb=native + command: py.test --duration=0 test/ -v --tb=native no_output_timeout: 1200 test-macos: <<: *macos @@ -68,7 +68,7 @@ jobs: - *install_bioconda_utils - run: name: Testing - command: py.test test/ -v -k "not docker" --tb=native + command: py.test --duration=0 test/ -v -k "not docker" --tb=native no_output_timeout: 1200 build-docs: <<: *linux From b6121dcc25014197ffee5e9af3c8542f312e3df7 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 16 May 2018 19:07:23 +0200 Subject: [PATCH 075/118] lint: use decorator instead explicit loops --- bioconda_utils/lint_functions.py | 449 ++++++++++++++++--------------- 1 file changed, 228 insertions(+), 221 deletions(-) diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index b722515012..e154790b9c 100644 --- 
a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -71,137 +71,146 @@ def _has_preprocessing_selector(recipe): return True -def in_other_channels(recipe, metas, df): +def lint_multiple_metas(lint_function): + def lint_metas(recipe, metas, df, *args, **kwargs): + lint = partial(lint_function, recipe) + for meta in metas: + ret = lint(meta, df, *args, **kwargs) + if ret is not None: + return ret + return lint_metas + +@lint_multiple_metas +def in_other_channels(recipe, meta, df): """ Does the package exist in any other non-bioconda channels? """ - for meta in metas: - results = _subset_df(recipe, meta, df) - channels = set(results.channel).difference(['bioconda']) - if len(channels): - return { - 'exists_in_channels': channels, - 'fix': 'consider deprecating', - } + results = _subset_df(recipe, meta, df) + channels = set(results.channel).difference(['bioconda']) + if len(channels): + return { + 'exists_in_channels': channels, + 'fix': 'consider deprecating', + } -def already_in_bioconda(recipe, metas, df): +@lint_multiple_metas +def already_in_bioconda(recipe, meta, df): """ Does the package exist in bioconda? 
""" - for meta in metas: - results = _subset_df(recipe, meta, df) - build_section = meta.get_section('build') - build_number = int(build_section.get('number', 0)) - build_results = results[results.build_number == build_number] - channels = set(build_results.channel) - if 'bioconda' in channels: - return { - 'already_in_bioconda': True, - 'fix': 'bump version or build number' - } + results = _subset_df(recipe, meta, df) + build_section = meta.get_section('build') + build_number = int(build_section.get('number', 0)) + build_results = results[results.build_number == build_number] + channels = set(build_results.channel) + if 'bioconda' in channels: + return { + 'already_in_bioconda': True, + 'fix': 'bump version or build number' + } -def missing_home(recipe, metas, df): - for meta in metas: - if not meta.get_value('about/home'): - return { - 'missing_home': True, - 'fix': 'add about:home', - } +@lint_multiple_metas +def missing_home(recipe, meta, df): + if not meta.get_value('about/home'): + return { + 'missing_home': True, + 'fix': 'add about:home', + } -def missing_summary(recipe, metas, df): - for meta in metas: - if not meta.get_value('about/summary'): - return { - 'missing_summary': True, - 'fix': 'add about:summary', - } +@lint_multiple_metas +def missing_summary(recipe, meta, df): + if not meta.get_value('about/summary'): + return { + 'missing_summary': True, + 'fix': 'add about:summary', + } -def missing_license(recipe, metas, df): - for meta in metas: - if not meta.get_value('about/license'): +@lint_multiple_metas +def missing_license(recipe, meta, df): + if not meta.get_value('about/license'): + return { + 'missing_license': True, + 'fix': 'add about:license' + } + + +@lint_multiple_metas +def missing_tests(recipe, meta, df): + test_files = ['run_test.py', 'run_test.sh', 'run_test.pl'] + if not meta.get_section('test'): + if not any([os.path.exists(os.path.join(recipe, f)) for f in + test_files]): return { - 'missing_license': True, - 'fix': 'add 
about:license' + 'no_tests': True, + 'fix': 'add basic tests', } -def missing_tests(recipe, metas, df): - for meta in metas: - test_files = ['run_test.py', 'run_test.sh', 'run_test.pl'] - if not meta.get_section('test'): - if not any([os.path.exists(os.path.join(recipe, f)) for f in - test_files]): - return { - 'no_tests': True, - 'fix': 'add basic tests', - } +@lint_multiple_metas +def missing_hash(recipe, meta, df): + # could be a meta-package if no source section or if None + sources = meta.get_section('source') + if not sources: + return + if isinstance(sources, dict): + sources = [sources] + for source in sources: + if not any(source.get(checksum) + for checksum in ('md5', 'sha1', 'sha256')): + return { + 'missing_hash': True, + 'fix': 'add md5, sha1, or sha256 hash to "source" section', + } -def missing_hash(recipe, metas, df): - for meta in metas: - # could be a meta-package if no source section or if None - sources = meta.get_section('source') - if not sources: - continue - if isinstance(sources, dict): - sources = [sources] - - for source in sources: - if not any(source.get(checksum) - for checksum in ('md5', 'sha1', 'sha256')): - return { - 'missing_hash': True, - 'fix': 'add md5, sha1, or sha256 hash to "source" section', - } - - -def uses_git_url(recipe, metas, df): - for meta in metas: - sources = meta.get_section('source') - if not sources: - # metapackage? - continue - if isinstance(sources, dict): - sources = [sources] - - for source in sources: - if 'git_url' in source: - return { - 'uses_git_url': True, - 'fix': 'use tarballs whenever possible', - } +@lint_multiple_metas +def uses_git_url(recipe, meta, df): + sources = meta.get_section('source') + if not sources: + # metapackage? 
+ return + if isinstance(sources, dict): + sources = [sources] -def uses_perl_threaded(recipe, metas, df): - for meta in metas: - if 'perl-threaded' in _get_deps(meta): + for source in sources: + if 'git_url' in source: return { - 'depends_on_perl_threaded': True, - 'fix': 'use "perl" instead of "perl-threaded"', + 'uses_git_url': True, + 'fix': 'use tarballs whenever possible', } -def uses_javajdk(recipe, metas, df): - for meta in metas: - if 'java-jdk' in _get_deps(meta): - return { - 'depends_on_java-jdk': True, - 'fix': 'use "openjdk" instead of "java-jdk"', - } +@lint_multiple_metas +def uses_perl_threaded(recipe, meta, df): + if 'perl-threaded' in _get_deps(meta): + return { + 'depends_on_perl_threaded': True, + 'fix': 'use "perl" instead of "perl-threaded"', + } -def uses_setuptools(recipe, metas, df): - for meta in metas: - if 'setuptools' in _get_deps(meta, 'run'): - return { - 'depends_on_setuptools': True, - 'fix': ('setuptools might not be a run requirement (unless it uses ' - 'pkg_resources or setuptools console scripts)'), - } +@lint_multiple_metas +def uses_javajdk(recipe, meta, df): + if 'java-jdk' in _get_deps(meta): + return { + 'depends_on_java-jdk': True, + 'fix': 'use "openjdk" instead of "java-jdk"', + } + + +@lint_multiple_metas +def uses_setuptools(recipe, meta, df): + if 'setuptools' in _get_deps(meta, 'run'): + return { + 'depends_on_setuptools': True, + 'fix': ('setuptools might not be a run requirement (unless it uses ' + 'pkg_resources or setuptools console scripts)'), + } def has_windows_bat_file(recipe, metas, df): @@ -212,88 +221,88 @@ def has_windows_bat_file(recipe, metas, df): } -def should_be_noarch(recipe, metas, df): - for meta in metas: - deps = _get_deps(meta) - if ( - ('gcc' not in deps) and - ('python' in deps) and - # This will also exclude recipes with skip sections - # which is a good thing, because noarch also implies independence of - # the python version. 
- not _has_preprocessing_selector(recipe) - ) and ( - 'noarch' not in meta.get_section('build') - ): - return { - 'should_be_noarch': True, - 'fix': 'add "build: noarch" section', - } - - -def should_not_be_noarch(recipe, metas, df): - for meta in metas: - deps = _get_deps(meta) - if ( - ('gcc' in deps) or - meta.get_section('build').get('skip', False) in ["true", "True"] - ) and ( - 'noarch' in meta.get_section('build') - ): - print("error") - return { - 'should_not_be_noarch': True, - 'fix': 'remove "build: noarch" section', - } - +@lint_multiple_metas +def should_be_noarch(recipe, meta, df): + deps = _get_deps(meta) + if ( + ('gcc' not in deps) and + ('python' in deps) and + # This will also exclude recipes with skip sections + # which is a good thing, because noarch also implies independence of + # the python version. + not _has_preprocessing_selector(recipe) + ) and ( + 'noarch' not in meta.get_section('build') + ): + return { + 'should_be_noarch': True, + 'fix': 'add "build: noarch" section', + } -def setup_py_install_args(recipe, metas, df): - for meta in metas: - if 'setuptools' not in _get_deps(meta, 'build'): - continue - err = { - 'needs_setuptools_args': True, - 'fix': ('add "--single-version-externally-managed --record=record.txt" ' - 'to setup.py command'), +@lint_multiple_metas +def should_not_be_noarch(recipe, meta, df): + deps = _get_deps(meta) + if ( + ('gcc' in deps) or + meta.get_section('build').get('skip', False) in ["true", "True"] + ) and ( + 'noarch' in meta.get_section('build') + ): + print("error") + return { + 'should_not_be_noarch': True, + 'fix': 'remove "build: noarch" section', } - script_line = meta.get_section('build').get('script', '') - if ( - 'setup.py install' in script_line and - '--single-version-externally-managed' not in script_line - ): - return err - - build_sh = os.path.join(recipe, 'build.sh') - if not os.path.exists(build_sh): - continue - contents = open(build_sh).read() - if ( - 'setup.py install' in contents and - 
'--single-version-externally-managed' not in contents - ): - return err - - -def invalid_identifiers(recipe, metas, df): - for meta in metas: - try: - identifiers = meta.get_section('extra').get('identifiers', []) - if not isinstance(identifiers, list): - return {'invalid_identifiers': True, - 'fix': 'extra:identifiers must hold a list of identifiers'} - if not all(isinstance(i, str) for i in identifiers): - return {'invalid_identifiers': True, - 'fix': 'each identifier must be a string'} - if not all((':' in i) for i in identifiers): - return {'invalid_identifiers': True, - 'fix': 'each identifier must be of the form ' - 'type:identifier (e.g., doi:123)'} - except KeyError: - # no identifier section - continue +@lint_multiple_metas +def setup_py_install_args(recipe, meta, df): + if 'setuptools' not in _get_deps(meta, 'build'): + return + + err = { + 'needs_setuptools_args': True, + 'fix': ('add "--single-version-externally-managed --record=record.txt" ' + 'to setup.py command'), + } + + script_line = meta.get_section('build').get('script', '') + if ( + 'setup.py install' in script_line and + '--single-version-externally-managed' not in script_line + ): + return err + + build_sh = os.path.join(recipe, 'build.sh') + if not os.path.exists(build_sh): + return + + contents = open(build_sh).read() + if ( + 'setup.py install' in contents and + '--single-version-externally-managed' not in contents + ): + return err + + +@lint_multiple_metas +def invalid_identifiers(recipe, meta, df): + try: + identifiers = meta.get_section('extra').get('identifiers', []) + if not isinstance(identifiers, list): + return {'invalid_identifiers': True, + 'fix': 'extra:identifiers must hold a list of identifiers'} + if not all(isinstance(i, str) for i in identifiers): + return {'invalid_identifiers': True, + 'fix': 'each identifier must be a string'} + if not all((':' in i) for i in identifiers): + return {'invalid_identifiers': True, + 'fix': 'each identifier must be of the form ' + 
'type:identifier (e.g., doi:123)'} + except KeyError: + # no identifier section + return def deprecated_numpy_spec(recipe, metas, df): @@ -304,53 +313,51 @@ def deprecated_numpy_spec(recipe, metas, df): 'handled automatically'} -def should_not_use_fn(recipe, metas, df): - for meta in metas: - sources = meta.get_section('source') - if not sources: - continue - if isinstance(sources, dict): - sources = [sources] - - for source in sources: - if 'fn' in source: - return { - 'should_not_use_fn': True, - 'fix': 'URL should specify path to file, which will be used as the filename' - } - - -def should_use_compilers(recipe, metas, df): - for meta in metas: - deps = _get_deps(meta) - if ( - ('gcc' in deps) or - ('llvm' in deps) or - ('libgfortran' in deps) or - ('libgcc' in deps) - - ): +@lint_multiple_metas +def should_not_use_fn(recipe, meta, df): + sources = meta.get_section('source') + if not sources: + return + if isinstance(sources, dict): + sources = [sources] + + for source in sources: + if 'fn' in source: return { - 'should_use_compilers': True, - 'fix': 'use {{ compiler("c") }} or other new-style compilers', + 'should_not_use_fn': True, + 'fix': 'URL should specify path to file, which will be used as the filename' } -def compilers_must_be_in_build(recipe, metas, df): - for meta in metas: +@lint_multiple_metas +def should_use_compilers(recipe, meta, df): + deps = _get_deps(meta) + if ( + ('gcc' in deps) or + ('llvm' in deps) or + ('libgfortran' in deps) or + ('libgcc' in deps) - print(_get_deps(meta, 'run')) - if ( + ): + return { + 'should_use_compilers': True, + 'fix': 'use {{ compiler("c") }} or other new-style compilers', + } - any(['toolchain' in i for i in _get_deps(meta, 'run')]) or - any(['toolchain' in i for i in _get_deps(meta, 'host')]) - ): - return { - 'compilers_must_be_in_build': True, - 'fix': ( - '{{ compiler("c") }} or other new-style compliers can ' - 'only go in the build: section') - } + +@lint_multiple_metas +def 
compilers_must_be_in_build(recipe, meta, df): + if ( + + any(['toolchain' in i for i in _get_deps(meta, 'run')]) or + any(['toolchain' in i for i in _get_deps(meta, 'host')]) + ): + return { + 'compilers_must_be_in_build': True, + 'fix': ( + '{{ compiler("c") }} or other new-style compliers can ' + 'only go in the build: section') + } registry = ( From 8f026ad3319b50b7f39a8879d5381744049d9c9c Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 16 May 2018 19:30:52 +0200 Subject: [PATCH 076/118] lint: fix decorator, pass through __name__ for registry --- bioconda_utils/lint_functions.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index e154790b9c..0f343bb3d9 100644 --- a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -78,6 +78,7 @@ def lint_metas(recipe, metas, df, *args, **kwargs): ret = lint(meta, df, *args, **kwargs) if ret is not None: return ret + lint_metas.__name__ = lint_function return lint_metas @lint_multiple_metas From 933a95422a22ab6c84cd4e7783dbdebb43f5b05e Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 16 May 2018 19:38:49 +0200 Subject: [PATCH 077/118] lint: fix decorator, pass through __name__ for registry (2) --- bioconda_utils/lint_functions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index 0f343bb3d9..17ecd3915c 100644 --- a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -78,7 +78,7 @@ def lint_metas(recipe, metas, df, *args, **kwargs): ret = lint(meta, df, *args, **kwargs) if ret is not None: return ret - lint_metas.__name__ = lint_function + lint_metas.__name__ = lint_function.__name__ return lint_metas @lint_multiple_metas From 03960fe43923580972062717783c32dfd1bf225d Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 16 May 2018 20:35:13 +0200 Subject: [PATCH 078/118] run long-running tests separately 
--- .circleci/config.yml | 17 ++++++++++++++++- setup.cfg | 4 ++++ test/test_utils.py | 2 ++ 3 files changed, 22 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 428191ee20..c6c9eb0579 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -55,7 +55,20 @@ jobs: - *install_bioconda_utils - run: name: Testing - command: py.test --duration=0 test/ -v --tb=native + command: py.test --duration=0 test/ -v --tb=native -m 'not long_running' + no_output_timeout: 1200 + test-linux (long_running): + <<: *linux + steps: + - checkout + - *common + - *restore_cache + - *setup + - *save_cache + - *install_bioconda_utils + - run: + name: Testing + command: py.test --duration=0 test/ -v --tb=native -m 'long_running' no_output_timeout: 1200 test-macos: <<: *macos @@ -95,6 +108,8 @@ workflows: jobs: - test-linux: context: org-global + - test-linux (long_running): + context: org-global - test-macos: context: org-global - build-docs: diff --git a/setup.cfg b/setup.cfg index 0904fa0cc0..95189f284f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -5,3 +5,7 @@ versionfile_source = bioconda_utils/_version.py versionfile_build = bioconda_utils/_version.py tag_prefix = v parentdir_prefix = bioconda-utils- + +[tool:pytest] +markers= + long_running: mark as long-running test (may be executed separately) diff --git a/test/test_utils.py b/test/test_utils.py index 773a068723..f185057edb 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -203,6 +203,7 @@ def test_single_build_with_post_test(single_build): pkg_test.test_package(pkg) +@pytest.mark.long_running def test_multi_build(multi_build): for v in multi_build.values(): for pkg in v: @@ -752,6 +753,7 @@ def test_build_empty_extra_container(): @pytest.mark.skipif(SKIP_DOCKER_TESTS, reason='skipping on osx') +@pytest.mark.long_running def test_build_container_default_gcc(tmpdir): r = Recipes( """ From b12212091aec54532505a9890c6c2f704b24c874 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: 
Fri, 25 May 2018 17:04:54 +0200 Subject: [PATCH 079/118] docker_utils: use base image acc. to BIOCONDA_UTILS_TAG --- bioconda_utils/docker_utils.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/bioconda_utils/docker_utils.py b/bioconda_utils/docker_utils.py index 26167646bf..a2cafc1f92 100644 --- a/bioconda_utils/docker_utils.py +++ b/bioconda_utils/docker_utils.py @@ -118,7 +118,7 @@ DOCKERFILE_TEMPLATE = \ """ -FROM bioconda/bioconda-utils-build-env +FROM {self.docker_base_image} {self.proxies} RUN /opt/conda/bin/conda install -y conda={conda_ver} conda-build={conda_build_ver} """ @@ -186,6 +186,7 @@ def __init__( pkg_dir=None, keep_image=False, image_build_dir=None, + docker_base_image=None, ): """ Class to handle building a custom docker container that can be used for @@ -259,6 +260,11 @@ def __init__( image_build_dir : str or None If not None, use an existing directory as a docker image context instead of a temporary one. For testing purposes only. + + docker_base_image : str or None + Name of base image that can be used in `dockerfile_template`. + Defaults to 'bioconda/bioconda-utils-build-env:TAG' where TAG is + `os.environ.get('BIOCONDA_UTILS_TAG', 'latest')`. """ self.tag = tag self.requirements = requirements @@ -266,6 +272,10 @@ def __init__( self.build_script_template = build_script_template self.dockerfile_template = dockerfile_template self.keep_image = keep_image + if docker_base_image is None: + docker_base_image = 'bioconda/bioconda-utils-build-env:{}'.format( + os.environ.get('BIOCONDA_UTILS_TAG', 'latest')) + self.docker_base_image = docker_base_image # To address issue #5027: # From b738ca5dc38018a3e28356b042189f8e22f114a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Fri, 25 May 2018 21:09:22 +0200 Subject: [PATCH 080/118] Remove a priori filtering. Instead filter when each recipe is considered. 
--- bioconda_utils/build.py | 14 ++--- bioconda_utils/utils.py | 133 +++++++++++++++------------------------- 2 files changed, 55 insertions(+), 92 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index a4d8d8b6e8..506762ab4c 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -264,12 +264,6 @@ def build_recipes( logger.debug('recipes: %s', recipes) - logger.info('Filtering recipes') - recipe_targets = dict( - utils.filter_recipes( - recipes, check_channels, force=force)) - recipes = set(recipe_targets.keys()) - dag, name2recipes = utils.get_dag(recipes, config=orig_config, blacklist=blacklist) recipe2name = {} for k, v in name2recipes.items(): @@ -327,6 +321,7 @@ def build_recipes( all_success = True failed_uploads = [] skip_dependent = defaultdict(list) + channel_packages = utils.get_channel_packages(check_channels) for recipe in recipes: recipe_success = True @@ -341,7 +336,12 @@ def build_recipes( skipped_recipes.append(recipe) continue - pkg_paths = recipe_targets[recipe] + logger.info('Determining expected packages') + pkg_paths = utils.get_package_paths(recipe, channel_packages, + force=force) + if not pkg_paths: + logger.info("Nothing to be done for recipe %s", recipe) + continue # If a recipe depends on conda, it means it must be installed in # the root env, which is not compatible with mulled-build tests. In diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 4443113dc7..55561d5a69 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -709,97 +709,60 @@ def changed_since_master(recipe_folder): ] -def filter_recipes(recipes, channels=None, force=False): - """ - Generator yielding only those (recipe, pkgs) that should be built. - - Parameters - ---------- - recipes : iterable - Iterable of candidate recipes - - channels : None or list - Optional list of channels to check for existing recipes - - force : bool - Build the package even if it is already available in supplied channels. 
- """ +def get_package_paths(recipe, channel_packages, force=False): + # check if package is noarch, if so, build only on linux + # with temp_os, we can fool the MetaData if needed. + platform = os.environ.get('OSTYPE', sys.platform) + if platform.startswith("darwin"): + platform = 'osx' + elif platform == "linux-gnu": + platform = "linux" + + meta = load_first_metadata( + recipe, config=load_conda_build_config(platform=platform)) + + # The recipe likely defined skip: True + if meta is None: + return [] + + # If on CI, handle noarch. + if os.environ.get('CI', None) == 'true': + if meta.get_value('build/noarch'): + if platform != 'linux': + logger.debug('FILTER: only building %s on ' + 'linux because it defines noarch.', + recipe) + return [] + + # get all packages that would be built + pkg_paths = built_package_paths(recipe) + pkgs = {os.path.basename(p): p for p in pkg_paths} + # check which ones exist already + existing = [pkg for pkg in pkgs if pkg in channel_packages] + + for pkg in existing: + logger.info( + 'FILTER: not building %s because ' + 'it is in channel(s) and it is not forced.', pkg) + for pkg in pkgs: + assert not pkg.endswith("_.tar.bz2"), ( + "rendered path {} does not " + "contain a build number and recipe does not " + "define skip for this environment. " + "This is a conda bug.".format(pkg)) + # yield all pkgs that do not yet exist + return [pkg_path + for pkg, pkg_path in pkgs.items() if force or pkg not in existing] + + +def get_channel_packages(channels): if channels is None: channels = [] channel_packages = set() for channel in channels: channel_packages.update(get_channel_packages(channel=channel)) - - def tobuild(recipe): - # check if package is noarch, if so, build only on linux - # with temp_os, we can fool the MetaData if needed. 
- platform = os.environ.get('OSTYPE', sys.platform) - if platform.startswith("darwin"): - platform = 'osx' - elif platform == "linux-gnu": - platform = "linux" - - meta = load_first_metadata( - recipe, config=load_conda_build_config(platform=platform)) - - # The recipe likely defined skip: True - if meta is None: - return [] - - # If on CI, handle noarch. - if os.environ.get('CI', None) == 'true': - if meta.get_value('build/noarch'): - if platform != 'linux': - logger.debug('FILTER: only building %s on ' - 'linux because it defines noarch.', - recipe) - return [] - - # get all packages that would be built - pkg_paths = built_package_paths(recipe) - pkgs = {os.path.basename(p): p for p in pkg_paths} - # check which ones exist already - existing = [pkg for pkg in pkgs if pkg in channel_packages] - - for pkg in existing: - logger.debug( - 'FILTER: not building %s because ' - 'it is in channel(s) and it is not forced.', pkg) - for pkg in pkgs: - assert not pkg.endswith("_.tar.bz2"), ( - "rendered path {} does not " - "contain a build number and recipe does not " - "define skip for this environment. 
" - "This is a conda bug.".format(pkg)) - # yield all pkgs that do not yet exist - return [pkg_path - for pkg, pkg_path in pkgs.items() if pkg not in existing] - - logger.debug('recipes: %s', recipes) - recipes = list(recipes) - nrecipes = len(recipes) - if nrecipes == 0: - raise StopIteration - max_recipe = max(map(len, recipes)) - template = ( - 'Filtering {{0}} of {{1}} ({{2:.1f}}%) {{3:<{0}}}'.format(max_recipe) - ) - print(flush=True) - try: - for i, recipe in enumerate(sorted(recipes)): - perc = (i + 1) / nrecipes * 100 - print(template.format(i + 1, nrecipes, perc, recipe), end='') - pkgs = tobuild(recipe) - if pkgs: - yield recipe, pkgs - print(end='\r') - except sp.CalledProcessError as e: - logger.debug(e.stdout) - logger.error(e.stderr) - exit(1) - finally: - print(flush=True) + return channel_packages def get_blacklist(blacklists, recipe_folder): From 5ea1d5e3db698222f7a7203facde9de66a54bb0f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Fri, 25 May 2018 21:14:19 +0200 Subject: [PATCH 081/118] Naming. 
--- bioconda_utils/build.py | 2 +- bioconda_utils/utils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 506762ab4c..3d31e7faf3 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -321,7 +321,7 @@ def build_recipes( all_success = True failed_uploads = [] skip_dependent = defaultdict(list) - channel_packages = utils.get_channel_packages(check_channels) + channel_packages = utils.get_all_channel_packages(check_channels) for recipe in recipes: recipe_success = True diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 55561d5a69..c463889deb 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -755,7 +755,7 @@ def get_package_paths(recipe, channel_packages, force=False): for pkg, pkg_path in pkgs.items() if force or pkg not in existing] -def get_channel_packages(channels): +def get_all_channel_packages(channels): if channels is None: channels = [] From 325f0f2a0e0545b3d1a2cce734f8b3010b4c8325 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Fri, 25 May 2018 21:31:48 +0200 Subject: [PATCH 082/118] Skip filter tests for now. --- test/test_utils.py | 284 ++++++++++++++++++++++----------------------- 1 file changed, 142 insertions(+), 142 deletions(-) diff --git a/test/test_utils.py b/test/test_utils.py index f185057edb..bc14f0d6d8 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -321,148 +321,148 @@ def test_conda_as_dep(): ) assert build_result - -def test_filter_recipes_no_skipping(): - """ - No recipes have skip so make sure none are filtered out. 
- """ - r = Recipes( - """ - one: - meta.yaml: | - package: - name: one - version: "0.1" - """, from_string=True) - r.write_recipes() - recipes = list(r.recipe_dirs.values()) - assert len(recipes) == 1 - filtered = list( - utils.filter_recipes(recipes, channels=['bioconda'])) - assert len(filtered) == 1 - - -def test_filter_recipes_skip_is_true(): - r = Recipes( - """ - one: - meta.yaml: | - package: - name: one - version: "0.1" - build: - skip: true - """, from_string=True) - r.write_recipes() - recipes = list(r.recipe_dirs.values()) - filtered = list( - utils.filter_recipes(recipes)) - print(filtered) - assert len(filtered) == 0 - - -def test_filter_recipes_skip_is_true_with_CI_env_var(): - """ - utils.filter_recipes has a conditional that checks to see if there's - a CI=true env var which in some cases only causes failure when running on - CI. So temporarily fake it here so that local tests catch errors. - """ - with utils.temp_env(dict(CI="true")): - r = Recipes( - """ - one: - meta.yaml: | - package: - name: one - version: "0.1" - build: - skip: true - """, from_string=True) - r.write_recipes() - recipes = list(r.recipe_dirs.values()) - filtered = list( - utils.filter_recipes(recipes)) - print(filtered) - assert len(filtered) == 0 - - -def test_filter_recipes_skip_not_py27(): - """ - When all but one Python version is skipped, filtering should do that. 
- """ - - r = Recipes( - """ - one: - meta.yaml: | - package: - name: one - version: "0.1" - build: - skip: True # [not py27] - requirements: - build: - - python - run: - - python - """, from_string=True) - r.write_recipes() - recipes = list(r.recipe_dirs.values()) - filtered = list( - utils.filter_recipes(recipes, channels=['bioconda'])) - - # one recipe, one target - assert len(filtered) == 1 - assert len(filtered[0][1]) == 1 - - -def test_filter_recipes_existing_package(): - "use a known-to-exist package in bioconda" - - # note that we need python as a run requirement in order to get the "pyXY" - # in the build string that matches the existing bioconda built package. - r = Recipes( - """ - one: - meta.yaml: | - package: - name: gffutils - version: "0.8.7.1" - requirements: - build: - - python - run: - - python - """, from_string=True) - r.write_recipes() - recipes = list(r.recipe_dirs.values()) - filtered = list( - utils.filter_recipes(recipes, channels=['bioconda'])) - assert len(filtered) == 0 - - -def test_filter_recipes_force_existing_package(): - "same as above but force the recipe" - - # same as above, but this time force the recipe - # TODO: refactor as py.test fixture - r = Recipes( - """ - one: - meta.yaml: | - package: - name: gffutils - version: "0.8.7.1" - requirements: - run: - - python - """, from_string=True) - r.write_recipes() - recipes = list(r.recipe_dirs.values()) - filtered = list( - utils.filter_recipes( - recipes, channels=['bioconda'], force=True)) - assert len(filtered) == 1 +# TODO replace the filter tests with tests for utils.get_package_paths() +# def test_filter_recipes_no_skipping(): +# """ +# No recipes have skip so make sure none are filtered out. 
+# """ +# r = Recipes( +# """ +# one: +# meta.yaml: | +# package: +# name: one +# version: "0.1" +# """, from_string=True) +# r.write_recipes() +# recipes = list(r.recipe_dirs.values()) +# assert len(recipes) == 1 +# filtered = list( +# utils.filter_recipes(recipes, channels=['bioconda'])) +# assert len(filtered) == 1 +# +# +# def test_filter_recipes_skip_is_true(): +# r = Recipes( +# """ +# one: +# meta.yaml: | +# package: +# name: one +# version: "0.1" +# build: +# skip: true +# """, from_string=True) +# r.write_recipes() +# recipes = list(r.recipe_dirs.values()) +# filtered = list( +# utils.filter_recipes(recipes)) +# print(filtered) +# assert len(filtered) == 0 +# +# +# def test_filter_recipes_skip_is_true_with_CI_env_var(): +# """ +# utils.filter_recipes has a conditional that checks to see if there's +# a CI=true env var which in some cases only causes failure when running on +# CI. So temporarily fake it here so that local tests catch errors. +# """ +# with utils.temp_env(dict(CI="true")): +# r = Recipes( +# """ +# one: +# meta.yaml: | +# package: +# name: one +# version: "0.1" +# build: +# skip: true +# """, from_string=True) +# r.write_recipes() +# recipes = list(r.recipe_dirs.values()) +# filtered = list( +# utils.filter_recipes(recipes)) +# print(filtered) +# assert len(filtered) == 0 +# +# +# def test_filter_recipes_skip_not_py27(): +# """ +# When all but one Python version is skipped, filtering should do that. 
+# """ +# +# r = Recipes( +# """ +# one: +# meta.yaml: | +# package: +# name: one +# version: "0.1" +# build: +# skip: True # [not py27] +# requirements: +# build: +# - python +# run: +# - python +# """, from_string=True) +# r.write_recipes() +# recipes = list(r.recipe_dirs.values()) +# filtered = list( +# utils.filter_recipes(recipes, channels=['bioconda'])) +# +# # one recipe, one target +# assert len(filtered) == 1 +# assert len(filtered[0][1]) == 1 +# +# +# def test_filter_recipes_existing_package(): +# "use a known-to-exist package in bioconda" +# +# # note that we need python as a run requirement in order to get the "pyXY" +# # in the build string that matches the existing bioconda built package. +# r = Recipes( +# """ +# one: +# meta.yaml: | +# package: +# name: gffutils +# version: "0.8.7.1" +# requirements: +# build: +# - python +# run: +# - python +# """, from_string=True) +# r.write_recipes() +# recipes = list(r.recipe_dirs.values()) +# filtered = list( +# utils.filter_recipes(recipes, channels=['bioconda'])) +# assert len(filtered) == 0 +# +# +# def test_filter_recipes_force_existing_package(): +# "same as above but force the recipe" +# +# # same as above, but this time force the recipe +# # TODO: refactor as py.test fixture +# r = Recipes( +# """ +# one: +# meta.yaml: | +# package: +# name: gffutils +# version: "0.8.7.1" +# requirements: +# run: +# - python +# """, from_string=True) +# r.write_recipes() +# recipes = list(r.recipe_dirs.values()) +# filtered = list( +# utils.filter_recipes( +# recipes, channels=['bioconda'], force=True)) +# assert len(filtered) == 1 def test_get_channel_packages(): From d3530ad2e0e47fc3064cbcdfe6f8b1a639fb562b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Fri, 25 May 2018 21:56:35 +0200 Subject: [PATCH 083/118] Disable DAG generation. 
--- bioconda_utils/build.py | 105 +++++++++++++++++++++------------------- 1 file changed, 54 insertions(+), 51 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 3d31e7faf3..36d6af8e91 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -264,56 +264,59 @@ def build_recipes( logger.debug('recipes: %s', recipes) - dag, name2recipes = utils.get_dag(recipes, config=orig_config, blacklist=blacklist) - recipe2name = {} - for k, v in name2recipes.items(): - for i in v: - recipe2name[i] = k - - if not dag: - logger.info("Nothing to be done.") - return True - else: - logger.info("Building and testing %s recipes in total", len(dag)) - logger.info("Recipes to build: \n%s", "\n".join(dag.nodes())) - - subdags_n = int(os.environ.get("SUBDAGS", 1)) - subdag_i = int(os.environ.get("SUBDAG", 0)) - - if subdag_i >= subdags_n: - raise ValueError( - "SUBDAG=%s (zero-based) but only SUBDAGS=%s " - "subdags are available") - - # Get connected subdags and sort by nodes - if testonly: - # use each node as a subdag (they are grouped into equal sizes below) - subdags = sorted([[n] for n in nx.nodes(dag)]) - else: - # take connected components as subdags - subdags = sorted(map(sorted, nx.connected_components(dag.to_undirected( - )))) - # chunk subdags such that we have at most subdags_n many - if subdags_n < len(subdags): - chunks = [[n for subdag in subdags[i::subdags_n] for n in subdag] - for i in range(subdags_n)] - else: - chunks = subdags - if subdag_i >= len(chunks): - logger.info("Nothing to be done.") - return True - # merge subdags of the selected chunk - subdag = dag.subgraph(chunks[subdag_i]) - - # ensure that packages which need a build are built in the right order - recipes = [recipe - for package in nx.topological_sort(subdag) - for recipe in name2recipes[package]] - - logger.info( - "Building and testing subdag %s of %s (%s recipes)", - subdag_i + 1, subdags_n, len(recipes) - ) + # TODO remove DAG building for now. 
It takes too long to read all the metadata + # with CB3. + + # dag, name2recipes = utils.get_dag(recipes, config=orig_config, blacklist=blacklist) + # recipe2name = {} + # for k, v in name2recipes.items(): + # for i in v: + # recipe2name[i] = k + # + # if not dag: + # logger.info("Nothing to be done.") + # return True + # else: + # logger.info("Building and testing %s recipes in total", len(dag)) + # logger.info("Recipes to build: \n%s", "\n".join(dag.nodes())) + # + # subdags_n = int(os.environ.get("SUBDAGS", 1)) + # subdag_i = int(os.environ.get("SUBDAG", 0)) + # + # if subdag_i >= subdags_n: + # raise ValueError( + # "SUBDAG=%s (zero-based) but only SUBDAGS=%s " + # "subdags are available") + # + # # Get connected subdags and sort by nodes + # if testonly: + # # use each node as a subdag (they are grouped into equal sizes below) + # subdags = sorted([[n] for n in nx.nodes(dag)]) + # else: + # # take connected components as subdags + # subdags = sorted(map(sorted, nx.connected_components(dag.to_undirected( + # )))) + # # chunk subdags such that we have at most subdags_n many + # if subdags_n < len(subdags): + # chunks = [[n for subdag in subdags[i::subdags_n] for n in subdag] + # for i in range(subdags_n)] + # else: + # chunks = subdags + # if subdag_i >= len(chunks): + # logger.info("Nothing to be done.") + # return True + # # merge subdags of the selected chunk + # subdag = dag.subgraph(chunks[subdag_i]) + # + # # ensure that packages which need a build are built in the right order + # recipes = [recipe + # for package in nx.topological_sort(subdag) + # for recipe in name2recipes[package]] + # + # logger.info( + # "Building and testing subdag %s of %s (%s recipes)", + # subdag_i + 1, subdags_n, len(recipes) + # ) failed = [] built_recipes = [] @@ -325,7 +328,7 @@ def build_recipes( for recipe in recipes: recipe_success = True - name = recipe2name[recipe] + name = os.path.basename(recipe) if name in skip_dependent: logger.info( From 
26ec23c3ca135f2acba0e4276fbd7f63787c868f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Fri, 25 May 2018 23:19:37 +0200 Subject: [PATCH 084/118] Activate DAG calculation again (we cannot avoid it because we need the topological sorting). --- bioconda_utils/build.py | 105 +++++++++++++++++++--------------------- bioconda_utils/utils.py | 7 ++- 2 files changed, 56 insertions(+), 56 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 36d6af8e91..3d31e7faf3 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -264,59 +264,56 @@ def build_recipes( logger.debug('recipes: %s', recipes) - # TODO remove DAG building for now. It takes too long to read all the metadata - # with CB3. - - # dag, name2recipes = utils.get_dag(recipes, config=orig_config, blacklist=blacklist) - # recipe2name = {} - # for k, v in name2recipes.items(): - # for i in v: - # recipe2name[i] = k - # - # if not dag: - # logger.info("Nothing to be done.") - # return True - # else: - # logger.info("Building and testing %s recipes in total", len(dag)) - # logger.info("Recipes to build: \n%s", "\n".join(dag.nodes())) - # - # subdags_n = int(os.environ.get("SUBDAGS", 1)) - # subdag_i = int(os.environ.get("SUBDAG", 0)) - # - # if subdag_i >= subdags_n: - # raise ValueError( - # "SUBDAG=%s (zero-based) but only SUBDAGS=%s " - # "subdags are available") - # - # # Get connected subdags and sort by nodes - # if testonly: - # # use each node as a subdag (they are grouped into equal sizes below) - # subdags = sorted([[n] for n in nx.nodes(dag)]) - # else: - # # take connected components as subdags - # subdags = sorted(map(sorted, nx.connected_components(dag.to_undirected( - # )))) - # # chunk subdags such that we have at most subdags_n many - # if subdags_n < len(subdags): - # chunks = [[n for subdag in subdags[i::subdags_n] for n in subdag] - # for i in range(subdags_n)] - # else: - # chunks = subdags - # if subdag_i >= len(chunks): - # 
logger.info("Nothing to be done.") - # return True - # # merge subdags of the selected chunk - # subdag = dag.subgraph(chunks[subdag_i]) - # - # # ensure that packages which need a build are built in the right order - # recipes = [recipe - # for package in nx.topological_sort(subdag) - # for recipe in name2recipes[package]] - # - # logger.info( - # "Building and testing subdag %s of %s (%s recipes)", - # subdag_i + 1, subdags_n, len(recipes) - # ) + dag, name2recipes = utils.get_dag(recipes, config=orig_config, blacklist=blacklist) + recipe2name = {} + for k, v in name2recipes.items(): + for i in v: + recipe2name[i] = k + + if not dag: + logger.info("Nothing to be done.") + return True + else: + logger.info("Building and testing %s recipes in total", len(dag)) + logger.info("Recipes to build: \n%s", "\n".join(dag.nodes())) + + subdags_n = int(os.environ.get("SUBDAGS", 1)) + subdag_i = int(os.environ.get("SUBDAG", 0)) + + if subdag_i >= subdags_n: + raise ValueError( + "SUBDAG=%s (zero-based) but only SUBDAGS=%s " + "subdags are available") + + # Get connected subdags and sort by nodes + if testonly: + # use each node as a subdag (they are grouped into equal sizes below) + subdags = sorted([[n] for n in nx.nodes(dag)]) + else: + # take connected components as subdags + subdags = sorted(map(sorted, nx.connected_components(dag.to_undirected( + )))) + # chunk subdags such that we have at most subdags_n many + if subdags_n < len(subdags): + chunks = [[n for subdag in subdags[i::subdags_n] for n in subdag] + for i in range(subdags_n)] + else: + chunks = subdags + if subdag_i >= len(chunks): + logger.info("Nothing to be done.") + return True + # merge subdags of the selected chunk + subdag = dag.subgraph(chunks[subdag_i]) + + # ensure that packages which need a build are built in the right order + recipes = [recipe + for package in nx.topological_sort(subdag) + for recipe in name2recipes[package]] + + logger.info( + "Building and testing subdag %s of %s (%s recipes)", + 
subdag_i + 1, subdags_n, len(recipes) + ) failed = [] built_recipes = [] @@ -328,7 +325,7 @@ def build_recipes( for recipe in recipes: recipe_success = True - name = os.path.basename(recipe) + name = recipe2name[recipe] if name in skip_dependent: logger.info( diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index c463889deb..4c13f583a2 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -404,9 +404,11 @@ def get_dag(recipes, config, blacklist=None, restrict=True): Dictionary mapping package names to recipe paths. These recipe path values are lists and contain paths to all defined versions. """ + logger.info("Generating DAG") recipes = list(recipes) metadata = [] for recipe in sorted(recipes): + logger.info("Inspecting %s", recipe) for r in load_all_meta(recipe, finalize=False): metadata.append((r, recipe)) if blacklist is None: @@ -751,8 +753,9 @@ def get_package_paths(recipe, channel_packages, force=False): "define skip for this environment. " "This is a conda bug.".format(pkg)) # yield all pkgs that do not yet exist - return [pkg_path - for pkg, pkg_path in pkgs.items() if force or pkg not in existing] + return ([pkg_path + for pkg, pkg_path in pkgs.items() if force or pkg not in existing], + meta) def get_all_channel_packages(channels): From 668ec3845cb7594c140e9442f35ef0b2cad39513 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Sat, 26 May 2018 00:06:06 +0200 Subject: [PATCH 085/118] Fast loading of metadata for DAG. 
--- bioconda_utils/utils.py | 63 ++++++++++++++++++++++++++++++++++------- 1 file changed, 53 insertions(+), 10 deletions(-) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 4c13f583a2..6f0f4f8799 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -24,6 +24,7 @@ from conda_build import api from conda.exports import VersionOrder import yaml +import jinja2 from jinja2 import Environment, PackageLoader from colorlog import ColoredFormatter @@ -166,6 +167,38 @@ def load_all_meta(recipe, config=None, finalize=True): finalize=finalize)] + +def load_meta_fast(recipe): + """ + Given a package name, find the current meta.yaml file, parse it, and return + the dict. + + Parameters + ---------- + recipe : str + Path to recipe (directory containing the meta.yaml file) + + config : str or dict + Config YAML or dict + """ + class SilentUndefined(jinja2.Undefined): + def _fail_with_undefined_error(self, *args, **kwargs): + return "" + __add__ = __radd__ = __mul__ = __rmul__ = __div__ = __rdiv__ = \ + __truediv__ = __rtruediv__ = __floordiv__ = __rfloordiv__ = \ + __mod__ = __rmod__ = __pos__ = __neg__ = __call__ = \ + __getitem__ = __lt__ = __le__ = __gt__ = __ge__ = __int__ = \ + __float__ = __complex__ = __pow__ = __rpow__ = \ + _fail_with_undefined_error + + pth = os.path.join(recipe, 'meta.yaml') + jinja_env = jinja2.Environment(undefined=SilentUndefined) + content = jinja_env.from_string( + open(pth, 'r', encoding='utf-8').read()).render({}) + meta = yaml.load(content) + return meta + + def load_conda_build_config(platform=None, trim_skip=True): """ Load conda build config while considering global pinnings from conda-forge. 
@@ -407,10 +440,11 @@ def get_dag(recipes, config, blacklist=None, restrict=True): logger.info("Generating DAG") recipes = list(recipes) metadata = [] - for recipe in sorted(recipes): - logger.info("Inspecting %s", recipe) - for r in load_all_meta(recipe, finalize=False): - metadata.append((r, recipe)) + for i, recipe in enumerate(sorted(recipes)): + meta = load_meta_fast(recipe) + metadata.append((meta, recipe)) + if i % 100 == 0: + logger.info("Inspected {} of {} recipes".format(i, len(recipes))) if blacklist is None: blacklist = set() @@ -423,25 +457,34 @@ def get_dag(recipes, config, blacklist=None, restrict=True): # Note that this may change once we support conda-build 3. name2recipe = defaultdict(set) for meta, recipe in metadata: - name = meta.get_value('package/name') + name = meta["package"]["name"] if name not in blacklist: name2recipe[name].update([recipe]) + def get_deps(meta, sec): + reqs = meta.get("requirements") + if not reqs: + return [] + deps = reqs.get(sec) + if not deps: + return [] + return [dep.split()[0] for dep in deps if dep is not None] + def get_inner_deps(dependencies): + dependencies = list(dependencies) for dep in dependencies: if dep in name2recipe or not restrict: yield dep dag = nx.DiGraph() - dag.add_nodes_from(meta.get_value('package/name') + dag.add_nodes_from(meta["package"]["name"] for meta, recipe in metadata) for meta, recipe in metadata: - name = meta.get_value('package/name') + name = meta["package"]["name"] dag.add_edges_from((dep, name) for dep in set(get_inner_deps(chain( - get_deps(meta=meta), - get_deps(meta=meta, - build=False))))) + get_deps(meta, "build"), + get_deps(meta, "host"))))) return dag, name2recipe From a6dabe3cdc470e0c2c1c6ddd8a9e52a941e95589 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Sat, 26 May 2018 13:20:45 +0200 Subject: [PATCH 086/118] Only use host dependencies when building DAG. 
--- bioconda_utils/utils.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 6f0f4f8799..13912f4bc3 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -482,9 +482,7 @@ def get_inner_deps(dependencies): for meta, recipe in metadata: name = meta["package"]["name"] dag.add_edges_from((dep, name) - for dep in set(get_inner_deps(chain( - get_deps(meta, "build"), - get_deps(meta, "host"))))) + for dep in set(get_inner_deps(get_deps(meta, "host")))) return dag, name2recipe From 2fb47ea9edc109ed5ff488b02752a79335006220 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 26 May 2018 11:59:48 -0400 Subject: [PATCH 087/118] get_package_paths returns tuple --- bioconda_utils/build.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 3d31e7faf3..0398e300da 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -337,7 +337,7 @@ def build_recipes( continue logger.info('Determining expected packages') - pkg_paths = utils.get_package_paths(recipe, channel_packages, + pkg_paths, _meta = utils.get_package_paths(recipe, channel_packages, force=force) if not pkg_paths: logger.info("Nothing to be done for recipe %s", recipe) From 6072d395fad290ba4bb1111fe925532c8269e61f Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sat, 26 May 2018 22:35:46 +0200 Subject: [PATCH 088/118] utils.get_package_paths: only return paths --- bioconda_utils/build.py | 2 +- bioconda_utils/utils.py | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 0398e300da..3d31e7faf3 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -337,7 +337,7 @@ def build_recipes( continue logger.info('Determining expected packages') - pkg_paths, _meta = utils.get_package_paths(recipe, channel_packages, + pkg_paths = utils.get_package_paths(recipe, 
channel_packages, force=force) if not pkg_paths: logger.info("Nothing to be done for recipe %s", recipe) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 13912f4bc3..d1e5db3be3 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -794,9 +794,8 @@ def get_package_paths(recipe, channel_packages, force=False): "define skip for this environment. " "This is a conda bug.".format(pkg)) # yield all pkgs that do not yet exist - return ([pkg_path - for pkg, pkg_path in pkgs.items() if force or pkg not in existing], - meta) + return [pkg_path + for pkg, pkg_path in pkgs.items() if force or pkg not in existing] def get_all_channel_packages(channels): From 62f1d5201b8975ec651ed83a5113398338392ea7 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sat, 26 May 2018 22:38:37 +0200 Subject: [PATCH 089/118] requirements: update conda, conda-build, conda-forge-pinning --- bioconda_utils/bioconda_utils-requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index da2017683b..d9b1c59088 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -1,8 +1,8 @@ anaconda-client=1.6.* argh=0.26.* beautifulsoup4=4.6.* -conda=4.5.3 -conda-build=3.10.2 +conda=4.5.4 +conda-build=3.10.5 galaxy-lib>=18.5.5 jinja2=2.10.* jsonschema=2.6.* @@ -21,4 +21,4 @@ colorlog=3.1.* six=1.11.* alabaster=0.7.* git=2.14.* -conda-forge-pinning=2018.05.07 +conda-forge-pinning=2018.05.22 From 9821159d6ee2da3880bb892f4090b7752872a51a Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sat, 26 May 2018 23:56:29 +0200 Subject: [PATCH 090/118] use bypass_env_check=True when we use non-finalized renderings --- bioconda_utils/utils.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index d1e5db3be3..0a47c0ced9 100644 --- 
a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -162,9 +162,16 @@ def load_all_meta(recipe, config=None, finalize=True): """ if config is None: config = load_conda_build_config() + # `bypass_env_check=True` prevents evaluating (=environment solving) the + # package versions used for `pin_compatible` and the like. + # To avoid adding a separate `bypass_env_check` alongside every `finalize` + # parameter, just assume we always want to bypass if `finalize is True`. + bypass_env_check = (not finalize) return [meta for (meta, _, _) in api.render(recipe, config=config, - finalize=finalize)] + finalize=finalize, + bypass_env_check=bypass_env_check, + )] From 62efbd537cafa3fdf2a8f9b6e5c4a4395272133b Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sun, 27 May 2018 00:04:50 +0200 Subject: [PATCH 091/118] requirements: use conda-build=3.10.3 for now --- bioconda_utils/bioconda_utils-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index d9b1c59088..b03f6fce43 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -2,7 +2,7 @@ anaconda-client=1.6.* argh=0.26.* beautifulsoup4=4.6.* conda=4.5.4 -conda-build=3.10.5 +conda-build=3.10.3 galaxy-lib>=18.5.5 jinja2=2.10.* jsonschema=2.6.* From 66497f269e2fc262c329cc5e6e964b735847c847 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sun, 27 May 2018 00:05:32 +0200 Subject: [PATCH 092/118] test_utils: disable the remaining filter_recipes test --- test/test_utils.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/test/test_utils.py b/test/test_utils.py index bc14f0d6d8..265ecde55e 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -463,6 +463,14 @@ def test_conda_as_dep(): # utils.filter_recipes( # recipes, channels=['bioconda'], force=True)) # assert len(filtered) == 1 +# +# +# def 
test_zero_packages(): +# """ +# Regression test; make sure filter_recipes exits cleanly if no recipes were +# provided. +# """ +# assert list(utils.filter_recipes([])) == [] def test_get_channel_packages(): @@ -716,14 +724,6 @@ def test_subdags_more_than_recipes(self, caplog, recipes_fixture): assert 'Nothing to be done' in caplog.records[-1].getMessage() -def test_zero_packages(): - """ - Regression test; make sure filter_recipes exits cleanly if no recipes were - provided. - """ - assert list(utils.filter_recipes([])) == [] - - @pytest.mark.skipif(SKIP_DOCKER_TESTS, reason='skipping on osx') def test_build_empty_extra_container(): r = Recipes( From c074b1315800d173bc6d20c4daa92d052617d12a Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Tue, 29 May 2018 17:18:22 +0200 Subject: [PATCH 093/118] Abort building if build strings in repo are divergent --- bioconda_utils/build.py | 15 ++++++- bioconda_utils/utils.py | 90 +++++++++++++++++++++++++++++------------ 2 files changed, 78 insertions(+), 27 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 3d31e7faf3..0297b4f630 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -337,8 +337,19 @@ def build_recipes( continue logger.info('Determining expected packages') - pkg_paths = utils.get_package_paths(recipe, channel_packages, - force=force) + try: + pkg_paths = utils.get_package_paths(recipe, channel_packages, + force=force) + except utils.DivergentBuildsError as e + logger.error( + 'BUILD ERROR: ' + 'packages with divergent build strings in repository ' + 'for recipe %s. 
A build number bump is likely needed: %s', + recipe, e) + failed.append(recipe) + for n in nx.algorithms.descendants(subdag, name): + skip_dependent[n].append(recipe) + continue if not pkg_paths: logger.info("Nothing to be done for recipe %s", recipe) continue diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 0a47c0ced9..d1ae246bc5 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -8,7 +8,7 @@ import sys import shutil import contextlib -from collections import defaultdict, Iterable +from collections import Iterable, defaultdict, namedtuple from itertools import product, chain, groupby import logging import pkg_resources @@ -17,7 +17,6 @@ from jsonschema import validate import datetime from distutils.version import LooseVersion -import time from threading import Event, Thread from pathlib import PurePath @@ -608,9 +607,17 @@ def get_channel_repodata(channel='bioconda', platform=None): return repodata.json(), noarch_repodata.json() +PackageKey = namedtuple('PackageKey', ('name', 'version', 'build_number')) + + +class DivergentBuildsError(Exception): + pass + + def get_channel_packages(channel='bioconda', platform=None): """ - Retrieves the existing packages for a channel from conda.anaconda.org + Retrieves the existing packages for a channel from conda.anaconda.org as a + dict where keys are PackageKey instances and values are sets of build strings. 
Parameters ---------- @@ -623,8 +630,13 @@ def get_channel_packages(channel='bioconda', platform=None): """ repodata, noarch_repodata = get_channel_repodata( channel=channel, platform=platform) - channel_packages = set(repodata['packages'].keys()) - channel_packages.update(noarch_repodata['packages'].keys()) + channel_packages = defaultdict(set) + for repo in (repodata, noarch_repodata): + for package in repo['packages'].values(): + pkg_key = PackageKey( + package['name'], package['version'], package['build_number']) + channel_packages[pkg_key].add(package['build']) + channel_packages.default_factory = None return channel_packages @@ -768,50 +780,78 @@ def get_package_paths(recipe, channel_packages, force=False): elif platform == "linux-gnu": platform = "linux" - meta = load_first_metadata( + metas = load_all_meta( recipe, config=load_conda_build_config(platform=platform)) # The recipe likely defined skip: True - if meta is None: + if not metas: return [] # If on CI, handle noarch. if os.environ.get('CI', None) == 'true': - if meta.get_value('build/noarch'): + first_meta = metas[0] + if first_meta.get_value('build/noarch'): if platform != 'linux': logger.debug('FILTER: only building %s on ' 'linux because it defines noarch.', recipe) return [] - # get all packages that would be built - pkg_paths = built_package_paths(recipe) - pkgs = {os.path.basename(p): p for p in pkg_paths} - # check which ones exist already - existing = [pkg for pkg in pkgs if pkg in channel_packages] + new_metas, existing_metas, divergent_builds = ( + _filter_existing_packages(metas, channel_packages)) + + if divergent_builds: + raise DivergentBuildsError(*sorted(divergent_builds)) - for pkg in existing: + for meta in existing_metas: logger.info( 'FILTER: not building %s because ' - 'it is in channel(s) and it is not forced.', pkg) - for pkg in pkgs: - assert not pkg.endswith("_.tar.bz2"), ( - "rendered path {} does not " - "contain a build number and recipe does not " - "define skip for this 
environment. " - "This is a conda bug.".format(pkg)) + 'it is in channel(s) and it is not forced.', meta.pkg_fn()) # yield all pkgs that do not yet exist - return [pkg_path - for pkg, pkg_path in pkgs.items() if force or pkg not in existing] + if force: + build_metas = new_metas + existing_metas + else: + build_metas = new_metas + return list(chain.from_iterable( + api.get_output_file_paths(meta) for meta in build_metas)) + + +def _filter_existing_packages(metas, channel_packages): + new_metas = [] # MetaData instances of packages not yet in channel + existing_metas = [] # MetaData instances of packages already in channel + divergent_builds = set() # set of Dist (i.e., name-version-build) strings + + recipe_pkgs = defaultdict(list) + for meta in metas: + pkg_key = PackageKey( + meta.name(), meta.version(), int(meta.build_number() or 0)) + recipe_pkgs[pkg_key].append(meta) + recipe_pkgs.default_factory = None + + for pkg_key, pkg_metas in recipe_pkgs.items(): + existing_build_ids = channel_packages.get(pkg_key, set()) + for meta in pkg_metas: + if meta.build_id() not in existing_build_ids: + new_metas.append(meta) + else: + existing_metas.append(meta) + pkg_build_ids = set(meta.build_id() for meta in pkg_metas) + for divergent_build in (existing_build_ids - pkg_build_ids): + divergent_builds.add( + '-'.join((pkg_key.name, pkg_key.version, divergent_build))) + return new_metas, existing_metas, divergent_builds def get_all_channel_packages(channels): if channels is None: channels = [] - channel_packages = set() + all_channel_packages = defaultdict(set) for channel in channels: - channel_packages.update(get_channel_packages(channel=channel)) + channel_packages = get_channel_packages(channel=channel) + for pkg_key, pkg_build_ids in channel_packages.items(): + all_channel_packages[pkg_key].update(pkg_build_ids) + all_channel_packages.default_factory = None return channel_packages From 4085e6a716fdf4e564d15ccceebb4353f2f45b0f Mon Sep 17 00:00:00 2001 From: Marcel Bargull 
Date: Tue, 29 May 2018 17:22:40 +0200 Subject: [PATCH 094/118] fix previous commit: add Python's redundant colon --- bioconda_utils/build.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 0297b4f630..79823af1f0 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -340,7 +340,7 @@ def build_recipes( try: pkg_paths = utils.get_package_paths(recipe, channel_packages, force=force) - except utils.DivergentBuildsError as e + except utils.DivergentBuildsError as e: logger.error( 'BUILD ERROR: ' 'packages with divergent build strings in repository ' From b002ec05c743eaedb54de352be1b9118882963f7 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Tue, 29 May 2018 17:28:58 +0200 Subject: [PATCH 095/118] fix generate_docs.py --- docs/source/generate_docs.py | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/docs/source/generate_docs.py b/docs/source/generate_docs.py index 207f756199..3093e9b45e 100644 --- a/docs/source/generate_docs.py +++ b/docs/source/generate_docs.py @@ -37,10 +37,10 @@ def __init__(self): logger.info('Loading packages...') repodata = defaultdict(lambda: defaultdict(list)) for platform in ['linux', 'osx']: - for pkg in utils.get_channel_packages(channel='bioconda', - platform=platform): - name, version, _ = self._parse_pkgname(pkg) - repodata[name][version].append(platform) + channel_packages = utils.get_channel_packages( + channel='bioconda', platform=platform) + for pkg_key in channel_packages.keys(): + repodata[pkg_key.name][pkg_key.version].append(platform) self.repodata = repodata # e.g., repodata = { # 'package1': { @@ -49,14 +49,6 @@ def __init__(self): # }, # } - def _parse_pkgname(self, p): - p = p.replace('.tar.bz2', '') - toks = p.split('-') - build_string = toks.pop() - version = toks.pop() - name = '-'.join(toks) - return name, version, build_string - def get_versions(self, p): """Get versions available for package From 
b4917aa2ede4ed8e93b07c76edf92a5bc1bc73ea Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Tue, 29 May 2018 17:38:57 +0200 Subject: [PATCH 096/118] fix another typo --- bioconda_utils/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index d1ae246bc5..4306981e91 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -852,7 +852,7 @@ def get_all_channel_packages(channels): for pkg_key, pkg_build_ids in channel_packages.items(): all_channel_packages[pkg_key].update(pkg_build_ids) all_channel_packages.default_factory = None - return channel_packages + return all_channel_packages def get_blacklist(blacklists, recipe_folder): From 69a1f357db4f9aff528f9180beb0a08f31fdfed3 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 2 Jun 2018 13:46:43 -0400 Subject: [PATCH 097/118] support linting just before building --- bioconda_utils/build.py | 29 +++++++++++++++++++++++++++++ bioconda_utils/cli.py | 6 ++++++ 2 files changed, 35 insertions(+) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 0398e300da..d3369fe9f3 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -3,10 +3,12 @@ import os import logging import networkx as nx +import pandas from . import utils from . import docker_utils from . import pkg_test from . import upload +from . import linting logger = logging.getLogger(__name__) @@ -35,6 +37,8 @@ def build( channels=None, docker_builder=None, _raise_error=False, + prelint=False, + df=None, ): """ Build a single recipe for a single env @@ -70,8 +74,26 @@ def build( _raise_error : bool Instead of returning a failed build result, raise the error instead. Used for testing. + + prelint : bool + If True, then apply linting just before building. `df` should probably + be provided as well. 
+ + df : pandas.DataFrame + Dataframe of channel info, likely from linting.channel_dataframe() """ + if prelint: + logger.info('Linting recipe') + report = linting.lint([recipe], df) + if report is not None: + summarized = pandas.DataFrame( + dict(failed_tests=report.groupby('recipe')['check'].agg('unique'))) + logger.error('\n\nThe following recipes failed linting. See ' + 'https://bioconda.github.io/linting.html for details:\n\n%s\n', + summarized.to_string()) + return BuildResult(False, None) + # Clean provided env and exisiting os.environ to only allow whitelisted env # vars _docker = docker_builder is not None @@ -187,6 +209,7 @@ def build_recipes( anaconda_upload=False, mulled_upload_target=None, check_channels=None, + prelint=False, ): """ Build one or many bioconda packages. @@ -264,6 +287,10 @@ def build_recipes( logger.debug('recipes: %s', recipes) + if prelint: + logger.info("Downloading channel information to use for linting") + df = linting.channel_dataframe(channels=['conda-forge', 'defaults']) + dag, name2recipes = utils.get_dag(recipes, config=orig_config, blacklist=blacklist) recipe2name = {} for k, v in name2recipes.items(): @@ -368,6 +395,8 @@ def build_recipes( force=force, channels=config['channels'], docker_builder=docker_builder, + df=df, + prelint=prelint, ) all_success &= res.success diff --git a/bioconda_utils/cli.py b/bioconda_utils/cli.py index 3f797b6d95..d4169b39a8 100644 --- a/bioconda_utils/cli.py +++ b/bioconda_utils/cli.py @@ -309,6 +309,10 @@ def lint(recipe_folder, config, packages="*", cache=None, list_funcs=False, @arg('--keep-image', action='store_true', help='''After building recipes, the created Docker image is removed by default to save disk space. Use this argument to disable this behavior.''') +@arg('--prelint', action='store_true', help='''Just before each recipe, apply + the linting functions to it. 
This can be used as an alternative to linting + all recipes before any building takes place with the `bioconda-utils lint` + command.''') def build( recipe_folder, config, @@ -324,6 +328,7 @@ def build( anaconda_upload=False, mulled_upload_target=None, keep_image=False, + prelint=False, ): utils.setup_logger('bioconda_utils', loglevel) @@ -383,6 +388,7 @@ def build( docker_builder=docker_builder, anaconda_upload=anaconda_upload, mulled_upload_target=mulled_upload_target, + prelint=prelint, ) exit(0 if success else 1) From ec47a17af8d9ee0a9f0a9ea4c5c06182a4f0da0e Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 2 Jun 2018 13:48:03 -0400 Subject: [PATCH 098/118] sort recipes for linting --- bioconda_utils/linting.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bioconda_utils/linting.py b/bioconda_utils/linting.py index f47ca686c3..2308d54bb9 100644 --- a/bioconda_utils/linting.py +++ b/bioconda_utils/linting.py @@ -182,7 +182,7 @@ def lint(recipes, df, exclude=None, registry=None): skip_dict[recipe].append(func) hits = [] - for recipe in recipes: + for recipe in sorted(recipes): # Since lint functions need a parsed meta.yaml, checking for parsing # errors can't be a lint function. 
# @@ -204,7 +204,6 @@ def lint(recipes, df, exclude=None, registry=None): 'severity': 'ERROR', 'info': result}) continue - logger.debug('lint {}'.format(recipe)) # skips defined in commit message skip_for_this_recipe = set(skip_dict[recipe]) From 77d673b32cf84cf083b74d28fe4c7e34d985885a Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 2 Jun 2018 13:49:20 -0400 Subject: [PATCH 099/118] better noarch detection --- bioconda_utils/lint_functions.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index 6add7dae30..ab76f9b77b 100644 --- a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -225,8 +225,10 @@ def has_windows_bat_file(recipe, metas, df): @lint_multiple_metas def should_be_noarch(recipe, meta, df): deps = _get_deps(meta) + no_compilers = sum(1 for i in deps if + i.startswith('toolchain_')) == 0 if ( - ('gcc' not in deps) and + no_compilers and ('python' in deps) and # This will also exclude recipes with skip sections # which is a good thing, because noarch also implies independence of From 0302291e107639e909b83b537e0ace410e772a1f Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 2 Jun 2018 14:32:46 -0400 Subject: [PATCH 100/118] default df --- bioconda_utils/build.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 619eaf2353..38510d5312 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -290,6 +290,8 @@ def build_recipes( if prelint: logger.info("Downloading channel information to use for linting") df = linting.channel_dataframe(channels=['conda-forge', 'defaults']) + else: + df = None dag, name2recipes = utils.get_dag(recipes, config=orig_config, blacklist=blacklist) recipe2name = {} From 7d35d0b68a2279da3d55144084b42a4d6037da6e Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 2 Jun 2018 15:31:22 -0400 Subject: [PATCH 101/118] be clear which recipe the problem 
is on --- bioconda_utils/utils.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 4306981e91..506bf42f9e 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -447,10 +447,13 @@ def get_dag(recipes, config, blacklist=None, restrict=True): recipes = list(recipes) metadata = [] for i, recipe in enumerate(sorted(recipes)): - meta = load_meta_fast(recipe) - metadata.append((meta, recipe)) - if i % 100 == 0: - logger.info("Inspected {} of {} recipes".format(i, len(recipes))) + try: + meta = load_meta_fast(recipe) + metadata.append((meta, recipe)) + if i % 100 == 0: + logger.info("Inspected {} of {} recipes".format(i, len(recipes))) + except: + raise ValueError('Problem inspecting {0}'.format(recipe)) if blacklist is None: blacklist = set() From b2fb76538028925c739d2973c92b2e1984bb6d6d Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 2 Jun 2018 15:38:03 -0400 Subject: [PATCH 102/118] add back in debug info on linting --- bioconda_utils/linting.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bioconda_utils/linting.py b/bioconda_utils/linting.py index 2308d54bb9..2ec6942386 100644 --- a/bioconda_utils/linting.py +++ b/bioconda_utils/linting.py @@ -204,6 +204,7 @@ def lint(recipes, df, exclude=None, registry=None): 'severity': 'ERROR', 'info': result}) continue + logger.debug('lint {}'.format(recipe)) # skips defined in commit message skip_for_this_recipe = set(skip_dict[recipe]) From b6763c5e38773c945c1b28a96037d71b54552783 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 2 Jun 2018 15:38:19 -0400 Subject: [PATCH 103/118] more generic check for compilers --- bioconda_utils/lint_functions.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index ab76f9b77b..e271d235ba 100644 --- a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -225,8 +225,9 @@ def 
has_windows_bat_file(recipe, metas, df): @lint_multiple_metas def should_be_noarch(recipe, meta, df): deps = _get_deps(meta) - no_compilers = sum(1 for i in deps if - i.startswith('toolchain_')) == 0 + no_compilers = all( + not dep.startswith(('clang_', 'clangxx_', 'gcc_', 'gxx_', 'gfortran_', + 'toolchain_')) for dep in deps) if ( no_compilers and ('python' in deps) and From 41f39c06607fe142d47e5ba244cd88444e5367cd Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sun, 3 Jun 2018 22:54:36 +0200 Subject: [PATCH 104/118] add conda_build_config files consistently --- bioconda_utils/build.py | 8 ++++---- bioconda_utils/docker_utils.py | 22 ++++++++++++++++------ bioconda_utils/utils.py | 16 ++++++++++++++++ 3 files changed, 36 insertions(+), 10 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 38510d5312..dc6ea9a460 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -158,10 +158,10 @@ def build( # we explicitly point to the meta.yaml, in order to keep # conda-build from building all subdirectories with utils.sandboxed_env(whitelisted_env): - cmd = CONDA_BUILD_CMD + build_args + channel_args + \ - ["-e", utils.load_conda_build_config().exclusive_config_file] + \ - ["-m"] + utils.load_conda_build_config().variant_config_files + \ - [os.path.join(recipe, 'meta.yaml')] + cmd = CONDA_BUILD_CMD + build_args + channel_args + for config_file in utils.get_conda_build_config_files(): + cmd.extend([config_file.arg, config_file.path]) + cmd += [os.path.join(recipe, 'meta.yaml')] logger.debug('command: %s', cmd) with utils.Progress(): utils.run(cmd, env=os.environ, mask=False) diff --git a/bioconda_utils/docker_utils.py b/bioconda_utils/docker_utils.py index a2cafc1f92..ea7a92f733 100644 --- a/bioconda_utils/docker_utils.py +++ b/bioconda_utils/docker_utils.py @@ -45,6 +45,8 @@ """ import os +import os.path +from shlex import quote import shutil import subprocess as sp import tempfile @@ -96,10 +98,10 @@ # The actual building... 
# we explicitly point to the meta.yaml, in order to keep # conda-build from building all subdirectories -conda build -e {self.container_staging}/conda_build_config.yaml {self.conda_build_args} {self.container_recipe}/meta.yaml 2>&1 +conda build {self.conda_build_args} {self.container_recipe}/meta.yaml 2>&1 # copy all built packages to the staging area -cp `conda build {self.container_recipe}/meta.yaml {self.conda_build_args} --output` {self.container_staging}/{arch} +cp `conda build {self.conda_build_args} {self.container_recipe}/meta.yaml --output` {self.container_staging}/{arch} # Ensure permissions are correct on the host. HOST_USER={self.user_info[uid]} chown $HOST_USER:$HOST_USER {self.container_staging}/{arch}/* @@ -337,14 +339,19 @@ def __init__( os.makedirs(pkg_dir) self.pkg_dir = pkg_dir - # Copy the conda build config to the staging directory that is + # Copy the conda build config files to the staging directory that is # visible in the container - shutil.copyfile(utils.load_conda_build_config().exclusive_config_file, - os.path.join(self.pkg_dir, - "conda_build_config.yaml")) + for i, config_file in enumerate(utils.get_conda_build_config_files()): + dst_file = self._get_config_path(self.pkg_dir, i, config_file) + shutil.copyfile(config_file.path, dst_file) self._build_image(image_build_dir) + def _get_config_path(self, staging_prefix, i, config_file): + src_basename = os.path.basename(config_file.path) + dst_basename = '_'.join(('conda_build_config', i, config_file.arg, src_basename)) + return os.path.join(staging_prefix, dst_basename) + def __del__(self): if not self.keep_image: self.cleanup() @@ -456,6 +463,9 @@ def build_recipe(self, recipe_dir, build_args, env, noarch=False): if not isinstance(build_args, str): raise ValueError('build_args must be str') self.conda_build_args = build_args + for i, config_file in enumerate(utils.get_conda_build_config_files()): + dst_file = self._get_config_path(self.container_staging, i, config_file) + 
self.conda_build_args += ' '.join((config_file.arg, quote(dst_file))) # Write build script to tempfile build_dir = os.path.realpath(tempfile.mkdtemp()) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 506bf42f9e..5aa1b01961 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -233,6 +233,22 @@ def load_conda_build_config(platform=None, trim_skip=True): return config +CondaBuildConfigFile = namedtuple('CondaBuildConfigFile', ( + 'arg', # either '-e' or '-m' + 'path', +)) + + +def get_conda_build_config_files(config=None): + if config is None: + config = load_conda_build_config() + # TODO: open PR upstream for conda-build to support multiple exclusive_config_files + for file_path in ([config.exclusive_config_file] if config.exclusive_config_file else []): + yield CondaBuildConfigFile('-e', file_path) + for file_path in (config.variant_config_files or []): + yield CondaBuildConfigFile('-m', file_path) + + def load_first_metadata(recipe, config=None, finalize=True): """ Returns just the first of possibly many metadata files. 
Used for when you From e0603ecb20c1c85b56e1b170fb9cf107efb00308 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sun, 3 Jun 2018 23:03:15 +0200 Subject: [PATCH 105/118] fix previous commit --- bioconda_utils/docker_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioconda_utils/docker_utils.py b/bioconda_utils/docker_utils.py index ea7a92f733..ff88ad2b56 100644 --- a/bioconda_utils/docker_utils.py +++ b/bioconda_utils/docker_utils.py @@ -349,7 +349,7 @@ def __init__( def _get_config_path(self, staging_prefix, i, config_file): src_basename = os.path.basename(config_file.path) - dst_basename = '_'.join(('conda_build_config', i, config_file.arg, src_basename)) + dst_basename = 'conda_build_config_{}_{}_{}'.format(i, config_file.arg, src_basename) return os.path.join(staging_prefix, dst_basename) def __del__(self): From 190ff4fd07362658690af5391316a56b433c5b15 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sun, 3 Jun 2018 23:20:15 +0200 Subject: [PATCH 106/118] fix previous commit again --- bioconda_utils/docker_utils.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bioconda_utils/docker_utils.py b/bioconda_utils/docker_utils.py index ff88ad2b56..7a0836873b 100644 --- a/bioconda_utils/docker_utils.py +++ b/bioconda_utils/docker_utils.py @@ -462,10 +462,11 @@ def build_recipe(self, recipe_dir, build_args, env, noarch=False): # template. 
if not isinstance(build_args, str): raise ValueError('build_args must be str') - self.conda_build_args = build_args + build_args_list = [build_args] for i, config_file in enumerate(utils.get_conda_build_config_files()): dst_file = self._get_config_path(self.container_staging, i, config_file) - self.conda_build_args += ' '.join((config_file.arg, quote(dst_file))) + build_args_list.extend([config_file.arg, quote(dst_file)]) + self.conda_build_args = ' '.join(build_args_list) # Write build script to tempfile build_dir = os.path.realpath(tempfile.mkdtemp()) From 9fdbc8f17e68f857fff36ab2a00a94a36c9455a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Thu, 14 Jun 2018 21:59:12 +0200 Subject: [PATCH 107/118] fix mulled upload: iterate over all generated images. --- bioconda_utils/build.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index dc6ea9a460..803e3eb1aa 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -426,7 +426,8 @@ def build_recipes( if not upload.anaconda_upload(pkg, label): failed_uploads.append(pkg) if mulled_upload_target and keep_mulled_test: - upload.mulled_upload(res.mulled_images, mulled_upload_target) + for img in res.mulled_images: + upload.mulled_upload(img, mulled_upload_target) # remove traces of the build purge() From 6c45c878c9c72f33d08b242b1840ffb141cb24dd Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Tue, 19 Jun 2018 18:55:52 +0200 Subject: [PATCH 108/118] build: fix collecting mulled_images --- bioconda_utils/build.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 803e3eb1aa..31a8f1f80c 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -188,12 +188,12 @@ def build( for pkg_path in pkg_paths: try: res = pkg_test.test_package(pkg_path, base_image=base_image) - - logger.info("TEST SUCCESS %s", recipe) - mulled_image = 
pkg_test.get_image_name(pkg_path) except sp.CalledProcessError as e: logger.error('TEST FAILED: %s', recipe) return BuildResult(False, None) + else: + logger.info("TEST SUCCESS %s", recipe) + mulled_images.append(pkg_test.get_image_name(pkg_path)) return BuildResult(True, mulled_images) From 3ab8023354425dfca43a18da0cbf05b7ef68c3a9 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Thu, 21 Jun 2018 18:49:24 +0200 Subject: [PATCH 109/118] requirements: require python >=3.6 --- bioconda_utils/bioconda_utils-requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index b03f6fce43..b90ba4c15f 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -22,3 +22,4 @@ six=1.11.* alabaster=0.7.* git=2.14.* conda-forge-pinning=2018.05.22 +python>=3.6 From d4701c68257f95e23fb8d7ab0bd68f5cacd6f32c Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sun, 24 Jun 2018 00:12:48 +0200 Subject: [PATCH 110/118] build: skip builds early based on num of existing builds in channel --- bioconda_utils/utils.py | 124 ++++++++++++++++++++++++++++------------ 1 file changed, 89 insertions(+), 35 deletions(-) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 5aa1b01961..ac977c8d7f 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -8,7 +8,7 @@ import sys import shutil import contextlib -from collections import Iterable, defaultdict, namedtuple +from collections import Counter, Iterable, defaultdict, namedtuple from itertools import product, chain, groupby import logging import pkg_resources @@ -627,6 +627,7 @@ def get_channel_repodata(channel='bioconda', platform=None): PackageKey = namedtuple('PackageKey', ('name', 'version', 'build_number')) +PackageBuild = namedtuple('PackageBuild', ('subdir', 'build_id')) class DivergentBuildsError(Exception): @@ -635,8 +636,8 @@ class 
DivergentBuildsError(Exception): def get_channel_packages(channel='bioconda', platform=None): """ - Retrieves the existing packages for a channel from conda.anaconda.org as a - dict where keys are PackageKey instances and values are sets of build strings. + Return a PackageKey -> set(PackageBuild) mapping. + Retrieves the existing packages for a channel from conda.anaconda.org. Parameters ---------- @@ -651,10 +652,12 @@ def get_channel_packages(channel='bioconda', platform=None): channel=channel, platform=platform) channel_packages = defaultdict(set) for repo in (repodata, noarch_repodata): + subdir = repo['info']['subdir'] for package in repo['packages'].values(): pkg_key = PackageKey( package['name'], package['version'], package['build_number']) - channel_packages[pkg_key].add(package['build']) + pkg_build = PackageBuild(subdir, package['build']) + channel_packages[pkg_key].add(pkg_build) channel_packages.default_factory = None return channel_packages @@ -790,7 +793,7 @@ def changed_since_master(recipe_folder): ] -def get_package_paths(recipe, channel_packages, force=False): +def _load_platform_metas(recipe, finalize=True): # check if package is noarch, if so, build only on linux # with temp_os, we can fool the MetaData if needed. 
platform = os.environ.get('OSTYPE', sys.platform) @@ -799,8 +802,82 @@ def get_package_paths(recipe, channel_packages, force=False): elif platform == "linux-gnu": platform = "linux" - metas = load_all_meta( - recipe, config=load_conda_build_config(platform=platform)) + config = load_conda_build_config(platform=platform) + return platform, load_all_meta(recipe, config=config, finalize=finalize) + + +def _meta_subdir(meta): + # logic extracted from conda_build.variants.bldpkg_path + return 'noarch' if meta.noarch or meta.noarch_python else meta.config.host_subdir + + +def _get_pkg_key_build_meta_map(metas): + key_build_meta = defaultdict(dict) + for meta in metas: + pkg_key = PackageKey(meta.name(), meta.version(), int(meta.build_number() or 0)) + pkg_build = PackageBuild(_meta_subdir(meta), meta.build_id()) + key_build_meta[pkg_key][pkg_build] = meta + key_build_meta.default_factory = None + return key_build_meta + + +def check_recipe_skippable(recipe, channel_packages, force=False): + """ + Return True if the same number of builds (per subdir) defined by the recipe + are already in channel_packages (and force is False). 
+ """ + if force: + return False + platform, metas = _load_platform_metas(recipe, finalize=False) + key_build_meta = _get_pkg_key_build_meta_map(metas) + num_new_pkg_builds = sum( + ( + Counter((pkg_key, pkg_build.subdir) for pkg_build in build_meta.keys()) + for pkg_key, build_meta in key_build_meta.items() + ), + Counter() + ) + num_existing_pkg_builds = sum( + ( + Counter( + (pkg_key, pkg_build.subdir) + for pkg_build in channel_packages.get(pkg_key, set()) + ) + for pkg_key in key_build_meta.keys() + ), + Counter() + ) + return num_new_pkg_builds == num_existing_pkg_builds + + +def _filter_existing_packages(metas, channel_packages): + new_metas = [] # MetaData instances of packages not yet in channel + existing_metas = [] # MetaData instances of packages already in channel + divergent_builds = set() # set of Dist (i.e., name-version-build) strings + + key_build_meta = _get_pkg_key_build_meta_map(metas) + for pkg_key, build_meta in key_build_meta.items(): + existing_pkg_builds = channel_packages.get(pkg_key, set()) + for pkg_build, meta in build_meta.items(): + if pkg_build not in existing_pkg_builds: + new_metas.append(meta) + else: + existing_metas.append(meta) + for divergent_build in (existing_pkg_builds - set(build_meta.keys())): + divergent_builds.add( + '-'.join((pkg_key.name, pkg_key.version, divergent_build.build_id))) + return new_metas, existing_metas, divergent_builds + + +def get_package_paths(recipe, channel_packages, force=False): + if check_recipe_skippable(recipe, channel_packages, force): + # NB: If we skip early here, we don't detect possible divergent builds. 
+ logger.info( + 'FILTER: not building recipe %s because ' + 'the same number of builds are in channel(s) and it is not forced.', + recipe) + return [] + platform, metas = _load_platform_metas(recipe, finalize=True) # The recipe likely defined skip: True if not metas: @@ -835,41 +912,18 @@ def get_package_paths(recipe, channel_packages, force=False): api.get_output_file_paths(meta) for meta in build_metas)) -def _filter_existing_packages(metas, channel_packages): - new_metas = [] # MetaData instances of packages not yet in channel - existing_metas = [] # MetaData instances of packages already in channel - divergent_builds = set() # set of Dist (i.e., name-version-build) strings - - recipe_pkgs = defaultdict(list) - for meta in metas: - pkg_key = PackageKey( - meta.name(), meta.version(), int(meta.build_number() or 0)) - recipe_pkgs[pkg_key].append(meta) - recipe_pkgs.default_factory = None - - for pkg_key, pkg_metas in recipe_pkgs.items(): - existing_build_ids = channel_packages.get(pkg_key, set()) - for meta in pkg_metas: - if meta.build_id() not in existing_build_ids: - new_metas.append(meta) - else: - existing_metas.append(meta) - pkg_build_ids = set(meta.build_id() for meta in pkg_metas) - for divergent_build in (existing_build_ids - pkg_build_ids): - divergent_builds.add( - '-'.join((pkg_key.name, pkg_key.version, divergent_build))) - return new_metas, existing_metas, divergent_builds - - def get_all_channel_packages(channels): + """ + Return a PackageKey -> set(PackageBuild) mapping. 
+ """ if channels is None: channels = [] all_channel_packages = defaultdict(set) for channel in channels: channel_packages = get_channel_packages(channel=channel) - for pkg_key, pkg_build_ids in channel_packages.items(): - all_channel_packages[pkg_key].update(pkg_build_ids) + for pkg_key, pkg_builds in channel_packages.items(): + all_channel_packages[pkg_key].update(pkg_builds) all_channel_packages.default_factory = None return all_channel_packages From 99782c2e3590d8c110ad42ceee3a43cffaf2bca3 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sun, 24 Jun 2018 12:15:13 +0200 Subject: [PATCH 111/118] build: topo-sort connected components before merging --- bioconda_utils/build.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 31a8f1f80c..12e1acfaf5 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -1,9 +1,12 @@ import subprocess as sp +from itertools import chain from collections import defaultdict, namedtuple import os import logging + import networkx as nx import pandas + from . import utils from . import docker_utils from . 
import pkg_test @@ -332,11 +335,13 @@ def build_recipes( logger.info("Nothing to be done.") return True # merge subdags of the selected chunk - subdag = dag.subgraph(chunks[subdag_i]) - # ensure that packages which need a build are built in the right order + subdag = dag.subgraph(chain.from_iterable( + nx.topological_sort(dag.subgraph(cc)) for cc in chunks[subdag_i] + )) + recipes = [recipe - for package in nx.topological_sort(subdag) + for package in subdag for recipe in name2recipes[package]] logger.info( From e033e47cddf74cd70a52bf9f25beef8fd2298982 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sun, 24 Jun 2018 14:25:55 +0200 Subject: [PATCH 112/118] docs: handle extra.notes being a list --- docs/source/templates/readme.rst_t | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/source/templates/readme.rst_t b/docs/source/templates/readme.rst_t index cdec5260e7..0a58a6fcc5 100644 --- a/docs/source/templates/readme.rst_t +++ b/docs/source/templates/readme.rst_t @@ -40,7 +40,13 @@ and update with:: {% if extra.notes %} Notes ----- +{%- if extra.notes is string %} {{ extra.notes | escape }} +{% else %} +{%- for note in extra.notes %} +- {{ note | escape }} +{% endfor -%} +{% endif -%} {% endif %} |docker| From 44565d0fcafff036892d57810dd9b1cd044febf8 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sun, 24 Jun 2018 17:18:12 +0200 Subject: [PATCH 113/118] build: handle UnsatisfiableError --- bioconda_utils/build.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 12e1acfaf5..295161b07e 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -4,6 +4,7 @@ import os import logging +from conda.exports import UnsatisfiableError import networkx as nx import pandas @@ -384,6 +385,15 @@ def build_recipes( for n in nx.algorithms.descendants(subdag, name): skip_dependent[n].append(recipe) continue + except UnsatisfiableError as e: + logger.error( + 'BUILD ERROR: ' + 'could not 
determine dependencies for recipe %s: %s', + recipe, e) + failed.append(recipe) + for n in nx.algorithms.descendants(subdag, name): + skip_dependent[n].append(recipe) + continue if not pkg_paths: logger.info("Nothing to be done for recipe %s", recipe) continue From 86fc487549833718b3f647ca821659e07c95daf1 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sun, 24 Jun 2018 17:40:39 +0200 Subject: [PATCH 114/118] fix import --- bioconda_utils/build.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 295161b07e..fc7a1b8900 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -4,7 +4,9 @@ import os import logging -from conda.exports import UnsatisfiableError +# TODO: UnsatisfiableError is not yet in exports for conda 4.5.4 +# from conda.exports import UnsatisfiableError +from conda.exceptions import UnsatisfiableError import networkx as nx import pandas From 4f6b405bb4dfebcc1d23b964320acedef45caeab Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Tue, 26 Jun 2018 19:32:36 +0200 Subject: [PATCH 115/118] [WIP] update to conda-build 3.10.9 (#312) * build: rename --prelint to --lint, add --lint-only/--lint-exclude args * requirements: update to conda-build 3.10.9 * build: remove default value for --lint-exclude * requirements: require python >=3.6 * meta.get_section might return None (conda-build 3.10.9 compat) * silence most flake8 nagging * bioconductor: fix missing newline at EOF for post-link.sh * fix typo * use meta.get_value preferably * make flake8 shut up already * meta.get_value only works for path with depth 2 * lint: fix should_not_be_noarch for conda_build >3.10.3 * lint._has_compilers: also check for old compiler packages * lint._get_deps: fix sections being unset on default * docs: use only one job for debugging --- .circleci/build-docs.sh | 4 +- .../bioconda_utils-requirements.txt | 2 +- bioconda_utils/bioconductor_skeleton.py | 57 +- bioconda_utils/build.py | 51 
+- bioconda_utils/cli.py | 62 +- bioconda_utils/cran_skeleton.py | 8 +- bioconda_utils/docker_utils.py | 7 +- bioconda_utils/github_integration.py | 12 +- bioconda_utils/lint_functions.py | 47 +- bioconda_utils/linting.py | 55 +- bioconda_utils/upload.py | 2 +- bioconda_utils/utils.py | 35 +- docs/source/conf.py | 2 +- docs/source/generate_docs.py | 7 +- setup.cfg | 4 + setup.py | 2 +- test/helpers.py | 1 - test/test_bioconductor_skeleton.py | 13 +- test/test_linting.py | 1564 +++++++++-------- test/test_pkg_test.py | 2 +- test/test_utils.py | 24 +- 21 files changed, 1039 insertions(+), 922 deletions(-) diff --git a/.circleci/build-docs.sh b/.circleci/build-docs.sh index db88ade44a..2159638c4f 100755 --- a/.circleci/build-docs.sh +++ b/.circleci/build-docs.sh @@ -58,7 +58,9 @@ rm -r * # build docs and copy over to tmpdir cd ${DOCSOURCE} -make clean html SPHINXOPTS="-j2" 2>&1 | grep -v "WARNING: nonlocal image URL found:" +# TODO: reenable "-j2" when docs build fine +make clean html 2>&1 | grep -v "WARNING: nonlocal image URL found:" +# make clean html SPHINXOPTS="-j2" 2>&1 | grep -v "WARNING: nonlocal image URL found:" cp -r ${DOCHTML}/* $STAGING # commit and push diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index b90ba4c15f..cf2a0e9ef0 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -2,7 +2,7 @@ anaconda-client=1.6.* argh=0.26.* beautifulsoup4=4.6.* conda=4.5.4 -conda-build=3.10.3 +conda-build=3.10.9 galaxy-lib>=18.5.5 jinja2=2.10.* jsonschema=2.6.* diff --git a/bioconda_utils/bioconductor_skeleton.py b/bioconda_utils/bioconductor_skeleton.py index d06c99391e..7bf55eae45 100755 --- a/bioconda_utils/bioconductor_skeleton.py +++ b/bioconda_utils/bioconductor_skeleton.py @@ -5,15 +5,16 @@ import configparser from textwrap import dedent import tarfile -import pyaml import hashlib import os import re -import bs4 from collections import 
OrderedDict import logging + +import bs4 +import pyaml import requests -from colorlog import ColoredFormatter + from . import utils from . import cran_skeleton @@ -200,7 +201,10 @@ def find_best_bioc_version(package, version): for bioc_version in bioconductor_versions(): for kind, func in zip( ('package', 'data'), - (bioconductor_tarball_url, bioconductor_annotation_data_url, bioconductor_experiment_data_url) + ( + bioconductor_tarball_url, bioconductor_annotation_data_url, + bioconductor_experiment_data_url, + ), ): url = func(package, version, bioc_version) if requests.head(url).status_code == 200: @@ -261,8 +265,8 @@ def __init__(self, package, bioc_version=None, pkg_version=None): htmls = { 'regular_package': os.path.join( - base_url, self.bioc_version, 'bioc', 'html', package - + '.html'), + base_url, self.bioc_version, 'bioc', 'html', + package + '.html'), 'annotation_package': os.path.join( base_url, self.bioc_version, 'data', 'annotation', 'html', package + '.html'), @@ -310,7 +314,6 @@ def __init__(self, package, bioc_version=None, pkg_version=None): self.depends_on_gcc = False - @property def bioarchive_url(self): """ @@ -337,7 +340,8 @@ def cargoport_url(self): elif response.status_code == 200: return url else: - raise PageNotFoundError("Unexpected error: {0.status_code} ({0.reason})".format(response)) + raise PageNotFoundError( + "Unexpected error: {0.status_code} ({0.reason})".format(response)) @property def bioconductor_tarball_url(self): @@ -396,7 +400,8 @@ def tarball_url(self): find_best_bioc_version(self.package, self.version) if self._tarball_url is None: - raise ValueError("No working URLs found for this version in any bioconductor version") + raise ValueError( + "No working URLs found for this version in any bioconductor version") return self._tarball_url @property @@ -428,7 +433,8 @@ def cached_tarball(self): if response.status_code == 200: fout.write(response.content) else: - raise PageNotFoundError('Unexpected error {0.status_code} 
({0.reason})'.format(response)) + raise PageNotFoundError( + 'Unexpected error {0.status_code} ({0.reason})'.format(response)) shutil.move(tmp, fn) self._cached_tarball = fn return fn @@ -479,7 +485,6 @@ def depends(self): except KeyError: return [] - @property def linkingto(self): """ @@ -490,7 +495,6 @@ def linkingto(self): except KeyError: return [] - def _parse_dependencies(self, items): """ The goal is to go from @@ -595,15 +599,19 @@ def dependencies(self): # Modified from conda_build.skeletons.cran # with tarfile.open(self.cached_tarball) as tf: - need_f = any([f.name.lower().endswith(('.f', '.f90', '.f77')) for f in tf]) - need_c = True if need_f else \ - any([f.name.lower().endswith('.c') for f in tf]) - need_cxx = any([f.name.lower().endswith(('.cxx', '.cpp', '.cc', '.c++')) - for f in tf]) - need_autotools = any([f.name.lower().endswith('/configure') for f in tf]) - need_make = True if any((need_autotools, need_f, need_cxx, need_c)) else \ - any([f.name.lower().endswith(('/makefile', '/makevars')) - for f in tf]) + need_f = any(f.name.lower().endswith(('.f', '.f90', '.f77')) for f in tf) + if need_f: + need_c = True + else: + need_c = any(f.name.lower().endswith('.c') for f in tf) + need_cxx = any( + f.name.lower().endswith(('.cxx', '.cpp', '.cc', '.c++')) for f in tf) + need_autotools = any(f.name.lower().endswith('/configure') for f in tf) + if any((need_autotools, need_f, need_cxx, need_c)): + need_make = True + else: + need_make = any( + f.name.lower().endswith(('/makefile', '/makevars')) for f in tf) else: need_c = need_cxx = need_f = need_autotools = need_make = False @@ -818,7 +826,9 @@ def write_recipe_recursive(proj, seen_dependencies, recipe_dir, config, force, continue if conda_name_without_version in seen_dependencies: - logger.debug("{} already created or in existing channels, skipping".format(conda_name_without_version)) + logger.debug( + "{} already created or in existing channels, skipping" + .format(conda_name_without_version)) continue 
seen_dependencies.update([conda_name_without_version]) @@ -1018,7 +1028,8 @@ def write_recipe(package, recipe_dir, config, force=False, bioc_version=None, # Install and clean up R CMD INSTALL --library=$PREFIX/lib/R/library $TARBALL rm $TARBALL - rmdir $STAGING""") + rmdir $STAGING + """) # noqa: E501: line too long with open(os.path.join(recipe_dir, 'post-link.sh'), 'w') as fout: fout.write(dedent(post_link_template)) pre_unlink_template = "R CMD REMOVE --library=$PREFIX/lib/R/library/ {0}\n".format(package) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index fc7a1b8900..e0f3d2cb6a 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -43,8 +43,7 @@ def build( channels=None, docker_builder=None, _raise_error=False, - prelint=False, - df=None, + lint_args=None, ): """ Build a single recipe for a single env @@ -81,17 +80,13 @@ def build( Instead of returning a failed build result, raise the error instead. Used for testing. - prelint : bool - If True, then apply linting just before building. `df` should probably - be provided as well. - - df : pandas.DataFrame - Dataframe of channel info, likely from linting.channel_dataframe() + lint_args : linting.LintArgs | None + If not None, then apply linting just before building. 
""" - if prelint: + if lint_args is not None: logger.info('Linting recipe') - report = linting.lint([recipe], df) + report = linting.lint([recipe], lint_args) if report is not None: summarized = pandas.DataFrame( dict(failed_tests=report.groupby('recipe')['check'].agg('unique'))) @@ -111,10 +106,11 @@ def build( logger.info("BUILD START %s", recipe) - # --no-build-id is needed for some very long package names that triggers the 89 character limits - # this option can be removed as soon as all packages are rebuild with the 255 character limit - # Moreover, --no-build-id will block us from using parallel builds in conda-build 2.x - # build_args = ["--no-build-id"] + # --no-build-id is needed for some very long package names that triggers + # the 89 character limits this option can be removed as soon as all + # packages are rebuild with the 255 character limit + # Moreover, --no-build-id will block us from using parallel builds in + # conda-build 2.x build_args = ["--no-build-id"] # use global variant config file (contains pinnings) build_args = ["--skip-existing"] @@ -186,14 +182,13 @@ def build( logger.info('TEST START via mulled-build %s', recipe) - use_base_image = meta.get_section('extra').get('container', {})\ - .get('extended-base') + use_base_image = meta.get_value('extra/container', {}).get('extended-base', False) base_image = 'bioconda/extended-base-image' if use_base_image else None mulled_images = [] for pkg_path in pkg_paths: try: - res = pkg_test.test_package(pkg_path, base_image=base_image) + pkg_test.test_package(pkg_path, base_image=base_image) except sp.CalledProcessError as e: logger.error('TEST FAILED: %s', recipe) return BuildResult(False, None) @@ -215,7 +210,7 @@ def build_recipes( anaconda_upload=False, mulled_upload_target=None, check_channels=None, - prelint=False, + lint_args=None, ): """ Build one or many bioconda packages. @@ -264,6 +259,8 @@ def build_recipes( `config['channels'][0]`). If this list is empty, then do not check any channels. 
+ lint_args : linting.LintArgs | None + If not None, then apply linting just before building. """ orig_config = config config = utils.load_config(config) @@ -293,11 +290,16 @@ def build_recipes( logger.debug('recipes: %s', recipes) - if prelint: - logger.info("Downloading channel information to use for linting") - df = linting.channel_dataframe(channels=['conda-forge', 'defaults']) - else: - df = None + if lint_args is not None: + df = lint_args.df + if df is None: + logger.info("Downloading channel information to use for linting") + df = linting.channel_dataframe(channels=['conda-forge', 'defaults']) + lint_exclude = (lint_args.exclude or ()) + if 'already_in_bioconda' not in lint_exclude: + lint_exclude = tuple(lint_exclude) + ('already_in_bioconda',) + lint_args = linting.LintArgs(lint_args.df, lint_exclude, lint_args.registry) + lint_args = linting.LintArgs(df, lint_exclude, lint_args.registry) dag, name2recipes = utils.get_dag(recipes, config=orig_config, blacklist=blacklist) recipe2name = {} @@ -425,8 +427,7 @@ def build_recipes( force=force, channels=config['channels'], docker_builder=docker_builder, - df=df, - prelint=prelint, + lint_args=lint_args, ) all_success &= res.success diff --git a/bioconda_utils/cli.py b/bioconda_utils/cli.py index d4169b39a8..48e4128345 100644 --- a/bioconda_utils/cli.py +++ b/bioconda_utils/cli.py @@ -38,7 +38,8 @@ def select_recipes(packages, git_range, recipe_folder, config_filename, config, if os.path.basename(f) in ['meta.yaml', 'build.sh'] and os.path.exists(f) ] - logger.info('Recipes to consider according to git: \n{}'.format('\n '.join(changed_recipes))) + logger.info( + 'Recipes to consider according to git: \n{}'.format('\n '.join(changed_recipes))) else: changed_recipes = [] @@ -98,15 +99,18 @@ def duplicates( target_channel = channels[0] if strict_version: - get_spec = lambda pkg: (pkg['name'], pkg['version']) + def get_spec(pkg): + return (pkg['name'], pkg['version']) if not remove and not url: print('name', 
'version', 'channels', sep='\t') elif strict_build: - get_spec = lambda pkg: (pkg['name'], pkg['version'], pkg['build']) + def get_spec(pkg): + return (pkg['name'], pkg['version'], pkg['build']) if not remove and not url: print('name', 'version', 'build', 'channels', sep='\t') else: - get_spec = lambda pkg: pkg['name'] + def get_spec(pkg): + return pkg['name'] if not remove and not url: print('name', 'channels', sep='\t') @@ -234,12 +238,8 @@ def lint(recipe_folder, config, packages="*", cache=None, list_funcs=False, _recipes = select_recipes(packages, git_range, recipe_folder, config_filename, config, force) - report = linting.lint( - _recipes, - df=df, - exclude=exclude, - registry=registry, - ) + lint_args = linting.LintArgs(df=df, exclude=exclude, registry=registry) + report = linting.lint(_recipes, lint_args) # The returned dataframe is in tidy format; summarize a bit to get a more # reasonable log @@ -309,10 +309,14 @@ def lint(recipe_folder, config, packages="*", cache=None, list_funcs=False, @arg('--keep-image', action='store_true', help='''After building recipes, the created Docker image is removed by default to save disk space. Use this argument to disable this behavior.''') -@arg('--prelint', action='store_true', help='''Just before each recipe, apply +@arg('--lint', '--prelint', action='store_true', help='''Just before each recipe, apply the linting functions to it. This can be used as an alternative to linting all recipes before any building takes place with the `bioconda-utils lint` command.''') +@arg('--lint-only', nargs='+', + help='''Only run this linting function. Can be used multiple times.''') +@arg('--lint-exclude', nargs='+', + help='''Exclude this linting function. 
Can be used multiple times.''') def build( recipe_folder, config, @@ -328,7 +332,9 @@ def build( anaconda_upload=False, mulled_upload_target=None, keep_image=False, - prelint=False, + lint=False, + lint_only=None, + lint_exclude=None, ): utils.setup_logger('bioconda_utils', loglevel) @@ -378,6 +384,22 @@ def build( else: docker_builder = None + if lint: + registry = lint_functions.registry + if lint_only is not None: + registry = tuple(func for func in registry if func.__name__ in lint_only) + if len(registry) == 0: + sys.stderr.write('No valid linting functions selected, exiting.\n') + sys.exit(1) + df = linting.channel_dataframe() + lint_args = linting.LintArgs(df=df, exclude=lint_exclude, registry=registry) + else: + lint_args = None + if lint_only is not None: + logger.warning('--lint-only has no effect unless --lint is specified.') + if lint_exclude is not None: + logger.warning('--lint-exclude has no effect unless --lint is specified.') + success = build_recipes( recipe_folder, config=config, @@ -388,7 +410,7 @@ def build( docker_builder=docker_builder, anaconda_upload=anaconda_upload, mulled_upload_target=mulled_upload_target, - prelint=prelint, + lint_args=lint_args, ) exit(0 if success else 1) @@ -456,7 +478,10 @@ def dag(recipe_folder, config, packages="*", format='gml', hide_singletons=False effect if --reverse-dependencies, which always looks just in the recipe dir.''') @arg('--loglevel', help="Set logging level (debug, info, warning, error, critical)") -def dependent(recipe_folder, config, restrict=False, dependencies=None, reverse_dependencies=None, loglevel='warning'): +def dependent( + recipe_folder, config, restrict=False, dependencies=None, reverse_dependencies=None, + loglevel='warning', +): """ Print recipes dependent on a package """ @@ -495,7 +520,7 @@ def dependent(recipe_folder, config, restrict=False, dependencies=None, reverse_ with the specified version in --pkg-version, or if --pkg-version not specified, then finds the the latest package 
version in the latest Bioconductor version""") -@arg('--loglevel', help='Log level') +@arg('--loglevel', help='Log level') @arg('--recursive', action='store_true', help="""Creates the recipes for all Bioconductor and CRAN dependencies of the specified package.""") @arg('--skip-if-in-channels', nargs='*', help="""When --recursive is used, it will build @@ -527,7 +552,7 @@ def bioconductor_skeleton( utils.setup_logger('bioconda_utils', loglevel) seen_dependencies = set() - written = _bioconductor_skeleton.write_recipe( + _bioconductor_skeleton.write_recipe( package, recipe_folder, config, force=force, bioc_version=bioc_version, pkg_version=pkg_version, versioned=versioned, recursive=recursive, seen_dependencies=seen_dependencies, @@ -608,4 +633,7 @@ def pypi_check(recipe_folder, config, loglevel='info', packages='*', only_out_of def main(): - argh.dispatch_commands([build, dag, dependent, lint, duplicates, bioconductor_skeleton, pypi_check, clean_cran_skeleton]) + argh.dispatch_commands([ + build, dag, dependent, lint, duplicates, + bioconductor_skeleton, pypi_check, clean_cran_skeleton, + ]) diff --git a/bioconda_utils/cran_skeleton.py b/bioconda_utils/cran_skeleton.py index 70b4682f1f..c4225b7574 100644 --- a/bioconda_utils/cran_skeleton.py +++ b/bioconda_utils/cran_skeleton.py @@ -3,13 +3,13 @@ conda-forge requirements. 
""" -import subprocess as sp import os -import logging import re from itertools import zip_longest import argparse + from conda_build.api import skeletonize + from .utils import run, setup_logger logger = setup_logger(__name__) @@ -40,7 +40,6 @@ win32_string = 'number: 0\n skip: true # [win32]' - def write_recipe(package, recipe_dir='.', recursive=False, force=False, no_windows=False, **kwargs): """ @@ -91,7 +90,8 @@ def write_recipe(package, recipe_dir='.', recursive=False, force=False, return try: - skeletonize(package, repo='cran', output_dir=recipe_dir, version=None, recursive=recursive) + skeletonize( + package, repo='cran', output_dir=recipe_dir, version=None, recursive=recursive) clean_skeleton_files( package=os.path.join(recipe_dir, 'r-' + package.lower()), no_windows=no_windows) diff --git a/bioconda_utils/docker_utils.py b/bioconda_utils/docker_utils.py index 7a0836873b..63788d1b8f 100644 --- a/bioconda_utils/docker_utils.py +++ b/bioconda_utils/docker_utils.py @@ -105,7 +105,7 @@ # Ensure permissions are correct on the host. 
HOST_USER={self.user_info[uid]} chown $HOST_USER:$HOST_USER {self.container_staging}/{arch}/* -""" +""" # noqa: E501,E122: line too long, continuation line missing indentation or outdented # ---------------------------------------------------------------------------- @@ -123,7 +123,8 @@ FROM {self.docker_base_image} {self.proxies} RUN /opt/conda/bin/conda install -y conda={conda_ver} conda-build={conda_build_ver} -""" +""" # noqa: E122 continuation line missing indentation or outdented + class DockerCalledProcessError(sp.CalledProcessError): pass @@ -485,7 +486,7 @@ def build_recipe(self, recipe_dir, build_args, env, noarch=False): env_list.append('{0}={1}'.format(k, v)) env_list.append('-e') - env_list.append('{0}={1}'.format('HOST_USER_ID',self.user_info['uid'])) + env_list.append('{0}={1}'.format('HOST_USER_ID', self.user_info['uid'])) cmd = [ 'docker', 'run', diff --git a/bioconda_utils/github_integration.py b/bioconda_utils/github_integration.py index 9876c419b8..d50a1f253d 100644 --- a/bioconda_utils/github_integration.py +++ b/bioconda_utils/github_integration.py @@ -1,6 +1,8 @@ -import github import os +import github + + def _n(x): """ Easy conversion of None to NotSet object used by PyGithub; otherwise pass @@ -31,7 +33,7 @@ def push_comment(user, repo, pull_request_number, msg): user = g.get_user(user) repo = user.get_repo(repo) pr = repo.get_pull(pull_request_number) - + return pr.create_issue_comment(msg) @@ -75,7 +77,6 @@ def update_status(user, repo, commit, state, context=None, description=None, target_url=_n(target_url)) - if __name__ == "__main__": import argparse ap = argparse.ArgumentParser() @@ -84,4 +85,7 @@ def update_status(user, repo, commit, state, context=None, description=None, ap.add_argument('--context') ap.add_argument('--description') args = ap.parse_args() - print(update_status('bioconda', 'bioconda-recipes', args.commit, args.state, args.context, args.description)) + status = update_status( + 'bioconda', 'bioconda-recipes', 
args.commit, args.state, args.context, args.description, + ) + print(status) diff --git a/bioconda_utils/lint_functions.py b/bioconda_utils/lint_functions.py index e271d235ba..cfa132cab2 100644 --- a/bioconda_utils/lint_functions.py +++ b/bioconda_utils/lint_functions.py @@ -36,19 +36,21 @@ def _get_deps(meta, section=None): If None, returns all dependencies. Otherwise can be a string or list of options [build, host, run, test] to return section-specific dependencies. """ + def get_name(dep): + return dep.split()[0] - get_name = lambda dep: dep.split()[0] - - reqs = meta.get_section('requirements') + reqs = (meta.get_section('requirements') or {}) if reqs is None: return [] if section is None: sections = ['build', 'host', 'run', 'test'] - if isinstance(section, str): + elif isinstance(section, str): sections = [section] + else: + sections = section deps = [] for s in sections: - dep = reqs.get(s, []) + dep = (reqs.get(s) or []) if dep: deps += [get_name(d) for d in dep] return deps @@ -71,6 +73,15 @@ def _has_preprocessing_selector(recipe): return True +def _has_compilers(meta): + build_deps = _get_deps(meta, ('build', 'host')) + return any( + dep in {'gcc', 'llvm', 'clangdev', 'llvmdev'} or + dep.startswith(('clang_', 'clangxx_', 'gcc_', 'gxx_', 'gfortran_', 'toolchain_')) + for dep in build_deps + ) + + def lint_multiple_metas(lint_function): def lint_metas(recipe, metas, df, *args, **kwargs): lint = partial(lint_function, recipe) @@ -81,6 +92,7 @@ def lint_metas(recipe, metas, df, *args, **kwargs): lint_metas.__name__ = lint_function.__name__ return lint_metas + @lint_multiple_metas def in_other_channels(recipe, meta, df): """ @@ -101,8 +113,7 @@ def already_in_bioconda(recipe, meta, df): Does the package exist in bioconda? 
""" results = _subset_df(recipe, meta, df) - build_section = meta.get_section('build') - build_number = int(build_section.get('number', 0)) + build_number = int(meta.get_value('build/number', 0)) build_results = results[results.build_number == build_number] channels = set(build_results.channel) if 'bioconda' in channels: @@ -225,18 +236,15 @@ def has_windows_bat_file(recipe, metas, df): @lint_multiple_metas def should_be_noarch(recipe, meta, df): deps = _get_deps(meta) - no_compilers = all( - not dep.startswith(('clang_', 'clangxx_', 'gcc_', 'gxx_', 'gfortran_', - 'toolchain_')) for dep in deps) if ( - no_compilers and + (not _has_compilers(meta)) and ('python' in deps) and # This will also exclude recipes with skip sections # which is a good thing, because noarch also implies independence of # the python version. not _has_preprocessing_selector(recipe) ) and ( - 'noarch' not in meta.get_section('build') + 'noarch' not in (meta.get_section('build') or {}) ): return { 'should_be_noarch': True, @@ -246,12 +254,11 @@ def should_be_noarch(recipe, meta, df): @lint_multiple_metas def should_not_be_noarch(recipe, meta, df): - deps = _get_deps(meta) if ( - ('gcc' in deps) or - meta.get_section('build').get('skip', False) in ["true", "True"] + _has_compilers(meta) or + meta.get_value('build/skip', False) ) and ( - 'noarch' in meta.get_section('build') + 'noarch' in (meta.get_section('build') or {}) ): print("error") return { @@ -271,7 +278,7 @@ def setup_py_install_args(recipe, meta, df): 'to setup.py command'), } - script_line = meta.get_section('build').get('script', '') + script_line = meta.get_value('build/script', '') if ( 'setup.py install' in script_line and '--single-version-externally-managed' not in script_line @@ -293,7 +300,7 @@ def setup_py_install_args(recipe, meta, df): @lint_multiple_metas def invalid_identifiers(recipe, meta, df): try: - identifiers = meta.get_section('extra').get('identifiers', []) + identifiers = meta.get_value('extra/identifiers', []) if 
not isinstance(identifiers, list): return {'invalid_identifiers': True, 'fix': 'extra:identifiers must hold a list of identifiers'} @@ -363,6 +370,7 @@ def compilers_must_be_in_build(recipe, meta, df): 'only go in the build: section') } + def bioconductor_37(recipe, meta, df): for line in open(os.path.join(recipe, 'meta.yaml')): if ('{% set bioc = "3.7" %}' in line) or ('{% set bioc = "release" %}' in line): @@ -371,6 +379,7 @@ def bioconductor_37(recipe, meta, df): 'fix': 'Need to wait until R 3.5 conda package is available', } + registry = ( in_other_channels, @@ -396,5 +405,5 @@ def bioconductor_37(recipe, meta, df): should_not_use_fn, should_use_compilers, compilers_must_be_in_build, - bioconductor_37 + bioconductor_37, ) diff --git a/bioconda_utils/linting.py b/bioconda_utils/linting.py index 2ec6942386..535f48b8eb 100644 --- a/bioconda_utils/linting.py +++ b/bioconda_utils/linting.py @@ -1,12 +1,11 @@ import os import re import itertools -from collections import defaultdict +from collections import defaultdict, namedtuple import pandas as pd import numpy as np import ruamel_yaml as yaml -import jinja2 from . import utils from . import lint_functions @@ -75,6 +74,30 @@ def not_in_bioconda(x): """ +class LintArgs(namedtuple('LintArgs', ( + 'df', 'exclude', 'registry', +))): + """ + df : pandas.DataFrame + Dataframe containing channel data, typically as output from + `channel_dataframe()` + + exclude : list + List of function names in `registry` to skip globally. When running on + CI, this will be merged with anything else detected from the commit + message or LINT_SKIP environment variable using the special string + "[skip lint for ]". While those other + mechanisms define skipping on a recipe-specific basis, this argument + can be used to skip tests for all recipes. Use sparingly. + + registry : list or tuple + List of functions to apply to each recipe. If None, defaults to + `lint_functions.registry`. 
+ """ + def __new__(cls, df, exclude=None, registry=None): + return super().__new__(cls, df, exclude, registry) + + def channel_dataframe(cache=None, channels=['bioconda', 'conda-forge', 'defaults']): """ @@ -120,7 +143,7 @@ def channel_dataframe(cache=None, channels=['bioconda', 'conda-forge', return df -def lint(recipes, df, exclude=None, registry=None): +def lint(recipes, lint_args): """ Parameters ---------- @@ -128,27 +151,11 @@ def lint(recipes, df, exclude=None, registry=None): recipes : list List of recipes to lint - config : str, dict - Used to pass any necessary environment variables (CONDA_BOOST, etc) to - meta.yaml files. If str, path to config file. If dict, parsed version - of the config file. - - df : pandas.DataFrame - Dataframe containing channel data, typically as output from - `channel_dataframe()` - - exclude : list - List of function names in `registry` to skip globally. When running on - CI, this will be merged with anything else detected from the commit - message or LINT_SKIP environment variable using the special string - "[skip lint for ]". While those other - mechanisms define skipping on a recipe-specific basis, this argument - can be used to skip tests for all recipes. Use sparingly. - - registry : list or tuple - List of functions to apply to each recipe. If None, defaults to - `lint_functions.registry`. 
+ lint_args : LintArgs """ + df = lint_args.df + exclude = lint_args.exclude + registry = lint_args.registry if registry is None: registry = lint_functions.registry @@ -211,7 +218,7 @@ def lint(recipes, df, exclude=None, registry=None): # skips defined in meta.yaml for meta in metas: - persistent = meta.get_section('extra').get('skip-lints', []) + persistent = meta.get_value('extra/skip-lints', []) skip_for_this_recipe.update(persistent) for func in registry: diff --git a/bioconda_utils/upload.py b/bioconda_utils/upload.py index 6e52e26416..52ed1e2182 100644 --- a/bioconda_utils/upload.py +++ b/bioconda_utils/upload.py @@ -40,7 +40,7 @@ def anaconda_upload(package, token=None, label=None): logger.info("UPLOAD uploading package %s", package) try: cmds = ["anaconda", "-t", token, 'upload', package] + label_arg - p = utils.run(cmds, mask=[token]) + utils.run(cmds, mask=[token]) logger.info("UPLOAD SUCCESS: uploaded package %s", package) return True diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index ac977c8d7f..51da29f44d 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -11,17 +11,17 @@ from collections import Counter, Iterable, defaultdict, namedtuple from itertools import product, chain, groupby import logging -import pkg_resources -import networkx as nx -import requests -from jsonschema import validate import datetime -from distutils.version import LooseVersion from threading import Event, Thread from pathlib import PurePath from conda_build import api from conda.exports import VersionOrder +import pkg_resources +import networkx as nx +import requests +from jsonschema import validate +from distutils.version import LooseVersion import yaml import jinja2 from jinja2 import Environment, PackageLoader @@ -173,7 +173,6 @@ def load_all_meta(recipe, config=None, finalize=True): )] - def load_meta_fast(recipe): """ Given a package name, find the current meta.yaml file, parse it, and return @@ -190,12 +189,13 @@ def 
load_meta_fast(recipe): class SilentUndefined(jinja2.Undefined): def _fail_with_undefined_error(self, *args, **kwargs): return "" + __add__ = __radd__ = __mul__ = __rmul__ = __div__ = __rdiv__ = \ - __truediv__ = __rtruediv__ = __floordiv__ = __rfloordiv__ = \ - __mod__ = __rmod__ = __pos__ = __neg__ = __call__ = \ - __getitem__ = __lt__ = __le__ = __gt__ = __ge__ = __int__ = \ - __float__ = __complex__ = __pow__ = __rpow__ = \ - _fail_with_undefined_error + __truediv__ = __rtruediv__ = __floordiv__ = __rfloordiv__ = \ + __mod__ = __rmod__ = __pos__ = __neg__ = __call__ = \ + __getitem__ = __lt__ = __le__ = __gt__ = __ge__ = __int__ = \ + __float__ = __complex__ = __pow__ = __rpow__ = \ + _fail_with_undefined_error pth = os.path.join(recipe, 'meta.yaml') jinja_env = jinja2.Environment(undefined=SilentUndefined) @@ -421,11 +421,10 @@ def get_deps(recipe=None, meta=None, build=True): all_deps = set() for meta in metadata: - reqs = meta.get_section('requirements') if build: - deps = reqs.get('build', []) + deps = meta.get_value('requirements/build', []) else: - deps = reqs.get('run', []) + deps = meta.get_value('requirements/run', []) all_deps.update(dep.split()[0] for dep in deps) return all_deps @@ -468,7 +467,7 @@ def get_dag(recipes, config, blacklist=None, restrict=True): metadata.append((meta, recipe)) if i % 100 == 0: logger.info("Inspected {} of {} recipes".format(i, len(recipes))) - except: + except Exception: raise ValueError('Problem inspecting {0}'.format(recipe)) if blacklist is None: blacklist = set() @@ -973,11 +972,13 @@ def load_config(path): validate_config(path) if isinstance(path, dict): + def relpath(p): + return p config = path - relpath = lambda p: p else: + def relpath(p): + return os.path.join(os.path.dirname(path), p) config = yaml.load(open(path)) - relpath = lambda p: os.path.join(os.path.dirname(path), p) def get_list(key): # always return empty list, also if NoneType is defined in yaml diff --git a/docs/source/conf.py 
b/docs/source/conf.py index 91dcda7b93..2c070cc9b2 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -23,7 +23,7 @@ sys.path.insert(0, os.path.abspath('.')) def setup(app): - app.add_stylesheet("custom.css") + app.add_stylesheet("custom.css") # -- Buld READMEs for all recipes ----------------------------------------- #import .generate_docs diff --git a/docs/source/generate_docs.py b/docs/source/generate_docs.py index 3093e9b45e..d41f7f92d9 100644 --- a/docs/source/generate_docs.py +++ b/docs/source/generate_docs.py @@ -80,7 +80,7 @@ def fmt(text): return "{0}: :{0}:`{1}`".format(*text) assert isinstance(text, list), "identifiers have to be given as list" - + return list(map(fmt, text)) @@ -210,15 +210,14 @@ def generate_readme(folder, repodata, renderer): logger.error("Failed to parse recipe {}".format(recipe)) raise e - name = metadata.name() versions_in_channel = repodata.get_versions(name) # Format the README template_options = { 'name': name, - 'about': metadata.get_section('about'), - 'extra': metadata.get_section('extra'), + 'about': (metadata.get_section('about') or {}), + 'extra': (metadata.get_section('extra') or {}), 'versions': versions_in_channel, 'gh_recipes': 'https://github.com/bioconda/bioconda-recipes/tree/master/recipes/', 'recipe_path': op.dirname(op.relpath(metadata.meta_path, RECIPE_DIR)), diff --git a/setup.cfg b/setup.cfg index 95189f284f..4689cd8cd5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -9,3 +9,7 @@ parentdir_prefix = bioconda-utils- [tool:pytest] markers= long_running: mark as long-running test (may be executed separately) + +[flake8] +max_line_length = 99 +exclude = .git,__pycache__,docs/source/conf.py diff --git a/setup.py b/setup.py index b6d1e234c6..58b3d8d6b7 100644 --- a/setup.py +++ b/setup.py @@ -26,7 +26,7 @@ ]}, classifiers=[ "Development Status :: 4 - Beta", - #"Development Status :: 5 - Production/Stable", + # "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: 
Science/Research", "License :: OSI Approved :: MIT License", diff --git a/test/helpers.py b/test/helpers.py index 72b04077a0..bac61b46fc 100644 --- a/test/helpers.py +++ b/test/helpers.py @@ -3,7 +3,6 @@ import yaml import os import subprocess as sp -from conda_build.metadata import MetaData def ensure_missing(package): diff --git a/test/test_bioconductor_skeleton.py b/test/test_bioconductor_skeleton.py index 4488429307..59888784f9 100644 --- a/test/test_bioconductor_skeleton.py +++ b/test/test_bioconductor_skeleton.py @@ -99,16 +99,17 @@ def test_pkg_version(): 'http://bioconductor.org/packages/3.4/bioc/src/contrib/DESeq2_1.14.1.tar.gz') assert b.bioarchive_url is None assert b.cargoport_url == ( - 'https://depot.galaxyproject.org/software/bioconductor-deseq2/bioconductor-deseq2_1.14.1_src_all.tar.gz') + 'https://depot.galaxyproject.org/software/bioconductor-deseq2/bioconductor-deseq2_1.14.1_src_all.tar.gz') # noqa: E501: line too long # bioc version specified, but not package version b = bioconductor_skeleton.BioCProjectPage('edgeR', bioc_version='3.5') assert b.version == '3.18.1' assert b.bioc_version == '3.5' - assert b.bioconductor_tarball_url == 'http://bioconductor.org/packages/3.5/bioc/src/contrib/edgeR_3.18.1.tar.gz' + assert b.bioconductor_tarball_url == ( + 'http://bioconductor.org/packages/3.5/bioc/src/contrib/edgeR_3.18.1.tar.gz') assert b.bioarchive_url is None assert b.cargoport_url == ( - 'https://depot.galaxyproject.org/software/bioconductor-edger/bioconductor-edger_3.18.1_src_all.tar.gz') + 'https://depot.galaxyproject.org/software/bioconductor-edger/bioconductor-edger_3.18.1_src_all.tar.gz') # noqa: E501: line too long def test_bioarchive_exists_but_not_bioconductor(): @@ -155,11 +156,13 @@ def test_nonexistent_pkg(tmpdir): # no such package exists in the current bioconductor with pytest.raises(bioconductor_skeleton.PageNotFoundError): - bioconductor_skeleton.write_recipe('nonexistent', str(tmpdir), config, recursive=True) + 
bioconductor_skeleton.write_recipe( + 'nonexistent', str(tmpdir), config, recursive=True) # package exists, but not this version with pytest.raises(bioconductor_skeleton.PackageNotFoundError): - bioconductor_skeleton.write_recipe('DESeq', str(tmpdir), config, recursive=True, pkg_version='5000') + bioconductor_skeleton.write_recipe( + 'DESeq', str(tmpdir), config, recursive=True, pkg_version='5000') def test_overwrite(tmpdir): diff --git a/test/test_linting.py b/test/test_linting.py index e9c9212d9c..b5a836979b 100644 --- a/test/test_linting.py +++ b/test/test_linting.py @@ -1,5 +1,3 @@ -import pandas -import yaml from helpers import Recipes from bioconda_utils import lint_functions from bioconda_utils import linting, utils @@ -45,7 +43,7 @@ def _run(contents, expect_pass=True): r.write_recipes() assert len(r.recipe_dirs) == 1 name = list(r.recipe_dirs.keys())[0] - recipe, meta, df = r.recipe_dirs[name], r.recipes[name]['meta.yaml'], should_pass_df + recipe, df = r.recipe_dirs[name], should_pass_df metas = [] for platform in ["linux", "osx"]: config = utils.load_conda_build_config(platform=platform, trim_skip=False) @@ -76,7 +74,9 @@ def test_empty_build_section(): # access to contents of possibly empty build section can happen in # `should_be_noarch` and `should_not_be_noarch` registry = [lint_functions.should_be_noarch, lint_functions.should_not_be_noarch] - res = linting.lint(r.recipe_dirs.values(), df=None, registry=registry) + res = linting.lint( + r.recipe_dirs.values(), + linting.LintArgs(df=None, registry=registry)) assert res is None @@ -92,10 +92,11 @@ def test_lint_skip_in_recipe(): version: "0.1" ''', from_string=True) r.write_recipes() - res = linting.lint(r.recipe_dirs.values(), df=None, registry=[lint_functions.missing_home]) + res = linting.lint( + r.recipe_dirs.values(), + linting.LintArgs(df=None, registry=[lint_functions.missing_home])) assert res is not None - # should now pass with the extra:skip-lints (only linting for `missing_home`) r = 
Recipes( ''' @@ -109,7 +110,9 @@ def test_lint_skip_in_recipe(): - missing_home ''', from_string=True) r.write_recipes() - res = linting.lint(r.recipe_dirs.values(), df=None, registry=[lint_functions.missing_home]) + res = linting.lint( + r.recipe_dirs.values(), + linting.LintArgs(df=None, registry=[lint_functions.missing_home])) assert res is None # should pass; minimal recipe needs to skip these lints @@ -127,488 +130,514 @@ def test_lint_skip_in_recipe(): - no_tests ''', from_string=True) r.write_recipes() - res = linting.lint(r.recipe_dirs.values(), df=None) + res = linting.lint(r.recipe_dirs.values(), linting.LintArgs(df=None)) assert res is not None def test_missing_home(): run_lint( func=lint_functions.missing_home, - should_pass=''' - missing_home: - meta.yaml: | - package: - name: missing_home - version: "0.1" - about: - home: "http://bioconda.github.io" - ''', + should_pass=[ + ''' + missing_home: + meta.yaml: | + package: + name: missing_home + version: "0.1" + about: + home: "http://bioconda.github.io" + ''', + ], should_fail=[ - ''' - missing_home: - meta.yaml: | - package: - name: missing_home - version: "0.1" - ''', - ''' - missing_home: - meta.yaml: | - package: - name: missing_home - version: "0.1" - about: - home: "" - ''', - ]) + ''' + missing_home: + meta.yaml: | + package: + name: missing_home + version: "0.1" + ''', + ''' + missing_home: + meta.yaml: | + package: + name: missing_home + version: "0.1" + about: + home: "" + ''', + ], + ) def test_missing_summary(): run_lint( func=lint_functions.missing_summary, - should_pass=''' - missing_summary: - meta.yaml: | - package: - name: missing_summary - version: "0.1" - about: - summary: "tool description" - ''', + should_pass=[ + ''' + missing_summary: + meta.yaml: | + package: + name: missing_summary + version: "0.1" + about: + summary: "tool description" + ''', + ], should_fail=[ - ''' - missing_summary: - meta.yaml: | - package: - name: missing_summary - version: "0.1" - ''', - ''' - 
missing_summary: - meta.yaml: | - package: - name: missing_summary - version: "0.1" - about: - summary: "" - ''', - ]) + ''' + missing_summary: + meta.yaml: | + package: + name: missing_summary + version: "0.1" + ''', + ''' + missing_summary: + meta.yaml: | + package: + name: missing_summary + version: "0.1" + about: + summary: "" + ''', + ], + ) def test_missing_license(): run_lint( func=lint_functions.missing_license, - should_pass=''' - missing_license: - meta.yaml: | - package: - name: missing_license - version: "0.1" - about: - license: "MIT" - ''', + should_pass=[ + ''' + missing_license: + meta.yaml: | + package: + name: missing_license + version: "0.1" + about: + license: "MIT" + ''', + ], should_fail=[ - ''' - missing_license: - meta.yaml: | - package: - name: missing_license - version: "0.1" - ''', - ''' - missing_license: - meta.yaml: | - package: - name: missing_license - version: "0.1" - about: - license: "" - ''', - ]) + ''' + missing_license: + meta.yaml: | + package: + name: missing_license + version: "0.1" + ''', + ''' + missing_license: + meta.yaml: | + package: + name: missing_license + version: "0.1" + about: + license: "" + ''', + ], + ) def test_missing_tests(): run_lint( func=lint_functions.missing_tests, should_pass=[ - ''' - missing_tests: - meta.yaml: | - package: - name: missing_tests - version: "0.1" - test: - commands: "ls" - ''', - ''' - missing_tests: - meta.yaml: | - package: - name: missing_tests - version: "0.1" - run_test.sh: "" - ''', - ''' - missing_tests: - meta.yaml: | - package: - name: missing_tests - version: "0.1" - run_test.py: "" - ''', + ''' + missing_tests: + meta.yaml: | + package: + name: missing_tests + version: "0.1" + test: + commands: "ls" + ''', + ''' + missing_tests: + meta.yaml: | + package: + name: missing_tests + version: "0.1" + run_test.sh: "" + ''', + ''' + missing_tests: + meta.yaml: | + package: + name: missing_tests + version: "0.1" + run_test.py: "" + ''', ], should_fail=[ - ''' - missing_tests: - 
meta.yaml: | - package: - name: missing_tests - version: "0.1" - run_tst.sh: "" - ''', - ''' - missing_tests: - meta.yaml: | - package: - name: missing_tests - version: "0.1" - test: - # empty test section - ''', - ]) + ''' + missing_tests: + meta.yaml: | + package: + name: missing_tests + version: "0.1" + run_tst.sh: "" + ''', + ''' + missing_tests: + meta.yaml: | + package: + name: missing_tests + version: "0.1" + test: + # empty test section + ''', + ], + ) def test_missing_hash(): run_lint( func=lint_functions.missing_hash, should_pass=[ - ''' - missing_hash: - meta.yaml: | - package: - name: md5hash - version: "0.1" - source: - md5: 11111111111111111111111111111111 - ''', - ''' - missing_hash: - meta.yaml: | - package: - name: md5hash_list - version: "0.1" - source: - - md5: 11111111111111111111111111111111 - ''', - # Should pass when source section is missing - ''' - missing_hash: - meta.yaml: | - package: - name: metapackage - version: "0.1" - ''', + ''' + missing_hash: + meta.yaml: | + package: + name: md5hash + version: "0.1" + source: + md5: 11111111111111111111111111111111 + ''', + ''' + missing_hash: + meta.yaml: | + package: + name: md5hash_list + version: "0.1" + source: + - md5: 11111111111111111111111111111111 + ''', + # Should pass when source section is missing + ''' + missing_hash: + meta.yaml: | + package: + name: metapackage + version: "0.1" + ''', ], should_fail=[ - ''' - missing_hash: - meta.yaml: | - package: - name: missing_hash - version: "0.1" - source: - fn: "a.txt" - ''', - ''' - missing_hash: - meta.yaml: | - package: - name: empty_hash - version: "0.1" - source: - fn: "a.txt" - sha256: "" - ''', - ''' - missing_hash: - meta.yaml: | - package: - name: missing_hash_list - version: "0.1" - source: - - fn: "a.txt" - - md5: 11111111111111111111111111111111 - ''', - ]) + ''' + missing_hash: + meta.yaml: | + package: + name: missing_hash + version: "0.1" + source: + fn: "a.txt" + ''', + ''' + missing_hash: + meta.yaml: | + package: + name: 
empty_hash + version: "0.1" + source: + fn: "a.txt" + sha256: "" + ''', + ''' + missing_hash: + meta.yaml: | + package: + name: missing_hash_list + version: "0.1" + source: + - fn: "a.txt" + - md5: 11111111111111111111111111111111 + ''', + ], + ) def test_uses_git_url(): run_lint( func=lint_functions.uses_git_url, should_pass=[ - ''' - uses_git_url: - meta.yaml: | - package: - name: uses_git_url - version: "0.1" - source: - fn: "a.txt" - ''', - ''' - uses_git_url: - meta.yaml: | - package: - name: uses_git_url - version: "0.1" - ''', - ''' - uses_git_url: - meta.yaml: | - package: - name: uses_git_url_list - version: "0.1" - source: - - fn: "a.txt" - ''', + ''' + uses_git_url: + meta.yaml: | + package: + name: uses_git_url + version: "0.1" + source: + fn: "a.txt" + ''', + ''' + uses_git_url: + meta.yaml: | + package: + name: uses_git_url + version: "0.1" + ''', + ''' + uses_git_url: + meta.yaml: | + package: + name: uses_git_url_list + version: "0.1" + source: + - fn: "a.txt" + ''', ], should_fail=[ - ''' - uses_git_url: - meta.yaml: | - package: - name: uses_git_url - version: "0.1" - source: - git_url: https://github.com/bioconda/bioconda.git - ''', - ''' - uses_git_url: - meta.yaml: | - package: - name: uses_git_url_list - version: "0.1" - source: - - git_url: https://github.com/bioconda/bioconda.git - ''', - ]) + ''' + uses_git_url: + meta.yaml: | + package: + name: uses_git_url + version: "0.1" + source: + git_url: https://github.com/bioconda/bioconda.git + ''', + ''' + uses_git_url: + meta.yaml: | + package: + name: uses_git_url_list + version: "0.1" + source: + - git_url: https://github.com/bioconda/bioconda.git + ''', + ], + ) def test_uses_perl_threaded(): run_lint( func=lint_functions.uses_perl_threaded, - should_pass=[''' - uses_perl_threaded: - meta.yaml: | - package: - name: uses_perl_threaded - version: "0.1" - requirements: - build: - - perl - run: - - perl - ''', - ''' - uses_perl_threaded: - meta.yaml: | - package: - name: uses_perl_threaded - 
version: "0.1" - requirements: - run: - - perl - ''', - ''' - uses_perl_threaded: - meta.yaml: | - package: - name: uses_perl_threaded - version: "0.1" - requirements: - build: - - perl - ''', - ''' - uses_perl_threaded: - meta.yaml: | - package: - name: uses_perl_threaded - version: "0.1" - '''], + should_pass=[ + ''' + uses_perl_threaded: + meta.yaml: | + package: + name: uses_perl_threaded + version: "0.1" + requirements: + build: + - perl + run: + - perl + ''', + ''' + uses_perl_threaded: + meta.yaml: | + package: + name: uses_perl_threaded + version: "0.1" + requirements: + run: + - perl + ''', + ''' + uses_perl_threaded: + meta.yaml: | + package: + name: uses_perl_threaded + version: "0.1" + requirements: + build: + - perl + ''', + ''' + uses_perl_threaded: + meta.yaml: | + package: + name: uses_perl_threaded + version: "0.1" + ''', + ], should_fail=[ - ''' - uses_perl_threaded: - meta.yaml: | - package: - name: uses_perl_threaded - version: "0.1" - requirements: - build: - - perl-threaded - ''', - ''' - uses_perl_threaded: - meta.yaml: | - package: - name: uses_perl_threaded - version: "0.1" - requirements: - run: - - perl-threaded - ''', - ''' - uses_perl_threaded: - meta.yaml: | - package: - name: uses_perl_threaded - version: "0.1" - requirements: - run: - - perl-threaded - build: - - perl-threaded - ''']) + ''' + uses_perl_threaded: + meta.yaml: | + package: + name: uses_perl_threaded + version: "0.1" + requirements: + build: + - perl-threaded + ''', + ''' + uses_perl_threaded: + meta.yaml: | + package: + name: uses_perl_threaded + version: "0.1" + requirements: + run: + - perl-threaded + ''', + ''' + uses_perl_threaded: + meta.yaml: | + package: + name: uses_perl_threaded + version: "0.1" + requirements: + run: + - perl-threaded + build: + - perl-threaded + ''', + ], + ) + def test_uses_javajdk(): run_lint( func=lint_functions.uses_javajdk, - should_pass=[''' - uses_javajdk: - meta.yaml: | - package: - name: uses_javajdk - version: "0.1" - requirements: 
- build: - - openjdk - run: - - openjdk - ''', - ''' - uses_javajdk: - meta.yaml: | - package: - name: uses_javajdk - version: "0.1" - requirements: - run: - - openjdk - ''', - ''' - uses_javajdk: - meta.yaml: | - package: - name: uses_javajdk - version: "0.1" - requirements: - build: - - openjdk - ''', - ''' - uses_javajdk: - meta.yaml: | - package: - name: uses_javajdk - version: "0.1" - '''], + should_pass=[ + ''' + uses_javajdk: + meta.yaml: | + package: + name: uses_javajdk + version: "0.1" + requirements: + build: + - openjdk + run: + - openjdk + ''', + ''' + uses_javajdk: + meta.yaml: | + package: + name: uses_javajdk + version: "0.1" + requirements: + run: + - openjdk + ''', + ''' + uses_javajdk: + meta.yaml: | + package: + name: uses_javajdk + version: "0.1" + requirements: + build: + - openjdk + ''', + ''' + uses_javajdk: + meta.yaml: | + package: + name: uses_javajdk + version: "0.1" + ''', + ], should_fail=[ - ''' - uses_javajdk: - meta.yaml: | - package: - name: uses_javajdk - version: "0.1" - requirements: - build: - - java-jdk - ''', - ''' - uses_javajdk: - meta.yaml: | - package: - name: uses_javajdk - version: "0.1" - requirements: - run: - - java-jdk - ''', - ''' - uses_javajdk: - meta.yaml: | - package: - name: uses_javajdk - version: "0.1" - requirements: - run: - - java-jdk - build: - - java-jdk - ''']) + ''' + uses_javajdk: + meta.yaml: | + package: + name: uses_javajdk + version: "0.1" + requirements: + build: + - java-jdk + ''', + ''' + uses_javajdk: + meta.yaml: | + package: + name: uses_javajdk + version: "0.1" + requirements: + run: + - java-jdk + ''', + ''' + uses_javajdk: + meta.yaml: | + package: + name: uses_javajdk + version: "0.1" + requirements: + run: + - java-jdk + build: + - java-jdk + ''', + ], + ) def test_uses_setuptools(): run_lint( func=lint_functions.uses_setuptools, should_pass=[ - ''' - uses_setuptools: - meta.yaml: | - package: - name: uses_setuptools - version: "0.1" - ''', - ''' - uses_setuptools: - meta.yaml: | - 
package: - name: uses_setuptools - version: "0.1" - requirements: - build: - - setuptools - '''], - - should_fail=''' - uses_setuptools: - meta.yaml: | - package: - name: uses_setuptools - version: "0.1" - requirements: - run: - - setuptools - ''',) + ''' + uses_setuptools: + meta.yaml: | + package: + name: uses_setuptools + version: "0.1" + ''', + ''' + uses_setuptools: + meta.yaml: | + package: + name: uses_setuptools + version: "0.1" + requirements: + build: + - setuptools + ''', + ], + should_fail=[ + ''' + uses_setuptools: + meta.yaml: | + package: + name: uses_setuptools + version: "0.1" + requirements: + run: + - setuptools + ''', + ], + ) def test_has_windows_bat_file(): run_lint( func=lint_functions.has_windows_bat_file, - should_pass=''' - has_windows_bat_file: - meta.yaml: | - package: - name: has_windows_bat_file - version: "0.1" - ''', - + should_pass=[ + ''' + has_windows_bat_file: + meta.yaml: | + package: + name: has_windows_bat_file + version: "0.1" + ''', + ], should_fail=[ - ''' - has_windows_bat_file: - meta.yaml: | - package: - name: has_windows_bat_file - version: "0.1" - build.bat: "" - ''', - ''' - has_windows_bat_file: - meta.yaml: | - package: - name: has_windows_bat_file - version: "0.1" - any.bat: "" - ''',] + ''' + has_windows_bat_file: + meta.yaml: | + package: + name: has_windows_bat_file + version: "0.1" + build.bat: "" + ''', + ''' + has_windows_bat_file: + meta.yaml: | + package: + name: has_windows_bat_file + version: "0.1" + any.bat: "" + ''', + ] ) @@ -616,62 +645,62 @@ def test_should_not_be_noarch(): run_lint( func=lint_functions.should_not_be_noarch, should_pass=[ - ''' - should_be_noarch1: - meta.yaml: | - package: - name: should_be_noarch1 - version: "0.1" - build: - noarch: python - ''', - ''' - should_be_noarch2: - meta.yaml: | - package: - name: should_be_noarch2 - version: "0.1" - build: - noarch: python - skip: false - ''', + ''' + should_be_noarch1: + meta.yaml: | + package: + name: should_be_noarch1 + version: "0.1" 
+ build: + noarch: python + ''', + ''' + should_be_noarch2: + meta.yaml: | + package: + name: should_be_noarch2 + version: "0.1" + build: + noarch: python + skip: false + ''', ], should_fail=[ - ''' - should_not_be_noarch1: - meta.yaml: | - package: - name: should_not_be_noarch1 - version: "0.1" - build: - noarch: python - requirements: - build: - - gcc - ''', - ''' - should_not_be_noarch2: - meta.yaml: | - package: - name: should_not_be_noarch2 - version: "0.1" - build: - noarch: python - skip: True # [osx] - ''', - ''' - should_not_be_noarch3: - meta.yaml: | - package: - name: should_not_be_noarch3 - version: "0.1" - build: - noarch: python - skip: False - requirements: - build: - - gcc - ''', + ''' + should_not_be_noarch1: + meta.yaml: | + package: + name: should_not_be_noarch1 + version: "0.1" + build: + noarch: python + requirements: + build: + - gcc + ''', + ''' + should_not_be_noarch2: + meta.yaml: | + package: + name: should_not_be_noarch2 + version: "0.1" + build: + noarch: python + skip: True # [osx] + ''', + ''' + should_not_be_noarch3: + meta.yaml: | + package: + name: should_not_be_noarch3 + version: "0.1" + build: + noarch: python + skip: False + requirements: + build: + - gcc + ''', ] ) @@ -680,260 +709,276 @@ def test_setup_py_install_args(): run_lint( func=lint_functions.setup_py_install_args, should_pass=[ - ''' - setup_py_install_args: - meta.yaml: | - package: - name: setup_py_install_args - version: "0.1" - ''', - ''' - setup_py_install_args: - meta.yaml: | - package: - name: setup_py_install_args - version: "0.1" - build.sh: | - $PYTHON setup.py install - ''', - ''' - setup_py_install_args: - meta.yaml: | - package: - name: setup_py_install_args - version: "0.1" - build: - script: $PYTHON setup.py install - ''', - ''' - setup_py_install_args: - meta.yaml: | - package: - name: setup_py_install_args - version: "0.1" - requirements: - build: - - setuptools - build.sh: | - $PYTHON setup.py install --single-version-externally-managed --report=a.txt 
- ''', - ''' - setup_py_install_args: - meta.yaml: | - package: - name: setup_py_install_args - version: "0.1" - requirements: - build: - - setuptools - build.sh: | - $PYTHON setup.py install \\ - --single-version-externally-managed --report=a.txt - ''', - ''' - setup_py_install_args: - meta.yaml: | - package: - name: setup_py_install_args - version: "0.1" - build: - script: $PYTHON setup.py install --single-version-externally-managed --report=a.txt - ''', + ''' + setup_py_install_args: + meta.yaml: | + package: + name: setup_py_install_args + version: "0.1" + ''', + ''' + setup_py_install_args: + meta.yaml: | + package: + name: setup_py_install_args + version: "0.1" + build.sh: | + $PYTHON setup.py install + ''', + ''' + setup_py_install_args: + meta.yaml: | + package: + name: setup_py_install_args + version: "0.1" + build: + script: $PYTHON setup.py install + ''', + ''' + setup_py_install_args: + meta.yaml: | + package: + name: setup_py_install_args + version: "0.1" + requirements: + build: + - setuptools + build.sh: | + $PYTHON setup.py install --single-version-externally-managed --report=a.txt + ''', + ''' + setup_py_install_args: + meta.yaml: | + package: + name: setup_py_install_args + version: "0.1" + requirements: + build: + - setuptools + build.sh: | + $PYTHON setup.py install \\ + --single-version-externally-managed --report=a.txt + ''', + ''' + setup_py_install_args: + meta.yaml: | + package: + name: setup_py_install_args + version: "0.1" + build: + script: $PYTHON setup.py install --single-version-externally-managed --report=a.txt + ''', # noqa: E501: line too long ], should_fail=[ - ''' - setup_py_install_args: - meta.yaml: | - package: - name: setup_py_install_args - version: "0.1" - requirements: - build: - - setuptools - build.sh: | - $PYTHON setup.py install - ''', - ''' - setup_py_install_args: - meta.yaml: | - package: - name: setup_py_install_args - version: "0.1" - requirements: - build: - - setuptools - build: - script: $PYTHON setup.py 
install - ''', + ''' + setup_py_install_args: + meta.yaml: | + package: + name: setup_py_install_args + version: "0.1" + requirements: + build: + - setuptools + build.sh: | + $PYTHON setup.py install + ''', + ''' + setup_py_install_args: + meta.yaml: | + package: + name: setup_py_install_args + version: "0.1" + requirements: + build: + - setuptools + build: + script: $PYTHON setup.py install + ''', ] ) + def test_invalid_identifiers(): run_lint( func=lint_functions.invalid_identifiers, - should_pass=[''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - extra: - identifiers: - - doi:10.1093/bioinformatics/btr010 - '''], - should_fail=[''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - extra: - identifiers: - - doi: 10.1093/bioinformatics/btr010 - ''', - ''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - extra: - identifiers: - doi: 10.1093/bioinformatics/btr010 - ''', - ''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - extra: - identifiers: - doi:10.1093/bioinformatics/btr010 - '''] + should_pass=[ + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + extra: + identifiers: + - doi:10.1093/bioinformatics/btr010 + ''', + ], + should_fail=[ + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + extra: + identifiers: + - doi: 10.1093/bioinformatics/btr010 + ''', + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + extra: + identifiers: + doi: 10.1093/bioinformatics/btr010 + ''', + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + extra: + identifiers: + doi:10.1093/bioinformatics/btr010 + ''', + ] ) def test_deprecated_numpy_spec(): run_lint( func=lint_functions.deprecated_numpy_spec, - should_pass=[''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - requirements: - build: - - numpy - - python - run: - - numpy - '''], - should_fail=[''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - requirements: - build: - - numpy x.x - run: - - numpy x.x - '''] + should_pass=[ + 
''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + build: + - numpy + - python + run: + - numpy + ''', + ], + should_fail=[ + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + build: + - numpy x.x + run: + - numpy x.x + ''', + ] ) def test_should_use_compilers(): run_lint( func=lint_functions.should_use_compilers, - should_pass=[''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - requirements: - host: - - python - run: - - python - ''', - ''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - requirements: - build: - - {{ compiler ('c') }} - '''], - should_fail=[''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - requirements: - build: - - gcc # [linux] - ''', - ''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - requirements: - run: - - libgcc # [linux] - ''' + should_pass=[ + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + host: + - python + run: + - python + ''', + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + build: + - {{ compiler ('c') }} + ''', + ], + should_fail=[ + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + build: + - gcc # [linux] + ''', + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + run: + - libgcc # [linux] + ''', ] ) + def test_compilers_must_be_in_build(): run_lint( func=lint_functions.compilers_must_be_in_build, - should_pass=[''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - requirements: - host: - - python - run: - - python - package: - name: a - version: 0.1 - requirements: - build: - - {{ compiler ('c') }} - '''], - should_fail=[''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - requirements: - run: - - {{ compiler("c") }} - ''', - ''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - requirements: - host: - - {{ compiler ('c') }} - ''' + should_pass=[ + ''' + a: + meta.yaml: | + package: + name: a 
+ version: 0.1 + requirements: + host: + - python + run: + - python + package: + name: a + version: 0.1 + requirements: + build: + - {{ compiler ('c') }} + ''', + ], + should_fail=[ + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + run: + - {{ compiler("c") }} + ''', + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + requirements: + host: + - {{ compiler ('c') }} + ''', ] ) @@ -941,61 +986,68 @@ def test_compilers_must_be_in_build(): def test_should_not_use_fn(): run_lint( func=lint_functions.should_not_use_fn, - should_pass=[''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - ''', - ''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - source: - url: https://bioconda.github.io/index.html - '''], - should_fail=[''' - a: - meta.yaml: | - package: - name: a - version: 0.1 - source: - fn: index.html - url: https://bioconda.github.io/index.html - ''', + should_pass=[ + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + ''', + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + source: + url: https://bioconda.github.io/index.html + ''', + ], + should_fail=[ + ''' + a: + meta.yaml: | + package: + name: a + version: 0.1 + source: + fn: index.html + url: https://bioconda.github.io/index.html + ''', ] ) - + def test_bioconductor_37(): run_lint( func=lint_functions.bioconductor_37, - should_pass=[''' - a: - meta.yaml: | - {% set bioc = "3.6" %} - package: - name: a - version: 0.1 - '''], - should_fail=[''' - a: - meta.yaml: | - {% set bioc = "3.7" %} - package: - name: a - version: 0.1 - ''', - ''' - a: - meta.yaml: | - {% set bioc = "release" %} - package: - name: a - version: 0.1 - '''] + should_pass=[ + ''' + a: + meta.yaml: | + {% set bioc = "3.6" %} + package: + name: a + version: 0.1 + ''', + ], + should_fail=[ + ''' + a: + meta.yaml: | + {% set bioc = "3.7" %} + package: + name: a + version: 0.1 + ''', + ''' + a: + meta.yaml: | + {% set bioc = "release" %} + package: + name: a + version: 
0.1 + ''', + ] ) diff --git a/test/test_pkg_test.py b/test/test_pkg_test.py index a924e629d1..598c0a0091 100644 --- a/test/test_pkg_test.py +++ b/test/test_pkg_test.py @@ -117,7 +117,7 @@ def test_pkg_test_conda_image(): if [ "${PREFIX}" == /usr/local ] ; then /opt/conda/bin/conda --version > /usr/local/conda-version fi - """) + """) # noqa: E501: line too long built_packages = _build_pkg(recipe) for pkg in built_packages: pkg_test.test_package(pkg, conda_image="continuumio/miniconda3:4.3.11") diff --git a/test/test_utils.py b/test/test_utils.py index 265ecde55e..cce88f1a68 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -15,12 +15,9 @@ from bioconda_utils import utils from bioconda_utils import pkg_test from bioconda_utils import docker_utils -from bioconda_utils import cli from bioconda_utils import build from bioconda_utils import upload from helpers import ensure_missing, Recipes -from conda_build import api -from conda_build.metadata import MetaData # TODO: need channel order tests. Could probably do this by adding different # file:// channels with different variants of the same package @@ -108,7 +105,6 @@ def single_build(request, recipes_fixture): ensure_missing(pkg) - # TODO: need to have a variant of this where TRAVIS_BRANCH_NAME="master" in # order to properly test for upload. 
@pytest.fixture(scope='module', params=PARAMS, ids=IDS) @@ -168,7 +164,7 @@ def single_upload(): yield (name, pkg, r.recipe_dirs[name]) - p = sp.run( + sp.run( ['anaconda', '-t', os.environ.get('ANACONDA_TOKEN'), 'remove', 'bioconda/{0}'.format(name), '--force'], stdout=sp.PIPE, stderr=sp.STDOUT, check=True, @@ -185,7 +181,7 @@ def test_upload(single_upload): name, pkg, recipe = single_upload env_name = 'bioconda-utils-test-' + str(uuid.uuid4()).split('-')[0] with ensure_env_missing(env_name): - p = sp.run( + sp.run( ['conda', 'create', '-n', env_name, '-c', 'bioconda/label/{0}'.format(TEST_LABEL), name], stdout=sp.PIPE, stderr=sp.STDOUT, check=True, @@ -246,8 +242,7 @@ def test_docker_build_image_fails(): RUN nonexistent command """) with pytest.raises(sp.CalledProcessError): - docker_builder = docker_utils.RecipeBuilder( - dockerfile_template=template) + docker_utils.RecipeBuilder(dockerfile_template=template) def test_conda_purge_cleans_up(): @@ -547,7 +542,7 @@ def test_rendering_sandboxing(): if 'GITHUB_TOKEN' in os.environ: with pytest.raises(sp.CalledProcessError) as excinfo: pkg_paths = utils.built_package_paths(r.recipe_dirs['one']) - res = build.build( + build.build( recipe=r.recipe_dirs['one'], recipe_folder='.', pkg_paths=pkg_paths, @@ -559,11 +554,11 @@ def test_rendering_sandboxing(): # recipe for "one" should fail because GITHUB_TOKEN is not a jinja var. 
with pytest.raises(SystemExit) as excinfo: pkg_paths = utils.built_package_paths(r.recipe_dirs['one']) - res = build.build( + build.build( recipe=r.recipe_dirs['one'], recipe_folder='.', pkg_paths=pkg_paths, - mulled_test=False + mulled_test=False, ) assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value) @@ -585,11 +580,11 @@ def test_rendering_sandboxing(): for pkg in pkg_paths: ensure_missing(pkg) - res = build.build( + build.build( recipe=r.recipe_dirs['two'], recipe_folder='.', pkg_paths=pkg_paths, - mulled_test=False + mulled_test=False, ) for pkg in pkg_paths: @@ -932,7 +927,7 @@ def test_cb3_outputs(): """, from_string=True) r.write_recipes() - recipe = r.recipe_dirs['one'] + r.recipe_dirs['one'] build_result = build.build_recipes( r.basedir, @@ -949,6 +944,7 @@ def test_cb3_outputs(): assert os.path.exists(i) ensure_missing(i) + def test_compiler(): r = Recipes( """ From 4e35af84d80b12981a11caf04da4b29b3717a5d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Mon, 2 Jul 2018 23:24:14 +0200 Subject: [PATCH 116/118] Add paper to the homepage. --- docs/source/index.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/source/index.rst b/docs/source/index.rst index b423dd39c5..4859978e15 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -27,6 +27,8 @@ add additional channels from which to install software packages not available in the defaults channel. Bioconda is one such channel specializing in bioinformatics software. +When using Bioconda please **cite our article** `Grüning, Björn, Ryan Dale, Andreas Sjödin, Brad A. Chapman, Jillian Rowe, Christopher H. Tomkins-Tinch, Renan Valieris, the Bioconda Team, and Johannes Köster. 2018. "Bioconda: Sustainable and Comprehensive Software Distribution for the Life Sciences". Nature Methods, July. .`_. + Bioconda has been acknowledged by NATURE in their `technology blog `_. 
From b0df159d367730bfaed7046281babc22c22d7d5a Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 4 Jul 2018 23:34:52 +0200 Subject: [PATCH 117/118] Handle dependency cycles (#313) * fix minor bug in `build --lint-exclude` * utils.check_recipe_skippable: avoid additional FILTER msg if nothing to do * utils.get_dag: 'dep' can be empty string for unevaluated jinja expressions * utils.get_dag: respect dependencies from build and run environments * build.build_recipes: gracefully handle(=skip) packages in dependency cycles --- bioconda_utils/build.py | 46 +++++++++++++++++++++++++++++++---------- bioconda_utils/utils.py | 29 +++++++++++++++++--------- 2 files changed, 54 insertions(+), 21 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index e0f3d2cb6a..2151c75b9a 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -1,5 +1,4 @@ import subprocess as sp -from itertools import chain from collections import defaultdict, namedtuple import os import logging @@ -298,7 +297,6 @@ def build_recipes( lint_exclude = (lint_args.exclude or ()) if 'already_in_bioconda' not in lint_exclude: lint_exclude = tuple(lint_exclude) + ('already_in_bioconda',) - lint_args = linting.LintArgs(lint_args.df, lint_exclude, lint_args.registry) lint_args = linting.LintArgs(df, lint_exclude, lint_args.registry) dag, name2recipes = utils.get_dag(recipes, config=orig_config, blacklist=blacklist) @@ -322,14 +320,45 @@ def build_recipes( "SUBDAG=%s (zero-based) but only SUBDAGS=%s " "subdags are available") + failed = [] + skip_dependent = defaultdict(list) + # Get connected subdags and sort by nodes if testonly: # use each node as a subdag (they are grouped into equal sizes below) subdags = sorted([[n] for n in nx.nodes(dag)]) else: - # take connected components as subdags - subdags = sorted(map(sorted, nx.connected_components(dag.to_undirected( - )))) + # take connected components as subdags, remove cycles + subdags = [] + for cc_nodes in 
nx.connected_components(dag.to_undirected()): + cc = dag.subgraph(sorted(cc_nodes)) + nodes_in_cycles = set() + for cycle in list(nx.simple_cycles(cc)): + logger.error( + 'BUILD ERROR: ' + 'dependency cycle found: %s', + cycle, + ) + nodes_in_cycles.update(cycle) + for name in sorted(nodes_in_cycles): + cycle_fail_recipes = sorted(name2recipes[name]) + logger.error( + 'BUILD ERROR: ' + 'cannot build recipes for %s since it cyclically depends ' + 'on other packages in the current build job. Failed ' + 'recipes: %s', + name, cycle_fail_recipes, + ) + failed.extend(cycle_fail_recipes) + for n in nx.algorithms.descendants(cc, name): + if n in nodes_in_cycles: + continue # don't count packages twice (failed/skipped) + skip_dependent[n].extend(cycle_fail_recipes) + cc_without_cycles = dag.subgraph( + name for name in cc if name not in nodes_in_cycles + ) + # ensure that packages which need a build are built in the right order + subdags.append(nx.topological_sort(cc_without_cycles)) # chunk subdags such that we have at most subdags_n many if subdags_n < len(subdags): chunks = [[n for subdag in subdags[i::subdags_n] for n in subdag] @@ -340,10 +369,7 @@ def build_recipes( logger.info("Nothing to be done.") return True # merge subdags of the selected chunk - # ensure that packages which need a build are built in the right order - subdag = dag.subgraph(chain.from_iterable( - nx.topological_sort(dag.subgraph(cc)) for cc in chunks[subdag_i] - )) + subdag = dag.subgraph(chunks[subdag_i]) recipes = [recipe for package in subdag @@ -354,12 +380,10 @@ def build_recipes( subdag_i + 1, subdags_n, len(recipes) ) - failed = [] built_recipes = [] skipped_recipes = [] all_success = True failed_uploads = [] - skip_dependent = defaultdict(list) channel_packages = utils.get_all_channel_packages(check_channels) for recipe in recipes: diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 51da29f44d..2e034c2169 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py 
@@ -492,7 +492,7 @@ def get_deps(meta, sec): deps = reqs.get(sec) if not deps: return [] - return [dep.split()[0] for dep in deps if dep is not None] + return [dep.split()[0] for dep in deps if dep] def get_inner_deps(dependencies): dependencies = list(dependencies) @@ -505,8 +505,14 @@ def get_inner_deps(dependencies): for meta, recipe in metadata) for meta, recipe in metadata: name = meta["package"]["name"] - dag.add_edges_from((dep, name) - for dep in set(get_inner_deps(get_deps(meta, "host")))) + dag.add_edges_from( + (dep, name) + for dep in set(chain( + get_inner_deps(get_deps(meta, "build")), + get_inner_deps(get_deps(meta, "host")), + get_inner_deps(get_deps(meta, "run")), + )) + ) return dag, name2recipe @@ -829,13 +835,6 @@ def check_recipe_skippable(recipe, channel_packages, force=False): return False platform, metas = _load_platform_metas(recipe, finalize=False) key_build_meta = _get_pkg_key_build_meta_map(metas) - num_new_pkg_builds = sum( - ( - Counter((pkg_key, pkg_build.subdir) for pkg_build in build_meta.keys()) - for pkg_key, build_meta in key_build_meta.items() - ), - Counter() - ) num_existing_pkg_builds = sum( ( Counter( @@ -846,6 +845,16 @@ def check_recipe_skippable(recipe, channel_packages, force=False): ), Counter() ) + if num_existing_pkg_builds == Counter(): + # No packages with same version + build num in channels: no need to skip + return False + num_new_pkg_builds = sum( + ( + Counter((pkg_key, pkg_build.subdir) for pkg_build in build_meta.keys()) + for pkg_key, build_meta in key_build_meta.items() + ), + Counter() + ) return num_new_pkg_builds == num_existing_pkg_builds From 5800ebe14d6752f223a083e820e9615999a4fa9a Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 18 Jul 2018 17:33:01 +0200 Subject: [PATCH 118/118] requirements: update to conda-forge-pinning 2018.07.18 --- bioconda_utils/bioconda_utils-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index cf2a0e9ef0..168ad7985f 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -21,5 +21,5 @@ colorlog=3.1.* six=1.11.* alabaster=0.7.* git=2.14.* -conda-forge-pinning=2018.05.22 +conda-forge-pinning=2018.07.18 python>=3.6