diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5cf0c79305..34bba39ae7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -28,6 +28,7 @@
 - return directory if base_dir is the root directory ([#3003](https://github.com/nf-core/tools/pull/3003))
 - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.6 ([#3006](https://github.com/nf-core/tools/pull/3006))
 - Create: allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008))
+- Remove nf-core licences command ([#3012](https://github.com/nf-core/tools/pull/3012))
 - README - absolute image paths ([#3013](https://github.com/nf-core/tools/pull/3013))
 - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.7 ([#3015](https://github.com/nf-core/tools/pull/3015))
 - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.8 ([#3017](https://github.com/nf-core/tools/pull/3017))
diff --git a/nf_core/__main__.py b/nf_core/__main__.py
index 341382f647..ea0018e2af 100644
--- a/nf_core/__main__.py
+++ b/nf_core/__main__.py
@@ -44,7 +44,7 @@
     "nf-core pipelines": [
         {
             "name": "For users",
-            "commands": ["list", "launch", "download", "create-params-file", "licences"],
+            "commands": ["list", "launch", "download", "create-params-file"],
         },
         {
             "name": "For developers",
diff --git a/nf_core/licences.py b/nf_core/licences.py
deleted file mode 100644
index be737280f8..0000000000
--- a/nf_core/licences.py
+++ /dev/null
@@ -1,115 +0,0 @@
-"""Lists software licences for a given workflow."""
-
-import json
-import logging
-import os
-
-import requests
-import rich.console
-import rich.table
-import yaml
-
-import nf_core.utils
-
-log = logging.getLogger(__name__)
-
-
-class WorkflowLicences:
-    """A nf-core workflow licenses collection.
-
-    Tries to retrieve the license information from all dependencies
-    of a given nf-core pipeline.
-
-    A condensed overview with license per dependency can be printed out.
-
-    Args:
-        pipeline (str): An existing nf-core pipeline name, like `nf-core/hlatyping`
-            or short `hlatyping`.
-    """
-
-    def __init__(self, pipeline):
-        self.pipeline = pipeline
-        self.conda_config = None
-        if self.pipeline.startswith("nf-core/"):
-            self.pipeline = self.pipeline[8:]
-        self.conda_packages = {}
-        self.conda_package_licences = {}
-        self.as_json = False
-
-    def run_licences(self):
-        """
-        Run the nf-core licences action
-        """
-        self.get_environment_file()
-        self.fetch_conda_licences()
-        return self.print_licences()
-
-    def get_environment_file(self):
-        """Get the conda environment file for the pipeline"""
-        if os.path.exists(self.pipeline):
-            pipeline_obj = nf_core.utils.Pipeline(self.pipeline)
-            pipeline_obj._load()
-            if pipeline_obj._fp("environment.yml") not in pipeline_obj.files:
-                raise LookupError(
-                    "No `environment.yml` file found. (Note: DSL2 pipelines are currently not supported by this command.)"
-                )
-            self.conda_config = pipeline_obj.conda_config
-        else:
-            env_url = f"https://raw.githubusercontent.com/nf-core/{self.pipeline}/master/environment.yml"
-            log.debug(f"Fetching environment.yml file: {env_url}")
-            response = requests.get(env_url)
-            # Check that the pipeline exists
-            if response.status_code == 404:
-                raise LookupError(
-                    f"Couldn't find pipeline conda file: {env_url}. (Note: DSL2 pipelines are currently not supported by this command.)"
-                )
-            self.conda_config = yaml.safe_load(response.text)
-
-    def fetch_conda_licences(self):
-        """Fetch package licences from Anaconda and PyPi."""
-
-        # Check conda dependency list
-        deps = self.conda_config.get("dependencies", [])
-        deps_data = {}
-        log.info(f"Fetching licence information for {len(deps)} tools")
-        for dep in deps:
-            try:
-                if isinstance(dep, str):
-                    dep_channels = self.conda_config.get("channels", [])
-                    deps_data[dep] = nf_core.utils.anaconda_package(dep, dep_channels)
-                elif isinstance(dep, dict):
-                    deps_data[dep] = nf_core.utils.pip_package(dep)
-            except ValueError:
-                log.error(f"Couldn't get licence information for {dep}")
-
-        for dep, data in deps_data.items():
-            _, depver = dep.split("=", 1)
-            self.conda_package_licences[dep] = nf_core.utils.parse_anaconda_licence(data, depver)
-
-    def print_licences(self):
-        """Prints the fetched license information.
-
-        Args:
-            as_json (boolean): Prints the information in JSON. Defaults to False.
-        """
-        log.info("Warning: This tool only prints licence information for the software tools packaged using conda.")
-        log.info("The pipeline may use other software and dependencies not described here. ")
-
-        if self.as_json:
-            return json.dumps(self.conda_package_licences, indent=4)
-        else:
-            table = rich.table.Table("Package Name", "Version", "Licence")
-            licence_list = []
-            for dep, licences in self.conda_package_licences.items():
-                depname, depver = dep.split("=", 1)
-                try:
-                    depname = depname.split("::")[1]
-                except IndexError:
-                    pass
-                licence_list.append([depname, depver, ", ".join(licences)])
-            # Sort by licence, then package name
-            licence_list = sorted(sorted(licence_list), key=lambda x: x[2])
-            # Add table rows
-            for lic in licence_list:
-                table.add_row(*lic)
-            return table
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 8142caa211..026efd1e6a 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -199,38 +199,6 @@ def test_cli_download(self, mock_dl):
 
         mock_dl.return_value.download_workflow.assert_called_once()
 
-    @mock.patch("nf_core.licences.WorkflowLicences")
-    def test_licences(self, mock_lic):
-        """Test nf-core pipeline licence is printed out and cli parameters are passed on."""
-        licence_text = "dummy licence text"
-        mock_lic.return_value.run_licences.return_value = licence_text
-
-        params = {
-            "json": None,
-        }
-
-        cmd = ["licences"] + self.assemble_params(params) + ["pipeline_name"]
-        result = self.invoke_cli(cmd)
-
-        assert result.exit_code == 0
-        assert licence_text in result.output
-
-        mock_lic.assert_called_once_with(cmd[-1])
-
-    @mock.patch("nf_core.licences.WorkflowLicences")
-    def test_licences_log_error(self, mock_lic):
-        """Test LookupError is logged"""
-        error_txt = "LookupError has been raised"
-        mock_lic.return_value.run_licences.side_effect = LookupError(error_txt)
-
-        cmd = ["licences", "pipeline_name"]
-        with self.assertLogs() as captured_logs:
-            result = self.invoke_cli(cmd)
-
-        assert result.exit_code == 1
-        assert error_txt in captured_logs.output[-1]
-        assert captured_logs.records[-1].levelname == "ERROR"
-
     @mock.patch("nf_core.pipelines.create.create.PipelineCreate")
     def test_create(self, mock_create):
         """Test nf-core pipeline is created and cli parameters are passed on."""
diff --git a/tests/test_licenses.py b/tests/test_licenses.py
deleted file mode 100644
index 8023c9e891..0000000000
--- a/tests/test_licenses.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""Some tests covering the pipeline creation sub command."""
-# import json
-# import os
-# import tempfile
-# import unittest
-#
-# import pytest
-# from rich.console import Console
-#
-# import nf_core.create
-# import nf_core.licences
-
-# TODO nf-core: Assess and strip out if no longer required for DSL2
-
-# class WorkflowLicensesTest(unittest.TestCase):
-#     """A class that performs tests on the workflow license
-#     retrieval functionality of nf-core tools."""
-
-#     def setUp(self):
-#         """ Create a new pipeline, then make a Licence object """
-#         # Set up the schema
-#         self.pipeline_dir = os.path.join(tempfile.mkdtemp(), "test_pipeline")
-#         self.create_obj = nf_core.create.PipelineCreate("testing", "test pipeline", "tester", outdir=self.pipeline_dir)
-#         self.create_obj.init_pipeline()
-#         self.license_obj = nf_core.licences.WorkflowLicences(self.pipeline_dir)
-
-#     def test_run_licences_successful(self):
-#         console = Console(record=True)
-#         console.print(self.license_obj.run_licences())
-#         output = console.export_text()
-#         assert "GPL v3" in output
-
-#     def test_run_licences_successful_json(self):
-#         self.license_obj.as_json = True
-#         console = Console(record=True)
-#         console.print(self.license_obj.run_licences())
-#         output = json.loads(console.export_text())
-#         for package in output:
-#             if "multiqc" in package:
-#                 assert output[package][0] == "GPL v3"
-#                 break
-#         else:
-#             raise LookupError("Could not find MultiQC")
-
-#     def test_get_environment_file_local(self):
-#         self.license_obj.get_environment_file()
-#         assert any(["multiqc" in k for k in self.license_obj.conda_config["dependencies"]])
-
-#     def test_get_environment_file_remote(self):
-#         self.license_obj = nf_core.licences.WorkflowLicences("methylseq")
-#         self.license_obj.get_environment_file()
-#         assert any(["multiqc" in k for k in self.license_obj.conda_config["dependencies"]])
-
-#     @pytest.mark.xfail(raises=LookupError, strict=True)
-#     def test_get_environment_file_nonexistent(self):
-#         self.license_obj = nf_core.licences.WorkflowLicences("fubarnotreal")
-#         self.license_obj.get_environment_file()
diff --git a/tests/test_lint.py b/tests/test_lint.py
index c3a03c5795..4e468b7538 100644
--- a/tests/test_lint.py
+++ b/tests/test_lint.py
@@ -253,340 +253,3 @@ def test_sphinx_md_files(self):
         test_template_strings_ignored,
     )
     from .lint.version_consistency import test_version_consistency  # type: ignore[misc]
-
-
-# TODO nf-core: Assess and strip out if no longer required for DSL2
-
-# def test_critical_missingfiles_example(self):
-#     """Tests for missing nextflow config and main.nf files"""
-#     lint_obj = nf_core.pipelines.lint.run_linting(PATH_CRITICAL_EXAMPLE, False)
-#     assert len(lint_obj.failed) > 0
-#
-# def test_failing_missingfiles_example(self):
-#     """Tests for missing files like Dockerfile or LICENSE"""
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_FAILING_EXAMPLE)
-#     lint_obj.check_files_exist()
-#     expectations = {"failed": 6, "warned": 2, "passed": 14}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_mit_licence_example_pass(self):
-#     """Tests that MIT test works with good MIT licences"""
-#     good_lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_CRITICAL_EXAMPLE)
-#     good_lint_obj.check_licence()
-#     expectations = {"failed": 0, "warned": 0, "passed": 1}
-#     self.assess_lint_status(good_lint_obj, **expectations)
-#
-# def test_mit_license_example_with_failed(self):
-#     """Tests that MIT test works with bad MIT licences"""
-#     bad_lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_FAILING_EXAMPLE)
-#     bad_lint_obj.check_licence()
-#     expectations = {"failed": 1, "warned": 0, "passed": 0}
-#     self.assess_lint_status(bad_lint_obj, **expectations)
-#
-# def test_config_variable_example_pass(self):
-#     """Tests that config variable existence test works with good pipeline example"""
-#     good_lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     good_lint_obj.check_nextflow_config()
-#     expectations = {"failed": 0, "warned": 1, "passed": 34}
-#     self.assess_lint_status(good_lint_obj, **expectations)
-#
-# def test_config_variable_example_with_failed(self):
-#     """Tests that config variable existence test fails with bad pipeline example"""
-#     bad_lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_FAILING_EXAMPLE)
-#     bad_lint_obj.check_nextflow_config()
-#     expectations = {"failed": 19, "warned": 6, "passed": 10}
-#     self.assess_lint_status(bad_lint_obj, **expectations)
-#
-# @pytest.mark.xfail(raises=AssertionError, strict=True)
-# def test_config_variable_error(self):
-#     """Tests that config variable existence test falls over nicely with nextflow can't run"""
-#     bad_lint_obj = nf_core.pipelines.lint.PipelineLint("/non/existant/path")
-#     bad_lint_obj.check_nextflow_config()
-#
-#
-# def test_wrong_license_examples_with_failed(self):
-#     """Tests for checking the license test behavior"""
-#     for example in PATHS_WRONG_LICENSE_EXAMPLE:
-#         lint_obj = nf_core.pipelines.lint.PipelineLint(example)
-#         lint_obj.check_licence()
-#         expectations = {"failed": 1, "warned": 0, "passed": 0}
-#         self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_missing_license_example(self):
-#     """Tests for missing license behavior"""
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_MISSING_LICENSE_EXAMPLE)
-#     lint_obj.check_licence()
-#     expectations = {"failed": 1, "warned": 0, "passed": 0}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_readme_pass(self):
-#     """Tests that the pipeline README file checks work with a good example"""
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.minNextflowVersion = "20.04.0"
-#     lint_obj.files = ["environment.yml"]
-#     lint_obj.check_readme()
-#     expectations = {"failed": 0, "warned": 0, "passed": 2}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_readme_warn(self):
-#     """Tests that the pipeline README file checks fail """
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.minNextflowVersion = "0.28.0"
-#     lint_obj.check_readme()
-#     expectations = {"failed": 1, "warned": 0, "passed": 0}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_readme_fail(self):
-#     """Tests that the pipeline README file checks give warnings with a bad example"""
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_FAILING_EXAMPLE)
-#     lint_obj.files = ["environment.yml"]
-#     lint_obj.check_readme()
-#     expectations = {"failed": 0, "warned": 2, "passed": 0}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_dockerfile_pass(self):
-#     """Tests if a valid Dockerfile passes the lint checks"""
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.files = ["Dockerfile"]
-#     lint_obj.check_docker()
-#     expectations = {"failed": 0, "warned": 0, "passed": 1}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_version_consistency_pass(self):
-#     """Tests the workflow version and container version sucessfully"""
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.config["manifest.version"] = "0.4"
-#     lint_obj.config["process.container"] = "nfcore/tools:0.4"
-#     lint_obj.check_version_consistency()
-#     expectations = {"failed": 0, "warned": 0, "passed": 1}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_version_consistency_with_env_fail(self):
-#     """Tests the behaviour, when a git activity is a release
-#     and simulate wrong release tag"""
-#     os.environ["GITHUB_REF"] = "refs/tags/0.5"
-#     os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline"
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.config["manifest.version"] = "0.4"
-#     lint_obj.config["process.container"] = "nfcore/tools:0.4"
-#     lint_obj.check_version_consistency()
-#     expectations = {"failed": 1, "warned": 0, "passed": 0}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_version_consistency_with_numeric_fail(self):
-#     """Tests the behaviour, when a git activity is a release
-#     and simulate wrong release tag"""
-#     os.environ["GITHUB_REF"] = "refs/tags/0.5dev"
-#     os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline"
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.config["manifest.version"] = "0.4"
-#     lint_obj.config["process.container"] = "nfcore/tools:0.4"
-#     lint_obj.check_version_consistency()
-#     expectations = {"failed": 1, "warned": 0, "passed": 0}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_version_consistency_with_no_docker_version_fail(self):
-#     """Tests the behaviour, when a git activity is a release
-#     and simulate wrong missing docker version tag"""
-#     os.environ["GITHUB_REF"] = "refs/tags/0.4"
-#     os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline"
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.config["manifest.version"] = "0.4"
-#     lint_obj.config["process.container"] = "nfcore/tools"
-#     lint_obj.check_version_consistency()
-#     expectations = {"failed": 1, "warned": 0, "passed": 0}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_version_consistency_with_env_pass(self):
-#     """Tests the behaviour, when a git activity is a release
-#     and simulate correct release tag"""
-#     os.environ["GITHUB_REF"] = "refs/tags/0.4"
-#     os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline"
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.config["manifest.version"] = "0.4"
-#     lint_obj.config["process.container"] = "nfcore/tools:0.4"
-#     lint_obj.check_version_consistency()
-#     expectations = {"failed": 0, "warned": 0, "passed": 1}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_conda_env_pass(self):
-#     """ Tests the conda environment config checks with a working example """
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.files = ["environment.yml"]
-#     with open(os.path.join(PATH_WORKING_EXAMPLE, "environment.yml"), "r") as fh:
-#         lint_obj.conda_config = yaml.safe_load(fh)
-#     lint_obj.pipeline_name = "tools"
-#     lint_obj.config["manifest.version"] = "0.4"
-#     lint_obj.check_conda_env_yaml()
-#     expectations = {"failed": 0, "warned": 4, "passed": 5}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_conda_env_fail(self):
-#     """ Tests the conda environment config fails with a bad example """
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.files = ["environment.yml"]
-#     with open(os.path.join(PATH_WORKING_EXAMPLE, "environment.yml"), "r") as fh:
-#         lint_obj.conda_config = yaml.safe_load(fh)
-#     lint_obj.conda_config["dependencies"] = ["fastqc", "multiqc=0.9", "notapackaage=0.4"]
-#     lint_obj.pipeline_name = "not_tools"
-#     lint_obj.config["manifest.version"] = "0.23"
-#     lint_obj.check_conda_env_yaml()
-#     expectations = {"failed": 3, "warned": 1, "passed": 2}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# @mock.patch("requests.get")
-# @pytest.mark.xfail(raises=ValueError, strict=True)
-# def test_conda_env_timeout(self, mock_get):
-#     """ Tests the conda environment handles API timeouts """
-#     # Define the behaviour of the request get mock
-#     mock_get.side_effect = requests.exceptions.Timeout()
-#     # Now do the test
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.conda_config["channels"] = ["bioconda"]
-#     lint_obj.check_anaconda_package("multiqc=1.6")
-#
-# def test_conda_env_skip(self):
-#     """ Tests the conda environment config is skipped when not needed """
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.check_conda_env_yaml()
-#     expectations = {"failed": 0, "warned": 0, "passed": 0}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_conda_dockerfile_pass(self):
-#     """ Tests the conda Dockerfile test works with a working example """
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.version = "1.11"
-#     lint_obj.files = ["environment.yml", "Dockerfile"]
-#     with open(os.path.join(PATH_WORKING_EXAMPLE, "Dockerfile"), "r") as fh:
-#         lint_obj.dockerfile = fh.read().splitlines()
-#     lint_obj.conda_config["name"] = "nf-core-tools-0.4"
-#     lint_obj.check_conda_dockerfile()
-#     expectations = {"failed": 0, "warned": 0, "passed": 1}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_conda_dockerfile_fail(self):
-#     """ Tests the conda Dockerfile test fails with a bad example """
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.version = "1.11"
-#     lint_obj.files = ["environment.yml", "Dockerfile"]
-#     lint_obj.conda_config["name"] = "nf-core-tools-0.4"
-#     lint_obj.dockerfile = ["fubar"]
-#     lint_obj.check_conda_dockerfile()
-#     expectations = {"failed": 5, "warned": 0, "passed": 0}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_conda_dockerfile_skip(self):
-#     """ Tests the conda Dockerfile test is skipped when not needed """
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.check_conda_dockerfile()
-#     expectations = {"failed": 0, "warned": 0, "passed": 0}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_pip_no_version_fail(self):
-#     """ Tests the pip dependency version definition is present """
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.files = ["environment.yml"]
-#     lint_obj.pipeline_name = "tools"
-#     lint_obj.config["manifest.version"] = "0.4"
-#     lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc"]}]}
-#     lint_obj.check_conda_env_yaml()
-#     expectations = {"failed": 1, "warned": 0, "passed": 1}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_pip_package_not_latest_warn(self):
-#     """ Tests the pip dependency version definition is present """
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.files = ["environment.yml"]
-#     lint_obj.pipeline_name = "tools"
-#     lint_obj.config["manifest.version"] = "0.4"
-#     lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.4"]}]}
-#     lint_obj.check_conda_env_yaml()
-#     expectations = {"failed": 0, "warned": 1, "passed": 2}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# @mock.patch("requests.get")
-# def test_pypi_timeout_warn(self, mock_get):
-#     """Tests the PyPi connection and simulates a request timeout, which should
-#     return in an addiional warning in the linting"""
-#     # Define the behaviour of the request get mock
-#     mock_get.side_effect = requests.exceptions.Timeout()
-#     # Now do the test
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.files = ["environment.yml"]
-#     lint_obj.pipeline_name = "tools"
-#     lint_obj.config["manifest.version"] = "0.4"
-#     lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.5"]}]}
-#     lint_obj.check_conda_env_yaml()
-#     expectations = {"failed": 0, "warned": 1, "passed": 2}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# @mock.patch("requests.get")
-# def test_pypi_connection_error_warn(self, mock_get):
-#     """Tests the PyPi connection and simulates a connection error, which should
-#     result in an additional warning, as we cannot test if dependent module is latest"""
-#     # Define the behaviour of the request get mock
-#     mock_get.side_effect = requests.exceptions.ConnectionError()
-#     # Now do the test
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.files = ["environment.yml"]
-#     lint_obj.pipeline_name = "tools"
-#     lint_obj.config["manifest.version"] = "0.4"
-#     lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.5"]}]}
-#     lint_obj.check_conda_env_yaml()
-#     expectations = {"failed": 0, "warned": 1, "passed": 2}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_pip_dependency_fail(self):
-#     """ Tests the PyPi API package information query """
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.files = ["environment.yml"]
-#     lint_obj.pipeline_name = "tools"
-#     lint_obj.config["manifest.version"] = "0.4"
-#     lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["notpresent==1.5"]}]}
-#     lint_obj.check_conda_env_yaml()
-#     expectations = {"failed": 1, "warned": 0, "passed": 2}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_conda_dependency_fails(self):
-#     """Tests that linting fails, if conda dependency
-#     package version is not available on Anaconda.
-#     """
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.files = ["environment.yml"]
-#     lint_obj.pipeline_name = "tools"
-#     lint_obj.config["manifest.version"] = "0.4"
-#     lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": ["openjdk=0.0.0"]}
-#     lint_obj.check_conda_env_yaml()
-#     expectations = {"failed": 1, "warned": 0, "passed": 2}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_pip_dependency_fails(self):
-#     """Tests that linting fails, if conda dependency
-#     package version is not available on Anaconda.
-#     """
-#     lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     lint_obj.files = ["environment.yml"]
-#     lint_obj.pipeline_name = "tools"
-#     lint_obj.config["manifest.version"] = "0.4"
-#     lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==0.0"]}]}
-#     lint_obj.check_conda_env_yaml()
-#     expectations = {"failed": 1, "warned": 0, "passed": 2}
-#     self.assess_lint_status(lint_obj, **expectations)
-#
-# def test_pipeline_name_pass(self):
-#     """Tests pipeline name good pipeline example: lower case, no punctuation"""
-#     # good_lint_obj = nf_core.pipelines.lint.run_linting(PATH_WORKING_EXAMPLE)
-#     good_lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     good_lint_obj.pipeline_name = "tools"
-#     good_lint_obj.check_pipeline_name()
-#     expectations = {"failed": 0, "warned": 0, "passed": 1}
-#     self.assess_lint_status(good_lint_obj, **expectations)
-#
-# def test_pipeline_name_critical(self):
-#     """Tests that warning is returned for pipeline not adhering to naming convention"""
-#     critical_lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE)
-#     critical_lint_obj.pipeline_name = "Tools123"
-#     critical_lint_obj.check_pipeline_name()
-#     expectations = {"failed": 0, "warned": 1, "passed": 0}
-#     self.assess_lint_status(critical_lint_obj, **expectations)
-#