From e23869120ce320d3d4d4d58856751c358a2d1dd6 Mon Sep 17 00:00:00 2001
From: mashehu
Date: Mon, 15 Jul 2024 14:55:30 +0200
Subject: [PATCH 01/89] move pipelines tests into `pipelines` subdirectory to
 mirror command structure

---
 tests/{ => pipelines}/test_bump_version.py |  0
 tests/{ => pipelines}/test_create.py       |  2 +-
 tests/{ => pipelines}/test_create_app.py   |  0
 tests/{ => pipelines}/test_create_logo.py  |  0
 tests/{ => pipelines}/test_download.py     |  2 +-
 tests/{ => pipelines}/test_launch.py       |  2 +-
 tests/{ => pipelines}/test_lint.py         | 30 +++++++++++-----------
 tests/{ => pipelines}/test_list.py         |  0
 tests/{ => pipelines}/test_params_file.py  |  0
 tests/{ => pipelines}/test_refgenie.py     |  0
 tests/{ => pipelines}/test_schema.py       |  2 +-
 tests/{ => pipelines}/test_sync.py         |  2 +-
 12 files changed, 20 insertions(+), 20 deletions(-)
 rename tests/{ => pipelines}/test_bump_version.py (100%)
 rename tests/{ => pipelines}/test_create.py (99%)
 rename tests/{ => pipelines}/test_create_app.py (100%)
 rename tests/{ => pipelines}/test_create_logo.py (100%)
 rename tests/{ => pipelines}/test_download.py (99%)
 rename tests/{ => pipelines}/test_launch.py (99%)
 rename tests/{ => pipelines}/test_lint.py (90%)
 rename tests/{ => pipelines}/test_list.py (100%)
 rename tests/{ => pipelines}/test_params_file.py (100%)
 rename tests/{ => pipelines}/test_refgenie.py (100%)
 rename tests/{ => pipelines}/test_schema.py (99%)
 rename tests/{ => pipelines}/test_sync.py (99%)

diff --git a/tests/test_bump_version.py b/tests/pipelines/test_bump_version.py
similarity index 100%
rename from tests/test_bump_version.py
rename to tests/pipelines/test_bump_version.py
diff --git a/tests/test_create.py b/tests/pipelines/test_create.py
similarity index 99%
rename from tests/test_create.py
rename to tests/pipelines/test_create.py
index 313b6f5354..d93b26bd1a 100644
--- a/tests/test_create.py
+++ b/tests/pipelines/test_create.py
@@ -9,7 +9,7 @@
 
 import nf_core.pipelines.create.create
 
-from .utils import with_temporary_folder
+from ..utils import with_temporary_folder
 
 TEST_DATA_DIR = Path(__file__).parent / "data"
 PIPELINE_TEMPLATE_YML = TEST_DATA_DIR / "pipeline_create_template.yml"
diff --git a/tests/test_create_app.py b/tests/pipelines/test_create_app.py
similarity index 100%
rename from tests/test_create_app.py
rename to tests/pipelines/test_create_app.py
diff --git a/tests/test_create_logo.py b/tests/pipelines/test_create_logo.py
similarity index 100%
rename from tests/test_create_logo.py
rename to tests/pipelines/test_create_logo.py
diff --git a/tests/test_download.py b/tests/pipelines/test_download.py
similarity index 99%
rename from tests/test_download.py
rename to tests/pipelines/test_download.py
index c3a8c5546a..ebb76fef72 100644
--- a/tests/test_download.py
+++ b/tests/pipelines/test_download.py
@@ -18,7 +18,7 @@
 from nf_core.synced_repo import SyncedRepo
 from nf_core.utils import run_cmd
 
-from .utils import with_temporary_folder
+from ..utils import with_temporary_folder
 
 
 class DownloadTest(unittest.TestCase):
diff --git a/tests/test_launch.py b/tests/pipelines/test_launch.py
similarity index 99%
rename from tests/test_launch.py
rename to tests/pipelines/test_launch.py
index 9774853419..b4b285a3c9 100644
--- a/tests/test_launch.py
+++ b/tests/pipelines/test_launch.py
@@ -11,7 +11,7 @@
 import nf_core.pipelines.create.create
 import nf_core.pipelines.launch
 
-from .utils import create_tmp_pipeline, with_temporary_file, with_temporary_folder
+from ..utils import create_tmp_pipeline, with_temporary_file, with_temporary_folder
 
 
 class TestLaunch(TestCase):
diff --git a/tests/test_lint.py b/tests/pipelines/test_lint.py
similarity index 90%
rename from tests/test_lint.py
rename to tests/pipelines/test_lint.py
index 4e468b7538..460c8f03d8 100644
--- a/tests/test_lint.py
+++ b/tests/pipelines/test_lint.py
@@ -12,7 +12,7 @@
 import nf_core.pipelines.create.create
 import nf_core.pipelines.lint
 
-from .utils import with_temporary_folder
+from ..utils import with_temporary_folder
 
 
 class TestLint(unittest.TestCase):
@@ -179,33 +179,33 @@ def test_sphinx_md_files(self):
     #######################
     # SPECIFIC LINT TESTS #
     #######################
-    from .lint.actions_awsfulltest import (  # type: ignore[misc]
+    from ..lint.actions_awsfulltest import (  # type: ignore[misc]
         test_actions_awsfulltest_fail,
         test_actions_awsfulltest_pass,
         test_actions_awsfulltest_warn,
     )
-    from .lint.actions_awstest import (  # type: ignore[misc]
+    from ..lint.actions_awstest import (  # type: ignore[misc]
         test_actions_awstest_fail,
         test_actions_awstest_pass,
     )
-    from .lint.actions_ci import (  # type: ignore[misc]
+    from ..lint.actions_ci import (  # type: ignore[misc]
         test_actions_ci_fail_wrong_nf,
         test_actions_ci_fail_wrong_trigger,
         test_actions_ci_pass,
     )
-    from .lint.actions_schema_validation import (  # type: ignore[misc]
+    from ..lint.actions_schema_validation import (  # type: ignore[misc]
         test_actions_schema_validation_fails_for_additional_property,
         test_actions_schema_validation_missing_jobs,
         test_actions_schema_validation_missing_on,
     )
-    from .lint.configs import (  # type: ignore[misc]
+    from ..lint.configs import (  # type: ignore[misc]
         test_ignore_base_config,
         test_ignore_modules_config,
         test_superfluous_withname_in_base_config_fails,
         test_superfluous_withname_in_modules_config_fails,
         test_withname_in_modules_config,
     )
-    from .lint.files_exist import (  # type: ignore[misc]
+    from ..lint.files_exist import (  # type: ignore[misc]
         test_files_exist_depreciated_file,
         test_files_exist_fail_conditional,
         test_files_exist_missing_config,
@@ -214,13 +214,13 @@ def test_sphinx_md_files(self):
         test_files_exist_missing_main,
         test_files_exist_pass,
         test_files_exist_pass_conditional,
         test_files_exist_pass_conditional_nfschema,
     )
-    from .lint.files_unchanged import (  # type: ignore[misc]
+    from ..lint.files_unchanged import (  # type: ignore[misc]
         test_files_unchanged_fail,
         test_files_unchanged_pass,
     )
-    from .lint.merge_markers import test_merge_markers_found  # type: ignore[misc]
-    from .lint.modules_json import test_modules_json_pass  # type: ignore[misc]
-    from .lint.multiqc_config import (  # type: ignore[misc]
+    from ..lint.merge_markers import test_merge_markers_found  # type: ignore[misc]
+    from ..lint.modules_json import test_modules_json_pass  # type: ignore[misc]
+    from ..lint.multiqc_config import (  # type: ignore[misc]
         test_multiqc_config_exists,
         test_multiqc_config_ignore,
         test_multiqc_config_missing_report_section_order,
@@ -229,7 +229,7 @@ def test_sphinx_md_files(self):
         test_multiqc_config_report_comment_fail,
         test_multiqc_config_report_comment_release_fail,
         test_multiqc_config_report_comment_release_succeed,
         test_multiqc_incorrect_export_plots,
     )
-    from .lint.nextflow_config import (  # type: ignore[misc]
+    from ..lint.nextflow_config import (  # type: ignore[misc]
         test_allow_params_reference_in_main_nf,
         test_catch_params_assignment_in_main_nf,
         test_default_values_fail,
@@ -242,14 +242,14 @@ def test_sphinx_md_files(self):
         test_default_values_float,
         test_default_values_float_fail,
         test_default_values_ignored,
         test_default_values_match,
         test_nextflow_config_bad_name_fail,
         test_nextflow_config_dev_in_release_mode_failed,
         test_nextflow_config_example_pass,
         test_nextflow_config_missing_test_profile_failed,
     )
-    from .lint.nfcore_yml import (  # type: ignore[misc]
+    from ..lint.nfcore_yml import (  # type: ignore[misc]
         test_nfcore_yml_fail_nfcore_version,
         test_nfcore_yml_fail_repo_type,
         test_nfcore_yml_pass,
     )
-    from .lint.template_strings import (  # type: ignore[misc]
+    from ..lint.template_strings import (  # type: ignore[misc]
         test_template_strings,
         test_template_strings_ignore_file,
         test_template_strings_ignored,
     )
-    from .lint.version_consistency import test_version_consistency  # type: ignore[misc]
+    from ..lint.version_consistency import test_version_consistency  # type: ignore[misc]
diff --git a/tests/test_list.py b/tests/pipelines/test_list.py
similarity index 100%
rename from tests/test_list.py
rename to tests/pipelines/test_list.py
diff --git a/tests/test_params_file.py b/tests/pipelines/test_params_file.py
similarity index 100%
rename from tests/test_params_file.py
rename to tests/pipelines/test_params_file.py
diff --git a/tests/test_refgenie.py b/tests/pipelines/test_refgenie.py
similarity index 100%
rename from tests/test_refgenie.py
rename to tests/pipelines/test_refgenie.py
diff --git a/tests/test_schema.py b/tests/pipelines/test_schema.py
similarity index 99%
rename from tests/test_schema.py
rename to tests/pipelines/test_schema.py
index 4cb157c087..633de3db69 100644
--- a/tests/test_schema.py
+++ b/tests/pipelines/test_schema.py
@@ -15,7 +15,7 @@
 import nf_core.pipelines.create.create
 import nf_core.pipelines.schema
 
-from .utils import with_temporary_file, with_temporary_folder
+from ..utils import with_temporary_file, with_temporary_folder
 
 
 class TestSchema(unittest.TestCase):
diff --git a/tests/test_sync.py b/tests/pipelines/test_sync.py
similarity index 99%
rename from tests/test_sync.py
rename to tests/pipelines/test_sync.py
index ca90071d95..d7b73c7ffb 100644
--- a/tests/test_sync.py
+++ b/tests/pipelines/test_sync.py
@@ -14,7 +14,7 @@
 import nf_core.pipelines.create.create
 import nf_core.pipelines.sync
 
-from .utils import with_temporary_folder
+from ..utils import with_temporary_folder
 
 
 class TestModules(unittest.TestCase):

From 891d08ab9472e686686561d8b5426be760782268 Mon Sep 17 00:00:00 2001
From: mashehu
Date: Tue, 16 Jul 2024 16:21:43 +0200
Subject: [PATCH 02/89] move lint tests into pipeline folder

---
 tests/{lint => pipelines}/__init__.py           |  0
 tests/pipelines/lint/__init__.py                |  0
 .../lint/actions_awsfulltest.py                 |  0
 tests/{ => pipelines}/lint/actions_awstest.py   |  0
 tests/{ => pipelines}/lint/actions_ci.py        |  0
 .../lint/actions_schema_validation.py           |  0
 tests/{ => pipelines}/lint/configs.py           |  0
 tests/{ => pipelines}/lint/files_exist.py       |  7 +++---
 tests/{ => pipelines}/lint/files_unchanged.py   |  0
 tests/{ => pipelines}/lint/merge_markers.py     |  0
 tests/{ => pipelines}/lint/modules_json.py      |  0
 tests/{ => pipelines}/lint/multiqc_config.py    |  0
 tests/{ => pipelines}/lint/nextflow_config.py   | 22 +++++++++----------
 tests/{ => pipelines}/lint/nfcore_yml.py        |  0
 tests/{ => pipelines}/lint/template_strings.py  |  0
 .../lint/version_consistency.py                 |  2 +-
 16 files changed, 15 insertions(+), 16 deletions(-)
 rename tests/{lint => pipelines}/__init__.py (100%)
 create mode 100644 tests/pipelines/lint/__init__.py
 rename tests/{ => pipelines}/lint/actions_awsfulltest.py (100%)
 rename tests/{ => pipelines}/lint/actions_awstest.py (100%)
 rename tests/{ => pipelines}/lint/actions_ci.py (100%)
 rename tests/{ => pipelines}/lint/actions_schema_validation.py (100%)
 rename tests/{ => pipelines}/lint/configs.py (100%)
 rename tests/{ => pipelines}/lint/files_exist.py (95%)
 rename tests/{ => pipelines}/lint/files_unchanged.py (100%)
 rename tests/{ => pipelines}/lint/merge_markers.py (100%)
 rename tests/{ => pipelines}/lint/modules_json.py (100%)
 rename tests/{ => pipelines}/lint/multiqc_config.py (100%)
 rename tests/{ => pipelines}/lint/nextflow_config.py (95%)
 rename tests/{ => pipelines}/lint/nfcore_yml.py (100%)
 rename tests/{ => pipelines}/lint/template_strings.py (100%)
 rename tests/{ => pipelines}/lint/version_consistency.py (93%)

diff --git a/tests/lint/__init__.py b/tests/pipelines/__init__.py
similarity index 100%
rename from tests/lint/__init__.py
rename to tests/pipelines/__init__.py
diff --git a/tests/pipelines/lint/__init__.py b/tests/pipelines/lint/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/lint/actions_awsfulltest.py b/tests/pipelines/lint/actions_awsfulltest.py
similarity index 100%
rename from tests/lint/actions_awsfulltest.py
rename to tests/pipelines/lint/actions_awsfulltest.py
diff --git a/tests/lint/actions_awstest.py b/tests/pipelines/lint/actions_awstest.py
similarity index 100%
rename from tests/lint/actions_awstest.py
rename to tests/pipelines/lint/actions_awstest.py
diff --git a/tests/lint/actions_ci.py b/tests/pipelines/lint/actions_ci.py
similarity index 100%
rename from tests/lint/actions_ci.py
rename to tests/pipelines/lint/actions_ci.py
diff --git a/tests/lint/actions_schema_validation.py b/tests/pipelines/lint/actions_schema_validation.py
similarity index 100%
rename from tests/lint/actions_schema_validation.py
rename to tests/pipelines/lint/actions_schema_validation.py
diff --git a/tests/lint/configs.py b/tests/pipelines/lint/configs.py
similarity index 100%
rename from tests/lint/configs.py
rename to tests/pipelines/lint/configs.py
diff --git a/tests/lint/files_exist.py b/tests/pipelines/lint/files_exist.py
similarity index 95%
rename from tests/lint/files_exist.py
rename to tests/pipelines/lint/files_exist.py
index 679d209876..87508e78a8 100644
--- a/tests/lint/files_exist.py
+++ b/tests/pipelines/lint/files_exist.py
@@ -1,4 +1,3 @@
-import os
 from pathlib import Path
 
 import nf_core.pipelines.lint
@@ -31,12 +30,12 @@ def test_files_exist_missing_main(self):
     assert "File not found: `main.nf`" in results["warned"]
 
 
-def test_files_exist_depreciated_file(self):
-    """Check whether depreciated file issues warning"""
+def test_files_exist_deprecated_file(self):
+    """Check whether deprecated file issues warning"""
     new_pipeline = self._make_pipeline_copy()
 
     nf = Path(new_pipeline, "parameters.settings.json")
-    os.system(f"touch {nf}")
+    nf.touch()
 
     lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
     lint_obj._load()
diff --git a/tests/lint/files_unchanged.py b/tests/pipelines/lint/files_unchanged.py
similarity index 100%
rename from tests/lint/files_unchanged.py
rename to tests/pipelines/lint/files_unchanged.py
diff --git a/tests/lint/merge_markers.py b/tests/pipelines/lint/merge_markers.py
similarity index 100%
rename from tests/lint/merge_markers.py
rename to tests/pipelines/lint/merge_markers.py
diff --git a/tests/lint/modules_json.py b/tests/pipelines/lint/modules_json.py
similarity index 100%
rename from tests/lint/modules_json.py
rename to tests/pipelines/lint/modules_json.py
diff --git a/tests/lint/multiqc_config.py b/tests/pipelines/lint/multiqc_config.py
similarity index 100%
rename from tests/lint/multiqc_config.py
rename to tests/pipelines/lint/multiqc_config.py
diff --git a/tests/lint/nextflow_config.py b/tests/pipelines/lint/nextflow_config.py
similarity index 95%
rename from tests/lint/nextflow_config.py
rename to tests/pipelines/lint/nextflow_config.py
index b90298f542..d9157c90c6 100644
--- a/tests/lint/nextflow_config.py
+++ b/tests/pipelines/lint/nextflow_config.py
@@ -8,7 +8,7 @@
 
 def test_nextflow_config_example_pass(self):
     """Tests that config variable existence test works with good pipeline example"""
-    self.lint_obj._load_pipeline_config()
+    self.lint_obj.load_pipeline_config()
     result = self.lint_obj.nextflow_config()
     assert len(result["failed"]) == 0
     assert len(result["warned"]) == 0
@@ -18,7 +18,7 @@ def test_nextflow_config_bad_name_fail(self):
     """Tests that config variable existence test fails with bad pipeline name"""
     new_pipeline = self._make_pipeline_copy()
     lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
+    lint_obj.load_pipeline_config()
 
     lint_obj.nf_config["manifest.name"] = "bad_name"
     result = lint_obj.nextflow_config()
@@ -30,7 +30,7 @@ def test_nextflow_config_dev_in_release_mode_failed(self):
     """Tests that config variable existence test fails with dev version in release mode"""
     new_pipeline = self._make_pipeline_copy()
     lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
+    lint_obj.load_pipeline_config()
 
     lint_obj.release_mode = True
     lint_obj.nf_config["manifest.version"] = "dev_is_bad_name"
@@ -50,7 +50,7 @@ def test_nextflow_config_missing_test_profile_failed(self):
     with open(nf_conf_file, "w") as f:
         f.write(fail_content)
     lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
+    lint_obj.load_pipeline_config()
     result = lint_obj.nextflow_config()
     assert len(result["failed"]) > 0
     assert len(result["warned"]) == 0
@@ -60,7 +60,7 @@ def test_default_values_match(self):
    """Test that the default values in nextflow.config match the default values defined in the nextflow_schema.json."""
     new_pipeline = self._make_pipeline_copy()
     lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
+    lint_obj.load_pipeline_config()
     result = lint_obj.nextflow_config()
     assert len(result["failed"]) == 0
     assert len(result["warned"]) == 0
@@ -86,7 +86,7 @@ def test_default_values_fail(self):
     with open(nf_schema_file, "w") as f:
         f.write(fail_content)
     lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
+    lint_obj.load_pipeline_config()
     result = lint_obj.nextflow_config()
     assert len(result["failed"]) == 2
     assert (
@@ -107,7 +107,7 @@ def test_catch_params_assignment_in_main_nf(self):
     with open(main_nf_file, "a") as f:
         f.write("params.max_time = 42")
     lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
+    lint_obj.load_pipeline_config()
     result = lint_obj.nextflow_config()
     assert len(result["failed"]) == 1
     assert (
@@ -124,7 +124,7 @@ def test_allow_params_reference_in_main_nf(self):
     with open(main_nf_file, "a") as f:
         f.write("params.max_time == 42")
     lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
+    lint_obj.load_pipeline_config()
     result = lint_obj.nextflow_config()
     assert len(result["failed"]) == 0
 
@@ -139,7 +139,7 @@ def test_default_values_ignored(self):
         "repository_type: pipeline\nlint:\n  nextflow_config:\n    - config_defaults:\n      - params.max_cpus\n"
     )
     lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
+    lint_obj.load_pipeline_config()
     lint_obj._load_lint_config()
     result = lint_obj.nextflow_config()
     assert len(result["failed"]) == 0
@@ -173,7 +173,7 @@ def test_default_values_float(self):
         f.write(fail_content)
 
     lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
+    lint_obj.load_pipeline_config()
     result = lint_obj.nextflow_config()
     assert len(result["failed"]) == 0
     assert len(result["warned"]) == 0
@@ -203,7 +203,7 @@ def test_default_values_float_fail(self):
         f.write(fail_content)
 
     lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
+    lint_obj.load_pipeline_config()
     result = lint_obj.nextflow_config()
 
     assert len(result["failed"]) == 1
diff --git a/tests/lint/nfcore_yml.py b/tests/pipelines/lint/nfcore_yml.py
similarity index 100%
rename from tests/lint/nfcore_yml.py
rename to tests/pipelines/lint/nfcore_yml.py
diff --git a/tests/lint/template_strings.py b/tests/pipelines/lint/template_strings.py
similarity index 100%
rename from tests/lint/template_strings.py
rename to tests/pipelines/lint/template_strings.py
diff --git a/tests/lint/version_consistency.py b/tests/pipelines/lint/version_consistency.py
similarity index 93%
rename from tests/lint/version_consistency.py
rename to tests/pipelines/lint/version_consistency.py
index 1be57969f1..88eadce391 100644
--- a/tests/lint/version_consistency.py
+++ b/tests/pipelines/lint/version_consistency.py
@@ -6,7 +6,7 @@ def test_version_consistency(self):
     """Tests that config variable existence test fails with bad pipeline name"""
     new_pipeline = self._make_pipeline_copy()
     lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
+    lint_obj.load_pipeline_config()
     lint_obj.nextflow_config()
 
     result = lint_obj.version_consistency()

From 11130e64d513969cfe011e8bdec4ae7da62b488b Mon Sep 17 00:00:00 2001
From: mashehu
Date: Tue, 16 Jul 2024 16:38:59 +0200
Subject: [PATCH 03/89] make pipeline tests a subclass of TestPipeline

---
 tests/pipelines/test_bump_version.py | 127 ++++++++++-----------------
 tests/pipelines/test_create_logo.py  |  39 +++-----
 tests/pipelines/test_launch.py       |  27 ++----
 tests/test_pipelines.py              |  37 ++++++++
 4 files changed, 104 insertions(+), 126 deletions(-)
 create mode 100644 tests/test_pipelines.py

diff --git a/tests/pipelines/test_bump_version.py b/tests/pipelines/test_bump_version.py
index 260637c064..709e82427d 100644
--- a/tests/pipelines/test_bump_version.py
+++ b/tests/pipelines/test_bump_version.py
@@ -1,86 +1,55 @@
 """Some tests covering the bump_version code."""
 
-import os
-
 import yaml
 
 import nf_core.pipelines.bump_version
-import nf_core.pipelines.create.create
 import nf_core.utils
 
-
-# pass tmp_path as argument, which is a pytest feature
-# see: https://docs.pytest.org/en/latest/how-to/tmp_path.html#the-tmp-path-fixture
-def test_bump_pipeline_version(datafiles, tmp_path):
-    """Test that making a release with the working example files works"""
-
-    # Get a workflow and configs
-    test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline")
-    create_obj = nf_core.pipelines.create.create.PipelineCreate(
-        "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir
-    )
-    create_obj.init_pipeline()
-    pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir)
-    pipeline_obj._load()
-
-    # Bump the version number
-    nf_core.pipelines.bump_version.bump_pipeline_version(pipeline_obj, "1.1")
-    new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir)
-
-    # Check nextflow.config
-    new_pipeline_obj._load_pipeline_config()
-    assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1"
-
-
-def test_dev_bump_pipeline_version(datafiles, tmp_path):
-    """Test that making a release works with a dev name and a leading v"""
-    # Get a workflow and configs
-    test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline")
-    create_obj = nf_core.pipelines.create.create.PipelineCreate(
-        "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir
-    )
-    create_obj.init_pipeline()
-    pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir)
-    pipeline_obj._load()
-
-    # Bump the version number
-    nf_core.pipelines.bump_version.bump_pipeline_version(pipeline_obj, "v1.2dev")
-    new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir)
-
-    # Check the pipeline config
-    new_pipeline_obj._load_pipeline_config()
-    assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.2dev"
-
-
-def test_bump_nextflow_version(datafiles, tmp_path):
-    # Get a workflow and configs
-    test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline")
-    create_obj = nf_core.pipelines.create.create.PipelineCreate(
-        "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir
-    )
-    create_obj.init_pipeline()
-    pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir)
-    pipeline_obj._load()
-
-    # Bump the version number to a specific version, preferably one
-    # we're not already on
-    version = "22.04.3"
-    nf_core.pipelines.bump_version.bump_nextflow_version(pipeline_obj, version)
-    new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir)
-
-    # Check nextflow.config
-    new_pipeline_obj._load_pipeline_config()
-    assert new_pipeline_obj.nf_config["manifest.nextflowVersion"].strip("'\"") == f"!>={version}"
-
-    # Check .github/workflows/ci.yml
-    with open(new_pipeline_obj._fp(".github/workflows/ci.yml")) as fh:
-        ci_yaml = yaml.safe_load(fh)
-    assert ci_yaml["jobs"]["test"]["strategy"]["matrix"]["NXF_VER"][0] == version
-
-    # Check README.md
-    with open(new_pipeline_obj._fp("README.md")) as fh:
-        readme = fh.read().splitlines()
-    assert (
-        f"[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A5{version}-23aa62.svg)]"
-        "(https://www.nextflow.io/)" in readme
-    )
+from ..test_pipelines import TestPipelines
+
+
+class TestBumpVersion(TestPipelines):
+    def test_bump_pipeline_version(self):
+        """Test that making a release with the working example files works"""
+
+        # Bump the version number
+        nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "1.1")
+        new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir)
+
+        # Check nextflow.config
+        new_pipeline_obj.load_pipeline_config()
+        assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1"
+
+    def test_dev_bump_pipeline_version(self):
+        """Test that making a release works with a dev name and a leading v"""
+        # Bump the version number
+        nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "v1.2dev")
+        new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir)
+
+        # Check the pipeline config
+        new_pipeline_obj.load_pipeline_config()
+        assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.2dev"
+
+    def test_bump_nextflow_version(self):
+        # Bump the version number to a specific version, preferably one
+        # we're not already on
+        version = "22.04.3"
+        nf_core.pipelines.bump_version.bump_nextflow_version(self.pipeline_obj, version)
+        new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir)
+        new_pipeline_obj._load()
+
+        # Check nextflow.config
+        assert new_pipeline_obj.nf_config["manifest.nextflowVersion"].strip("'\"") == f"!>={version}"
+
+        # Check .github/workflows/ci.yml
+        with open(new_pipeline_obj._fp(".github/workflows/ci.yml")) as fh:
+            ci_yaml = yaml.safe_load(fh)
+        assert ci_yaml["jobs"]["test"]["strategy"]["matrix"]["NXF_VER"][0] == version
+
+        # Check README.md
+        with open(new_pipeline_obj._fp("README.md")) as fh:
+            readme = fh.read().splitlines()
+        assert (
+            f"[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A5{version}-23aa62.svg)]"
+            "(https://www.nextflow.io/)" in readme
+        )
diff --git a/tests/pipelines/test_create_logo.py b/tests/pipelines/test_create_logo.py
index 8942894ce2..9ff9fce562 100644
--- a/tests/pipelines/test_create_logo.py
+++ b/tests/pipelines/test_create_logo.py
@@ -1,35 +1,24 @@
 """Test covering the create-logo command."""
 
-import tempfile
-import unittest
 from pathlib import Path
 
 import nf_core.pipelines.create_logo
 
+from ..test_pipelines import TestPipelines
 
-class TestCreateLogo(unittest.TestCase):
-    """Class for create-logo tests"""
-
-    # create tempdir in setup step
-    def setUp(self):
-        self.tempdir = tempfile.TemporaryDirectory()
-        self.tempdir_path = Path(self.tempdir.name)
-
-    # delete tempdir in teardown step
-    def tearDown(self):
-        self.tempdir.cleanup()
 
+class TestCreateLogo(TestPipelines):
     def test_create_logo_png(self):
         """Test that the create-logo command works for PNGs"""
 
         # Create a logo
-        logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path)
+        logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir)
         # Check that the file exists
         self.assertTrue(logo_fn.is_file())
         # Check that the file is a PNG
         self.assertTrue(logo_fn.suffix == ".png")
         # Check that the file is the right size
-        fixture_fn = Path(__file__).parent / "fixtures" / "create_logo.png"
+        fixture_fn = Path(__file__).parent.parent / "fixtures" / "create_logo.png"
         # allow some flexibility in the file size
         self.assertTrue(int(logo_fn.stat().st_size / 1000) == int(fixture_fn.stat().st_size / 1000))
 
@@ -37,13 +26,13 @@ def test_create_logo_png_dark(self):
         """Test that the create-logo command works for dark PNGs"""
 
         # Create a logo
-        logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path, theme="dark")
+        logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir, theme="dark")
         # Check that the file exists
         self.assertTrue(logo_fn.is_file())
         # Check that the file is a PNG
         self.assertTrue(logo_fn.suffix == ".png")
         # Check that the file is the right size
-        fixture_fn = Path(__file__).parent / "fixtures" / "create_logo_dark.png"
+        fixture_fn = Path(__file__).parent.parent / "fixtures" / "create_logo_dark.png"
         # allow some flexibility in the file size
         self.assertTrue(int(logo_fn.stat().st_size / 1000) == int(fixture_fn.stat().st_size / 1000))
 
@@ -51,13 +40,13 @@ def test_create_log_png_width(self):
         """Test that the create-logo command works for PNGs with a custom width"""
 
         # Create a logo
-        logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path, width=100)
+        logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir, width=100)
         # Check that the file exists
         self.assertTrue(logo_fn.is_file())
         # Check that the file is a PNG
         self.assertTrue(logo_fn.suffix == ".png")
         # Check that the file is the right size
-        fixture_fn = Path(__file__).parent / "fixtures" / "create_logo_width100.png"
+        fixture_fn = Path(__file__).parent.parent / "fixtures" / "create_logo_width100.png"
         # allow some flexibility in the file size
         self.assertTrue(int(logo_fn.stat().st_size / 100) == int(fixture_fn.stat().st_size / 100))
 
@@ -65,12 +54,12 @@ def test_create_logo_twice(self):
         """Test that the create-logo command returns an info message when run twice"""
 
         # Create a logo
-        logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path)
+        logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir)
         # Check that the file exists
         self.assertTrue(logo_fn.is_file())
         # Create the logo again and capture the log output
         with self.assertLogs(level="INFO") as log:
-            nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path)
+            nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir)
         # Check that the log message is correct
         self.assertIn("Logo already exists", log.output[0])
 
@@ -79,14 +68,14 @@ def test_create_logo_without_text_fail(self):
 
         # Create a logo
         with self.assertRaises(UserWarning):
-            nf_core.pipelines.create_logo.create_logo("", self.tempdir_path)
+            nf_core.pipelines.create_logo.create_logo("", self.pipeline_dir)
 
     def test_create_logo_with_filename(self):
         """Test that the create-logo command works with a custom filename"""
 
         # Create a logo
         logo_fn = nf_core.pipelines.create_logo.create_logo(
-            "pipes", Path(self.tempdir_path / "custom_dir"), filename="custom"
+            "pipes", Path(self.pipeline_dir / "custom_dir"), filename="custom"
         )
         # Check that the file exists
         self.assertTrue(logo_fn.is_file())
@@ -99,7 +88,7 @@ def test_create_logo_svg(self):
         """Test that the create-logo command works for SVGs"""
 
         # Create a logo
-        logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path, format="svg")
+        logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir, format="svg")
         # Check that the file exists
         self.assertTrue(logo_fn.is_file())
         # Check that the file is a SVG
@@ -115,7 +104,7 @@ def test_create_logo_svg_dark(self):
         """Test that the create-logo command works for svgs and dark theme"""
 
         # Create a logo
-        logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path, format="svg", theme="dark")
+        logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir, format="svg", theme="dark")
         # Check that the file exists
         self.assertTrue(logo_fn.is_file())
         # Check that the file is a SVG
diff --git a/tests/pipelines/test_launch.py b/tests/pipelines/test_launch.py
index b4b285a3c9..c9efa5aa63 100644
--- a/tests/pipelines/test_launch.py
+++ b/tests/pipelines/test_launch.py
@@ -2,36 +2,18 @@
 
 import json
 import os
-import shutil
-from pathlib import Path
-from unittest import TestCase, mock
+from unittest import mock
 
 import pytest
 
 import nf_core.pipelines.create.create
 import nf_core.pipelines.launch
 
-from ..utils import create_tmp_pipeline, with_temporary_file, with_temporary_folder
+from ..test_pipelines import TestPipelines
+from ..utils import with_temporary_file, with_temporary_folder
 
 
-class TestLaunch(TestCase):
-    """Class for launch tests"""
-
-    def setUp(self):
-        """Create a new PipelineSchema and Launch objects"""
-        self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline()
-        self.nf_params_fn = os.path.join(self.tmp_dir, "nf-params.json")
-        self.launcher = nf_core.pipelines.launch.Launch(self.pipeline_dir, params_out=self.nf_params_fn)
-
-    def tearDown(self):
-        """Clean up temporary files and folders"""
-
-        if Path(self.nf_params_fn).exists():
-            Path(self.nf_params_fn).unlink()
-
-        if Path(self.tmp_dir).exists():
-            shutil.rmtree(self.tmp_dir)
-
+class TestLaunch(TestPipelines):
     @mock.patch.object(nf_core.pipelines.launch.Launch, "prompt_web_gui", side_effect=[True])
     @mock.patch.object(nf_core.pipelines.launch.Launch, "launch_web_gui")
     def test_launch_pipeline(self, mock_webbrowser, mock_lauch_web_gui):
@@ -43,6 +25,7 @@ def test_launch_file_exists(self, mock_confirm):
         """Test that we detect an existing params file and return"""
         # Make an empty params file to be overwritten
         open(self.nf_params_fn, "a").close()
+
         # Try and to launch, return with error
         assert self.launcher.launch_pipeline() is False
diff --git a/tests/test_pipelines.py b/tests/test_pipelines.py
new file mode 100644
index 0000000000..461f046f52
--- /dev/null
+++ b/tests/test_pipelines.py
@@ -0,0 +1,37 @@
+import shutil
+from pathlib import Path
+from unittest import TestCase
+
+from git import Repo
+
+import nf_core.pipelines.launch
+import nf_core.pipelines.lint
+from nf_core.utils import Pipeline
+
+from .utils import create_tmp_pipeline
+
+
+class TestPipelines(TestCase):
+    def setUp(self) -> None:
+        """Create a new Pipeline for testing"""
+        self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline()
+        self.pipeline_obj = Pipeline(self.pipeline_dir)
+        Repo.init(self.pipeline_dir)
+        self.pipeline_obj._load()
+
+        self.nf_params_fn = Path(self.pipeline_dir, "nf-params.json")
+        self.launcher = nf_core.pipelines.launch.Launch(self.pipeline_dir, params_out=self.nf_params_fn)
+
+        self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir)
+
+    def tearDown(self) -> None:
+        """Remove the test pipeline directory"""
+        shutil.rmtree(self.tmp_dir)
+
+    def _make_pipeline_copy(self):
+        """Make a copy of the test pipeline that can be edited
+
+        Returns: Path to new temp directory with pipeline"""
+        new_pipeline = self.tmp_dir / "nf-core-testpipeline-copy"
+        shutil.copytree(self.pipeline_dir, new_pipeline)
+        return new_pipeline

From 20f0ba6ca7f30acb113e5234e60693eba89a410d Mon Sep 17 00:00:00 2001
From: mashehu
Date: Tue, 16 Jul 2024 16:40:14 +0200
Subject: [PATCH 04/89] move pytest.ini into pyproject.toml

---
 pyproject.toml | 3 +++
 pytest.ini     | 7 -------
 2 files changed, 3 insertions(+), 7 deletions(-)
 delete mode 100644 pytest.ini

diff --git a/pyproject.toml b/pyproject.toml
index 8168bd7c11..4499324445 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,6 +5,9 @@ requires = ["setuptools>=40.6.0", "wheel"]
 [tool.pytest.ini_options]
 markers = ["datafiles: load datafiles"]
 testpaths = ["tests"]
+python_files = ["test_*.py"]
+# automatically run coroutine tests with asyncio
+asyncio_mode = "auto"
 norecursedirs = [
     ".*",
     "build",
diff --git a/pytest.ini b/pytest.ini
deleted file mode 100644
index fcbd03fa45..0000000000
--- a/pytest.ini
+++ /dev/null
@@ -1,7 +0,0 @@
-[pytest]
-testpaths =
-    tests
-python_files = test_*.py
-
-# automatically run coroutine tests with asyncio
-asyncio_mode = auto

From 36050fd44b5dcfb70c6d4fe8b40cb6f45ebc5fd2 Mon Sep 17 00:00:00 2001
From: mashehu
Date: Tue, 16 Jul 2024 16:54:32 +0200
Subject: [PATCH 05/89] migrate more tests to pathlib, use commonly shared
 TEST_DATA_DIR var

---
 nf_core/components/lint/__init__.py |  2 +-
 nf_core/modules/modules_json.py     |  8 ++---
 nf_core/pipelines/create/create.py  |  6 ++--
 nf_core/pipelines/download.py       |  7 ++--
 nf_core/pipelines/lint/__init__.py  |  2 +-
 nf_core/pipelines/sync.py           |  5 +--
 nf_core/utils.py                    | 56 +++++++++++++++++------------
 tests/pipelines/test_create.py      |  4 +--
 tests/pipelines/test_download.py    | 12 +++----
 tests/test_utils.py                 |  4 +--
 tests/utils.py                      | 13 +++----
 11 files changed, 66 insertions(+), 53 deletions(-)

diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py
index 499d31e71e..6d47f1e7a8 100644
--- a/nf_core/components/lint/__init__.py
+++ b/nf_core/components/lint/__init__.py
@@ -117,7 +117,7 @@ def __init__(
                 )
                 for comp in self.get_local_components()
             ]
-            self.config = nf_core.utils.fetch_wf_config(self.dir, cache_config=True)
+            self.config = nf_core.utils.fetch_wf_config(Path(self.dir), cache_config=True)
         else:
             component_dir = Path(
                 self.dir,
diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py
index b0a4fa661f..ca5d9de072 100644
--- a/nf_core/modules/modules_json.py
+++ b/nf_core/modules/modules_json.py
@@ -38,10 +38,10 @@ def __init__(self, pipeline_dir: str):
         Args:
             pipeline_dir (str): The pipeline directory
         """
-        self.dir = pipeline_dir
-        self.modules_dir = Path(self.dir, "modules")
-        self.subworkflows_dir = Path(self.dir, "subworkflows")
-        self.modules_json_path = Path(self.dir, "modules.json")
+        self.dir = Path(pipeline_dir)
+        self.modules_dir = self.dir / "modules"
+        self.subworkflows_dir = self.dir / "subworkflows"
+        self.modules_json_path = self.dir / "modules.json"
         self.modules_json = None
         self.pipeline_modules = None
         self.pipeline_subworkflows = None
diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py
index bdbbca646f..e9f37306f5 100644
--- a/nf_core/pipelines/create/create.py
+++ b/nf_core/pipelines/create/create.py
@@ -50,7 +50,7 @@ def __init__(
         version: str = "1.0.0dev",
         no_git: bool = False,
         force: bool = False,
-        outdir: Optional[str] = None,
+        outdir: Optional[Union[Path, str]] = None,
         template_config: Optional[Union[str, CreateConfig, Path]] = None,
         organisation: str = "nf-core",
         from_config_file: bool = False,
@@ -61,7 +61,7 @@ def __init__(
             self.config = template_config
         elif from_config_file:
             # Try reading config file
-            _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else ".")
+            _, config_yml = nf_core.utils.load_tools_config(str(outdir) if outdir else ".")
             # Obtain a CreateConfig object from `.nf-core.yml` config file
             if "template" in config_yml:
                 self.config = CreateConfig(**config_yml["template"])
@@ -372,6 +372,8 @@ def render_template(self):
         config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir)
         with open(config_fn, "w") as fh:
             config_yml.update(template=self.config.model_dump())
+            # convert posix path to string for yaml dump
+            config_yml["template"]["outdir"] = str(config_yml["template"]["outdir"])
             yaml.safe_dump(config_yml, fh)
         log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'")
         run_prettier_on_file(self.outdir / config_fn)
diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py
index 43b63d8192..f16430b0a8 100644
--- a/nf_core/pipelines/download.py
+++ b/nf_core/pipelines/download.py
@@ -10,6 +10,7 @@
 import tarfile
 import textwrap
 from datetime import datetime
+from pathlib import Path
 from typing import List, Optional, Tuple
 from zipfile import ZipFile
 
@@ -697,7 +698,7 @@ def wf_use_local_configs(self, revision_dirname):
         with open(nfconfig_fn, "w") as nfconfig_fh:
             nfconfig_fh.write(nfconfig)
 
-    def find_container_images(self, workflow_directory):
+    def find_container_images(self, workflow_directory: str) -> None:
         """Find container image names for workflow.
 
         Starts by using `nextflow config` to pull out any process.container
@@ -716,7 +717,7 @@ def find_container_images(self, workflow_directory: str) -> None:
         module_findings = []
 
         # Use linting code to parse the pipeline nextflow config
-        self.nf_config = nf_core.utils.fetch_wf_config(workflow_directory)
+        self.nf_config = nf_core.utils.fetch_wf_config(Path(workflow_directory))
 
         # Find any config variables that look like a container
         for k, v in self.nf_config.items():
@@ -1007,7 +1008,7 @@ def gather_registries(self, workflow_directory: str) -> None:
 
         # should exist, because find_container_images() is always called before
         if not self.nf_config:
-            self.nf_config = nf_core.utils.fetch_wf_config(workflow_directory)
+            self.nf_config = nf_core.utils.fetch_wf_config(Path(workflow_directory))
 
         # Select registries defined in pipeline config
         configured_registries = [
diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py
index cf5ba3913f..b40f0ee292 100644
--- a/nf_core/pipelines/lint/__init__.py
+++ b/nf_core/pipelines/lint/__init__.py
@@ -559,7 +559,7 @@ def run_linting(
 
     # Load the various pipeline configs
     lint_obj._load_lint_config()
-    lint_obj._load_pipeline_config()
+    lint_obj.load_pipeline_config()
    lint_obj._list_files()
 
     # Create the modules lint object
diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py
index f9a874c7e4..a309fa8c3b 100644
--- a/nf_core/pipelines/sync.py
+++ b/nf_core/pipelines/sync.py
@@ -5,6 +5,7 @@
 import os
 import re
 import shutil
+from pathlib import Path
 
 import git
 import questionary
@@ -69,7 +70,7 @@ def __init__(
     ):
         """Initialise syncing object"""
 
-        self.pipeline_dir = os.path.abspath(pipeline_dir)
+        self.pipeline_dir = Path(pipeline_dir).resolve()
         self.from_branch = from_branch
         self.original_branch = None
         self.original_merge_branch = f"nf-core-template-merge-{nf_core.__version__}"
@@ -209,7 +210,7 @@ def get_wf_config(self):
 
         # Fetch workflow variables
         log.debug("Fetching workflow config variables")
-        self.wf_config = nf_core.utils.fetch_wf_config(self.pipeline_dir)
+        self.wf_config = nf_core.utils.fetch_wf_config(Path(self.pipeline_dir))
 
         # Check that we have the required variables
         for rvar in self.required_config_vars:
diff --git a/nf_core/utils.py b/nf_core/utils.py
index 48d1c3ca3d..cf624935b3 100644
--- a/nf_core/utils.py
+++ b/nf_core/utils.py
@@ -141,7 +141,7 @@ def __init__(self, wf_path):
         self.files = []
         self.git_sha = None
         self.minNextflowVersion = None
-        self.wf_path = wf_path
+        self.wf_path = Path(wf_path)
         self.pipeline_name = None
         self.pipeline_prefix = None
         self.schema_obj = None
@@ -156,13 +156,15 @@ def __init__(self, wf_path):
         if os.environ.get("GITHUB_PR_COMMIT", "") != "":
             self.git_sha = os.environ["GITHUB_PR_COMMIT"]
 
-    def _load(self):
+    def __repr__(self) -> str:
+        return f"<Pipeline '{self.wf_path}'>"
+
+    def _load(self) -> bool:
         """Run core load functions"""
-        self._list_files()
-        self._load_pipeline_config()
-        self._load_conda_environment()
 
-    def _list_files(self):
+        return self._list_files() and self.load_pipeline_config() and self._load_conda_environment()
+
+    def _list_files(self) -> bool:
         """Get a list of all files in the pipeline"""
         try:
             # First, try to get the list of files using git
@@ -174,18 +176,36 @@ def _list_files(self):
                     self.files.append(full_fn)
                 else:
                     log.debug(f"`git ls-files` returned '{full_fn}' but could not open it!")
+            return True
         except subprocess.CalledProcessError as e:
             # Failed, so probably not initialised as a git repository - just a list of all files
             log.debug(f"Couldn't call 'git ls-files': {e}")
             self.files = []
             for subdir, _, files in os.walk(self.wf_path):
                 for fn in files:
-                    self.files.append(Path(subdir) / fn)
+                    self.files.append(Path(subdir, str(fn)))
+            if len(self.files) > 0:
+                return True
+            return False
+
+    def _load_conda_environment(self) -> bool:
+        """Try to load the pipeline environment.yml file, if it exists"""
+        try:
+            with open(Path(self.wf_path, "environment.yml")) as fh:
+                self.conda_config = yaml.safe_load(fh)
+            return True
+        except FileNotFoundError:
+            log.debug("No conda `environment.yml` file found.")
+            return False
 
-    def _load_pipeline_config(self):
+    def _fp(self, fn):
+        """Convenience function to get full path to a file in the pipeline"""
+        return os.path.join(self.wf_path, fn)
+
+    def load_pipeline_config(self) -> bool:
         """Get the nextflow config for this pipeline
 
-        Once loaded, set a few convienence reference class attributes
+        Once loaded, set a few convenience reference class attributes
         """
         self.nf_config = fetch_wf_config(self.wf_path)
 
@@ -194,18 +214,8 @@ def _load_pipeline_config(self):
         nextflow_version_match = re.search(r"[0-9\.]+(-edge)?", self.nf_config.get("manifest.nextflowVersion", ""))
         if nextflow_version_match:
             self.minNextflowVersion = nextflow_version_match.group(0)
-
-    def _load_conda_environment(self):
-        """Try to load the pipeline environment.yml file, if it exists"""
-        try:
-            with open(os.path.join(self.wf_path, "environment.yml")) as fh:
-                self.conda_config = yaml.safe_load(fh)
-        except FileNotFoundError:
-            log.debug("No conda `environment.yml` file found.")
-
-    def _fp(self, fn):
-        """Convenience function to get full path to a file in the pipeline"""
-        return os.path.join(self.wf_path, fn)
+            return True
+        return False
 
 
 def is_pipeline_directory(wf_path):
@@ -229,7 +239,7 @@ def is_pipeline_directory(wf_path):
             raise UserWarning(warning)
 
 
-def fetch_wf_config(wf_path: str, cache_config: bool = True) -> dict:
+def fetch_wf_config(wf_path: Path, cache_config: bool = True) -> dict:
     """Uses Nextflow to retrieve the the configuration variables
     from a Nextflow workflow.
@@ -263,7 +273,7 @@ def fetch_wf_config(wf_path: Path, cache_config: bool = True) -> dict:
     concat_hash = ""
     for fn in ["nextflow.config", "main.nf"]:
         try:
-            with open(Path(wf_path, fn), "rb") as fh:
+            with open(wf_path / fn, "rb") as fh:
                 concat_hash += hashlib.sha256(fh.read()).hexdigest()
         except FileNotFoundError:
             pass
diff --git a/tests/pipelines/test_create.py b/tests/pipelines/test_create.py
index d93b26bd1a..bb27445e70 100644
--- a/tests/pipelines/test_create.py
+++ b/tests/pipelines/test_create.py
@@ -2,16 +2,14 @@
 
 import os
 import unittest
-from pathlib import Path
 
 import git
 import yaml
 
 import nf_core.pipelines.create.create
 
-from ..utils import with_temporary_folder
+from ..utils import TEST_DATA_DIR, with_temporary_folder
 
-TEST_DATA_DIR = Path(__file__).parent / "data"
 PIPELINE_TEMPLATE_YML = TEST_DATA_DIR / "pipeline_create_template.yml"
 PIPELINE_TEMPLATE_YML_SKIP = TEST_DATA_DIR / "pipeline_create_template_skip.yml"
diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py
index ebb76fef72..7ed6de3fa7 100644
--- a/tests/pipelines/test_download.py
+++ b/tests/pipelines/test_download.py
@@ -18,7 +18,7 @@
 from nf_core.synced_repo import SyncedRepo
 from nf_core.utils import run_cmd
 
-from ..utils import with_temporary_folder
+from ..utils import TEST_DATA_DIR, with_temporary_folder
 
 
 class DownloadTest(unittest.TestCase):
@@ -139,12 +139,12 @@ def test_wf_use_local_configs(self, tmp_path):
         with tempfile.TemporaryDirectory() as test_outdir:
             download_obj = DownloadWorkflow(pipeline="dummy", revision="1.2.0", outdir=test_outdir)
-            shutil.copytree(test_pipeline_dir, os.path.join(test_outdir, "workflow"))
+            shutil.copytree(test_pipeline_dir, Path(test_outdir, "workflow"))
             download_obj.download_configs()
 
             # Test the function
             download_obj.wf_use_local_configs("workflow")
-            wf_config = nf_core.utils.fetch_wf_config(os.path.join(test_outdir, "workflow"), cache_config=False)
+            wf_config = nf_core.utils.fetch_wf_config(Path(test_outdir, "workflow"), cache_config=False)
             assert wf_config["params.custom_config_base"] == f"{test_outdir}/workflow/../configs/"
 
     #
@@ -173,7 +173,7 @@ def test_find_container_images_config_basic(self, tmp_path, mock_fetch_wf_config
     @mock.patch("nf_core.utils.fetch_wf_config")
     def test__find_container_images_config_nextflow(self, tmp_path, mock_fetch_wf_config):
         download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path)
-        result = run_cmd("nextflow", f"config -flat {Path(__file__).resolve().parent / 'data/mock_config_containers'}")
+        result = run_cmd("nextflow", f"config -flat {TEST_DATA_DIR}'/mock_config_containers'")
         if result is not None:
             nfconfig_raw, _ = result
             config = {}
@@ -203,7 +203,7 @@ def test__find_container_images_config_nextflow(self, tmp_path, mock_fetch_wf_co
     def test_find_container_images_modules(self, tmp_path, mock_fetch_wf_config):
         download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path)
         mock_fetch_wf_config.return_value = {}
-        download_obj.find_container_images(Path(__file__).resolve().parent / "data/mock_module_containers")
+        download_obj.find_container_images(TEST_DATA_DIR / "mock_module_containers")
 
         # mock_docker_single_quay_io.nf
         assert "quay.io/biocontainers/singlequay:1.9--pyh9f0ad1d_0" in download_obj.containers
@@ -546,7 +546,7 @@ def test_remote_container_functionality(self, tmp_dir):
             outdir=os.path.join(tmp_dir, "new"),
             revision="3.9",
             compress_type="none",
-            container_cache_index=Path(__file__).resolve().parent / "data/testdata_remote_containers.txt",
+            container_cache_index=TEST_DATA_DIR / "data/testdata_remote_containers.txt",
         )
         download_obj.include_configs = False  # suppress prompt, because stderr.is_interactive doesn't.
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 89ba0444ff..7afe1a532f 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -95,9 +95,9 @@ def test_rich_force_colours_true(self):
         os.environ.pop("PY_COLORS", None)
         assert nf_core.utils.rich_force_colors() is True
 
-    def test_load_pipeline_config(self):
+    def testload_pipeline_config(self):
         """Load the pipeline Nextflow config"""
-        self.pipeline_obj._load_pipeline_config()
+        self.pipeline_obj.load_pipeline_config()
         assert self.pipeline_obj.nf_config["dag.enabled"] == "true"
 
     # TODO nf-core: Assess and strip out if no longer required for DSL2
diff --git a/tests/utils.py b/tests/utils.py
index 9a0fd0896f..7be0799d6f 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -3,8 +3,8 @@
 """
 
 import functools
-import os
 import tempfile
+from pathlib import Path
 from typing import Any, Callable, Tuple
 
 import responses
@@ -12,6 +12,7 @@
 import nf_core.modules
 import nf_core.pipelines.create.create
 
+TEST_DATA_DIR = Path(__file__).parent / "data"
 OLD_TRIMGALORE_SHA = "9b7a3bdefeaad5d42324aa7dd50f87bea1b04386"
 OLD_TRIMGALORE_BRANCH = "mimic-old-trimgalore"
 GITLAB_URL = "https://gitlab.com/nf-core/modules-test.git"
@@ -93,14 +94,14 @@ def mock_biocontainers_api_calls(rsps: responses.RequestsMock, module: str, vers
         rsps.get(biocontainers_api_url, json=biocontainers_mock, status=200)
 
 
-def create_tmp_pipeline() -> Tuple[str, str, str, str]:
+def create_tmp_pipeline() -> Tuple[Path, Path, str, Path]:
     """Create a new Pipeline for testing"""
 
-    tmp_dir = tempfile.mkdtemp()
-    root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
-    template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template")
+    tmp_dir = Path(tempfile.TemporaryDirectory().name)
+    root_repo_dir = Path(__file__).resolve().parent.parent
+    template_dir = Path(root_repo_dir, "nf_core", "pipeline-template")
     pipeline_name = "mypipeline"
-    pipeline_dir = os.path.join(tmp_dir, pipeline_name)
+    pipeline_dir = Path(tmp_dir, pipeline_name)
 
     nf_core.pipelines.create.create.PipelineCreate(
         pipeline_name, "it is mine", "me", no_git=True, outdir=pipeline_dir

From fb274107d592b16e7634cc64db149a6b514e88db Mon Sep 17 00:00:00 2001
From: mashehu
Date: Tue, 16 Jul 2024 16:55:00 +0200
Subject: [PATCH 06/89] use testpipeline class in test_lint

---
 tests/pipelines/test_lint.py | 83 +++++++++++-------------------------
 1 file changed, 26 insertions(+), 57 deletions(-)

diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py
index 460c8f03d8..8530fea5ad 100644
--- a/tests/pipelines/test_lint.py
+++ b/tests/pipelines/test_lint.py
@@ -3,51 +3,20 @@
 import fnmatch
 import json
 import os
-import shutil
-import tempfile
-import unittest
+from pathlib import Path
 
 import yaml
 
 import nf_core.pipelines.create.create
 import nf_core.pipelines.lint
 
+from ..test_pipelines import TestPipelines
 from ..utils import with_temporary_folder
 
 
-class TestLint(unittest.TestCase):
+class TestLint(TestPipelines):
     """Class for lint tests"""
 
-    def setUp(self):
-        """Function that runs at start of tests for common resources
-
-        Use nf_core.pipelines.create() to make a pipeline that we can use for testing
-        """
-
-        self.tmp_dir = tempfile.mkdtemp()
-        self.test_pipeline_dir = os.path.join(self.tmp_dir, "nf-core-testpipeline")
-        self.create_obj = nf_core.pipelines.create.create.PipelineCreate(
-            "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir
-        )
-        self.create_obj.init_pipeline()
-
-        # Base lint object on this directory
-        self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.test_pipeline_dir)
-
-    def tearDown(self):
-        """Clean up temporary files and folders"""
-
-        if os.path.exists(self.tmp_dir):
-            shutil.rmtree(self.tmp_dir)
-
-    def _make_pipeline_copy(self):
-        """Make a copy of the test pipeline that can be edited
-
-        Returns: Path to new temp directory with pipeline"""
-        new_pipeline = os.path.join(self.tmp_dir, "nf-core-testpipeline-copy")
-        shutil.copytree(self.test_pipeline_dir, new_pipeline)
-        return new_pipeline
-
     ##########################
     # CORE lint.py FUNCTIONS #
     ##########################
@@ -56,7 +25,7 @@ def test_run_linting_function(self):
         We don't really check any of this code as it's just a series of function calls
         and we're testing each of those individually.
         This is mostly to check for syntax errors."""
-        nf_core.pipelines.lint.run_linting(self.test_pipeline_dir, False)
+        nf_core.pipelines.lint.run_linting(self.pipeline_dir, False)
 
     def test_init_pipeline_lint(self):
         """Simply create a PipelineLint object.
@@ -64,11 +33,11 @@ def test_init_pipeline_lint(self):
         This checks that all of the lint test imports are working properly,
         we also check that the git sha was found and that the release flag works properly
         """
-        lint_obj = nf_core.pipelines.lint.PipelineLint(self.test_pipeline_dir, True)
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir, True)
 
         # Tests that extra test is added for release mode
         assert "version_consistency" in lint_obj.lint_tests
-
+        assert lint_obj.git_sha
         # Tests that parent nf_core.utils.Pipeline class __init__() is working to find git hash
         assert len(lint_obj.git_sha) > 0
 
@@ -86,7 +55,7 @@ def test_load_lint_config_ignore_all_tests(self):
 
         # Make a config file listing all test names
         config_dict = {"lint": {test_name: False for test_name in lint_obj.lint_tests}}
-        with open(os.path.join(new_pipeline, ".nf-core.yml"), "w") as fh:
+        with open(Path(new_pipeline, ".nf-core.yml"), "w") as fh:
             yaml.dump(config_dict, fh)
 
         # Load the new lint config file and check
@@ -130,7 +99,7 @@ def test_json_output(self, tmp_dir):
         self.lint_obj.warned.append(("test_three", "This test gave a warning"))
 
         # Make a temp dir for the JSON output
-        json_fn = os.path.join(tmp_dir, "lint_results.json")
+        json_fn = Path(tmp_dir, "lint_results.json")
         self.lint_obj._save_json_results(json_fn)
 
         # Load created JSON file and check its contents
@@ -156,7 +125,7 @@ def test_sphinx_md_files(self):
         """Check that we have .md files for all lint module code,
         and that there are no unexpected files (eg. deleted lint tests)"""
 
-        docs_basedir = os.path.join(
+        docs_basedir = Path(
             os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "docs", "api", "_src", "pipeline_lint_tests"
         )
 
@@ -164,12 +133,12 @@ def test_sphinx_md_files(self):
        # Get list of existing .md files
         existing_docs = []
         for fn in os.listdir(docs_basedir):
             if fnmatch.fnmatch(fn, "*.md") and not fnmatch.fnmatch(fn, "index.md"):
-                existing_docs.append(os.path.join(docs_basedir, fn))
+                existing_docs.append(Path(docs_basedir, fn))
 
         # Check .md files against each test name
         lint_obj = nf_core.pipelines.lint.PipelineLint("", True)
         for test_name in lint_obj.lint_tests:
-            fn = os.path.join(docs_basedir, f"{test_name}.md")
+            fn = Path(docs_basedir, f"{test_name}.md")
             assert os.path.exists(fn), f"Could not find lint docs .md file: {fn}"
             existing_docs.remove(fn)
@@ -179,33 +148,33 @@ def test_sphinx_md_files(self):
     #######################
     # SPECIFIC LINT TESTS #
     #######################
-    from ..lint.actions_awsfulltest import (  # type: ignore[misc]
+    from .lint.actions_awsfulltest import (  # type: ignore[misc]
         test_actions_awsfulltest_fail,
         test_actions_awsfulltest_pass,
         test_actions_awsfulltest_warn,
     )
-    from ..lint.actions_awstest import (  # type: ignore[misc]
+    from .lint.actions_awstest import (  # type: ignore[misc]
         test_actions_awstest_fail,
         test_actions_awstest_pass,
     )
-    from ..lint.actions_ci import (  # type: ignore[misc]
+    from .lint.actions_ci import (  # type: ignore[misc]
         test_actions_ci_fail_wrong_nf,
         test_actions_ci_fail_wrong_trigger,
         test_actions_ci_pass,
     )
-    from ..lint.actions_schema_validation import (  # type: ignore[misc]
+    from .lint.actions_schema_validation import (  # type: ignore[misc]
         test_actions_schema_validation_fails_for_additional_property,
         test_actions_schema_validation_missing_jobs,
         test_actions_schema_validation_missing_on,
    )
-    from ..lint.configs import (  # type: ignore[misc]
+    from .lint.configs import (  # type: ignore[misc]
         test_ignore_base_config,
         test_ignore_modules_config,
         test_superfluous_withname_in_base_config_fails,
         test_superfluous_withname_in_modules_config_fails,
         test_withname_in_modules_config,
     )
-    from ..lint.files_exist import (  # type: ignore[misc]
-        test_files_exist_depreciated_file,
+    from .lint.files_exist import (  # type: ignore[misc]
+        test_files_exist_deprecated_file,
         test_files_exist_fail_conditional,
         test_files_exist_missing_config,
@@ -214,13 +183,13 @@ def test_sphinx_md_files(self):
         test_files_exist_missing_main,
         test_files_exist_pass,
         test_files_exist_pass_conditional,
         test_files_exist_pass_conditional_nfschema,
     )
-    from ..lint.files_unchanged import (  # type: ignore[misc]
+    from .lint.files_unchanged import (  # type: ignore[misc]
         test_files_unchanged_fail,
         test_files_unchanged_pass,
     )
-    from ..lint.merge_markers import test_merge_markers_found  # type: ignore[misc]
-    from ..lint.modules_json import test_modules_json_pass  # type: ignore[misc]
-    from ..lint.multiqc_config import (  # type: ignore[misc]
+    from .lint.merge_markers import test_merge_markers_found  # type: ignore[misc]
+    from .lint.modules_json import test_modules_json_pass  # type: ignore[misc]
+    from .lint.multiqc_config import (  # type: ignore[misc]
         test_multiqc_config_exists,
         test_multiqc_config_ignore,
         test_multiqc_config_missing_report_section_order,
@@ -229,7 +198,7 @@ def test_sphinx_md_files(self):
         test_multiqc_config_report_comment_fail,
         test_multiqc_config_report_comment_release_fail,
         test_multiqc_config_report_comment_release_succeed,
         test_multiqc_incorrect_export_plots,
     )
-    from ..lint.nextflow_config import (  # type: ignore[misc]
+    from .lint.nextflow_config import (  # type: ignore[misc]
         test_allow_params_reference_in_main_nf,
test_catch_params_assignment_in_main_nf, test_default_values_fail, @@ -242,14 +211,14 @@ def test_sphinx_md_files(self): test_nextflow_config_example_pass, test_nextflow_config_missing_test_profile_failed, ) - from ..lint.nfcore_yml import ( # type: ignore[misc] + from .lint.nfcore_yml import ( # type: ignore[misc] test_nfcore_yml_fail_nfcore_version, test_nfcore_yml_fail_repo_type, test_nfcore_yml_pass, ) - from ..lint.template_strings import ( # type: ignore[misc] + from .lint.template_strings import ( # type: ignore[misc] test_template_strings, test_template_strings_ignore_file, test_template_strings_ignored, ) - from ..lint.version_consistency import test_version_consistency # type: ignore[misc] + from .lint.version_consistency import test_version_consistency # type: ignore[misc] From 1d2076b511665034153d2ed3010508479f1e8e27 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 16 Jul 2024 17:56:42 +0200 Subject: [PATCH 07/89] fix tests --- nf_core/utils.py | 24 ++++++++++++------------ tests/pipelines/test_download.py | 4 ++-- tests/pipelines/test_lint.py | 4 +--- tests/pipelines/test_list.py | 28 ++++++++++++++-------------- tests/test_pipelines.py | 3 --- 5 files changed, 29 insertions(+), 34 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index cf624935b3..9b3775d62d 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -19,7 +19,7 @@ import time from contextlib import contextmanager from pathlib import Path -from typing import Generator, Tuple, Union +from typing import Dict, Generator, List, Optional, Tuple, Union import git import prompt_toolkit @@ -135,22 +135,22 @@ class Pipeline: def __init__(self, wf_path): """Initialise pipeline object""" - self.conda_config = {} - self.conda_package_info = {} - self.nf_config = {} - self.files = [] - self.git_sha = None - self.minNextflowVersion = None + self.conda_config: Dict = {} + self.conda_package_info: Dict = {} + self.nf_config: Dict = {} + self.files: List[Path] = [] + self.git_sha: Optional[str] = None + self.minNextflowVersion: Optional[str] = None self.wf_path = Path(wf_path) - self.pipeline_name = None - self.pipeline_prefix = None - self.schema_obj = None + self.pipeline_name: Optional[str] = None + self.pipeline_prefix: Optional[str] = None + self.schema_obj: Optional[Dict] = None try: repo = git.Repo(self.wf_path) self.git_sha = repo.head.object.hexsha - except Exception: - log.debug(f"Could not find git hash for pipeline: {self.wf_path}") + except Exception as e: + log.debug(f"Could not find git hash for pipeline: {self.wf_path}. 
{e}") # Overwrite if we have the last commit from the PR - otherwise we get a merge commit hash if os.environ.get("GITHUB_PR_COMMIT", "") != "": diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index 7ed6de3fa7..d571b82cee 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -203,7 +203,7 @@ def test__find_container_images_config_nextflow(self, tmp_path, mock_fetch_wf_co def test_find_container_images_modules(self, tmp_path, mock_fetch_wf_config): download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path) mock_fetch_wf_config.return_value = {} - download_obj.find_container_images(TEST_DATA_DIR / "mock_module_containers") + download_obj.find_container_images(str(Path(TEST_DATA_DIR, "mock_module_containers"))) # mock_docker_single_quay_io.nf assert "quay.io/biocontainers/singlequay:1.9--pyh9f0ad1d_0" in download_obj.containers @@ -546,7 +546,7 @@ def test_remote_container_functionality(self, tmp_dir): outdir=os.path.join(tmp_dir, "new"), revision="3.9", compress_type="none", - container_cache_index=TEST_DATA_DIR / "data/testdata_remote_containers.txt", + container_cache_index=str(Path(TEST_DATA_DIR, "testdata_remote_containers.txt")), ) download_obj.include_configs = False # suppress prompt, because stderr.is_interactive doesn't. diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index 8530fea5ad..80d7e1e018 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -125,9 +125,7 @@ def test_sphinx_md_files(self): """Check that we have .md files for all lint module code, and that there are no unexpected files (eg. deleted lint tests)""" - docs_basedir = Path( - os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "docs", "api", "_src", "pipeline_lint_tests" - ) + docs_basedir = Path(Path(__file__).parent.parent.parent, "docs", "api", "_src", "pipeline_lint_tests") # Get list of existing .md files existing_docs = [] diff --git a/tests/pipelines/test_list.py b/tests/pipelines/test_list.py index 21d239287d..32970ae076 100644 --- a/tests/pipelines/test_list.py +++ b/tests/pipelines/test_list.py @@ -14,15 +14,17 @@ import nf_core.pipelines.list -# create a temporary directory that can be used by the tests in this file -tmp = Path(tempfile.mkdtemp()) -tmp_nxf = tmp / "nxf" -tmp_nxf_str = str(tmp_nxf) - class TestList(unittest.TestCase): """Class for list tests""" + def setUp(self) -> None: + # create a temporary directory that can be used by the tests in this file + tmp = Path(tempfile.TemporaryDirectory().name) + self.tmp_nxf = tmp / "nxf" + self.tmp_nxf_str = str(self.tmp_nxf) + os.environ["NXF_ASSETS"] = self.tmp_nxf_str + @mock.patch("subprocess.check_output") def test_working_listcall(self, mock_subprocess): """Test that listing pipelines works""" @@ -105,28 +107,26 @@ def test_local_workflows_compare_and_fail_silently(self): rwf_ex.releases = None - @mock.patch.dict(os.environ, {"NXF_ASSETS": tmp_nxf_str}) @mock.patch("nf_core.pipelines.list.LocalWorkflow") def test_parse_local_workflow_and_succeed(self, mock_local_wf): - test_path = tmp_nxf / "nf-core" + test_path = self.tmp_nxf / "nf-core" if not os.path.isdir(test_path): os.makedirs(test_path) - assert os.environ["NXF_ASSETS"] == tmp_nxf_str - with open(tmp_nxf / "nf-core/dummy-wf", "w") as f: + assert os.environ["NXF_ASSETS"] == self.tmp_nxf_str + with open(self.tmp_nxf / "nf-core/dummy-wf", "w") as f: f.write("dummy") workflows_obj = nf_core.pipelines.list.Workflows() workflows_obj.get_local_nf_workflows() assert 
len(workflows_obj.local_workflows) == 1 - @mock.patch.dict(os.environ, {"NXF_ASSETS": tmp_nxf_str}) @mock.patch("nf_core.pipelines.list.LocalWorkflow") @mock.patch("subprocess.check_output") def test_parse_local_workflow_home(self, mock_local_wf, mock_subprocess): - test_path = tmp_nxf / "nf-core" + test_path = self.tmp_nxf / "nf-core" if not os.path.isdir(test_path): os.makedirs(test_path) - assert os.environ["NXF_ASSETS"] == tmp_nxf_str - with open(tmp_nxf / "nf-core/dummy-wf", "w") as f: + assert os.environ["NXF_ASSETS"] == self.tmp_nxf_str + with open(self.tmp_nxf / "nf-core/dummy-wf", "w") as f: f.write("dummy") workflows_obj = nf_core.pipelines.list.Workflows() workflows_obj.get_local_nf_workflows() @@ -135,7 +135,7 @@ def test_parse_local_workflow_home(self, mock_local_wf, mock_subprocess): @mock.patch("git.Repo") def test_local_workflow_investigation(self, mock_repo, mock_stat): local_wf = nf_core.pipelines.list.LocalWorkflow("dummy") - local_wf.local_path = tmp + local_wf.local_path = self.tmp_nxf.parent mock_repo.head.commit.hexsha = "h00r4y" mock_stat.st_mode = 1 local_wf.get_local_nf_workflow_details() diff --git a/tests/test_pipelines.py index 461f046f52..899c4641ea 100644 --- a/tests/test_pipelines.py +++ b/tests/test_pipelines.py @@ -2,8 +2,6 @@ from pathlib import Path from unittest import TestCase -from git import Repo - import nf_core.pipelines.launch import nf_core.pipelines.lint from nf_core.utils import Pipeline @@ -16,7 +14,6 @@ def setUp(self) -> None: """Create a new Pipeline for testing""" self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() self.pipeline_obj = Pipeline(self.pipeline_dir) - Repo.init(self.pipeline_dir) self.pipeline_obj._load() self.nf_params_fn = Path(self.pipeline_dir, "nf-params.json") From 65084dd6143d3be44121689b84ec3a00fa857356 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 16 Jul 2024 18:48:35 +0000 Subject: [PATCH 08/89] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fe7fdcc28b..6bc07e2a7d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ - Fix linting fail on nfcore_external_java_deps if nf_schema is used ([#2976](https://github.com/nf-core/tools/pull/2976)) - Conda module linting: Include package name in log file ([#3014](https://github.com/nf-core/tools/pull/3014)) +- Restructure pipeline tests and move pipeline linting into subfolder ([#3070](https://github.com/nf-core/tools/pull/3070)) ### Download From 45fdfb9ddd06e0b75fef592475ce166ee89894da Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 09:57:57 +0200 Subject: [PATCH 09/89] more fine-grained setup steps --- nf_core/pipelines/create/create.py | 2 +- nf_core/utils.py | 2 +- tests/pipelines/test_launch.py | 13 +++++++++---- tests/pipelines/test_lint.py | 4 ++++ tests/pipelines/test_list.py | 5 ++--- tests/test_pipelines.py | 8 -------- 6 files changed, 17 insertions(+), 17 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index e9f37306f5..8cfa09491f 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -61,7 +61,7 @@ def __init__( self.config = template_config elif from_config_file: # Try reading config file - _, config_yml = nf_core.utils.load_tools_config(str(outdir) if outdir else ".") + _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else ".") # Obtain a CreateConfig object from `.nf-core.yml`
config file if "template" in config_yml: self.config = CreateConfig(**config_yml["template"]) diff --git a/nf_core/utils.py b/nf_core/utils.py index 9b3775d62d..ead871aed1 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -133,7 +133,7 @@ class Pipeline: schema_obj (obj): A :class:`PipelineSchema` object """ - def __init__(self, wf_path): + def __init__(self, wf_path: Path) -> None: """Initialise pipeline object""" self.conda_config: Dict = {} self.conda_package_info: Dict = {} diff --git a/tests/pipelines/test_launch.py b/tests/pipelines/test_launch.py index c9efa5aa63..03bc0e98be 100644 --- a/tests/pipelines/test_launch.py +++ b/tests/pipelines/test_launch.py @@ -1,7 +1,7 @@ """Tests covering the pipeline launch code.""" import json -import os +from pathlib import Path from unittest import mock import pytest @@ -14,6 +14,11 @@ class TestLaunch(TestPipelines): + def setUp(self) -> None: + super().setUp() + self.nf_params_fn = Path(self.pipeline_dir, "nf-params.json") + self.launcher = nf_core.pipelines.launch.Launch(self.pipeline_dir, params_out=self.nf_params_fn) + @mock.patch.object(nf_core.pipelines.launch.Launch, "prompt_web_gui", side_effect=[True]) @mock.patch.object(nf_core.pipelines.launch.Launch, "launch_web_gui") def test_launch_pipeline(self, mock_webbrowser, mock_lauch_web_gui): @@ -47,12 +52,12 @@ def test_get_pipeline_schema(self): @with_temporary_folder def test_make_pipeline_schema(self, tmp_path): """Create a workflow, but delete the schema file, then try to load it""" - test_pipeline_dir = os.path.join(tmp_path, "wf") + test_pipeline_dir = Path(tmp_path, "wf") create_obj = nf_core.pipelines.create.create.PipelineCreate( "testpipeline", "a description", "Me", outdir=test_pipeline_dir, no_git=True ) create_obj.init_pipeline() - os.remove(os.path.join(test_pipeline_dir, "nextflow_schema.json")) + Path(test_pipeline_dir, "nextflow_schema.json").unlink() self.launcher = nf_core.pipelines.launch.Launch(test_pipeline_dir, params_out=self.nf_params_fn) self.launcher.get_pipeline_schema() assert len(self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]) > 2 @@ -300,7 +305,7 @@ def test_build_command_params(self): # Check command assert ( self.launcher.nextflow_cmd - == f'nextflow run {self.pipeline_dir} -params-file "{os.path.relpath(self.nf_params_fn)}"' + == f'nextflow run {self.pipeline_dir} -params-file "{Path(self.nf_params_fn).relative_to(Path.cwd())}"' ) # Check saved parameters file with open(self.nf_params_fn) as fh: diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index 80d7e1e018..800ffa16b3 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -17,6 +17,10 @@ class TestLint(TestPipelines): """Class for lint tests""" + def setUp(self) -> None: + super().setUp() + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir) + ########################## # CORE lint.py FUNCTIONS # ########################## diff --git a/tests/pipelines/test_list.py b/tests/pipelines/test_list.py index 32970ae076..aacc3805e8 100644 --- a/tests/pipelines/test_list.py +++ b/tests/pipelines/test_list.py @@ -4,10 +4,9 @@ import os import tempfile import time -import unittest from datetime import datetime from pathlib import Path -from unittest import mock +from unittest import TestCase, mock import pytest from rich.console import Console @@ -15,7 +14,7 @@ import nf_core.pipelines.list -class TestList(unittest.TestCase): +class TestList(TestCase): """Class for list tests""" def setUp(self) -> None: 
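(Note on the test_list.py hunk above: the module-level temporary directory becomes a per-test setUp, so every test starts from a fresh NXF_ASSETS path and no state leaks between tests in the file. A minimal sketch of the same pattern follows; names are hypothetical and it is not part of this patch. One assumption worth flagging: the sketch keeps a reference to the TemporaryDirectory object rather than only its .name, so the directory is guaranteed to exist while the test runs and can be removed in tearDown.)

import os
import tempfile
import unittest
from pathlib import Path


class PerTestAssetsSketch(unittest.TestCase):
    """Hypothetical example of per-test NXF_ASSETS isolation, as in TestList.setUp above."""

    def setUp(self) -> None:
        # Hold on to the TemporaryDirectory object itself so the directory
        # is not cleaned up while the test is still running
        self._tmp = tempfile.TemporaryDirectory()
        self.tmp_nxf = Path(self._tmp.name) / "nxf"
        os.environ["NXF_ASSETS"] = str(self.tmp_nxf)

    def tearDown(self) -> None:
        # Remove the per-test directory again
        self._tmp.cleanup()

    def test_env_points_at_fresh_dir(self) -> None:
        assert os.environ["NXF_ASSETS"] == str(self.tmp_nxf)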
diff --git a/tests/test_pipelines.py b/tests/test_pipelines.py index 899c4641ea..656ccbef55 100644 --- a/tests/test_pipelines.py +++ b/tests/test_pipelines.py @@ -1,9 +1,6 @@ import shutil -from pathlib import Path from unittest import TestCase -import nf_core.pipelines.launch -import nf_core.pipelines.lint from nf_core.utils import Pipeline from .utils import create_tmp_pipeline @@ -16,11 +13,6 @@ def setUp(self) -> None: self.pipeline_obj = Pipeline(self.pipeline_dir) self.pipeline_obj._load() - self.nf_params_fn = Path(self.pipeline_dir, "nf-params.json") - self.launcher = nf_core.pipelines.launch.Launch(self.pipeline_dir, params_out=self.nf_params_fn) - - self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir) - def tearDown(self) -> None: """Remove the test pipeline directory""" shutil.rmtree(self.tmp_dir) From 189015f1b12809bce71744801823c6fa4a956c58 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 10:00:53 +0200 Subject: [PATCH 10/89] don't run list_files on every init, only needed for one linting step --- nf_core/pipelines/lint/__init__.py | 1 - nf_core/pipelines/lint/template_strings.py | 3 +- nf_core/utils.py | 53 +++++++++++----------- tests/test_utils.py | 4 +- 4 files changed, 31 insertions(+), 30 deletions(-) diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index b40f0ee292..d731cb018d 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -560,7 +560,6 @@ def run_linting( # Load the various pipeline configs lint_obj._load_lint_config() lint_obj.load_pipeline_config() - lint_obj._list_files() # Create the modules lint object module_lint_obj = nf_core.modules.lint.ModuleLint(pipeline_dir, hide_progress=hide_progress) diff --git a/nf_core/pipelines/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py index 9b015bc209..90c47203f6 100644 --- a/nf_core/pipelines/lint/template_strings.py +++ b/nf_core/pipelines/lint/template_strings.py @@ -39,10 +39,11 @@ def template_strings(self): ignored = [] # Files that should be ignored according to the linting config ignore_files = self.lint_config.get("template_strings", []) + files = self.list_files() # Loop through files, searching for string num_matches = 0 - for fn in self.files: + for fn in files: if str(fn.relative_to(self.wf_path)) in ignore_files: ignored.append(f"Ignoring Jinja template strings in file `{fn}`") continue diff --git a/nf_core/utils.py b/nf_core/utils.py index ead871aed1..b5719a2b15 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -162,31 +162,7 @@ def __repr__(self) -> str: def _load(self) -> bool: """Run core load functions""" - return self._list_files() and self.load_pipeline_config() and self._load_conda_environment() - - def _list_files(self) -> bool: - """Get a list of all files in the pipeline""" - try: - # First, try to get the list of files using git - git_ls_files = subprocess.check_output(["git", "ls-files"], cwd=self.wf_path).splitlines() - self.files = [] - for fn in git_ls_files: - full_fn = Path(self.wf_path) / fn.decode("utf-8") - if full_fn.is_file(): - self.files.append(full_fn) - else: - log.debug(f"`git ls-files` returned '{full_fn}' but could not open it!") - return True - except subprocess.CalledProcessError as e: - # Failed, so probably not initialised as a git repository - just a list of all files - log.debug(f"Couldn't call 'git ls-files': {e}") - self.files = [] - for subdir, _, files in os.walk(self.wf_path): - for fn in files: - self.files.append(Path(subdir, 
str(fn))) - if len(self.files) > 0: - return True - return False + return self.load_pipeline_config() and self._load_conda_environment() def _load_conda_environment(self) -> bool: """Try to load the pipeline environment.yml file, if it exists""" @@ -202,6 +178,31 @@ def _fp(self, fn): """Convenience function to get full path to a file in the pipeline""" return os.path.join(self.wf_path, fn) + def list_files(self) -> List[Path]: + """Get a list of all files in the pipeline""" + files = [] + try: + # First, try to get the list of files using git + git_ls_files = subprocess.check_output(["git", "ls-files"], cwd=self.wf_path).splitlines() + for fn in git_ls_files: + full_fn = Path(self.wf_path) / fn.decode("utf-8") + if full_fn.is_file(): + files.append(full_fn) + else: + log.debug(f"`git ls-files` returned '{full_fn}' but could not open it!") + except subprocess.CalledProcessError as e: + # Failed, so probably not initialised as a git repository - just a list of all files + log.debug(f"Couldn't call 'git ls-files': {e}") + files = [] + for file_path in self.wf_path.rglob("*"): + if file_path.is_file(): + # Append the file path to the list + files.append(file_path.relative_to(self.wf_path)) + if len(files) == 0: + log.debug(f"No files found in pipeline: {self.wf_path}") + + return files + def load_pipeline_config(self) -> bool: """Get the nextflow config for this pipeline @@ -1082,7 +1083,7 @@ def determine_base_dir(directory="."): return directory if (base_dir == start_dir or str(base_dir) == base_dir.root) else base_dir -def get_first_available_path(directory, paths): +def get_first_available_path(directory: Union[Path, str], paths: List[str]) -> Union[Path, None]: for p in paths: if Path(directory, p).is_file(): return Path(directory, p) diff --git a/tests/test_utils.py b/tests/test_utils.py index 7afe1a532f..f61f3584e2 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -109,7 +109,7 @@ def testload_pipeline_config(self): def test_list_files_git(self): """Test listing pipeline files using `git ls`""" - self.pipeline_obj._list_files() + self.pipeline_obj.list_files() assert Path(self.test_pipeline_dir, "main.nf") in self.pipeline_obj.files @with_temporary_folder @@ -119,7 +119,7 @@ def test_list_files_no_git(self, tmpdir): tmp_fn = Path(tmpdir, "testfile") tmp_fn.touch() pipeline_obj = nf_core.utils.Pipeline(tmpdir) - pipeline_obj._list_files() + pipeline_obj.list_files() assert tmp_fn in pipeline_obj.files @mock.patch("os.path.exists") From 2ddc860183729642bd69169b3d948fa3ec273d46 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 10:01:29 +0200 Subject: [PATCH 11/89] create git repo with testpipeline --- tests/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/utils.py b/tests/utils.py index 38a10fa810..ef8f334669 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -105,7 +105,7 @@ def create_tmp_pipeline() -> Tuple[Path, Path, str, Path]: pipeline_dir = tmp_dir / pipeline_name nf_core.pipelines.create.create.PipelineCreate( - pipeline_name, "it is mine", "me", no_git=True, outdir=str(pipeline_dir) + pipeline_name, "it is mine", "me", no_git=False, outdir=str(pipeline_dir) ).init_pipeline() # return values to instance variables for later use in test methods From c313fed6a08c0e157a93a19a14d4d77946ad6d50 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 10:25:45 +0200 Subject: [PATCH 12/89] update textual snapshots to new location --- .editorconfig | 2 +- .../__snapshots__/test_create_app.ambr | 0 
tests/pipelines/test_create_app.py | 26 ++++++++++--------- 3 files changed, 15 insertions(+), 13 deletions(-) rename tests/{ => pipelines}/__snapshots__/test_create_app.ambr (100%) diff --git a/.editorconfig b/.editorconfig index 5aa8697d30..f266805d6e 100644 --- a/.editorconfig +++ b/.editorconfig @@ -21,7 +21,7 @@ indent_style = unset [**/Makefile] indent_style = unset -[tests/__snapshots__/*] +[tests/pipelines/__snapshots__/*] charset = unset end_of_line = unset insert_final_newline = unset diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/pipelines/__snapshots__/test_create_app.ambr similarity index 100% rename from tests/__snapshots__/test_create_app.ambr rename to tests/pipelines/__snapshots__/test_create_app.ambr diff --git a/tests/pipelines/test_create_app.py b/tests/pipelines/test_create_app.py index 8c89b92cba..9a02f04f00 100644 --- a/tests/pipelines/test_create_app.py +++ b/tests/pipelines/test_create_app.py @@ -4,6 +4,8 @@ from nf_core.pipelines.create import PipelineCreateApp +INIT_FILE = "../../nf_core/pipelines/create/__init__.py" + async def test_app_bindings(): """Test that the app bindings work.""" @@ -23,7 +25,7 @@ async def test_app_bindings(): def test_welcome(snap_compare): """Test snapshot for the first screen in the app. The welcome screen.""" - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50)) + assert snap_compare(INIT_FILE, terminal_size=(100, 50)) def test_choose_type(snap_compare): @@ -36,7 +38,7 @@ def test_choose_type(snap_compare): async def run_before(pilot) -> None: await pilot.click("#start") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_basic_details_nfcore(snap_compare): @@ -51,7 +53,7 @@ async def run_before(pilot) -> None: await pilot.click("#start") await pilot.click("#type_nfcore") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_basic_details_custom(snap_compare): @@ -66,7 +68,7 @@ async def run_before(pilot) -> None: await pilot.click("#start") await pilot.click("#type_custom") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_type_nfcore(snap_compare): @@ -89,7 +91,7 @@ async def run_before(pilot) -> None: await pilot.press("M", "e") await pilot.click("#next") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_type_nfcore_validation(snap_compare): @@ -108,7 +110,7 @@ async def run_before(pilot) -> None: await pilot.click("#next") await pilot.pause() - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_type_custom(snap_compare): @@ -132,7 +134,7 @@ async def run_before(pilot) -> None: await pilot.press("M", "e") await pilot.click("#next") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def 
test_final_details(snap_compare): @@ -157,7 +159,7 @@ async def run_before(pilot) -> None: await pilot.click("#next") await pilot.click("#continue") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_customisation_help(snap_compare): @@ -184,7 +186,7 @@ async def run_before(pilot) -> None: await pilot.press("tab") await pilot.press("enter") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_github_question(tmpdir, snap_compare): @@ -216,7 +218,7 @@ async def run_before(pilot) -> None: await pilot.app.workers.wait_for_complete() await pilot.click("#close_screen") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) @mock.patch("nf_core.pipelines.create.githubrepo.GithubRepo._get_github_credentials") @@ -255,7 +257,7 @@ async def run_before(pilot) -> None: await pilot.click("#close_screen") await pilot.click("#github_repo") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_github_exit_message(tmpdir, snap_compare): @@ -291,4 +293,4 @@ async def run_before(pilot) -> None: await pilot.click("#github_repo") await pilot.click("#exit") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) From d6560656873b8bc448cde44975c78fb0abc39e02 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 11:19:49 +0200 Subject: [PATCH 13/89] fix tests --- nf_core/pipelines/launch.py | 5 +++-- nf_core/utils.py | 2 +- tests/pipelines/test_launch.py | 5 +---- tests/test_utils.py | 8 ++++---- tests/utils.py | 2 +- 5 files changed, 10 insertions(+), 12 deletions(-) diff --git a/nf_core/pipelines/launch.py b/nf_core/pipelines/launch.py index 3a5f97e78f..e03982a25a 100644 --- a/nf_core/pipelines/launch.py +++ b/nf_core/pipelines/launch.py @@ -7,6 +7,7 @@ import re import subprocess import webbrowser +from pathlib import Path import questionary from rich.console import Console @@ -46,7 +47,7 @@ def __init__( self.schema_obj = None self.use_params_file = False if command_only else True self.params_in = params_in - self.params_out = params_out if params_out else os.path.join(os.getcwd(), "nf-params.json") + self.params_out = params_out if params_out else Path.cwd() / "nf-params.json" self.save_all = save_all self.show_hidden = show_hidden self.web_schema_launch_url = url if url else "https://nf-co.re/launch" @@ -697,7 +698,7 @@ def build_command(self): # Write the user selection to a file and run nextflow with that if self.use_params_file: dump_json_with_prettier(self.params_out, self.schema_obj.input_params) - self.nextflow_cmd += f' -params-file "{os.path.relpath(self.params_out)}"' + self.nextflow_cmd += f' -params-file "{Path(self.params_out)}"' # Call nextflow with a list of command line flags else: diff --git a/nf_core/utils.py b/nf_core/utils.py index b5719a2b15..0cd812cb04 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -197,7 +197,7 @@ def list_files(self) -> List[Path]: for 
file_path in self.wf_path.rglob("*"): if file_path.is_file(): # Append the file path to the list - files.append(file_path.relative_to(self.wf_path)) + files.append(file_path) if len(files) == 0: log.debug(f"No files found in pipeline: {self.wf_path}") diff --git a/tests/pipelines/test_launch.py b/tests/pipelines/test_launch.py index 03bc0e98be..da7618d486 100644 --- a/tests/pipelines/test_launch.py +++ b/tests/pipelines/test_launch.py @@ -303,10 +303,7 @@ def test_build_command_params(self): self.launcher.schema_obj.input_params.update({"input": "custom_input"}) self.launcher.build_command() # Check command - assert ( - self.launcher.nextflow_cmd - == f'nextflow run {self.pipeline_dir} -params-file "{Path(self.nf_params_fn).relative_to(Path.cwd())}"' - ) + assert self.launcher.nextflow_cmd == f'nextflow run {self.pipeline_dir} -params-file "{self.nf_params_fn}"' # Check saved parameters file with open(self.nf_params_fn) as fh: try: diff --git a/tests/test_utils.py b/tests/test_utils.py index f61f3584e2..860cba5ba6 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -109,8 +109,8 @@ def testload_pipeline_config(self): def test_list_files_git(self): """Test listing pipeline files using `git ls`""" - self.pipeline_obj.list_files() - assert Path(self.test_pipeline_dir, "main.nf") in self.pipeline_obj.files + files = self.pipeline_obj.list_files() + assert Path(self.test_pipeline_dir, "main.nf") in files @with_temporary_folder def test_list_files_no_git(self, tmpdir): @@ -119,8 +119,8 @@ def test_list_files_no_git(self, tmpdir): tmp_fn = Path(tmpdir, "testfile") tmp_fn.touch() pipeline_obj = nf_core.utils.Pipeline(tmpdir) - pipeline_obj.list_files() - assert tmp_fn in pipeline_obj.files + files = pipeline_obj.list_files() + assert tmp_fn in files @mock.patch("os.path.exists") @mock.patch("os.makedirs") diff --git a/tests/utils.py b/tests/utils.py index ef8f334669..90c4ae0418 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -105,7 +105,7 @@ def create_tmp_pipeline() -> Tuple[Path, Path, str, Path]: pipeline_dir = tmp_dir / pipeline_name nf_core.pipelines.create.create.PipelineCreate( - pipeline_name, "it is mine", "me", no_git=False, outdir=str(pipeline_dir) + pipeline_name, "it is mine", "me", no_git=False, outdir=pipeline_dir ).init_pipeline() # return values to instance variables for later use in test methods From 21cdbbdd2871fe74d337ee755e83680c2ad820f6 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 12:07:12 +0200 Subject: [PATCH 14/89] remove asyncio mode (and see what happens) --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4499324445..775f04c9a1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,8 +6,6 @@ requires = ["setuptools>=40.6.0", "wheel"] markers = ["datafiles: load datafiles"] testpaths = ["tests"] python_files = ["test_*.py"] -# automatically run coroutine tests with asyncio -asyncio_mode = ["auto"] norecursedirs = [ ".*", "build", From e0a82d51bf231dfc04629bf32105cacc10c8556c Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 12:08:14 +0200 Subject: [PATCH 15/89] fix refgenie tests --- nf_core/pipelines/create/create.py | 9 ++++++--- nf_core/pipelines/create_logo.py | 1 - nf_core/pipelines/lint/files_exist.py | 2 +- nf_core/pipelines/refgenie.py | 2 +- requirements.txt | 4 ++-- tests/pipelines/lint/files_exist.py | 2 +- tests/pipelines/test_refgenie.py | 20 ++++++++++---------- 7 files changed, 21 insertions(+), 19 deletions(-) diff --git a/nf_core/pipelines/create/create.py 
b/nf_core/pipelines/create/create.py index 8cfa09491f..c5af956690 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -355,7 +355,6 @@ def render_template(self): # Remove all unused parameters in the nextflow schema if not self.jinja_params["igenomes"] or not self.jinja_params["nf_core_configs"]: self.update_nextflow_schema() - if self.config.is_nfcore: # Make a logo and save it, if it is a nf-core pipeline self.make_pipeline_logo() @@ -513,11 +512,15 @@ def fix_linting(self): def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" email_logo_path = Path(self.outdir) / "assets" - create_logo(text=self.jinja_params["short_name"], dir=email_logo_path, theme="light", force=self.force) + create_logo(text=self.jinja_params["short_name"], dir=email_logo_path, theme="light", force=bool(self.force)) for theme in ["dark", "light"]: readme_logo_path = Path(self.outdir) / "docs" / "images" create_logo( - text=self.jinja_params["short_name"], dir=readme_logo_path, width=600, theme=theme, force=self.force + text=self.jinja_params["short_name"], + dir=readme_logo_path, + width=600, + theme=theme, + force=bool(self.force), ) def git_init_pipeline(self): diff --git a/nf_core/pipelines/create_logo.py b/nf_core/pipelines/create_logo.py index 1e96b7032c..780bafd7f5 100644 --- a/nf_core/pipelines/create_logo.py +++ b/nf_core/pipelines/create_logo.py @@ -20,7 +20,6 @@ def create_logo( force: bool = False, ) -> Path: """Create a logo for a pipeline.""" - if not text: raise UserWarning("Please provide the name of the text to put on the logo.") dir = Path(dir) diff --git a/nf_core/pipelines/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py index c6f622b3e0..edad62aab8 100644 --- a/nf_core/pipelines/lint/files_exist.py +++ b/nf_core/pipelines/lint/files_exist.py @@ -5,7 +5,7 @@ log = logging.getLogger(__name__) -def files_exist(self) -> Dict[str, Union[List[str], bool]]: +def files_exist(self) -> Dict[str, List[str]]: """Checks a given pipeline directory for required files. 
Iterates through the pipeline's directory content and checks that specified diff --git a/nf_core/pipelines/refgenie.py b/nf_core/pipelines/refgenie.py index de9201bcd6..19ef4b5121 100644 --- a/nf_core/pipelines/refgenie.py +++ b/nf_core/pipelines/refgenie.py @@ -183,7 +183,7 @@ def update_config(rgc): # Save the updated genome config try: - with open(refgenie_genomes_config_file, "w+") as fh: + with open(str(refgenie_genomes_config_file), "w+") as fh: fh.write(refgenie_genomes) log.info(f"Updated nf-core genomes config: {refgenie_genomes_config_file}") except FileNotFoundError: diff --git a/requirements.txt b/requirements.txt index 524b739e89..fb658be2fb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,10 +9,10 @@ packaging pillow pdiff pre-commit -prompt_toolkit>=3.0.3 +prompt_toolkit<=3.0.36 pydantic>=2.2.1 pyyaml -questionary>=1.8.0 +questionary>=2.0.1 refgenie requests requests_cache diff --git a/tests/pipelines/lint/files_exist.py b/tests/pipelines/lint/files_exist.py index 87508e78a8..4ae167b1d0 100644 --- a/tests/pipelines/lint/files_exist.py +++ b/tests/pipelines/lint/files_exist.py @@ -14,7 +14,7 @@ def test_files_exist_missing_config(self): lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" results = lint_obj.files_exist() - assert results["failed"] == ["File not found: `CHANGELOG.md`"] + assert "File not found: `CHANGELOG.md`" in results["failed"] def test_files_exist_missing_main(self): diff --git a/tests/pipelines/test_refgenie.py b/tests/pipelines/test_refgenie.py index 23cc0dd14a..734a2368bd 100644 --- a/tests/pipelines/test_refgenie.py +++ b/tests/pipelines/test_refgenie.py @@ -5,6 +5,7 @@ import subprocess import tempfile import unittest +from pathlib import Path class TestRefgenie(unittest.TestCase): @@ -14,36 +15,35 @@ def setUp(self): """ Prepare a refgenie config file """ - self.tmp_dir = tempfile.mkdtemp() - self.NXF_HOME = os.path.join(self.tmp_dir, ".nextflow") - self.NXF_REFGENIE_PATH = os.path.join(self.NXF_HOME, "nf-core", "refgenie_genomes.config") - self.REFGENIE = os.path.join(self.tmp_dir, "genomes_config.yaml") - self.translation_file = os.path.join(self.tmp_dir, "alias_translations.yaml") + self.tmp_dir = Path(tempfile.TemporaryDirectory().name) + self.NXF_HOME = self.tmp_dir / ".nextflow" + self.NXF_REFGENIE_PATH = self.NXF_HOME / "nf-core" / "refgenie_genomes.config" + self.REFGENIE = self.tmp_dir / "genomes_config.yaml" + self.translation_file = self.tmp_dir / "alias_translations.yaml" # Set NXF_HOME environment variable # avoids adding includeConfig statement to config file outside the current tmpdir try: self.NXF_HOME_ORIGINAL = os.environ["NXF_HOME"] except Exception: self.NXF_HOME_ORIGINAL = None - os.environ["NXF_HOME"] = self.NXF_HOME + os.environ["NXF_HOME"] = str(self.NXF_HOME) # create NXF_HOME and nf-core directories - os.makedirs(os.path.join(self.NXF_HOME, "nf-core"), exist_ok=True) + nf_core_dir = self.NXF_HOME / "nf-core" + nf_core_dir.mkdir(parents=True, exist_ok=True) # Initialize a refgenie config os.system(f"refgenie init -c {self.REFGENIE}") # Add NXF_REFGENIE_PATH to refgenie config with open(self.REFGENIE, "a") as fh: - fh.write(f"nextflow_config: {os.path.join(self.NXF_REFGENIE_PATH)}\n") + fh.write(f"nextflow_config: {self.NXF_REFGENIE_PATH}\n") # Add an alias translation to YAML file with open(self.translation_file, "a") as fh: fh.write("ensembl_gtf: gtf\n") def tearDown(self) -> None: - # Remove the tempdir again - os.system(f"rm -rf {self.tmp_dir}") # Reset NXF_HOME environment variable if 
self.NXF_HOME_ORIGINAL is None: del os.environ["NXF_HOME"] From 1fde6ca6e70866d0a8b7a7a79ade75afefcf36bb Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 13:12:43 +0200 Subject: [PATCH 16/89] add cleanup step to some download tests --- nf_core/pipelines/download.py | 2 +- tests/pipelines/test_download.py | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index f16430b0a8..797909636c 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -231,7 +231,7 @@ def download_workflow(self): summary_log.append(f"Enabled for Seqera Platform: '{self.platform}'") # Check that the outdir doesn't already exist - if os.path.exists(self.outdir): + if self.outdir is not None and os.path.exists(self.outdir): if not self.force: raise DownloadError( f"Output directory '{self.outdir}' already exists (use [red]--force[/] to overwrite)" diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index d571b82cee..a898d37b70 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -13,6 +13,7 @@ import pytest import nf_core.pipelines.create.create +import nf_core.pipelines.list import nf_core.utils from nf_core.pipelines.download import ContainerError, DownloadWorkflow, WorkflowRepo from nf_core.synced_repo import SyncedRepo @@ -643,6 +644,11 @@ def test_download_workflow_for_platform(self, tmp_dir, _): in download_obj.containers ) # indirect definition via $container variable. + # clean-up + # remove "nf-core-rnaseq*" directories + for path in Path().cwd().glob("nf-core-rnaseq*"): + shutil.rmtree(path) + # # Brief test adding a single custom tag to Seqera Platform download # @@ -659,6 +665,11 @@ def test_download_workflow_for_platform_with_one_custom_tag(self, _, tmp_dir): ) assert isinstance(download_obj.additional_tags, list) and len(download_obj.additional_tags) == 1 + # clean-up + # remove "nf-core-rnaseq*" directories + for path in Path().cwd().glob("nf-core-rnaseq*"): + shutil.rmtree(path) + # # Test adding custom tags to Seqera Platform download (full test) # @@ -727,3 +738,8 @@ def test_download_workflow_for_platform_with_custom_tags(self, _, tmp_dir): "[red]Could not apply invalid `--tag` specification[/]: 'What is this?'", } ) + + # clean-up + # remove "nf-core-rnaseq*" directories + for path in Path().cwd().glob("nf-core-rnaseq*"): + shutil.rmtree(path) From a0f788dc1d7a8059e8077ca3413397f9f38cc3be Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 13:40:39 +0200 Subject: [PATCH 17/89] start converting linting tests to new subclass structure --- nf_core/pipelines/create_logo.py | 13 ++-- tests/pipelines/lint/actions_awsfulltest.py | 60 ------------------ .../lint/test_actions_awsfulltest.py | 61 +++++++++++++++++++ tests/pipelines/test_lint.py | 20 +++--- 4 files changed, 77 insertions(+), 77 deletions(-) delete mode 100644 tests/pipelines/lint/actions_awsfulltest.py create mode 100644 tests/pipelines/lint/test_actions_awsfulltest.py diff --git a/nf_core/pipelines/create_logo.py b/nf_core/pipelines/create_logo.py index 780bafd7f5..0643d2e295 100644 --- a/nf_core/pipelines/create_logo.py +++ b/nf_core/pipelines/create_logo.py @@ -90,11 +90,14 @@ def create_logo( color = theme == "dark" and (250, 250, 250) or (5, 5, 5) draw.text((110, 465), text, color, font=font) - # Crop to max width - img = img.crop((0, 0, max_width, height)) - - # Resize - img = img.resize((width, int((width / max_width) * height))) + if 
img is not None: + # Crop to max width + img = img.crop((0, 0, max_width, height)) + + # Resize + img = img.resize((width, int((width / max_width) * height))) + else: + log.error("Failed to create logo, no image object created.") # Save to cache Path(cache_path.parent).mkdir(parents=True, exist_ok=True) diff --git a/tests/pipelines/lint/actions_awsfulltest.py b/tests/pipelines/lint/actions_awsfulltest.py deleted file mode 100644 index d1479bb1e1..0000000000 --- a/tests/pipelines/lint/actions_awsfulltest.py +++ /dev/null @@ -1,60 +0,0 @@ -from pathlib import Path - -import yaml - -import nf_core.pipelines.lint - - -def test_actions_awsfulltest_warn(self): - """Lint test: actions_awsfulltest - PASS""" - self.lint_obj._load() - results = self.lint_obj.actions_awsfulltest() - assert "`.github/workflows/awsfulltest.yml` is triggered correctly" in results["passed"] - assert len(results.get("failed", [])) == 0 - assert len(results.get("ignored", [])) == 0 - - -def test_actions_awsfulltest_pass(self): - """Lint test: actions_awsfulltest - WARN""" - - # Edit .github/workflows/awsfulltest.yml to use -profile test_full - new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: - awsfulltest_yml = fh.read() - awsfulltest_yml = awsfulltest_yml.replace("-profile test ", "-profile test_full ") - with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: - fh.write(awsfulltest_yml) - - # Make lint object - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.actions_awsfulltest() - assert results["passed"] == [ - "`.github/workflows/awsfulltest.yml` is triggered correctly", - "`.github/workflows/awsfulltest.yml` does not use `-profile test`", - ] - assert len(results.get("warned", [])) == 0 - assert len(results.get("failed", [])) == 0 - assert len(results.get("ignored", [])) == 0 - - -def test_actions_awsfulltest_fail(self): - """Lint test: actions_awsfulltest - FAIL""" - - # Edit .github/workflows/awsfulltest.yml to use -profile test_full - new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: - awsfulltest_yml = yaml.safe_load(fh) - del awsfulltest_yml[True]["pull_request_review"] - with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: - yaml.dump(awsfulltest_yml, fh) - - # Make lint object - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.actions_awsfulltest() - assert results["failed"] == ["`.github/workflows/awsfulltest.yml` is not triggered correctly"] - assert "`.github/workflows/awsfulltest.yml` does not use `-profile test`" in results["passed"] - assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/lint/test_actions_awsfulltest.py b/tests/pipelines/lint/test_actions_awsfulltest.py new file mode 100644 index 0000000000..5c070fd5cf --- /dev/null +++ b/tests/pipelines/lint/test_actions_awsfulltest.py @@ -0,0 +1,61 @@ +from pathlib import Path + +import yaml + +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintActionsAwsfulltest(TestLint): + def test_actions_awsfulltest_warn(self): + """Lint test: actions_awsfulltest - PASS""" + self.lint_obj._load() + results = self.lint_obj.actions_awsfulltest() + assert "`.github/workflows/awsfulltest.yml` is triggered correctly" in results["passed"] + assert len(results.get("failed", [])) == 0 + assert 
len(results.get("ignored", [])) == 0 + + def test_actions_awsfulltest_pass(self): + """Lint test: actions_awsfulltest - WARN""" + + # Edit .github/workflows/awsfulltest.yml to use -profile test_full + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: + awsfulltest_yml = fh.read() + awsfulltest_yml = awsfulltest_yml.replace("-profile test ", "-profile test_full ") + with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: + fh.write(awsfulltest_yml) + + # Make lint object + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + lint_obj._load() + + results = lint_obj.actions_awsfulltest() + assert results["passed"] == [ + "`.github/workflows/awsfulltest.yml` is triggered correctly", + "`.github/workflows/awsfulltest.yml` does not use `-profile test`", + ] + assert len(results.get("warned", [])) == 0 + assert len(results.get("failed", [])) == 0 + assert len(results.get("ignored", [])) == 0 + + def test_actions_awsfulltest_fail(self): + """Lint test: actions_awsfulltest - FAIL""" + + # Edit .github/workflows/awsfulltest.yml to use -profile test_full + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: + awsfulltest_yml = yaml.safe_load(fh) + del awsfulltest_yml[True]["pull_request_review"] + with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: + yaml.dump(awsfulltest_yml, fh) + + # Make lint object + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + lint_obj._load() + + results = lint_obj.actions_awsfulltest() + assert results["failed"] == ["`.github/workflows/awsfulltest.yml` is not triggered correctly"] + assert "`.github/workflows/awsfulltest.yml` does not use `-profile test`" in results["passed"] + assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index 800ffa16b3..54279cd064 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -1,8 +1,6 @@ """Some tests covering the linting code.""" -import fnmatch import json -import os from pathlib import Path import yaml @@ -133,16 +131,18 @@ def test_sphinx_md_files(self): # Get list of existing .md files existing_docs = [] - for fn in os.listdir(docs_basedir): - if fnmatch.fnmatch(fn, "*.md") and not fnmatch.fnmatch(fn, "index.md"): - existing_docs.append(Path(docs_basedir, fn)) + existing_docs = [ + str(Path(docs_basedir, fn)) + for fn in Path(docs_basedir).iterdir() + if fn.match("*.md") and not fn.match("index.md") + ] # Check .md files against each test name lint_obj = nf_core.pipelines.lint.PipelineLint("", True) for test_name in lint_obj.lint_tests: fn = Path(docs_basedir, f"{test_name}.md") - assert os.path.exists(fn), f"Could not find lint docs .md file: {fn}" - existing_docs.remove(fn) + assert fn.exists(), f"Could not find lint docs .md file: {fn}" + existing_docs.remove(str(fn)) # Check that we have no remaining .md files that we didn't expect assert len(existing_docs) == 0, f"Unexpected lint docs .md files found: {', '.join(existing_docs)}" @@ -150,11 +150,7 @@ def test_sphinx_md_files(self): ####################### # SPECIFIC LINT TESTS # ####################### - from .lint.actions_awsfulltest import ( # type: ignore[misc] - test_actions_awsfulltest_fail, - test_actions_awsfulltest_pass, - test_actions_awsfulltest_warn, - ) + from .lint.actions_awstest import ( # type: ignore[misc] test_actions_awstest_fail, 
test_actions_awstest_pass, From 13be56f73d50e753d941301ed9a84424d3fa3022 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 13:41:25 +0200 Subject: [PATCH 18/89] update prettier version --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c1dc7978f6..bcf7ff65ce 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,7 +10,7 @@ repos: hooks: - id: prettier additional_dependencies: - - prettier@3.2.5 + - prettier@3.3.3 - repo: https://github.com/editorconfig-checker/editorconfig-checker.python rev: "2.7.3" From da155abe4945be13eaa5beb3654dd342dc9d5b19 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 13:54:53 +0200 Subject: [PATCH 19/89] import linting tests correctly to avoid "module is not callable" warnings --- nf_core/pipelines/lint/__init__.py | 73 +++++++++++++++++++----------- 1 file changed, 47 insertions(+), 26 deletions(-) diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index d731cb018d..93f6523709 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -30,6 +30,29 @@ from nf_core.utils import plural_s as _s from nf_core.utils import strip_ansi_codes +from .actions_awsfulltest import actions_awsfulltest +from .actions_awstest import actions_awstest +from .actions_ci import actions_ci +from .actions_schema_validation import actions_schema_validation +from .configs import base_config, modules_config +from .files_exist import files_exist +from .files_unchanged import files_unchanged +from .merge_markers import merge_markers +from .modules_json import modules_json +from .modules_structure import modules_structure +from .multiqc_config import multiqc_config +from .nextflow_config import nextflow_config +from .nfcore_yml import nfcore_yml +from .pipeline_name_conventions import pipeline_name_conventions +from .pipeline_todos import pipeline_todos +from .readme import readme +from .schema_description import schema_description +from .schema_lint import schema_lint +from .schema_params import schema_params +from .system_exit import system_exit +from .template_strings import template_strings +from .version_consistency import version_consistency + log = logging.getLogger(__name__) @@ -52,32 +75,30 @@ class PipelineLint(nf_core.utils.Pipeline): warned (list): A list of tuples of the form: ``(, )`` """ - from .actions_awsfulltest import actions_awsfulltest # type: ignore[misc] - from .actions_awstest import actions_awstest # type: ignore[misc] - from .actions_ci import actions_ci # type: ignore[misc] - from .actions_schema_validation import ( # type: ignore[misc] - actions_schema_validation, - ) - from .configs import base_config, modules_config # type: ignore[misc] - from .files_exist import files_exist # type: ignore[misc] - from .files_unchanged import files_unchanged # type: ignore[misc] - from .merge_markers import merge_markers # type: ignore[misc] - from .modules_json import modules_json # type: ignore[misc] - from .modules_structure import modules_structure # type: ignore[misc] - from .multiqc_config import multiqc_config # type: ignore[misc] - from .nextflow_config import nextflow_config # type: ignore[misc] - from .nfcore_yml import nfcore_yml # type: ignore[misc] - from .pipeline_name_conventions import ( # type: ignore[misc] - pipeline_name_conventions, - ) - from .pipeline_todos import pipeline_todos # type: ignore[misc] - from .readme import readme # type: ignore[misc] - 
from .schema_description import schema_description # type: ignore[misc] - from .schema_lint import schema_lint # type: ignore[misc] - from .schema_params import schema_params # type: ignore[misc] - from .system_exit import system_exit # type: ignore[misc] - from .template_strings import template_strings # type: ignore[misc] - from .version_consistency import version_consistency # type: ignore[misc] + # Import all linting tests as methods for this class + actions_awsfulltest = actions_awsfulltest + actions_awstest = actions_awstest + actions_ci = actions_ci + actions_schema_validation = actions_schema_validation + base_config = base_config + modules_config = modules_config + files_exist = files_exist + files_unchanged = files_unchanged + merge_markers = merge_markers + modules_json = modules_json + modules_structure = modules_structure + multiqc_config = multiqc_config + nextflow_config = nextflow_config + nfcore_yml = nfcore_yml + pipeline_name_conventions = pipeline_name_conventions + pipeline_todos = pipeline_todos + readme = readme + schema_description = schema_description + schema_lint = schema_lint + schema_params = schema_params + system_exit = system_exit + template_strings = template_strings + version_consistency = version_consistency def __init__( self, wf_path, release_mode=False, fix=(), key=None, fail_ignored=False, fail_warned=False, hide_progress=False From 4c77621e8816a0ec5da9d9096efc89e77e5951f6 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 14:31:33 +0200 Subject: [PATCH 20/89] convert rest of the lint test to new subclass structure --- tests/pipelines/lint/actions_awstest.py | 37 -- tests/pipelines/lint/actions_ci.py | 49 --- .../lint/actions_schema_validation.py | 66 ---- tests/pipelines/lint/configs.py | 89 ----- tests/pipelines/lint/files_exist.py | 97 ----- tests/pipelines/lint/files_unchanged.py | 26 -- tests/pipelines/lint/merge_markers.py | 22 -- tests/pipelines/lint/modules_json.py | 6 - tests/pipelines/lint/multiqc_config.py | 129 ------- tests/pipelines/lint/nextflow_config.py | 211 ----------- tests/pipelines/lint/nfcore_yml.py | 53 --- tests/pipelines/lint/template_strings.py | 53 --- tests/pipelines/lint/test_actions_awstest.py | 39 ++ tests/pipelines/lint/test_actions_ci.py | 50 +++ .../lint/test_actions_schema_validation.py | 62 +++ tests/pipelines/lint/test_configs.py | 91 +++++ tests/pipelines/lint/test_files_exist.py | 91 +++++ tests/pipelines/lint/test_files_unchanged.py | 28 ++ tests/pipelines/lint/test_merge_markers.py | 25 ++ tests/pipelines/lint/test_modules_json.py | 10 + tests/pipelines/lint/test_multiqc_config.py | 127 +++++++ tests/pipelines/lint/test_nextflow_config.py | 200 ++++++++++ tests/pipelines/lint/test_nfcore_yml.py | 57 +++ tests/pipelines/lint/test_template_strings.py | 55 +++ .../lint/test_version_consistency.py | 19 + tests/pipelines/lint/version_consistency.py | 14 - tests/pipelines/test_lint.py | 354 ++++++++---------- 27 files changed, 1012 insertions(+), 1048 deletions(-) delete mode 100644 tests/pipelines/lint/actions_awstest.py delete mode 100644 tests/pipelines/lint/actions_ci.py delete mode 100644 tests/pipelines/lint/actions_schema_validation.py delete mode 100644 tests/pipelines/lint/configs.py delete mode 100644 tests/pipelines/lint/files_exist.py delete mode 100644 tests/pipelines/lint/files_unchanged.py delete mode 100644 tests/pipelines/lint/merge_markers.py delete mode 100644 tests/pipelines/lint/modules_json.py delete mode 100644 tests/pipelines/lint/multiqc_config.py delete mode 100644 
tests/pipelines/lint/nextflow_config.py delete mode 100644 tests/pipelines/lint/nfcore_yml.py delete mode 100644 tests/pipelines/lint/template_strings.py create mode 100644 tests/pipelines/lint/test_actions_awstest.py create mode 100644 tests/pipelines/lint/test_actions_ci.py create mode 100644 tests/pipelines/lint/test_actions_schema_validation.py create mode 100644 tests/pipelines/lint/test_configs.py create mode 100644 tests/pipelines/lint/test_files_exist.py create mode 100644 tests/pipelines/lint/test_files_unchanged.py create mode 100644 tests/pipelines/lint/test_merge_markers.py create mode 100644 tests/pipelines/lint/test_modules_json.py create mode 100644 tests/pipelines/lint/test_multiqc_config.py create mode 100644 tests/pipelines/lint/test_nextflow_config.py create mode 100644 tests/pipelines/lint/test_nfcore_yml.py create mode 100644 tests/pipelines/lint/test_template_strings.py create mode 100644 tests/pipelines/lint/test_version_consistency.py delete mode 100644 tests/pipelines/lint/version_consistency.py diff --git a/tests/pipelines/lint/actions_awstest.py b/tests/pipelines/lint/actions_awstest.py deleted file mode 100644 index 259bf866bf..0000000000 --- a/tests/pipelines/lint/actions_awstest.py +++ /dev/null @@ -1,37 +0,0 @@ -import os - -import yaml - -import nf_core.pipelines.lint - - -def test_actions_awstest_pass(self): - """Lint test: actions_awstest - PASS""" - self.lint_obj._load() - results = self.lint_obj.actions_awstest() - assert results["passed"] == ["'.github/workflows/awstest.yml' is triggered correctly"] - assert len(results.get("warned", [])) == 0 - assert len(results.get("failed", [])) == 0 - assert len(results.get("ignored", [])) == 0 - - -def test_actions_awstest_fail(self): - """Lint test: actions_awsfulltest - FAIL""" - - # Edit .github/workflows/awsfulltest.yml to use -profile test_full - new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: - awstest_yml = yaml.safe_load(fh) - awstest_yml[True]["push"] = ["master"] - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: - yaml.dump(awstest_yml, fh) - - # Make lint object - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.actions_awstest() - assert results["failed"] == ["'.github/workflows/awstest.yml' is not triggered correctly"] - assert len(results.get("warned", [])) == 0 - assert len(results.get("passed", [])) == 0 - assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/lint/actions_ci.py b/tests/pipelines/lint/actions_ci.py deleted file mode 100644 index eb438b881d..0000000000 --- a/tests/pipelines/lint/actions_ci.py +++ /dev/null @@ -1,49 +0,0 @@ -import os - -import yaml - -import nf_core.pipelines.lint - - -def test_actions_ci_pass(self): - """Lint test: actions_ci - PASS""" - self.lint_obj._load() - results = self.lint_obj.actions_ci() - assert results["passed"] == [ - "'.github/workflows/ci.yml' is triggered on expected events", - "'.github/workflows/ci.yml' checks minimum NF version", - ] - assert len(results.get("warned", [])) == 0 - assert len(results.get("failed", [])) == 0 - assert len(results.get("ignored", [])) == 0 - - -def test_actions_ci_fail_wrong_nf(self): - """Lint test: actions_ci - FAIL - wrong minimum version of Nextflow tested""" - self.lint_obj._load() - self.lint_obj.minNextflowVersion = "1.2.3" - results = self.lint_obj.actions_ci() - assert results["failed"] == ["Minimum pipeline NF version 
'1.2.3' is not tested in '.github/workflows/ci.yml'"] - - -def test_actions_ci_fail_wrong_trigger(self): - """Lint test: actions_actions_ci - FAIL - workflow triggered incorrectly, NF ver not checked at all""" - - # Edit .github/workflows/actions_ci.yml to mess stuff up! - new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml")) as fh: - ci_yml = yaml.safe_load(fh) - ci_yml[True]["push"] = ["dev", "patch"] - ci_yml["jobs"]["test"]["strategy"]["matrix"] = {"nxf_versionnn": ["foo", ""]} - with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml"), "w") as fh: - yaml.dump(ci_yml, fh) - - # Make lint object - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.actions_ci() - assert results["failed"] == [ - "'.github/workflows/ci.yml' is not triggered on expected events", - "'.github/workflows/ci.yml' does not check minimum NF version", - ] diff --git a/tests/pipelines/lint/actions_schema_validation.py b/tests/pipelines/lint/actions_schema_validation.py deleted file mode 100644 index 4b00e7bf40..0000000000 --- a/tests/pipelines/lint/actions_schema_validation.py +++ /dev/null @@ -1,66 +0,0 @@ -from pathlib import Path - -import yaml - -import nf_core.pipelines.lint - - -def test_actions_schema_validation_missing_jobs(self): - """Missing 'jobs' field should result in failure""" - new_pipeline = self._make_pipeline_copy() - - awstest_yml_path = Path(new_pipeline) / ".github" / "workflows" / "awstest.yml" - with open(awstest_yml_path) as fh: - awstest_yml = yaml.safe_load(fh) - awstest_yml.pop("jobs") - with open(awstest_yml_path, "w") as fh: - yaml.dump(awstest_yml, fh) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.actions_schema_validation() - - assert "Workflow validation failed for awstest.yml: 'jobs' is a required property" in results["failed"][0] - - -def test_actions_schema_validation_missing_on(self): - """Missing 'on' field should result in failure""" - new_pipeline = self._make_pipeline_copy() - - awstest_yml_path = Path(new_pipeline) / ".github" / "workflows" / "awstest.yml" - with open(awstest_yml_path) as fh: - awstest_yml = yaml.safe_load(fh) - awstest_yml.pop(True) - with open(awstest_yml_path, "w") as fh: - yaml.dump(awstest_yml, fh) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.actions_schema_validation() - - assert results["failed"][0] == "Missing 'on' keyword in awstest.yml" - assert "Workflow validation failed for awstest.yml: 'on' is a required property" in results["failed"][1] - - -def test_actions_schema_validation_fails_for_additional_property(self): - """Missing 'jobs' field should result in failure""" - new_pipeline = self._make_pipeline_copy() - - awstest_yml_path = Path(new_pipeline) / ".github" / "workflows" / "awstest.yml" - with open(awstest_yml_path) as fh: - awstest_yml = yaml.safe_load(fh) - awstest_yml["not_jobs"] = awstest_yml["jobs"] - with open(awstest_yml_path, "w") as fh: - yaml.dump(awstest_yml, fh) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.actions_schema_validation() - - assert ( - "Workflow validation failed for awstest.yml: Additional properties are not allowed ('not_jobs' was unexpected)" - in results["failed"][0] - ) diff --git a/tests/pipelines/lint/configs.py b/tests/pipelines/lint/configs.py deleted file mode 100644 index 3ca35cab8c..0000000000 
--- a/tests/pipelines/lint/configs.py +++ /dev/null @@ -1,89 +0,0 @@ -from pathlib import Path - -import yaml - -import nf_core.pipelines.create -import nf_core.pipelines.lint - - -def test_withname_in_modules_config(self): - """Tests finding withName in modules.config passes linting.""" - - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.modules_config() - assert len(result["failed"]) == 0 - assert any( - ["`FASTQC` found in `conf/modules.config` and Nextflow scripts." in passed for passed in result["passed"]] - ) - - -def test_superfluous_withname_in_modules_config_fails(self): - """Tests finding withName in modules.config fails linting.""" - new_pipeline = self._make_pipeline_copy() - # Add withName to modules.config - modules_config = Path(new_pipeline) / "conf" / "modules.config" - with open(modules_config, "a") as f: - f.write("\nwithName: 'BPIPE' {\n cache = false \n}") - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline, hide_progress=False) - lint_obj._load() - result = lint_obj.modules_config() - assert len(result["failed"]) == 1 - assert result["failed"][0].startswith("`conf/modules.config` contains `withName:BPIPE`") - - -def test_ignore_modules_config(self): - """Tests ignoring the modules.config passes linting.""" - new_pipeline = self._make_pipeline_copy() - # ignore modules.config in linting - with open(Path(new_pipeline) / ".nf-core.yml") as f: - content = yaml.safe_load(f) - old_content = content.copy() - content["lint"] = {"modules_config": False} - with open(Path(new_pipeline) / ".nf-core.yml", "w") as f: - yaml.dump(content, f) - Path(new_pipeline, "conf", "modules.config").unlink() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.modules_config() - assert len(result["ignored"]) == 1 - assert result["ignored"][0].startswith("`conf/modules.config` not found, but it is ignored.") - # cleanup - with open(Path(new_pipeline) / ".nf-core.yml", "w") as f: - yaml.dump(old_content, f) - - -def test_superfluous_withname_in_base_config_fails(self): - """Tests finding withName in base.config fails linting.""" - new_pipeline = self._make_pipeline_copy() - # Add withName to base.config - base_config = Path(new_pipeline) / "conf" / "base.config" - with open(base_config, "a") as f: - f.write("\nwithName:CUSTOM_DUMPSOFTWAREVERSIONS {\n cache = false \n}") - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.base_config() - assert len(result["failed"]) == 1 - assert result["failed"][0].startswith("`conf/base.config` contains `withName:CUSTOM_DUMPSOFTWAREVERSIONS`") - - -def test_ignore_base_config(self): - """Tests ignoring the base.config passes linting.""" - new_pipeline = self._make_pipeline_copy() - # ignore base.config in linting - with open(Path(new_pipeline) / ".nf-core.yml") as f: - content = yaml.safe_load(f) - old_content = content.copy() - content["lint"] = {"base_config": False} - with open(Path(new_pipeline) / ".nf-core.yml", "w") as f: - yaml.dump(content, f) - Path(new_pipeline, "conf", "base.config").unlink() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.base_config() - assert len(result["ignored"]) == 1 - assert result["ignored"][0].startswith("`conf/base.config` not found, but it is ignored.") - # cleanup - with open(Path(new_pipeline) / ".nf-core.yml", "w") as f: - yaml.dump(old_content, f) diff --git 
a/tests/pipelines/lint/files_exist.py b/tests/pipelines/lint/files_exist.py deleted file mode 100644 index 4ae167b1d0..0000000000 --- a/tests/pipelines/lint/files_exist.py +++ /dev/null @@ -1,97 +0,0 @@ -from pathlib import Path - -import nf_core.pipelines.lint - - -def test_files_exist_missing_config(self): - """Lint test: critical files missing FAIL""" - new_pipeline = self._make_pipeline_copy() - - Path(new_pipeline, "CHANGELOG.md").unlink() - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" - - results = lint_obj.files_exist() - assert "File not found: `CHANGELOG.md`" in results["failed"] - - -def test_files_exist_missing_main(self): - """Check if missing main issues warning""" - new_pipeline = self._make_pipeline_copy() - - Path(new_pipeline, "main.nf").unlink() - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.files_exist() - assert "File not found: `main.nf`" in results["warned"] - - -def test_files_exist_deprecated_file(self): - """Check whether deprecated file issues warning""" - new_pipeline = self._make_pipeline_copy() - - nf = Path(new_pipeline, "parameters.settings.json") - nf.touch() - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.files_exist() - assert results["failed"] == ["File must be removed: `parameters.settings.json`"] - - -def test_files_exist_pass(self): - """Lint check should pass if all files are there""" - - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.files_exist() - assert results["failed"] == [] - - -def test_files_exist_pass_conditional(self): - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - lint_obj.nf_config["plugins"] = [] - lib_dir = Path(new_pipeline, "lib") - lib_dir.mkdir() - (lib_dir / "nfcore_external_java_deps.jar").touch() - results = lint_obj.files_exist() - assert results["failed"] == [] - assert results["ignored"] == [] - - -def test_files_exist_fail_conditional(self): - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - lib_dir = Path(new_pipeline, "lib") - lib_dir.mkdir() - (lib_dir / "nfcore_external_java_deps.jar").touch() - results = lint_obj.files_exist() - assert results["failed"] == ["File must be removed: `lib/nfcore_external_java_deps.jar`"] - assert results["ignored"] == [] - - -def test_files_exist_pass_conditional_nfschema(self): - new_pipeline = self._make_pipeline_copy() - # replace nf-validation with nf-schema in nextflow.config - with open(Path(new_pipeline, "nextflow.config")) as f: - config = f.read() - config = config.replace("nf-validation", "nf-schema") - with open(Path(new_pipeline, "nextflow.config"), "w") as f: - f.write(config) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - lint_obj.nf_config["manifest.schema"] = "nf-core" - results = lint_obj.files_exist() - assert results["failed"] == [] - assert results["ignored"] == [] diff --git a/tests/pipelines/lint/files_unchanged.py b/tests/pipelines/lint/files_unchanged.py deleted file mode 100644 index 07a7229191..0000000000 --- a/tests/pipelines/lint/files_unchanged.py +++ /dev/null @@ -1,26 +0,0 @@ -from pathlib import Path - -import nf_core.pipelines.lint - - -def 
test_files_unchanged_pass(self): - self.lint_obj._load() - results = self.lint_obj.files_unchanged() - assert len(results.get("warned", [])) == 0 - assert len(results.get("failed", [])) == 0 - assert len(results.get("ignored", [])) == 0 - assert not results.get("could_fix", True) - - -def test_files_unchanged_fail(self): - failing_file = Path(".github", "CONTRIBUTING.md") - new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, failing_file), "a") as fh: - fh.write("THIS SHOULD NOT BE HERE") - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - results = lint_obj.files_unchanged() - assert len(results["failed"]) > 0 - assert str(failing_file) in results["failed"][0] - assert results["could_fix"] diff --git a/tests/pipelines/lint/merge_markers.py b/tests/pipelines/lint/merge_markers.py deleted file mode 100644 index 0e3699e190..0000000000 --- a/tests/pipelines/lint/merge_markers.py +++ /dev/null @@ -1,22 +0,0 @@ -import os - -import nf_core.pipelines.lint - - -def test_merge_markers_found(self): - """Missing 'jobs' field should result in failure""" - new_pipeline = self._make_pipeline_copy() - - with open(os.path.join(new_pipeline, "main.nf")) as fh: - main_nf_content = fh.read() - main_nf_content = ">>>>>>>\n" + main_nf_content - with open(os.path.join(new_pipeline, "main.nf"), "w") as fh: - fh.write(main_nf_content) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.merge_markers() - assert len(results["failed"]) > 0 - assert len(results["passed"]) == 0 - assert "Merge marker '>>>>>>>' in " in results["failed"][0] diff --git a/tests/pipelines/lint/modules_json.py b/tests/pipelines/lint/modules_json.py deleted file mode 100644 index f025daa7f1..0000000000 --- a/tests/pipelines/lint/modules_json.py +++ /dev/null @@ -1,6 +0,0 @@ -def test_modules_json_pass(self): - self.lint_obj._load() - results = self.lint_obj.modules_json() - assert len(results.get("warned", [])) == 0 - assert len(results.get("failed", [])) == 0 - assert len(results.get("passed", [])) > 0 diff --git a/tests/pipelines/lint/multiqc_config.py b/tests/pipelines/lint/multiqc_config.py deleted file mode 100644 index 7f1fdbd67c..0000000000 --- a/tests/pipelines/lint/multiqc_config.py +++ /dev/null @@ -1,129 +0,0 @@ -from pathlib import Path - -import yaml - -import nf_core.pipelines.lint - - -def test_multiqc_config_exists(self): - """Test that linting fails if the multiqc_config.yml file is missing""" - # Delete the file - new_pipeline = self._make_pipeline_copy() - Path(Path(new_pipeline, "assets", "multiqc_config.yml")).unlink() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.multiqc_config() - assert result["failed"] == ["`assets/multiqc_config.yml` not found."] - - -def test_multiqc_config_ignore(self): - """Test that linting succeeds if the multiqc_config.yml file is missing but ignored""" - # Delete the file - new_pipeline = self._make_pipeline_copy() - Path(Path(new_pipeline, "assets", "multiqc_config.yml")).unlink() - with open(Path(new_pipeline, ".nf-core.yml")) as f: - content = yaml.safe_load(f) - old_content = content.copy() - content["lint"] = {"multiqc_config": False} - with open(Path(new_pipeline, ".nf-core.yml"), "w") as f: - yaml.dump(content, f) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.multiqc_config() - assert result["ignored"] == ["`assets/multiqc_config.yml` not found, but it is 
ignored."] - - # cleanup - with open(Path(new_pipeline, ".nf-core.yml"), "w") as f: - yaml.dump(old_content, f) - - -def test_multiqc_config_missing_report_section_order(self): - """Test that linting fails if the multiqc_config.yml file is missing the report_section_order""" - new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: - mqc_yml = yaml.safe_load(fh) - mqc_yml_tmp = mqc_yml.copy() - mqc_yml.pop("report_section_order") - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml, fh) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.multiqc_config() - # Reset the file - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml_tmp, fh) - assert result["failed"] == ["`assets/multiqc_config.yml` does not contain `report_section_order`"] - - -def test_multiqc_incorrect_export_plots(self): - """Test that linting fails if the multiqc_config.yml file has an incorrect value for export_plots""" - new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: - mqc_yml = yaml.safe_load(fh) - mqc_yml_tmp = mqc_yml.copy() - mqc_yml["export_plots"] = False - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml, fh) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.multiqc_config() - # Reset the file - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml_tmp, fh) - assert result["failed"] == ["`assets/multiqc_config.yml` does not contain 'export_plots: true'."] - - -def test_multiqc_config_report_comment_fail(self): - """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment""" - new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: - mqc_yml = yaml.safe_load(fh) - mqc_yml_tmp = mqc_yml.copy() - mqc_yml["report_comment"] = "This is a test" - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml, fh) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.multiqc_config() - # Reset the file - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml_tmp, fh) - assert len(result["failed"]) == 1 - assert result["failed"][0].startswith("`assets/multiqc_config.yml` does not contain a matching 'report_comment'.") - - -def test_multiqc_config_report_comment_release_fail(self): - """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment for a release version""" - new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: - mqc_yml = yaml.safe_load(fh) - mqc_yml_tmp = mqc_yml.copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml, fh) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - # bump version - lint_obj.nf_config["manifest.version"] = "1.0" - result = lint_obj.multiqc_config() - # Reset the file - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml_tmp, fh) - assert len(result["failed"]) == 1 - assert result["failed"][0].startswith("`assets/multiqc_config.yml` does not contain a 
matching 'report_comment'.") - - -def test_multiqc_config_report_comment_release_succeed(self): - """Test that linting fails if the multiqc_config.yml file has a correct report_comment for a release version""" - - import nf_core.pipelines.bump_version - - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - # bump version using the bump_version function - nf_core.pipelines.bump_version.bump_pipeline_version(lint_obj, "1.0") - # lint again - lint_obj._load() - result = lint_obj.multiqc_config() - assert "`assets/multiqc_config.yml` contains a matching 'report_comment'." in result["passed"] diff --git a/tests/pipelines/lint/nextflow_config.py b/tests/pipelines/lint/nextflow_config.py deleted file mode 100644 index d9157c90c6..0000000000 --- a/tests/pipelines/lint/nextflow_config.py +++ /dev/null @@ -1,211 +0,0 @@ -import os -import re -from pathlib import Path - -import nf_core.pipelines.create.create -import nf_core.pipelines.lint - - -def test_nextflow_config_example_pass(self): - """Tests that config variable existence test works with good pipeline example""" - self.lint_obj.load_pipeline_config() - result = self.lint_obj.nextflow_config() - assert len(result["failed"]) == 0 - assert len(result["warned"]) == 0 - - -def test_nextflow_config_bad_name_fail(self): - """Tests that config variable existence test fails with bad pipeline name""" - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - - lint_obj.nf_config["manifest.name"] = "bad_name" - result = lint_obj.nextflow_config() - assert len(result["failed"]) > 0 - assert len(result["warned"]) == 0 - - -def test_nextflow_config_dev_in_release_mode_failed(self): - """Tests that config variable existence test fails with dev version in release mode""" - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - - lint_obj.release_mode = True - lint_obj.nf_config["manifest.version"] = "dev_is_bad_name" - result = lint_obj.nextflow_config() - assert len(result["failed"]) > 0 - assert len(result["warned"]) == 0 - - -def test_nextflow_config_missing_test_profile_failed(self): - """Test failure if config file does not contain `test` profile.""" - new_pipeline = self._make_pipeline_copy() - # Change the name of the test profile so there is no such profile - nf_conf_file = os.path.join(new_pipeline, "nextflow.config") - with open(nf_conf_file) as f: - content = f.read() - fail_content = re.sub(r"\btest\b", "testfail", content) - with open(nf_conf_file, "w") as f: - f.write(fail_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - result = lint_obj.nextflow_config() - assert len(result["failed"]) > 0 - assert len(result["warned"]) == 0 - - -def test_default_values_match(self): - """Test that the default values in nextflow.config match the default values defined in the nextflow_schema.json.""" - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - result = lint_obj.nextflow_config() - assert len(result["failed"]) == 0 - assert len(result["warned"]) == 0 - assert "Config default value correct: params.max_cpus" in str(result["passed"]) - assert "Config default value correct: params.validate_params" in str(result["passed"]) - - -def test_default_values_fail(self): - """Test 
linting fails if the default values in nextflow.config do not match the ones defined in the nextflow_schema.json.""" - new_pipeline = self._make_pipeline_copy() - # Change the default value of max_cpus in nextflow.config - nf_conf_file = Path(new_pipeline) / "nextflow.config" - with open(nf_conf_file) as f: - content = f.read() - fail_content = re.sub(r"\bmax_cpus\s*=\s*16\b", "max_cpus = 0", content) - with open(nf_conf_file, "w") as f: - f.write(fail_content) - # Change the default value of max_memory in nextflow_schema.json - nf_schema_file = Path(new_pipeline) / "nextflow_schema.json" - with open(nf_schema_file) as f: - content = f.read() - fail_content = re.sub(r'"default": "128.GB"', '"default": "18.GB"', content) - with open(nf_schema_file, "w") as f: - f.write(fail_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - result = lint_obj.nextflow_config() - assert len(result["failed"]) == 2 - assert ( - "Config default value incorrect: `params.max_cpus` is set as `16` in `nextflow_schema.json` but is `0` in `nextflow.config`." - in result["failed"] - ) - assert ( - "Config default value incorrect: `params.max_memory` is set as `18.GB` in `nextflow_schema.json` but is `128.GB` in `nextflow.config`." - in result["failed"] - ) - - -def test_catch_params_assignment_in_main_nf(self): - """Test linting fails if main.nf contains an assignment to a parameter from nextflow_schema.json.""" - new_pipeline = self._make_pipeline_copy() - # Add parameter assignment in main.nf - main_nf_file = Path(new_pipeline) / "main.nf" - with open(main_nf_file, "a") as f: - f.write("params.max_time = 42") - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - result = lint_obj.nextflow_config() - assert len(result["failed"]) == 1 - assert ( - result["failed"][0] - == "Config default value incorrect: `params.max_time` is set as `240.h` in `nextflow_schema.json` but is `null` in `nextflow.config`." - ) - - -def test_allow_params_reference_in_main_nf(self): - """Test linting allows for references like `params.aligner == 'bwa'` in main.nf. 
The test will detect if the bug mentioned in GitHub-issue #2833 reemerges.""" - new_pipeline = self._make_pipeline_copy() - # Add parameter reference in main.nf - main_nf_file = Path(new_pipeline) / "main.nf" - with open(main_nf_file, "a") as f: - f.write("params.max_time == 42") - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - result = lint_obj.nextflow_config() - assert len(result["failed"]) == 0 - - -def test_default_values_ignored(self): - """Test ignoring linting of default values.""" - new_pipeline = self._make_pipeline_copy() - # Add max_cpus to the ignore list - nf_core_yml = Path(new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write( - "repository_type: pipeline\nlint:\n nextflow_config:\n - config_defaults:\n - params.max_cpus\n" - ) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - lint_obj._load_lint_config() - result = lint_obj.nextflow_config() - assert len(result["failed"]) == 0 - assert len(result["ignored"]) == 1 - assert "Config default value correct: params.max_cpu" not in str(result["passed"]) - assert "Config default ignored: params.max_cpus" in str(result["ignored"]) - - -def test_default_values_float(self): - """Test comparing two float values.""" - new_pipeline = self._make_pipeline_copy() - # Add a float value `dummy=0.0001` to the nextflow.config below `validate_params` - nf_conf_file = Path(new_pipeline) / "nextflow.config" - with open(nf_conf_file) as f: - content = f.read() - fail_content = re.sub( - r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content - ) - with open(nf_conf_file, "w") as f: - f.write(fail_content) - # Add a float value `dummy` to the nextflow_schema.json - nf_schema_file = Path(new_pipeline) / "nextflow_schema.json" - with open(nf_schema_file) as f: - content = f.read() - fail_content = re.sub( - r'"validate_params": {', - ' "dummy": {"type": "number","default":0.000000001},\n"validate_params": {', - content, - ) - with open(nf_schema_file, "w") as f: - f.write(fail_content) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - result = lint_obj.nextflow_config() - assert len(result["failed"]) == 0 - assert len(result["warned"]) == 0 - assert "Config default value correct: params.dummy" in str(result["passed"]) - - -def test_default_values_float_fail(self): - """Test comparing two float values.""" - new_pipeline = self._make_pipeline_copy() - # Add a float value `dummy=0.0001` to the nextflow.config below `validate_params` - nf_conf_file = Path(new_pipeline) / "nextflow.config" - with open(nf_conf_file) as f: - content = f.read() - fail_content = re.sub( - r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content - ) - with open(nf_conf_file, "w") as f: - f.write(fail_content) - # Add a float value `dummy` to the nextflow_schema.json - nf_schema_file = Path(new_pipeline) / "nextflow_schema.json" - with open(nf_schema_file) as f: - content = f.read() - fail_content = re.sub( - r'"validate_params": {', ' "dummy": {"type": "float","default":0.000001},\n"validate_params": {', content - ) - with open(nf_schema_file, "w") as f: - f.write(fail_content) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - result = lint_obj.nextflow_config() - - assert len(result["failed"]) == 1 - assert len(result["warned"]) == 0 - assert "Config default value incorrect: 
`params.dummy" in str(result["failed"]) diff --git a/tests/pipelines/lint/nfcore_yml.py b/tests/pipelines/lint/nfcore_yml.py deleted file mode 100644 index 94d2870e11..0000000000 --- a/tests/pipelines/lint/nfcore_yml.py +++ /dev/null @@ -1,53 +0,0 @@ -import re -from pathlib import Path - -import nf_core.pipelines.create -import nf_core.pipelines.lint - - -def test_nfcore_yml_pass(self): - """Lint test: nfcore_yml - PASS""" - self.lint_obj._load() - results = self.lint_obj.nfcore_yml() - - assert "Repository type in `.nf-core.yml` is valid" in str(results["passed"]) - assert "nf-core version in `.nf-core.yml` is set to the latest version" in str(results["passed"]) - assert len(results.get("warned", [])) == 0 - assert len(results.get("failed", [])) == 0 - assert len(results.get("ignored", [])) == 0 - - -def test_nfcore_yml_fail_repo_type(self): - """Lint test: nfcore_yml - FAIL - repository type not set""" - new_pipeline = self._make_pipeline_copy() - nf_core_yml = Path(new_pipeline) / ".nf-core.yml" - with open(nf_core_yml) as fh: - content = fh.read() - new_content = content.replace("repository_type: pipeline", "repository_type: foo") - with open(nf_core_yml, "w") as fh: - fh.write(new_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - results = lint_obj.nfcore_yml() - assert "Repository type in `.nf-core.yml` is not valid." in str(results["failed"]) - assert len(results.get("warned", [])) == 0 - assert len(results.get("passed", [])) >= 0 - assert len(results.get("ignored", [])) == 0 - - -def test_nfcore_yml_fail_nfcore_version(self): - """Lint test: nfcore_yml - FAIL - nf-core version not set""" - new_pipeline = self._make_pipeline_copy() - nf_core_yml = Path(new_pipeline) / ".nf-core.yml" - with open(nf_core_yml) as fh: - content = fh.read() - new_content = re.sub(r"nf_core_version:.+", "nf_core_version: foo", content) - with open(nf_core_yml, "w") as fh: - fh.write(new_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - results = lint_obj.nfcore_yml() - assert "nf-core version in `.nf-core.yml` is not set to the latest version." 
in str(results["warned"])
-    assert len(results.get("failed", [])) == 0
-    assert len(results.get("passed", [])) >= 0
-    assert len(results.get("ignored", [])) == 0
diff --git a/tests/pipelines/lint/template_strings.py b/tests/pipelines/lint/template_strings.py
deleted file mode 100644
index 2db9e20a3f..0000000000
--- a/tests/pipelines/lint/template_strings.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import subprocess
-from pathlib import Path
-
-import nf_core.pipelines.create
-import nf_core.pipelines.lint
-
-
-def test_template_strings(self):
-    """Tests finding a template string in a file fails linting."""
-    new_pipeline = self._make_pipeline_copy()
-    # Add template string to a file
-    txt_file = Path(new_pipeline) / "docs" / "test.txt"
-    with open(txt_file, "w") as f:
-        f.write("my {{ template_string }}")
-    subprocess.check_output(["git", "add", "docs"], cwd=new_pipeline)
-    lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    result = lint_obj.template_strings()
-    assert len(result["failed"]) == 1
-    assert len(result["ignored"]) == 0
-
-
-def test_template_strings_ignored(self):
-    """Tests ignoring template_strings"""
-    new_pipeline = self._make_pipeline_copy()
-    # Ignore template_strings test
-    nf_core_yml = Path(new_pipeline) / ".nf-core.yml"
-    with open(nf_core_yml, "w") as f:
-        f.write("repository_type: pipeline\nlint:\n  template_strings: False")
-    lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    lint_obj._lint_pipeline()
-    assert len(lint_obj.failed) == 0
-    assert len(lint_obj.ignored) == 1
-
-
-def test_template_strings_ignore_file(self):
-    """Tests ignoring template_strings file"""
-    new_pipeline = self._make_pipeline_copy()
-    # Add template string to a file
-    txt_file = Path(new_pipeline) / "docs" / "test.txt"
-    with open(txt_file, "w") as f:
-        f.write("my {{ template_string }}")
-    subprocess.check_output(["git", "add", "docs"], cwd=new_pipeline)
-    # Ignore template_strings test
-    nf_core_yml = Path(new_pipeline) / ".nf-core.yml"
-    with open(nf_core_yml, "w") as f:
-        f.write("repository_type: pipeline\nlint:\n  template_strings:\n    - docs/test.txt")
-    lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    result = lint_obj.template_strings()
-    assert len(result["failed"]) == 0
-    assert len(result["ignored"]) == 1
diff --git a/tests/pipelines/lint/test_actions_awstest.py b/tests/pipelines/lint/test_actions_awstest.py
new file mode 100644
index 0000000000..51b55cb867
--- /dev/null
+++ b/tests/pipelines/lint/test_actions_awstest.py
@@ -0,0 +1,39 @@
+from pathlib import Path
+
+import yaml
+
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintActionsAws(TestLint):
+    def test_actions_awstest_pass(self):
+        """Lint test: actions_awstest - PASS"""
+        self.lint_obj._load()
+        results = self.lint_obj.actions_awstest()
+        assert results["passed"] == ["'.github/workflows/awstest.yml' is triggered correctly"]
+        assert len(results.get("warned", [])) == 0
+        assert len(results.get("failed", [])) == 0
+        assert len(results.get("ignored", [])) == 0
+
+    def test_actions_awstest_fail(self):
+        """Lint test: actions_awstest - FAIL"""
+
+        # Edit .github/workflows/awstest.yml to add an incorrect `push` trigger
+        new_pipeline = self._make_pipeline_copy()
+        with open(Path(new_pipeline, ".github", "workflows", "awstest.yml")) as fh:
+            awstest_yml = yaml.safe_load(fh)
+        awstest_yml[True]["push"] = ["master"]
+        with open(Path(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh:
+            yaml.dump(awstest_yml, fh)
+
+        # Make lint object
+        lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.actions_awstest()
+        assert results["failed"] == ["'.github/workflows/awstest.yml' is not triggered correctly"]
+        assert len(results.get("warned", [])) == 0
+        assert len(results.get("passed", [])) == 0
+        assert len(results.get("ignored", [])) == 0
diff --git a/tests/pipelines/lint/test_actions_ci.py b/tests/pipelines/lint/test_actions_ci.py
new file mode 100644
index 0000000000..7319ce4b0c
--- /dev/null
+++ b/tests/pipelines/lint/test_actions_ci.py
@@ -0,0 +1,50 @@
+from pathlib import Path
+
+import yaml
+
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintActionsCi(TestLint):
+    def test_actions_ci_pass(self):
+        """Lint test: actions_ci - PASS"""
+        self.lint_obj._load()
+        results = self.lint_obj.actions_ci()
+        assert results["passed"] == [
+            "'.github/workflows/ci.yml' is triggered on expected events",
+            "'.github/workflows/ci.yml' checks minimum NF version",
+        ]
+        assert len(results.get("warned", [])) == 0
+        assert len(results.get("failed", [])) == 0
+        assert len(results.get("ignored", [])) == 0
+
+    def test_actions_ci_fail_wrong_nf(self):
+        """Lint test: actions_ci - FAIL - wrong minimum version of Nextflow tested"""
+        self.lint_obj._load()
+        self.lint_obj.minNextflowVersion = "1.2.3"
+        results = self.lint_obj.actions_ci()
+        assert results["failed"] == ["Minimum pipeline NF version '1.2.3' is not tested in '.github/workflows/ci.yml'"]
+
+    def test_actions_ci_fail_wrong_trigger(self):
+        """Lint test: actions_ci - FAIL - workflow triggered incorrectly, NF ver not checked at all"""
+
+        # Edit .github/workflows/ci.yml to mess stuff up!
+        new_pipeline = self._make_pipeline_copy()
+        with open(Path(new_pipeline, ".github", "workflows", "ci.yml")) as fh:
+            ci_yml = yaml.safe_load(fh)
+        ci_yml[True]["push"] = ["dev", "patch"]
+        ci_yml["jobs"]["test"]["strategy"]["matrix"] = {"nxf_versionnn": ["foo", ""]}
+        with open(Path(new_pipeline, ".github", "workflows", "ci.yml"), "w") as fh:
+            yaml.dump(ci_yml, fh)
+
+        # Make lint object
+        lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.actions_ci()
+        assert results["failed"] == [
+            "'.github/workflows/ci.yml' is not triggered on expected events",
+            "'.github/workflows/ci.yml' does not check minimum NF version",
+        ]
diff --git a/tests/pipelines/lint/test_actions_schema_validation.py b/tests/pipelines/lint/test_actions_schema_validation.py
new file mode 100644
index 0000000000..34f6b5fcb8
--- /dev/null
+++ b/tests/pipelines/lint/test_actions_schema_validation.py
@@ -0,0 +1,62 @@
+from pathlib import Path
+
+import yaml
+
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintActionsSchemaValidation(TestLint):
+    def setUp(self) -> None:
+        super().setUp()
+        self.new_pipeline = self._make_pipeline_copy()
+        self.awstest_yml_path = Path(self.new_pipeline) / ".github" / "workflows" / "awstest.yml"
+        with open(self.awstest_yml_path) as fh:
+            self.awstest_yml = yaml.safe_load(fh)
+
+    def test_actions_schema_validation_missing_jobs(self):
+        """Missing 'jobs' field should result in failure"""
+
+        self.awstest_yml.pop("jobs")
+        with open(self.awstest_yml_path, "w") as fh:
+            yaml.dump(self.awstest_yml, fh)
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.actions_schema_validation()
+
+        assert "Workflow validation failed for awstest.yml: 'jobs' is a required property" in results["failed"][0]
+
+    def test_actions_schema_validation_missing_on(self):
+        """Missing 'on' field should result in failure"""
+
+        self.awstest_yml.pop(True)
+        with open(self.awstest_yml_path, "w") as fh:
+            yaml.dump(self.awstest_yml, fh)
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.actions_schema_validation()
+
+        assert results["failed"][0] == "Missing 'on' keyword in awstest.yml"
+        assert "Workflow validation failed for awstest.yml: 'on' is a required property" in results["failed"][1]
+
+    def test_actions_schema_validation_fails_for_additional_property(self):
+        """An additional property should result in failure"""
+
+        self.awstest_yml["not_jobs"] = self.awstest_yml["jobs"]
+        with open(self.awstest_yml_path, "w") as fh:
+            yaml.dump(self.awstest_yml, fh)
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.actions_schema_validation()
+
+        assert (
+            "Workflow validation failed for awstest.yml: Additional properties are not allowed ('not_jobs' was unexpected)"
+            in results["failed"][0]
+        )
diff --git a/tests/pipelines/lint/test_configs.py b/tests/pipelines/lint/test_configs.py
new file mode 100644
index 0000000000..7bb6329b5b
--- /dev/null
+++ b/tests/pipelines/lint/test_configs.py
@@ -0,0 +1,91 @@
+from pathlib import Path
+
+import yaml
+
+import nf_core.pipelines.create
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintConfigs(TestLint):
+    def setUp(self) -> None:
+        super().setUp()
+        self.new_pipeline = self._make_pipeline_copy()
+
+    def test_withname_in_modules_config(self):
+        """Tests finding withName in modules.config passes linting."""
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        result = lint_obj.modules_config()
+        assert len(result["failed"]) == 0
+        assert any(
+            ["`FASTQC` found in `conf/modules.config` and Nextflow scripts."
in passed for passed in result["passed"]] + ) + + def test_superfluous_withname_in_modules_config_fails(self): + """Tests finding withName in modules.config fails linting.""" + + # Add withName to modules.config + modules_config = Path(self.new_pipeline) / "conf" / "modules.config" + with open(modules_config, "a") as f: + f.write("\nwithName: 'BPIPE' {\n cache = false \n}") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline, hide_progress=False) + lint_obj._load() + result = lint_obj.modules_config() + assert len(result["failed"]) == 1 + assert result["failed"][0].startswith("`conf/modules.config` contains `withName:BPIPE`") + + def test_ignore_modules_config(self): + """Tests ignoring the modules.config passes linting.""" + + # ignore modules.config in linting + with open(Path(self.new_pipeline) / ".nf-core.yml") as f: + content = yaml.safe_load(f) + old_content = content.copy() + content["lint"] = {"modules_config": False} + with open(Path(self.new_pipeline) / ".nf-core.yml", "w") as f: + yaml.dump(content, f) + Path(self.new_pipeline, "conf", "modules.config").unlink() + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.modules_config() + assert len(result["ignored"]) == 1 + assert result["ignored"][0].startswith("`conf/modules.config` not found, but it is ignored.") + # cleanup + with open(Path(self.new_pipeline) / ".nf-core.yml", "w") as f: + yaml.dump(old_content, f) + + def test_superfluous_withname_in_base_config_fails(self): + """Tests finding withName in base.config fails linting.""" + + # Add withName to base.config + base_config = Path(self.new_pipeline) / "conf" / "base.config" + with open(base_config, "a") as f: + f.write("\nwithName:CUSTOM_DUMPSOFTWAREVERSIONS {\n cache = false \n}") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.base_config() + assert len(result["failed"]) == 1 + assert result["failed"][0].startswith("`conf/base.config` contains `withName:CUSTOM_DUMPSOFTWAREVERSIONS`") + + def test_ignore_base_config(self): + """Tests ignoring the base.config passes linting.""" + + # ignore base.config in linting + with open(Path(self.new_pipeline) / ".nf-core.yml") as f: + content = yaml.safe_load(f) + old_content = content.copy() + content["lint"] = {"base_config": False} + with open(Path(self.new_pipeline) / ".nf-core.yml", "w") as f: + yaml.dump(content, f) + Path(self.new_pipeline, "conf", "base.config").unlink() + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.base_config() + assert len(result["ignored"]) == 1 + assert result["ignored"][0].startswith("`conf/base.config` not found, but it is ignored.") + # cleanup + with open(Path(self.new_pipeline) / ".nf-core.yml", "w") as f: + yaml.dump(old_content, f) diff --git a/tests/pipelines/lint/test_files_exist.py b/tests/pipelines/lint/test_files_exist.py new file mode 100644 index 0000000000..85ba817536 --- /dev/null +++ b/tests/pipelines/lint/test_files_exist.py @@ -0,0 +1,91 @@ +from pathlib import Path + +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintFilesExist(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + + def test_files_exist_missing_config(self): + """Lint test: critical files missing FAIL""" + + Path(self.new_pipeline, "CHANGELOG.md").unlink() + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() 
+ lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" + + results = lint_obj.files_exist() + assert "File not found: `CHANGELOG.md`" in results["failed"] + + def test_files_exist_missing_main(self): + """Check if missing main issues warning""" + + Path(self.new_pipeline, "main.nf").unlink() + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + + results = lint_obj.files_exist() + assert "File not found: `main.nf`" in results["warned"] + + def test_files_exist_deprecated_file(self): + """Check whether deprecated file issues warning""" + + nf = Path(self.new_pipeline, "parameters.settings.json") + nf.touch() + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + + results = lint_obj.files_exist() + assert results["failed"] == ["File must be removed: `parameters.settings.json`"] + + def test_files_exist_pass(self): + """Lint check should pass if all files are there""" + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + + results = lint_obj.files_exist() + assert results["failed"] == [] + + def test_files_exist_pass_conditional(self): + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + lint_obj.nf_config["plugins"] = [] + lib_dir = Path(self.new_pipeline, "lib") + lib_dir.mkdir() + (lib_dir / "nfcore_external_java_deps.jar").touch() + results = lint_obj.files_exist() + assert results["failed"] == [] + assert results["ignored"] == [] + + def test_files_exist_fail_conditional(self): + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + lib_dir = Path(self.new_pipeline, "lib") + lib_dir.mkdir() + (lib_dir / "nfcore_external_java_deps.jar").touch() + results = lint_obj.files_exist() + assert results["failed"] == ["File must be removed: `lib/nfcore_external_java_deps.jar`"] + assert results["ignored"] == [] + + def test_files_exist_pass_conditional_nfschema(self): + # replace nf-validation with nf-schema in nextflow.config + with open(Path(self.new_pipeline, "nextflow.config")) as f: + config = f.read() + config = config.replace("nf-validation", "nf-schema") + with open(Path(self.new_pipeline, "nextflow.config"), "w") as f: + f.write(config) + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + lint_obj.nf_config["manifest.schema"] = "nf-core" + results = lint_obj.files_exist() + assert results["failed"] == [] + assert results["ignored"] == [] diff --git a/tests/pipelines/lint/test_files_unchanged.py b/tests/pipelines/lint/test_files_unchanged.py new file mode 100644 index 0000000000..4282b49958 --- /dev/null +++ b/tests/pipelines/lint/test_files_unchanged.py @@ -0,0 +1,28 @@ +from pathlib import Path + +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintFilesUnchanged(TestLint): + def test_files_unchanged_pass(self): + self.lint_obj._load() + results = self.lint_obj.files_unchanged() + assert len(results.get("warned", [])) == 0 + assert len(results.get("failed", [])) == 0 + assert len(results.get("ignored", [])) == 0 + assert not results.get("could_fix", True) + + def test_files_unchanged_fail(self): + failing_file = Path(".github", "CONTRIBUTING.md") + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, failing_file), "a") as fh: + fh.write("THIS SHOULD NOT BE HERE") + + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + lint_obj._load() + results = lint_obj.files_unchanged() + assert 
len(results["failed"]) > 0
+        assert str(failing_file) in results["failed"][0]
+        assert results["could_fix"]
diff --git a/tests/pipelines/lint/test_merge_markers.py b/tests/pipelines/lint/test_merge_markers.py
new file mode 100644
index 0000000000..3094d8f8d1
--- /dev/null
+++ b/tests/pipelines/lint/test_merge_markers.py
@@ -0,0 +1,25 @@
+import os
+
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintMergeMarkers(TestLint):
+    def test_merge_markers_found(self):
+        """Finding a merge marker in a file should result in failure"""
+        new_pipeline = self._make_pipeline_copy()
+
+        with open(os.path.join(new_pipeline, "main.nf")) as fh:
+            main_nf_content = fh.read()
+        main_nf_content = ">>>>>>>\n" + main_nf_content
+        with open(os.path.join(new_pipeline, "main.nf"), "w") as fh:
+            fh.write(main_nf_content)
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.merge_markers()
+        assert len(results["failed"]) > 0
+        assert len(results["passed"]) == 0
+        assert "Merge marker '>>>>>>>' in " in results["failed"][0]
diff --git a/tests/pipelines/lint/test_modules_json.py b/tests/pipelines/lint/test_modules_json.py
new file mode 100644
index 0000000000..0d8333d9a2
--- /dev/null
+++ b/tests/pipelines/lint/test_modules_json.py
@@ -0,0 +1,10 @@
+from ..test_lint import TestLint
+
+
+class TestLintModulesJson(TestLint):
+    def test_modules_json_pass(self):
+        self.lint_obj._load()
+        results = self.lint_obj.modules_json()
+        assert len(results.get("warned", [])) == 0
+        assert len(results.get("failed", [])) == 0
+        assert len(results.get("passed", [])) > 0
diff --git a/tests/pipelines/lint/test_multiqc_config.py b/tests/pipelines/lint/test_multiqc_config.py
new file mode 100644
index 0000000000..5da6e567ec
--- /dev/null
+++ b/tests/pipelines/lint/test_multiqc_config.py
@@ -0,0 +1,127 @@
+from pathlib import Path
+
+import yaml
+
+import nf_core.pipelines.bump_version
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintMultiqcConfig(TestLint):
+    def setUp(self) -> None:
+        super().setUp()
+        self.new_pipeline = self._make_pipeline_copy()
+        self.multiqc_config_yml = Path(self.new_pipeline, "assets", "multiqc_config.yml")
+
+    def test_multiqc_config_exists(self):
+        """Test that linting fails if the multiqc_config.yml file is missing"""
+        # Delete the file
+        self.multiqc_config_yml.unlink()
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        result = lint_obj.multiqc_config()
+        assert result["failed"] == ["`assets/multiqc_config.yml` not found."]
+
+    def test_multiqc_config_ignore(self):
+        """Test that linting succeeds if the multiqc_config.yml file is missing but ignored"""
+        # Delete the file
+        self.multiqc_config_yml.unlink()
+        with open(Path(self.new_pipeline, ".nf-core.yml")) as f:
+            content = yaml.safe_load(f)
+        old_content = content.copy()
+        content["lint"] = {"multiqc_config": False}
+        with open(Path(self.new_pipeline, ".nf-core.yml"), "w") as f:
+            yaml.dump(content, f)
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        result = lint_obj.multiqc_config()
+        assert result["ignored"] == ["`assets/multiqc_config.yml` not found, but it is ignored."]
+
+        # cleanup
+        with open(Path(self.new_pipeline, ".nf-core.yml"), "w") as f:
+            yaml.dump(old_content, f)
+
+    def test_multiqc_config_missing_report_section_order(self):
+        """Test that linting fails if the multiqc_config.yml file is missing the report_section_order"""
+        with open(self.multiqc_config_yml) as fh:
+            mqc_yml = yaml.safe_load(fh)
+        mqc_yml_tmp = mqc_yml.copy()
+        mqc_yml.pop("report_section_order")
+        with open(self.multiqc_config_yml, "w") as fh:
+            yaml.safe_dump(mqc_yml, fh)
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        result = lint_obj.multiqc_config()
+        # Reset the file
+        with open(self.multiqc_config_yml, "w") as fh:
+            yaml.safe_dump(mqc_yml_tmp, fh)
+        assert result["failed"] == ["`assets/multiqc_config.yml` does not contain `report_section_order`"]
+
+    def test_multiqc_incorrect_export_plots(self):
+        """Test that linting fails if the multiqc_config.yml file has an incorrect value for export_plots"""
+        with open(self.multiqc_config_yml) as fh:
+            mqc_yml = yaml.safe_load(fh)
+        mqc_yml_tmp = mqc_yml.copy()
+        mqc_yml["export_plots"] = False
+        with open(self.multiqc_config_yml, "w") as fh:
+            yaml.safe_dump(mqc_yml, fh)
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        result = lint_obj.multiqc_config()
+        # Reset the file
+        with open(self.multiqc_config_yml, "w") as fh:
+            yaml.safe_dump(mqc_yml_tmp, fh)
+        assert result["failed"] == ["`assets/multiqc_config.yml` does not contain 'export_plots: true'."]
+
+    def test_multiqc_config_report_comment_fail(self):
+        """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment"""
+        with open(self.multiqc_config_yml) as fh:
+            mqc_yml = yaml.safe_load(fh)
+        mqc_yml_tmp = mqc_yml.copy()
+        mqc_yml["report_comment"] = "This is a test"
+        with open(self.multiqc_config_yml, "w") as fh:
+            yaml.safe_dump(mqc_yml, fh)
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        result = lint_obj.multiqc_config()
+        # Reset the file
+        with open(self.multiqc_config_yml, "w") as fh:
+            yaml.safe_dump(mqc_yml_tmp, fh)
+        assert len(result["failed"]) == 1
+        assert result["failed"][0].startswith(
+            "`assets/multiqc_config.yml` does not contain a matching 'report_comment'."
+        )
+
+    def test_multiqc_config_report_comment_release_fail(self):
+        """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment for a release version"""
+        with open(self.multiqc_config_yml) as fh:
+            mqc_yml = yaml.safe_load(fh)
+        mqc_yml_tmp = mqc_yml.copy()
+        with open(self.multiqc_config_yml, "w") as fh:
+            yaml.safe_dump(mqc_yml, fh)
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        # bump version
+        lint_obj.nf_config["manifest.version"] = "1.0"
+        result = lint_obj.multiqc_config()
+        # Reset the file
+        with open(self.multiqc_config_yml, "w") as fh:
+            yaml.safe_dump(mqc_yml_tmp, fh)
+        assert len(result["failed"]) == 1
+        assert result["failed"][0].startswith(
+            "`assets/multiqc_config.yml` does not contain a matching 'report_comment'."
+        )
+
+    def test_multiqc_config_report_comment_release_succeed(self):
+        """Test that linting passes if the multiqc_config.yml file has a correct report_comment for a release version"""
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        # bump version using the bump_version function
+        nf_core.pipelines.bump_version.bump_pipeline_version(lint_obj, "1.0")
+        # lint again
+        lint_obj._load()
+        result = lint_obj.multiqc_config()
+        assert "`assets/multiqc_config.yml` contains a matching 'report_comment'."
in result["passed"] diff --git a/tests/pipelines/lint/test_nextflow_config.py b/tests/pipelines/lint/test_nextflow_config.py new file mode 100644 index 0000000000..01173aec34 --- /dev/null +++ b/tests/pipelines/lint/test_nextflow_config.py @@ -0,0 +1,200 @@ +import os +import re +from pathlib import Path + +import nf_core.pipelines.create.create +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintNextflowConfig(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + + def test_nextflow_config_example_pass(self): + """Tests that config variable existence test works with good pipeline example""" + self.lint_obj.load_pipeline_config() + result = self.lint_obj.nextflow_config() + assert len(result["failed"]) == 0 + assert len(result["warned"]) == 0 + + def test_default_values_match(self): + """Test that the default values in nextflow.config match the default values defined in the nextflow_schema.json.""" + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) == 0 + assert len(result["warned"]) == 0 + assert "Config default value correct: params.max_cpus" in str(result["passed"]) + assert "Config default value correct: params.validate_params" in str(result["passed"]) + + def test_nextflow_config_bad_name_fail(self): + """Tests that config variable existence test fails with bad pipeline name""" + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + + lint_obj.nf_config["manifest.name"] = "bad_name" + result = lint_obj.nextflow_config() + assert len(result["failed"]) > 0 + assert len(result["warned"]) == 0 + + def test_nextflow_config_dev_in_release_mode_failed(self): + """Tests that config variable existence test fails with dev version in release mode""" + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + + lint_obj.release_mode = True + lint_obj.nf_config["manifest.version"] = "dev_is_bad_name" + result = lint_obj.nextflow_config() + assert len(result["failed"]) > 0 + assert len(result["warned"]) == 0 + + def test_nextflow_config_missing_test_profile_failed(self): + """Test failure if config file does not contain `test` profile.""" + # Change the name of the test profile so there is no such profile + nf_conf_file = os.path.join(self.new_pipeline, "nextflow.config") + with open(nf_conf_file) as f: + content = f.read() + fail_content = re.sub(r"\btest\b", "testfail", content) + with open(nf_conf_file, "w") as f: + f.write(fail_content) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) > 0 + assert len(result["warned"]) == 0 + + def test_default_values_fail(self): + """Test linting fails if the default values in nextflow.config do not match the ones defined in the nextflow_schema.json.""" + # Change the default value of max_cpus in nextflow.config + nf_conf_file = Path(self.new_pipeline) / "nextflow.config" + with open(nf_conf_file) as f: + content = f.read() + fail_content = re.sub(r"\bmax_cpus\s*=\s*16\b", "max_cpus = 0", content) + with open(nf_conf_file, "w") as f: + f.write(fail_content) + # Change the default value of max_memory in nextflow_schema.json + nf_schema_file = Path(self.new_pipeline) / "nextflow_schema.json" + with open(nf_schema_file) as f: + content = f.read() + 
fail_content = re.sub(r'"default": "128.GB"', '"default": "18.GB"', content)
+        with open(nf_schema_file, "w") as f:
+            f.write(fail_content)
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj.load_pipeline_config()
+        result = lint_obj.nextflow_config()
+        assert len(result["failed"]) == 2
+        assert (
+            "Config default value incorrect: `params.max_cpus` is set as `16` in `nextflow_schema.json` but is `0` in `nextflow.config`."
+            in result["failed"]
+        )
+        assert (
+            "Config default value incorrect: `params.max_memory` is set as `18.GB` in `nextflow_schema.json` but is `128.GB` in `nextflow.config`."
+            in result["failed"]
+        )
+
+    def test_catch_params_assignment_in_main_nf(self):
+        """Test linting fails if main.nf contains an assignment to a parameter from nextflow_schema.json."""
+        # Add parameter assignment in main.nf
+        main_nf_file = Path(self.new_pipeline) / "main.nf"
+        with open(main_nf_file, "a") as f:
+            f.write("params.max_time = 42")
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj.load_pipeline_config()
+        result = lint_obj.nextflow_config()
+        assert len(result["failed"]) == 1
+        assert (
+            result["failed"][0]
+            == "Config default value incorrect: `params.max_time` is set as `240.h` in `nextflow_schema.json` but is `null` in `nextflow.config`."
+        )
+
+    def test_allow_params_reference_in_main_nf(self):
+        """Test linting allows for references like `params.aligner == 'bwa'` in main.nf. The test will detect if the bug mentioned in GitHub-issue #2833 reemerges."""
+        # Add parameter reference in main.nf
+        main_nf_file = Path(self.new_pipeline) / "main.nf"
+        with open(main_nf_file, "a") as f:
+            f.write("params.max_time == 42")
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj.load_pipeline_config()
+        result = lint_obj.nextflow_config()
+        assert len(result["failed"]) == 0
+
+    def test_default_values_ignored(self):
+        """Test ignoring linting of default values."""
+        # Add max_cpus to the ignore list
+        nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml"
+        with open(nf_core_yml, "w") as f:
+            f.write(
+                "repository_type: pipeline\nlint:\n  nextflow_config:\n    - config_defaults:\n      - params.max_cpus\n"
+            )
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj.load_pipeline_config()
+        lint_obj._load_lint_config()
+        result = lint_obj.nextflow_config()
+        assert len(result["failed"]) == 0
+        assert len(result["ignored"]) == 1
+        assert "Config default value correct: params.max_cpu" not in str(result["passed"])
+        assert "Config default ignored: params.max_cpus" in str(result["ignored"])
+
+    def test_default_values_float(self):
+        """Test comparing two float values."""
+        # Add a float value `dummy=0.000000001` to the nextflow.config below `validate_params`
+        nf_conf_file = Path(self.new_pipeline) / "nextflow.config"
+        with open(nf_conf_file) as f:
+            content = f.read()
+        fail_content = re.sub(
+            r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content
+        )
+        with open(nf_conf_file, "w") as f:
+            f.write(fail_content)
+        # Add a float value `dummy` to the nextflow_schema.json
+        nf_schema_file = Path(self.new_pipeline) / "nextflow_schema.json"
+        with open(nf_schema_file) as f:
+            content = f.read()
+        fail_content = re.sub(
+            r'"validate_params": {',
+            ' "dummy": {"type": "number","default":0.000000001},\n"validate_params": {',
+            content,
+        )
+        with open(nf_schema_file, "w") as f:
+            f.write(fail_content)
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
lint_obj.load_pipeline_config()
+        result = lint_obj.nextflow_config()
+        assert len(result["failed"]) == 0
+        assert len(result["warned"]) == 0
+        assert "Config default value correct: params.dummy" in str(result["passed"])
+
+    def test_default_values_float_fail(self):
+        """Test comparing two mismatched float values, expecting a lint failure."""
+        # Add a float value `dummy = 0.000000001` to the nextflow.config below `validate_params`
+        nf_conf_file = Path(self.new_pipeline) / "nextflow.config"
+        with open(nf_conf_file) as f:
+            content = f.read()
+        fail_content = re.sub(
+            r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content
+        )
+        with open(nf_conf_file, "w") as f:
+            f.write(fail_content)
+        # Add a float value `dummy` with a different default (0.000001) to the nextflow_schema.json
+        nf_schema_file = Path(self.new_pipeline) / "nextflow_schema.json"
+        with open(nf_schema_file) as f:
+            content = f.read()
+        fail_content = re.sub(
+            r'"validate_params": {',
+            '    "dummy": {"type": "float","default":0.000001},\n"validate_params": {',
+            content,
+        )
+        with open(nf_schema_file, "w") as f:
+            f.write(fail_content)
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj.load_pipeline_config()
+        result = lint_obj.nextflow_config()
+
+        assert len(result["failed"]) == 1
+        assert len(result["warned"]) == 0
+        assert "Config default value incorrect: `params.dummy" in str(result["failed"])
diff --git a/tests/pipelines/lint/test_nfcore_yml.py b/tests/pipelines/lint/test_nfcore_yml.py
new file mode 100644
index 0000000000..955c00da81
--- /dev/null
+++ b/tests/pipelines/lint/test_nfcore_yml.py
@@ -0,0 +1,57 @@
+import re
+from pathlib import Path
+
+import nf_core.pipelines.create
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintNfCoreYml(TestLint):
+    def setUp(self) -> None:
+        super().setUp()
+        self.new_pipeline = self._make_pipeline_copy()
+        self.nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml"
+
+    def test_nfcore_yml_pass(self):
+        """Lint test: nfcore_yml - PASS"""
+        self.lint_obj._load()
+        results = self.lint_obj.nfcore_yml()
+
+        assert "Repository type in `.nf-core.yml` is valid" in str(results["passed"])
+        assert "nf-core version in `.nf-core.yml` is set to the latest version" in str(results["passed"])
+        assert len(results.get("warned", [])) == 0
+        assert len(results.get("failed", [])) == 0
+        assert len(results.get("ignored", [])) == 0
+
+    def test_nfcore_yml_fail_repo_type(self):
+        """Lint test: nfcore_yml - FAIL - repository type not set"""
+
+        with open(self.nf_core_yml) as fh:
+            content = fh.read()
+        new_content = content.replace("repository_type: pipeline", "repository_type: foo")
+        with open(self.nf_core_yml, "w") as fh:
+            fh.write(new_content)
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        results = lint_obj.nfcore_yml()
+        assert "Repository type in `.nf-core.yml` is not valid." 
in str(results["failed"]) + assert len(results.get("warned", [])) == 0 + assert len(results.get("passed", [])) >= 0 + assert len(results.get("ignored", [])) == 0 + + def test_nfcore_yml_fail_nfcore_version(self): + """Lint test: nfcore_yml - FAIL - nf-core version not set""" + + with open(self.nf_core_yml) as fh: + content = fh.read() + new_content = re.sub(r"nf_core_version:.+", "nf_core_version: foo", content) + with open(self.nf_core_yml, "w") as fh: + fh.write(new_content) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + results = lint_obj.nfcore_yml() + assert "nf-core version in `.nf-core.yml` is not set to the latest version." in str(results["warned"]) + assert len(results.get("failed", [])) == 0 + assert len(results.get("passed", [])) >= 0 + assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/lint/test_template_strings.py b/tests/pipelines/lint/test_template_strings.py new file mode 100644 index 0000000000..406ba63e0c --- /dev/null +++ b/tests/pipelines/lint/test_template_strings.py @@ -0,0 +1,55 @@ +import subprocess +from pathlib import Path + +import nf_core.pipelines.create +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintTemplateStrings(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + + def test_template_strings(self): + """Tests finding a template string in a file fails linting.""" + # Add template string to a file + txt_file = Path(self.new_pipeline) / "docs" / "test.txt" + with open(txt_file, "w") as f: + f.write("my {{ template_string }}") + subprocess.check_output(["git", "add", "docs"], cwd=self.new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.template_strings() + assert len(result["failed"]) == 1 + assert len(result["ignored"]) == 0 + + def test_template_strings_ignored(self): + """Tests ignoring template_strings""" + # Ignore template_strings test + nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" + with open(nf_core_yml, "w") as f: + f.write("repository_type: pipeline\nlint:\n template_strings: False") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + lint_obj._lint_pipeline() + assert len(lint_obj.failed) == 0 + assert len(lint_obj.ignored) == 1 + + def test_template_strings_ignore_file(self): + """Tests ignoring template_strings file""" + # Add template string to a file + txt_file = Path(self.new_pipeline) / "docs" / "test.txt" + with open(txt_file, "w") as f: + f.write("my {{ template_string }}") + subprocess.check_output(["git", "add", "docs"], cwd=self.new_pipeline) + # Ignore template_strings test + nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" + with open(nf_core_yml, "w") as f: + f.write("repository_type: pipeline\nlint:\n template_strings:\n - docs/test.txt") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.template_strings() + assert len(result["failed"]) == 0 + assert len(result["ignored"]) == 1 diff --git a/tests/pipelines/lint/test_version_consistency.py b/tests/pipelines/lint/test_version_consistency.py new file mode 100644 index 0000000000..c5a2cc74f1 --- /dev/null +++ b/tests/pipelines/lint/test_version_consistency.py @@ -0,0 +1,19 @@ +import nf_core.pipelines.create.create +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintVersionConsistency(TestLint): + def 
test_version_consistency(self):
+        """Tests that the version_consistency lint test passes for consistent version tags but fails when manifest.version is not numeric (here: 1.0.0dev)"""
+        new_pipeline = self._make_pipeline_copy()
+        lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
+        lint_obj.load_pipeline_config()
+        lint_obj.nextflow_config()
+
+        result = lint_obj.version_consistency()
+        assert result["passed"] == [
+            "Version tags are numeric and consistent between container, release tag and config."
+        ]
+        assert result["failed"] == ["manifest.version was not numeric: 1.0.0dev!"]
diff --git a/tests/pipelines/lint/version_consistency.py b/tests/pipelines/lint/version_consistency.py
deleted file mode 100644
index 88eadce391..0000000000
--- a/tests/pipelines/lint/version_consistency.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import nf_core.pipelines.create.create
-import nf_core.pipelines.lint
-
-
-def test_version_consistency(self):
-    """Tests that config variable existence test fails with bad pipeline name"""
-    new_pipeline = self._make_pipeline_copy()
-    lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
-    lint_obj.load_pipeline_config()
-    lint_obj.nextflow_config()
-
-    result = lint_obj.version_consistency()
-    assert result["passed"] == ["Version tags are numeric and consistent between container, release tag and config."]
-    assert result["failed"] == ["manifest.version was not numeric: 1.0.0dev!"]
diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py
index 54279cd064..b3eac17cb8 100644
--- a/tests/pipelines/test_lint.py
+++ b/tests/pipelines/test_lint.py
@@ -19,204 +19,166 @@ def setUp(self) -> None:
         super().setUp()
         self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir)
 
-    ##########################
-    # CORE lint.py FUNCTIONS #
-    ##########################
-    def test_run_linting_function(self):
-        """Run the master run_linting() function in lint.py
-
-        We don't really check any of this code as it's just a series of function calls
-        and we're testing each of those individually. This is mostly to check for syntax errors."""
-        nf_core.pipelines.lint.run_linting(self.pipeline_dir, False)
-
-    def test_init_pipeline_lint(self):
-        """Simply create a PipelineLint object. 
- - This checks that all of the lint test imports are working properly, - we also check that the git sha was found and that the release flag works properly - """ - lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir, True) - - # Tests that extra test is added for release mode - assert "version_consistency" in lint_obj.lint_tests - assert lint_obj.git_sha - # Tests that parent nf_core.utils.Pipeline class __init__() is working to find git hash - assert len(lint_obj.git_sha) > 0 - - def test_load_lint_config_not_found(self): - """Try to load a linting config file that doesn't exist""" - self.lint_obj._load_lint_config() - assert self.lint_obj.lint_config == {} - - def test_load_lint_config_ignore_all_tests(self): - """Try to load a linting config file that ignores all tests""" - - # Make a copy of the test pipeline and create a lint object - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - - # Make a config file listing all test names - config_dict = {"lint": {test_name: False for test_name in lint_obj.lint_tests}} - with open(Path(new_pipeline, ".nf-core.yml"), "w") as fh: - yaml.dump(config_dict, fh) - - # Load the new lint config file and check - lint_obj._load_lint_config() - assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) - - # Try running linting and make sure that all tests are ignored - lint_obj._lint_pipeline() - assert len(lint_obj.passed) == 0 - assert len(lint_obj.warned) == 0 - assert len(lint_obj.failed) == 0 - assert len(lint_obj.ignored) == len(lint_obj.lint_tests) - - @with_temporary_folder - def test_json_output(self, tmp_dir): - """ - Test creation of a JSON file with lint results - - Expected JSON output: - { - "nf_core_tools_version": "1.10.dev0", - "date_run": "2020-06-05 10:56:42", - "tests_pass": [ - [ 1, "This test passed"], - [ 2, "This test also passed"] - ], - "tests_warned": [ - [ 2, "This test gave a warning"] - ], - "tests_failed": [], - "num_tests_pass": 2, - "num_tests_warned": 1, - "num_tests_failed": 0, - "has_tests_pass": true, - "has_tests_warned": true, - "has_tests_failed": false - } - """ - self.lint_obj.passed.append(("test_one", "This test passed")) - self.lint_obj.passed.append(("test_two", "This test also passed")) - self.lint_obj.warned.append(("test_three", "This test gave a warning")) - - # Make a temp dir for the JSON output - json_fn = Path(tmp_dir, "lint_results.json") - self.lint_obj._save_json_results(json_fn) - - # Load created JSON file and check its contents - with open(json_fn) as fh: - try: - saved_json = json.load(fh) - except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{json_fn}' due to error {e}") - assert saved_json["num_tests_pass"] > 0 - assert saved_json["num_tests_warned"] > 0 - assert saved_json["num_tests_ignored"] == 0 - assert saved_json["num_tests_failed"] == 0 - assert saved_json["has_tests_pass"] - assert saved_json["has_tests_warned"] - assert not saved_json["has_tests_ignored"] - assert not saved_json["has_tests_failed"] - - def test_wrap_quotes(self): - md = self.lint_obj._wrap_quotes(["one", "two", "three"]) - assert md == "`one` or `two` or `three`" - - def test_sphinx_md_files(self): - """Check that we have .md files for all lint module code, - and that there are no unexpected files (eg. 
deleted lint tests)""" - - docs_basedir = Path(Path(__file__).parent.parent.parent, "docs", "api", "_src", "pipeline_lint_tests") - - # Get list of existing .md files - existing_docs = [] - existing_docs = [ - str(Path(docs_basedir, fn)) - for fn in Path(docs_basedir).iterdir() - if fn.match("*.md") and not fn.match("index.md") - ] - - # Check .md files against each test name - lint_obj = nf_core.pipelines.lint.PipelineLint("", True) - for test_name in lint_obj.lint_tests: - fn = Path(docs_basedir, f"{test_name}.md") - assert fn.exists(), f"Could not find lint docs .md file: {fn}" - existing_docs.remove(str(fn)) - - # Check that we have no remaining .md files that we didn't expect - assert len(existing_docs) == 0, f"Unexpected lint docs .md files found: {', '.join(existing_docs)}" + +########################## +# CORE lint.py FUNCTIONS # +########################## +def test_run_linting_function(self): + """Run the master run_linting() function in lint.py + + We don't really check any of this code as it's just a series of function calls + and we're testing each of those individually. This is mostly to check for syntax errors.""" + nf_core.pipelines.lint.run_linting(self.pipeline_dir, False) + + +def test_init_pipeline_lint(self): + """Simply create a PipelineLint object. + + This checks that all of the lint test imports are working properly, + we also check that the git sha was found and that the release flag works properly + """ + lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir, True) + + # Tests that extra test is added for release mode + assert "version_consistency" in lint_obj.lint_tests + assert lint_obj.git_sha + # Tests that parent nf_core.utils.Pipeline class __init__() is working to find git hash + assert len(lint_obj.git_sha) > 0 + + +def test_load_lint_config_not_found(self): + """Try to load a linting config file that doesn't exist""" + self.lint_obj._load_lint_config() + assert self.lint_obj.lint_config == {} + + +def test_load_lint_config_ignore_all_tests(self): + """Try to load a linting config file that ignores all tests""" + + # Make a copy of the test pipeline and create a lint object + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + + # Make a config file listing all test names + config_dict = {"lint": {test_name: False for test_name in lint_obj.lint_tests}} + with open(Path(new_pipeline, ".nf-core.yml"), "w") as fh: + yaml.dump(config_dict, fh) + + # Load the new lint config file and check + lint_obj._load_lint_config() + assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) + + # Try running linting and make sure that all tests are ignored + lint_obj._lint_pipeline() + assert len(lint_obj.passed) == 0 + assert len(lint_obj.warned) == 0 + assert len(lint_obj.failed) == 0 + assert len(lint_obj.ignored) == len(lint_obj.lint_tests) + + +@with_temporary_folder +def test_json_output(self, tmp_dir): + """ + Test creation of a JSON file with lint results + + Expected JSON output: + { + "nf_core_tools_version": "1.10.dev0", + "date_run": "2020-06-05 10:56:42", + "tests_pass": [ + [ 1, "This test passed"], + [ 2, "This test also passed"] + ], + "tests_warned": [ + [ 2, "This test gave a warning"] + ], + "tests_failed": [], + "num_tests_pass": 2, + "num_tests_warned": 1, + "num_tests_failed": 0, + "has_tests_pass": true, + "has_tests_warned": true, + "has_tests_failed": false + } + """ + self.lint_obj.passed.append(("test_one", "This test passed")) + 
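# a second passing result plus a warning below keep the pass/warn counters in the saved JSON non-zero
+    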
self.lint_obj.passed.append(("test_two", "This test also passed")) + self.lint_obj.warned.append(("test_three", "This test gave a warning")) + + # Make a temp dir for the JSON output + json_fn = Path(tmp_dir, "lint_results.json") + self.lint_obj._save_json_results(json_fn) + + # Load created JSON file and check its contents + with open(json_fn) as fh: + try: + saved_json = json.load(fh) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{json_fn}' due to error {e}") + assert saved_json["num_tests_pass"] > 0 + assert saved_json["num_tests_warned"] > 0 + assert saved_json["num_tests_ignored"] == 0 + assert saved_json["num_tests_failed"] == 0 + assert saved_json["has_tests_pass"] + assert saved_json["has_tests_warned"] + assert not saved_json["has_tests_ignored"] + assert not saved_json["has_tests_failed"] + + +def test_wrap_quotes(self): + md = self.lint_obj._wrap_quotes(["one", "two", "three"]) + assert md == "`one` or `two` or `three`" + + +def test_sphinx_md_files(self): + """Check that we have .md files for all lint module code, + and that there are no unexpected files (eg. deleted lint tests)""" + + docs_basedir = Path(Path(__file__).parent.parent.parent, "docs", "api", "_src", "pipeline_lint_tests") + + # Get list of existing .md files + existing_docs = [] + existing_docs = [ + str(Path(docs_basedir, fn)) + for fn in Path(docs_basedir).iterdir() + if fn.match("*.md") and not fn.match("index.md") + ] + + # Check .md files against each test name + lint_obj = nf_core.pipelines.lint.PipelineLint("", True) + for test_name in lint_obj.lint_tests: + fn = Path(docs_basedir, f"{test_name}.md") + assert fn.exists(), f"Could not find lint docs .md file: {fn}" + existing_docs.remove(str(fn)) + + # Check that we have no remaining .md files that we didn't expect + assert len(existing_docs) == 0, f"Unexpected lint docs .md files found: {', '.join(existing_docs)}" ####################### # SPECIFIC LINT TESTS # ####################### - from .lint.actions_awstest import ( # type: ignore[misc] - test_actions_awstest_fail, - test_actions_awstest_pass, - ) - from .lint.actions_ci import ( # type: ignore[misc] - test_actions_ci_fail_wrong_nf, - test_actions_ci_fail_wrong_trigger, - test_actions_ci_pass, - ) - from .lint.actions_schema_validation import ( # type: ignore[misc] - test_actions_schema_validation_fails_for_additional_property, - test_actions_schema_validation_missing_jobs, - test_actions_schema_validation_missing_on, - ) - from .lint.configs import ( # type: ignore[misc] - test_ignore_base_config, - test_ignore_modules_config, - test_superfluous_withname_in_base_config_fails, - test_superfluous_withname_in_modules_config_fails, - test_withname_in_modules_config, - ) - from .lint.files_exist import ( # type: ignore[misc] - test_files_exist_deprecated_file, - test_files_exist_fail_conditional, - test_files_exist_missing_config, - test_files_exist_missing_main, - test_files_exist_pass, - test_files_exist_pass_conditional, - test_files_exist_pass_conditional_nfschema, - ) - from .lint.files_unchanged import ( # type: ignore[misc] - test_files_unchanged_fail, - test_files_unchanged_pass, - ) - from .lint.merge_markers import test_merge_markers_found # type: ignore[misc] - from .lint.modules_json import test_modules_json_pass # type: ignore[misc] - from .lint.multiqc_config import ( # type: ignore[misc] - test_multiqc_config_exists, - test_multiqc_config_ignore, - test_multiqc_config_missing_report_section_order, - test_multiqc_config_report_comment_fail, - 
test_multiqc_config_report_comment_release_fail, - test_multiqc_config_report_comment_release_succeed, - test_multiqc_incorrect_export_plots, - ) - from .lint.nextflow_config import ( # type: ignore[misc] - test_allow_params_reference_in_main_nf, - test_catch_params_assignment_in_main_nf, - test_default_values_fail, - test_default_values_float, - test_default_values_float_fail, - test_default_values_ignored, - test_default_values_match, - test_nextflow_config_bad_name_fail, - test_nextflow_config_dev_in_release_mode_failed, - test_nextflow_config_example_pass, - test_nextflow_config_missing_test_profile_failed, - ) - from .lint.nfcore_yml import ( # type: ignore[misc] - test_nfcore_yml_fail_nfcore_version, - test_nfcore_yml_fail_repo_type, - test_nfcore_yml_pass, - ) - from .lint.template_strings import ( # type: ignore[misc] - test_template_strings, - test_template_strings_ignore_file, - test_template_strings_ignored, - ) - from .lint.version_consistency import test_version_consistency # type: ignore[misc] + # from .lint.nextflow_config import ( # type: ignore[misc] + # test_allow_params_reference_in_main_nf, + # test_catch_params_assignment_in_main_nf, + # test_default_values_fail, + # test_default_values_float, + # test_default_values_float_fail, + # test_default_values_ignored, + # test_default_values_match, + # test_nextflow_config_bad_name_fail, + # test_nextflow_config_dev_in_release_mode_failed, + # test_nextflow_config_example_pass, + # test_nextflow_config_missing_test_profile_failed, + # ) + # from .lint.nfcore_yml import ( # type: ignore[misc] + # test_nfcore_yml_fail_nfcore_version, + # test_nfcore_yml_fail_repo_type, + # test_nfcore_yml_pass, + # ) + # from .lint.template_strings import ( # type: ignore[misc] + # test_template_strings, + # test_template_strings_ignore_file, + # test_template_strings_ignored, + # ) + # from .lint.version_consistency import test_version_consistency # type: ignore[misc] From fcc43fd0c337ead404fba843eab51820d0f014a9 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 14:51:51 +0200 Subject: [PATCH 21/89] add more tests to CI --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 4e873385e8..fea96b4526 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -67,7 +67,7 @@ jobs: - name: List tests id: list_tests run: | - echo "tests=$(find tests/test_* | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT + echo "tests=$(find tests/**/test_* | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT outputs: tests: ${{ steps.list_tests.outputs.tests }} From a779d12b7fff23ce9e93b905f49f37221276af43 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 15:15:40 +0200 Subject: [PATCH 22/89] simplify sed command --- .github/workflows/pytest.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index fea96b4526..18f9b28098 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -67,7 +67,7 @@ jobs: - name: List tests id: list_tests run: | - echo "tests=$(find tests/**/test_* | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT + echo "tests=$(find tests/**/test_* | tac | sed 's/.*\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT outputs: tests: ${{ steps.list_tests.outputs.tests 
}} @@ -132,7 +132,7 @@ jobs: - name: Test with pytest run: | - python3 -m pytest tests/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0|| exit_code=$? + python3 -m pytest tests/**/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0|| exit_code=$? # don't fail if no tests were collected, e.g. for test_licence.py if [ "${exit_code}" -eq 5 ]; then echo "No tests were collected" From ca0769bd76b207881a8e77d3a18f3902b2307474 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 15:31:01 +0200 Subject: [PATCH 23/89] fix ci tests --- .github/workflows/pytest.yml | 11 +- tests/pipelines/test_lint.py | 284 +++++++++++++++-------------------- 2 files changed, 133 insertions(+), 162 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 18f9b28098..72bacaeab9 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -67,7 +67,7 @@ jobs: - name: List tests id: list_tests run: | - echo "tests=$(find tests/**/test_* | tac | sed 's/.*\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT + echo "tests=$(find tests/**/test_* | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT outputs: tests: ${{ steps.list_tests.outputs.tests }} @@ -132,7 +132,7 @@ jobs: - name: Test with pytest run: | - python3 -m pytest tests/**/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0|| exit_code=$? + python3 -m pytest tests/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0|| exit_code=$? # don't fail if no tests were collected, e.g. for test_licence.py if [ "${exit_code}" -eq 5 ]; then echo "No tests were collected" @@ -149,10 +149,15 @@ jobs: name: Snapshot Report ${{ matrix.test }} path: ./snapshot_report.html + - name: remove slashes from test name + run: | + test=$(echo ${{ matrix.test }} | sed 's/\//__/g') + echo "test=${test}" >> $GITHUB_ENV + - name: Upload coverage uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 with: - name: coverage_${{ matrix.test }} + name: coverage_${{ env.test }} path: .coverage coverage: diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index b3eac17cb8..ab8bcf6b24 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -23,162 +23,128 @@ def setUp(self) -> None: ########################## # CORE lint.py FUNCTIONS # ########################## -def test_run_linting_function(self): - """Run the master run_linting() function in lint.py - - We don't really check any of this code as it's just a series of function calls - and we're testing each of those individually. This is mostly to check for syntax errors.""" - nf_core.pipelines.lint.run_linting(self.pipeline_dir, False) - - -def test_init_pipeline_lint(self): - """Simply create a PipelineLint object. 
- - This checks that all of the lint test imports are working properly, - we also check that the git sha was found and that the release flag works properly - """ - lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir, True) - - # Tests that extra test is added for release mode - assert "version_consistency" in lint_obj.lint_tests - assert lint_obj.git_sha - # Tests that parent nf_core.utils.Pipeline class __init__() is working to find git hash - assert len(lint_obj.git_sha) > 0 - - -def test_load_lint_config_not_found(self): - """Try to load a linting config file that doesn't exist""" - self.lint_obj._load_lint_config() - assert self.lint_obj.lint_config == {} - - -def test_load_lint_config_ignore_all_tests(self): - """Try to load a linting config file that ignores all tests""" - - # Make a copy of the test pipeline and create a lint object - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - - # Make a config file listing all test names - config_dict = {"lint": {test_name: False for test_name in lint_obj.lint_tests}} - with open(Path(new_pipeline, ".nf-core.yml"), "w") as fh: - yaml.dump(config_dict, fh) - - # Load the new lint config file and check - lint_obj._load_lint_config() - assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) - - # Try running linting and make sure that all tests are ignored - lint_obj._lint_pipeline() - assert len(lint_obj.passed) == 0 - assert len(lint_obj.warned) == 0 - assert len(lint_obj.failed) == 0 - assert len(lint_obj.ignored) == len(lint_obj.lint_tests) - - -@with_temporary_folder -def test_json_output(self, tmp_dir): - """ - Test creation of a JSON file with lint results - - Expected JSON output: - { - "nf_core_tools_version": "1.10.dev0", - "date_run": "2020-06-05 10:56:42", - "tests_pass": [ - [ 1, "This test passed"], - [ 2, "This test also passed"] - ], - "tests_warned": [ - [ 2, "This test gave a warning"] - ], - "tests_failed": [], - "num_tests_pass": 2, - "num_tests_warned": 1, - "num_tests_failed": 0, - "has_tests_pass": true, - "has_tests_warned": true, - "has_tests_failed": false - } - """ - self.lint_obj.passed.append(("test_one", "This test passed")) - self.lint_obj.passed.append(("test_two", "This test also passed")) - self.lint_obj.warned.append(("test_three", "This test gave a warning")) - - # Make a temp dir for the JSON output - json_fn = Path(tmp_dir, "lint_results.json") - self.lint_obj._save_json_results(json_fn) - - # Load created JSON file and check its contents - with open(json_fn) as fh: - try: - saved_json = json.load(fh) - except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{json_fn}' due to error {e}") - assert saved_json["num_tests_pass"] > 0 - assert saved_json["num_tests_warned"] > 0 - assert saved_json["num_tests_ignored"] == 0 - assert saved_json["num_tests_failed"] == 0 - assert saved_json["has_tests_pass"] - assert saved_json["has_tests_warned"] - assert not saved_json["has_tests_ignored"] - assert not saved_json["has_tests_failed"] - - -def test_wrap_quotes(self): - md = self.lint_obj._wrap_quotes(["one", "two", "three"]) - assert md == "`one` or `two` or `three`" - - -def test_sphinx_md_files(self): - """Check that we have .md files for all lint module code, - and that there are no unexpected files (eg. 
deleted lint tests)""" - - docs_basedir = Path(Path(__file__).parent.parent.parent, "docs", "api", "_src", "pipeline_lint_tests") - - # Get list of existing .md files - existing_docs = [] - existing_docs = [ - str(Path(docs_basedir, fn)) - for fn in Path(docs_basedir).iterdir() - if fn.match("*.md") and not fn.match("index.md") - ] - - # Check .md files against each test name - lint_obj = nf_core.pipelines.lint.PipelineLint("", True) - for test_name in lint_obj.lint_tests: - fn = Path(docs_basedir, f"{test_name}.md") - assert fn.exists(), f"Could not find lint docs .md file: {fn}" - existing_docs.remove(str(fn)) - - # Check that we have no remaining .md files that we didn't expect - assert len(existing_docs) == 0, f"Unexpected lint docs .md files found: {', '.join(existing_docs)}" - - ####################### - # SPECIFIC LINT TESTS # - ####################### - - # from .lint.nextflow_config import ( # type: ignore[misc] - # test_allow_params_reference_in_main_nf, - # test_catch_params_assignment_in_main_nf, - # test_default_values_fail, - # test_default_values_float, - # test_default_values_float_fail, - # test_default_values_ignored, - # test_default_values_match, - # test_nextflow_config_bad_name_fail, - # test_nextflow_config_dev_in_release_mode_failed, - # test_nextflow_config_example_pass, - # test_nextflow_config_missing_test_profile_failed, - # ) - # from .lint.nfcore_yml import ( # type: ignore[misc] - # test_nfcore_yml_fail_nfcore_version, - # test_nfcore_yml_fail_repo_type, - # test_nfcore_yml_pass, - # ) - # from .lint.template_strings import ( # type: ignore[misc] - # test_template_strings, - # test_template_strings_ignore_file, - # test_template_strings_ignored, - # ) - # from .lint.version_consistency import test_version_consistency # type: ignore[misc] +class TestPipelinesLint(TestLint): + def test_run_linting_function(self): + """Run the master run_linting() function in lint.py + + We don't really check any of this code as it's just a series of function calls + and we're testing each of those individually. This is mostly to check for syntax errors.""" + nf_core.pipelines.lint.run_linting(self.pipeline_dir, False) + + def test_init_pipeline_lint(self): + """Simply create a PipelineLint object. 
+ + This checks that all of the lint test imports are working properly, + we also check that the git sha was found and that the release flag works properly + """ + lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir, True) + + # Tests that extra test is added for release mode + assert "version_consistency" in lint_obj.lint_tests + assert lint_obj.git_sha + # Tests that parent nf_core.utils.Pipeline class __init__() is working to find git hash + assert len(lint_obj.git_sha) > 0 + + def test_load_lint_config_not_found(self): + """Try to load a linting config file that doesn't exist""" + self.lint_obj._load_lint_config() + assert self.lint_obj.lint_config == {} + + def test_load_lint_config_ignore_all_tests(self): + """Try to load a linting config file that ignores all tests""" + + # Make a copy of the test pipeline and create a lint object + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + + # Make a config file listing all test names + config_dict = {"lint": {test_name: False for test_name in lint_obj.lint_tests}} + with open(Path(new_pipeline, ".nf-core.yml"), "w") as fh: + yaml.dump(config_dict, fh) + + # Load the new lint config file and check + lint_obj._load_lint_config() + assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) + + # Try running linting and make sure that all tests are ignored + lint_obj._lint_pipeline() + assert len(lint_obj.passed) == 0 + assert len(lint_obj.warned) == 0 + assert len(lint_obj.failed) == 0 + assert len(lint_obj.ignored) == len(lint_obj.lint_tests) + + @with_temporary_folder + def test_json_output(self, tmp_dir): + """ + Test creation of a JSON file with lint results + + Expected JSON output: + { + "nf_core_tools_version": "1.10.dev0", + "date_run": "2020-06-05 10:56:42", + "tests_pass": [ + [ 1, "This test passed"], + [ 2, "This test also passed"] + ], + "tests_warned": [ + [ 2, "This test gave a warning"] + ], + "tests_failed": [], + "num_tests_pass": 2, + "num_tests_warned": 1, + "num_tests_failed": 0, + "has_tests_pass": true, + "has_tests_warned": true, + "has_tests_failed": false + } + """ + self.lint_obj.passed.append(("test_one", "This test passed")) + self.lint_obj.passed.append(("test_two", "This test also passed")) + self.lint_obj.warned.append(("test_three", "This test gave a warning")) + + # Make a temp dir for the JSON output + json_fn = Path(tmp_dir, "lint_results.json") + self.lint_obj._save_json_results(json_fn) + + # Load created JSON file and check its contents + with open(json_fn) as fh: + try: + saved_json = json.load(fh) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{json_fn}' due to error {e}") + assert saved_json["num_tests_pass"] > 0 + assert saved_json["num_tests_warned"] > 0 + assert saved_json["num_tests_ignored"] == 0 + assert saved_json["num_tests_failed"] == 0 + assert saved_json["has_tests_pass"] + assert saved_json["has_tests_warned"] + assert not saved_json["has_tests_ignored"] + assert not saved_json["has_tests_failed"] + + def test_wrap_quotes(self): + md = self.lint_obj._wrap_quotes(["one", "two", "three"]) + assert md == "`one` or `two` or `three`" + + def test_sphinx_md_files(self): + """Check that we have .md files for all lint module code, + and that there are no unexpected files (eg. 
deleted lint tests)""" + + docs_basedir = Path(Path(__file__).parent.parent.parent, "docs", "api", "_src", "pipeline_lint_tests") + + # Get list of existing .md files + existing_docs = [] + existing_docs = [ + str(Path(docs_basedir, fn)) + for fn in Path(docs_basedir).iterdir() + if fn.match("*.md") and not fn.match("index.md") + ] + + # Check .md files against each test name + lint_obj = nf_core.pipelines.lint.PipelineLint("", True) + for test_name in lint_obj.lint_tests: + fn = Path(docs_basedir, f"{test_name}.md") + assert fn.exists(), f"Could not find lint docs .md file: {fn}" + existing_docs.remove(str(fn)) + + # Check that we have no remaining .md files that we didn't expect + assert len(existing_docs) == 0, f"Unexpected lint docs .md files found: {', '.join(existing_docs)}" From 768cfc31fff05bf4136d987f7dc811599766b7f9 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 15:36:13 +0200 Subject: [PATCH 24/89] find ALL test files in CI --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 72bacaeab9..dc88031886 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -67,7 +67,7 @@ jobs: - name: List tests id: list_tests run: | - echo "tests=$(find tests/**/test_* | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT + echo "tests=$(find tests -type f -name "test_*.py" | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT outputs: tests: ${{ steps.list_tests.outputs.tests }} From 3aee9cbfd0950a7f78cfcd273ce7a61bbc3d4ed9 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 15:49:21 +0200 Subject: [PATCH 25/89] add pytest-asyncio --- requirements-dev.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements-dev.txt b/requirements-dev.txt index 2c7bb0c8cf..82087edcba 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -14,6 +14,7 @@ types-Markdown types-PyYAML types-requests types-setuptools +pytest-asyncio pytest-textual-snapshot==0.4.0 pytest-workflow>=2.0.0 pytest>=8.0.0 From 09b0623a9682b58b1be9b5fe12bba117805681b4 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 16:10:41 +0200 Subject: [PATCH 26/89] convert subworkflow tests to new structure --- nf_core/modules/bump_versions.py | 7 +- nf_core/modules/modules_json.py | 3 +- tests/subworkflows/create.py | 110 -------- tests/subworkflows/info.py | 64 ----- tests/subworkflows/install.py | 154 ----------- tests/subworkflows/lint.py | 403 ----------------------------- tests/subworkflows/list.py | 49 ---- tests/subworkflows/remove.py | 100 ------- tests/subworkflows/test_create.py | 109 ++++++++ tests/subworkflows/test_info.py | 63 +++++ tests/subworkflows/test_install.py | 157 +++++++++++ tests/subworkflows/test_lint.py | 391 ++++++++++++++++++++++++++++ tests/subworkflows/test_list.py | 48 ++++ tests/subworkflows/test_remove.py | 101 ++++++++ tests/subworkflows/test_update.py | 370 ++++++++++++++++++++++++++ tests/subworkflows/update.py | 376 --------------------------- tests/test_subworkflows.py | 105 +++----- 17 files changed, 1278 insertions(+), 1332 deletions(-) delete mode 100644 tests/subworkflows/create.py delete mode 100644 tests/subworkflows/info.py delete mode 100644 tests/subworkflows/install.py delete mode 100644 tests/subworkflows/lint.py delete mode 100644 tests/subworkflows/list.py delete mode 100644 tests/subworkflows/remove.py create mode 100644 
tests/subworkflows/test_create.py create mode 100644 tests/subworkflows/test_info.py create mode 100644 tests/subworkflows/test_install.py create mode 100644 tests/subworkflows/test_lint.py create mode 100644 tests/subworkflows/test_list.py create mode 100644 tests/subworkflows/test_remove.py create mode 100644 tests/subworkflows/test_update.py delete mode 100644 tests/subworkflows/update.py diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 6556dcf0f4..fae3793079 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -176,7 +176,12 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: try: response = nf_core.utils.anaconda_package(bp) except (LookupError, ValueError): - self.failed.append((f"Conda version not specified correctly: {module.main_nf}", module.component_name)) + self.failed.append( + ( + f"Conda version not specified correctly: {module.main_nf.relative_to(self.dir)}", + module.component_name, + ) + ) return False # Check that required version is available at all diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 0d6779a8d4..2c2f1a32c9 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -6,6 +6,7 @@ import shutil import tempfile from pathlib import Path +from typing import Union import git import questionary @@ -31,7 +32,7 @@ class ModulesJson: An object for handling a 'modules.json' file in a pipeline """ - def __init__(self, pipeline_dir: str): + def __init__(self, pipeline_dir: Union[str, Path]): """ Initialise the object. diff --git a/tests/subworkflows/create.py b/tests/subworkflows/create.py deleted file mode 100644 index 002b889671..0000000000 --- a/tests/subworkflows/create.py +++ /dev/null @@ -1,110 +0,0 @@ -import os -import shutil -from pathlib import Path -from unittest import mock - -import pytest -import yaml -from git.repo import Repo - -import nf_core.subworkflows -from tests.utils import GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL - - -def test_subworkflows_create_succeed(self): - """Succeed at creating a subworkflow from the template inside a pipeline""" - subworkflow_create = nf_core.subworkflows.SubworkflowCreate( - self.pipeline_dir, "test_subworkflow_local", "@author", True - ) - subworkflow_create.create() - assert os.path.exists(os.path.join(self.pipeline_dir, "subworkflows", "local", "test_subworkflow_local.nf")) - - -def test_subworkflows_create_fail_exists(self): - """Fail at creating the same subworkflow twice""" - subworkflow_create = nf_core.subworkflows.SubworkflowCreate( - self.pipeline_dir, "test_subworkflow2", "@author", False - ) - subworkflow_create.create() - with pytest.raises(UserWarning) as excinfo: - subworkflow_create.create() - assert "Subworkflow file exists already" in str(excinfo.value) - - -def test_subworkflows_create_nfcore_modules(self): - """Create a subworkflow in nf-core/modules clone""" - subworkflow_create = nf_core.subworkflows.SubworkflowCreate( - self.nfcore_modules, "test_subworkflow", "@author", force=True - ) - subworkflow_create.create() - assert os.path.exists(os.path.join(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf")) - assert os.path.exists( - os.path.join(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test") - ) - - -@mock.patch("rich.prompt.Confirm.ask") -def test_subworkflows_migrate(self, mock_rich_ask): - """Create a subworkflow with the --migrate-pytest option to convert pytest to 
nf-test""" - pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools") - subworkflow_dir = Path(self.nfcore_modules, "subworkflows", "nf-core", "bam_stats_samtools") - - # Clone modules repo with pytests - shutil.rmtree(self.nfcore_modules) - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - with open(subworkflow_dir / "main.nf") as fh: - old_main_nf = fh.read() - with open(subworkflow_dir / "meta.yml") as fh: - old_meta_yml = fh.read() - - # Create a subworkflow with --migrate-pytest - mock_rich_ask.return_value = True - subworkflow_create = nf_core.subworkflows.SubworkflowCreate( - self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True - ) - subworkflow_create.create() - - with open(subworkflow_dir / "main.nf") as fh: - new_main_nf = fh.read() - with open(subworkflow_dir / "meta.yml") as fh: - new_meta_yml = fh.read() - nextflow_config = subworkflow_dir / "tests" / "nextflow.config" - - # Check that old files have been copied to the new module - assert old_main_nf == new_main_nf - assert old_meta_yml == new_meta_yml - assert nextflow_config.is_file() - - # Check that pytest folder is deleted - assert not pytest_dir.is_dir() - - # Check that pytest_modules.yml is updated - with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: - modules_yml = yaml.safe_load(fh) - assert "subworkflows/bam_stats_samtools" not in modules_yml.keys() - - -@mock.patch("rich.prompt.Confirm.ask") -def test_subworkflows_migrate_no_delete(self, mock_rich_ask): - """Create a subworkflow with the --migrate-pytest option to convert pytest to nf-test. - Test that pytest directory is not deleted.""" - pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools") - - # Clone modules repo with pytests - shutil.rmtree(self.nfcore_modules) - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - - # Create a module with --migrate-pytest - mock_rich_ask.return_value = False - module_create = nf_core.subworkflows.SubworkflowCreate( - self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True - ) - module_create.create() - - # Check that pytest folder is not deleted - assert pytest_dir.is_dir() - - # Check that pytest_modules.yml is updated - with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: - modules_yml = yaml.safe_load(fh) - assert "subworkflows/bam_stats_samtools" not in modules_yml.keys() diff --git a/tests/subworkflows/info.py b/tests/subworkflows/info.py deleted file mode 100644 index 688120ac02..0000000000 --- a/tests/subworkflows/info.py +++ /dev/null @@ -1,64 +0,0 @@ -from rich.console import Console - -import nf_core.subworkflows - -from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL - - -def test_subworkflows_info_remote(self): - """Test getting info about a remote subworkflow""" - mods_info = nf_core.subworkflows.SubworkflowInfo(self.pipeline_dir, "bam_sort_stats_samtools") - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Subworkflow: bam_sort_stats_samtools" in output - assert "Inputs" in output - assert "Outputs" in output - - -def test_subworkflows_info_remote_gitlab(self): - """Test getting info about a subworkflow in the remote gitlab repo""" - mods_info = nf_core.subworkflows.SubworkflowInfo( - self.pipeline_dir, "bam_sort_stats_samtools", 
remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Subworkflow: bam_sort_stats_samtools" in output - assert "Inputs" in output - assert "Outputs" in output - assert "--git-remote" in output - - -def test_subworkflows_info_local(self): - """Test getting info about a locally installed subworkflow""" - self.subworkflow_install.install("bam_sort_stats_samtools") - mods_info = nf_core.subworkflows.SubworkflowInfo(self.pipeline_dir, "bam_sort_stats_samtools") - mods_info.local = True - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Subworkflow: bam_sort_stats_samtools" in output - assert "Inputs" in output - assert "Outputs" in output - - -def test_subworkflows_info_in_modules_repo(self): - """Test getting info about a locally subworkflow in the modules repo""" - self.subworkflow_install.install("bam_sort_stats_samtools") - mods_info = nf_core.subworkflows.SubworkflowInfo(self.nfcore_modules, "bam_sort_stats_samtools") - mods_info.local = True - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Subworkflow: bam_sort_stats_samtools" in output - assert "Inputs" in output - assert "Outputs" in output diff --git a/tests/subworkflows/install.py b/tests/subworkflows/install.py deleted file mode 100644 index dfe71686fb..0000000000 --- a/tests/subworkflows/install.py +++ /dev/null @@ -1,154 +0,0 @@ -import os - -import pytest - -from nf_core.modules.modules_json import ModulesJson -from nf_core.subworkflows.install import SubworkflowInstall - -from ..utils import ( - GITLAB_BRANCH_TEST_BRANCH, - GITLAB_REPO, - GITLAB_SUBWORKFLOWS_BRANCH, - GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, - GITLAB_URL, - with_temporary_folder, -) - - -def test_subworkflow_install_nopipeline(self): - """Test installing a subworkflow - no pipeline given""" - self.subworkflow_install.dir = None - assert self.subworkflow_install.install("foo") is False - - -@with_temporary_folder -def test_subworkflows_install_emptypipeline(self, tmpdir): - """Test installing a subworkflow - empty dir given""" - os.mkdir(os.path.join(tmpdir, "nf-core-pipe")) - self.subworkflow_install.dir = os.path.join(tmpdir, "nf-core-pipe") - with pytest.raises(UserWarning) as excinfo: - self.subworkflow_install.install("foo") - assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) - - -def test_subworkflows_install_nosubworkflow(self): - """Test installing a subworkflow - unrecognised subworkflow given""" - assert self.subworkflow_install.install("foo") is False - - -def test_subworkflows_install_bam_sort_stats_samtools(self): - """Test installing a subworkflow - bam_sort_stats_samtools""" - assert self.subworkflow_install.install("bam_sort_stats_samtools") is not False - subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - sub_subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_stats_samtools") - samtools_index_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_sort_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "sort") - samtools_stats_path = 
os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") - samtools_idxstats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "idxstats") - samtools_flagstat_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "flagstat") - assert os.path.exists(subworkflow_path) - assert os.path.exists(sub_subworkflow_path) - assert os.path.exists(samtools_index_path) - assert os.path.exists(samtools_sort_path) - assert os.path.exists(samtools_stats_path) - assert os.path.exists(samtools_idxstats_path) - assert os.path.exists(samtools_flagstat_path) - - -def test_subworkflows_install_bam_sort_stats_samtools_twice(self): - """Test installing a subworkflow - bam_sort_stats_samtools already there""" - self.subworkflow_install.install("bam_sort_stats_samtools") - assert self.subworkflow_install.install("bam_sort_stats_samtools") is False - - -def test_subworkflows_install_from_gitlab(self): - """Test installing a subworkflow from GitLab""" - assert self.subworkflow_install_gitlab.install("bam_stats_samtools") is True - # Verify that the branch entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - assert ( - modules_json.get_component_branch(self.component_type, "bam_stats_samtools", GITLAB_URL, GITLAB_REPO) - == GITLAB_SUBWORKFLOWS_BRANCH - ) - - -def test_subworkflows_install_different_branch_fail(self): - """Test installing a subworkflow from a different branch""" - install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) - # The bam_stats_samtools subworkflow does not exists in the branch-test branch - assert install_obj.install("bam_stats_samtools") is False - - -def test_subworkflows_install_tracking(self): - """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" - self.subworkflow_install.install("bam_sort_stats_samtools") - - # Verify that the installed_by entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - mod_json = modules_json.get_modules_json() - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ - "bam_sort_stats_samtools" - ]["installed_by"] == ["subworkflows"] - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ - "installed_by" - ] == ["bam_sort_stats_samtools"] - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/stats"][ - "installed_by" - ] == ["bam_stats_samtools"] - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/sort"][ - "installed_by" - ] == ["bam_sort_stats_samtools"] - - # Clean directory - self.subworkflow_remove.remove("bam_sort_stats_samtools") - - -def test_subworkflows_install_tracking_added_already_installed(self): - """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" - self.subworkflow_install.install("bam_sort_stats_samtools") - self.subworkflow_install.install("bam_stats_samtools") - - # Verify that the installed_by entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - mod_json = modules_json.get_modules_json() - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ - "bam_sort_stats_samtools" - ]["installed_by"] == ["subworkflows"] - assert sorted( - 
mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ - "installed_by" - ] - ) == sorted(["bam_sort_stats_samtools", "subworkflows"]) - - # Clean directory - self.subworkflow_remove.remove("bam_sort_stats_samtools") - self.subworkflow_remove.remove("bam_stats_samtools") - - -def test_subworkflows_install_tracking_added_super_subworkflow(self): - """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" - self.subworkflow_install.install("bam_stats_samtools") - self.subworkflow_install.install("bam_sort_stats_samtools") - - # Verify that the installed_by entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - mod_json = modules_json.get_modules_json() - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ - "bam_sort_stats_samtools" - ]["installed_by"] == ["subworkflows"] - assert sorted( - mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ - "installed_by" - ] - ) == sorted(["subworkflows", "bam_sort_stats_samtools"]) - - -def test_subworkflows_install_alternate_remote(self): - """Test installing a module from a different remote with the same organization path""" - install_obj = SubworkflowInstall( - self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH - ) - # Install a subworkflow from GitLab which is also installed from GitHub with the same org_path - with pytest.raises(Exception) as excinfo: - install_obj.install("fastqc") - assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) diff --git a/tests/subworkflows/lint.py b/tests/subworkflows/lint.py deleted file mode 100644 index 540f421ad2..0000000000 --- a/tests/subworkflows/lint.py +++ /dev/null @@ -1,403 +0,0 @@ -import json -import shutil -from pathlib import Path - -import pytest - -import nf_core.subworkflows - -from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL - - -def test_subworkflows_lint(self): - """Test linting the fastq_align_bowtie2 subworkflow""" - self.subworkflow_install.install("fastq_align_bowtie2") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - subworkflow_lint.lint(print_results=False, subworkflow="fastq_align_bowtie2") - assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_empty(self): - """Test linting a pipeline with no subworkflows installed""" - self.subworkflow_remove.remove("utils_nextflow_pipeline", force=True) - self.subworkflow_remove.remove("utils_nfcore_pipeline", force=True) - self.subworkflow_remove.remove("utils_nfvalidation_plugin", force=True) - with pytest.raises(LookupError): - nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - - -def test_subworkflows_lint_new_subworkflow(self): - """lint a new subworkflow""" - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=True, all_subworkflows=True) - assert len(subworkflow_lint.failed) == 0 - - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_no_gitlab(self): - """Test linting a pipeline with no subworkflows installed""" - with pytest.raises(LookupError): - 
nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) - - -def test_subworkflows_lint_gitlab_subworkflows(self): - """Lint subworkflows from a different remote""" - self.subworkflow_install_gitlab.install("bam_stats_samtools") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint( - dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - subworkflow_lint.lint(print_results=False, all_subworkflows=True) - assert len(subworkflow_lint.failed) == 0 - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_multiple_remotes(self): - """Lint subworkflows from a different remote""" - self.subworkflow_install_gitlab.install("bam_stats_samtools") - self.subworkflow_install.install("fastq_align_bowtie2") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint( - dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - subworkflow_lint.lint(print_results=False, all_subworkflows=True) - assert len(subworkflow_lint.failed) == 0 - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_snapshot_file(self): - """Test linting a subworkflow with a snapshot file""" - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_snapshot_file_missing_fail(self): - """Test linting a subworkflow with a snapshot file missing, which should fail""" - Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test.snap", - ).unlink() - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test.snap", - ).touch() - assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_snapshot_file_not_needed(self): - """Test linting a subworkflow which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" - with open( - Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test", - ) - ) as fh: - content = fh.read() - new_content = content.replace("snapshot(", "snap (") - with open( - Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test", - ), - "w", - ) as fh: - fh.write(new_content) - - Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test.snap", - ).unlink() - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test.snap", - ).touch() - assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert 
len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_less_than_two_modules_warning(self): - """Test linting a subworkflow with less than two modules""" - self.subworkflow_install.install("bam_stats_samtools") - # Remove two modules - with open( - Path( - self.pipeline_dir, - "subworkflows", - "nf-core", - "bam_stats_samtools", - "main.nf", - ) - ) as fh: - content = fh.read() - new_content = content.replace( - "include { SAMTOOLS_IDXSTATS } from '../../../modules/nf-core/samtools/idxstats/main'", - "", - ) - new_content = new_content.replace( - "include { SAMTOOLS_FLAGSTAT } from '../../../modules/nf-core/samtools/flagstat/main'", - "", - ) - with open( - Path( - self.pipeline_dir, - "subworkflows", - "nf-core", - "bam_stats_samtools", - "main.nf", - ), - "w", - ) as fh: - fh.write(new_content) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") - assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) > 0 - assert subworkflow_lint.warned[0].lint_test == "main_nf_include" - # cleanup - self.subworkflow_remove.remove("bam_stats_samtools", force=True) - - -def test_subworkflows_lint_include_multiple_alias(self): - """Test linting a subworkflow with multiple include methods""" - self.subworkflow_install.install("bam_stats_samtools") - with open( - Path( - self.pipeline_dir, - "subworkflows", - "nf-core", - "bam_stats_samtools", - "main.nf", - ) - ) as fh: - content = fh.read() - new_content = content.replace("SAMTOOLS_STATS", "SAMTOOLS_STATS_1") - new_content = new_content.replace( - "include { SAMTOOLS_STATS_1 ", - "include { SAMTOOLS_STATS as SAMTOOLS_STATS_1; SAMTOOLS_STATS as SAMTOOLS_STATS_2 ", - ) - with open( - Path( - self.pipeline_dir, - "subworkflows", - "nf-core", - "bam_stats_samtools", - "main.nf", - ), - "w", - ) as fh: - fh.write(new_content) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") - assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) == 2 - assert any( - [ - x.message == "Included component 'SAMTOOLS_STATS_1' versions are added in main.nf" - for x in subworkflow_lint.passed - ] - ) - assert any([x.message == "Included component 'SAMTOOLS_STATS_1' used in main.nf" for x in subworkflow_lint.passed]) - assert any( - [x.message == "Included component 'SAMTOOLS_STATS_2' not used in main.nf" for x in subworkflow_lint.warned] - ) - - # cleanup - self.subworkflow_remove.remove("bam_stats_samtools", force=True) - - -def test_subworkflows_lint_capitalization_fail(self): - """Test linting a subworkflow with a capitalization fail""" - self.subworkflow_install.install("bam_stats_samtools") - # change workflow name to lowercase - with open( - Path( - self.pipeline_dir, - "subworkflows", - "nf-core", - "bam_stats_samtools", - "main.nf", - ) - ) as fh: - content = fh.read() - new_content = content.replace("workflow BAM_STATS_SAMTOOLS {", "workflow bam_stats_samtools {") - with open( - Path( - self.pipeline_dir, - "subworkflows", - "nf-core", - "bam_stats_samtools", - "main.nf", - ), - "w", - ) as fh: - fh.write(new_content) - 
subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") - assert len(subworkflow_lint.failed) >= 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - assert any([x.lint_test == "workflow_capitals" for x in subworkflow_lint.failed]) - - # cleanup - self.subworkflow_remove.remove("bam_stats_samtools", force=True) - - -def test_subworkflows_absent_version(self): - """Test linting a nf-test module if the versions is absent in the snapshot file `""" - snap_file = Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test.snap", - ) - with open(snap_file) as fh: - content = fh.read() - new_content = content.replace("versions", "foo") - with open(snap_file, "w") as fh: - fh.write(new_content) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 0 - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}" - assert any([x.lint_test == "test_snap_versions" for x in subworkflow_lint.warned]) - - # cleanup - with open(snap_file, "w") as fh: - fh.write(content) - - -def test_subworkflows_missing_test_dir(self): - """Test linting a nf-test subworkflow if the tests directory is missing""" - test_dir = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests") - test_dir_copy = shutil.copytree(test_dir, test_dir.parent / "tests_copy") - shutil.rmtree(test_dir) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 0 - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}" - assert any([x.lint_test == "test_dir_versions" for x in subworkflow_lint.warned]) - - # cleanup - shutil.copytree(test_dir_copy, test_dir) - - -def test_subworkflows_missing_main_nf(self): - """Test linting a nf-test subworkflow if the main.nf file is missing""" - main_nf = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf") - main_nf_copy = shutil.copy(main_nf, main_nf.parent / "main_nf_copy") - main_nf.unlink() - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - assert subworkflow_lint.failed[0].lint_test == "main_nf_exists" - - # cleanup - shutil.copy(main_nf_copy, main_nf) - - -def test_subworkflows_empty_file_in_snapshot(self): - """Test linting a nf-test subworkflow with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)""" - snap_file = Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test.snap", - ) - snap = json.load(snap_file.open()) - content = snap_file.read_text() - snap["my test"]["content"][0]["0"] = 
"test:md5,d41d8cd98f00b204e9800998ecf8427e" - - with open(snap_file, "w") as fh: - json.dump(snap, fh) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - assert subworkflow_lint.failed[0].lint_test == "test_snap_md5sum" - - # reset the file - with open(snap_file, "w") as fh: - fh.write(content) - - -def test_subworkflows_empty_file_in_stub_snapshot(self): - """Test linting a nf-test subworkflow with an empty file sha sum in the stub test snapshot, which should make it not fail""" - snap_file = Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test.snap", - ) - snap = json.load(snap_file.open()) - content = snap_file.read_text() - snap["my_test_stub"] = {"content": [{"0": "test:md5,d41d8cd98f00b204e9800998ecf8427e", "versions": {}}]} - - with open(snap_file, "w") as fh: - json.dump(snap, fh) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - assert any(x.lint_test == "test_snap_md5sum" for x in subworkflow_lint.passed) - - # reset the file - with open(snap_file, "w") as fh: - fh.write(content) diff --git a/tests/subworkflows/list.py b/tests/subworkflows/list.py deleted file mode 100644 index c65999d42c..0000000000 --- a/tests/subworkflows/list.py +++ /dev/null @@ -1,49 +0,0 @@ -from rich.console import Console - -import nf_core.subworkflows - -from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL - - -def test_subworkflows_list_remote(self): - """Test listing available subworkflows""" - subworkflows_list = nf_core.subworkflows.SubworkflowList(None, remote=True) - listed_subworkflows = subworkflows_list.list_components() - console = Console(record=True) - console.print(listed_subworkflows) - output = console.export_text() - assert "bam_stats" in output - - -def test_subworkflows_list_remote_gitlab(self): - """Test listing the subworkflows in the remote gitlab repo""" - subworkflows_list = nf_core.subworkflows.SubworkflowList( - None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - listed_subworkflows = subworkflows_list.list_components() - console = Console(record=True) - console.print(listed_subworkflows) - output = console.export_text() - assert "bam_stats" in output - - -def test_subworkflows_install_and_list_subworkflows(self): - """Test listing locally installed subworkflows""" - self.subworkflow_install.install("bam_sort_stats_samtools") - subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False) - listed_subworkflows = subworkflows_list.list_components() - console = Console(record=True) - console.print(listed_subworkflows) - output = console.export_text() - assert "bam_stats" in output - - -def test_subworkflows_install_gitlab_and_list_subworkflows(self): - """Test listing locally installed subworkflows""" - self.subworkflow_install_gitlab.install("bam_sort_stats_samtools") - subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, 
remote=False) - listed_subworkflows = subworkflows_list.list_components() - console = Console(record=True) - console.print(listed_subworkflows) - output = console.export_text() - assert "bam_stats" in output diff --git a/tests/subworkflows/remove.py b/tests/subworkflows/remove.py deleted file mode 100644 index c6a3b98454..0000000000 --- a/tests/subworkflows/remove.py +++ /dev/null @@ -1,100 +0,0 @@ -from pathlib import Path - -from nf_core.modules.modules_json import ModulesJson - - -def test_subworkflows_remove_uninstalled_subworkflow(self): - """Test removing subworkflow without installing it""" - assert self.subworkflow_remove.remove("bam_sort_stats_samtools") is False - - -def test_subworkflows_remove_subworkflow(self): - """Test removing subworkflow and all it's dependencies after installing it""" - self.subworkflow_install.install("bam_sort_stats_samtools") - - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") - bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") - bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - ModulesJson(self.pipeline_dir) - mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() - assert self.subworkflow_remove.remove("bam_sort_stats_samtools") - mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() - assert Path.exists(bam_sort_stats_samtools_path) is False - assert Path.exists(bam_stats_samtools_path) is False - assert Path.exists(samtools_index_path) is False - assert mod_json_before != mod_json_after - # assert subworkflows key is removed from modules.json - assert ( - "bam_sort_stats_samtools" - not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys() - ) - assert "samtools/index" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"].keys() - - -def test_subworkflows_remove_subworkflow_keep_installed_module(self): - """Test removing subworkflow and all it's dependencies after installing it, except for a separately installed module""" - self.subworkflow_install.install("bam_sort_stats_samtools") - self.mods_install.install("samtools/index") - - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") - bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") - bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - - mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() - assert self.subworkflow_remove.remove("bam_sort_stats_samtools") - mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() - - assert Path.exists(bam_sort_stats_samtools_path) is False - assert Path.exists(bam_stats_samtools_path) is False - assert Path.exists(samtools_index_path) is True - assert mod_json_before != mod_json_after - # assert subworkflows key is removed from modules.json - assert ( - "bam_sort_stats_samtools" - not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys() - ) - assert ( - "samtools/index" - in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"].keys() - ) - - -def test_subworkflows_remove_one_of_two_subworkflow(self): - """Test removing subworkflow and all it's dependencies after installing it""" - 
self.subworkflow_install.install("bam_sort_stats_samtools") - self.subworkflow_install.install("bam_stats_samtools") - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") - bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") - bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") - - assert self.subworkflow_remove.remove("bam_sort_stats_samtools") - - assert Path.exists(subworkflow_path) is True - assert Path.exists(bam_sort_stats_samtools_path) is False - assert Path.exists(bam_stats_samtools_path) is True - assert Path.exists(samtools_index_path) is False - assert Path.exists(samtools_stats_path) is True - self.subworkflow_remove.remove("bam_stats_samtools") - - -def test_subworkflows_remove_included_subworkflow(self): - """Test removing subworkflow which is installed by another subworkflow and all it's dependencies.""" - self.subworkflow_install.install("bam_sort_stats_samtools") - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") - bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") - bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") - - assert self.subworkflow_remove.remove("bam_stats_samtools") is False - - assert Path.exists(subworkflow_path) is True - assert Path.exists(bam_sort_stats_samtools_path) is True - assert Path.exists(bam_stats_samtools_path) is True - assert Path.exists(samtools_index_path) is True - assert Path.exists(samtools_stats_path) is True - self.subworkflow_remove.remove("bam_sort_stats_samtools") diff --git a/tests/subworkflows/test_create.py b/tests/subworkflows/test_create.py new file mode 100644 index 0000000000..48cb482260 --- /dev/null +++ b/tests/subworkflows/test_create.py @@ -0,0 +1,109 @@ +import shutil +from pathlib import Path +from unittest import mock + +import pytest +import yaml +from git.repo import Repo + +import nf_core.subworkflows +from tests.utils import GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL + +from ..test_subworkflows import TestSubworkflows + + +class TestSubworkflowsCreate(TestSubworkflows): + def test_subworkflows_create_succeed(self): + """Succeed at creating a subworkflow from the template inside a pipeline""" + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.pipeline_dir, "test_subworkflow_local", "@author", True + ) + subworkflow_create.create() + assert Path(self.pipeline_dir, "subworkflows", "local", "test_subworkflow_local.nf").exists() + + def test_subworkflows_create_fail_exists(self): + """Fail at creating the same subworkflow twice""" + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.pipeline_dir, "test_subworkflow2", "@author", False + ) + subworkflow_create.create() + with pytest.raises(UserWarning) as excinfo: + subworkflow_create.create() + assert "Subworkflow file exists already" in str(excinfo.value) + + def test_subworkflows_create_nfcore_modules(self): + """Create a subworkflow in nf-core/modules clone""" + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.nfcore_modules, "test_subworkflow", 
"@author", force=True + ) + subworkflow_create.create() + assert Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf").exists() + + assert Path( + self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test" + ).exists() + + @mock.patch("rich.prompt.Confirm.ask") + def test_subworkflows_migrate(self, mock_rich_ask): + """Create a subworkflow with the --migrate-pytest option to convert pytest to nf-test""" + pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools") + subworkflow_dir = Path(self.nfcore_modules, "subworkflows", "nf-core", "bam_stats_samtools") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + with open(subworkflow_dir / "main.nf") as fh: + old_main_nf = fh.read() + with open(subworkflow_dir / "meta.yml") as fh: + old_meta_yml = fh.read() + + # Create a subworkflow with --migrate-pytest + mock_rich_ask.return_value = True + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True + ) + subworkflow_create.create() + + with open(subworkflow_dir / "main.nf") as fh: + new_main_nf = fh.read() + with open(subworkflow_dir / "meta.yml") as fh: + new_meta_yml = fh.read() + nextflow_config = subworkflow_dir / "tests" / "nextflow.config" + + # Check that old files have been copied to the new module + assert old_main_nf == new_main_nf + assert old_meta_yml == new_meta_yml + assert nextflow_config.is_file() + + # Check that pytest folder is deleted + assert not pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "subworkflows/bam_stats_samtools" not in modules_yml.keys() + + @mock.patch("rich.prompt.Confirm.ask") + def test_subworkflows_migrate_no_delete(self, mock_rich_ask): + """Create a subworkflow with the --migrate-pytest option to convert pytest to nf-test. 
+        Test that pytest directory is not deleted."""
+        pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools")
+
+        # Clone modules repo with pytests
+        shutil.rmtree(self.nfcore_modules)
+        Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH)
+
+        # Create a subworkflow with --migrate-pytest
+        mock_rich_ask.return_value = False
+        subworkflow_create = nf_core.subworkflows.SubworkflowCreate(
+            self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True
+        )
+        subworkflow_create.create()
+
+        # Check that pytest folder is not deleted
+        assert pytest_dir.is_dir()
+
+        # Check that pytest_modules.yml is updated
+        with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh:
+            modules_yml = yaml.safe_load(fh)
+            assert "subworkflows/bam_stats_samtools" not in modules_yml.keys()
diff --git a/tests/subworkflows/test_info.py b/tests/subworkflows/test_info.py
new file mode 100644
index 0000000000..cf0f492710
--- /dev/null
+++ b/tests/subworkflows/test_info.py
@@ -0,0 +1,63 @@
+from rich.console import Console
+
+import nf_core.subworkflows
+
+from ..test_subworkflows import TestSubworkflows
+from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL
+
+
+class TestSubworkflowsInfo(TestSubworkflows):
+    def test_subworkflows_info_remote(self):
+        """Test getting info about a remote subworkflow"""
+        mods_info = nf_core.subworkflows.SubworkflowInfo(self.pipeline_dir, "bam_sort_stats_samtools")
+        mods_info_output = mods_info.get_component_info()
+        console = Console(record=True)
+        console.print(mods_info_output)
+        output = console.export_text()
+
+        assert "Subworkflow: bam_sort_stats_samtools" in output
+        assert "Inputs" in output
+        assert "Outputs" in output
+
+    def test_subworkflows_info_remote_gitlab(self):
+        """Test getting info about a subworkflow in the remote gitlab repo"""
+        mods_info = nf_core.subworkflows.SubworkflowInfo(
+            self.pipeline_dir, "bam_sort_stats_samtools", remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH
+        )
+        mods_info_output = mods_info.get_component_info()
+        console = Console(record=True)
+        console.print(mods_info_output)
+        output = console.export_text()
+
+        assert "Subworkflow: bam_sort_stats_samtools" in output
+        assert "Inputs" in output
+        assert "Outputs" in output
+        assert "--git-remote" in output
+
+    def test_subworkflows_info_local(self):
+        """Test getting info about a locally installed subworkflow"""
+        self.subworkflow_install.install("bam_sort_stats_samtools")
+        mods_info = nf_core.subworkflows.SubworkflowInfo(self.pipeline_dir, "bam_sort_stats_samtools")
+        mods_info.local = True
+        mods_info_output = mods_info.get_component_info()
+        console = Console(record=True)
+        console.print(mods_info_output)
+        output = console.export_text()
+
+        assert "Subworkflow: bam_sort_stats_samtools" in output
+        assert "Inputs" in output
+        assert "Outputs" in output
+
+    def test_subworkflows_info_in_modules_repo(self):
+        """Test getting info about a local subworkflow in the modules repo"""
+        self.subworkflow_install.install("bam_sort_stats_samtools")
+        mods_info = nf_core.subworkflows.SubworkflowInfo(self.nfcore_modules, "bam_sort_stats_samtools")
+        mods_info.local = True
+        mods_info_output = mods_info.get_component_info()
+        console = Console(record=True)
+        console.print(mods_info_output)
+        output = console.export_text()
+
+        assert "Subworkflow: bam_sort_stats_samtools" in output
+        assert "Inputs" in output
+        assert "Outputs" in output
diff --git a/tests/subworkflows/test_install.py
b/tests/subworkflows/test_install.py new file mode 100644 index 0000000000..e0b2fc1ab6 --- /dev/null +++ b/tests/subworkflows/test_install.py @@ -0,0 +1,157 @@ +import os +from pathlib import Path + +import pytest + +from nf_core.modules.modules_json import ModulesJson +from nf_core.subworkflows.install import SubworkflowInstall + +from ..test_subworkflows import TestSubworkflows +from ..utils import ( + GITLAB_BRANCH_TEST_BRANCH, + GITLAB_REPO, + GITLAB_SUBWORKFLOWS_BRANCH, + GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, + GITLAB_URL, + with_temporary_folder, +) + + +class TestSubworkflowsInstall(TestSubworkflows): + def test_subworkflow_install_nopipeline(self): + """Test installing a subworkflow - no pipeline given""" + assert self.subworkflow_install.dir is not None + self.subworkflow_install.dir = "" + assert self.subworkflow_install.install("foo") is False + + @with_temporary_folder + def test_subworkflows_install_emptypipeline(self, tmpdir): + """Test installing a subworkflow - empty dir given""" + + Path(tmpdir, "nf-core-pipe").mkdir(exist_ok=True) + self.subworkflow_install.dir = os.path.join(tmpdir, "nf-core-pipe") + with pytest.raises(UserWarning) as excinfo: + self.subworkflow_install.install("foo") + assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) + + def test_subworkflows_install_nosubworkflow(self): + """Test installing a subworkflow - unrecognised subworkflow given""" + assert self.subworkflow_install.install("foo") is False + + def test_subworkflows_install_bam_sort_stats_samtools(self): + """Test installing a subworkflow - bam_sort_stats_samtools""" + assert self.subworkflow_install.install("bam_sort_stats_samtools") is not False + subworkflow_path = os.path.join( + self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools" + ) + sub_subworkflow_path = os.path.join( + self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_stats_samtools" + ) + samtools_index_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + samtools_sort_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "sort") + samtools_stats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") + samtools_idxstats_path = os.path.join( + self.subworkflow_install.dir, "modules", "nf-core", "samtools", "idxstats" + ) + samtools_flagstat_path = os.path.join( + self.subworkflow_install.dir, "modules", "nf-core", "samtools", "flagstat" + ) + assert os.path.exists(subworkflow_path) + assert os.path.exists(sub_subworkflow_path) + assert os.path.exists(samtools_index_path) + assert os.path.exists(samtools_sort_path) + assert os.path.exists(samtools_stats_path) + assert os.path.exists(samtools_idxstats_path) + assert os.path.exists(samtools_flagstat_path) + + def test_subworkflows_install_bam_sort_stats_samtools_twice(self): + """Test installing a subworkflow - bam_sort_stats_samtools already there""" + self.subworkflow_install.install("bam_sort_stats_samtools") + assert self.subworkflow_install.install("bam_sort_stats_samtools") is False + + def test_subworkflows_install_from_gitlab(self): + """Test installing a subworkflow from GitLab""" + assert self.subworkflow_install_gitlab.install("bam_stats_samtools") is True + # Verify that the branch entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + assert ( + modules_json.get_component_branch(self.component_type, "bam_stats_samtools", GITLAB_URL, GITLAB_REPO) + == 
GITLAB_SUBWORKFLOWS_BRANCH
+        )
+
+    def test_subworkflows_install_different_branch_fail(self):
+        """Test installing a subworkflow from a different branch"""
+        install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH)
+        # The bam_stats_samtools subworkflow does not exist in the branch-test branch
+        assert install_obj.install("bam_stats_samtools") is False
+
+    def test_subworkflows_install_tracking(self):
+        """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json"""
+        self.subworkflow_install.install("bam_sort_stats_samtools")
+
+        # Verify that the installed_by entry was added correctly
+        modules_json = ModulesJson(self.pipeline_dir)
+        mod_json = modules_json.get_modules_json()
+        assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][
+            "bam_sort_stats_samtools"
+        ]["installed_by"] == ["subworkflows"]
+        assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][
+            "bam_stats_samtools"
+        ]["installed_by"] == ["bam_sort_stats_samtools"]
+        assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/stats"][
+            "installed_by"
+        ] == ["bam_stats_samtools"]
+        assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/sort"][
+            "installed_by"
+        ] == ["bam_sort_stats_samtools"]
+
+        # Clean directory
+        self.subworkflow_remove.remove("bam_sort_stats_samtools")
+
+    def test_subworkflows_install_tracking_added_already_installed(self):
+        """Test that installing an already-installed dependency subworkflow adds a second installed_by entry in modules.json"""
+        self.subworkflow_install.install("bam_sort_stats_samtools")
+        self.subworkflow_install.install("bam_stats_samtools")
+
+        # Verify that the installed_by entry was added correctly
+        modules_json = ModulesJson(self.pipeline_dir)
+        mod_json = modules_json.get_modules_json()
+        assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][
+            "bam_sort_stats_samtools"
+        ]["installed_by"] == ["subworkflows"]
+        assert sorted(
+            mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][
+                "bam_stats_samtools"
+            ]["installed_by"]
+        ) == sorted(["bam_sort_stats_samtools", "subworkflows"])
+
+        # Clean directory
+        self.subworkflow_remove.remove("bam_sort_stats_samtools")
+        self.subworkflow_remove.remove("bam_stats_samtools")
+
+    def test_subworkflows_install_tracking_added_super_subworkflow(self):
+        """Test that installing a parent subworkflow updates the installed_by entry of an already-installed dependency in modules.json"""
+        self.subworkflow_install.install("bam_stats_samtools")
+        self.subworkflow_install.install("bam_sort_stats_samtools")
+
+        # Verify that the installed_by entry was added correctly
+        modules_json = ModulesJson(self.pipeline_dir)
+        mod_json = modules_json.get_modules_json()
+        assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][
+            "bam_sort_stats_samtools"
+        ]["installed_by"] == ["subworkflows"]
+        assert sorted(
+            mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][
+                "bam_stats_samtools"
+            ]["installed_by"]
+        ) == sorted(["subworkflows", "bam_sort_stats_samtools"])
+
+    def test_subworkflows_install_alternate_remote(self):
+        """Test installing a subworkflow from a different remote with the same organization path"""
+        install_obj = SubworkflowInstall(
+            self.pipeline_dir, remote_url=GITLAB_URL,
branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH
+        )
+        # Install a subworkflow from GitLab which is also installed from GitHub with the same org_path
+        with pytest.raises(Exception) as excinfo:
+            install_obj.install("fastqc")
+        assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value)
diff --git a/tests/subworkflows/test_lint.py b/tests/subworkflows/test_lint.py
new file mode 100644
index 0000000000..f8c9bedbf3
--- /dev/null
+++ b/tests/subworkflows/test_lint.py
@@ -0,0 +1,391 @@
+import json
+import shutil
+from pathlib import Path
+
+import pytest
+
+import nf_core.subworkflows
+
+from ..test_subworkflows import TestSubworkflows
+from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL
+
+
+class TestSubworkflowsLint(TestSubworkflows):
+    def test_subworkflows_lint(self):
+        """Test linting the fastq_align_bowtie2 subworkflow"""
+        self.subworkflow_install.install("fastq_align_bowtie2")
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir)
+        subworkflow_lint.lint(print_results=False, subworkflow="fastq_align_bowtie2")
+        assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}"
+        assert len(subworkflow_lint.passed) > 0
+        assert len(subworkflow_lint.warned) >= 0
+
+    def test_subworkflows_lint_empty(self):
+        """Test linting a pipeline with no subworkflows installed"""
+        self.subworkflow_remove.remove("utils_nextflow_pipeline", force=True)
+        self.subworkflow_remove.remove("utils_nfcore_pipeline", force=True)
+        self.subworkflow_remove.remove("utils_nfvalidation_plugin", force=True)
+        with pytest.raises(LookupError):
+            nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir)
+
+    def test_subworkflows_lint_new_subworkflow(self):
+        """Lint a new subworkflow"""
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules)
+        subworkflow_lint.lint(print_results=True, all_subworkflows=True)
+        assert len(subworkflow_lint.failed) == 0
+
+        assert len(subworkflow_lint.passed) > 0
+        assert len(subworkflow_lint.warned) >= 0
+
+    def test_subworkflows_lint_no_gitlab(self):
+        """Test linting a pipeline with a GitLab remote when no subworkflows are installed from it"""
+        with pytest.raises(LookupError):
+            nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir, remote_url=GITLAB_URL)
+
+    def test_subworkflows_lint_gitlab_subworkflows(self):
+        """Lint subworkflows from a different remote"""
+        self.subworkflow_install_gitlab.install("bam_stats_samtools")
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(
+            dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH
+        )
+        subworkflow_lint.lint(print_results=False, all_subworkflows=True)
+        assert len(subworkflow_lint.failed) == 0
+        assert len(subworkflow_lint.passed) > 0
+        assert len(subworkflow_lint.warned) >= 0
+
+    def test_subworkflows_lint_multiple_remotes(self):
+        """Lint subworkflows installed from multiple remotes"""
+        self.subworkflow_install_gitlab.install("bam_stats_samtools")
+        self.subworkflow_install.install("fastq_align_bowtie2")
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(
+            dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH
+        )
+        subworkflow_lint.lint(print_results=False, all_subworkflows=True)
+        assert len(subworkflow_lint.failed) == 0
+        assert len(subworkflow_lint.passed) > 0
+        assert len(subworkflow_lint.warned) >= 0
+
+    def test_subworkflows_lint_snapshot_file(self):
+        """Test linting a subworkflow with a snapshot file"""
+        subworkflow_lint =
nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_snapshot_file_missing_fail(self): + """Test linting a subworkflow with a snapshot file missing, which should fail""" + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).unlink() + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).touch() + assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_snapshot_file_not_needed(self): + """Test linting a subworkflow which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" + with open( + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test", + ) + ) as fh: + content = fh.read() + new_content = content.replace("snapshot(", "snap (") + with open( + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test", + ), + "w", + ) as fh: + fh.write(new_content) + + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).unlink() + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).touch() + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_less_than_two_modules_warning(self): + """Test linting a subworkflow with less than two modules""" + self.subworkflow_install.install("bam_stats_samtools") + # Remove two modules + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ) + ) as fh: + content = fh.read() + new_content = content.replace( + "include { SAMTOOLS_IDXSTATS } from '../../../modules/nf-core/samtools/idxstats/main'", + "", + ) + new_content = new_content.replace( + "include { SAMTOOLS_FLAGSTAT } from '../../../modules/nf-core/samtools/flagstat/main'", + "", + ) + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ), + "w", + ) as fh: + fh.write(new_content) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") + assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) > 0 + assert subworkflow_lint.warned[0].lint_test == "main_nf_include" + # cleanup + 
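# (force=True because this test modified the subworkflow's main.nf in place)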
self.subworkflow_remove.remove("bam_stats_samtools", force=True) + + def test_subworkflows_lint_include_multiple_alias(self): + """Test linting a subworkflow with multiple include methods""" + self.subworkflow_install.install("bam_stats_samtools") + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ) + ) as fh: + content = fh.read() + new_content = content.replace("SAMTOOLS_STATS", "SAMTOOLS_STATS_1") + new_content = new_content.replace( + "include { SAMTOOLS_STATS_1 ", + "include { SAMTOOLS_STATS as SAMTOOLS_STATS_1; SAMTOOLS_STATS as SAMTOOLS_STATS_2 ", + ) + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ), + "w", + ) as fh: + fh.write(new_content) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") + assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) == 2 + assert any( + [ + x.message == "Included component 'SAMTOOLS_STATS_1' versions are added in main.nf" + for x in subworkflow_lint.passed + ] + ) + assert any( + [x.message == "Included component 'SAMTOOLS_STATS_1' used in main.nf" for x in subworkflow_lint.passed] + ) + assert any( + [x.message == "Included component 'SAMTOOLS_STATS_2' not used in main.nf" for x in subworkflow_lint.warned] + ) + + # cleanup + self.subworkflow_remove.remove("bam_stats_samtools", force=True) + + def test_subworkflows_lint_capitalization_fail(self): + """Test linting a subworkflow with a capitalization fail""" + self.subworkflow_install.install("bam_stats_samtools") + # change workflow name to lowercase + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ) + ) as fh: + content = fh.read() + new_content = content.replace("workflow BAM_STATS_SAMTOOLS {", "workflow bam_stats_samtools {") + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ), + "w", + ) as fh: + fh.write(new_content) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") + assert len(subworkflow_lint.failed) >= 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + assert any([x.lint_test == "workflow_capitals" for x in subworkflow_lint.failed]) + + # cleanup + self.subworkflow_remove.remove("bam_stats_samtools", force=True) + + def test_subworkflows_absent_version(self): + """Test linting a nf-test module if the versions is absent in the snapshot file `""" + snap_file = Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ) + with open(snap_file) as fh: + content = fh.read() + new_content = content.replace("versions", "foo") + with open(snap_file, "w") as fh: + fh.write(new_content) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0 + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in 
subworkflow_lint.warned]}" + assert any([x.lint_test == "test_snap_versions" for x in subworkflow_lint.warned]) + + # cleanup + with open(snap_file, "w") as fh: + fh.write(content) + + def test_subworkflows_missing_test_dir(self): + """Test linting a nf-test subworkflow if the tests directory is missing""" + test_dir = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests") + test_dir_copy = shutil.copytree(test_dir, test_dir.parent / "tests_copy") + shutil.rmtree(test_dir) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0 + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}" + assert any([x.lint_test == "test_dir_versions" for x in subworkflow_lint.warned]) + + # cleanup + shutil.copytree(test_dir_copy, test_dir) + + def test_subworkflows_missing_main_nf(self): + """Test linting a nf-test subworkflow if the main.nf file is missing""" + main_nf = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf") + main_nf_copy = shutil.copy(main_nf, main_nf.parent / "main_nf_copy") + main_nf.unlink() + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + assert subworkflow_lint.failed[0].lint_test == "main_nf_exists" + + # cleanup + shutil.copy(main_nf_copy, main_nf) + + def test_subworkflows_empty_file_in_snapshot(self): + """Test linting a nf-test subworkflow with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)""" + snap_file = Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ) + snap = json.load(snap_file.open()) + content = snap_file.read_text() + snap["my test"]["content"][0]["0"] = "test:md5,d41d8cd98f00b204e9800998ecf8427e" + + with open(snap_file, "w") as fh: + json.dump(snap, fh) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + assert subworkflow_lint.failed[0].lint_test == "test_snap_md5sum" + + # reset the file + with open(snap_file, "w") as fh: + fh.write(content) + + def test_subworkflows_empty_file_in_stub_snapshot(self): + """Test linting a nf-test subworkflow with an empty file sha sum in the stub test snapshot, which should make it not fail""" + snap_file = Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ) + snap = json.load(snap_file.open()) + content = snap_file.read_text() + snap["my_test_stub"] = {"content": [{"0": "test:md5,d41d8cd98f00b204e9800998ecf8427e", "versions": {}}]} + + with open(snap_file, "w") as fh: + json.dump(snap, fh) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, 
subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + assert any(x.lint_test == "test_snap_md5sum" for x in subworkflow_lint.passed) + + # reset the file + with open(snap_file, "w") as fh: + fh.write(content) diff --git a/tests/subworkflows/test_list.py b/tests/subworkflows/test_list.py new file mode 100644 index 0000000000..5e4e6feb0a --- /dev/null +++ b/tests/subworkflows/test_list.py @@ -0,0 +1,48 @@ +from rich.console import Console + +import nf_core.subworkflows + +from ..test_subworkflows import TestSubworkflows +from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL + + +class TestSubworkflowsList(TestSubworkflows): + def test_subworkflows_list_remote(self): + """Test listing available subworkflows""" + subworkflows_list = nf_core.subworkflows.SubworkflowList(None, remote=True) + listed_subworkflows = subworkflows_list.list_components() + console = Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output + + def test_subworkflows_list_remote_gitlab(self): + """Test listing the subworkflows in the remote gitlab repo""" + subworkflows_list = nf_core.subworkflows.SubworkflowList( + None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + listed_subworkflows = subworkflows_list.list_components() + console = Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output + + def test_subworkflows_install_and_list_subworkflows(self): + """Test listing locally installed subworkflows""" + self.subworkflow_install.install("bam_sort_stats_samtools") + subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False) + listed_subworkflows = subworkflows_list.list_components() + console = Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output + + def test_subworkflows_install_gitlab_and_list_subworkflows(self): + """Test listing locally installed subworkflows""" + self.subworkflow_install_gitlab.install("bam_sort_stats_samtools") + subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False) + listed_subworkflows = subworkflows_list.list_components() + console = Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output diff --git a/tests/subworkflows/test_remove.py b/tests/subworkflows/test_remove.py new file mode 100644 index 0000000000..61c016b232 --- /dev/null +++ b/tests/subworkflows/test_remove.py @@ -0,0 +1,101 @@ +from pathlib import Path + +from nf_core.modules.modules_json import ModulesJson + +from ..test_subworkflows import TestSubworkflows + + +class TestSubworkflowsRemove(TestSubworkflows): + def test_subworkflows_remove_uninstalled_subworkflow(self): + """Test removing subworkflow without installing it""" + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") is False + + def test_subworkflows_remove_subworkflow(self): + """Test removing subworkflow and all it's dependencies after installing it""" + self.subworkflow_install.install("bam_sort_stats_samtools") + + subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = 
Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + ModulesJson(self.pipeline_dir) + mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") + mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() + assert Path.exists(bam_sort_stats_samtools_path) is False + assert Path.exists(bam_stats_samtools_path) is False + assert Path.exists(samtools_index_path) is False + assert mod_json_before != mod_json_after + # assert subworkflows key is removed from modules.json + assert ( + "bam_sort_stats_samtools" + not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys() + ) + assert ( + "samtools/index" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"].keys() + ) + + def test_subworkflows_remove_subworkflow_keep_installed_module(self): + """Test removing subworkflow and all it's dependencies after installing it, except for a separately installed module""" + self.subworkflow_install.install("bam_sort_stats_samtools") + self.mods_install.install("samtools/index") + + subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + + mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") + mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() + + assert Path.exists(bam_sort_stats_samtools_path) is False + assert Path.exists(bam_stats_samtools_path) is False + assert Path.exists(samtools_index_path) is True + assert mod_json_before != mod_json_after + # assert subworkflows key is removed from modules.json + assert ( + "bam_sort_stats_samtools" + not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys() + ) + assert ( + "samtools/index" + in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"].keys() + ) + + def test_subworkflows_remove_one_of_two_subworkflow(self): + """Test removing subworkflow and all it's dependencies after installing it""" + self.subworkflow_install.install("bam_sort_stats_samtools") + self.subworkflow_install.install("bam_stats_samtools") + subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") + + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") + + assert Path.exists(subworkflow_path) is True + assert Path.exists(bam_sort_stats_samtools_path) is False + assert Path.exists(bam_stats_samtools_path) is True + assert Path.exists(samtools_index_path) is False + assert Path.exists(samtools_stats_path) is True + self.subworkflow_remove.remove("bam_stats_samtools") + + def test_subworkflows_remove_included_subworkflow(self): + """Test removing subworkflow which is installed by another subworkflow and all it's 
dependencies.""" + self.subworkflow_install.install("bam_sort_stats_samtools") + subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") + + assert self.subworkflow_remove.remove("bam_stats_samtools") is False + + assert Path.exists(subworkflow_path) is True + assert Path.exists(bam_sort_stats_samtools_path) is True + assert Path.exists(bam_stats_samtools_path) is True + assert Path.exists(samtools_index_path) is True + assert Path.exists(samtools_stats_path) is True + self.subworkflow_remove.remove("bam_sort_stats_samtools") diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py new file mode 100644 index 0000000000..d3b2433572 --- /dev/null +++ b/tests/subworkflows/test_update.py @@ -0,0 +1,370 @@ +import logging +import shutil +import tempfile +from pathlib import Path +from unittest import mock + +import questionary +import yaml + +import nf_core.utils +from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE +from nf_core.modules.update import ModuleUpdate +from nf_core.subworkflows.update import SubworkflowUpdate + +from ..test_subworkflows import TestSubworkflows +from ..utils import OLD_SUBWORKFLOWS_SHA, cmp_component + + +class TestSubworkflowsUpdate(TestSubworkflows): + def test_install_and_update(self): + """Installs a subworkflow in the pipeline and updates it (no change)""" + self.subworkflow_install.install("bam_stats_samtools") + update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False) + + # Copy the sw files and check that they are unaffected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "bam_stats_samtools") + shutil.copytree(sw_path, tmpdir) + + assert update_obj.update("bam_stats_samtools") is True + assert cmp_component(tmpdir, sw_path) is True + + def test_install_at_hash_and_update(self): + """Installs an old version of a subworkflow in the pipeline and updates it""" + assert self.subworkflow_install_old.install("fastq_align_bowtie2") + update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False, update_deps=True) + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Copy the sw files and check that they are affected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") + shutil.copytree(sw_path, tmpdir) + + assert update_obj.update("fastq_align_bowtie2") is True + assert cmp_component(tmpdir, sw_path) is False + + # Check that the modules.json is correctly updated + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + # Get the up-to-date git_sha for the sw from the ModulesRepo object + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + ) + + # Mock questionary answer: update components + @mock.patch.object(questionary.Question, 
"unsafe_ask", return_value=True) + def test_install_at_hash_and_update_limit_output(self, mock_prompt): + """Installs an old version of a subworkflow in the pipeline and updates it with limit_output=True""" + self.caplog.set_level(logging.INFO) + assert self.subworkflow_install_old.install("fastq_align_bowtie2") + + update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=True, update_deps=True, limit_output=True) + + assert update_obj.update("fastq_align_bowtie2") + + # Check changes not shown for non-.nf files + assert "Changes in 'fastq_align_bowtie2/meta.yml' but not shown" in self.caplog.text + assert "Changes in 'bam_sort_stats_samtools/meta.yml' but not shown" in self.caplog.text + assert "Changes in 'bam_stats_samtools/meta.yml' but not shown" in self.caplog.text + assert "Changes in 'samtools/flagstat/meta.yml' but not shown" in self.caplog.text + # Check changes only shown for main.nf files + assert "Changes in 'fastq_align_bowtie2/main.nf'" in self.caplog.text + for line in self.caplog.text.split("\n"): + if line.startswith("---"): + assert line.endswith("main.nf") + + def test_install_at_hash_and_update_and_save_diff_to_file(self): + """Installs an old version of a sw in the pipeline and updates it. Save differences to a file.""" + assert self.subworkflow_install_old.install("fastq_align_bowtie2") + patch_path = Path(self.pipeline_dir, "fastq_align_bowtie2.patch") + update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True) + + # Copy the sw files and check that they are affected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") + shutil.copytree(sw_path, tmpdir) + + assert update_obj.update("fastq_align_bowtie2") is True + assert cmp_component(tmpdir, sw_path) is True + + with open(patch_path) as fh: + line = fh.readline() + assert line.startswith( + "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" + ) + + def test_install_at_hash_and_update_and_save_diff_limit_output(self): + """Installs an old version of a sw in the pipeline and updates it. 
Save differences to a file."""
+        # Install old version of fastq_align_bowtie2
+        self.subworkflow_install_old.install("fastq_align_bowtie2")
+        patch_path = Path(self.pipeline_dir, "fastq_align_bowtie2.patch")
+        # Update saving the differences to a patch file and with `limit_output`
+        update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True, limit_output=True)
+        assert update_obj.update("fastq_align_bowtie2")
+
+        # Check that the patch file was created
+        assert patch_path.exists(), f"Patch file was not created at {patch_path}"
+
+        # Read the contents of the patch file
+        with open(patch_path) as fh:
+            content = fh.read()
+            # Check changes not shown for non-.nf files
+            assert "Changes in 'fastq_align_bowtie2/meta.yml' but not shown" in content
+            assert "Changes in 'bam_sort_stats_samtools/meta.yml' but not shown" in content
+            assert "Changes in 'bam_stats_samtools/meta.yml' but not shown" in content
+            assert "Changes in 'samtools/flagstat/meta.yml' but not shown" in content
+            # Check changes only shown for main.nf files
+            assert "Changes in 'fastq_align_bowtie2/main.nf'" in content
+            # Iterate over the lines of the patch, not the characters of the string
+            for line in content.split("\n"):
+                if line.startswith("---"):
+                    assert line.endswith("main.nf")
+
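+    # Note: `limit_output` is meant to confine the displayed diff to *.nf files.
+    # The two tests above check this behaviour in both output modes: once for
+    # the log output (captured via caplog) and once for a diff saved to a patch
+    # file with `save_diff_fn`.
+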
+    def test_update_all(self):
+        """Updates all subworkflows present in the pipeline"""
+        # Install subworkflows fastq_align_bowtie2, bam_sort_stats_samtools, bam_stats_samtools
+        self.subworkflow_install.install("fastq_align_bowtie2")
+        # Update all subworkflows
+        update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False)
+        assert update_obj.update() is True
+
+        # We must reload the modules.json to get the updated version
+        mod_json_obj = ModulesJson(self.pipeline_dir)
+        mod_json = mod_json_obj.get_modules_json()
+        # Loop through all subworkflows and check that they are updated (according to the modules.json file)
+        for sw in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]:
+            correct_git_sha = list(update_obj.modules_repo.get_component_git_log(sw, "subworkflows", depth=1))[0][
+                "git_sha"
+            ]
+            current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw][
+                "git_sha"
+            ]
+            assert correct_git_sha == current_git_sha
+
+    def test_update_with_config_fixed_version(self):
+        """Try updating when there are entries in the .nf-core.yml"""
+        # Install subworkflow at the latest version
+        assert self.subworkflow_install.install("fastq_align_bowtie2")
+
+        # Fix the subworkflow version in the .nf-core.yml to an old version
+        update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": OLD_SUBWORKFLOWS_SHA}}}
+        config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir)
+        tools_config["update"] = update_config
+        with open(Path(self.pipeline_dir, config_fn), "w") as f:
+            yaml.dump(tools_config, f)
+
+        # Update all subworkflows in the pipeline
+        update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False)
+        assert update_obj.update() is True
+
+        # Check that the git sha for fastq_align_bowtie2 is correctly downgraded
+        mod_json = ModulesJson(self.pipeline_dir).get_modules_json()
+        assert "fastq_align_bowtie2" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]
+        assert (
+            "git_sha"
+            in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"]
+        )
+        assert (
+            mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][
+                "git_sha"
+            ]
+            == OLD_SUBWORKFLOWS_SHA
+        )
+
+    def test_update_with_config_dont_update(self):
+        """Try updating when the subworkflow is to be ignored"""
+        # Install an old version of fastq_align_bowtie2
+        self.subworkflow_install_old.install("fastq_align_bowtie2")
+
+        # Set the fastq_align_bowtie2 field to no update in the .nf-core.yml
+        update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": False}}}
+        config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir)
+        tools_config["update"] = update_config
+        with open(Path(self.pipeline_dir, config_fn), "w") as f:
+            yaml.dump(tools_config, f)
+
+        # Update all subworkflows in the pipeline
+        update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False)
+        assert update_obj.update() is True
+
+        # Check that the git sha for fastq_align_bowtie2 still points to the old version
+        mod_json = ModulesJson(self.pipeline_dir).get_modules_json()
+        assert "fastq_align_bowtie2" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]
+        assert (
+            "git_sha"
+            in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"]
+        )
+        assert (
+            mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][
+                "git_sha"
+            ]
+            == OLD_SUBWORKFLOWS_SHA
+        )
+
+    def test_update_with_config_fix_all(self):
+        """Fix the version of all nf-core subworkflows"""
+        # Install subworkflow at the latest version
+        assert self.subworkflow_install.install("fastq_align_bowtie2")
+
+        # Fix the version of all nf-core subworkflows in the .nf-core.yml to an old version
+        update_config = {NF_CORE_MODULES_REMOTE: OLD_SUBWORKFLOWS_SHA}
+        config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir)
+        tools_config["update"] = update_config
+        with open(Path(self.pipeline_dir, config_fn), "w") as f:
+            yaml.dump(tools_config, f)
+
+        # Update fastq_align_bowtie2
+        update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=False, update_deps=True, show_diff=False)
+        assert update_obj.update("fastq_align_bowtie2") is True
+
+        # Check that the git sha for fastq_align_bowtie2 is correctly downgraded
+        mod_json = ModulesJson(self.pipeline_dir).get_modules_json()
+        assert (
+            "git_sha"
+            in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"]
+        )
+        assert (
+            mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][
+                "git_sha"
+            ]
+            == OLD_SUBWORKFLOWS_SHA
+        )
+
+    def test_update_with_config_no_updates(self):
+        """Don't update any nf-core subworkflows"""
+        # Install an old version of fastq_align_bowtie2
+        self.subworkflow_install_old.install("fastq_align_bowtie2")
+        old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json()
+
+        # Set all repository updates to False
+        update_config = {NF_CORE_MODULES_REMOTE: False}
+        config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir)
+        tools_config["update"] = update_config
+        with open(Path(self.pipeline_dir, config_fn), "w") as f:
+            yaml.dump(tools_config, f)
+
+        # Update all subworkflows in the pipeline
+        update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False)
+        assert update_obj.update() is True
+
+        # Check that the git sha of every subworkflow is unchanged
+        mod_json = ModulesJson(self.pipeline_dir).get_modules_json()
+        for sw in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]:
+            assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]
+            assert (
+                mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"]
+                == old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"]
+            )
+
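+    # The four config-driven tests above all write an `update` block into the
+    # pipeline's .nf-core.yml before running the update. Roughly (the values
+    # here are illustrative), the resulting YAML looks like:
+    #
+    #   update:
+    #     https://github.com/nf-core/modules.git:
+    #       nf-core:
+    #         fastq_align_bowtie2: <git SHA to pin, or False to skip>
+    #
+    # A SHA or False can also be set directly on the remote URL to pin or
+    # freeze every component from that repository at once.
+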
mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]: + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw] + assert ( + mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + == old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + ) + + def test_update_all_linked_components_from_subworkflow(self): + """Update a subworkflow and all modules and subworkflows used on it""" + # Install an old version of fastq_align_bowtie2 + self.subworkflow_install_old.install("fastq_align_bowtie2") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Copy the sw files and check that they are affected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + subworkflows_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME) + modules_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME) + shutil.copytree(subworkflows_path, Path(tmpdir, "subworkflows")) + shutil.copytree(modules_path, Path(tmpdir, "modules")) + + # Update fastq_align_bowtie2 and all modules and subworkflows used by that + update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) + assert update_obj.update("fastq_align_bowtie2") is True + + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + # Loop through all modules and subworkflows used in fastq_align_bowtie2 + # check that they are updated (according to the modules.json file) + for sw in ["fastq_align_bowtie2", "bam_sort_stats_samtools", "bam_stats_samtools"]: + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + ) + for mod in [ + "bowtie2/align", + "samtools/index", + "samtools/sort", + "samtools/flagstat", + "samtools/idxstats", + "samtools/stats", + ]: + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + ) + # Check that the subworkflow files are updated + assert ( + cmp_component( + Path(tmpdir, "subworkflows", "fastq_align_bowtie2"), Path(subworkflows_path, "fastq_align_bowtie2") + ) + is False + ) + + def test_update_all_subworkflows_from_module(self): + """Update a module and all subworkflows that use this module""" + # Install an old version of fastq_align_bowtie2 and thus all modules used by it (bowtie2/align) + self.subworkflow_install_old.install("fastq_align_bowtie2") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Copy the sw files and check that they are affected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") + shutil.copytree(sw_path, Path(tmpdir, "fastq_align_bowtie2")) + + # Update bowtie2/align and all subworkflows using it + update_obj = ModuleUpdate(self.pipeline_dir, update_deps=True, show_diff=False) + assert update_obj.update("bowtie2/align") is True + + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + # Check that bowtie2/align and fastq_align_bowtie2 are updated + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + != 
mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + ) + assert cmp_component(Path(tmpdir, "fastq_align_bowtie2"), sw_path) is False + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]["bowtie2/align"]["git_sha"] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]["bowtie2/align"]["git_sha"] + ) + + def test_update_change_of_included_modules(self): + """Update a subworkflow which has a module change in the new version.""" + # Install an old version of vcf_annotate_ensemblvep with tabix/bgziptabix and without tabix/tabix + self.subworkflow_install_module_change.install("vcf_annotate_ensemblvep") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Check that tabix/bgziptabix is there + assert "tabix/bgziptabix" in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() + # Check that tabix/tabix is not there + assert "tabix/tabix" not in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() + + # Update vcf_annotate_ensemblvep without tabix/bgziptabix and with tabix/tabix + update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) + assert update_obj.update("vcf_annotate_ensemblvep") is True + + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Check that tabix/bgziptabix is not there + assert "tabix/bgziptabix" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() + # Check that tabix/tabix is there + assert "tabix/tabix" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() + # Check that ensemblevep is not there but instead we have ensemblevep/vep (due to a file re-naming) + assert "ensemblvep" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert "ensemblvep/vep" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "ensemblvep/vep").is_dir() diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py deleted file mode 100644 index 42ed716b1c..0000000000 --- a/tests/subworkflows/update.py +++ /dev/null @@ -1,376 +0,0 @@ -import logging -import shutil -import tempfile -from pathlib import Path -from unittest import mock - -import questionary -import yaml - -import nf_core.utils -from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE -from nf_core.modules.update import ModuleUpdate -from nf_core.subworkflows.update import SubworkflowUpdate - -from ..utils import OLD_SUBWORKFLOWS_SHA, cmp_component - - -def test_install_and_update(self): - """Installs a subworkflow in the pipeline and updates it (no change)""" - self.subworkflow_install.install("bam_stats_samtools") - update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False) - - # Copy the sw files and check that they are unaffected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - sw_path = Path(self.pipeline_dir, 
"subworkflows", NF_CORE_MODULES_NAME, "bam_stats_samtools") - shutil.copytree(sw_path, tmpdir) - - assert update_obj.update("bam_stats_samtools") is True - assert cmp_component(tmpdir, sw_path) is True - - -def test_install_at_hash_and_update(self): - """Installs an old version of a subworkflow in the pipeline and updates it""" - assert self.subworkflow_install_old.install("fastq_align_bowtie2") - update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False, update_deps=True) - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Copy the sw files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - - sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") - shutil.copytree(sw_path, tmpdir) - - assert update_obj.update("fastq_align_bowtie2") is True - assert cmp_component(tmpdir, sw_path) is False - - # Check that the modules.json is correctly updated - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - # Get the up-to-date git_sha for the sw from the ModulesRepo object - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - ) - - -# Mock questionary answer: update components -@mock.patch.object(questionary.Question, "unsafe_ask", return_value=True) -def test_install_at_hash_and_update_limit_output(self, mock_prompt): - """Installs an old version of a subworkflow in the pipeline and updates it with limit_output=True""" - self.caplog.set_level(logging.INFO) - assert self.subworkflow_install_old.install("fastq_align_bowtie2") - - update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=True, update_deps=True, limit_output=True) - - assert update_obj.update("fastq_align_bowtie2") - - # Check changes not shown for non-.nf files - assert "Changes in 'fastq_align_bowtie2/meta.yml' but not shown" in self.caplog.text - assert "Changes in 'bam_sort_stats_samtools/meta.yml' but not shown" in self.caplog.text - assert "Changes in 'bam_stats_samtools/meta.yml' but not shown" in self.caplog.text - assert "Changes in 'samtools/flagstat/meta.yml' but not shown" in self.caplog.text - # Check changes only shown for main.nf files - assert "Changes in 'fastq_align_bowtie2/main.nf'" in self.caplog.text - for line in self.caplog.text.split("\n"): - if line.startswith("---"): - assert line.endswith("main.nf") - - -def test_install_at_hash_and_update_and_save_diff_to_file(self): - """Installs an old version of a sw in the pipeline and updates it. 
Save differences to a file.""" - assert self.subworkflow_install_old.install("fastq_align_bowtie2") - patch_path = Path(self.pipeline_dir, "fastq_align_bowtie2.patch") - update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True) - - # Copy the sw files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - - sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") - shutil.copytree(sw_path, tmpdir) - - assert update_obj.update("fastq_align_bowtie2") is True - assert cmp_component(tmpdir, sw_path) is True - - with open(patch_path) as fh: - line = fh.readline() - assert line.startswith( - "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" - ) - - -def test_install_at_hash_and_update_and_save_diff_limit_output(self): - """Installs an old version of a sw in the pipeline and updates it. Save differences to a file.""" - # Install old version of fastq_align_bowtie2 - self.subworkflow_install_old.install("fastq_align_bowtie2") - patch_path = Path(self.pipeline_dir, "fastq_align_bowtie2.patch") - # Update saving the differences to a patch file and with `limit_output` - update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True, limit_output=True) - assert update_obj.update("fastq_align_bowtie2") - - # Check that the patch file was created - assert patch_path.exists(), f"Patch file was not created at {patch_path}" - - # Read the contents of the patch file - with open(patch_path) as fh: - content = fh.read() - # Check changes not shown for non-.nf files - assert "Changes in 'fastq_align_bowtie2/meta.yml' but not shown" in content - assert "Changes in 'bam_sort_stats_samtools/meta.yml' but not shown" in content - assert "Changes in 'bam_stats_samtools/meta.yml' but not shown" in content - assert "Changes in 'samtools/flagstat/meta.yml' but not shown" in content - # Check changes only shown for main.nf files - assert "Changes in 'fastq_align_bowtie2/main.nf'" in content - for line in content: - if line.startswith("---"): - assert line.endswith("main.nf") - - -def test_update_all(self): - """Updates all subworkflows present in the pipeline""" - # Install subworkflows fastq_align_bowtie2, bam_sort_stats_samtools, bam_stats_samtools - self.subworkflow_install.install("fastq_align_bowtie2") - # Update all subworkflows - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - # We must reload the modules.json to get the updated version - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - # Loop through all subworkflows and check that they are updated (according to the modules.json file) - for sw in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]: - correct_git_sha = list(update_obj.modules_repo.get_component_git_log(sw, "subworkflows", depth=1))[0]["git_sha"] - current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - assert correct_git_sha == current_git_sha - - -def test_update_with_config_fixed_version(self): - """Try updating when there are entries in the .nf-core.yml""" - # Install subworkflow at the latest version - assert self.subworkflow_install.install("fastq_align_bowtie2") - - # Fix the subworkflow version in the .nf-core.yml to an old version - update_config = {NF_CORE_MODULES_REMOTE: 
{NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": OLD_SUBWORKFLOWS_SHA}}} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all subworkflows in the pipeline - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - # Check that the git sha for fastq_align_bowtie2 is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "fastq_align_bowtie2" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME] - assert ( - "git_sha" - in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] - ) - assert ( - mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - == OLD_SUBWORKFLOWS_SHA - ) - - -def test_update_with_config_dont_update(self): - """Try updating when sw is to be ignored""" - # Install an old version of fastq_align_bowtie2 - self.subworkflow_install_old.install("fastq_align_bowtie2") - - # Set the fastq_align_bowtie2 field to no update in the .nf-core.yml - update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": False}}} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - # Check that the git sha for fastq_align_bowtie2 is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "fastq_align_bowtie2" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME] - assert ( - "git_sha" - in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] - ) - assert ( - mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - == OLD_SUBWORKFLOWS_SHA - ) - - -def test_update_with_config_fix_all(self): - """Fix the version of all nf-core subworkflows""" - # Install subworkflow at the latest version - assert self.subworkflow_install.install("fastq_align_bowtie2") - - # Fix the version of all nf-core subworkflows in the .nf-core.yml to an old version - update_config = {NF_CORE_MODULES_REMOTE: OLD_SUBWORKFLOWS_SHA} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update fastq_align_bowtie2 - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=False, update_deps=True, show_diff=False) - assert update_obj.update("fastq_align_bowtie2") is True - - # Check that the git sha for fastq_align_bowtie2 is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert ( - "git_sha" - in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] - ) - assert ( - mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - == OLD_SUBWORKFLOWS_SHA - ) - - -def 
test_update_with_config_no_updates(self): - """Don't update any nf-core subworkflows""" - # Install an old version of fastq_align_bowtie2 - self.subworkflow_install_old.install("fastq_align_bowtie2") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Set all repository updates to False - update_config = {NF_CORE_MODULES_REMOTE: False} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all subworkflows in the pipeline - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - # Check that the git sha for fastq_align_bowtie2 is correctly downgraded and none of the subworkflows has changed - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - for sw in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]: - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw] - assert ( - mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - == old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - ) - - -def test_update_all_linked_components_from_subworkflow(self): - """Update a subworkflow and all modules and subworkflows used on it""" - # Install an old version of fastq_align_bowtie2 - self.subworkflow_install_old.install("fastq_align_bowtie2") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Copy the sw files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - subworkflows_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME) - modules_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME) - shutil.copytree(subworkflows_path, Path(tmpdir, "subworkflows")) - shutil.copytree(modules_path, Path(tmpdir, "modules")) - - # Update fastq_align_bowtie2 and all modules and subworkflows used by that - update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) - assert update_obj.update("fastq_align_bowtie2") is True - - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - # Loop through all modules and subworkflows used in fastq_align_bowtie2 - # check that they are updated (according to the modules.json file) - for sw in ["fastq_align_bowtie2", "bam_sort_stats_samtools", "bam_stats_samtools"]: - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - ) - for mod in [ - "bowtie2/align", - "samtools/index", - "samtools/sort", - "samtools/flagstat", - "samtools/idxstats", - "samtools/stats", - ]: - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - ) - # Check that the subworkflow files are updated - assert ( - cmp_component( - Path(tmpdir, "subworkflows", "fastq_align_bowtie2"), Path(subworkflows_path, "fastq_align_bowtie2") - ) - is False - ) - - -def test_update_all_subworkflows_from_module(self): - """Update a module and all subworkflows that use this module""" - # Install an old version of fastq_align_bowtie2 and thus all modules used by it 
(bowtie2/align) - self.subworkflow_install_old.install("fastq_align_bowtie2") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Copy the sw files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") - shutil.copytree(sw_path, Path(tmpdir, "fastq_align_bowtie2")) - - # Update bowtie2/align and all subworkflows using it - update_obj = ModuleUpdate(self.pipeline_dir, update_deps=True, show_diff=False) - assert update_obj.update("bowtie2/align") is True - - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - # Check that bowtie2/align and fastq_align_bowtie2 are updated - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - ) - assert cmp_component(Path(tmpdir, "fastq_align_bowtie2"), sw_path) is False - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]["bowtie2/align"]["git_sha"] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]["bowtie2/align"]["git_sha"] - ) - - -def test_update_change_of_included_modules(self): - """Update a subworkflow which has a module change in the new version.""" - # Install an old version of vcf_annotate_ensemblvep with tabix/bgziptabix and without tabix/tabix - self.subworkflow_install_module_change.install("vcf_annotate_ensemblvep") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Check that tabix/bgziptabix is there - assert "tabix/bgziptabix" in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() - # Check that tabix/tabix is not there - assert "tabix/tabix" not in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() - - # Update vcf_annotate_ensemblvep without tabix/bgziptabix and with tabix/tabix - update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) - assert update_obj.update("vcf_annotate_ensemblvep") is True - - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Check that tabix/bgziptabix is not there - assert "tabix/bgziptabix" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() - # Check that tabix/tabix is there - assert "tabix/tabix" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() - # Check that ensemblevep is not there but instead we have ensemblevep/vep (due to a file re-naming) - assert "ensemblvep" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert "ensemblvep/vep" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "ensemblvep/vep").is_dir() diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 17bc678cad..af581fc9ab 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -119,77 
+119,34 @@ def tearDown(self): def _use_caplog(self, caplog): self.caplog = caplog - ################################################ - # Test of the individual subworkflow commands. # - ################################################ - - from .subworkflows.create import ( # type: ignore[misc] - test_subworkflows_create_fail_exists, - test_subworkflows_create_nfcore_modules, - test_subworkflows_create_succeed, - test_subworkflows_migrate, - test_subworkflows_migrate_no_delete, - ) - from .subworkflows.info import ( # type: ignore[misc] - test_subworkflows_info_in_modules_repo, - test_subworkflows_info_local, - test_subworkflows_info_remote, - test_subworkflows_info_remote_gitlab, - ) - from .subworkflows.install import ( # type: ignore[misc] - test_subworkflow_install_nopipeline, - test_subworkflows_install_alternate_remote, - test_subworkflows_install_bam_sort_stats_samtools, - test_subworkflows_install_bam_sort_stats_samtools_twice, - test_subworkflows_install_different_branch_fail, - test_subworkflows_install_emptypipeline, - test_subworkflows_install_from_gitlab, - test_subworkflows_install_nosubworkflow, - test_subworkflows_install_tracking, - test_subworkflows_install_tracking_added_already_installed, - test_subworkflows_install_tracking_added_super_subworkflow, - ) - from .subworkflows.lint import ( # type: ignore[misc] - test_subworkflows_absent_version, - test_subworkflows_empty_file_in_snapshot, - test_subworkflows_empty_file_in_stub_snapshot, - test_subworkflows_lint, - test_subworkflows_lint_capitalization_fail, - test_subworkflows_lint_empty, - test_subworkflows_lint_gitlab_subworkflows, - test_subworkflows_lint_include_multiple_alias, - test_subworkflows_lint_less_than_two_modules_warning, - test_subworkflows_lint_multiple_remotes, - test_subworkflows_lint_new_subworkflow, - test_subworkflows_lint_no_gitlab, - test_subworkflows_lint_snapshot_file, - test_subworkflows_lint_snapshot_file_missing_fail, - test_subworkflows_lint_snapshot_file_not_needed, - ) - from .subworkflows.list import ( # type: ignore[misc] - test_subworkflows_install_and_list_subworkflows, - test_subworkflows_install_gitlab_and_list_subworkflows, - test_subworkflows_list_remote, - test_subworkflows_list_remote_gitlab, - ) - from .subworkflows.remove import ( # type: ignore[misc] - test_subworkflows_remove_included_subworkflow, - test_subworkflows_remove_one_of_two_subworkflow, - test_subworkflows_remove_subworkflow, - test_subworkflows_remove_subworkflow_keep_installed_module, - ) - from .subworkflows.update import ( # type: ignore[misc] - test_install_and_update, - test_install_at_hash_and_update, - test_install_at_hash_and_update_and_save_diff_limit_output, - test_install_at_hash_and_update_and_save_diff_to_file, - test_install_at_hash_and_update_limit_output, - test_update_all, - test_update_all_linked_components_from_subworkflow, - test_update_all_subworkflows_from_module, - test_update_change_of_included_modules, - test_update_with_config_dont_update, - test_update_with_config_fix_all, - test_update_with_config_fixed_version, - test_update_with_config_no_updates, - ) + # ################################################ + # # Test of the individual subworkflow commands. 
# + # ################################################ + + # from .subworkflows.list import ( # type: ignore[misc] + # test_subworkflows_install_and_list_subworkflows, + # test_subworkflows_install_gitlab_and_list_subworkflows, + # test_subworkflows_list_remote, + # test_subworkflows_list_remote_gitlab, + # ) + # from .subworkflows.remove import ( # type: ignore[misc] + # test_subworkflows_remove_included_subworkflow, + # test_subworkflows_remove_one_of_two_subworkflow, + # test_subworkflows_remove_subworkflow, + # test_subworkflows_remove_subworkflow_keep_installed_module, + # ) + # from .subworkflows.update import ( # type: ignore[misc] + # test_install_and_update, + # test_install_at_hash_and_update, + # test_install_at_hash_and_update_and_save_diff_limit_output, + # test_install_at_hash_and_update_and_save_diff_to_file, + # test_install_at_hash_and_update_limit_output, + # test_update_all, + # test_update_all_linked_components_from_subworkflow, + # test_update_all_subworkflows_from_module, + # test_update_change_of_included_modules, + # test_update_with_config_dont_update, + # test_update_with_config_fix_all, + # test_update_with_config_fixed_version, + # test_update_with_config_no_updates, + # ) From f7be79d6d599918af49513fb41bd5943df9b7e60 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 18 Jul 2024 10:12:10 +0200 Subject: [PATCH 27/89] migrate modules tests to new structure --- nf_core/components/components_command.py | 4 +- nf_core/modules/bump_versions.py | 3 +- tests/modules/bump_versions.py | 50 -- tests/modules/create.py | 165 ----- tests/modules/info.py | 63 -- tests/modules/install.py | 95 --- tests/modules/lint.py | 892 ----------------------- tests/modules/list.py | 134 ---- tests/modules/modules_json.py | 253 ------- tests/modules/patch.py | 360 --------- tests/modules/remove.py | 22 - tests/modules/test_bump_versions.py | 50 ++ tests/modules/test_create.py | 164 +++++ tests/modules/test_info.py | 62 ++ tests/modules/test_install.py | 90 +++ tests/modules/test_lint.py | 865 ++++++++++++++++++++++ tests/modules/test_list.py | 126 ++++ tests/modules/test_modules_json.py | 245 +++++++ tests/modules/test_patch.py | 365 ++++++++++ tests/modules/test_remove.py | 26 + tests/modules/test_update.py | 435 +++++++++++ tests/modules/update.py | 444 ----------- tests/test_modules.py | 128 ---- 23 files changed, 2432 insertions(+), 2609 deletions(-) delete mode 100644 tests/modules/bump_versions.py delete mode 100644 tests/modules/create.py delete mode 100644 tests/modules/info.py delete mode 100644 tests/modules/install.py delete mode 100644 tests/modules/lint.py delete mode 100644 tests/modules/list.py delete mode 100644 tests/modules/modules_json.py delete mode 100644 tests/modules/patch.py delete mode 100644 tests/modules/remove.py create mode 100644 tests/modules/test_bump_versions.py create mode 100644 tests/modules/test_create.py create mode 100644 tests/modules/test_info.py create mode 100644 tests/modules/test_install.py create mode 100644 tests/modules/test_lint.py create mode 100644 tests/modules/test_list.py create mode 100644 tests/modules/test_modules_json.py create mode 100644 tests/modules/test_patch.py create mode 100644 tests/modules/test_remove.py create mode 100644 tests/modules/test_update.py delete mode 100644 tests/modules/update.py diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 4df67639e2..aa1dccc0d4 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ 
-22,7 +22,7 @@ class ComponentCommand: def __init__( self, component_type: str, - dir: str, + dir: Union[str, Path], remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -33,7 +33,7 @@ def __init__( Initialise the ComponentClass object """ self.component_type = component_type - self.dir = dir + self.dir = Path(dir) if dir else None self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress) self.hide_progress = hide_progress self.no_prompts = no_prompts diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index fae3793079..1b94d5910c 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -6,6 +6,7 @@ import logging import os import re +from pathlib import Path from typing import Any, Dict, List, Optional, Tuple, Union import questionary @@ -30,7 +31,7 @@ class ModuleVersionBumper(ComponentCommand): # type: ignore[misc] def __init__( self, - pipeline_dir: str, + pipeline_dir: Union[str, Path], remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, diff --git a/tests/modules/bump_versions.py b/tests/modules/bump_versions.py deleted file mode 100644 index ce8c6dbe11..0000000000 --- a/tests/modules/bump_versions.py +++ /dev/null @@ -1,50 +0,0 @@ -import os -import re - -import pytest - -import nf_core.modules -from nf_core.modules.modules_utils import ModuleExceptionError - - -def test_modules_bump_versions_single_module(self): - """Test updating a single module""" - # Change the bpipe/test version to an older version - env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") - with open(env_yml_path) as fh: - content = fh.read() - new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content) - with open(env_yml_path, "w") as fh: - fh.write(new_content) - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) - version_bumper.bump_versions(module="bpipe/test") - assert len(version_bumper.failed) == 0 - - -def test_modules_bump_versions_all_modules(self): - """Test updating all modules""" - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) - version_bumper.bump_versions(all_modules=True) - assert len(version_bumper.failed) == 0 - - -def test_modules_bump_versions_fail(self): - """Fail updating a module with wrong name""" - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) - with pytest.raises(ModuleExceptionError) as excinfo: - version_bumper.bump_versions(module="no/module") - assert "Could not find the specified module:" in str(excinfo.value) - - -def test_modules_bump_versions_fail_unknown_version(self): - """Fail because of an unknown version""" - # Change the bpipe/test version to an older version - env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") - with open(env_yml_path) as fh: - content = fh.read() - new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content) - with open(env_yml_path, "w") as fh: - fh.write(new_content) - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) - version_bumper.bump_versions(module="bpipe/test") - assert "Conda package had unknown version" in version_bumper.failed[0][0] diff --git a/tests/modules/create.py b/tests/modules/create.py deleted file mode 100644 index cf39621f07..0000000000 --- a/tests/modules/create.py +++ 
/dev/null @@ -1,165 +0,0 @@ -import os -import shutil -from pathlib import Path -from unittest import mock - -import pytest -import requests_cache -import responses -import yaml -from git.repo import Repo - -import nf_core.modules -from tests.utils import ( - GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, - GITLAB_URL, - mock_anaconda_api_calls, - mock_biocontainers_api_calls, -) - - -def test_modules_create_succeed(self): - """Succeed at creating the TrimGalore! module""" - with responses.RequestsMock() as rsps: - mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7") - mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7") - module_create = nf_core.modules.ModuleCreate( - self.pipeline_dir, "trimgalore", "@author", "process_single", True, True, conda_name="trim-galore" - ) - with requests_cache.disabled(): - module_create.create() - assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "trimgalore.nf")) - - -def test_modules_create_fail_exists(self): - """Fail at creating the same module twice""" - with responses.RequestsMock() as rsps: - mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7") - mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7") - module_create = nf_core.modules.ModuleCreate( - self.pipeline_dir, "trimgalore", "@author", "process_single", False, False, conda_name="trim-galore" - ) - with requests_cache.disabled(): - module_create.create() - with pytest.raises(UserWarning) as excinfo: - with requests_cache.disabled(): - module_create.create() - assert "Module file exists already" in str(excinfo.value) - - -def test_modules_create_nfcore_modules(self): - """Create a module in nf-core/modules clone""" - with responses.RequestsMock() as rsps: - mock_anaconda_api_calls(rsps, "fastqc", "0.11.9") - mock_biocontainers_api_calls(rsps, "fastqc", "0.11.9") - module_create = nf_core.modules.ModuleCreate( - self.nfcore_modules, "fastqc", "@author", "process_low", False, False - ) - with requests_cache.disabled(): - module_create.create() - assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "main.nf")) - assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "tests", "main.nf.test")) - - -def test_modules_create_nfcore_modules_subtool(self): - """Create a tool/subtool module in a nf-core/modules clone""" - with responses.RequestsMock() as rsps: - mock_anaconda_api_calls(rsps, "star", "2.8.10a") - mock_biocontainers_api_calls(rsps, "star", "2.8.10a") - module_create = nf_core.modules.ModuleCreate( - self.nfcore_modules, "star/index", "@author", "process_medium", False, False - ) - with requests_cache.disabled(): - module_create.create() - assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "main.nf")) - assert os.path.exists( - os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "tests", "main.nf.test") - ) - - -@mock.patch("rich.prompt.Confirm.ask") -def test_modules_migrate(self, mock_rich_ask): - """Create a module with the --migrate-pytest option to convert pytest to nf-test""" - pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") - module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort") - - # Clone modules repo with pytests - shutil.rmtree(self.nfcore_modules) - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - with open(module_dir / "main.nf") as fh: - old_main_nf = fh.read() - with open(module_dir / "meta.yml") as fh: - 
old_meta_yml = fh.read() - - # Create a module with --migrate-pytest - mock_rich_ask.return_value = True - module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) - module_create.create() - - with open(module_dir / "main.nf") as fh: - new_main_nf = fh.read() - with open(module_dir / "meta.yml") as fh: - new_meta_yml = fh.read() - nextflow_config = module_dir / "tests" / "nextflow.config" - - # Check that old files have been copied to the new module - assert old_main_nf == new_main_nf - assert old_meta_yml == new_meta_yml - assert nextflow_config.is_file() - - # Check that pytest folder is deleted - assert not pytest_dir.is_dir() - - # Check that pytest_modules.yml is updated - with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: - modules_yml = yaml.safe_load(fh) - assert "samtools/sort" not in modules_yml.keys() - - -@mock.patch("rich.prompt.Confirm.ask") -def test_modules_migrate_no_delete(self, mock_rich_ask): - """Create a module with the --migrate-pytest option to convert pytest to nf-test. - Test that pytest directory is not deleted.""" - pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") - - # Clone modules repo with pytests - shutil.rmtree(self.nfcore_modules) - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - - # Create a module with --migrate-pytest - mock_rich_ask.return_value = False - module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) - module_create.create() - - # Check that pytest folder is not deleted - assert pytest_dir.is_dir() - - # Check that pytest_modules.yml is updated - with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: - modules_yml = yaml.safe_load(fh) - assert "samtools/sort" not in modules_yml.keys() - - -@mock.patch("rich.prompt.Confirm.ask") -def test_modules_migrate_symlink(self, mock_rich_ask): - """Create a module with the --migrate-pytest option to convert pytest with symlinks to nf-test. 
- Test that the symlink is deleted and the file is copied.""" - - pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") - module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort") - - # Clone modules repo with pytests - shutil.rmtree(self.nfcore_modules) - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - - # Create a symlinked file in the pytest directory - symlink_file = pytest_dir / "symlink_file.txt" - symlink_file.symlink_to(module_dir / "main.nf") - - # Create a module with --migrate-pytest - mock_rich_ask.return_value = True - module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) - module_create.create() - - # Check that symlink is deleted - assert not symlink_file.is_symlink() diff --git a/tests/modules/info.py b/tests/modules/info.py deleted file mode 100644 index 2dbd48b240..0000000000 --- a/tests/modules/info.py +++ /dev/null @@ -1,63 +0,0 @@ -from rich.console import Console - -import nf_core.modules - -from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL - - -def test_modules_info_remote(self): - """Test getting info about a remote module""" - mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "fastqc") - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Module: fastqc" in output - assert "Inputs" in output - assert "Outputs" in output - - -def test_modules_info_remote_gitlab(self): - """Test getting info about a module in the remote gitlab repo""" - mods_info = nf_core.modules.ModuleInfo( - self.pipeline_dir, "fastqc", remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH - ) - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Module: fastqc" in output - assert "Inputs" in output - assert "Outputs" in output - assert "--git-remote" in output - - -def test_modules_info_local(self): - """Test getting info about a locally installed module""" - self.mods_install.install("trimgalore") - mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "trimgalore") - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Module: trimgalore" in output - assert "Inputs" in output - assert "Outputs" in output - assert "Location" in output - - -def test_modules_info_in_modules_repo(self): - """Test getting info about a module in the modules repo""" - mods_info = nf_core.modules.ModuleInfo(self.nfcore_modules, "fastqc") - mods_info.local = True - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Module: fastqc" in output - assert "Inputs" in output - assert "Outputs" in output diff --git a/tests/modules/install.py b/tests/modules/install.py deleted file mode 100644 index deca31204e..0000000000 --- a/tests/modules/install.py +++ /dev/null @@ -1,95 +0,0 @@ -import os - -import pytest - -from nf_core.modules.install import ModuleInstall -from nf_core.modules.modules_json import ModulesJson - -from ..utils import ( - GITLAB_BRANCH_ORG_PATH_BRANCH, - GITLAB_BRANCH_TEST_BRANCH, - GITLAB_REPO, - GITLAB_URL, - with_temporary_folder, -) - - -def test_modules_install_nopipeline(self): - """Test installing a 
module - no pipeline given""" - self.mods_install.dir = None - assert self.mods_install.install("foo") is False - - -@with_temporary_folder -def test_modules_install_emptypipeline(self, tmpdir): - """Test installing a module - empty dir given""" - os.mkdir(os.path.join(tmpdir, "nf-core-pipe")) - self.mods_install.dir = os.path.join(tmpdir, "nf-core-pipe") - with pytest.raises(UserWarning) as excinfo: - self.mods_install.install("foo") - assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) - - -def test_modules_install_nomodule(self): - """Test installing a module - unrecognised module given""" - assert self.mods_install.install("foo") is False - - -def test_modules_install_trimgalore(self): - """Test installing a module - TrimGalore!""" - assert self.mods_install.install("trimgalore") is not False - module_path = os.path.join(self.mods_install.dir, "modules", "nf-core", "trimgalore") - assert os.path.exists(module_path) - - -def test_modules_install_trimgalore_twice(self): - """Test installing a module - TrimGalore! already there""" - self.mods_install.install("trimgalore") - assert self.mods_install.install("trimgalore") is True - - -def test_modules_install_from_gitlab(self): - """Test installing a module from GitLab""" - assert self.mods_install_gitlab.install("fastqc") is True - - -def test_modules_install_different_branch_fail(self): - """Test installing a module from a different branch""" - install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) - # The FastQC module does not exists in the branch-test branch - assert install_obj.install("fastqc") is False - - -def test_modules_install_different_branch_succeed(self): - """Test installing a module from a different branch""" - install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) - # The fastp module does exists in the branch-test branch - assert install_obj.install("fastp") is True - - # Verify that the branch entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - assert ( - modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) - == GITLAB_BRANCH_TEST_BRANCH - ) - - -def test_modules_install_tracking(self): - """Test installing a module and finding 'modules' in the installed_by section of modules.json""" - self.mods_install.install("trimgalore") - - # Verify that the installed_by entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - mod_json = modules_json.get_modules_json() - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["trimgalore"][ - "installed_by" - ] == ["modules"] - - -def test_modules_install_alternate_remote(self): - """Test installing a module from a different remote with the same organization path""" - install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_ORG_PATH_BRANCH) - # Install fastqc from GitLab which is also installed from GitHub with the same org_path - with pytest.raises(Exception) as excinfo: - install_obj.install("fastqc") - assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) diff --git a/tests/modules/lint.py b/tests/modules/lint.py deleted file mode 100644 index e1a4e27ff8..0000000000 --- a/tests/modules/lint.py +++ /dev/null @@ -1,892 +0,0 @@ -import json -from pathlib import Path - -import pytest -import yaml -from git.repo import Repo - -import nf_core.modules -from nf_core.modules.lint import 
main_nf -from nf_core.utils import set_wd - -from ..utils import GITLAB_NFTEST_BRANCH, GITLAB_URL -from .patch import BISMARK_ALIGN, CORRECT_SHA, PATCH_BRANCH, REPO_NAME, modify_main_nf - - -def setup_patch(pipeline_dir: str, modify_module: bool): - install_obj = nf_core.modules.ModuleInstall( - pipeline_dir, - prompt=False, - force=False, - remote_url=GITLAB_URL, - branch=PATCH_BRANCH, - sha=CORRECT_SHA, - ) - - # Install the module - install_obj.install(BISMARK_ALIGN) - - if modify_module: - # Modify the module - module_path = Path(pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - modify_main_nf(module_path / "main.nf") - - -def test_modules_lint_trimgalore(self): - """Test linting the TrimGalore! module""" - self.mods_install.install("trimgalore") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) - module_lint.lint(print_results=False, module="trimgalore") - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_lint_empty(self): - """Test linting a pipeline with no modules installed""" - self.mods_remove.remove("fastqc", force=True) - self.mods_remove.remove("multiqc", force=True) - with pytest.raises(LookupError): - nf_core.modules.ModuleLint(dir=self.pipeline_dir) - - -def test_modules_lint_new_modules(self): - """lint a new module""" - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, all_modules=True) - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_lint_no_gitlab(self): - """Test linting a pipeline with no modules installed""" - self.mods_remove.remove("fastqc", force=True) - self.mods_remove.remove("multiqc", force=True) - with pytest.raises(LookupError): - nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) - - -def test_modules_lint_gitlab_modules(self): - """Lint modules from a different remote""" - self.mods_install_gitlab.install("fastqc") - self.mods_install_gitlab.install("multiqc") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) - module_lint.lint(print_results=False, all_modules=True) - assert len(module_lint.failed) == 2 - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_lint_multiple_remotes(self): - """Lint modules from a different remote""" - self.mods_install_gitlab.install("multiqc") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) - module_lint.lint(print_results=False, all_modules=True) - assert len(module_lint.failed) == 1 - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_lint_registry(self): - """Test linting the samtools module and alternative registry""" - self.mods_install.install("samtools") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, registry="public.ecr.aws") - module_lint.lint(print_results=False, module="samtools") - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) - module_lint.lint(print_results=False, module="samtools") - assert len(module_lint.failed) == 0, f"Linting failed with 
-def test_modules_lint_registry(self):
-    """Test linting the samtools module with an alternative container registry"""
-    self.mods_install.install("samtools")
-    module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, registry="public.ecr.aws")
-    module_lint.lint(print_results=False, module="samtools")
-    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-    module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir)
-    module_lint.lint(print_results=False, module="samtools")
-    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-
-
-def test_modules_lint_patched_modules(self):
-    """
-    Test creating a patch file and applying it to a new version of the files
-    """
-    setup_patch(self.pipeline_dir, True)
-
-    # Create a patch file
-    patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
-    patch_obj.patch(BISMARK_ALIGN)
-
-    # temporarily change the working directory to the pipeline directory
-    # to avoid an error from try_apply_patch() during linting
-    with set_wd(self.pipeline_dir):
-        module_lint = nf_core.modules.ModuleLint(
-            dir=self.pipeline_dir,
-            remote_url=GITLAB_URL,
-            branch=PATCH_BRANCH,
-            hide_progress=True,
-        )
-        module_lint.lint(
-            print_results=False,
-            all_modules=True,
-        )
-
-    assert len(module_lint.failed) == 1
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-
-
-# A skeleton object with the passed/warned/failed list attrs
-# Use this in place of a ModuleLint object to test behaviour of
-# linting methods which don't need the full setup
-class MockModuleLint:
-    def __init__(self):
-        self.passed = []
-        self.warned = []
-        self.failed = []
-
-        self.main_nf = "main_nf"
-
-
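-# Each test case below is a tuple of
-# (process snippet, expected passed count, expected warned count, expected failed count).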
-PROCESS_LABEL_GOOD = (
-    """
-    label 'process_high'
-    cpus 12
-    """,
-    1,
-    0,
-    0,
-)
-PROCESS_LABEL_NON_ALPHANUMERIC = (
-    """
-    label 'a:label:with:colons'
-    cpus 12
-    """,
-    0,
-    2,
-    0,
-)
-PROCESS_LABEL_GOOD_CONFLICTING = (
-    """
-    label 'process_high'
-    label 'process_low'
-    cpus 12
-    """,
-    0,
-    1,
-    0,
-)
-PROCESS_LABEL_GOOD_DUPLICATES = (
-    """
-    label 'process_high'
-    label 'process_high'
-    cpus 12
-    """,
-    0,
-    2,
-    0,
-)
-PROCESS_LABEL_GOOD_AND_NONSTANDARD = (
-    """
-    label 'process_high'
-    label 'process_extra_label'
-    cpus 12
-    """,
-    1,
-    1,
-    0,
-)
-PROCESS_LABEL_NONSTANDARD = (
-    """
-    label 'process_extra_label'
-    cpus 12
-    """,
-    0,
-    2,
-    0,
-)
-PROCESS_LABEL_NONSTANDARD_DUPLICATES = (
-    """
-    label process_extra_label
-    label process_extra_label
-    cpus 12
-    """,
-    0,
-    3,
-    0,
-)
-PROCESS_LABEL_NONE_FOUND = (
-    """
-    cpus 12
-    """,
-    0,
-    1,
-    0,
-)
-
-PROCESS_LABEL_TEST_CASES = [
-    PROCESS_LABEL_GOOD,
-    PROCESS_LABEL_NON_ALPHANUMERIC,
-    PROCESS_LABEL_GOOD_CONFLICTING,
-    PROCESS_LABEL_GOOD_DUPLICATES,
-    PROCESS_LABEL_GOOD_AND_NONSTANDARD,
-    PROCESS_LABEL_NONSTANDARD,
-    PROCESS_LABEL_NONSTANDARD_DUPLICATES,
-    PROCESS_LABEL_NONE_FOUND,
-]
-
-
-def test_modules_lint_check_process_labels(self):
-    for test_case in PROCESS_LABEL_TEST_CASES:
-        process, passed, warned, failed = test_case
-        mocked_ModuleLint = MockModuleLint()
-        main_nf.check_process_labels(mocked_ModuleLint, process.splitlines())
-        assert len(mocked_ModuleLint.passed) == passed
-        assert len(mocked_ModuleLint.warned) == warned
-        assert len(mocked_ModuleLint.failed) == failed
-
-
-# Test cases for linting the container definitions
-
-CONTAINER_SINGLE_GOOD = (
-    "Single-line container definition should pass",
-    """
-    container "quay.io/nf-core/gatk:4.4.0.0" //Biocontainers is missing a package
-    """,
-    2,  # passed
-    0,  # warned
-    0,  # failed
-)
-
-CONTAINER_TWO_LINKS_GOOD = (
-    "Multi-line container definition should pass",
-    """
-    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
-        'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
-    """,
-    6,
-    0,
-    0,
-)
-
-CONTAINER_WITH_SPACE_BAD = (
-    "Space in container URL should fail",
-    """
-    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0 ':
-        'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
-    """,
-    5,
-    0,
-    1,
-)
-
-CONTAINER_MULTIPLE_DBLQUOTES_BAD = (
-    "Incorrect quoting of container string should fail",
-    """
-    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0 ':
-        "biocontainers/gatk4:4.4.0.0--py36hdfd78af_0" }"
-    """,
-    4,
-    0,
-    1,
-)
-
-CONTAINER_TEST_CASES = [
-    CONTAINER_SINGLE_GOOD,
-    CONTAINER_TWO_LINKS_GOOD,
-    CONTAINER_WITH_SPACE_BAD,
-    CONTAINER_MULTIPLE_DBLQUOTES_BAD,
-]
-
-
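-# Container test cases additionally carry a leading description, which is used in
-# the assertion messages of test_modules_lint_check_url below.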
module="bpipe/test") - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_environment_yml_file_doesnt_exists(self): - """Test linting a module with an environment.yml file""" - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml").rename( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml.bak", - ) - ) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml.bak", - ).rename( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ) - ) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "environment_yml_exists" - - -def test_modules_environment_yml_file_sorted_correctly(self): - """Test linting a module with a correctly sorted environment.yml file""" - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_environment_yml_file_sorted_incorrectly(self): - """Test linting a module with an incorrectly sorted environment.yml file""" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ) - ) as fh: - yaml_content = yaml.safe_load(fh) - # Add a new dependency to the environment.yml file and reverse the order - yaml_content["dependencies"].append("z") - yaml_content["dependencies"].reverse() - yaml_content = yaml.dump(yaml_content) - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ), - "w", - ) as fh: - fh.write(yaml_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - # we fix the sorting on the fly, so this should pass - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_environment_yml_file_not_array(self): - """Test linting a module with an incorrectly formatted environment.yml file""" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ) - ) as fh: - yaml_content = yaml.safe_load(fh) - yaml_content["dependencies"] = "z" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ), - "w", - ) as fh: - fh.write(yaml.dump(yaml_content)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "environment_yml_valid" - - -def 
-def test_modules_environment_yml_file_name_mismatch(self):
-    """Test linting a module with a different name in the environment.yml file"""
-    with open(
-        Path(
-            self.nfcore_modules,
-            "modules",
-            "nf-core",
-            "bpipe",
-            "test",
-            "environment.yml",
-        )
-    ) as fh:
-        yaml_content = yaml.safe_load(fh)
-    yaml_content["name"] = "bpipe-test"
-    with open(
-        Path(
-            self.nfcore_modules,
-            "modules",
-            "nf-core",
-            "bpipe",
-            "test",
-            "environment.yml",
-        ),
-        "w",
-    ) as fh:
-        fh.write(yaml.dump(yaml_content))
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-    # reset changes
-    yaml_content["name"] = "bpipe_test"
-    with open(
-        Path(
-            self.nfcore_modules,
-            "modules",
-            "nf-core",
-            "bpipe",
-            "test",
-            "environment.yml",
-        ),
-        "w",
-    ) as fh:
-        fh.write(yaml.dump(yaml_content))
-
-    assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-    assert module_lint.failed[0].lint_test == "environment_yml_name"
-
-
-def test_modules_meta_yml_incorrect_licence_field(self):
-    """Test linting a module with an incorrect Licence field in meta.yml"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh:
-        meta_yml = yaml.safe_load(fh)
-    meta_yml["tools"][0]["bpipe"]["licence"] = "[MIT]"
-    with open(
-        Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"),
-        "w",
-    ) as fh:
-        fh.write(yaml.dump(meta_yml))
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-
-    # reset changes
-    meta_yml["tools"][0]["bpipe"]["licence"] = ["MIT"]
-    with open(
-        Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"),
-        "w",
-    ) as fh:
-        fh.write(yaml.dump(meta_yml))
-
-    assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) >= 0
-    assert len(module_lint.warned) >= 0
-    assert module_lint.failed[0].lint_test == "meta_yml_valid"
-
-
-def test_modules_meta_yml_input_mismatch(self):
-    """Test linting a module with an extra entry in input fields in meta.yml compared to module.input"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh:
-        main_nf = fh.read()
-    main_nf_new = main_nf.replace("path bam", "path bai")
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh:
-        fh.write(main_nf_new)
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh:
-        fh.write(main_nf)
-    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) >= 0
-    assert len(module_lint.warned) == 2
-    lint_tests = [x.lint_test for x in module_lint.warned]
-    # check that both warnings are present, one for each side of the mismatch:
-    assert lint_tests.count("meta_input_meta_only") == 1
-    assert lint_tests.count("meta_input_main_only") == 1
-
-
main_nf.replace("emit: bam", "emit: bai") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: - fh.write(main_nf_new) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: - fh.write(main_nf) - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) == 2 - lint_tests = [x.lint_test for x in module_lint.warned] - # check that it is there twice: - assert lint_tests.count("meta_output_meta_only") == 1 - assert lint_tests.count("meta_output_main_only") == 1 - - -def test_modules_meta_yml_incorrect_name(self): - """Test linting a module with an incorrect name in meta.yml""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: - meta_yml = yaml.safe_load(fh) - meta_yml["name"] = "bpipe/test" - # need to make the same change to the environment.yml file - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ) - ) as fh: - environment_yml = yaml.safe_load(fh) - environment_yml["name"] = "bpipe/test" - with open( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), - "w", - ) as fh: - fh.write(yaml.dump(meta_yml)) - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ), - "w", - ) as fh: - fh.write(yaml.dump(environment_yml)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - - # reset changes - meta_yml["name"] = "bpipe_test" - with open( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), - "w", - ) as fh: - fh.write(yaml.dump(meta_yml)) - environment_yml["name"] = "bpipe_test" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ), - "w", - ) as fh: - fh.write(yaml.dump(environment_yml)) - - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "meta_name" - - -def test_modules_missing_test_dir(self): - """Test linting a module with a missing test directory""" - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests").rename( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak") - ) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak").rename( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests") - ) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_dir_exists" - - -def test_modules_missing_test_main_nf(self): - """Test linting a module with a missing test/main.nf file""" - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test", - ).rename( - Path( - self.nfcore_modules, - "modules", - "nf-core", - 
"bpipe", - "test", - "tests", - "main.nf.test.bak", - ) - ) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test.bak", - ).rename( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test", - ) - ) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_main_nf_exists" - - -def test_modules_unused_pytest_files(self): - """Test linting a nf-test module with files still present in `tests/modules/`""" - Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").mkdir(parents=True, exist_ok=True) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").rmdir() - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_old_test_dir" - - -def test_nftest_failing_linting(self): - """Test linting a module which includes other modules in nf-test tests. - Linting tests""" - # Clone modules repo with testing modules - tmp_dir = self.nfcore_modules.parent - self.nfcore_modules = Path(tmp_dir, "modules-test") - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_NFTEST_BRANCH) - - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="kallisto/quant") - - assert len(module_lint.failed) == 3, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "environment_yml_valid" - assert module_lint.failed[1].lint_test == "meta_yml_valid" - assert module_lint.failed[2].lint_test == "test_main_tags" - assert "kallisto/index" in module_lint.failed[2].message - - -def test_modules_absent_version(self): - """Test linting a nf-test module if the versions is absent in the snapshot file `""" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test.snap", - ) - ) as fh: - content = fh.read() - new_content = content.replace("versions", "foo") - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test.snap", - ), - "w", - ) as fh: - fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test.snap", - ), - "w", - ) as fh: - fh.write(content) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_snap_versions" - - -def test_modules_empty_file_in_snapshot(self): - """Test linting a nf-test module with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)""" - snap_file = Path( - 
-def test_modules_empty_file_in_snapshot(self):
-    """Test linting an nf-test module with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)"""
-    snap_file = Path(
-        self.nfcore_modules,
-        "modules",
-        "nf-core",
-        "bpipe",
-        "test",
-        "tests",
-        "main.nf.test.snap",
-    )
-    snap = json.load(snap_file.open())
-    content = snap_file.read_text()
-    snap["my test"]["content"][0]["0"] = "test:md5,d41d8cd98f00b204e9800998ecf8427e"
-
-    with open(snap_file, "w") as fh:
-        json.dump(snap, fh)
-
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-    assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-    assert module_lint.failed[0].lint_test == "test_snap_md5sum"
-
-    # reset the file
-    with open(snap_file, "w") as fh:
-        fh.write(content)
-
-
-def test_modules_empty_file_in_stub_snapshot(self):
-    """Test linting an nf-test module with an empty file sha sum in the stub test snapshot, which should not fail"""
-    snap_file = Path(
-        self.nfcore_modules,
-        "modules",
-        "nf-core",
-        "bpipe",
-        "test",
-        "tests",
-        "main.nf.test.snap",
-    )
-    snap = json.load(snap_file.open())
-    content = snap_file.read_text()
-    snap["my_test_stub"] = {"content": [{"0": "test:md5,d41d8cd98f00b204e9800998ecf8427e", "versions": {}}]}
-
-    with open(snap_file, "w") as fh:
-        json.dump(snap, fh)
-
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-    assert any(x.lint_test == "test_snap_md5sum" for x in module_lint.passed)
-
-    # reset the file
-    with open(snap_file, "w") as fh:
-        fh.write(content)
diff --git a/tests/modules/list.py b/tests/modules/list.py
deleted file mode 100644
index 3cb00a84d6..0000000000
--- a/tests/modules/list.py
+++ /dev/null
@@ -1,134 +0,0 @@
-import json
-from pathlib import Path
-
-import yaml
-from rich.console import Console
-
-import nf_core.modules
-
-from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL
-
-
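-# list_components() returns a rich renderable, so these tests print it to a
-# recording Console and assert on the exported plain text.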
-def test_modules_list_remote(self):
-    """Test listing available modules"""
-    mods_list = nf_core.modules.ModuleList(None, remote=True)
-    listed_mods = mods_list.list_components()
-    console = Console(record=True)
-    console.print(listed_mods)
-    output = console.export_text()
-    assert "fastqc" in output
-
-
-def test_modules_list_remote_gitlab(self):
-    """Test listing the modules in the remote gitlab repo"""
-    mods_list = nf_core.modules.ModuleList(None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH)
-    listed_mods = mods_list.list_components()
-    console = Console(record=True)
-    console.print(listed_mods)
-    output = console.export_text()
-    assert "fastqc" in output
-
-
-def test_modules_list_pipeline(self):
-    """Test listing locally installed modules"""
-    mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False)
-    listed_mods = mods_list.list_components()
-    console = Console(record=True)
-    console.print(listed_mods)
-    output = console.export_text()
-    assert "fastqc" in output
-    assert "multiqc" in output
-
-
-def test_modules_install_and_list_pipeline(self):
-    """Test installing a module and then listing it among the locally installed modules"""
-    self.mods_install.install("trimgalore")
-    mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False)
-    listed_mods = mods_list.list_components()
-    console = Console(record=True)
-    console.print(listed_mods)
-    output = console.export_text()
-    assert "trimgalore" in output
-
-
-def test_modules_install_gitlab_and_list_pipeline(self):
-    """Test installing a module from GitLab and then listing it among the locally installed modules"""
-    self.mods_install_gitlab.install("fastqc")
-    mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False)
-    listed_mods = mods_list.list_components()
-    console = Console(record=True)
-    console.print(listed_mods)
-    output = console.export_text()
-    assert "fastqc" in output
-
-
-def test_modules_list_local_json(self):
-    """Test listing locally installed modules as JSON"""
-    mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False)
-    listed_mods = mods_list.list_components(print_json=True)
-    listed_mods = json.loads(listed_mods)
-    assert "fastqc" in listed_mods
-    assert "multiqc" in listed_mods
-
-
-def test_modules_list_remote_json(self):
-    """Test listing available modules as JSON"""
-    mods_list = nf_core.modules.ModuleList(None, remote=True)
-    listed_mods = mods_list.list_components(print_json=True)
-    listed_mods = json.loads(listed_mods)
-    assert "fastqc" in listed_mods
-    assert "multiqc" in listed_mods
-
-
-def test_modules_list_with_one_keyword(self):
-    """Test listing available modules with one keyword"""
-    mods_list = nf_core.modules.ModuleList(None, remote=True)
-    listed_mods = mods_list.list_components(keywords=["qc"])
-    console = Console(record=True)
-    console.print(listed_mods)
-    output = console.export_text()
-    assert "multiqc" in output
-
-
-def test_modules_list_with_keywords(self):
-    """Test listing available modules with multiple keywords"""
-    mods_list = nf_core.modules.ModuleList(None, remote=True)
-    listed_mods = mods_list.list_components(keywords=["fastq", "qc"])
-    console = Console(record=True)
-    console.print(listed_mods)
-    output = console.export_text()
-    assert "fastqc" in output
-
-
-def test_modules_list_with_unused_keyword(self):
-    """Test listing available modules with an unused keyword"""
-    mods_list = nf_core.modules.ModuleList(None, remote=True)
-    with self.assertLogs(level="INFO") as log:
-        listed_mods = mods_list.list_components(keywords=["you_will_never_find_me"])
-        self.assertIn("No available", log.output[0])
-    # expect empty list
-    assert listed_mods == ""
-
-
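-# Rewriting .nf-core.yml so the directory claims to be a modules repo should make
-# the pipeline-only list command below fail with an error.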
-def test_modules_list_in_wrong_repo_fail(self):
-    """Test listing available modules in a non-pipeline repo"""
-    # modify repotype in .nf-core.yml
-    with open(Path(self.pipeline_dir, ".nf-core.yml")) as fh:
-        nf_core_yml = yaml.safe_load(fh)
-    nf_core_yml_orig = nf_core_yml.copy()
-    nf_core_yml["repository_type"] = "modules"
-    nf_core_yml["org_path"] = "nf-core"
-    with open(Path(self.pipeline_dir, ".nf-core.yml"), "w") as fh:
-        yaml.safe_dump(nf_core_yml, fh)
-    # expect error logged
-    with self.assertLogs(level="ERROR") as log:
-        mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False)
-        listed_mods = mods_list.list_components()
-        self.assertIn("must be run from a pipeline directory", log.output[0])
-    # expect empty list
-    assert listed_mods == ""
-    # restore .nf-core.yml
-    with open(Path(self.pipeline_dir, ".nf-core.yml"), "w") as fh:
-        yaml.safe_dump(nf_core_yml_orig, fh)
diff --git a/tests/modules/modules_json.py b/tests/modules/modules_json.py
deleted file mode 100644
index e0100adfb7..0000000000
--- a/tests/modules/modules_json.py
+++ /dev/null
@@ -1,253 +0,0 @@
-import copy
-import json
-import shutil
-from pathlib import Path
-
-from nf_core.modules.modules_json import ModulesJson
-from nf_core.modules.modules_repo import (
-    NF_CORE_MODULES_DEFAULT_BRANCH,
-    NF_CORE_MODULES_NAME,
-    NF_CORE_MODULES_REMOTE,
-    ModulesRepo,
-)
-from nf_core.modules.patch import ModulePatch
-
-
-def test_get_modules_json(self):
-    """Checks that the get_modules_json function returns the correct result"""
-    mod_json_path = Path(self.pipeline_dir, "modules.json")
-    with open(mod_json_path) as fh:
-        try:
-            mod_json_sb = json.load(fh)
-        except json.JSONDecodeError as e:
-            raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}")
-
-    mod_json_obj = ModulesJson(self.pipeline_dir)
-    mod_json = mod_json_obj.get_modules_json()
-
-    # Check that the modules.json hasn't changed
-    assert mod_json == mod_json_sb
-
-
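-# The update() call below uses placeholder values ("MODULE_NAME", "GIT_SHA");
-# only the bookkeeping written to modules.json matters for this test, not a real module.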
mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] - assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] - - # Check that fastqc/main.nf maintains the changes - with open(module_path / "main.nf") as fh: - lines = fh.readlines() - assert lines[1] == ' tag "$meta.single_end"\n' - - -def test_mod_json_up_to_date(self): - """ - Checks if the modules.json file is up to date - when no changes have been made to the pipeline - """ - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json_before = mod_json_obj.get_modules_json() - mod_json_obj.check_up_to_date() - mod_json_after = mod_json_obj.get_modules_json() - - # Check that the modules.json hasn't changed - assert mod_json_before == mod_json_after - - -def test_mod_json_up_to_date_module_removed(self): - """ - Reinstall a module that has an entry in the modules.json - but is missing in the pipeline - """ - # Remove the fastqc module - fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") - shutil.rmtree(fastqc_path) - - # Check that the modules.json file is up to date, and reinstall the module - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json_obj.check_up_to_date() - - # Check that the module has been reinstalled - files = ["main.nf", "meta.yml"] - assert fastqc_path.exists() - for f in files: - assert Path(fastqc_path, f).exists() - - -def test_mod_json_up_to_date_reinstall_fails(self): - """ - Try reinstalling a module where the git_sha is invalid - """ - mod_json_obj = ModulesJson(self.pipeline_dir) - - # Update the fastqc module entry to an invalid git_sha - mod_json_obj.update("modules", ModulesRepo(), "fastqc", "INVALID_GIT_SHA", "modules", write_file=True) - - # Remove the fastqc module - fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") - shutil.rmtree(fastqc_path) - - # Check that the modules.json file is up to date, and remove the fastqc module entry - mod_json_obj.check_up_to_date() - mod_json = mod_json_obj.get_modules_json() - - # Check that the module has been removed from the modules.json - assert "fastqc" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] - - -def test_mod_json_repo_present(self): - """Tests the repo_present function""" - mod_json_obj = ModulesJson(self.pipeline_dir) - - assert mod_json_obj.repo_present(NF_CORE_MODULES_REMOTE) is True - assert mod_json_obj.repo_present("INVALID_REPO") is False - - -def test_mod_json_module_present(self): - """Tests the module_present function""" - mod_json_obj = ModulesJson(self.pipeline_dir) - - assert mod_json_obj.module_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is True - assert mod_json_obj.module_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is False - assert mod_json_obj.module_present("fastqc", "INVALID_REPO", "INVALID_DIR") is False - assert mod_json_obj.module_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR") is False - - -def test_mod_json_get_module_version(self): - """Test the get_module_version function""" - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - assert ( - mod_json_obj.get_module_version("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) - == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"]["git_sha"] - ) - assert mod_json_obj.get_module_version("INVALID_MODULE", NF_CORE_MODULES_REMOTE, 
-def test_mod_json_up_to_date_reinstall_fails(self):
-    """
-    Try reinstalling a module where the git_sha is invalid
-    """
-    mod_json_obj = ModulesJson(self.pipeline_dir)
-
-    # Update the fastqc module entry to an invalid git_sha
-    mod_json_obj.update("modules", ModulesRepo(), "fastqc", "INVALID_GIT_SHA", "modules", write_file=True)
-
-    # Remove the fastqc module
-    fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc")
-    shutil.rmtree(fastqc_path)
-
-    # Check that the modules.json file is up to date, and remove the fastqc module entry
-    mod_json_obj.check_up_to_date()
-    mod_json = mod_json_obj.get_modules_json()
-
-    # Check that the module has been removed from the modules.json
-    assert "fastqc" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]
-
-
-def test_mod_json_repo_present(self):
-    """Tests the repo_present function"""
-    mod_json_obj = ModulesJson(self.pipeline_dir)
-
-    assert mod_json_obj.repo_present(NF_CORE_MODULES_REMOTE) is True
-    assert mod_json_obj.repo_present("INVALID_REPO") is False
-
-
-def test_mod_json_module_present(self):
-    """Tests the module_present function"""
-    mod_json_obj = ModulesJson(self.pipeline_dir)
-
-    assert mod_json_obj.module_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is True
-    assert mod_json_obj.module_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is False
-    assert mod_json_obj.module_present("fastqc", "INVALID_REPO", "INVALID_DIR") is False
-    assert mod_json_obj.module_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR") is False
-
-
-def test_mod_json_get_module_version(self):
-    """Test the get_module_version function"""
-    mod_json_obj = ModulesJson(self.pipeline_dir)
-    mod_json = mod_json_obj.get_modules_json()
-    assert (
-        mod_json_obj.get_module_version("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME)
-        == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"]["git_sha"]
-    )
-    assert mod_json_obj.get_module_version("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is None
-
-
-def test_mod_json_dump(self):
-    """Tests the dump function"""
-    mod_json_obj = ModulesJson(self.pipeline_dir)
-    mod_json = mod_json_obj.get_modules_json()
-    # Remove the modules.json file
-    mod_json_path = Path(self.pipeline_dir, "modules.json")
-    mod_json_path.unlink()
-
-    # Check that the dump function creates the file
-    mod_json_obj.dump()
-    assert mod_json_path.exists()
-
-    # Check that the dump function writes the correct content
-    with open(mod_json_path) as f:
-        try:
-            mod_json_new = json.load(f)
-        except json.JSONDecodeError as e:
-            raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}")
-    assert mod_json == mod_json_new
-
-
-def test_mod_json_with_empty_modules_value(self):
-    # Load modules.json and empty out the modules entry
-    mod_json_obj = ModulesJson(self.pipeline_dir)
-    mod_json_obj.create()  # Create modules.json explicitly to get correct module sha
-    mod_json_orig = mod_json_obj.get_modules_json()
-    mod_json = copy.deepcopy(mod_json_orig)
-    mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"] = ""
-    # save the altered modules.json and load it again to check if it will fix itself
-    mod_json_obj.modules_json = mod_json
-    mod_json_obj.dump()
-    mod_json_obj_new = ModulesJson(self.pipeline_dir)
-    mod_json_obj_new.check_up_to_date()
-    mod_json_new = mod_json_obj_new.get_modules_json()
-    assert mod_json_orig == mod_json_new
-
-
-def test_mod_json_with_missing_modules_entry(self):
-    # Load modules.json and remove the modules entry
-    mod_json_obj = ModulesJson(self.pipeline_dir)
-    mod_json_obj.create()  # Create modules.json explicitly to get correct module sha
-    mod_json_orig = mod_json_obj.get_modules_json()
-    mod_json = copy.deepcopy(mod_json_orig)
-    mod_json["repos"][NF_CORE_MODULES_REMOTE].pop("modules")
-    # save the altered modules.json and load it again to check if it will fix itself
-    mod_json_obj.modules_json = mod_json
-    mod_json_obj.dump()
-    mod_json_obj_new = ModulesJson(self.pipeline_dir)
-    mod_json_obj_new.check_up_to_date()
-    mod_json_new = mod_json_obj_new.get_modules_json()
-    assert mod_json_orig == mod_json_new
diff --git a/tests/modules/patch.py b/tests/modules/patch.py
deleted file mode 100644
index 513ea8a433..0000000000
--- a/tests/modules/patch.py
+++ /dev/null
@@ -1,360 +0,0 @@
-import os
-import tempfile
-from pathlib import Path
-from unittest import mock
-
-import pytest
-
-import nf_core.components.components_command
-import nf_core.modules
-
-from ..utils import GITLAB_URL
-
-"""
-Test the 'nf-core modules patch' command
-
-Uses a branch (patch-tester) in the GitLab nf-core/modules-test repo when
-testing if the update command works correctly with patch files
-"""
-
-ORG_SHA = "002623ccc88a3b0cb302c7d8f13792a95354d9f2"
-CORRECT_SHA = "1dff30bfca2d98eb7ac7b09269a15e822451d99f"
-SUCCEED_SHA = "ba15c20c032c549d77c5773659f19c2927daf48e"
-FAIL_SHA = "67b642d4471c4005220a342cad3818d5ba2b5a73"
-BISMARK_ALIGN = "bismark/align"
-REPO_NAME = "nf-core-test"
-PATCH_BRANCH = "patch-tester"
-REPO_URL = "https://gitlab.com/nf-core/modules-test.git"
-
-
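-# ORG_SHA is the revision the module is installed at; judging by the tests below,
-# SUCCEED_SHA is a later revision the patch still applies to, while FAIL_SHA
-# introduces a conflicting change.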
test patch creation""" - with open(path) as fh: - lines = fh.readlines() - # We want a patch file that looks something like: - # - tuple val(meta), path(reads) - # - path index - # + tuple val(meta), path(reads), path(index) - for line_index in range(len(lines)): - if lines[line_index] == " tuple val(meta), path(reads)\n": - lines[line_index] = " tuple val(meta), path(reads), path(index)\n" - elif lines[line_index] == " path index\n": - to_pop = line_index - lines.pop(to_pop) - with open(path, "w") as fh: - fh.writelines(lines) - - -def test_create_patch_no_change(self): - """Test creating a patch when there is no change to the module""" - setup_patch(self.pipeline_dir, False) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - with pytest.raises(UserWarning): - patch_obj.patch(BISMARK_ALIGN) - - module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - - # Check that no patch file has been added to the directory - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"} - - # Check the 'modules.json' contains no patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None - - -def test_create_patch_change(self): - """Test creating a patch when there is a change to the module""" - setup_patch(self.pipeline_dir, True) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - patch_obj.patch(BISMARK_ALIGN) - - module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - - patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ) - - # Check that the correct lines are in the patch file - with open(module_path / patch_fn) as fh: - patch_lines = fh.readlines() - module_relpath = module_path.relative_to(self.pipeline_dir) - assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines, module_relpath / "main.nf" - assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines - assert "- tuple val(meta), path(reads)\n" in patch_lines - assert "- path index\n" in patch_lines - assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines - - -def test_create_patch_try_apply_successful(self): - """ - Test creating a patch file and applying it to a new version of the the files - """ - - setup_patch(self.pipeline_dir, True) - module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN) - module_path = Path(self.pipeline_dir, module_relpath) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - patch_obj.patch(BISMARK_ALIGN) - - patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert 
-def test_create_patch_change(self):
-    """Test creating a patch when there is a change to the module"""
-    setup_patch(self.pipeline_dir, True)
-
-    # Try creating a patch file
-    patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
-    patch_obj.patch(BISMARK_ALIGN)
-
-    module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN)
-
-    patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
-    # Check that a patch file with the correct name has been created
-    assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
-
-    # Check the 'modules.json' contains a patch file for the module
-    modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
-    assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
-        "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
-    )
-
-    # Check that the correct lines are in the patch file
-    with open(module_path / patch_fn) as fh:
-        patch_lines = fh.readlines()
-    module_relpath = module_path.relative_to(self.pipeline_dir)
-    assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines, module_relpath / "main.nf"
-    assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines
-    assert "-    tuple val(meta), path(reads)\n" in patch_lines
-    assert "-    path index\n" in patch_lines
-    assert "+    tuple val(meta), path(reads), path(index)\n" in patch_lines
-
-
-def test_create_patch_try_apply_successful(self):
-    """
-    Test creating a patch file and applying it to a new version of the files
-    """
-    setup_patch(self.pipeline_dir, True)
-    module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN)
-    module_path = Path(self.pipeline_dir, module_relpath)
-
-    # Try creating a patch file
-    patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
-    patch_obj.patch(BISMARK_ALIGN)
-
-    patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
-    # Check that a patch file with the correct name has been created
-    assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
-
-    # Check the 'modules.json' contains a patch file for the module
-    modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
-    assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
-        "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
-    )
-
-    update_obj = nf_core.modules.ModuleUpdate(
-        self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH
-    )
-    # Install the new files
-    install_dir = Path(tempfile.mkdtemp())
-    update_obj.install_component_files(BISMARK_ALIGN, SUCCEED_SHA, update_obj.modules_repo, install_dir)
-
-    # Try applying the patch
-    module_install_dir = install_dir / BISMARK_ALIGN
-    patch_relpath = module_relpath / patch_fn
-    assert update_obj.try_apply_patch(BISMARK_ALIGN, REPO_NAME, patch_relpath, module_path, module_install_dir) is True
-
-    # Move the files from the temporary directory
-    update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA)
-
-    # Check that a patch file with the correct name has been created
-    assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
-
-    # Check the 'modules.json' contains a patch file for the module
-    modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
-    assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
-        "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
-    )
-
-    # Check that the correct lines are in the patch file
-    with open(module_path / patch_fn) as fh:
-        patch_lines = fh.readlines()
-    module_relpath = module_path.relative_to(self.pipeline_dir)
-    assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines
-    assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines
-    assert "-    tuple val(meta), path(reads)\n" in patch_lines
-    assert "-    path index\n" in patch_lines
-    assert "+    tuple val(meta), path(reads), path(index)\n" in patch_lines
-
-    # Check that 'main.nf' is updated correctly
-    with open(module_path / "main.nf") as fh:
-        main_nf_lines = fh.readlines()
-    # These lines should have been removed by the patch
-    assert "    tuple val(meta), path(reads)\n" not in main_nf_lines
-    assert "    path index\n" not in main_nf_lines
-    # This line should have been added
-    assert "    tuple val(meta), path(reads), path(index)\n" in main_nf_lines
-
-
-def test_create_patch_try_apply_failed(self):
-    """
-    Test creating a patch file and failing to apply it to a conflicting new version of the files
-    """
-    setup_patch(self.pipeline_dir, True)
-    module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN)
-    module_path = Path(self.pipeline_dir, module_relpath)
-
-    # Try creating a patch file
-    patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
-    patch_obj.patch(BISMARK_ALIGN)
-
-    patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
-    # Check that a patch file with the correct name has been created
-    assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
-
-    # Check the 'modules.json' contains a patch file for the module
-    modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
-    assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
-        "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
-    )
-
-    update_obj = nf_core.modules.ModuleUpdate(
-        self.pipeline_dir, sha=FAIL_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH
-    )
-    # Install the new files
-    install_dir = Path(tempfile.mkdtemp())
-    update_obj.install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, install_dir)
-
-    # Try applying the patch
-    module_install_dir = install_dir / BISMARK_ALIGN
-    patch_path = module_relpath / patch_fn
-    assert update_obj.try_apply_patch(BISMARK_ALIGN, REPO_NAME, patch_path, module_path, module_install_dir) is False
-
-
-def test_create_patch_update_success(self):
-    """
-    Test creating a patch file and then updating the module
-
-    Should have the same effect as 'test_create_patch_try_apply_successful'
-    but uses the higher-level API
-    """
-    setup_patch(self.pipeline_dir, True)
-    module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN)
-
-    # Try creating a patch file
-    patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
-    patch_obj.patch(BISMARK_ALIGN)
-
-    patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
-    # Check that a patch file with the correct name has been created
-    assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
-
-    # Check the 'modules.json' contains a patch file for the module
-    modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
-    assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path(
-        "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
-    )
-
-    # Update the module
-    update_obj = nf_core.modules.ModuleUpdate(
-        self.pipeline_dir,
-        sha=SUCCEED_SHA,
-        show_diff=False,
-        update_deps=True,
-        remote_url=GITLAB_URL,
-        branch=PATCH_BRANCH,
-    )
-    assert update_obj.update(BISMARK_ALIGN)
-
-    # Check that a patch file with the correct name has been created
-    assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
-
-    # Check the 'modules.json' contains a patch file for the module
-    modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
-    assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path(
-        "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
-    ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME)
-
-    # Check that the correct lines are in the patch file
-    with open(module_path / patch_fn) as fh:
-        patch_lines = fh.readlines()
-    module_relpath = module_path.relative_to(self.pipeline_dir)
-    assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines
-    assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines
-    assert "-    tuple val(meta), path(reads)\n" in patch_lines
-    assert "-    path index\n" in patch_lines
-    assert "+    tuple val(meta), path(reads), path(index)\n" in patch_lines
-
-    # Check that 'main.nf' is updated correctly
-    with open(module_path / "main.nf") as fh:
-        main_nf_lines = fh.readlines()
-    # These lines should have been removed by the patch
-    assert "    tuple val(meta), path(reads)\n" not in main_nf_lines
-    assert "    path index\n" not in main_nf_lines
-    # This line should have been added
-    assert "    tuple val(meta), path(reads), path(index)\n" in main_nf_lines
-
-
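-# A conflicting upstream version must leave both the installed module files and the
-# patch file untouched; the next test verifies exactly that.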
-def test_create_patch_update_fail(self):
-    """
-    Test creating a patch file and updating a module when there is a diff conflict
-    """
-    setup_patch(self.pipeline_dir, True)
-    module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN)
-
-    # Try creating a patch file
-    patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
-    patch_obj.patch(BISMARK_ALIGN)
-
-    patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
-    # Check that a patch file with the correct name has been created
-    assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
-
-    # Check the 'modules.json' contains a patch file for the module
-    modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
-    assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
-        "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
-    )
-
-    # Save the file contents for downstream comparison
-    with open(module_path / patch_fn) as fh:
-        patch_contents = fh.read()
-
-    update_obj = nf_core.modules.ModuleUpdate(
-        self.pipeline_dir, sha=FAIL_SHA, show_diff=False, update_deps=True, remote_url=GITLAB_URL, branch=PATCH_BRANCH
-    )
-    update_obj.update(BISMARK_ALIGN)
-
-    # Check that the installed files have not been affected by the attempted patch
-    temp_dir = Path(tempfile.mkdtemp())
-    nf_core.components.components_command.ComponentCommand(
-        "modules", self.pipeline_dir, GITLAB_URL, PATCH_BRANCH
-    ).install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, temp_dir)
-
-    temp_module_dir = temp_dir / BISMARK_ALIGN
-    for file in os.listdir(temp_module_dir):
-        assert file in os.listdir(module_path)
-        with open(module_path / file) as fh:
-            installed = fh.read()
-        with open(temp_module_dir / file) as fh:
-            shouldbe = fh.read()
-        assert installed == shouldbe
-
-    # Check that the patch file is unaffected
-    with open(module_path / patch_fn) as fh:
-        new_patch_contents = fh.read()
-    assert patch_contents == new_patch_contents
-
-
-def test_remove_patch(self):
-    """Test removing a patch file from a patched module"""
-    setup_patch(self.pipeline_dir, True)
-
-    # Try creating a patch file
-    patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
-    patch_obj.patch(BISMARK_ALIGN)
-
-    module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN)
-
-    # Check that a patch file with the correct name has been created
-    patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
-    assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
-
-    # Check the 'modules.json' contains a patch file for the module
-    modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
-    assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
-        "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
-    )
-
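-    # Mock the questionary confirmation prompt so the removal runs non-interactively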
module without installing it""" - assert self.mods_remove.remove("trimgalore") is False - - -def test_modules_remove_multiqc_from_gitlab(self): - """Test removing multiqc module after installing it from an alternative source""" - self.mods_install_gitlab.install("multiqc") - module_path = os.path.join(self.mods_install_gitlab.dir, "modules", "nf-core-test", "multiqc") - assert self.mods_remove_gitlab.remove("multiqc", force=True) - assert os.path.exists(module_path) is False diff --git a/tests/modules/test_bump_versions.py b/tests/modules/test_bump_versions.py new file mode 100644 index 0000000000..29e030668d --- /dev/null +++ b/tests/modules/test_bump_versions.py @@ -0,0 +1,50 @@ +import os +import re + +import pytest + +import nf_core.modules +from nf_core.modules.modules_utils import ModuleExceptionError + +from ..test_modules import TestModules + + +class TestModulesBumpVersions(TestModules): + def test_modules_bump_versions_single_module(self): + """Test updating a single module""" + # Change the bpipe/test version to an older version + env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") + with open(env_yml_path) as fh: + content = fh.read() + new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content) + with open(env_yml_path, "w") as fh: + fh.write(new_content) + version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + version_bumper.bump_versions(module="bpipe/test") + assert len(version_bumper.failed) == 0 + + def test_modules_bump_versions_all_modules(self): + """Test updating all modules""" + version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + version_bumper.bump_versions(all_modules=True) + assert len(version_bumper.failed) == 0 + + def test_modules_bump_versions_fail(self): + """Fail updating a module with wrong name""" + version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + with pytest.raises(ModuleExceptionError) as excinfo: + version_bumper.bump_versions(module="no/module") + assert "Could not find the specified module:" in str(excinfo.value) + + def test_modules_bump_versions_fail_unknown_version(self): + """Fail because of an unknown version""" + # Change the bpipe/test version to an older version + env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") + with open(env_yml_path) as fh: + content = fh.read() + new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content) + with open(env_yml_path, "w") as fh: + fh.write(new_content) + version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + version_bumper.bump_versions(module="bpipe/test") + assert "Conda package had unknown version" in version_bumper.failed[0][0] diff --git a/tests/modules/test_create.py b/tests/modules/test_create.py new file mode 100644 index 0000000000..c847501830 --- /dev/null +++ b/tests/modules/test_create.py @@ -0,0 +1,164 @@ +import os +import shutil +from pathlib import Path +from unittest import mock + +import pytest +import requests_cache +import responses +import yaml +from git.repo import Repo + +import nf_core.modules +from tests.utils import ( + GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, + GITLAB_URL, + mock_anaconda_api_calls, + mock_biocontainers_api_calls, +) + +from ..test_modules import TestModules + + +class TestModulesCreate(TestModules): + def test_modules_create_succeed(self): + """Succeed at 
creating the TrimGalore! module""" + with responses.RequestsMock() as rsps: + mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7") + mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7") + module_create = nf_core.modules.ModuleCreate( + self.pipeline_dir, "trimgalore", "@author", "process_single", True, True, conda_name="trim-galore" + ) + with requests_cache.disabled(): + module_create.create() + assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "trimgalore.nf")) + + def test_modules_create_fail_exists(self): + """Fail at creating the same module twice""" + with responses.RequestsMock() as rsps: + mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7") + mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7") + module_create = nf_core.modules.ModuleCreate( + self.pipeline_dir, "trimgalore", "@author", "process_single", False, False, conda_name="trim-galore" + ) + with requests_cache.disabled(): + module_create.create() + with pytest.raises(UserWarning) as excinfo: + with requests_cache.disabled(): + module_create.create() + assert "Module file exists already" in str(excinfo.value) + + def test_modules_create_nfcore_modules(self): + """Create a module in nf-core/modules clone""" + with responses.RequestsMock() as rsps: + mock_anaconda_api_calls(rsps, "fastqc", "0.11.9") + mock_biocontainers_api_calls(rsps, "fastqc", "0.11.9") + module_create = nf_core.modules.ModuleCreate( + self.nfcore_modules, "fastqc", "@author", "process_low", False, False + ) + with requests_cache.disabled(): + module_create.create() + assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "main.nf")) + assert os.path.exists( + os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "tests", "main.nf.test") + ) + + def test_modules_create_nfcore_modules_subtool(self): + """Create a tool/subtool module in a nf-core/modules clone""" + with responses.RequestsMock() as rsps: + mock_anaconda_api_calls(rsps, "star", "2.8.10a") + mock_biocontainers_api_calls(rsps, "star", "2.8.10a") + module_create = nf_core.modules.ModuleCreate( + self.nfcore_modules, "star/index", "@author", "process_medium", False, False + ) + with requests_cache.disabled(): + module_create.create() + assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "main.nf")) + assert os.path.exists( + os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "tests", "main.nf.test") + ) + + @mock.patch("rich.prompt.Confirm.ask") + def test_modules_migrate(self, mock_rich_ask): + """Create a module with the --migrate-pytest option to convert pytest to nf-test""" + pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") + module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + with open(module_dir / "main.nf") as fh: + old_main_nf = fh.read() + with open(module_dir / "meta.yml") as fh: + old_meta_yml = fh.read() + + # Create a module with --migrate-pytest + mock_rich_ask.return_value = True + module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create.create() + + with open(module_dir / "main.nf") as fh: + new_main_nf = fh.read() + with open(module_dir / "meta.yml") as fh: + new_meta_yml = fh.read() + nextflow_config = module_dir / "tests" / 
"nextflow.config" + + # Check that old files have been copied to the new module + assert old_main_nf == new_main_nf + assert old_meta_yml == new_meta_yml + assert nextflow_config.is_file() + + # Check that pytest folder is deleted + assert not pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "samtools/sort" not in modules_yml.keys() + + @mock.patch("rich.prompt.Confirm.ask") + def test_modules_migrate_no_delete(self, mock_rich_ask): + """Create a module with the --migrate-pytest option to convert pytest to nf-test. + Test that pytest directory is not deleted.""" + pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + + # Create a module with --migrate-pytest + mock_rich_ask.return_value = False + module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create.create() + + # Check that pytest folder is not deleted + assert pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "samtools/sort" not in modules_yml.keys() + + @mock.patch("rich.prompt.Confirm.ask") + def test_modules_migrate_symlink(self, mock_rich_ask): + """Create a module with the --migrate-pytest option to convert pytest with symlinks to nf-test. + Test that the symlink is deleted and the file is copied.""" + + pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") + module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + + # Create a symlinked file in the pytest directory + symlink_file = pytest_dir / "symlink_file.txt" + symlink_file.symlink_to(module_dir / "main.nf") + + # Create a module with --migrate-pytest + mock_rich_ask.return_value = True + module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create.create() + + # Check that symlink is deleted + assert not symlink_file.is_symlink() diff --git a/tests/modules/test_info.py b/tests/modules/test_info.py new file mode 100644 index 0000000000..8906854044 --- /dev/null +++ b/tests/modules/test_info.py @@ -0,0 +1,62 @@ +from rich.console import Console + +import nf_core.modules + +from ..test_modules import TestModules +from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL + + +class TestModulesCreate(TestModules): + def test_modules_info_remote(self): + """Test getting info about a remote module""" + mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "fastqc") + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Module: fastqc" in output + assert "Inputs" in output + assert "Outputs" in output + + def test_modules_info_remote_gitlab(self): + """Test getting info about a module in the remote gitlab repo""" + mods_info = nf_core.modules.ModuleInfo( + self.pipeline_dir, "fastqc", remote_url=GITLAB_URL, 
branch=GITLAB_DEFAULT_BRANCH + ) + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Module: fastqc" in output + assert "Inputs" in output + assert "Outputs" in output + assert "--git-remote" in output + + def test_modules_info_local(self): + """Test getting info about a locally installed module""" + self.mods_install.install("trimgalore") + mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "trimgalore") + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Module: trimgalore" in output + assert "Inputs" in output + assert "Outputs" in output + assert "Location" in output + + def test_modules_info_in_modules_repo(self): + """Test getting info about a module in the modules repo""" + mods_info = nf_core.modules.ModuleInfo(self.nfcore_modules, "fastqc") + mods_info.local = True + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Module: fastqc" in output + assert "Inputs" in output + assert "Outputs" in output diff --git a/tests/modules/test_install.py b/tests/modules/test_install.py new file mode 100644 index 0000000000..b90f01ee61 --- /dev/null +++ b/tests/modules/test_install.py @@ -0,0 +1,90 @@ +import os +from pathlib import Path + +import pytest + +from nf_core.modules.install import ModuleInstall +from nf_core.modules.modules_json import ModulesJson + +from ..test_modules import TestModules +from ..utils import ( + GITLAB_BRANCH_ORG_PATH_BRANCH, + GITLAB_BRANCH_TEST_BRANCH, + GITLAB_REPO, + GITLAB_URL, + with_temporary_folder, +) + + +class TestModulesCreate(TestModules): + def test_modules_install_nopipeline(self): + """Test installing a module - no pipeline given""" + self.mods_install.dir = None + assert self.mods_install.install("foo") is False + + @with_temporary_folder + def test_modules_install_emptypipeline(self, tmpdir): + """Test installing a module - empty dir given""" + os.mkdir(os.path.join(tmpdir, "nf-core-pipe")) + self.mods_install.dir = os.path.join(tmpdir, "nf-core-pipe") + with pytest.raises(UserWarning) as excinfo: + self.mods_install.install("foo") + assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) + + def test_modules_install_nomodule(self): + """Test installing a module - unrecognised module given""" + assert self.mods_install.install("foo") is False + + def test_modules_install_trimgalore(self): + """Test installing a module - TrimGalore!""" + assert self.mods_install.install("trimgalore") is not False + assert self.mods_install.dir is not None + module_path = Path(self.mods_install.dir, "modules", "nf-core", "trimgalore") + assert os.path.exists(module_path) + + def test_modules_install_trimgalore_twice(self): + """Test installing a module - TrimGalore! 
already there"""
+        self.mods_install.install("trimgalore")
+        assert self.mods_install.install("trimgalore") is True
+
+    def test_modules_install_from_gitlab(self):
+        """Test installing a module from GitLab"""
+        assert self.mods_install_gitlab.install("fastqc") is True
+
+    def test_modules_install_different_branch_fail(self):
+        """Test installing a module from a different branch where the module is missing"""
+        install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH)
+        # The FastQC module does not exist in the branch-test branch
+        assert install_obj.install("fastqc") is False
+
+    def test_modules_install_different_branch_succeed(self):
+        """Test installing a module from a different branch where the module is present"""
+        install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH)
+        # The fastp module does exist in the branch-test branch
+        assert install_obj.install("fastp") is True
+
+        # Verify that the branch entry was added correctly
+        modules_json = ModulesJson(self.pipeline_dir)
+        assert (
+            modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO)
+            == GITLAB_BRANCH_TEST_BRANCH
+        )
+
+    def test_modules_install_tracking(self):
+        """Test installing a module and finding 'modules' in the installed_by section of modules.json"""
+        self.mods_install.install("trimgalore")
+
+        # Verify that the installed_by entry was added correctly
+        modules_json = ModulesJson(self.pipeline_dir)
+        mod_json = modules_json.get_modules_json()
+        assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["trimgalore"][
+            "installed_by"
+        ] == ["modules"]
+
+    def test_modules_install_alternate_remote(self):
+        """Test installing a module from a different remote with the same organization path"""
+        install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_ORG_PATH_BRANCH)
+        # Install fastqc from GitLab, which is also installed from GitHub with the same org_path
+        with pytest.raises(Exception) as excinfo:
+            install_obj.install("fastqc")
+        assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value)
diff --git a/tests/modules/test_lint.py b/tests/modules/test_lint.py
new file mode 100644
index 0000000000..6448916acf
--- /dev/null
+++ b/tests/modules/test_lint.py
@@ -0,0 +1,865 @@
+import json
+from pathlib import Path
+from typing import Union
+
+import pytest
+import yaml
+from git.repo import Repo
+
+import nf_core.modules
+from nf_core.modules.lint import main_nf
+from nf_core.utils import set_wd
+
+from ..test_modules import TestModules
+from ..utils import GITLAB_NFTEST_BRANCH, GITLAB_URL
+from .test_patch import BISMARK_ALIGN, CORRECT_SHA, PATCH_BRANCH, REPO_NAME, modify_main_nf
+
+PROCESS_LABEL_GOOD = (
+    """
+    label 'process_high'
+    cpus 12
+    """,
+    1,
+    0,
+    0,
+)
+PROCESS_LABEL_NON_ALPHANUMERIC = (
+    """
+    label 'a:label:with:colons'
+    cpus 12
+    """,
+    0,
+    2,
+    0,
+)
+PROCESS_LABEL_GOOD_CONFLICTING = (
+    """
+    label 'process_high'
+    label 'process_low'
+    cpus 12
+    """,
+    0,
+    1,
+    0,
+)
+PROCESS_LABEL_GOOD_DUPLICATES = (
+    """
+    label 'process_high'
+    label 'process_high'
+    cpus 12
+    """,
+    0,
+    2,
+    0,
+)
+PROCESS_LABEL_GOOD_AND_NONSTANDARD = (
+    """
+    label 'process_high'
+    label 'process_extra_label'
+    cpus 12
+    """,
+    1,
+    1,
+    0,
+)
+PROCESS_LABEL_NONSTANDARD = (
+    """
+    label 'process_extra_label'
+    cpus 12
+    """,
+    0,
+    2,
+    0,
+)
+PROCESS_LABEL_NONSTANDARD_DUPLICATES = (
+    """
+    label process_extra_label
+    label process_extra_label
+    cpus 12
+    """,
0, + 3, + 0, +) +PROCESS_LABEL_NONE_FOUND = ( + """ + cpus 12 + """, + 0, + 1, + 0, +) + +PROCESS_LABEL_TEST_CASES = [ + PROCESS_LABEL_GOOD, + PROCESS_LABEL_NON_ALPHANUMERIC, + PROCESS_LABEL_GOOD_CONFLICTING, + PROCESS_LABEL_GOOD_DUPLICATES, + PROCESS_LABEL_GOOD_AND_NONSTANDARD, + PROCESS_LABEL_NONSTANDARD, + PROCESS_LABEL_NONSTANDARD_DUPLICATES, + PROCESS_LABEL_NONE_FOUND, +] + + +# Test cases for linting the container definitions + +CONTAINER_SINGLE_GOOD = ( + "Single-line container definition should pass", + """ + container "quay.io/nf-core/gatk:4.4.0.0" //Biocontainers is missing a package + """, + 2, # passed + 0, # warned + 0, # failed +) + +CONTAINER_TWO_LINKS_GOOD = ( + "Multi-line container definition should pass", + """ + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0': + 'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }" + """, + 6, + 0, + 0, +) + +CONTAINER_WITH_SPACE_BAD = ( + "Space in container URL should fail", + """ + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0 ': + 'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }" + """, + 5, + 0, + 1, +) + +CONTAINER_MULTIPLE_DBLQUOTES_BAD = ( + "Incorrect quoting of container string should fail", + """ + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0 ': + "biocontainers/gatk4:4.4.0.0--py36hdfd78af_0" }" + """, + 4, + 0, + 1, +) + +CONTAINER_TEST_CASES = [ + CONTAINER_SINGLE_GOOD, + CONTAINER_TWO_LINKS_GOOD, + CONTAINER_WITH_SPACE_BAD, + CONTAINER_MULTIPLE_DBLQUOTES_BAD, +] + + +class TestModulesCreate(TestModules): + def _setup_patch(self, pipeline_dir: Union[str, Path], modify_module: bool): + install_obj = nf_core.modules.ModuleInstall( + pipeline_dir, + prompt=False, + force=False, + remote_url=GITLAB_URL, + branch=PATCH_BRANCH, + sha=CORRECT_SHA, + ) + + # Install the module + install_obj.install(BISMARK_ALIGN) + + if modify_module: + # Modify the module + module_path = Path(pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + modify_main_nf(module_path / "main.nf") + + def test_modules_lint_trimgalore(self): + """Test linting the TrimGalore! 
module""" + self.mods_install.install("trimgalore") + module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) + module_lint.lint(print_results=False, module="trimgalore") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_empty(self): + """Test linting a pipeline with no modules installed""" + self.mods_remove.remove("fastqc", force=True) + self.mods_remove.remove("multiqc", force=True) + with pytest.raises(LookupError): + nf_core.modules.ModuleLint(dir=self.pipeline_dir) + + def test_modules_lint_new_modules(self): + """lint a new module""" + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, all_modules=True) + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_no_gitlab(self): + """Test linting a pipeline with no modules installed""" + self.mods_remove.remove("fastqc", force=True) + self.mods_remove.remove("multiqc", force=True) + with pytest.raises(LookupError): + nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + + def test_modules_lint_gitlab_modules(self): + """Lint modules from a different remote""" + self.mods_install_gitlab.install("fastqc") + self.mods_install_gitlab.install("multiqc") + module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint.lint(print_results=False, all_modules=True) + assert len(module_lint.failed) == 2 + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_multiple_remotes(self): + """Lint modules from a different remote""" + self.mods_install_gitlab.install("multiqc") + module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint.lint(print_results=False, all_modules=True) + assert len(module_lint.failed) == 1 + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_registry(self): + """Test linting the samtools module and alternative registry""" + self.mods_install.install("samtools") + module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, registry="public.ecr.aws") + module_lint.lint(print_results=False, module="samtools") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) + module_lint.lint(print_results=False, module="samtools") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_patched_modules(self): + """ + Test creating a patch file and applying it to a new version of the the files + """ + self._setup_patch(str(self.pipeline_dir), True) + + # Create a patch file + patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + # change temporarily working directory to the pipeline directory + # to avoid error from try_apply_patch() during linting + with set_wd(self.pipeline_dir): + module_lint = nf_core.modules.ModuleLint( + dir=self.pipeline_dir, + remote_url=GITLAB_URL, + 
branch=PATCH_BRANCH, + hide_progress=True, + ) + module_lint.lint( + print_results=False, + all_modules=True, + ) + + assert len(module_lint.failed) == 1 + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_check_process_labels(self): + for test_case in PROCESS_LABEL_TEST_CASES: + process, passed, warned, failed = test_case + mocked_ModuleLint = MockModuleLint() + main_nf.check_process_labels(mocked_ModuleLint, process.splitlines()) + assert len(mocked_ModuleLint.passed) == passed + assert len(mocked_ModuleLint.warned) == warned + assert len(mocked_ModuleLint.failed) == failed + + def test_modules_lint_check_url(self): + for test_case in CONTAINER_TEST_CASES: + test, process, passed, warned, failed = test_case + mocked_ModuleLint = MockModuleLint() + for line in process.splitlines(): + if line.strip(): + main_nf.check_container_link_line(mocked_ModuleLint, line, registry="quay.io") + + assert ( + len(mocked_ModuleLint.passed) == passed + ), f"{test}: Expected {passed} PASS, got {len(mocked_ModuleLint.passed)}." + assert ( + len(mocked_ModuleLint.warned) == warned + ), f"{test}: Expected {warned} WARN, got {len(mocked_ModuleLint.warned)}." + assert ( + len(mocked_ModuleLint.failed) == failed + ), f"{test}: Expected {failed} FAIL, got {len(mocked_ModuleLint.failed)}." + + def test_modules_lint_snapshot_file(self): + """Test linting a module with a snapshot file""" + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_snapshot_file_missing_fail(self): + """Test linting a module with a snapshot file missing, which should fail""" + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ).unlink() + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ).touch() + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_snapshot_exists" + + def test_modules_lint_snapshot_file_not_needed(self): + """Test linting a module which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ) + ) as fh: + content = fh.read() + new_content = content.replace("snapshot(", "snap (") + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ), + "w", + ) as fh: + fh.write(new_content) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_environment_yml_file_doesnt_exists(self): + """Test linting a module with an environment.yml file""" + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", 
"test", "environment.yml").rename( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml.bak", + ) + ) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml.bak", + ).rename( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "environment_yml_exists" + + def test_modules_environment_yml_file_sorted_correctly(self): + """Test linting a module with a correctly sorted environment.yml file""" + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_environment_yml_file_sorted_incorrectly(self): + """Test linting a module with an incorrectly sorted environment.yml file""" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) as fh: + yaml_content = yaml.safe_load(fh) + # Add a new dependency to the environment.yml file and reverse the order + yaml_content["dependencies"].append("z") + yaml_content["dependencies"].reverse() + yaml_content = yaml.dump(yaml_content) + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: + fh.write(yaml_content) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + # we fix the sorting on the fly, so this should pass + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_environment_yml_file_not_array(self): + """Test linting a module with an incorrectly formatted environment.yml file""" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) as fh: + yaml_content = yaml.safe_load(fh) + yaml_content["dependencies"] = "z" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: + fh.write(yaml.dump(yaml_content)) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "environment_yml_valid" + + def test_modules_environment_yml_file_name_mismatch(self): + """Test linting a module with a different name in the environment.yml file""" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) as fh: + yaml_content = yaml.safe_load(fh) + yaml_content["name"] = "bpipe-test" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + 
"environment.yml", + ), + "w", + ) as fh: + fh.write(yaml.dump(yaml_content)) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + # reset changes + yaml_content["name"] = "bpipe_test" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: + fh.write(yaml.dump(yaml_content)) + + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "environment_yml_name" + + def test_modules_meta_yml_incorrect_licence_field(self): + """Test linting a module with an incorrect Licence field in meta.yml""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: + meta_yml = yaml.safe_load(fh) + meta_yml["tools"][0]["bpipe"]["licence"] = "[MIT]" + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: + fh.write(yaml.dump(meta_yml)) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + + # reset changes + meta_yml["tools"][0]["bpipe"]["licence"] = ["MIT"] + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: + fh.write(yaml.dump(meta_yml)) + + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "meta_yml_valid" + + def test_modules_meta_yml_input_mismatch(self): + """Test linting a module with an extra entry in input fields in meta.yml compared to module.input""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: + main_nf = fh.read() + main_nf_new = main_nf.replace("path bam", "path bai") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf_new) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf) + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) == 2 + lint_tests = [x.lint_test for x in module_lint.warned] + # check that it is there twice: + assert lint_tests.count("meta_input_meta_only") == 1 + assert lint_tests.count("meta_input_main_only") == 1 + + def test_modules_meta_yml_output_mismatch(self): + """Test linting a module with an extra entry in output fields in meta.yml compared to module.output""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: + main_nf = fh.read() + main_nf_new = main_nf.replace("emit: bam", "emit: bai") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf_new) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf) + assert 
len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) == 2 + lint_tests = [x.lint_test for x in module_lint.warned] + # check that it is there twice: + assert lint_tests.count("meta_output_meta_only") == 1 + assert lint_tests.count("meta_output_main_only") == 1 + + def test_modules_meta_yml_incorrect_name(self): + """Test linting a module with an incorrect name in meta.yml""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: + meta_yml = yaml.safe_load(fh) + meta_yml["name"] = "bpipe/test" + # need to make the same change to the environment.yml file + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) as fh: + environment_yml = yaml.safe_load(fh) + environment_yml["name"] = "bpipe/test" + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: + fh.write(yaml.dump(meta_yml)) + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: + fh.write(yaml.dump(environment_yml)) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + + # reset changes + meta_yml["name"] = "bpipe_test" + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: + fh.write(yaml.dump(meta_yml)) + environment_yml["name"] = "bpipe_test" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: + fh.write(yaml.dump(environment_yml)) + + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "meta_name" + + def test_modules_missing_test_dir(self): + """Test linting a module with a missing test directory""" + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak") + ) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests") + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_dir_exists" + + def test_modules_missing_test_main_nf(self): + """Test linting a module with a missing test/main.nf file""" + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ).rename( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.bak", + ) + ) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.bak", + ).rename( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ) + ) + 
assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
+        assert len(module_lint.passed) >= 0
+        assert len(module_lint.warned) >= 0
+        assert module_lint.failed[0].lint_test == "test_main_nf_exists"
+
+    def test_modules_unused_pytest_files(self):
+        """Test linting an nf-test module with files still present in `tests/modules/`"""
+        Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").mkdir(parents=True, exist_ok=True)
+        module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
+        module_lint.lint(print_results=False, module="bpipe/test")
+        Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").rmdir()
+        assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
+        assert len(module_lint.passed) >= 0
+        assert len(module_lint.warned) >= 0
+        assert module_lint.failed[0].lint_test == "test_old_test_dir"
+
+    def test_nftest_failing_linting(self):
+        """Test linting a module which includes other modules in its nf-test tests.
+        The environment.yml, meta.yml and test tag lint checks are expected to fail."""
+        # Clone modules repo with testing modules
+        tmp_dir = self.nfcore_modules.parent
+        self.nfcore_modules = Path(tmp_dir, "modules-test")
+        Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_NFTEST_BRANCH)
+
+        module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
+        module_lint.lint(print_results=False, module="kallisto/quant")
+
+        assert len(module_lint.failed) == 3, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
+        assert len(module_lint.passed) >= 0
+        assert len(module_lint.warned) >= 0
+        assert module_lint.failed[0].lint_test == "environment_yml_valid"
+        assert module_lint.failed[1].lint_test == "meta_yml_valid"
+        assert module_lint.failed[2].lint_test == "test_main_tags"
+        assert "kallisto/index" in module_lint.failed[2].message
+
+    def test_modules_absent_version(self):
+        """Test linting an nf-test module if the versions entry is absent in the snapshot file"""
+        with open(
+            Path(
+                self.nfcore_modules,
+                "modules",
+                "nf-core",
+                "bpipe",
+                "test",
+                "tests",
+                "main.nf.test.snap",
+            )
+        ) as fh:
+            content = fh.read()
+        new_content = content.replace("versions", "foo")
+        with open(
+            Path(
+                self.nfcore_modules,
+                "modules",
+                "nf-core",
+                "bpipe",
+                "test",
+                "tests",
+                "main.nf.test.snap",
+            ),
+            "w",
+        ) as fh:
+            fh.write(new_content)
+        module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
+        module_lint.lint(print_results=False, module="bpipe/test")
+        with open(
+            Path(
+                self.nfcore_modules,
+                "modules",
+                "nf-core",
+                "bpipe",
+                "test",
+                "tests",
+                "main.nf.test.snap",
+            ),
+            "w",
+        ) as fh:
+            fh.write(content)
+        assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
+        assert len(module_lint.passed) >= 0
+        assert len(module_lint.warned) >= 0
+        assert module_lint.failed[0].lint_test == "test_snap_versions"
+
+    def test_modules_empty_file_in_snapshot(self):
+        """Test linting an nf-test module with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)"""
+        snap_file = Path(
+            self.nfcore_modules,
+            "modules",
+            "nf-core",
+            "bpipe",
+            "test",
+            "tests",
+            "main.nf.test.snap",
+        )
+        snap = json.load(snap_file.open())
+        content = snap_file.read_text()
+        snap["my test"]["content"][0]["0"] = "test:md5,d41d8cd98f00b204e9800998ecf8427e"
+
+        with open(snap_file, "w") as fh:
+            json.dump(snap, fh)
+
+        module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
+        module_lint.lint(print_results=False,
module="bpipe/test") + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_snap_md5sum" + + # reset the file + with open(snap_file, "w") as fh: + fh.write(content) + + def test_modules_empty_file_in_stub_snapshot(self): + """Test linting a nf-test module with an empty file sha sum in the stub test snapshot, which should make it not fail""" + snap_file = Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ) + snap = json.load(snap_file.open()) + content = snap_file.read_text() + snap["my_test_stub"] = {"content": [{"0": "test:md5,d41d8cd98f00b204e9800998ecf8427e", "versions": {}}]} + + with open(snap_file, "w") as fh: + json.dump(snap, fh) + + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert any(x.lint_test == "test_snap_md5sum" for x in module_lint.passed) + + # reset the file + with open(snap_file, "w") as fh: + fh.write(content) + + +# A skeleton object with the passed/warned/failed list attrs +# Use this in place of a ModuleLint object to test behaviour of +# linting methods which don't need the full setup +class MockModuleLint: + def __init__(self): + self.passed = [] + self.warned = [] + self.failed = [] + + self.main_nf = "main_nf" diff --git a/tests/modules/test_list.py b/tests/modules/test_list.py new file mode 100644 index 0000000000..81484cc8f0 --- /dev/null +++ b/tests/modules/test_list.py @@ -0,0 +1,126 @@ +import json +from pathlib import Path + +import yaml +from rich.console import Console + +import nf_core.modules + +from ..test_modules import TestModules +from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL + + +class TestModulesCreate(TestModules): + def test_modules_list_remote(self): + """Test listing available modules""" + mods_list = nf_core.modules.ModuleList(None, remote=True) + listed_mods = mods_list.list_components() + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "fastqc" in output + + def test_modules_list_remote_gitlab(self): + """Test listing the modules in the remote gitlab repo""" + mods_list = nf_core.modules.ModuleList(None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH) + listed_mods = mods_list.list_components() + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "fastqc" in output + + def test_modules_list_pipeline(self): + """Test listing locally installed modules""" + mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + listed_mods = mods_list.list_components() + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "fastqc" in output + assert "multiqc" in output + + def test_modules_install_and_list_pipeline(self): + """Test listing locally installed modules""" + self.mods_install.install("trimgalore") + mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + listed_mods = mods_list.list_components() + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "trimgalore" in output + + def 
test_modules_install_gitlab_and_list_pipeline(self): + """Test listing locally installed modules""" + self.mods_install_gitlab.install("fastqc") + mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + listed_mods = mods_list.list_components() + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "fastqc" in output + + def test_modules_list_local_json(self): + """Test listing locally installed modules as JSON""" + mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + listed_mods = mods_list.list_components(print_json=True) + listed_mods = json.loads(listed_mods) + assert "fastqc" in listed_mods + assert "multiqc" in listed_mods + + def test_modules_list_remote_json(self): + """Test listing available modules as JSON""" + mods_list = nf_core.modules.ModuleList(None, remote=True) + listed_mods = mods_list.list_components(print_json=True) + listed_mods = json.loads(listed_mods) + assert "fastqc" in listed_mods + assert "multiqc" in listed_mods + + def test_modules_list_with_one_keyword(self): + """Test listing available modules with one keyword""" + mods_list = nf_core.modules.ModuleList(None, remote=True) + listed_mods = mods_list.list_components(keywords=["qc"]) + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "multiqc" in output + + def test_modules_list_with_keywords(self): + """Test listing available modules with multiple keywords""" + mods_list = nf_core.modules.ModuleList(None, remote=True) + listed_mods = mods_list.list_components(keywords=["fastq", "qc"]) + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "fastqc" in output + + def test_modules_list_with_unused_keyword(self): + """Test listing available modules with an unused keyword""" + mods_list = nf_core.modules.ModuleList(None, remote=True) + with self.assertLogs(level="INFO") as log: + listed_mods = mods_list.list_components(keywords=["you_will_never_find_me"]) + self.assertIn("No available", log.output[0]) + # expect empty list + assert listed_mods == "" + + def test_modules_list_in_wrong_repo_fail(self): + """Test listing available modules in a non-pipeline repo""" + # modify repotype in .nf-core.yml + with open(Path(self.pipeline_dir, ".nf-core.yml")) as fh: + nf_core_yml = yaml.safe_load(fh) + nf_core_yml_orig = nf_core_yml.copy() + nf_core_yml["repository_type"] = "modules" + nf_core_yml["org_path"] = "nf-core" + + print(nf_core_yml) + with open(Path(self.pipeline_dir, ".nf-core.yml"), "w") as fh: + yaml.safe_dump(nf_core_yml, fh) + # expect error logged + with self.assertLogs(level="ERROR") as log: + mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + listed_mods = mods_list.list_components() + self.assertIn("must be run from a pipeline directory", log.output[0]) + # expect empty list + assert listed_mods == "" + # restore .nf-core.yml + with open(Path(self.pipeline_dir, ".nf-core.yml"), "w") as fh: + yaml.safe_dump(nf_core_yml_orig, fh) diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py new file mode 100644 index 0000000000..319b5ad657 --- /dev/null +++ b/tests/modules/test_modules_json.py @@ -0,0 +1,245 @@ +import copy +import json +import shutil +from pathlib import Path + +from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_repo import ( + NF_CORE_MODULES_DEFAULT_BRANCH, + NF_CORE_MODULES_NAME, + NF_CORE_MODULES_REMOTE, + ModulesRepo, +) 
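+# For orientation when reading the assertions in this file: modules.json, as these
+# tests exercise it, has roughly the following nested shape. This is a sketch
+# inferred from the assertions below, not an authoritative schema; values shown
+# as "..." or in angle brackets are placeholders.
+#
+# {
+#     "repos": {
+#         "<remote url>": {
+#             "modules": {"<org>": {"<module>": {"git_sha": "...", "branch": "...", "installed_by": ["modules"]}}}
+#         }
+#     }
+# }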
+from nf_core.modules.patch import ModulePatch + +from ..test_modules import TestModules + + +class TestModulesCreate(TestModules): + def test_get_modules_json(self): + """Checks that the get_modules_json function returns the correct result""" + mod_json_path = Path(self.pipeline_dir, "modules.json") + with open(mod_json_path) as fh: + try: + mod_json_sb = json.load(fh) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") + + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + + # Check that the modules.json hasn't changed + assert mod_json == mod_json_sb + + def test_mod_json_update(self): + """Checks whether the update function works properly""" + mod_json_obj = ModulesJson(self.pipeline_dir) + # Update the modules.json file + mod_repo_obj = ModulesRepo() + mod_json_obj.update("modules", mod_repo_obj, "MODULE_NAME", "GIT_SHA", "modules", write_file=False) + mod_json = mod_json_obj.get_modules_json() + assert "MODULE_NAME" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"] + assert "GIT_SHA" == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["git_sha"] + assert ( + NF_CORE_MODULES_DEFAULT_BRANCH + == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["branch"] + ) + assert ( + "modules" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["installed_by"] + ) + + def test_mod_json_create(self): + """Test creating a modules.json file from scratch""" + mod_json_path = Path(self.pipeline_dir, "modules.json") + # Remove the existing modules.json file + mod_json_path.unlink() + + # Create the new modules.json file + # (There are no prompts as long as there are only nf-core modules) + ModulesJson(self.pipeline_dir).create() + + # Check that the file exists + assert (mod_json_path).exists() + + # Get the contents of the file + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + + mods = ["fastqc", "multiqc"] + for mod in mods: + assert mod in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"][mod] + assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"][mod] + + def _modify_main_nf(self, path): + """Modify a file to test patch creation""" + with open(path) as fh: + lines = fh.readlines() + # Modify $meta.id to $meta.single_end + lines[1] = ' tag "$meta.single_end"\n' + with open(path, "w") as fh: + fh.writelines(lines) + + def test_mod_json_create_with_patch(self): + """Test creating a modules.json file from scratch when there are patched modules""" + mod_json_path = Path(self.pipeline_dir, "modules.json") + + # Modify the module + module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc") + self._modify_main_nf(module_path / "main.nf") + + # Try creating a patch file + patch_obj = ModulePatch(self.pipeline_dir, NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_DEFAULT_BRANCH) + patch_obj.patch("fastqc") + + # Remove the existing modules.json file + mod_json_path.unlink() + + # Create the new modules.json file + ModulesJson(self.pipeline_dir).create() + + # Check that the file exists + assert mod_json_path.is_file() + + # Get the contents of the file + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = 
mod_json_obj.get_modules_json() + + # Check that fastqc is in the file + assert "fastqc" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] + assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] + + # Check that fastqc/main.nf maintains the changes + with open(module_path / "main.nf") as fh: + lines = fh.readlines() + assert lines[1] == ' tag "$meta.single_end"\n' + + def test_mod_json_up_to_date(self): + """ + Checks if the modules.json file is up to date + when no changes have been made to the pipeline + """ + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json_before = mod_json_obj.get_modules_json() + mod_json_obj.check_up_to_date() + mod_json_after = mod_json_obj.get_modules_json() + + # Check that the modules.json hasn't changed + assert mod_json_before == mod_json_after + + def test_mod_json_up_to_date_module_removed(self): + """ + Reinstall a module that has an entry in the modules.json + but is missing in the pipeline + """ + # Remove the fastqc module + fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") + shutil.rmtree(fastqc_path) + + # Check that the modules.json file is up to date, and reinstall the module + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json_obj.check_up_to_date() + + # Check that the module has been reinstalled + files = ["main.nf", "meta.yml"] + assert fastqc_path.exists() + for f in files: + assert Path(fastqc_path, f).exists() + + def test_mod_json_up_to_date_reinstall_fails(self): + """ + Try reinstalling a module where the git_sha is invalid + """ + mod_json_obj = ModulesJson(self.pipeline_dir) + + # Update the fastqc module entry to an invalid git_sha + mod_json_obj.update("modules", ModulesRepo(), "fastqc", "INVALID_GIT_SHA", "modules", write_file=True) + + # Remove the fastqc module + fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") + shutil.rmtree(fastqc_path) + + # Check that the modules.json file is up to date, and remove the fastqc module entry + mod_json_obj.check_up_to_date() + mod_json = mod_json_obj.get_modules_json() + + # Check that the module has been removed from the modules.json + assert "fastqc" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + + def test_mod_json_repo_present(self): + """Tests the repo_present function""" + mod_json_obj = ModulesJson(self.pipeline_dir) + + assert mod_json_obj.repo_present(NF_CORE_MODULES_REMOTE) is True + assert mod_json_obj.repo_present("INVALID_REPO") is False + + def test_mod_json_module_present(self): + """Tests the module_present function""" + mod_json_obj = ModulesJson(self.pipeline_dir) + + assert mod_json_obj.module_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is True + assert mod_json_obj.module_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is False + assert mod_json_obj.module_present("fastqc", "INVALID_REPO", "INVALID_DIR") is False + assert mod_json_obj.module_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR") is False + + def test_mod_json_get_module_version(self): + """Test the get_module_version function""" + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + assert ( + mod_json_obj.get_module_version("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) + == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"]["git_sha"] + ) + 
assert mod_json_obj.get_module_version("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is None + + def test_mod_json_dump(self): + """Tests the dump function""" + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + # Remove the modules.json file + mod_json_path = Path(self.pipeline_dir, "modules.json") + mod_json_path.unlink() + + # Check that the dump function creates the file + mod_json_obj.dump() + assert mod_json_path.exists() + + # Check that the dump function writes the correct content + with open(mod_json_path) as f: + try: + mod_json_new = json.load(f) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") + assert mod_json == mod_json_new + + def test_mod_json_with_empty_modules_value(self): + # Load module.json and remove the modules entry + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json_obj.create() # Create modules.json explicitly to get correct module sha + mod_json_orig = mod_json_obj.get_modules_json() + mod_json = copy.deepcopy(mod_json_orig) + mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"] = "" + # save the altered module.json and load it again to check if it will fix itself + mod_json_obj.modules_json = mod_json + mod_json_obj.dump() + mod_json_obj_new = ModulesJson(self.pipeline_dir) + mod_json_obj_new.check_up_to_date() + mod_json_new = mod_json_obj_new.get_modules_json() + assert mod_json_orig == mod_json_new + + def test_mod_json_with_missing_modules_entry(self): + # Load module.json and remove the modules entry + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json_obj.create() # Create modules.json explicitly to get correct module sha + mod_json_orig = mod_json_obj.get_modules_json() + mod_json = copy.deepcopy(mod_json_orig) + mod_json["repos"][NF_CORE_MODULES_REMOTE].pop("modules") + # save the altered module.json and load it again to check if it will fix itself + mod_json_obj.modules_json = mod_json + mod_json_obj.dump() + mod_json_obj_new = ModulesJson(self.pipeline_dir) + mod_json_obj_new.check_up_to_date() + mod_json_new = mod_json_obj_new.get_modules_json() + assert mod_json_orig == mod_json_new diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py new file mode 100644 index 0000000000..230bb1ce8f --- /dev/null +++ b/tests/modules/test_patch.py @@ -0,0 +1,365 @@ +import os +import tempfile +from pathlib import Path +from unittest import mock + +import pytest + +import nf_core.components.components_command +import nf_core.modules + +from ..test_modules import TestModules +from ..utils import GITLAB_URL + +""" +Test the 'nf-core modules patch' command + +Uses a branch (patch-tester) in the GitLab nf-core/modules-test repo when +testing if the update commands works correctly with patch files +""" + +ORG_SHA = "002623ccc88a3b0cb302c7d8f13792a95354d9f2" +CORRECT_SHA = "1dff30bfca2d98eb7ac7b09269a15e822451d99f" +SUCCEED_SHA = "ba15c20c032c549d77c5773659f19c2927daf48e" +FAIL_SHA = "67b642d4471c4005220a342cad3818d5ba2b5a73" +BISMARK_ALIGN = "bismark/align" +REPO_NAME = "nf-core-test" +PATCH_BRANCH = "patch-tester" +REPO_URL = "https://gitlab.com/nf-core/modules-test.git" + + +def setup_patch(pipeline_dir, modify_module): + install_obj = nf_core.modules.ModuleInstall( + pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH, sha=ORG_SHA + ) + + # Install the module + install_obj.install(BISMARK_ALIGN) + + if modify_module: + # Modify the module + module_path = Path(pipeline_dir, 
"modules", REPO_NAME, BISMARK_ALIGN) + modify_main_nf(module_path / "main.nf") + + +def modify_main_nf(path): + """Modify a file to test patch creation""" + with open(path) as fh: + lines = fh.readlines() + # We want a patch file that looks something like: + # - tuple val(meta), path(reads) + # - path index + # + tuple val(meta), path(reads), path(index) + for line_index in range(len(lines)): + if lines[line_index] == " tuple val(meta), path(reads)\n": + lines[line_index] = " tuple val(meta), path(reads), path(index)\n" + elif lines[line_index] == " path index\n": + to_pop = line_index + lines.pop(to_pop) + with open(path, "w") as fh: + fh.writelines(lines) + + +class TestModulesCreate(TestModules): + def test_create_patch_no_change(self): + """Test creating a patch when there is no change to the module""" + setup_patch(self.pipeline_dir, False) + + # Try creating a patch file + patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + with pytest.raises(UserWarning): + patch_obj.patch(BISMARK_ALIGN) + + module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + + # Check that no patch file has been added to the directory + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"} + + # Check the 'modules.json' contains no patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None + + def test_create_patch_change(self): + """Test creating a patch when there is a change to the module""" + setup_patch(self.pipeline_dir, True) + + # Try creating a patch file + patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + + patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + # Check that the correct lines are in the patch file + with open(module_path / patch_fn) as fh: + patch_lines = fh.readlines() + module_relpath = module_path.relative_to(self.pipeline_dir) + assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines, module_relpath / "main.nf" + assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines + assert "- tuple val(meta), path(reads)\n" in patch_lines + assert "- path index\n" in patch_lines + assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines + + def test_create_patch_try_apply_successful(self): + """ + Test creating a patch file and applying it to a new version of the the files + """ + + setup_patch(self.pipeline_dir, True) + module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN) + module_path = Path(self.pipeline_dir, module_relpath) + + # Try creating a patch file + patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' 
contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + update_obj = nf_core.modules.ModuleUpdate( + self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH + ) + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files(BISMARK_ALIGN, SUCCEED_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + module_install_dir = install_dir / BISMARK_ALIGN + patch_relpath = module_relpath / patch_fn + assert ( + update_obj.try_apply_patch(BISMARK_ALIGN, REPO_NAME, patch_relpath, module_path, module_install_dir) is True + ) + + # Move the files from the temporary directory + update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA) + + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + # Check that the correct lines are in the patch file + with open(module_path / patch_fn) as fh: + patch_lines = fh.readlines() + module_relpath = module_path.relative_to(self.pipeline_dir) + assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines + assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines + assert "- tuple val(meta), path(reads)\n" in patch_lines + assert "- path index\n" in patch_lines + assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines + + # Check that 'main.nf' is updated correctly + with open(module_path / "main.nf") as fh: + main_nf_lines = fh.readlines() + # These lines should have been removed by the patch + assert " tuple val(meta), path(reads)\n" not in main_nf_lines + assert " path index\n" not in main_nf_lines + # This line should have been added + assert " tuple val(meta), path(reads), path(index)\n" in main_nf_lines + + def test_create_patch_try_apply_failed(self): + """ + Test creating a patch file and applying it to a new version of the the files + """ + + setup_patch(self.pipeline_dir, True) + module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN) + module_path = Path(self.pipeline_dir, module_relpath) + + # Try creating a patch file + patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + update_obj = nf_core.modules.ModuleUpdate( + self.pipeline_dir, sha=FAIL_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH + ) + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + module_install_dir = 
+        patch_path = module_relpath / patch_fn
+        assert (
+            update_obj.try_apply_patch(BISMARK_ALIGN, REPO_NAME, patch_path, module_path, module_install_dir) is False
+        )
+
+    def test_create_patch_update_success(self):
+        """
+        Test creating a patch file and then updating the module
+
+        Should have the same effect as 'test_create_patch_try_apply_successful'
+        but uses the higher-level API
+        """
+
+        setup_patch(self.pipeline_dir, True)
+        module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN)
+
+        # Try creating a patch file
+        patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
+        patch_obj.patch(BISMARK_ALIGN)
+
+        patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
+        # Check that a patch file with the correct name has been created
+        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
+
+        # Check the 'modules.json' contains a patch file for the module
+        modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
+        assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path(
+            "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
+        )
+
+        # Update the module
+        update_obj = nf_core.modules.ModuleUpdate(
+            self.pipeline_dir,
+            sha=SUCCEED_SHA,
+            show_diff=False,
+            update_deps=True,
+            remote_url=GITLAB_URL,
+            branch=PATCH_BRANCH,
+        )
+        assert update_obj.update(BISMARK_ALIGN)
+
+        # Check that a patch file with the correct name has been created
+        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
+
+        # Check the 'modules.json' contains a patch file for the module
+        modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
+        assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path(
+            "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
+        ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME)
+
+        # Check that the correct lines are in the patch file
+        with open(module_path / patch_fn) as fh:
+            patch_lines = fh.readlines()
+        module_relpath = module_path.relative_to(self.pipeline_dir)
+        assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines
+        assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines
+        assert "-    tuple val(meta), path(reads)\n" in patch_lines
+        assert "-    path index\n" in patch_lines
+        assert "+    tuple val(meta), path(reads), path(index)\n" in patch_lines
+
+        # Check that 'main.nf' is updated correctly
+        with open(module_path / "main.nf") as fh:
+            main_nf_lines = fh.readlines()
+        # These lines should have been removed by the patch
+        assert "    tuple val(meta), path(reads)\n" not in main_nf_lines
+        assert "    path index\n" not in main_nf_lines
+        # This line should have been added
+        assert "    tuple val(meta), path(reads), path(index)\n" in main_nf_lines
+
+    def test_create_patch_update_fail(self):
+        """
+        Test creating a patch file and updating a module when there is a diff conflict
+        """
+
+        setup_patch(self.pipeline_dir, True)
+        module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN)
+
+        # Try creating a patch file
+        patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
+        patch_obj.patch(BISMARK_ALIGN)
+
+        patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
+        # Check that a patch file with the correct name has been created
+        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
+
+        # Check the 'modules.json' contains a patch file for the module
+        modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
+        assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
+            "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
+        )
+
+        # Save the file contents for downstream comparison
+        with open(module_path / patch_fn) as fh:
+            patch_contents = fh.read()
+
+        update_obj = nf_core.modules.ModuleUpdate(
+            self.pipeline_dir,
+            sha=FAIL_SHA,
+            show_diff=False,
+            update_deps=True,
+            remote_url=GITLAB_URL,
+            branch=PATCH_BRANCH,
+        )
+        update_obj.update(BISMARK_ALIGN)
+
+        # Check that the installed files have not been affected by the attempted patch
+        temp_dir = Path(tempfile.mkdtemp())
+        nf_core.components.components_command.ComponentCommand(
+            "modules", self.pipeline_dir, GITLAB_URL, PATCH_BRANCH
+        ).install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, temp_dir)
+
+        temp_module_dir = temp_dir / BISMARK_ALIGN
+        for file in os.listdir(temp_module_dir):
+            assert file in os.listdir(module_path)
+            with open(module_path / file) as fh:
+                installed = fh.read()
+            with open(temp_module_dir / file) as fh:
+                shouldbe = fh.read()
+            assert installed == shouldbe
+
+        # Check that the patch file is unaffected
+        with open(module_path / patch_fn) as fh:
+            new_patch_contents = fh.read()
+        assert patch_contents == new_patch_contents
+
+    def test_remove_patch(self):
+        """Test removing a patch file from a patched module"""
+        setup_patch(self.pipeline_dir, True)
+
+        # Try creating a patch file
+        patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
+        patch_obj.patch(BISMARK_ALIGN)
+
+        module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN)
+
+        # Check that a patch file with the correct name has been created
+        patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
+        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
+
+        # Check the 'modules.json' contains a patch file for the module
+        modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
+        assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
+            "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
+        )
+
+        with mock.patch.object(nf_core.components.patch.questionary, "confirm") as mock_questionary:
+            mock_questionary.unsafe_ask.return_value = True
+            patch_obj.remove(BISMARK_ALIGN)
+        # Check that the diff file has been removed
+        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"}
+
+        # Check that the 'modules.json' entry has been removed
+        modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
+        assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None
diff --git a/tests/modules/test_remove.py b/tests/modules/test_remove.py
new file mode 100644
index 0000000000..a80c8b0986
--- /dev/null
+++ b/tests/modules/test_remove.py
@@ -0,0 +1,26 @@
+import os
+from pathlib import Path
+
+
+from ..test_modules import TestModules
+
+
+class TestModulesRemove(TestModules):
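+    # Assumed fixtures: `self.mods_install`, `self.mods_install_gitlab`,
+    # `self.mods_remove` and `self.mods_remove_gitlab` are ModuleInstall/ModuleRemove
+    # helpers prepared by the shared `TestModules` base class (set up outside this diff).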
+    def test_modules_remove_trimgalore(self):
+        """Test removing TrimGalore! module after installing it"""
+        self.mods_install.install("trimgalore")
+        assert self.mods_install.dir is not None
+        module_path = Path(self.mods_install.dir, "modules", "nf-core", "trimgalore")
+        assert self.mods_remove.remove("trimgalore")
+        assert os.path.exists(module_path) is False
+
+    def test_modules_remove_trimgalore_uninstalled(self):
+        """Test removing TrimGalore! module without installing it"""
+        assert self.mods_remove.remove("trimgalore") is False
+
+    def test_modules_remove_multiqc_from_gitlab(self):
+        """Test removing multiqc module after installing it from an alternative source"""
+        self.mods_install_gitlab.install("multiqc")
+        assert self.mods_install_gitlab.dir is not None
+        module_path = Path(self.mods_install_gitlab.dir, "modules", "nf-core-test", "multiqc")
+        assert self.mods_remove_gitlab.remove("multiqc", force=True)
+        assert os.path.exists(module_path) is False
diff --git a/tests/modules/test_update.py b/tests/modules/test_update.py
new file mode 100644
index 0000000000..a33aac3775
--- /dev/null
+++ b/tests/modules/test_update.py
@@ -0,0 +1,435 @@
+import logging
+import shutil
+import tempfile
+from pathlib import Path
+from unittest import mock
+
+import questionary
+import yaml
+
+import nf_core.utils
+from nf_core.modules.install import ModuleInstall
+from nf_core.modules.modules_json import ModulesJson
+from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE
+from nf_core.modules.patch import ModulePatch
+from nf_core.modules.update import ModuleUpdate
+
+from ..test_modules import TestModules
+from ..utils import (
+    GITLAB_BRANCH_TEST_BRANCH,
+    GITLAB_BRANCH_TEST_NEW_SHA,
+    GITLAB_BRANCH_TEST_OLD_SHA,
+    GITLAB_DEFAULT_BRANCH,
+    GITLAB_REPO,
+    GITLAB_URL,
+    OLD_TRIMGALORE_BRANCH,
+    OLD_TRIMGALORE_SHA,
+    cmp_component,
+)
+
+
+class TestModulesUpdate(TestModules):
+    def test_install_and_update(self):
+        """Installs a module in the pipeline and updates it (no change)"""
+        self.mods_install.install("trimgalore")
+        update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False)
+
+        # Copy the module files and check that they are unaffected by the update
+        tmpdir = Path(tempfile.TemporaryDirectory().name)
+        trimgalore_tmpdir = tmpdir / "trimgalore"
+        trimgalore_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "trimgalore")
+        shutil.copytree(trimgalore_path, trimgalore_tmpdir)
+
+        assert update_obj.update("trimgalore") is True
+        assert cmp_component(trimgalore_tmpdir, trimgalore_path) is True
+
+    def test_install_at_hash_and_update(self):
+        """Installs an old version of a module in the pipeline and updates it"""
+        assert self.mods_install_old.install("trimgalore")
+        update_obj = ModuleUpdate(
+            self.pipeline_dir, show_diff=False, update_deps=True, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH
+        )
+
+        # Copy the module files and check that they are affected by the update
+        tmpdir = Path(tempfile.TemporaryDirectory().name)
+        trimgalore_tmpdir = tmpdir / "trimgalore"
+        trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore")
+        shutil.copytree(trimgalore_path, trimgalore_tmpdir)
+
+        assert update_obj.update("trimgalore") is True
+        assert cmp_component(trimgalore_tmpdir, trimgalore_path) is False
+
+        # Check that the modules.json is correctly updated
+        mod_json_obj = ModulesJson(self.pipeline_dir)
+        mod_json = mod_json_obj.get_modules_json()
+        # Get the up-to-date git_sha for the module from the ModulesRepo object
+        correct_git_sha = update_obj.modules_repo.get_latest_component_version("trimgalore", "modules")
+        current_git_sha = mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"]
+        assert correct_git_sha == current_git_sha
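+
+    # The modules.json lookups in these tests assume a structure roughly like the
+    # sketch below (illustrative only; the real file stores additional fields per
+    # module entry):
+    #
+    #     {
+    #         "repos": {
+    #             "<remote git URL>": {
+    #                 "modules": {
+    #                     "<org path, e.g. GITLAB_REPO>": {
+    #                         "trimgalore": {"branch": "...", "git_sha": "..."}
+    #                     }
+    #                 }
+    #             }
+    #         }
+    #     }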
+
+    # Mock questionary answer: accept the prompt and apply the update
+    @mock.patch.object(questionary.Question, "unsafe_ask", return_value=True)
+    def test_install_at_hash_and_update_limit_output(self, mock_prompt):
+        """Installs an old version of a module in the pipeline and updates it with limited output reporting"""
+        self.caplog.set_level(logging.INFO)
+        assert self.mods_install_old.install("trimgalore")
+
+        update_obj = ModuleUpdate(
+            self.pipeline_dir,
+            show_diff=True,
+            update_deps=True,
+            remote_url=GITLAB_URL,
+            branch=OLD_TRIMGALORE_BRANCH,
+            limit_output=True,
+        )
+        assert update_obj.update("trimgalore")
+
+        # Check changes not shown for non-.nf files
+        assert "Changes in 'trimgalore/meta.yml' but not shown" in self.caplog.text
+        # Check changes shown for .nf files
+        assert "Changes in 'trimgalore/main.nf'" in self.caplog.text
+        for line in self.caplog.text.split("\n"):
+            if line.startswith("---"):
+                assert line.endswith("main.nf")
+
+    def test_install_at_hash_and_update_and_save_diff_to_file(self):
+        """Installs an old version of a module in the pipeline and updates it, saving the diff to a file"""
+        self.mods_install_old.install("trimgalore")
+        patch_path = Path(self.pipeline_dir, "trimgalore.patch")
+        update_obj = ModuleUpdate(
+            self.pipeline_dir,
+            save_diff_fn=patch_path,
+            sha=OLD_TRIMGALORE_SHA,
+            remote_url=GITLAB_URL,
+            branch=OLD_TRIMGALORE_BRANCH,
+        )
+
+        # Copy the module files and check that they are left unchanged by the update
+        tmpdir = Path(tempfile.TemporaryDirectory().name)
+        trimgalore_tmpdir = tmpdir / "trimgalore"
+        trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore")
+        shutil.copytree(trimgalore_path, trimgalore_tmpdir)
+
+        assert update_obj.update("trimgalore") is True
+        assert cmp_component(trimgalore_tmpdir, trimgalore_path) is True
+
+        # TODO: Apply the patch to the module
+
+    def test_install_at_hash_and_update_and_save_diff_to_file_limit_output(self):
+        """Installs an old version of a module, then updates it, saving the diff to a file with limited output"""
+        # Install old version of trimgalore
+        self.mods_install_old.install("trimgalore")
+        patch_path = Path(self.pipeline_dir, "trimgalore.patch")
+        # Update saving the differences to a patch file and with `limit_output`
+        update_obj = ModuleUpdate(
+            self.pipeline_dir,
+            save_diff_fn=patch_path,
+            remote_url=GITLAB_URL,
+            branch=OLD_TRIMGALORE_BRANCH,
+            limit_output=True,
+        )
+        assert update_obj.update("trimgalore")
+
+        # Check that the patch file was created
+        assert patch_path.exists(), f"Patch file was not created at {patch_path}"
+
+        # Read the contents of the patch file
+        with open(patch_path) as fh:
+            patch_content = fh.read()
+        # Check changes not shown for non-.nf files
+        assert "Changes in 'trimgalore/meta.yml' but not shown" in patch_content
+        # Check changes only shown for main.nf
+        assert "Changes in 'trimgalore/main.nf'" in patch_content
+        for line in patch_content.split("\n"):
+            if line.startswith("---"):
+                assert line.endswith("main.nf")
+
+    def test_update_all(self):
+        """Updates all modules present in the pipeline"""
+        update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False)
+        # Update all modules in the pipeline
+        assert update_obj.update() is True
+
+        # We must reload the modules.json to get the updated version
+        mod_json_obj = ModulesJson(self.pipeline_dir)
+        mod_json = mod_json_obj.get_modules_json()
+        # Loop through all modules and check that they are updated (according to the modules.json file)
+        for mod in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]:
+            correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"]
+            current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"]
+            assert correct_git_sha == current_git_sha
+
+    def test_update_with_config_fixed_version(self):
+        """Try updating when there are entries in the .nf-core.yml"""
+        # Install trimgalore at the latest version
+        assert self.mods_install_trimgalore.install("trimgalore")
+
+        # Fix the trimgalore version in the .nf-core.yml to an old version
+        update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": OLD_TRIMGALORE_SHA}}}
+        config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir)
+        tools_config["update"] = update_config
+        with open(Path(self.pipeline_dir, config_fn), "w") as f:
+            yaml.dump(tools_config, f)
+
+        # Update all modules in the pipeline
+        update_obj = ModuleUpdate(
+            self.pipeline_dir, update_all=True, show_diff=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH
+        )
+        assert update_obj.update() is True
+
+        # Check that the git sha for trimgalore is correctly downgraded
+        mod_json = ModulesJson(self.pipeline_dir).get_modules_json()
+        assert "trimgalore" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]
+        assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]
+        assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA
+
+    def test_update_with_config_dont_update(self):
+        """Try updating when module is to be ignored"""
+        # Install an old version of trimgalore
+        self.mods_install_old.install("trimgalore")
+
+        # Set the trimgalore field to no update in the .nf-core.yml
+        update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": False}}}
+        config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir)
+        tools_config["update"] = update_config
+        with open(Path(self.pipeline_dir, config_fn), "w") as f:
+            yaml.dump(tools_config, f)
+
+        # Update all modules in the pipeline
+        update_obj = ModuleUpdate(
+            self.pipeline_dir,
+            update_all=True,
+            show_diff=False,
+            sha=OLD_TRIMGALORE_SHA,
+            remote_url=GITLAB_URL,
+            branch=OLD_TRIMGALORE_BRANCH,
+        )
+        assert update_obj.update() is True
+
+        # Check that the git sha for trimgalore is correctly downgraded
+        mod_json = ModulesJson(self.pipeline_dir).get_modules_json()
+        assert "trimgalore" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]
+        assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]
+        assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA
+
+    def test_update_with_config_fix_all(self):
+        """Fix the version of all nf-core modules"""
+        self.mods_install_trimgalore.install("trimgalore")
+
+        # Fix the version of all nf-core modules in the .nf-core.yml to an old version
+        update_config = {GITLAB_URL: OLD_TRIMGALORE_SHA}
+        config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir)
+        tools_config["update"] = update_config
+        with open(Path(self.pipeline_dir, config_fn), "w") as f:
+            yaml.dump(tools_config, f)
+
+        # Update all modules in the pipeline
+        update_obj = ModuleUpdate(
+            self.pipeline_dir, update_all=True, show_diff=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH
+        )
+        assert update_obj.update() is True
+
+        # Check that the git sha for trimgalore is correctly downgraded
+        mod_json = ModulesJson(self.pipeline_dir).get_modules_json()
+        assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]
+        assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA
+
+    def test_update_with_config_no_updates(self):
+        """Don't update any nf-core modules"""
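+        # The `update` entry these tests write to `.nf-core.yml` can take several
+        # shapes (illustrative sketch; angle brackets stand for the test constants
+        # used above):
+        #
+        #     update:
+        #         <GITLAB_URL>: False          # never update modules from this remote
+        #
+        #     update:
+        #         <GITLAB_URL>: <SHA>          # pin every module from the remote to one commit
+        #
+        #     update:
+        #         <GITLAB_URL>:
+        #             <GITLAB_REPO>:
+        #                 trimgalore: False    # skip one module, or give a SHA to pin it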
+        assert self.mods_install_old.install("trimgalore")
+        old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json()
+
+        # Disable updates for every module from the GitLab remote in the .nf-core.yml
+        update_config = {GITLAB_URL: False}
+        config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir)
+        tools_config["update"] = update_config
+        with open(Path(self.pipeline_dir, config_fn), "w") as f:
+            yaml.dump(tools_config, f)
+
+        # Update all modules in the pipeline
+        update_obj = ModuleUpdate(
+            self.pipeline_dir,
+            update_all=True,
+            show_diff=False,
+            sha=OLD_TRIMGALORE_SHA,
+            remote_url=GITLAB_URL,
+            branch=OLD_TRIMGALORE_BRANCH,
+        )
+        assert update_obj.update() is True
+
+        # Check that none of the modules has changed
+        mod_json = ModulesJson(self.pipeline_dir).get_modules_json()
+        for module in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]:
+            assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]
+            assert (
+                mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]["git_sha"]
+                == old_mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]["git_sha"]
+            )
+
+    def test_update_different_branch_single_module(self):
+        """Try updating a module in a specific branch"""
+        install_obj = ModuleInstall(
+            self.pipeline_dir,
+            prompt=False,
+            force=False,
+            remote_url=GITLAB_URL,
+            branch=GITLAB_BRANCH_TEST_BRANCH,
+            sha=GITLAB_BRANCH_TEST_OLD_SHA,
+        )
+        assert install_obj.install("fastp")
+
+        update_obj = ModuleUpdate(
+            self.pipeline_dir,
+            update_deps=True,
+            remote_url=GITLAB_URL,
+            branch=GITLAB_BRANCH_TEST_BRANCH,
+            show_diff=False,
+        )
+        update_obj.update("fastp")
+
+        # Verify that the branch entry was updated correctly
+        modules_json = ModulesJson(self.pipeline_dir)
+        assert (
+            modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO)
+            == GITLAB_BRANCH_TEST_BRANCH
+        )
+        assert modules_json.get_module_version("fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA
+
+    def test_update_different_branch_mixed_modules_main(self):
+        """Try updating all modules where MultiQC is installed from main branch"""
+        # Install fastp
+        assert self.mods_install_gitlab_old.install("fastp")
+
+        # Install MultiQC from gitlab default branch
+        assert self.mods_install_gitlab.install("multiqc")
+
+        # Try updating
+        update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False)
+        assert update_obj.update() is True
+
+        modules_json = ModulesJson(self.pipeline_dir)
+        # Verify that the branch entry was updated correctly
+        assert (
+            modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO)
+            == GITLAB_BRANCH_TEST_BRANCH
+        )
+        assert modules_json.get_module_version("fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA
+        # MultiQC is present in both branches but should've been updated using the 'main' branch
+        assert (
+            modules_json.get_component_branch(self.component_type, "multiqc", GITLAB_URL, GITLAB_REPO)
+            == GITLAB_DEFAULT_BRANCH
+        )
+
+    def test_update_different_branch_mix_modules_branch_test(self):
+        """Try updating all modules where MultiQC is installed from branch-test branch"""
+        # Install multiqc from the branch-test branch
+        assert self.mods_install_gitlab_old.install(
+            "multiqc"
+        )  # Force as the same module is installed from github nf-core modules repo
+        modules_json = ModulesJson(self.pipeline_dir)
+        update_obj = ModuleUpdate(
+            self.pipeline_dir,
+            update_all=True,
+            show_diff=False,
+            remote_url=GITLAB_URL,
+            branch=GITLAB_BRANCH_TEST_BRANCH,
+            sha=GITLAB_BRANCH_TEST_NEW_SHA,
+        )
+        assert update_obj.update()
+
+        assert (
+            modules_json.get_component_branch(self.component_type, "multiqc", GITLAB_URL, GITLAB_REPO)
+            == GITLAB_BRANCH_TEST_BRANCH
+        )
+        assert modules_json.get_module_version("multiqc", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA
+
+    # Mock questionary answer: do not update module, only show diffs
+    @mock.patch.object(questionary.Question, "unsafe_ask", return_value=False)
+    def test_update_only_show_differences(self, mock_prompt):
+        """Try updating all modules showing differences.
+        Only show diffs, don't actually save any updated files.
+        Check that the sha in modules.json is not changed."""
+
+        # Update modules to a fixed old SHA
+        update_old = ModuleUpdate(
+            self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905"
+        )
+        assert update_old.update()
+
+        tmpdir = Path(tempfile.TemporaryDirectory().name)
+        shutil.copytree(Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME), tmpdir)
+
+        update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True)
+        assert update_obj.update()
+
+        mod_json = ModulesJson(self.pipeline_dir).get_modules_json()
+        # Loop through all modules and check that they are NOT updated (according to the modules.json file)
+        # A module that can be updated but shouldn't is fastqc
+        # Module multiqc is already up to date so don't check
+        mod = "fastqc"
+        non_updated_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"]
+        current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"]
+        assert non_updated_git_sha != current_git_sha
+        assert cmp_component(Path(tmpdir, mod), Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, mod)) is True
+
+    # Mock questionary answer: do not update module, only show diffs
+    @mock.patch.object(questionary.Question, "unsafe_ask", return_value=False)
+    def test_update_only_show_differences_when_patch(self, mock_prompt):
+        """Try updating all modules showing differences when there's a patched module.
+        Don't update some of them.
+ Check that the sha in modules.json is not changed.""" + modules_json = ModulesJson(self.pipeline_dir) + update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True) + + # Update modules to a fixed old SHA + update_old = ModuleUpdate( + self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905" + ) + assert update_old.update() + + # Modify fastqc module, it will have a patch which will be applied during update + # We modify fastqc because it's one of the modules that can be updated and there's another one before it (custom/dumpsoftwareversions) + module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc") + main_path = Path(module_path, "main.nf") + with open(main_path) as fh: + lines = fh.readlines() + for line_index in range(len(lines)): + if lines[line_index] == " label 'process_medium'\n": + lines[line_index] = " label 'process_low'\n" + break + with open(main_path, "w") as fh: + fh.writelines(lines) + # Create a patch file + patch_obj = ModulePatch(self.pipeline_dir) + patch_obj.patch("fastqc") + # Check that a patch file with the correct name has been created + assert "fastqc.diff" in [f.name for f in module_path.glob("*.diff")] + + # Update all modules + assert update_obj.update() is True + + mod_json = modules_json.get_modules_json() + # Loop through all modules and check that they are NOT updated (according to the modules.json file) + # A module that can be updated but shouldn't is fastqc + # Module multiqc is already up to date so don't check + mod = "fastqc" + correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + assert correct_git_sha != current_git_sha + + def test_update_module_with_extra_config_file(self): + """Try updating a module with a config file""" + # Install the module + assert self.mods_install.install("trimgalore") + # Add a nextflow_test.config file to the module + trimgalore_path = Path(self.pipeline_dir, "modules", "nf-core", "trimgalore") + Path(trimgalore_path, "nextflow_test.config").touch() + with open(Path(trimgalore_path, "nextflow_test.config"), "w") as fh: + fh.write("params.my_param = 'my_value'\n") + # Update the module + update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False) + assert update_obj.update("trimgalore") + # Check that the nextflow_test.config file is still there + assert Path(trimgalore_path, "nextflow_test.config").exists() + with open(Path(trimgalore_path, "nextflow_test.config")) as fh: + assert "params.my_param = 'my_value'" in fh.read() diff --git a/tests/modules/update.py b/tests/modules/update.py deleted file mode 100644 index e02b058fbb..0000000000 --- a/tests/modules/update.py +++ /dev/null @@ -1,444 +0,0 @@ -import logging -import shutil -import tempfile -from pathlib import Path -from unittest import mock - -import questionary -import yaml - -import nf_core.utils -from nf_core.modules.install import ModuleInstall -from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE -from nf_core.modules.patch import ModulePatch -from nf_core.modules.update import ModuleUpdate - -from ..utils import ( - GITLAB_BRANCH_TEST_BRANCH, - GITLAB_BRANCH_TEST_NEW_SHA, - GITLAB_BRANCH_TEST_OLD_SHA, - GITLAB_DEFAULT_BRANCH, - GITLAB_REPO, - GITLAB_URL, - OLD_TRIMGALORE_BRANCH, - OLD_TRIMGALORE_SHA, - cmp_component, -) - - -def 
test_install_and_update(self): - """Installs a module in the pipeline and updates it (no change)""" - self.mods_install.install("trimgalore") - update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False) - - # Copy the module files and check that they are unaffected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - trimgalore_tmpdir = tmpdir / "trimgalore" - trimgalore_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "trimgalore") - shutil.copytree(trimgalore_path, trimgalore_tmpdir) - - assert update_obj.update("trimgalore") is True - assert cmp_component(trimgalore_tmpdir, trimgalore_path) is True - - -def test_install_at_hash_and_update(self): - """Installs an old version of a module in the pipeline and updates it""" - assert self.mods_install_old.install("trimgalore") - update_obj = ModuleUpdate( - self.pipeline_dir, show_diff=False, update_deps=True, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH - ) - - # Copy the module files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - trimgalore_tmpdir = tmpdir / "trimgalore" - trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") - shutil.copytree(trimgalore_path, trimgalore_tmpdir) - - assert update_obj.update("trimgalore") is True - assert cmp_component(trimgalore_tmpdir, trimgalore_path) is False - - # Check that the modules.json is correctly updated - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - # Get the up-to-date git_sha for the module from the ModulesRepo object - correct_git_sha = update_obj.modules_repo.get_latest_component_version("trimgalore", "modules") - current_git_sha = mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] - assert correct_git_sha == current_git_sha - - -# Mock questionary answer: do not update module, only show diffs -@mock.patch.object(questionary.Question, "unsafe_ask", return_value=True) -def test_install_at_hash_and_update_limit_output(self, mock_prompt): - """Installs an old version of a module in the pipeline and updates it with limited output reporting""" - self.caplog.set_level(logging.INFO) - assert self.mods_install_old.install("trimgalore") - - update_obj = ModuleUpdate( - self.pipeline_dir, - show_diff=True, - update_deps=True, - remote_url=GITLAB_URL, - branch=OLD_TRIMGALORE_BRANCH, - limit_output=True, - ) - assert update_obj.update("trimgalore") - - # Check changes not shown for non-.nf files - assert "Changes in 'trimgalore/meta.yml' but not shown" in self.caplog.text - # Check changes shown for .nf files - assert "Changes in 'trimgalore/main.nf'" in self.caplog.text - for line in self.caplog.text.split("\n"): - if line.startswith("---"): - assert line.endswith("main.nf") - - -def test_install_at_hash_and_update_and_save_diff_to_file(self): - """Installs an old version of a module in the pipeline and updates it""" - self.mods_install_old.install("trimgalore") - patch_path = Path(self.pipeline_dir, "trimgalore.patch") - update_obj = ModuleUpdate( - self.pipeline_dir, - save_diff_fn=patch_path, - sha=OLD_TRIMGALORE_SHA, - remote_url=GITLAB_URL, - branch=OLD_TRIMGALORE_BRANCH, - ) - - # Copy the module files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - trimgalore_tmpdir = tmpdir / "trimgalore" - trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") - shutil.copytree(trimgalore_path, trimgalore_tmpdir) - - assert 
update_obj.update("trimgalore") is True - assert cmp_component(trimgalore_tmpdir, trimgalore_path) is True - - # TODO: Apply the patch to the module - - -def test_install_at_hash_and_update_and_save_diff_to_file_limit_output(self): - """Installs an old version of a module in the pipeline and updates it""" - # Install old version of trimgalore - self.mods_install_old.install("trimgalore") - patch_path = Path(self.pipeline_dir, "trimgalore.patch") - # Update saving the differences to a patch file and with `limit_output` - update_obj = ModuleUpdate( - self.pipeline_dir, - save_diff_fn=patch_path, - remote_url=GITLAB_URL, - branch=OLD_TRIMGALORE_BRANCH, - limit_output=True, - ) - assert update_obj.update("trimgalore") - - # Check that the patch file was created - assert patch_path.exists(), f"Patch file was not created at {patch_path}" - - # Read the contents of the patch file - with open(patch_path) as fh: - patch_content = fh.read() - # Check changes not shown for non-.nf files - assert "Changes in 'trimgalore/meta.yml' but not shown" in patch_content - # Check changes only shown for main.nf - assert "Changes in 'trimgalore/main.nf'" in patch_content - for line in patch_content: - if line.startswith("---"): - assert line.endswith("main.nf") - - -def test_update_all(self): - """Updates all modules present in the pipeline""" - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) - # Get the current modules.json - assert update_obj.update() is True - - # We must reload the modules.json to get the updated version - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - # Loop through all modules and check that they are updated (according to the modules.json file) - for mod in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]: - correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] - current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - assert correct_git_sha == current_git_sha - - -def test_update_with_config_fixed_version(self): - """Try updating when there are entries in the .nf-core.yml""" - # Install trimgalore at the latest version - assert self.mods_install_trimgalore.install("trimgalore") - - # Fix the trimgalore version in the .nf-core.yml to an old version - update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": OLD_TRIMGALORE_SHA}}} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = ModuleUpdate( - self.pipeline_dir, update_all=True, show_diff=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH - ) - assert update_obj.update() is True - - # Check that the git sha for trimgalore is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "trimgalore" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO] - assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] - assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA - - -def test_update_with_config_dont_update(self): - """Try updating when module is to be ignored""" - # Install an old version of trimgalore - self.mods_install_old.install("trimgalore") - - # Set the trimgalore field to no update in 
the .nf-core.yml - update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": False}}} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = ModuleUpdate( - self.pipeline_dir, - update_all=True, - show_diff=False, - sha=OLD_TRIMGALORE_SHA, - remote_url=GITLAB_URL, - branch=OLD_TRIMGALORE_BRANCH, - ) - assert update_obj.update() is True - - # Check that the git sha for trimgalore is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "trimgalore" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO] - assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] - assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA - - -def test_update_with_config_fix_all(self): - """Fix the version of all nf-core modules""" - self.mods_install_trimgalore.install("trimgalore") - - # Fix the version of all nf-core modules in the .nf-core.yml to an old version - update_config = {GITLAB_URL: OLD_TRIMGALORE_SHA} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = ModuleUpdate( - self.pipeline_dir, update_all=True, show_diff=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH - ) - assert update_obj.update() is True - - # Check that the git sha for trimgalore is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] - assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA - - -def test_update_with_config_no_updates(self): - """Don't update any nf-core modules""" - assert self.mods_install_old.install("trimgalore") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Fix the version of all nf-core modules in the .nf-core.yml to an old version - update_config = {GITLAB_URL: False} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = ModuleUpdate( - self.pipeline_dir, - update_all=True, - show_diff=False, - sha=OLD_TRIMGALORE_SHA, - remote_url=GITLAB_URL, - branch=OLD_TRIMGALORE_BRANCH, - ) - assert update_obj.update() is True - - # Check that the git sha for trimgalore is correctly downgraded and none of the modules has changed - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - for module in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]: - assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module] - assert ( - mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]["git_sha"] - == old_mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]["git_sha"] - ) - - -def test_update_different_branch_single_module(self): - """Try updating a module in a specific branch""" - install_obj = ModuleInstall( - self.pipeline_dir, - prompt=False, - force=False, - remote_url=GITLAB_URL, - branch=GITLAB_BRANCH_TEST_BRANCH, - 
sha=GITLAB_BRANCH_TEST_OLD_SHA, - ) - assert install_obj.install("fastp") - - update_obj = ModuleUpdate( - self.pipeline_dir, update_deps=True, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, show_diff=False - ) - update_obj.update("fastp") - - # Verify that the branch entry was updated correctly - modules_json = ModulesJson(self.pipeline_dir) - assert ( - modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) - == GITLAB_BRANCH_TEST_BRANCH - ) - assert modules_json.get_module_version("fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA - - -def test_update_different_branch_mixed_modules_main(self): - """Try updating all modules where MultiQC is installed from main branch""" - # Install fastp - assert self.mods_install_gitlab_old.install("fastp") - - # Install MultiQC from gitlab default branch - assert self.mods_install_gitlab.install("multiqc") - - # Try updating - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - modules_json = ModulesJson(self.pipeline_dir) - # Verify that the branch entry was updated correctly - assert ( - modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) - == GITLAB_BRANCH_TEST_BRANCH - ) - assert modules_json.get_module_version("fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA - # MultiQC is present in both branches but should've been updated using the 'main' branch - assert ( - modules_json.get_component_branch(self.component_type, "multiqc", GITLAB_URL, GITLAB_REPO) - == GITLAB_DEFAULT_BRANCH - ) - - -def test_update_different_branch_mix_modules_branch_test(self): - """Try updating all modules where MultiQC is installed from branch-test branch""" - # Install multiqc from the branch-test branch - assert self.mods_install_gitlab_old.install( - "multiqc" - ) # Force as the same module is installed from github nf-core modules repo - modules_json = ModulesJson(self.pipeline_dir) - update_obj = ModuleUpdate( - self.pipeline_dir, - update_all=True, - show_diff=False, - remote_url=GITLAB_URL, - branch=GITLAB_BRANCH_TEST_BRANCH, - sha=GITLAB_BRANCH_TEST_NEW_SHA, - ) - assert update_obj.update() - - assert ( - modules_json.get_component_branch(self.component_type, "multiqc", GITLAB_URL, GITLAB_REPO) - == GITLAB_BRANCH_TEST_BRANCH - ) - assert modules_json.get_module_version("multiqc", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA - - -# Mock questionary answer: do not update module, only show diffs -@mock.patch.object(questionary.Question, "unsafe_ask", return_value=False) -def test_update_only_show_differences(self, mock_prompt): - """Try updating all modules showing differences. - Only show diffs, don't actually save any updated files. 
- Check that the sha in modules.json is not changed.""" - - # Update modules to a fixed old SHA - update_old = ModuleUpdate( - self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905" - ) - update_old.update() - - tmpdir = Path(tempfile.TemporaryDirectory().name) - shutil.copytree(Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME), tmpdir) - - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True) - assert ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True).update() - - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - # Loop through all modules and check that they are NOT updated (according to the modules.json file) - # A module that can be updated but shouldn't is fastqc - # Module multiqc is already up to date so don't check - mod = "fastqc" - non_updated_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] - current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - assert non_updated_git_sha != current_git_sha - assert cmp_component(Path(tmpdir, mod), Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, mod)) is True - - -# Mock questionary answer: do not update module, only show diffs -@mock.patch.object(questionary.Question, "unsafe_ask", return_value=False) -def test_update_only_show_differences_when_patch(self, mock_prompt): - """Try updating all modules showing differences when there's a patched module. - Don't update some of them. - Check that the sha in modules.json is not changed.""" - modules_json = ModulesJson(self.pipeline_dir) - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True) - - # Update modules to a fixed old SHA - update_old = ModuleUpdate( - self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905" - ) - assert update_old.update() - - # Modify fastqc module, it will have a patch which will be applied during update - # We modify fastqc because it's one of the modules that can be updated and there's another one before it (custom/dumpsoftwareversions) - module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc") - main_path = Path(module_path, "main.nf") - with open(main_path) as fh: - lines = fh.readlines() - for line_index in range(len(lines)): - if lines[line_index] == " label 'process_medium'\n": - lines[line_index] = " label 'process_low'\n" - break - with open(main_path, "w") as fh: - fh.writelines(lines) - # Create a patch file - patch_obj = ModulePatch(self.pipeline_dir) - patch_obj.patch("fastqc") - # Check that a patch file with the correct name has been created - assert "fastqc.diff" in [f.name for f in module_path.glob("*.diff")] - - # Update all modules - assert update_obj.update() is True - - mod_json = modules_json.get_modules_json() - # Loop through all modules and check that they are NOT updated (according to the modules.json file) - # A module that can be updated but shouldn't is fastqc - # Module multiqc is already up to date so don't check - mod = "fastqc" - correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] - current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - assert correct_git_sha != current_git_sha - - -def test_update_module_with_extra_config_file(self): - """Try updating a module with a config file""" - # Install the module - assert 
self.mods_install.install("trimgalore") - # Add a nextflow_test.config file to the module - trimgalore_path = Path(self.pipeline_dir, "modules", "nf-core", "trimgalore") - Path(trimgalore_path, "nextflow_test.config").touch() - with open(Path(trimgalore_path, "nextflow_test.config"), "w") as fh: - fh.write("params.my_param = 'my_value'\n") - # Update the module - update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False) - assert update_obj.update("trimgalore") - # Check that the nextflow_test.config file is still there - assert Path(trimgalore_path, "nextflow_test.config").exists() - with open(Path(trimgalore_path, "nextflow_test.config")) as fh: - assert "params.my_param = 'my_value'" in fh.read() diff --git a/tests/test_modules.py b/tests/test_modules.py index 6e601ce7ad..13bf32f971 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -160,131 +160,3 @@ def test_modulesrepo_class(self): @pytest.fixture(autouse=True) def _use_caplog(self, caplog): self.caplog = caplog - - ############################################ - # Test of the individual modules commands. # - ############################################ - - from .modules.bump_versions import ( # type: ignore[misc] - test_modules_bump_versions_all_modules, - test_modules_bump_versions_fail, - test_modules_bump_versions_fail_unknown_version, - test_modules_bump_versions_single_module, - ) - from .modules.create import ( # type: ignore[misc] - test_modules_create_fail_exists, - test_modules_create_nfcore_modules, - test_modules_create_nfcore_modules_subtool, - test_modules_create_succeed, - test_modules_migrate, - test_modules_migrate_no_delete, - test_modules_migrate_symlink, - ) - from .modules.info import ( # type: ignore[misc] - test_modules_info_in_modules_repo, - test_modules_info_local, - test_modules_info_remote, - test_modules_info_remote_gitlab, - ) - from .modules.install import ( # type: ignore[misc] - test_modules_install_alternate_remote, - test_modules_install_different_branch_fail, - test_modules_install_different_branch_succeed, - test_modules_install_emptypipeline, - test_modules_install_from_gitlab, - test_modules_install_nomodule, - test_modules_install_nopipeline, - test_modules_install_tracking, - test_modules_install_trimgalore, - test_modules_install_trimgalore_twice, - ) - from .modules.lint import ( # type: ignore[misc] - test_modules_absent_version, - test_modules_empty_file_in_snapshot, - test_modules_empty_file_in_stub_snapshot, - test_modules_environment_yml_file_doesnt_exists, - test_modules_environment_yml_file_name_mismatch, - test_modules_environment_yml_file_not_array, - test_modules_environment_yml_file_sorted_correctly, - test_modules_environment_yml_file_sorted_incorrectly, - test_modules_lint_check_process_labels, - test_modules_lint_check_url, - test_modules_lint_empty, - test_modules_lint_gitlab_modules, - test_modules_lint_multiple_remotes, - test_modules_lint_new_modules, - test_modules_lint_no_gitlab, - test_modules_lint_patched_modules, - test_modules_lint_snapshot_file, - test_modules_lint_snapshot_file_missing_fail, - test_modules_lint_snapshot_file_not_needed, - test_modules_lint_trimgalore, - test_modules_meta_yml_incorrect_licence_field, - test_modules_meta_yml_incorrect_name, - test_modules_meta_yml_input_mismatch, - test_modules_meta_yml_output_mismatch, - test_modules_missing_test_dir, - test_modules_missing_test_main_nf, - test_modules_unused_pytest_files, - test_nftest_failing_linting, - ) - from .modules.list import ( # type: ignore[misc] - 
test_modules_install_and_list_pipeline, - test_modules_install_gitlab_and_list_pipeline, - test_modules_list_in_wrong_repo_fail, - test_modules_list_local_json, - test_modules_list_pipeline, - test_modules_list_remote, - test_modules_list_remote_gitlab, - test_modules_list_remote_json, - test_modules_list_with_keywords, - test_modules_list_with_one_keyword, - test_modules_list_with_unused_keyword, - ) - from .modules.modules_json import ( # type: ignore[misc] - test_get_modules_json, - test_mod_json_create, - test_mod_json_create_with_patch, - test_mod_json_dump, - test_mod_json_get_module_version, - test_mod_json_module_present, - test_mod_json_repo_present, - test_mod_json_up_to_date, - test_mod_json_up_to_date_module_removed, - test_mod_json_up_to_date_reinstall_fails, - test_mod_json_update, - test_mod_json_with_empty_modules_value, - test_mod_json_with_missing_modules_entry, - ) - from .modules.patch import ( # type: ignore[misc] - test_create_patch_change, - test_create_patch_no_change, - test_create_patch_try_apply_failed, - test_create_patch_try_apply_successful, - test_create_patch_update_fail, - test_create_patch_update_success, - test_remove_patch, - ) - from .modules.remove import ( # type: ignore[misc] - test_modules_remove_multiqc_from_gitlab, - test_modules_remove_trimgalore, - test_modules_remove_trimgalore_uninstalled, - ) - from .modules.update import ( # type: ignore[misc] - test_install_and_update, - test_install_at_hash_and_update, - test_install_at_hash_and_update_and_save_diff_to_file, - test_install_at_hash_and_update_and_save_diff_to_file_limit_output, - test_install_at_hash_and_update_limit_output, - test_update_all, - test_update_different_branch_mix_modules_branch_test, - test_update_different_branch_mixed_modules_main, - test_update_different_branch_single_module, - test_update_module_with_extra_config_file, - test_update_only_show_differences, - test_update_only_show_differences_when_patch, - test_update_with_config_dont_update, - test_update_with_config_fix_all, - test_update_with_config_fixed_version, - test_update_with_config_no_updates, - ) From d95f7f8631f0605319f8a80d4ab2199b08e6fea4 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 18 Jul 2024 10:14:54 +0200 Subject: [PATCH 28/89] avoid naming collision with variable name `dir` --- nf_core/commands_pipelines.py | 8 ++-- nf_core/components/components_command.py | 52 +++++++++++------------ nf_core/components/components_test.py | 4 +- nf_core/components/install.py | 16 +++---- nf_core/components/lint/__init__.py | 54 +++++++++++++----------- nf_core/components/update.py | 49 ++++++++++++--------- nf_core/modules/lint/__init__.py | 9 ++-- nf_core/modules/modules_json.py | 18 ++++---- nf_core/pipelines/create_logo.py | 14 +++--- 9 files changed, 121 insertions(+), 103 deletions(-) diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index 3f569bfe35..deb1f691a5 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -308,7 +308,7 @@ def pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, usern # nf-core pipelines create-logo -def pipelines_create_logo(logo_text, dir, name, theme, width, format, force): +def pipelines_create_logo(logo_text, directory, name, theme, width, format, force): """ Generate a logo with the nf-core logo template. 
@@ -317,9 +317,9 @@ def pipelines_create_logo(logo_text, dir, name, theme, width, format, force): from nf_core.pipelines.create_logo import create_logo try: - if dir == ".": - dir = Path.cwd() - logo_path = create_logo(logo_text, dir, name, theme, width, format, force) + if directory == ".": + directory = Path.cwd() + logo_path = create_logo(logo_text, directory, name, theme, width, format, force) # Print path to logo relative to current working directory try: logo_path = Path(logo_path).relative_to(Path.cwd()) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index aa1dccc0d4..bf80b46113 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -22,7 +22,7 @@ class ComponentCommand: def __init__( self, component_type: str, - dir: Union[str, Path], + directory: Union[str, Path], remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -33,7 +33,7 @@ def __init__( Initialise the ComponentClass object """ self.component_type = component_type - self.dir = Path(dir) if dir else None + self.directory = Path(directory) if directory else None self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress) self.hide_progress = hide_progress self.no_prompts = no_prompts @@ -49,8 +49,8 @@ def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ try: - if self.dir: - self.dir, self.repo_type, self.org = get_repo_info(self.dir, use_prompt=not self.no_prompts) + if self.directory: + self.directory, self.repo_type, self.org = get_repo_info(self.directory, use_prompt=not self.no_prompts) else: self.repo_type = None self.org = "" @@ -68,7 +68,7 @@ def get_local_components(self) -> List[str]: """ Get the local modules/subworkflows in a pipeline """ - local_component_dir = Path(self.dir, self.component_type, "local") + local_component_dir = Path(self.directory, self.component_type, "local") return [ str(path.relative_to(local_component_dir)) for path in local_component_dir.iterdir() if path.suffix == ".nf" ] @@ -78,9 +78,9 @@ def get_components_clone_modules(self) -> List[str]: Get the modules/subworkflows repository available in a clone of nf-core/modules """ if self.component_type == "modules": - component_base_path = Path(self.dir, self.default_modules_path) + component_base_path = Path(self.directory, self.default_modules_path) elif self.component_type == "subworkflows": - component_base_path = Path(self.dir, self.default_subworkflows_path) + component_base_path = Path(self.directory, self.default_subworkflows_path) return [ str(Path(dir).relative_to(component_base_path)) for dir, _, files in os.walk(component_base_path) @@ -91,23 +91,23 @@ def has_valid_directory(self) -> bool: """Check that we were given a pipeline or clone of nf-core/modules""" if self.repo_type == "modules": return True - if self.dir is None or not os.path.exists(self.dir): - log.error(f"Could not find directory: {self.dir}") + if self.directory is None or not os.path.exists(self.directory): + log.error(f"Could not find directory: {self.directory}") return False - main_nf = os.path.join(self.dir, "main.nf") - nf_config = os.path.join(self.dir, "nextflow.config") + main_nf = os.path.join(self.directory, "main.nf") + nf_config = os.path.join(self.directory, "nextflow.config") if not os.path.exists(main_nf) and not os.path.exists(nf_config): - if Path(self.dir).resolve().parts[-1].startswith("nf-core"): - raise UserWarning(f"Could not find a 'main.nf' or 'nextflow.config' file in 
'{self.dir}'") - log.warning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.dir}'") + if Path(self.directory).resolve().parts[-1].startswith("nf-core"): + raise UserWarning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.directory}'") + log.warning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.directory}'") return True def has_modules_file(self) -> None: """Checks whether a module.json file has been created and creates one if it is missing""" - modules_json_path = os.path.join(self.dir, "modules.json") + modules_json_path = os.path.join(self.directory, "modules.json") if not os.path.exists(modules_json_path): log.info("Creating missing 'module.json' file.") - ModulesJson(self.dir).create() + ModulesJson(self.directory).create() def clear_component_dir(self, component_name: str, component_dir: str) -> bool: """ @@ -122,7 +122,7 @@ def clear_component_dir(self, component_name: str, component_dir: str) -> bool: try: shutil.rmtree(component_dir) # remove all empty directories - for dir_path, dir_names, filenames in os.walk(self.dir, topdown=False): + for dir_path, dir_names, filenames in os.walk(self.directory, topdown=False): if not dir_names and not filenames: try: os.rmdir(dir_path) @@ -147,7 +147,7 @@ def components_from_repo(self, install_dir: str) -> List[str]: Returns: [str]: The names of the modules/subworkflows """ - repo_dir = Path(self.dir, self.component_type, install_dir) + repo_dir = Path(self.directory, self.component_type, install_dir) if not repo_dir.exists(): raise LookupError(f"Nothing installed from {install_dir} in pipeline") @@ -180,7 +180,7 @@ def load_lint_config(self) -> None: Add parsed config to the `self.lint_config` class attribute. """ - _, tools_config = nf_core.utils.load_tools_config(self.dir) + _, tools_config = nf_core.utils.load_tools_config(self.directory) self.lint_config = tools_config.get("lint", {}) def check_modules_structure(self) -> None: @@ -193,9 +193,9 @@ def check_modules_structure(self) -> None: """ if self.repo_type == "pipeline": wrong_location_modules: List[Path] = [] - for directory, _, files in os.walk(Path(self.dir, "modules")): + for directory, _, files in os.walk(Path(self.directory, "modules")): if "main.nf" in files: - module_path = Path(directory).relative_to(Path(self.dir, "modules")) + module_path = Path(directory).relative_to(Path(self.directory, "modules")) parts = module_path.parts # Check that there are modules installed directly under the 'modules' directory if parts[1] == "modules": @@ -215,9 +215,9 @@ def check_modules_structure(self) -> None: wrong_dir = Path(modules_dir, module) shutil.move(str(wrong_dir), str(correct_dir)) log.info(f"Moved {wrong_dir} to {correct_dir}.") - shutil.rmtree(Path(self.dir, "modules", self.modules_repo.repo_path, "modules")) + shutil.rmtree(Path(self.directory, "modules", self.modules_repo.repo_path, "modules")) # Regenerate modules.json file - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) modules_json.check_up_to_date() def check_patch_paths(self, patch_path: Path, module_name: str) -> None: @@ -243,12 +243,12 @@ def check_patch_paths(self, patch_path: Path, module_name: str) -> None: for line in lines: fh.write(line) # Update path in modules.json if the file is in the correct format - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) modules_json.load() if modules_json.has_git_url_and_modules() and modules_json.modules_json is not None: 
modules_json.modules_json["repos"][self.modules_repo.remote_url]["modules"][ self.modules_repo.repo_path - ][module_name]["patch"] = str(patch_path.relative_to(Path(self.dir).resolve())) + ][module_name]["patch"] = str(patch_path.relative_to(Path(self.directory).resolve())) modules_json.dump() def check_if_in_include_stmts(self, component_path: str) -> Dict[str, List[Dict[str, Union[int, str]]]]: @@ -262,7 +262,7 @@ def check_if_in_include_stmts(self, component_path: str) -> Dict[str, List[Dict[ """ include_stmts: Dict[str, List[Dict[str, Union[int, str]]]] = {} if self.repo_type == "pipeline": - workflow_files = Path(self.dir, "workflows").glob("*.nf") + workflow_files = Path(self.directory, "workflows").glob("*.nf") for workflow_file in workflow_files: with open(workflow_file) as fh: # Check if component path is in the file using mmap diff --git a/nf_core/components/components_test.py b/nf_core/components/components_test.py index 9b81f54f06..f9b891004a 100644 --- a/nf_core/components/components_test.py +++ b/nf_core/components/components_test.py @@ -93,7 +93,7 @@ def run(self) -> None: os.environ["NFT_DIFF_ARGS"] = ( "--line-numbers --expand-tabs=2" # taken from https://code.askimed.com/nf-test/docs/assertions/snapshots/#snapshot-differences ) - with nf_core.utils.set_wd(Path(self.dir)): + with nf_core.utils.set_wd(Path(self.directory)): self.check_snapshot_stability() if len(self.errors) > 0: errors = "\n - ".join(self.errors) @@ -126,7 +126,7 @@ def check_inputs(self) -> None: self.component_dir = Path(self.component_type, self.modules_repo.repo_path, *self.component_name.split("/")) # First, sanity check that the module directory exists - if not Path(self.dir, self.component_dir).is_dir(): + if not Path(self.directory, self.component_dir).is_dir(): raise UserWarning( f"Cannot find directory '{self.component_dir}'.{' Should be TOOL/SUBTOOL or TOOL' if self.component_type == 'modules' else ''}" ) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 6385ee4092..dddc3f93ec 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -54,7 +54,7 @@ def install(self, component, silent=False): self.check_modules_structure() # Verify that 'modules.json' is consistent with the installed modules and subworkflows - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) if not silent: modules_json.check_up_to_date() @@ -79,7 +79,7 @@ def install(self, component, silent=False): ) # Set the install folder based on the repository name - install_folder = Path(self.dir, self.component_type, self.modules_repo.repo_path) + install_folder = Path(self.directory, self.component_type, self.modules_repo.repo_path) # Compute the component directory component_dir = Path(install_folder, component) @@ -134,14 +134,14 @@ def install(self, component, silent=False): log.info(f"Use the following statement to include this {self.component_type[:-1]}:") Console().print( Syntax( - f"include {{ {component_name} }} from '../{Path(install_folder, component).relative_to(self.dir)}/main'", + f"include {{ {component_name} }} from '../{Path(install_folder, component).relative_to(self.directory)}/main'", "groovy", theme="ansi_dark", padding=1, ) ) if self.component_type == "subworkflows": - subworkflow_config = Path(install_folder, component, "nextflow.config").relative_to(self.dir) + subworkflow_config = Path(install_folder, component, "nextflow.config").relative_to(self.directory) if os.path.isfile(subworkflow_config): log.info("Add the following 
config statement to use this subworkflow:") Console().print( @@ -261,9 +261,9 @@ def clean_modules_json(self, component, modules_repo, modules_json): Remove installed version of module/subworkflow from modules.json """ for repo_url, repo_content in modules_json.modules_json["repos"].items(): - for dir, dir_components in repo_content[self.component_type].items(): + for directory, dir_components in repo_content[self.component_type].items(): for name, component_values in dir_components.items(): - if name == component and dir == modules_repo.repo_path: + if name == component and directory == modules_repo.repo_path: repo_to_remove = repo_url log.debug( f"Removing {self.component_type[:-1]} '{modules_repo.repo_path}/{component}' from repo '{repo_to_remove}' from modules.json." @@ -285,7 +285,7 @@ def check_alternate_remotes(self, modules_json): modules_json.load() for repo_url, repo_content in modules_json.modules_json.get("repos", dict()).items(): for component_type in repo_content: - for dir in repo_content.get(component_type, dict()).keys(): - if dir == self.modules_repo.repo_path and repo_url != self.modules_repo.remote_url: + for directory in repo_content.get(component_type, dict()).keys(): + if directory == self.modules_repo.repo_path and repo_url != self.modules_repo.remote_url: return True return False diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index 6d47f1e7a8..7dd39bd904 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -7,6 +7,7 @@ import operator import os from pathlib import Path +from typing import List, Optional, Union import rich.box import rich.console @@ -53,18 +54,18 @@ class ComponentLint(ComponentCommand): def __init__( self, - component_type, - dir, - fail_warned=False, - remote_url=None, - branch=None, - no_pull=False, - registry=None, - hide_progress=False, + component_type: str, + directory: Union[str, Path], + fail_warned: bool = False, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + registry: Optional[str] = None, + hide_progress: bool = False, ): super().__init__( component_type, - dir=dir, + directory=directory, remote_url=remote_url, branch=branch, no_pull=no_pull, @@ -72,18 +73,18 @@ def __init__( ) self.fail_warned = fail_warned - self.passed = [] - self.warned = [] - self.failed = [] + self.passed: List[str] = [] + self.warned: List[str] = [] + self.failed: List[str] = [] if self.component_type == "modules": self.lint_tests = self.get_all_module_lint_tests(self.repo_type == "pipeline") else: self.lint_tests = self.get_all_subworkflow_lint_tests(self.repo_type == "pipeline") if self.repo_type == "pipeline": - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) modules_json.check_up_to_date() - self.all_remote_components = [] + self.all_remote_components: List[NFCoreComponent] = [] for repo_url, components in modules_json.get_all_components(self.component_type).items(): if remote_url is not None and remote_url != repo_url: continue @@ -92,9 +93,9 @@ def __init__( NFCoreComponent( comp, repo_url, - Path(self.dir, self.component_type, org, comp), + Path(self.directory, self.component_type, org, comp), self.repo_type, - Path(self.dir), + Path(self.directory), self.component_type, ) ) @@ -102,7 +103,7 @@ def __init__( raise LookupError( f"No {self.component_type} from {self.modules_repo.remote_url} installed in pipeline." 
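
The empty-list attributes above gain annotations (`self.passed: List[str] = []`) because mypy cannot infer an element type for a bare `[]` assignment and reports "Need type annotation". A small self-contained illustration, with generic names:

    from typing import List

    class Linter:
        def __init__(self) -> None:
            # Without ": List[str]", mypy reports:
            #   error: Need type annotation for "passed"
            self.passed: List[str] = []

        def record(self, test_name: str) -> None:
            self.passed.append(test_name)
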
) - local_component_dir = Path(self.dir, self.component_type, "local") + local_component_dir = Path(self.directory, self.component_type, "local") self.all_local_components = [] if local_component_dir.exists(): self.all_local_components = [ @@ -111,20 +112,20 @@ def __init__( None, Path(local_component_dir, comp), self.repo_type, - Path(self.dir), + Path(self.directory), self.component_type, remote_component=False, ) for comp in self.get_local_components() ] - self.config = nf_core.utils.fetch_wf_config(Path(self.dir), cache_config=True) + self.config = nf_core.utils.fetch_wf_config(Path(self.directory), cache_config=True) else: component_dir = Path( - self.dir, + self.directory, self.default_modules_path if self.component_type == "modules" else self.default_subworkflows_path, ) self.all_remote_components = [ - NFCoreComponent(m, None, component_dir / m, self.repo_type, Path(self.dir), self.component_type) + NFCoreComponent(m, None, component_dir / m, self.repo_type, Path(self.directory), self.component_type) for m in self.get_components_clone_modules() ] self.all_local_components = [] @@ -132,7 +133,9 @@ def __init__( raise LookupError(f"No {self.component_type} in '{self.component_type}' directory") # This could be better, perhaps glob for all nextflow.config files in? - self.config = nf_core.utils.fetch_wf_config(Path(self.dir).joinpath("tests", "config"), cache_config=True) + self.config = nf_core.utils.fetch_wf_config( + Path(self.directory).joinpath("tests", "config"), cache_config=True + ) if registry is None: self.registry = self.config.get("docker.registry", "quay.io") @@ -143,6 +146,9 @@ def __init__( self.lint_config = None self.modules_json = None + def __repr__(self) -> str: + return f"ComponentLint({self.component_type}, {self.directory})" + @staticmethod def get_all_module_lint_tests(is_pipeline): if is_pipeline: @@ -168,7 +174,7 @@ def get_all_subworkflow_lint_tests(is_pipeline): def set_up_pipeline_files(self): self.load_lint_config() - self.modules_json = ModulesJson(self.dir) + self.modules_json = ModulesJson(self.directory) self.modules_json.load() # Only continue if a lint config has been loaded @@ -243,7 +249,7 @@ def format_result(test_results, table): module_name = lint_result.component_name # Make the filename clickable to open in VSCode - file_path = os.path.relpath(lint_result.file_path, self.dir) + file_path = os.path.relpath(lint_result.file_path, self.directory) file_path_link = f"[link=vscode://file/{os.path.abspath(file_path)}]{file_path}[/link]" table.add_row( diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 1e31e56271..9b24b6c0c9 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -51,7 +51,7 @@ def __init__( self.update_deps = update_deps self.component = None self.update_config = None - self.modules_json = ModulesJson(self.dir) + self.modules_json = ModulesJson(self.directory) self.branch = branch def _parameter_checks(self): @@ -96,9 +96,12 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr if updated is None: updated = [] - _, tool_config = nf_core.utils.load_tools_config(self.dir) + _, tool_config = nf_core.utils.load_tools_config(self.directory) self.update_config = tool_config.get("update", {}) + if self.update_config is None: + raise UserWarning("Could not find '.nf-core.yml' file in pipeline directory") + self._parameter_checks() # Check modules directory structure @@ -171,7 +174,7 @@ def update(self, component=None, silent=False, updated=None, 
check_diff_exist=Tr component_install_dir = install_tmp_dir / component # Compute the component directory - component_dir = os.path.join(self.dir, self.component_type, modules_repo.repo_path, component) + component_dir = Path(self.directory, self.component_type, modules_repo.repo_path, component) if sha is not None: version = sha @@ -318,7 +321,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr self.save_diff_fn, old_modules_json, self.modules_json.get_modules_json(), - Path(self.dir, "modules.json"), + Path(self.directory, "modules.json"), ) if exit_value and not silent: log.info( @@ -479,7 +482,9 @@ def get_all_components_info(self, branch=None): # Loop through all the modules/subworkflows in the pipeline # and check if they have an entry in the '.nf-core.yml' file for repo_name, components in self.modules_json.get_all_components(self.component_type).items(): - if repo_name not in self.update_config or self.update_config[repo_name] is True: + if isinstance(self.update_config, dict) and ( + repo_name not in self.update_config or self.update_config[repo_name] is True + ): # There aren't restrictions for the repository in .nf-core.yml file components_info[repo_name] = {} for component_dir, component in components: @@ -503,7 +508,7 @@ def get_all_components_info(self, branch=None): ), ) ] - elif isinstance(self.update_config[repo_name], dict): + elif isinstance(self.update_config, dict) and isinstance(self.update_config[repo_name], dict): # If it is a dict, then there are entries for individual components or component directories for component_dir in set([dir for dir, _ in components]): if isinstance(self.update_config[repo_name][component_dir], str): @@ -535,8 +540,8 @@ def get_all_components_info(self, branch=None): if self.sha is not None: overridden_repos.append(repo_name) elif self.update_config[repo_name][component_dir] is False: - for dir, component in components: - if dir == component_dir: + for directory, component in components: + if directory == component_dir: skipped_components.append(f"{component_dir}/{components}") elif isinstance(self.update_config[repo_name][component_dir], dict): # If it's a dict, there are entries for individual components @@ -596,7 +601,7 @@ def get_all_components_info(self, branch=None): raise UserWarning( f"{self.component_type[:-1].title()} '{component}' in '{component_dir}' has an invalid entry in '.nf-core.yml'" ) - elif isinstance(self.update_config[repo_name], str): + elif isinstance(self.update_config, dict) and isinstance(self.update_config[repo_name], str): # If a string is given it is the commit SHA to which we should update to custom_sha = self.update_config[repo_name] components_info[repo_name] = {} @@ -623,8 +628,10 @@ def get_all_components_info(self, branch=None): ] if self.sha is not None: overridden_repos.append(repo_name) - elif self.update_config[repo_name] is False: + elif isinstance(self.update_config, dict) and self.update_config[repo_name] is False: skipped_repos.append(repo_name) + elif not isinstance(self.update_config, dict): + raise UserWarning("`.nf-core.yml` is not correctly formatted.") else: raise UserWarning(f"Repo '{repo_name}' has an invalid entry in '.nf-core.yml'") @@ -712,8 +719,10 @@ def setup_diff_file(self, check_diff_exist=True): self.save_diff_fn = questionary.path( "Enter the filename: ", style=nf_core.utils.nfcore_question_style ).unsafe_ask() - - self.save_diff_fn = Path(self.save_diff_fn) + if self.save_diff_fn is not None: + self.save_diff_fn = Path(self.save_diff_fn) + 
else: + raise UserWarning("No filename provided for saving the diff file") if not check_diff_exist: # This guarantees that the file exists after calling the function @@ -744,7 +753,7 @@ def move_files_from_tmp_dir(self, component: str, install_folder: str, repo_path """ temp_component_dir = Path(install_folder, component) files = [file_path for file_path in temp_component_dir.rglob("*") if file_path.is_file()] - pipeline_path = Path(self.dir, self.component_type, repo_path, component) + pipeline_path = Path(self.directory, self.component_type, repo_path, component) if pipeline_path.exists(): pipeline_files = [f.name for f in pipeline_path.iterdir() if f.is_file()] @@ -795,7 +804,7 @@ def try_apply_patch( component_fullname = str(Path(repo_path, component)) log.info(f"Found patch for {self.component_type[:-1]} '{component_fullname}'. Trying to apply it to new files") - patch_path = Path(self.dir / patch_relpath) + patch_path = Path(self.directory / patch_relpath) component_relpath = Path(self.component_type, repo_path, component) # Check that paths in patch file are updated @@ -928,29 +937,31 @@ def update_linked_components( def manage_changes_in_linked_components(self, component, modules_to_update, subworkflows_to_update): """Check for linked components added or removed in the new subworkflow version""" if self.component_type == "subworkflows": - subworkflow_directory = Path(self.dir, self.component_type, self.modules_repo.repo_path, component) + subworkflow_directory = Path(self.directory, self.component_type, self.modules_repo.repo_path, component) included_modules, included_subworkflows = get_components_to_install(subworkflow_directory) # If a module/subworkflow has been removed from the subworkflow for module in modules_to_update: if module not in included_modules: log.info(f"Removing module '{module}' which is not included in '{component}' anymore.") - remove_module_object = ComponentRemove("modules", self.dir) + remove_module_object = ComponentRemove("modules", self.directory) remove_module_object.remove(module, removed_by=component) for subworkflow in subworkflows_to_update: if subworkflow not in included_subworkflows: log.info(f"Removing subworkflow '{subworkflow}' which is not included in '{component}' anymore.") - remove_subworkflow_object = ComponentRemove("subworkflows", self.dir) + remove_subworkflow_object = ComponentRemove("subworkflows", self.directory) remove_subworkflow_object.remove(subworkflow, removed_by=component) # If a new module/subworkflow is included in the subworklfow and wasn't included before for module in included_modules: if module not in modules_to_update: log.info(f"Installing newly included module '{module}' for '{component}'") - install_module_object = ComponentInstall(self.dir, "modules", installed_by=component) + install_module_object = ComponentInstall(self.directory, "modules", installed_by=component) install_module_object.install(module, silent=True) for subworkflow in included_subworkflows: if subworkflow not in subworkflows_to_update: log.info(f"Installing newly included subworkflow '{subworkflow}' for '{component}'") - install_subworkflow_object = ComponentInstall(self.dir, "subworkflows", installed_by=component) + install_subworkflow_object = ComponentInstall( + self.directory, "subworkflows", installed_by=component + ) install_subworkflow_object.install(subworkflow, silent=True) def _change_component_type(self, new_component_type): diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index b780144ef7..90d39104d2 
100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -11,6 +11,7 @@ import questionary import rich +import rich.progress import nf_core.modules.modules_utils import nf_core.utils @@ -39,7 +40,7 @@ class ModuleLint(ComponentLint): def __init__( self, - dir, + directory, fail_warned=False, remote_url=None, branch=None, @@ -49,7 +50,7 @@ def __init__( ): super().__init__( component_type="modules", - dir=dir, + directory=directory, fail_warned=fail_warned, remote_url=remote_url, branch=branch, @@ -127,9 +128,9 @@ def lint( remote_modules = self.all_remote_components if self.repo_type == "modules": - log.info(f"Linting modules repo: [magenta]'{self.dir}'") + log.info(f"Linting modules repo: [magenta]'{self.directory}'") else: - log.info(f"Linting pipeline: [magenta]'{self.dir}'") + log.info(f"Linting pipeline: [magenta]'{self.directory}'") if module: log.info(f"Linting module: [magenta]'{module}'") diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 2c2f1a32c9..63c356a79a 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -39,10 +39,10 @@ def __init__(self, pipeline_dir: Union[str, Path]): Args: pipeline_dir (str): The pipeline directory """ - self.dir = Path(pipeline_dir) - self.modules_dir = self.dir / "modules" - self.subworkflows_dir = self.dir / "subworkflows" - self.modules_json_path = self.dir / "modules.json" + self.directory = Path(pipeline_dir) + self.modules_dir = self.directory / "modules" + self.subworkflows_dir = self.directory / "subworkflows" + self.modules_json_path = self.directory / "modules.json" self.modules_json = None self.pipeline_modules = None self.pipeline_subworkflows = None @@ -63,7 +63,7 @@ def create(self): Raises: UserWarning: If the creation fails """ - pipeline_config = nf_core.utils.fetch_wf_config(self.dir) + pipeline_config = nf_core.utils.fetch_wf_config(self.directory) pipeline_name = pipeline_config.get("manifest.name", "") pipeline_url = pipeline_config.get("manifest.homePage", "") new_modules_json = {"name": pipeline_name.strip("'"), "homePage": pipeline_url.strip("'"), "repos": {}} @@ -72,7 +72,7 @@ def create(self): if rich.prompt.Confirm.ask( "[bold][blue]?[/] Can't find a ./modules directory. 
Would you like me to create one?", default=True ): - log.info(f"Creating ./modules directory in '{self.dir}'") + log.info(f"Creating ./modules directory in '{self.directory}'") self.modules_dir.mkdir() else: raise UserWarning("Cannot proceed without a ./modules directory.") @@ -153,7 +153,7 @@ def get_pipeline_module_repositories(self, component_type, directory, repos=None # The function might rename some directories, keep track of them renamed_dirs = {} # Check if there are any untracked repositories - dirs_not_covered = self.dir_tree_uncovered(directory, [Path(ModulesRepo(url).repo_path) for url in repos]) + dirs_not_covered = self.directory_tree_uncovered(directory, [Path(ModulesRepo(url).repo_path) for url in repos]) if len(dirs_not_covered) > 0: log.info(f"Found custom {component_type[:-1]} repositories when creating 'modules.json'") # Loop until all directories in the base directory are covered by a remote @@ -203,7 +203,7 @@ def get_pipeline_module_repositories(self, component_type, directory, repos=None if component_type not in repos[nrepo_remote]: repos[nrepo_remote][component_type] = {} repos[nrepo_remote][component_type][nrepo_name] = {} - dirs_not_covered = self.dir_tree_uncovered( + dirs_not_covered = self.directory_tree_uncovered( directory, [Path(name) for url in repos for name in repos[url][component_type]] ) @@ -816,7 +816,7 @@ def try_apply_patch_reverse(self, module, repo_name, patch_relpath, module_dir): LookupError: If patch was not applied """ module_fullname = str(Path(repo_name, module)) - patch_path = Path(self.dir / patch_relpath) + patch_path = Path(self.directory / patch_relpath) try: new_files = ModulesDiffer.try_apply_patch(module, repo_name, patch_path, module_dir, reverse=True) diff --git a/nf_core/pipelines/create_logo.py b/nf_core/pipelines/create_logo.py index 0643d2e295..f49e98e93c 100644 --- a/nf_core/pipelines/create_logo.py +++ b/nf_core/pipelines/create_logo.py @@ -12,7 +12,7 @@ def create_logo( text: str, - dir: Union[Path, str], + directory: Union[Path, str], filename: str = "", theme: str = "light", width: int = 2300, @@ -22,10 +22,10 @@ def create_logo( """Create a logo for a pipeline.""" if not text: raise UserWarning("Please provide the name of the text to put on the logo.") - dir = Path(dir) - if not dir.is_dir(): - log.debug(f"Creating directory {dir}") - dir.mkdir(parents=True, exist_ok=True) + directory = Path(directory) + if not directory.is_dir(): + log.debug(f"Creating directory {directory}") + directory.mkdir(parents=True, exist_ok=True) assets = Path(nf_core.__file__).parent / "assets/logo" if format == "svg": @@ -43,7 +43,7 @@ def create_logo( # save the svg logo_filename = f"nf-core-{text}_logo_{theme}.svg" if not filename else filename logo_filename = f"{logo_filename}.svg" if not logo_filename.lower().endswith(".svg") else logo_filename - logo_path = Path(dir, logo_filename) + logo_path = Path(directory, logo_filename) with open(logo_path, "w") as fh: fh.write(svg) @@ -51,7 +51,7 @@ def create_logo( logo_filename = f"nf-core-{text}_logo_{theme}.png" if not filename else filename logo_filename = f"{logo_filename}.png" if not logo_filename.lower().endswith(".png") else logo_filename cache_name = f"nf-core-{text}_logo_{theme}_{width}.png" - logo_path = Path(dir, logo_filename) + logo_path = Path(directory, logo_filename) # Check if we haven't already created this logo if logo_path.is_file() and not force: From 60973a19a2381f89b11ff6c408c9f150ff6a1d55 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 19 Jul 2024 06:45:58 +0200 Subject: 
[PATCH 29/89] fix ALL the mypy errors --- nf_core/commands_pipelines.py | 2 +- nf_core/components/components_command.py | 10 +- nf_core/components/components_utils.py | 8 +- nf_core/components/create.py | 2 +- nf_core/components/info.py | 66 +++++---- nf_core/components/install.py | 42 +++--- nf_core/components/lint/__init__.py | 38 +++--- nf_core/components/list.py | 10 +- nf_core/components/nfcore_component.py | 51 ++++--- nf_core/components/patch.py | 4 + nf_core/components/remove.py | 19 ++- nf_core/modules/bump_versions.py | 12 +- nf_core/modules/lint/__init__.py | 60 ++++++--- nf_core/modules/lint/environment_yml.py | 6 +- nf_core/modules/lint/main_nf.py | 21 ++- nf_core/modules/lint/meta_yml.py | 6 +- nf_core/modules/modules_differ.py | 16 ++- nf_core/modules/modules_json.py | 126 ++++++++++++------ nf_core/modules/modules_repo.py | 9 +- nf_core/modules/modules_utils.py | 12 +- .../subworkflows/lint/subworkflow_tests.py | 7 +- nf_core/synced_repo.py | 58 +++++--- nf_core/utils.py | 16 ++- 23 files changed, 381 insertions(+), 220 deletions(-) diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index deb1f691a5..432a36aaee 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -284,7 +284,7 @@ def pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, usern Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. To keep nf-core pipelines up to date with improvements in the main - template, we use a method of synchronisation that uses a special + template, we use a method of w that uses a special git branch called [cyan i]TEMPLATE[/]. This command updates the [cyan i]TEMPLATE[/] branch with the latest version of diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index bf80b46113..13a6fed338 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -33,7 +33,7 @@ def __init__( Initialise the ComponentClass object """ self.component_type = component_type - self.directory = Path(directory) if directory else None + self.directory = Path(directory) self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress) self.hide_progress = hide_progress self.no_prompts = no_prompts @@ -109,13 +109,13 @@ def has_modules_file(self) -> None: log.info("Creating missing 'module.json' file.") ModulesJson(self.directory).create() - def clear_component_dir(self, component_name: str, component_dir: str) -> bool: + def clear_component_dir(self, component_name: str, component_dir: Union[str, Path]) -> bool: """ Removes all files in the module/subworkflow directory Args: component_name (str): The name of the module/subworkflow - component_dir (str): The path to the module/subworkflow in the module repository + component_dir (str, Path): The path to the module/subworkflow """ @@ -156,7 +156,7 @@ def components_from_repo(self, install_dir: str) -> List[str]: ] def install_component_files( - self, component_name: str, component_version: str, modules_repo: ModulesRepo, install_dir: str + self, component_name: str, component_version: str, modules_repo: ModulesRepo, install_dir: Union[str, Path] ) -> bool: """ Installs a module/subworkflow into the given directory @@ -165,7 +165,7 @@ def install_component_files( component_name (str): The name of the module/subworkflow component_version (str): Git SHA for the version of the module/subworkflow to be installed modules_repo (ModulesRepo): A correctly configured ModulesRepo object - 
install_dir (str): The path to where the module/subworkflow should be installed (should be the 'modules/' or 'subworkflows/' dir of the pipeline) + install_dir (str, Path): The path to where the module/subworkflow should be installed (should be the 'modules/' or 'subworkflows/' dir of the pipeline) Returns: (bool): Whether the operation was successful of not diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 01650a643d..32f6c0fc11 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -1,7 +1,7 @@ import logging import re from pathlib import Path -from typing import List, Optional, Tuple +from typing import List, Optional, Tuple, Union import questionary import rich.prompt @@ -12,7 +12,7 @@ log = logging.getLogger(__name__) -def get_repo_info(directory: str, use_prompt: Optional[bool] = True) -> Tuple[str, Optional[str], str]: +def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[Path, Optional[str], str]: """ Determine whether this is a pipeline repository or a clone of nf-core/modules @@ -23,7 +23,7 @@ def get_repo_info(directory: str, use_prompt: Optional[bool] = True) -> Tuple[st raise UserWarning(f"Could not find directory: {directory}") # Try to find the root directory - base_dir: str = nf_core.utils.determine_base_dir(directory) + base_dir: Path = nf_core.utils.determine_base_dir(directory) # Figure out the repository type from the .nf-core.yml config file if we can config_fn, tools_config = nf_core.utils.load_tools_config(base_dir) @@ -132,7 +132,7 @@ def prompt_component_version_sha( return git_sha -def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str]]: +def get_components_to_install(subworkflow_dir: Union[str, Path]) -> Tuple[List[str], List[str]]: """ Parse the subworkflow main.nf file to retrieve all imported modules and subworkflows. 
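
For context on what `get_components_to_install` does: a subworkflow's `main.nf` declares its dependencies as Nextflow `include` statements, which the function parses out. A rough, hypothetical sketch of such parsing; the regex and helper are illustrative assumptions, not the actual implementation:

    import re
    from pathlib import Path

    INCLUDE_RE = re.compile(r"include\s*\{\s*(\w+)\s*\}\s*from\s*'([^']+)'")

    def find_includes(main_nf: Path):
        # Returns (component name, origin path) pairs for lines such as:
        #   include { FASTQC } from '../../../modules/nf-core/fastqc/main'
        return INCLUDE_RE.findall(main_nf.read_text())
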
""" diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 5d6c411bdc..532c2a46d6 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -29,7 +29,7 @@ class ComponentCreate(ComponentCommand): def __init__( self, component_type: str, - directory: str = ".", + directory: Path = Path("."), component: str = "", author: Optional[str] = None, process_label: Optional[str] = None, diff --git a/nf_core/components/info.py b/nf_core/components/info.py index 54fc0004dc..8597875af4 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -1,6 +1,7 @@ import logging import os from pathlib import Path +from typing import Dict, Optional, Union import questionary import yaml @@ -57,25 +58,26 @@ class ComponentInfo(ComponentCommand): def __init__( self, - component_type, - pipeline_dir, - component_name, - remote_url=None, - branch=None, - no_pull=False, + component_type: str, + pipeline_dir: Union[str, Path], + component_name: str, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, ): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) - self.meta = None - self.local_path = None - self.remote_location = None - self.local = None + self.meta: Optional[Dict] = None + self.local_path: Optional[Path] = None + self.remote_location: Optional[str] = None + self.local: bool = False + self.modules_json: Optional[ModulesJson] = None if self.repo_type == "pipeline": # Check modules directory structure if self.component_type == "modules": self.check_modules_structure() # Check modules.json up to date - self.modules_json = ModulesJson(self.dir) + self.modules_json = ModulesJson(self.directory) self.modules_json.check_up_to_date() else: self.modules_json = None @@ -95,6 +97,7 @@ def init_mod_name(self, component): Args: module: str: Module name to check """ + assert self.modules_json is not None # mypy if component is None: self.local = questionary.confirm( f"Is the {self.component_type[:-1]} locally installed?", style=nf_core.utils.nfcore_question_style @@ -103,12 +106,12 @@ def init_mod_name(self, component): if self.repo_type == "modules": components = self.get_components_clone_modules() else: - components = self.modules_json.get_all_components(self.component_type).get( - self.modules_repo.remote_url, {} + all_components = self.modules_json.get_all_components(self.component_type).get( + self.modules_repo.remote_url, [] ) components = [ component if directory == self.modules_repo.repo_path else f"{directory}/{component}" - for directory, component in components + for directory, component in all_components ] if not components: raise UserWarning( @@ -133,11 +136,13 @@ def init_mod_name(self, component): if self.repo_type == "pipeline": # check if the module is locally installed local_paths = self.modules_json.get_all_components(self.component_type).get( - self.modules_repo.remote_url, {} - ) + self.modules_repo.remote_url + ) # type: ignore + if local_paths is None: + raise LookupError(f"No {self.component_type[:-1]} installed from {self.modules_repo.remote_url}") for directory, comp in local_paths: if comp == component: - component_base_path = Path(self.dir, self.component_type) + component_base_path = Path(self.directory, self.component_type) self.local_path = Path(component_base_path, directory, component) break if self.local_path: @@ -166,20 +171,22 @@ def get_local_yaml(self): """Attempt to get the meta.yml file from a locally installed module/subworkflow. 
Returns: - dict or bool: Parsed meta.yml found, False otherwise + Optional[dict]: Parsed meta.yml if found, None otherwise """ + assert self.modules_json is not None # mypy if self.repo_type == "pipeline": # Try to find and load the meta.yml file - component_base_path = Path(self.dir, self.component_type) + component_base_path = Path(self.directory, self.component_type) # Check that we have any modules/subworkflows installed from this repo components = self.modules_json.get_all_components(self.component_type).get(self.modules_repo.remote_url) - component_names = [component for _, component in components] if components is None: raise LookupError(f"No {self.component_type[:-1]} installed from {self.modules_repo.remote_url}") + component_names = [component for _, component in components] + if self.component in component_names: - install_dir = [dir for dir, module in components if module == self.component][0] + install_dir = [directory for directory, module in components if module == self.component][0] comp_dir = Path(component_base_path, install_dir, self.component) meta_fn = Path(comp_dir, "meta.yml") if meta_fn.exists(): @@ -190,7 +197,7 @@ def get_local_yaml(self): log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally") else: - component_base_path = Path(self.dir, self.component_type, self.org) + component_base_path = Path(self.directory, self.component_type, self.org) if self.component in os.listdir(component_base_path): comp_dir = Path(component_base_path, self.component) meta_fn = Path(comp_dir, "meta.yml") @@ -203,7 +210,7 @@ def get_local_yaml(self): return None - def get_remote_yaml(self): + def get_remote_yaml(self) -> Optional[dict]: """Attempt to get the meta.yml file from a remote repo. Returns: @@ -211,11 +218,11 @@ def get_remote_yaml(self): """ # Check if our requested module/subworkflow is there if self.component not in self.modules_repo.get_avail_components(self.component_type): - return False + return None file_contents = self.modules_repo.get_meta_yml(self.component_type, self.component) if file_contents is None: - return False + return None self.remote_location = self.modules_repo.remote_url return yaml.safe_load(file_contents) @@ -242,7 +249,8 @@ def generate_component_info_help(self): "\n" ) ) - + if self.meta is None: + raise UserWarning("No meta.yml file found") if self.meta.get("tools"): tools_strings = [] for tool in self.meta["tools"]: @@ -307,21 +315,21 @@ def generate_component_info_help(self): # Print include statement if self.local_path: - install_folder = Path(self.dir, self.component_type, self.modules_repo.repo_path) + install_folder = Path(self.directory, self.component_type, self.modules_repo.repo_path) component_name = "_".join(self.component.upper().split("/")) renderables.append( Text.from_markup(f"\n [blue]Use the following statement to include this {self.component_type[:-1]}:") ) renderables.append( Syntax( - f"include {{ {component_name} }} from '../{Path(install_folder, self.component).relative_to(self.dir)}/main'", + f"include {{ {component_name} }} from '../{Path(install_folder, self.component).relative_to(self.directory)}/main'", "groovy", theme="ansi_dark", padding=1, ) ) if self.component_type == "subworkflows": - subworkflow_config = Path(install_folder, self.component, "nextflow.config").relative_to(self.dir) + subworkflow_config = Path(install_folder, self.component, "nextflow.config").relative_to(self.directory) if os.path.isfile(subworkflow_config): renderables.append( Text.from_markup("\n [blue]Add 
the following config statement to use this subworkflow:") diff --git a/nf_core/components/install.py b/nf_core/components/install.py index dddc3f93ec..e6c31b3cbb 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -1,11 +1,13 @@ import logging import os from pathlib import Path +from typing import List, Optional, Union import questionary from rich.console import Console from rich.syntax import Syntax +import nf_core.components import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command import ComponentCommand @@ -22,26 +24,26 @@ class ComponentInstall(ComponentCommand): def __init__( self, - pipeline_dir, - component_type, - force=False, - prompt=False, - sha=None, - remote_url=None, - branch=None, - no_pull=False, - installed_by=False, + pipeline_dir: Union[str, Path], + component_type: str, + force: bool = False, + prompt: bool = False, + sha: Optional[str] = None, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + installed_by: Optional[List[str]] = None, ): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.force = force self.prompt = prompt self.sha = sha - if installed_by: + if installed_by is not None: self.installed_by = installed_by else: - self.installed_by = self.component_type + self.installed_by = [self.component_type] - def install(self, component, silent=False): + def install(self, component: str, silent: bool = False) -> bool: if self.repo_type == "modules": log.error(f"You cannot install a {component} in a clone of nf-core/modules") return False @@ -67,8 +69,11 @@ def install(self, component, silent=False): # Verify SHA if not self.modules_repo.verify_sha(self.prompt, self.sha): return False + if self.modules_repo is None: + return False # Check and verify component name + component = self.collect_and_verify_name(component, self.modules_repo) if not component: return False @@ -156,19 +161,21 @@ def install_included_components(self, subworkflow_dir): modules_to_install, subworkflows_to_install = get_components_to_install(subworkflow_dir) for s_install in subworkflows_to_install: original_installed = self.installed_by - self.installed_by = Path(subworkflow_dir).parts[-1] + self.installed_by = [Path(subworkflow_dir).parts[-1]] self.install(s_install, silent=True) self.installed_by = original_installed for m_install in modules_to_install: original_component_type = self.component_type self.component_type = "modules" original_installed = self.installed_by - self.installed_by = Path(subworkflow_dir).parts[-1] + self.installed_by = [Path(subworkflow_dir).parts[-1]] self.install(m_install, silent=True) self.component_type = original_component_type self.installed_by = original_installed - def collect_and_verify_name(self, component, modules_repo): + def collect_and_verify_name( + self, component: Optional[str], modules_repo: nf_core.modules.modules_repo.ModulesRepo + ) -> str: """ Collect component name. Check that the supplied name is an available module/subworkflow. @@ -180,18 +187,19 @@ def collect_and_verify_name(self, component, modules_repo): style=nf_core.utils.nfcore_question_style, ).unsafe_ask() + if component is None: + return "" + # Check that the supplied name is an available module/subworkflow if component and component not in modules_repo.get_avail_components(self.component_type, commit=self.sha): log.error( f"{self.component_type[:-1].title()} '{component}' not found in list of available {self.component_type}." 
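
The `installed_by` change above swaps an untyped `False` sentinel for `Optional[List[str]] = None`. Defaulting to `None` rather than `[]` is the standard idiom: a literal list default is created once at function definition and shared across every call that omits it. A compact sketch of the normalisation, with invented names:

    from typing import List, Optional

    def resolve_installed_by(installed_by: Optional[List[str]] = None,
                             component_type: str = "modules") -> List[str]:
        # None, not []: a mutable default would leak state between calls.
        return installed_by if installed_by is not None else [component_type]

    assert resolve_installed_by() == ["modules"]
    assert resolve_installed_by(["fastqc_swf"]) == ["fastqc_swf"]
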
) log.info(f"Use the command 'nf-core {self.component_type} list' to view available software") - return False if not modules_repo.component_exists(component, self.component_type, commit=self.sha): warn_msg = f"{self.component_type[:-1].title()} '{component}' not found in remote '{modules_repo.remote_url}' ({modules_repo.branch})" log.warning(warn_msg) - return False return component diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index 7dd39bd904..2cd59dc489 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -7,7 +7,7 @@ import operator import os from pathlib import Path -from typing import List, Optional, Union +from typing import List, Optional, Tuple, Union import rich.box import rich.console @@ -37,12 +37,12 @@ class LintExceptionError(Exception): class LintResult: """An object to hold the results of a lint test""" - def __init__(self, component, lint_test, message, file_path): + def __init__(self, component: NFCoreComponent, lint_test: str, message: str, file_path: Path): self.component = component self.lint_test = lint_test self.message = message self.file_path = file_path - self.component_name = component.component_name + self.component_name: str = component.component_name @rich.repr.auto @@ -73,9 +73,9 @@ def __init__( ) self.fail_warned = fail_warned - self.passed: List[str] = [] - self.warned: List[str] = [] - self.failed: List[str] = [] + self.passed: List[LintResult] = [] + self.warned: List[LintResult] = [] + self.failed: List[LintResult] = [] if self.component_type == "modules": self.lint_tests = self.get_all_module_lint_tests(self.repo_type == "pipeline") else: @@ -88,17 +88,21 @@ def __init__( for repo_url, components in modules_json.get_all_components(self.component_type).items(): if remote_url is not None and remote_url != repo_url: continue - for org, comp in components: - self.all_remote_components.append( - NFCoreComponent( - comp, - repo_url, - Path(self.directory, self.component_type, org, comp), - self.repo_type, - Path(self.directory), - self.component_type, - ) + if isinstance(components, str): + raise LookupError( + f"Error parsing modules.json: {components}. " f"Please check the file for errors or try again." ) + org, comp = components + self.all_remote_components.append( + NFCoreComponent( + comp, + repo_url, + Path(self.directory, self.component_type, org, comp), + self.repo_type, + Path(self.directory), + self.component_type, + ) + ) if not self.all_remote_components: raise LookupError( f"No {self.component_type} from {self.modules_repo.remote_url} installed in pipeline." 
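
Several hunks in this commit follow the same narrowing pattern: mypy cannot prove that an `Optional`/`Union` value is safe at a given point, so the code asserts or `isinstance`-checks it first (compare the `assert self.modules_json is not None  # mypy` and `isinstance(self.update_config, dict)` changes). A compact, generic illustration:

    from typing import Optional, Tuple, Union

    def describe(entry: Optional[Union[str, Tuple[str, str]]]) -> str:
        assert entry is not None  # narrows away Optional, as the "# mypy" asserts do
        if isinstance(entry, str):
            return entry  # mypy now treats entry as str
        org, comp = entry  # ...and here as Tuple[str, str]
        return f"{org}/{comp}"

    print(describe(("nf-core", "fastqc")))  # nf-core/fastqc
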
@@ -119,7 +123,7 @@ def __init__( for comp in self.get_local_components() ] self.config = nf_core.utils.fetch_wf_config(Path(self.directory), cache_config=True) - else: + elif self.repo_type == "modules": component_dir = Path( self.directory, self.default_modules_path if self.component_type == "modules" else self.default_subworkflows_path, diff --git a/nf_core/components/list.py b/nf_core/components/list.py index f5f2744e17..b24732e5c3 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -1,6 +1,6 @@ import json import logging -from typing import Dict, List, Optional, Tuple, Union, cast +from typing import Dict, List, Optional, Union, cast import rich.table @@ -87,18 +87,18 @@ def pattern_msg(keywords: List[str]) -> str: return "" # Verify that 'modules.json' is consistent with the installed modules - modules_json: ModulesJson = ModulesJson(self.dir) + modules_json: ModulesJson = ModulesJson(self.directory) modules_json.check_up_to_date() # Filter by keywords - repos_with_comps: Dict[str, List[Tuple[str, str]]] = { + repos_with_comps = { repo_url: [comp for comp in components if all(k in comp[1] for k in keywords)] for repo_url, components in modules_json.get_all_components(self.component_type).items() } # Nothing found if sum(map(len, repos_with_comps)) == 0: - log.info(f"No nf-core {self.component_type} found in '{self.dir}'{pattern_msg(keywords)}") + log.info(f"No nf-core {self.component_type} found in '{self.directory}'{pattern_msg(keywords)}") return "" table.add_column("Repository") @@ -160,5 +160,5 @@ def pattern_msg(keywords: List[str]) -> str: f"{pattern_msg(keywords)}:\n" ) else: - log.info(f"{self.component_type.capitalize()} installed in '{self.dir}'{pattern_msg(keywords)}:\n") + log.info(f"{self.component_type.capitalize()} installed in '{self.directory}'{pattern_msg(keywords)}:\n") return table diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 5d0baf63dd..1433122225 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -5,7 +5,7 @@ import logging import re from pathlib import Path -from typing import Union +from typing import List, Optional, Tuple, Union log = logging.getLogger(__name__) @@ -18,13 +18,13 @@ class NFCoreComponent: def __init__( self, - component_name, - repo_url, - component_dir, - repo_type, - base_dir, - component_type, - remote_component=True, + component_name: str, + repo_url: Optional[str], + component_dir: Path, + repo_type: str, + base_dir: Path, + component_type: str, + remote_component: bool = True, ): """ Initialize the object @@ -46,21 +46,21 @@ def __init__( self.component_dir = component_dir self.repo_type = repo_type self.base_dir = base_dir - self.passed = [] - self.warned = [] - self.failed = [] - self.inputs = [] - self.outputs = [] - self.has_meta = False - self.git_sha = None - self.is_patched = False + self.passed: List[Tuple[str, str, Path]] = [] + self.warned: List[Tuple[str, str, Path]] = [] + self.failed: List[Tuple[str, str, Path]] = [] + self.inputs: List[str] = [] + self.outputs: List[str] = [] + self.has_meta: bool = False + self.git_sha: Optional[str] = None + self.is_patched: bool = False if remote_component: # Initialize the important files - self.main_nf = Path(self.component_dir, "main.nf") - self.meta_yml = Path(self.component_dir, "meta.yml") + self.main_nf: Path = Path(self.component_dir, "main.nf") + self.meta_yml: Optional[Path] = Path(self.component_dir, "meta.yml") self.process_name = "" - 
self.environment_yml = Path(self.component_dir, "environment.yml") + self.environment_yml: Optional[Path] = Path(self.component_dir, "environment.yml") repo_dir = self.component_dir.parts[: self.component_dir.parts.index(self.component_name.split("/")[0])][-1] self.org = repo_dir @@ -79,8 +79,8 @@ def __init__( self.component_name = self.component_dir.stem # These attributes are only used by nf-core modules # so just initialize them to None - self.meta_yml = "" - self.environment_yml = "" + self.meta_yml = None + self.environment_yml = None self.test_dir = None self.test_yml = None self.test_main_nf = None @@ -155,10 +155,10 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st included_components.append(component) return included_components - def get_inputs_from_main_nf(self): + def get_inputs_from_main_nf(self) -> None: """Collect all inputs from the main.nf file.""" - inputs = [] - with open(self.main_nf) as f: + inputs: List[str] = [] + with open(str(self.main_nf)) as f: data = f.read() # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo # regex matches: @@ -171,7 +171,6 @@ def get_inputs_from_main_nf(self): # don't match anything inside comments or after "output:" if "input:" not in data: log.debug(f"Could not find any inputs in {self.main_nf}") - return inputs input_data = data.split("input:")[1].split("output:")[0] regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" matches = re.finditer(regex, input_data, re.MULTILINE) @@ -187,7 +186,7 @@ def get_inputs_from_main_nf(self): def get_outputs_from_main_nf(self): outputs = [] - with open(self.main_nf) as f: + with open(str(self.main_nf)) as f: data = f.read() # get output values from main.nf after "output:". 
the names are always after "emit:" if "output:" not in data: diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py index 55d5747451..5b29c152d8 100644 --- a/nf_core/components/patch.py +++ b/nf_core/components/patch.py @@ -30,6 +30,10 @@ def _parameter_checks(self, component): raise UserWarning("The command was not run in a valid pipeline directory.") components = self.modules_json.get_all_components(self.component_type).get(self.modules_repo.remote_url) + if components is None: + raise UserWarning( + f"No {self.component_type[:-1]}s found in the 'modules.json' file for the remote '{self.modules_repo.remote_url}'" + ) component_names = [component for _, component in components] if component is not None and component not in component_names: diff --git a/nf_core/components/remove.py b/nf_core/components/remove.py index 8d884db6c4..c2c5843918 100644 --- a/nf_core/components/remove.py +++ b/nf_core/components/remove.py @@ -58,10 +58,10 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals removed_components = [] # Get the module/subworkflow directory - component_dir = Path(self.dir, self.component_type, repo_path, component) + component_dir = Path(self.directory, self.component_type, repo_path, component) # Load the modules.json file - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) modules_json.load() # Verify that the module/subworkflow is actually installed @@ -98,9 +98,16 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals for file, stmts in include_stmts.items(): renderables = [] for stmt in stmts: + # check that the line number is integer + if not isinstance(stmt["line_number"], int): + log.error( + f"Could not parse line number '{stmt['line_number']}' in '{file}'. Please report this issue." 
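
A few lines below, `stmt["line"]` gets wrapped in `str(...)`: the include-statement records are typed `Dict[str, Union[int, str]]`, so mypy treats each value as possibly `int`, while `rich.syntax.Syntax` expects a `str`. A standalone sketch of the same rendering call; the Groovy snippet and line number are invented for illustration:

    from rich.console import Console
    from rich.syntax import Syntax

    code = "include { FASTQC } from '../modules/nf-core/fastqc/main'"
    Console().print(
        Syntax(code, "groovy", theme="ansi_dark", line_numbers=True, start_line=12)
    )
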
+ ) + continue + renderables.append( Syntax( - stmt["line"], + str(stmt["line"]), "groovy", theme="ansi_dark", line_numbers=True, @@ -123,7 +130,7 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals style=nf_core.utils.nfcore_question_style, ).unsafe_ask(): # add the component back to modules.json - if not ComponentInstall(self.dir, self.component_type, force=True).install( + if not ComponentInstall(self.directory, self.component_type, force=True).install( component, silent=True ): log.warning( @@ -133,7 +140,9 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals return removed # Remove the component files of all entries removed from modules.json removed = ( - True if self.clear_component_dir(component, Path(self.dir, removed_component_dir)) or removed else False + True + if self.clear_component_dir(component, Path(self.directory, removed_component_dir)) or removed + else False ) removed_components.append(component) diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 1b94d5910c..fb0dc7d50d 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -76,10 +76,10 @@ def bump_versions( ) # Get list of all modules - _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.dir) + _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.directory) # Load the .nf-core.yml config - _, self.tools_config = nf_core.utils.load_tools_config(self.dir) + _, self.tools_config = nf_core.utils.load_tools_config(self.directory) # Prompt for module or all if module is None and not all_modules: @@ -179,7 +179,7 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: except (LookupError, ValueError): self.failed.append( ( - f"Conda version not specified correctly: {module.main_nf.relative_to(self.dir)}", + f"Conda version not specified correctly: {Path(module.main_nf).relative_to(self.directory)}", module.component_name, ) ) @@ -245,12 +245,12 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: fh.write(content) # change version in environment.yml - with open(module.environment_yml) as fh: + with open(str(module.environment_yml)) as fh: env_yml = yaml.safe_load(fh) env_yml["dependencies"][0] = re.sub( bioconda_packages[0], f"bioconda::{bioconda_tool_name}={last_ver}", env_yml["dependencies"][0] ) - with open(module.environment_yml, "w") as fh: + with open(str(module.environment_yml), "w") as fh: yaml.dump(env_yml, fh, default_flow_style=False, Dumper=custom_yaml_dumper()) self.updated.append( @@ -272,7 +272,7 @@ def get_bioconda_version(self, module: NFCoreComponent) -> List[str]: # Check whether file exists and load it bioconda_packages = [] try: - with open(module.environment_yml) as fh: + with open(str(module.environment_yml)) as fh: env_yml = yaml.safe_load(fh) bioconda_packages = env_yml.get("dependencies", []) except FileNotFoundError: diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 90d39104d2..2b10b4df5a 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -8,18 +8,33 @@ import logging import os +from pathlib import Path +from typing import List, Optional, Union import questionary import rich import rich.progress +import nf_core.components +import nf_core.components.nfcore_component import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult +from 
nf_core.components.nfcore_component import NFCoreComponent from nf_core.pipelines.lint_utils import console log = logging.getLogger(__name__) +from .environment_yml import environment_yml +from .main_nf import main_nf +from .meta_yml import meta_yml +from .module_changes import module_changes +from .module_deprecations import module_deprecations +from .module_patch import module_patch +from .module_tests import module_tests +from .module_todos import module_todos +from .module_version import module_version + class ModuleLint(ComponentLint): """ @@ -28,25 +43,25 @@ class ModuleLint(ComponentLint): """ # Import lint functions - from .environment_yml import environment_yml # type: ignore[misc] - from .main_nf import main_nf # type: ignore[misc] - from .meta_yml import meta_yml # type: ignore[misc] - from .module_changes import module_changes # type: ignore[misc] - from .module_deprecations import module_deprecations # type: ignore[misc] - from .module_patch import module_patch # type: ignore[misc] - from .module_tests import module_tests # type: ignore[misc] - from .module_todos import module_todos # type: ignore[misc] - from .module_version import module_version # type: ignore[misc] + environment_yml = environment_yml + main_nf = main_nf + meta_yml = meta_yml + module_changes = module_changes + module_deprecations = module_deprecations + module_patch = module_patch + module_tests = module_tests + module_todos = module_todos + module_version = module_version def __init__( self, - directory, - fail_warned=False, - remote_url=None, - branch=None, - no_pull=False, - registry=None, - hide_progress=False, + directory: Union[str, Path], + fail_warned: bool = False, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + registry: Optional[str] = None, + hide_progress: bool = False, ): super().__init__( component_type="modules", @@ -155,7 +170,9 @@ def lint( self._print_results(show_passed=show_passed, sort_by=sort_by) self.print_summary() - def lint_modules(self, modules, registry="quay.io", local=False, fix_version=False): + def lint_modules( + self, modules: List[NFCoreComponent], registry: str = "quay.io", local: bool = False, fix_version: bool = False + ) -> None: """ Lint a list of modules @@ -185,7 +202,14 @@ def lint_modules(self, modules, registry="quay.io", local=False, fix_version=Fal progress_bar.update(lint_progress, advance=1, test_name=mod.component_name) self.lint_module(mod, progress_bar, registry=registry, local=local, fix_version=fix_version) - def lint_module(self, mod, progress_bar, registry, local=False, fix_version=False): + def lint_module( + self, + mod: NFCoreComponent, + progress_bar: rich.progress.Progress, + registry: str, + local: bool = False, + fix_version: bool = False, + ): """ Perform linting on one module diff --git a/nf_core/modules/lint/environment_yml.py b/nf_core/modules/lint/environment_yml.py index e10ef1761d..e34b9d5856 100644 --- a/nf_core/modules/lint/environment_yml.py +++ b/nf_core/modules/lint/environment_yml.py @@ -5,7 +5,7 @@ import yaml from jsonschema import exceptions, validators -from nf_core.components.lint import ComponentLint +from nf_core.components.lint import ComponentLint, LintExceptionError from nf_core.components.nfcore_component import NFCoreComponent from nf_core.utils import custom_yaml_dumper @@ -22,8 +22,10 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent) """ env_yml = None # load the environment.yml file + if module.environment_yml is None: + raise 
LintExceptionError("Module does not have an `environment.yml` file") try: - with open(Path(module.component_dir, "environment.yml")) as fh: + with open(str(module.environment_yml)) as fh: env_yml = yaml.safe_load(fh) module.passed.append(("environment_yml_exists", "Module's `environment.yml` exists", module.environment_yml)) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 81308ba5c5..985a92fa1f 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -6,19 +6,24 @@ import re import sqlite3 from pathlib import Path +from typing import List, Tuple from urllib.parse import urlparse, urlunparse import requests import yaml +from rich.progress import Progress import nf_core import nf_core.modules.modules_utils +from nf_core.components.nfcore_component import NFCoreComponent from nf_core.modules.modules_differ import ModulesDiffer log = logging.getLogger(__name__) -def main_nf(module_lint_object, module, fix_version, registry, progress_bar): +def main_nf( + module_lint_object, module: NFCoreComponent, fix_version: bool, registry: str, progress_bar: Progress +) -> Tuple[List[str], List[str]]: """ Lint a ``main.nf`` module file @@ -43,7 +48,7 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): # Check if we have a patch file affecting the 'main.nf' file # otherwise read the lines directly from the module - lines = None + lines: List[str] = [] if module.is_patched: lines = ModulesDiffer.try_apply_patch( module.component_name, @@ -51,8 +56,9 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): module.patch_path, Path(module.component_dir).relative_to(module.base_dir), reverse=True, - ).get("main.nf") - if lines is None: + ).get("main.nf", [""]) + + if not lines: try: # Check whether file exists and load it with open(module.main_nf) as fh: @@ -60,10 +66,13 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): module.passed.append(("main_nf_exists", "Module file exists", module.main_nf)) except FileNotFoundError: module.failed.append(("main_nf_exists", "Module file does not exist", module.main_nf)) - return deprecated_i = ["initOptions", "saveFiles", "getSoftwareName", "getProcessName", "publishDir"] - lines_j = "\n".join(lines) + if lines is not None: + lines_j = "\n".join(lines) + else: + lines_j = "" + for i in deprecated_i: if i in lines_j: module.failed.append( diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 4c036713c8..32110b7131 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -4,7 +4,7 @@ import yaml from jsonschema import exceptions, validators -from nf_core.components.lint import ComponentLint +from nf_core.components.lint import ComponentLint, LintExceptionError from nf_core.components.nfcore_component import NFCoreComponent from nf_core.modules.modules_differ import ModulesDiffer @@ -53,9 +53,11 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None ).get("meta.yml") if lines is not None: meta_yaml = yaml.safe_load("".join(lines)) + if module.meta_yml is None: + raise LintExceptionError("Module does not have a `meta.yml` file") if meta_yaml is None: try: - with open(module.meta_yml) as fh: + with open(str(module.meta_yml)) as fh: meta_yaml = yaml.safe_load(fh) module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml)) except FileNotFoundError: diff --git a/nf_core/modules/modules_differ.py 
b/nf_core/modules/modules_differ.py
index e79554f2b6..36d927f084 100644
--- a/nf_core/modules/modules_differ.py
+++ b/nf_core/modules/modules_differ.py
@@ -4,6 +4,7 @@
 import logging
 import os
 from pathlib import Path
+from typing import List, Union
 
 from rich.console import Console
 from rich.syntax import Syntax
@@ -295,7 +296,7 @@ def print_diff(
         console.print(Syntax("".join(diff), "diff", theme="ansi_dark", padding=1))
 
     @staticmethod
-    def per_file_patch(patch_fn):
+    def per_file_patch(patch_fn: Union[str, Path]) -> dict[str, List[str]]:
         """
         Splits a patch file for several files into one patch per file.
 
@@ -306,12 +307,12 @@ def per_file_patch(patch_fn):
             dict[str, str]: A dictionary indexed by the filenames with the file patches as values
         """
-        with open(patch_fn) as fh:
+        with open(str(patch_fn)) as fh:
             lines = fh.readlines()
 
         patches = {}
         i = 0
-        patch_lines = []
+        patch_lines: list[str] = []
         key = "preamble"
         while i < len(lines):
             line = lines[i]
@@ -391,12 +392,12 @@ def try_apply_single_patch(file_lines, patch, reverse=False):
         """
         Tries to apply a patch to a modified file. Since the line numbers in
         the patch does not agree if the file is modified, the old and new
         lines in the patch are reconstructed and then we look for the old lines
         in the modified file. If all hunk in the patch are found in the new file
         it is updated with the new lines from the patch file.
 
         Args:
-            new_fn (str | Path): Path to the modified file
+            file_lines ([str]): The lines of the file to be patched
             patch (str | Path): (Outdated) patch for the file
             reverse (bool): Apply the patch in reverse
 
@@ -450,7 +451,9 @@ def try_apply_single_patch(file_lines, patch, reverse=False):
         return patched_new_lines
 
     @staticmethod
-    def try_apply_patch(module, repo_path, patch_path, module_dir, reverse=False):
+    def try_apply_patch(
+        module: str, repo_path: Union[str, Path], patch_path: Union[str, Path], module_dir: Path, reverse: bool = False
+    ) -> dict[str, List[str]]:
         """
         Try applying a full patch file to a module
 
@@ -459,6 +462,7 @@ def try_apply_patch(module, repo_path, patch_path, module_dir, reverse=False):
             repo_path (str): Name of the repository where the module resides
             patch_path (str): The absolute path to the patch file to be applied
             module_dir (Path): The directory containing the module
+            reverse (bool): Apply the patch in reverse
 
         Returns:
             dict[str, str]: A dictionary with file paths (relative to the pipeline dir)
diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py
index 63c356a79a..faeb84fc35 100644
--- a/nf_core/modules/modules_json.py
+++ b/nf_core/modules/modules_json.py
@@ -6,7 +6,7 @@
 import shutil
 import tempfile
 from pathlib import Path
-from typing import Union
+from typing import Any, List, Optional, Tuple, Union
 
 import git
 import questionary
@@ -153,7 +153,7 @@ def get_pipeline_module_repositories(self, component_type, directory, repos=None
         # The function might rename some directories, keep track of them
         renamed_dirs = {}
         # Check if there are any untracked repositories
-        dirs_not_covered = self.directory_tree_uncovered(directory, [Path(ModulesRepo(url).repo_path) for url in repos])
+        dirs_not_covered = self.dir_tree_uncovered(directory, [Path(ModulesRepo(url).repo_path) for url in repos])
         if len(dirs_not_covered) > 0:
             log.info(f"Found custom {component_type[:-1]} repositories when creating 'modules.json'")
             # Loop until all directories in the base directory are covered by a remote
@@ -203,7 +203,7 @@ def
get_pipeline_module_repositories(self, component_type, directory, repos=None if component_type not in repos[nrepo_remote]: repos[nrepo_remote][component_type] = {} repos[nrepo_remote][component_type][nrepo_name] = {} - dirs_not_covered = self.directory_tree_uncovered( + dirs_not_covered = self.dir_tree_uncovered( directory, [Path(name) for url in repos for name in repos[url][component_type]] ) @@ -244,7 +244,9 @@ def dir_tree_uncovered(self, components_directory, repos): depth += 1 return dirs_not_covered - def determine_branches_and_shas(self, component_type, install_dir, remote_url, components): + def determine_branches_and_shas( + self, component_type: str, install_dir: Union[str, Path], remote_url: str, components: List[Path] + ) -> dict[Path, dict[str, Any]]: """ Determines what branch and commit sha each module/subworkflow in the pipeline belongs to @@ -265,6 +267,8 @@ def determine_branches_and_shas(self, component_type, install_dir, remote_url, c repo_path = self.modules_dir / install_dir elif component_type == "subworkflows": repo_path = self.subworkflows_dir / install_dir + else: + raise ValueError(f"Unknown component type '{component_type}'") # Get the branches present in the repository, as well as the default branch available_branches = ModulesRepo.get_remote_branches(remote_url) sb_local = [] @@ -282,16 +286,16 @@ def determine_branches_and_shas(self, component_type, install_dir, remote_url, c if patch_file.is_file(): temp_module_dir = self.try_apply_patch_reverse(component, install_dir, patch_file, component_path) correct_commit_sha = self.find_correct_commit_sha( - component_type, component, temp_module_dir, modules_repo + component_type, str(component), temp_module_dir, modules_repo ) else: correct_commit_sha = self.find_correct_commit_sha( - component_type, component, component_path, modules_repo + component_type, str(component), component_path, modules_repo ) if correct_commit_sha is None: # Check in the old path correct_commit_sha = self.find_correct_commit_sha( - component_type, component, repo_path / component_type / component, modules_repo + component_type, str(component), repo_path / component_type / component, modules_repo ) if correct_commit_sha is None: log.info( @@ -334,7 +338,7 @@ def determine_branches_and_shas(self, component_type, install_dir, remote_url, c # Clean up the modules/subworkflows we were unable to find the sha for for component in sb_local: log.debug(f"Moving {component_type[:-1]} '{Path(install_dir, component)}' to 'local' directory") - self.move_component_to_local(component_type, component, install_dir) + self.move_component_to_local(component_type, component, str(install_dir)) for component in dead_components: log.debug(f"Removing {component_type[:-1]} {Path(install_dir, component)}'") @@ -342,7 +346,13 @@ def determine_branches_and_shas(self, component_type, install_dir, remote_url, c return repo_entry - def find_correct_commit_sha(self, component_type, component_name, component_path, modules_repo): + def find_correct_commit_sha( + self, + component_type: str, + component_name: Union[str, Path], + component_path: Union[str, Path], + modules_repo: ModulesRepo, + ) -> Optional[str]: """ Returns the SHA for the latest commit where the local files are identical to the remote files Args: @@ -370,24 +380,27 @@ def find_correct_commit_sha(self, component_type, component_name, component_path return commit_sha return None - def move_component_to_local(self, component_type, component, repo_name): + def move_component_to_local(self, component_type: str, 
component: Union[str, Path], repo_name: str): """ Move a module/subworkflow to the 'local' directory Args: - component (str): The name of the module/subworkflow + component_type (str): The type of component, either 'modules' or 'subworkflows' + component (Union[str,Path]): The name of the module/subworkflow repo_name (str): The name of the repository the module resides in """ if component_type == "modules": directory = self.modules_dir elif component_type == "subworkflows": directory = self.subworkflows_dir + else: + raise ValueError(f"Unknown component type '{component_type}'") current_path = directory / repo_name / component local_dir = directory / "local" if not local_dir.exists(): local_dir.mkdir() - to_name = component + to_name = str(component) # Check if there is already a subdirectory with the name while (local_dir / to_name).exists(): # Add a time suffix to the path to make it unique @@ -395,7 +408,7 @@ def move_component_to_local(self, component_type, component, repo_name): to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}" shutil.move(current_path, local_dir / to_name) - def unsynced_components(self): + def unsynced_components(self) -> Tuple[List[Path], List[Path], dict]: """ Compute the difference between the modules/subworkflows in the directory and the modules/subworkflows in the 'modules.json' file. This is done by looking at all @@ -406,6 +419,7 @@ def unsynced_components(self): by the modules.json file, and modules/subworkflows in the modules.json where the installation directory is missing """ + assert self.modules_json is not None # mypy # Add all modules from modules.json to missing_installation missing_installation = copy.deepcopy(self.modules_json["repos"]) # Obtain the path of all installed modules @@ -429,14 +443,27 @@ def unsynced_components(self): return untracked_dirs_modules, untracked_dirs_subworkflows, missing_installation - def parse_dirs(self, dirs, missing_installation, component_type): + def parse_dirs(self, dirs: List[Path], missing_installation: dict, component_type: str) -> Tuple[List[Path], dict]: + """ + Parse directories and check if they are tracked in the modules.json file + + Args: + dirs ([ Path ]): List of directories to check + missing_installation (dict): Dictionary with the modules.json entries + component_type (str): The type of component, either 'modules' or 'subworkflows' + + Returns: + (untracked_dirs ([ Path ]), missing_installation (dict)): List of directories that are not tracked + by the modules.json file, and the updated missing_installation dictionary + """ + untracked_dirs = [] for dir_ in dirs: # Check if the module/subworkflows directory exists in modules.json install_dir = dir_.parts[0] - component = str(Path(*dir_.parts[1:])) + component = Path(*dir_.parts[1:]) component_in_file = False - git_url = None + git_url = "" for repo in missing_installation: if component_type in missing_installation[repo]: if install_dir in missing_installation[repo][component_type]: @@ -453,9 +480,7 @@ def parse_dirs(self, dirs, missing_installation, component_type): # Check if the entry has a git sha and branch before removing components_dict = module_repo[component_type][install_dir] if "git_sha" not in components_dict[component] or "branch" not in components_dict[component]: - self.determine_branches_and_shas( - component_type, component, git_url, module_repo["base_path"], [component] - ) + self.determine_branches_and_shas(component_type, component, git_url, [component]) # Remove the module/subworkflow from modules/subworkflows 
without installation module_repo[component_type][install_dir].pop(component) if len(module_repo[component_type][install_dir]) == 0: @@ -470,13 +495,14 @@ def parse_dirs(self, dirs, missing_installation, component_type): return untracked_dirs, missing_installation - def has_git_url_and_modules(self): + def has_git_url_and_modules(self) -> bool: """ Check that all repo entries in the modules.json has a git url and a modules dict entry Returns: (bool): True if they are found for all repos, False otherwise """ + assert self.modules_json is not None # mypy for repo_url, repo_entry in self.modules_json.get("repos", {}).items(): if "modules" not in repo_entry: if "subworkflows" in repo_entry: @@ -538,7 +564,7 @@ def reinstall_repo(self, install_dir, remote_url, module_entries): failed_to_install.append(module) return failed_to_install - def check_up_to_date(self): + def check_up_to_date(self) -> bool: """ Checks whether the modules and subworkflows installed in the directory are consistent with the entries in the 'modules.json' file and vice versa. @@ -558,6 +584,8 @@ def check_up_to_date(self): self.load() if not self.has_git_url_and_modules(): raise UserWarning + + assert self.modules_json is not None # mypy # check that all "installed_by" entries are lists and not strings # [these strings come from an older dev version, so this check can probably be removed in a future release] for _, repo_entry in self.modules_json.get("repos", {}).items(): @@ -601,7 +629,7 @@ def check_up_to_date(self): if len(subworkflows_missing_from_modules_json) > 0: dump_modules_json = True self.resolve_missing_from_modules_json(subworkflows_missing_from_modules_json, "subworkflows") - + assert self.modules_json is not None # mypy # If the "installed_by" value is not present for modules/subworkflows, add it. 
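# For orientation (illustrative sketch, not part of this patch — the repo URL,
# module name and SHA below are hypothetical placeholders), a normalized
# modules.json component entry that this walk produces looks roughly like:
#
#   "https://github.com/nf-core/modules.git": {
#       "modules": {
#           "nf-core": {
#               "fastqc": {"branch": "master", "git_sha": "<sha>", "installed_by": ["modules"]}
#           }
#       }
#   }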
for repo, repo_content in self.modules_json["repos"].items(): for component_type, dir_content in repo_content.items(): @@ -626,8 +654,9 @@ def check_up_to_date(self): if dump_modules_json: self.dump(run_prettier=True) + return True - def load(self): + def load(self) -> None: """ Loads the modules.json file into the variable 'modules_json' @@ -648,14 +677,14 @@ def load(self): def update( self, - component_type, - modules_repo, - component_name, - component_version, - installed_by, - installed_by_log=None, - write_file=True, - ): + component_type: str, + modules_repo: ModulesRepo, + component_name: str, + component_version: str, + installed_by: Optional[List[str]], + installed_by_log: Optional[List[str]] = None, + write_file: bool = True, + ) -> bool: """ Updates the 'module.json' file with new module/subworkflow info @@ -675,9 +704,11 @@ def update( if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy repo_name = modules_repo.repo_path remote_url = modules_repo.remote_url branch = modules_repo.branch + if remote_url not in self.modules_json["repos"]: self.modules_json["repos"][remote_url] = {component_type: {repo_name: {}}} if component_type not in self.modules_json["repos"][remote_url]: @@ -757,6 +788,8 @@ def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, wr """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy + if repo_url not in self.modules_json["repos"]: raise LookupError(f"Repo '{repo_url}' not present in 'modules.json'") if module_name not in self.modules_json["repos"][repo_url]["modules"][install_dir]: @@ -768,6 +801,8 @@ def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, wr def remove_patch_entry(self, module_name, repo_url, install_dir, write_file=True): if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy + try: del self.modules_json["repos"][repo_url]["modules"][install_dir][module_name]["patch"] except KeyError: @@ -789,6 +824,7 @@ def get_patch_fn(self, module_name, repo_url, install_dir): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy path = ( self.modules_json["repos"] .get(repo_url, {}) @@ -845,6 +881,8 @@ def repo_present(self, repo_name): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy + return repo_name in self.modules_json.get("repos", {}) def module_present(self, module_name, repo_url, install_dir): @@ -859,6 +897,7 @@ def module_present(self, module_name, repo_url, install_dir): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy return module_name in self.modules_json.get("repos", {}).get(repo_url, {}).get("modules", {}).get( install_dir, {} ) @@ -872,8 +911,8 @@ def get_modules_json(self) -> dict: """ if self.modules_json is None: self.load() - - return copy.deepcopy(self.modules_json) # type: ignore + assert self.modules_json is not None # mypy + return copy.deepcopy(self.modules_json) def get_component_version(self, component_type, component_name, repo_url, install_dir): """ @@ -889,6 +928,7 @@ def get_component_version(self, component_type, component_name, repo_url, instal """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy return ( self.modules_json.get("repos", {}) .get(repo_url, {}) @@ -912,6 +952,7 @@ def get_module_version(self, module_name, repo_url, install_dir): """ if self.modules_json is None: self.load() + assert 
self.modules_json is not None # mypy return ( self.modules_json.get("repos", {}) .get(repo_url, {}) @@ -935,6 +976,7 @@ def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy return ( self.modules_json.get("repos", {}) .get(repo_url, {}) @@ -944,7 +986,7 @@ def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir): .get("git_sha", None) ) - def get_all_components(self, component_type): + def get_all_components(self, component_type: str) -> dict[str, Tuple[(str, str)]]: """ Retrieves all pipeline modules/subworkflows that are reported in the modules.json @@ -954,6 +996,8 @@ def get_all_components(self, component_type): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy + if self.pipeline_components is None: self.pipeline_components = {} for repo, repo_entry in self.modules_json.get("repos", {}).items(): @@ -987,6 +1031,7 @@ def get_dependent_components( if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy component_types = ["modules"] if component_type == "modules" else ["modules", "subworkflows"] # Find all components that have an entry of install by of a given component, recursively call this function for subworkflows for type in component_types: @@ -1016,10 +1061,11 @@ def get_installed_by_entries(self, component_type, name): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy installed_by_entries = {} - for repo_url, repo_entry in self.modules_json.get("repos", {}).items(): + for _, repo_entry in self.modules_json.get("repos", {}).items(): if component_type in repo_entry: - for install_dir, components in repo_entry[component_type].items(): + for _, components in repo_entry[component_type].items(): if name in components: installed_by_entries = components[name]["installed_by"] break @@ -1037,6 +1083,7 @@ def get_component_branch(self, component_type, component, repo_url, install_dir) """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy branch = ( self.modules_json["repos"] .get(repo_url, {}) @@ -1096,7 +1143,8 @@ def resolve_missing_installation(self, missing_installation, component_type): log.info( f"Was unable to reinstall some {component_type}. 
Removing 'modules.json' entries: {', '.join(uninstallable_components)}" ) - + if self.modules_json is None: + raise UserWarning("No modules.json file found") for (repo_url, install_dir), component_entries in remove_from_mod_json.items(): for component in component_entries: self.modules_json["repos"][repo_url][component_type][install_dir].pop(component) @@ -1113,7 +1161,7 @@ def resolve_missing_from_modules_json(self, missing_from_modules_json, component log.info( f"Recomputing commit SHAs for {component_type} which were missing from 'modules.json': {', '.join(format_missing)}" ) - + assert self.modules_json is not None # mypy # Get the remotes we are missing tracked_repos = {repo_url: (repo_entry) for repo_url, repo_entry in self.modules_json["repos"].items()} repos, _ = self.get_pipeline_module_repositories(component_type, self.modules_dir, tracked_repos) @@ -1186,7 +1234,7 @@ def recreate_dependencies(self, repo, org, subworkflow): sw_path = Path(self.subworkflows_dir, org, subworkflow) dep_mods, dep_subwfs = get_components_to_install(sw_path) - + assert self.modules_json is not None # mypy for dep_mod in dep_mods: installed_by = self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] if installed_by == ["modules"]: diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 9694920274..b345dfe8b4 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -1,6 +1,7 @@ import logging import os import shutil +from typing import Optional import git import rich @@ -35,7 +36,13 @@ class ModulesRepo(SyncedRepo): local_repo_statuses = {} no_pull_global = False - def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=False): + def __init__( + self, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + hide_progress: bool = False, + ): """ Initializes the object and clones the git repository if it is not already present """ diff --git a/nf_core/modules/modules_utils.py b/nf_core/modules/modules_utils.py index 6796de41ec..ecfe5f24ee 100644 --- a/nf_core/modules/modules_utils.py +++ b/nf_core/modules/modules_utils.py @@ -36,7 +36,7 @@ def repo_full_name_from_remote(remote_url: str) -> str: return path -def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], List[NFCoreComponent]]: +def get_installed_modules(directory: Path, repo_type="modules") -> Tuple[List[str], List[NFCoreComponent]]: """ Make a list of all modules installed in this repository @@ -52,15 +52,15 @@ def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], Lis # initialize lists local_modules: List[str] = [] nfcore_modules_names: List[str] = [] - local_modules_dir: Optional[str] = None - nfcore_modules_dir = os.path.join(dir, "modules", "nf-core") + local_modules_dir: Optional[Path] = None + nfcore_modules_dir = Path(directory, "modules", "nf-core") # Get local modules if repo_type == "pipeline": - local_modules_dir = os.path.join(dir, "modules", "local") + local_modules_dir = Path(directory, "modules", "local") # Filter local modules - if os.path.exists(local_modules_dir): + if local_modules_dir.exists(): local_modules = os.listdir(local_modules_dir) local_modules = sorted([x for x in local_modules if x.endswith(".nf")]) @@ -89,7 +89,7 @@ def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], Lis "nf-core/modules", Path(nfcore_modules_dir, m), repo_type=repo_type, - base_dir=Path(dir), + base_dir=directory, component_type="modules", ) for 
m in nfcore_modules_names diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index 601f351fda..af39334744 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -202,9 +202,12 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): f"subworkflows_{org_alphabet}", ] included_components = [] - if subworkflow.main_nf.is_file(): + if subworkflow.main_nf is not None and Path(subworkflow.main_nf).is_file(): included_components = subworkflow._get_included_components(subworkflow.main_nf) - chained_components_tags = subworkflow._get_included_components_in_chained_tests(subworkflow.nftest_main_nf) + if subworkflow.nftest_main_nf is not None and subworkflow.nftest_main_nf.is_file(): + chained_components_tags = subworkflow._get_included_components_in_chained_tests( + subworkflow.nftest_main_nf + ) log.debug(f"Included components: {included_components}") log.debug(f"Required tags: {required_tags}") log.debug(f"Included components for chained nf-tests: {chained_components_tags}") diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 4d6a3f6a4e..4b69d4af87 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -4,7 +4,7 @@ import shutil from configparser import NoOptionError, NoSectionError from pathlib import Path -from typing import Dict +from typing import Dict, Optional, Union import git from git.exc import GitCommandError @@ -51,9 +51,14 @@ def update(self, op_code, cur_count, max_count=None, message=""): """ if not self.progress_bar.tasks[self.tid].started: self.progress_bar.start_task(self.tid) - self.progress_bar.update( - self.tid, total=max_count, completed=cur_count, state=f"{cur_count / max_count * 100:.1f}%" - ) + if cur_count is not None and max_count is not None: + cur_count = float(cur_count) + max_count = float(max_count) + state = f"{cur_count / max_count * 100:.1f}%" + else: + state = "Unknown" + + self.progress_bar.update(self.tid, total=max_count, completed=cur_count, state=state) class SyncedRepo: @@ -139,6 +144,9 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa self.avail_module_names = None + def setup_local_repo(self, remote_url, branch, hide_progress): + pass + def verify_sha(self, prompt, sha): """ Verify that 'sha' and 'prompt' arguments are not provided together. @@ -258,7 +266,7 @@ def component_exists(self, component_name, component_type, checkout=True, commit """ return component_name in self.get_avail_components(component_type, checkout=checkout, commit=commit) - def get_component_dir(self, component_name, component_type): + def get_component_dir(self, component_name: str, component_type: str) -> Path: """ Returns the file path of a module/subworkflow directory in the repo. Does not verify that the path exists. 
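# Illustrative usage of the typed get_component_dir above (a sketch only — the
# repo object and component name are hypothetical, not taken from this patch):
#
#   repo = ModulesRepo(remote_url="https://github.com/nf-core/modules.git")
#   repo.get_component_dir("fastqc", "modules")      # -> Path under the repo's modules_dir
#   repo.get_component_dir("fastqc", "workflows")    # -> raises ValueError (per the hunk below)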
@@ -269,11 +277,15 @@ def get_component_dir(self, component_name, component_type): component_path (str): The path of the module/subworkflow in the local copy of the repository """ if component_type == "modules": - return os.path.join(self.modules_dir, component_name) + return Path(self.modules_dir, component_name) elif component_type == "subworkflows": - return os.path.join(self.subworkflows_dir, component_name) + return Path(self.subworkflows_dir, component_name) + else: + raise ValueError(f"Invalid component type: {component_type}") - def install_component(self, component_name, install_dir, commit, component_type): + def install_component( + self, component_name: str, install_dir: Union[str, Path], commit: str, component_type: str + ) -> bool: """ Install the module/subworkflow files into a pipeline at the given commit @@ -281,6 +293,7 @@ def install_component(self, component_name, install_dir, commit, component_type) component_name (str): The name of the module/subworkflow install_dir (str): The path where the module/subworkflow should be installed commit (str): The git SHA for the version of the module/subworkflow to be installed + component_type (str): Either 'modules' or 'subworkflows' Returns: (bool): Whether the operation was successful or not @@ -332,6 +345,8 @@ def component_files_identical(self, component_name, base_path, commit, component return files_identical def ensure_git_user_config(self, default_name: str, default_email: str) -> None: + if self.repo is None: + raise ValueError("Repository not initialized") try: with self.repo.config_reader() as git_config: user_name = git_config.get_value("user", "name", default=None) @@ -346,7 +361,7 @@ def ensure_git_user_config(self, default_name: str, default_email: str) -> None: if not user_email: git_config.set_value("user", "email", default_email) - def get_component_git_log(self, component_name, component_type, depth=None): + def get_component_git_log(self, component_name: Union[str, Path], component_type: str, depth: Optional[int] = None): """ Fetches the commit history the of requested module/subworkflow since a given date. 
The default value is not arbitrary - it is the last time the structure of the nf-core/modules repository was had an @@ -358,19 +373,26 @@ def get_component_git_log(self, component_name, component_type, depth=None): Returns: ( dict ): Iterator of commit SHAs and associated (truncated) message """ + + if self.repo is None: + raise ValueError("Repository not initialized") self.checkout_branch() - component_path = os.path.join(component_type, self.repo_path, component_name) + component_path = Path(component_type, self.repo_path, component_name) + commits_new = self.repo.iter_commits(max_count=depth, paths=component_path) - commits_new = [ - {"git_sha": commit.hexsha, "trunc_message": commit.message.partition("\n")[0]} for commit in commits_new - ] + if not commits_new: + raise ValueError(f"Could not find any commits for '{component_name}' in '{self.remote_url}'") + else: + commits_new = [ + {"git_sha": commit.hexsha, "trunc_message": commit.message.splitlines()[0]} for commit in commits_new + ] commits_old = [] if component_type == "modules": # Grab commits also from previous modules structure - component_path = os.path.join("modules", component_name) + component_path = Path("modules", component_name) commits_old = self.repo.iter_commits(max_count=depth, paths=component_path) commits_old = [ - {"git_sha": commit.hexsha, "trunc_message": commit.message.partition("\n")[0]} for commit in commits_old + {"git_sha": commit.hexsha, "trunc_message": commit.message.splitlines()[0]} for commit in commits_old ] commits = iter(commits_new + commits_old) return commits @@ -385,6 +407,8 @@ def sha_exists_on_branch(self, sha): """ Verifies that a given commit sha exists on the branch """ + if self.repo is None: + raise ValueError("Repository not initialized") self.checkout_branch() return sha in (commit.hexsha for commit in self.repo.iter_commits()) @@ -399,10 +423,12 @@ def get_commit_info(self, sha): Raises: LookupError: If the search for the commit fails """ + if self.repo is None: + raise ValueError("Repository not initialized") self.checkout_branch() for commit in self.repo.iter_commits(): if commit.hexsha == sha: - message = commit.message.partition("\n")[0] + message = commit.message.splitlines()[0] date_obj = commit.committed_datetime date = str(date_obj.date()) return message, date diff --git a/nf_core/utils.py b/nf_core/utils.py index 0cd812cb04..d1e9ccfe95 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -22,11 +22,12 @@ from typing import Dict, Generator, List, Optional, Tuple, Union import git -import prompt_toolkit +import prompt_toolkit.styles import questionary -import requests +import requests.auth import requests_cache import rich +import rich.markup import yaml from packaging.version import Version from rich.live import Live @@ -524,8 +525,9 @@ def __call__(self, r): self.auth_mode = f"gh CLI config: {gh_cli_config['github.com']['user']}" except Exception: ex_type, ex_value, _ = sys.exc_info() - output = rich.markup.escape(f"{ex_type.__name__}: {ex_value}") - log.debug(f"Couldn't auto-auth with GitHub CLI auth from '{gh_cli_config_fn}': [red]{output}") + if ex_type is not None: + output = rich.markup.escape(f"{ex_type.__name__}: {ex_value}") + log.debug(f"Couldn't auto-auth with GitHub CLI auth from '{gh_cli_config_fn}': [red]{output}") # Default auth if we have a GitHub Token (eg. 
GitHub Actions CI)
         if os.environ.get("GITHUB_TOKEN") is not None and self.auth is None:
@@ -804,6 +806,8 @@ def get_tag_date(tag_date):
                     singularity_image = all_singularity[k]["image"]
                     current_date = date
         docker_image_name = docker_image["image_name"].lstrip("quay.io/")
+        if singularity_image is None:
+            raise LookupError(f"Could not find singularity container for {package}")
         return docker_image_name, singularity_image["image_name"]
     except TypeError:
         raise LookupError(f"Could not find docker or singularity container for {package}")
@@ -1072,7 +1076,7 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, dict]:
     return config_fn, tools_config
 
 
-def determine_base_dir(directory="."):
+def determine_base_dir(directory: Union[Path, str] = ".") -> Path:
     base_dir = start_dir = Path(directory).absolute()
     # Only iterate up the tree if the start dir doesn't have a config
     while not get_first_available_path(base_dir, CONFIG_PATHS) and base_dir != base_dir.parent:
@@ -1080,7 +1084,7 @@ def determine_base_dir(directory="."):
         config_fn = get_first_available_path(base_dir, CONFIG_PATHS)
         if config_fn:
             break
-    return directory if (base_dir == start_dir or str(base_dir) == base_dir.root) else base_dir
+    return Path(directory) if (base_dir == start_dir or str(base_dir) == base_dir.root) else base_dir
 
 
 def get_first_available_path(directory: Union[Path, str], paths: List[str]) -> Union[Path, None]:

From 61898dc14663fa3e81f78fdd5a62756217de7dc5 Mon Sep 17 00:00:00 2001
From: mashehu
Date: Mon, 22 Jul 2024 11:11:55 +0200
Subject: [PATCH 30/89] fix rich imports, remove boolean type for `installed_by`, fix more occurrences of `dir`

---
 nf_core/components/create.py                 |  1 +
 nf_core/components/patch.py                  | 10 +--
 nf_core/modules/install.py                   |  2 +-
 nf_core/modules/modules_repo.py              |  1 +
 nf_core/modules/patch.py                     |  2 +-
 nf_core/pipelines/create/create.py           |  6 +-
 nf_core/subworkflows/install.py              |  2 +-
 nf_core/subworkflows/lint/__init__.py        |  8 +--
 .../subworkflows/lint/subworkflow_version.py |  2 +-
 tests/modules/test_install.py                |  8 +--
 tests/modules/test_lint.py                   | 62 +++++++++----------
 tests/modules/test_remove.py                 |  8 +--
 tests/subworkflows/test_install.py           | 45 ++++++--------
 tests/subworkflows/test_lint.py              | 40 ++++++------
 tests/subworkflows/test_remove.py            | 20 +++---
 15 files changed, 109 insertions(+), 108 deletions(-)

diff --git a/nf_core/components/create.py b/nf_core/components/create.py
index 532c2a46d6..413790099d 100644
--- a/nf_core/components/create.py
+++ b/nf_core/components/create.py
@@ -14,6 +14,7 @@
 import jinja2
 import questionary
 import rich
+import rich.prompt
 import yaml
 from packaging.version import parse as parse_version
 
diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py
index 5b29c152d8..0332a1fe42 100644
--- a/nf_core/components/patch.py
+++ b/nf_core/components/patch.py
@@ -15,7 +15,7 @@
 
 
 class ComponentPatch(ComponentCommand):
-    def __init__(self, pipeline_dir, component_type, remote_url=None, branch=None, no_pull=False, installed_by=False):
+    def __init__(self, pipeline_dir, component_type, remote_url=None, branch=None, no_pull=False, installed_by=None):
         super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull)
 
         self.modules_json = ModulesJson(pipeline_dir)
@@ -88,8 +88,8 @@ def patch(self, component=None):
         patch_filename = f"{component.replace('/', '-')}.diff"
         component_relpath = Path(self.component_type, component_dir, component)
         patch_relpath = Path(component_relpath, patch_filename)
-        component_current_dir = Path(self.dir, component_relpath)
-
patch_path = Path(self.dir, patch_relpath) + component_current_dir = Path(self.directory, component_relpath) + patch_path = Path(self.directory, patch_relpath) if patch_path.exists(): remove = questionary.confirm( @@ -189,8 +189,8 @@ def remove(self, component): patch_filename = f"{component.replace('/', '-')}.diff" component_relpath = Path(self.component_type, component_dir, component) patch_relpath = Path(component_relpath, patch_filename) - patch_path = Path(self.dir, patch_relpath) - component_path = Path(self.dir, component_relpath) + patch_path = Path(self.directory, patch_relpath) + component_path = Path(self.directory, component_relpath) if patch_path.exists(): remove = questionary.confirm( diff --git a/nf_core/modules/install.py b/nf_core/modules/install.py index e1755cee98..7055abe49a 100644 --- a/nf_core/modules/install.py +++ b/nf_core/modules/install.py @@ -11,7 +11,7 @@ def __init__( remote_url=None, branch=None, no_pull=False, - installed_by=False, + installed_by=None, ): super().__init__( pipeline_dir, diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index b345dfe8b4..a979ae45b8 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -6,6 +6,7 @@ import git import rich import rich.progress +import rich.prompt from git.exc import GitCommandError, InvalidGitRepositoryError import nf_core.modules.modules_json diff --git a/nf_core/modules/patch.py b/nf_core/modules/patch.py index b4e86f2d19..bbad0d428e 100644 --- a/nf_core/modules/patch.py +++ b/nf_core/modules/patch.py @@ -6,5 +6,5 @@ class ModulePatch(ComponentPatch): - def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False, installed_by=False): + def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False, installed_by=None): super().__init__(pipeline_dir, "modules", remote_url, branch, no_pull, installed_by) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index c5af956690..aecba94236 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -512,12 +512,14 @@ def fix_linting(self): def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" email_logo_path = Path(self.outdir) / "assets" - create_logo(text=self.jinja_params["short_name"], dir=email_logo_path, theme="light", force=bool(self.force)) + create_logo( + text=self.jinja_params["short_name"], directory=email_logo_path, theme="light", force=bool(self.force) + ) for theme in ["dark", "light"]: readme_logo_path = Path(self.outdir) / "docs" / "images" create_logo( text=self.jinja_params["short_name"], - dir=readme_logo_path, + directory=readme_logo_path, width=600, theme=theme, force=bool(self.force), diff --git a/nf_core/subworkflows/install.py b/nf_core/subworkflows/install.py index 6c5cfb12b2..70a6b0afa5 100644 --- a/nf_core/subworkflows/install.py +++ b/nf_core/subworkflows/install.py @@ -11,7 +11,7 @@ def __init__( remote_url=None, branch=None, no_pull=False, - installed_by=False, + installed_by=None, ): super().__init__( pipeline_dir, diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index a3cacf2952..a073710884 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -36,7 +36,7 @@ class SubworkflowLint(ComponentLint): def __init__( self, - dir, + directory, fail_warned=False, remote_url=None, branch=None, @@ -46,7 +46,7 @@ def __init__( ): super().__init__( 
component_type="subworkflows", - dir=dir, + directory=directory, fail_warned=fail_warned, remote_url=remote_url, branch=branch, @@ -122,9 +122,9 @@ def lint( remote_subworkflows = self.all_remote_components if self.repo_type == "modules": - log.info(f"Linting modules repo: [magenta]'{self.dir}'") + log.info(f"Linting modules repo: [magenta]'{self.directory}'") else: - log.info(f"Linting pipeline: [magenta]'{self.dir}'") + log.info(f"Linting pipeline: [magenta]'{self.directory}'") if subworkflow: log.info(f"Linting subworkflow: [magenta]'{subworkflow}'") diff --git a/nf_core/subworkflows/lint/subworkflow_version.py b/nf_core/subworkflows/lint/subworkflow_version.py index 5801abd885..1acb95e779 100644 --- a/nf_core/subworkflows/lint/subworkflow_version.py +++ b/nf_core/subworkflows/lint/subworkflow_version.py @@ -21,7 +21,7 @@ def subworkflow_version(subworkflow_lint_object, subworkflow): newer version of the subworkflow available. """ - modules_json_path = Path(subworkflow_lint_object.dir, "modules.json") + modules_json_path = Path(subworkflow_lint_object.directory, "modules.json") # Verify that a git_sha exists in the `modules.json` file for this module version = subworkflow_lint_object.modules_json.get_subworkflow_version( subworkflow.component_name, subworkflow.repo_url, subworkflow.org diff --git a/tests/modules/test_install.py b/tests/modules/test_install.py index b90f01ee61..cfdaac47ed 100644 --- a/tests/modules/test_install.py +++ b/tests/modules/test_install.py @@ -19,14 +19,14 @@ class TestModulesCreate(TestModules): def test_modules_install_nopipeline(self): """Test installing a module - no pipeline given""" - self.mods_install.dir = None + self.mods_install.directory = None assert self.mods_install.install("foo") is False @with_temporary_folder def test_modules_install_emptypipeline(self, tmpdir): """Test installing a module - empty dir given""" os.mkdir(os.path.join(tmpdir, "nf-core-pipe")) - self.mods_install.dir = os.path.join(tmpdir, "nf-core-pipe") + self.mods_install.directory = os.path.join(tmpdir, "nf-core-pipe") with pytest.raises(UserWarning) as excinfo: self.mods_install.install("foo") assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) @@ -38,8 +38,8 @@ def test_modules_install_nomodule(self): def test_modules_install_trimgalore(self): """Test installing a module - TrimGalore!""" assert self.mods_install.install("trimgalore") is not False - assert self.mods_install.dir is not None - module_path = Path(self.mods_install.dir, "modules", "nf-core", "trimgalore") + assert self.mods_install.directory is not None + module_path = Path(self.mods_install.directory, "modules", "nf-core", "trimgalore") assert os.path.exists(module_path) def test_modules_install_trimgalore_twice(self): diff --git a/tests/modules/test_lint.py b/tests/modules/test_lint.py index 6448916acf..fc9871db2e 100644 --- a/tests/modules/test_lint.py +++ b/tests/modules/test_lint.py @@ -180,7 +180,7 @@ def _setup_patch(self, pipeline_dir: Union[str, Path], modify_module: bool): def test_modules_lint_trimgalore(self): """Test linting the TrimGalore! 
module""" self.mods_install.install("trimgalore") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) + module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir) module_lint.lint(print_results=False, module="trimgalore") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -191,11 +191,11 @@ def test_modules_lint_empty(self): self.mods_remove.remove("fastqc", force=True) self.mods_remove.remove("multiqc", force=True) with pytest.raises(LookupError): - nf_core.modules.ModuleLint(dir=self.pipeline_dir) + nf_core.modules.ModuleLint(directory=self.pipeline_dir) def test_modules_lint_new_modules(self): """lint a new module""" - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, all_modules=True) assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -206,13 +206,13 @@ def test_modules_lint_no_gitlab(self): self.mods_remove.remove("fastqc", force=True) self.mods_remove.remove("multiqc", force=True) with pytest.raises(LookupError): - nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + nf_core.modules.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) def test_modules_lint_gitlab_modules(self): """Lint modules from a different remote""" self.mods_install_gitlab.install("fastqc") self.mods_install_gitlab.install("multiqc") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) module_lint.lint(print_results=False, all_modules=True) assert len(module_lint.failed) == 2 assert len(module_lint.passed) > 0 @@ -221,7 +221,7 @@ def test_modules_lint_gitlab_modules(self): def test_modules_lint_multiple_remotes(self): """Lint modules from a different remote""" self.mods_install_gitlab.install("multiqc") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) module_lint.lint(print_results=False, all_modules=True) assert len(module_lint.failed) == 1 assert len(module_lint.passed) > 0 @@ -229,14 +229,14 @@ def test_modules_lint_multiple_remotes(self): def test_modules_lint_registry(self): """Test linting the samtools module and alternative registry""" - self.mods_install.install("samtools") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, registry="public.ecr.aws") - module_lint.lint(print_results=False, module="samtools") + assert self.mods_install.install("samtools/sort") + module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir, registry="public.ecr.aws") + module_lint.lint(print_results=False, module="samtools/sort") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) - module_lint.lint(print_results=False, module="samtools") + module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir) + module_lint.lint(print_results=False, module="samtools/sort") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert 
len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 @@ -255,7 +255,7 @@ def test_modules_lint_patched_modules(self): # to avoid error from try_apply_patch() during linting with set_wd(self.pipeline_dir): module_lint = nf_core.modules.ModuleLint( - dir=self.pipeline_dir, + directory=self.pipeline_dir, remote_url=GITLAB_URL, branch=PATCH_BRANCH, hide_progress=True, @@ -298,7 +298,7 @@ def test_modules_lint_check_url(self): def test_modules_lint_snapshot_file(self): """Test linting a module with a snapshot file""" - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -315,7 +315,7 @@ def test_modules_lint_snapshot_file_missing_fail(self): "tests", "main.nf.test.snap", ).unlink() - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path( self.nfcore_modules, @@ -359,7 +359,7 @@ def test_modules_lint_snapshot_file_not_needed(self): "w", ) as fh: fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -377,7 +377,7 @@ def test_modules_environment_yml_file_doesnt_exists(self): "environment.yml.bak", ) ) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path( self.nfcore_modules, @@ -403,7 +403,7 @@ def test_modules_environment_yml_file_doesnt_exists(self): def test_modules_environment_yml_file_sorted_correctly(self): """Test linting a module with a correctly sorted environment.yml file""" - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -438,7 +438,7 @@ def test_modules_environment_yml_file_sorted_incorrectly(self): "w", ) as fh: fh.write(yaml_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # we fix the sorting on the fly, so this should pass assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -471,7 +471,7 @@ def test_modules_environment_yml_file_not_array(self): "w", ) as fh: fh.write(yaml.dump(yaml_content)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -504,7 +504,7 @@ def 
test_modules_environment_yml_file_name_mismatch(self): "w", ) as fh: fh.write(yaml.dump(yaml_content)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes yaml_content["name"] = "bpipe_test" @@ -536,7 +536,7 @@ def test_modules_meta_yml_incorrect_licence_field(self): "w", ) as fh: fh.write(yaml.dump(meta_yml)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes @@ -559,7 +559,7 @@ def test_modules_meta_yml_input_mismatch(self): main_nf_new = main_nf.replace("path bam", "path bai") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf_new) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf) @@ -578,7 +578,7 @@ def test_modules_meta_yml_output_mismatch(self): main_nf_new = main_nf.replace("emit: bam", "emit: bai") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf_new) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf) @@ -625,7 +625,7 @@ def test_modules_meta_yml_incorrect_name(self): "w", ) as fh: fh.write(yaml.dump(environment_yml)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes @@ -659,7 +659,7 @@ def test_modules_missing_test_dir(self): Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests").rename( Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak") ) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak").rename( Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests") @@ -690,7 +690,7 @@ def test_modules_missing_test_main_nf(self): "main.nf.test.bak", ) ) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path( self.nfcore_modules, @@ -719,7 +719,7 @@ def test_modules_missing_test_main_nf(self): def test_modules_unused_pytest_files(self): """Test linting a nf-test module with files still present in `tests/modules/`""" Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").mkdir(parents=True, exist_ok=True) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") 
Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").rmdir() assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -735,7 +735,7 @@ def test_nftest_failing_linting(self): self.nfcore_modules = Path(tmp_dir, "modules-test") Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_NFTEST_BRANCH) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="kallisto/quant") assert len(module_lint.failed) == 3, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -774,7 +774,7 @@ def test_modules_absent_version(self): "w", ) as fh: fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") with open( Path( @@ -812,7 +812,7 @@ def test_modules_empty_file_in_snapshot(self): with open(snap_file, "w") as fh: json.dump(snap, fh) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -841,7 +841,7 @@ def test_modules_empty_file_in_stub_snapshot(self): with open(snap_file, "w") as fh: json.dump(snap, fh) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 diff --git a/tests/modules/test_remove.py b/tests/modules/test_remove.py index a80c8b0986..2caece7ce5 100644 --- a/tests/modules/test_remove.py +++ b/tests/modules/test_remove.py @@ -8,8 +8,8 @@ class TestModulesRemove(TestModules): def test_modules_remove_trimgalore(self): """Test removing TrimGalore! 
module after installing it""" self.mods_install.install("trimgalore") - assert self.mods_install.dir is not None - module_path = Path(self.mods_install.dir, "modules", "nf-core", "modules", "trimgalore") + assert self.mods_install.directory is not None + module_path = Path(self.mods_install.directory, "modules", "nf-core", "modules", "trimgalore") assert self.mods_remove.remove("trimgalore") assert os.path.exists(module_path) is False @@ -20,7 +20,7 @@ def test_modules_remove_trimgalore_uninstalled(self): def test_modules_remove_multiqc_from_gitlab(self): """Test removing multiqc module after installing it from an alternative source""" self.mods_install_gitlab.install("multiqc") - assert self.mods_install.dir is not None - module_path = Path(self.mods_install_gitlab.dir, "modules", "nf-core-test", "multiqc") + assert self.mods_install.directory is not None + module_path = Path(self.mods_install_gitlab.directory, "modules", "nf-core-test", "multiqc") assert self.mods_remove_gitlab.remove("multiqc", force=True) assert os.path.exists(module_path) is False diff --git a/tests/subworkflows/test_install.py b/tests/subworkflows/test_install.py index e0b2fc1ab6..af1ad92415 100644 --- a/tests/subworkflows/test_install.py +++ b/tests/subworkflows/test_install.py @@ -1,4 +1,3 @@ -import os from pathlib import Path import pytest @@ -20,8 +19,8 @@ class TestSubworkflowsInstall(TestSubworkflows): def test_subworkflow_install_nopipeline(self): """Test installing a subworkflow - no pipeline given""" - assert self.subworkflow_install.dir is not None - self.subworkflow_install.dir = "" + assert self.subworkflow_install.directory is not None + self.subworkflow_install.directory = Path("non_existent_dir") assert self.subworkflow_install.install("foo") is False @with_temporary_folder @@ -29,7 +28,7 @@ def test_subworkflows_install_emptypipeline(self, tmpdir): """Test installing a subworkflow - empty dir given""" Path(tmpdir, "nf-core-pipe").mkdir(exist_ok=True) - self.subworkflow_install.dir = os.path.join(tmpdir, "nf-core-pipe") + self.subworkflow_install.directory = Path(tmpdir, "nf-core-pipe") with pytest.raises(UserWarning) as excinfo: self.subworkflow_install.install("foo") assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) @@ -41,28 +40,22 @@ def test_subworkflows_install_nosubworkflow(self): def test_subworkflows_install_bam_sort_stats_samtools(self): """Test installing a subworkflow - bam_sort_stats_samtools""" assert self.subworkflow_install.install("bam_sort_stats_samtools") is not False - subworkflow_path = os.path.join( - self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools" + subworkflow_path = Path( + self.subworkflow_install.directory, "subworkflows", "nf-core", "bam_sort_stats_samtools" ) - sub_subworkflow_path = os.path.join( - self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_stats_samtools" - ) - samtools_index_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_sort_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "sort") - samtools_stats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") - samtools_idxstats_path = os.path.join( - self.subworkflow_install.dir, "modules", "nf-core", "samtools", "idxstats" - ) - samtools_flagstat_path = os.path.join( - self.subworkflow_install.dir, "modules", "nf-core", "samtools", "flagstat" - ) - assert os.path.exists(subworkflow_path) - assert 
os.path.exists(sub_subworkflow_path) - assert os.path.exists(samtools_index_path) - assert os.path.exists(samtools_sort_path) - assert os.path.exists(samtools_stats_path) - assert os.path.exists(samtools_idxstats_path) - assert os.path.exists(samtools_flagstat_path) + sub_subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core", "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index") + samtools_sort_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "sort") + samtools_stats_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "stats") + samtools_idxstats_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "idxstats") + samtools_flagstat_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "flagstat") + assert subworkflow_path.exists() + assert sub_subworkflow_path.exists() + assert samtools_index_path.exists() + assert samtools_sort_path.exists() + assert samtools_stats_path.exists() + assert samtools_idxstats_path.exists() + assert samtools_flagstat_path.exists() def test_subworkflows_install_bam_sort_stats_samtools_twice(self): """Test installing a subworkflow - bam_sort_stats_samtools already there""" @@ -87,7 +80,7 @@ def test_subworkflows_install_different_branch_fail(self): def test_subworkflows_install_tracking(self): """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" - self.subworkflow_install.install("bam_sort_stats_samtools") + assert self.subworkflow_install.install("bam_sort_stats_samtools") # Verify that the installed_by entry was added correctly modules_json = ModulesJson(self.pipeline_dir) diff --git a/tests/subworkflows/test_lint.py b/tests/subworkflows/test_lint.py index f8c9bedbf3..38bcc2b2c0 100644 --- a/tests/subworkflows/test_lint.py +++ b/tests/subworkflows/test_lint.py @@ -14,7 +14,7 @@ class TestSubworkflowsLint(TestSubworkflows): def test_subworkflows_lint(self): """Test linting the fastq_align_bowtie2 subworkflow""" self.subworkflow_install.install("fastq_align_bowtie2") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) subworkflow_lint.lint(print_results=False, subworkflow="fastq_align_bowtie2") assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 @@ -26,11 +26,11 @@ def test_subworkflows_lint_empty(self): self.subworkflow_remove.remove("utils_nfcore_pipeline", force=True) self.subworkflow_remove.remove("utils_nfvalidation_plugin", force=True) with pytest.raises(LookupError): - nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) def test_subworkflows_lint_new_subworkflow(self): """lint a new subworkflow""" - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=True, all_subworkflows=True) assert len(subworkflow_lint.failed) == 0 @@ -40,13 +40,13 @@ def test_subworkflows_lint_new_subworkflow(self): def test_subworkflows_lint_no_gitlab(self): """Test linting a pipeline with no subworkflows installed""" with pytest.raises(LookupError): - 
nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) def test_subworkflows_lint_gitlab_subworkflows(self): """Lint subworkflows from a different remote""" self.subworkflow_install_gitlab.install("bam_stats_samtools") subworkflow_lint = nf_core.subworkflows.SubworkflowLint( - dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + directory=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH ) subworkflow_lint.lint(print_results=False, all_subworkflows=True) assert len(subworkflow_lint.failed) == 0 @@ -58,7 +58,7 @@ def test_subworkflows_lint_multiple_remotes(self): self.subworkflow_install_gitlab.install("bam_stats_samtools") self.subworkflow_install.install("fastq_align_bowtie2") subworkflow_lint = nf_core.subworkflows.SubworkflowLint( - dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + directory=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH ) subworkflow_lint.lint(print_results=False, all_subworkflows=True) assert len(subworkflow_lint.failed) == 0 @@ -67,7 +67,7 @@ def test_subworkflows_lint_multiple_remotes(self): def test_subworkflows_lint_snapshot_file(self): """Test linting a subworkflow with a snapshot file""" - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 @@ -83,7 +83,7 @@ def test_subworkflows_lint_snapshot_file_missing_fail(self): "tests", "main.nf.test.snap", ).unlink() - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") Path( self.nfcore_modules, @@ -132,7 +132,7 @@ def test_subworkflows_lint_snapshot_file_not_needed(self): "tests", "main.nf.test.snap", ).unlink() - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") Path( self.nfcore_modules, @@ -179,7 +179,7 @@ def test_subworkflows_lint_less_than_two_modules_warning(self): "w", ) as fh: fh.write(new_content) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 @@ -218,7 +218,7 @@ def test_subworkflows_lint_include_multiple_alias(self): ) as fh: fh.write(new_content) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert 
len(subworkflow_lint.passed) > 0

@@ -265,7 +265,7 @@ def test_subworkflows_lint_capitalization_fail(self):
             "w",
         ) as fh:
             fh.write(new_content)
-        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir)
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir)
         subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools")
         assert len(subworkflow_lint.failed) >= 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}"
         assert len(subworkflow_lint.passed) > 0
@@ -276,7 +276,7 @@ def test_subworkflows_lint_capitalization_fail(self):
         self.subworkflow_remove.remove("bam_stats_samtools", force=True)

     def test_subworkflows_absent_version(self):
-        """Test linting a nf-test module if the versions is absent in the snapshot file `"""
+        """Test linting an nf-test subworkflow if the versions entry is absent in the snapshot file"""
         snap_file = Path(
             self.nfcore_modules,
             "subworkflows",
@@ -291,7 +291,7 @@ def test_subworkflows_absent_version(self):
         with open(snap_file, "w") as fh:
             fh.write(new_content)

-        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules)
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules)
         subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow")
         assert len(subworkflow_lint.failed) == 0
         assert len(subworkflow_lint.passed) > 0
@@ -308,7 +308,7 @@ def test_subworkflows_missing_test_dir(self):
         test_dir_copy = shutil.copytree(test_dir, test_dir.parent / "tests_copy")
         shutil.rmtree(test_dir)

-        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules)
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules)
         subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow")
         assert len(subworkflow_lint.failed) == 0
         assert len(subworkflow_lint.passed) > 0
@@ -324,7 +324,7 @@ def test_subworkflows_missing_main_nf(self):
         main_nf_copy = shutil.copy(main_nf, main_nf.parent / "main_nf_copy")
         main_nf.unlink()

-        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules)
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules)
         subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow")
         assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}"
         assert len(subworkflow_lint.passed) > 0
@@ -351,7 +351,7 @@ def test_subworkflows_empty_file_in_snapshot(self):
         with open(snap_file, "w") as fh:
             json.dump(snap, fh)

-        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules)
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules)
         subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow")
         assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}"
         assert len(subworkflow_lint.passed) > 0
@@ -379,7 +379,7 @@ def test_subworkflows_empty_file_in_stub_snapshot(self):
         with open(snap_file, "w") as fh:
             json.dump(snap, fh)

-        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules)
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules)
         subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow")
         assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}"
         assert len(subworkflow_lint.passed) > 0
diff --git a/tests/subworkflows/test_remove.py b/tests/subworkflows/test_remove.py
index 61c016b232..bad5a2ddbb 100644
--- a/tests/subworkflows/test_remove.py
+++ b/tests/subworkflows/test_remove.py
@@ -14,10 +14,10 @@ def test_subworkflows_remove_subworkflow(self):
         """Test removing subworkflow and all its dependencies after installing it"""
         self.subworkflow_install.install("bam_sort_stats_samtools")
-        subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core")
+        subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core")
         bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools")
         bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools")
-        samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index")
+        samtools_index_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index")
         ModulesJson(self.pipeline_dir)
         mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json()
         assert self.subworkflow_remove.remove("bam_sort_stats_samtools")
@@ -40,10 +40,10 @@ def test_subworkflows_remove_subworkflow_keep_installed_module(self):
         self.subworkflow_install.install("bam_sort_stats_samtools")
         self.mods_install.install("samtools/index")

-        subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core")
+        subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core")
         bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools")
         bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools")
-        samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index")
+        samtools_index_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index")

         mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json()
         assert self.subworkflow_remove.remove("bam_sort_stats_samtools")
@@ -67,11 +67,11 @@ def test_subworkflows_remove_one_of_two_subworkflow(self):
         """Test removing subworkflow and all its dependencies after installing it"""
         self.subworkflow_install.install("bam_sort_stats_samtools")
         self.subworkflow_install.install("bam_stats_samtools")
-        subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core")
+        subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core")
         bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools")
         bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools")
-        samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index")
-        samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats")
+        samtools_index_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index")
+        samtools_stats_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "stats")

         assert self.subworkflow_remove.remove("bam_sort_stats_samtools")

@@ -85,11 +85,11 @@ def test_subworkflows_remove_one_of_two_subworkflow(self):
     def test_subworkflows_remove_included_subworkflow(self):
         """Test removing subworkflow which is installed by another subworkflow and all its dependencies."""
         self.subworkflow_install.install("bam_sort_stats_samtools")
-        subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core")
+        subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core")
        bam_sort_stats_samtools_path =
Path(subworkflow_path, "bam_sort_stats_samtools") bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") + samtools_index_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index") + samtools_stats_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "stats") assert self.subworkflow_remove.remove("bam_stats_samtools") is False From 47229ff0bc9a30f8b5ee95e7b308d5445ad7339a Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 22 Jul 2024 12:15:33 +0200 Subject: [PATCH 31/89] fix types and tests --- nf_core/components/components_command.py | 1 + nf_core/components/components_utils.py | 4 +- nf_core/components/info.py | 8 +- nf_core/components/install.py | 9 +- nf_core/components/lint/__init__.py | 32 +++--- nf_core/components/list.py | 4 +- nf_core/components/nfcore_component.py | 6 +- nf_core/modules/lint/module_version.py | 8 +- nf_core/modules/modules_json.py | 123 +++++++++++++---------- nf_core/modules/modules_repo.py | 5 +- nf_core/synced_repo.py | 63 ++++++------ nf_core/utils.py | 52 +++++++--- 12 files changed, 185 insertions(+), 130 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 13a6fed338..8d200021c2 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -38,6 +38,7 @@ def __init__( self.hide_progress = hide_progress self.no_prompts = no_prompts self._configure_repo_and_paths() + self.repo_type: Optional[str] = None def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 32f6c0fc11..743efd386d 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -27,7 +27,7 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P # Figure out the repository type from the .nf-core.yml config file if we can config_fn, tools_config = nf_core.utils.load_tools_config(base_dir) - repo_type: Optional[str] = tools_config.get("repository_type", None) + repo_type = tools_config.get("repository_type", None) # If not set, prompt the user if not repo_type and use_prompt: @@ -101,7 +101,7 @@ def prompt_component_version_sha( git_sha = "" page_nbr = 1 - all_commits = modules_repo.get_component_git_log(component_name, component_type) + all_commits = iter(modules_repo.get_component_git_log(component_name, component_type)) next_page_commits = [next(all_commits, None) for _ in range(10)] next_page_commits = [commit for commit in next_page_commits if commit is not None] diff --git a/nf_core/components/info.py b/nf_core/components/info.py index 8597875af4..d07486f638 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -97,7 +97,6 @@ def init_mod_name(self, component): Args: module: str: Module name to check """ - assert self.modules_json is not None # mypy if component is None: self.local = questionary.confirm( f"Is the {self.component_type[:-1]} locally installed?", style=nf_core.utils.nfcore_question_style @@ -105,7 +104,8 @@ def init_mod_name(self, component): if self.local: if self.repo_type == "modules": components = self.get_components_clone_modules() - else: + elif self.repo_type == "pipeline": + assert 
self.modules_json is not None # mypy all_components = self.modules_json.get_all_components(self.component_type).get( self.modules_repo.remote_url, [] ) @@ -117,6 +117,8 @@ def init_mod_name(self, component): raise UserWarning( f"No {self.component_type[:-1]} installed from '{self.modules_repo.remote_url}'" ) + else: + raise UserWarning("Unknown repository type") else: components = self.modules_repo.get_avail_components(self.component_type) components.sort() @@ -174,8 +176,8 @@ def get_local_yaml(self): Optional[dict]: Parsed meta.yml if found, None otherwise """ - assert self.modules_json is not None # mypy if self.repo_type == "pipeline": + assert self.modules_json is not None # mypy # Try to find and load the meta.yml file component_base_path = Path(self.directory, self.component_type) # Check that we have any modules/subworkflows installed from this repo diff --git a/nf_core/components/install.py b/nf_core/components/install.py index e6c31b3cbb..aa8aac81e6 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -100,8 +100,11 @@ def install(self, component: str, silent: bool = False) -> bool: modules_json.load() modules_json.update(self.component_type, self.modules_repo, component, current_version, self.installed_by) return False - - version = self.get_version(component, self.sha, self.prompt, current_version, self.modules_repo) + try: + version = self.get_version(component, self.sha, self.prompt, current_version, self.modules_repo) + except UserWarning as e: + log.error(e) + return False if not version: return False @@ -174,7 +177,7 @@ def install_included_components(self, subworkflow_dir): self.installed_by = original_installed def collect_and_verify_name( - self, component: Optional[str], modules_repo: nf_core.modules.modules_repo.ModulesRepo + self, component: Optional[str], modules_repo: "nf_core.modules.modules_repo.ModulesRepo" ) -> str: """ Collect component name. diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index 2cd59dc489..ada3ee30c2 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -92,17 +92,17 @@ def __init__( raise LookupError( f"Error parsing modules.json: {components}. " f"Please check the file for errors or try again." ) - org, comp = components - self.all_remote_components.append( - NFCoreComponent( - comp, - repo_url, - Path(self.directory, self.component_type, org, comp), - self.repo_type, - Path(self.directory), - self.component_type, + for org, comp in components: + self.all_remote_components.append( + NFCoreComponent( + comp, + repo_url, + Path(self.directory, self.component_type, org, comp), + self.repo_type, + Path(self.directory), + self.component_type, + ) ) - ) if not self.all_remote_components: raise LookupError( f"No {self.component_type} from {self.modules_repo.remote_url} installed in pipeline." 
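A side note on the components_utils hunk above: wrapping get_component_git_log() in iter() is load-bearing, because next() only accepts an iterator, while the method is merely typed as returning an Iterable (which may be a plain list). A minimal, self-contained sketch of the paging pattern; fake_git_log and next_page are illustrative stand-ins, not nf-core functions:

    # Sketch of the commit-paging pattern; fake_git_log stands in for
    # modules_repo.get_component_git_log and is not part of nf-core.
    from typing import Dict, Iterable, Iterator, List

    def fake_git_log() -> Iterable[Dict[str, str]]:
        # An Iterable may be a plain list, and calling next() on a list raises TypeError.
        return [{"git_sha": f"sha{i:02d}", "trunc_message": f"commit {i}"} for i in range(25)]

    def next_page(commits: Iterator[Dict[str, str]], size: int = 10) -> List[Dict[str, str]]:
        # next(it, None) returns None once the iterator is exhausted,
        # instead of raising StopIteration inside the comprehension.
        page = [next(commits, None) for _ in range(size)]
        return [commit for commit in page if commit is not None]

    all_commits = iter(fake_git_log())  # iter() is what makes next() safe on any Iterable
    assert [len(next_page(all_commits)) for _ in range(3)] == [10, 10, 5]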
@@ -123,7 +123,7 @@ def __init__(
                 for comp in self.get_local_components()
             ]
             self.config = nf_core.utils.fetch_wf_config(Path(self.directory), cache_config=True)
-        elif self.repo_type == "modules":
+        else:
             component_dir = Path(
                 self.directory,
                 self.default_modules_path if self.component_type == "modules" else self.default_subworkflows_path,
@@ -141,11 +141,11 @@ def __init__(
                 Path(self.directory).joinpath("tests", "config"), cache_config=True
             )

-            if registry is None:
-                self.registry = self.config.get("docker.registry", "quay.io")
-            else:
-                self.registry = registry
-            log.debug(f"Registry set to {self.registry}")
+        if registry is None:
+            self.registry = self.config.get("docker.registry", "quay.io")
+        else:
+            self.registry = registry
+        log.debug(f"Registry set to {self.registry}")

         self.lint_config = None
         self.modules_json = None
diff --git a/nf_core/components/list.py b/nf_core/components/list.py
index b24732e5c3..67468b4a5a 100644
--- a/nf_core/components/list.py
+++ b/nf_core/components/list.py
@@ -5,7 +5,7 @@ import rich.table
 from nf_core.components.components_command import ComponentCommand
-from nf_core.modules.modules_json import ModulesJson
+from nf_core.modules.modules_json import ModulesJson, ModulesJsonModuleEntry
 from nf_core.modules.modules_repo import ModulesRepo

 log = logging.getLogger(__name__)
@@ -110,7 +110,7 @@ def pattern_msg(keywords: List[str]) -> str:
             modules_json_file = modules_json.modules_json

         for repo_url, component_with_dir in sorted(repos_with_comps.items()):
-            repo_entry: Dict[str, Dict[str, Dict[str, Dict[str, Union[str, List[str]]]]]]
+            repo_entry: Dict[str, Dict[str, Dict[str, ModulesJsonModuleEntry]]]
             if modules_json_file is None:
                 log.warning(f"Modules JSON file '{modules_json.modules_json_path}' is missing. ")
                 continue
diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py
index 1433122225..0c63141c73 100644
--- a/nf_core/components/nfcore_component.py
+++ b/nf_core/components/nfcore_component.py
@@ -21,7 +21,7 @@ def __init__(
         component_name: str,
         repo_url: Optional[str],
         component_dir: Path,
-        repo_type: str,
+        repo_type: Optional[str],
         base_dir: Path,
         component_type: str,
         remote_component: bool = True,
@@ -54,6 +54,7 @@ def __init__(
         self.has_meta: bool = False
         self.git_sha: Optional[str] = None
         self.is_patched: bool = False
+        self.branch: Optional[str] = None

         if remote_component:
             # Initialize the important files
@@ -85,6 +86,9 @@ def __init__(
             self.test_yml = None
             self.test_main_nf = None

+    def __repr__(self) -> str:
+        return f"<NFCoreComponent {self.component_name} {self.component_dir} {self.repo_url}>"
+
     def _get_main_nf_tags(self, test_main_nf: Union[Path, str]):
         """Collect all tags from the main.nf.test file."""
         tags = []
diff --git a/nf_core/modules/lint/module_version.py b/nf_core/modules/lint/module_version.py
index d08658f5da..d0ef17a44d 100644
--- a/nf_core/modules/lint/module_version.py
+++ b/nf_core/modules/lint/module_version.py
@@ -8,11 +8,12 @@
 import nf_core
 import nf_core.modules.modules_repo
 import nf_core.modules.modules_utils
+from nf_core.modules.modules_utils import NFCoreComponent

 log = logging.getLogger(__name__)


-def module_version(module_lint_object, module):
+def module_version(module_lint_object: "nf_core.modules.lint.ModuleLint", module: "NFCoreComponent"):
     """
     Verifies that the module has a version specified in the ``modules.json`` file
     containing a commit SHA. If that is true, it verifies that there is no newer
     version of the module available.
""" - - modules_json_path = Path(module_lint_object.dir, "modules.json") + assert module_lint_object.modules_json is not None # mypy + assert module.repo_url is not None # mypy + modules_json_path = Path(module_lint_object.directory, "modules.json") # Verify that a git_sha exists in the `modules.json` file for this module version = module_lint_object.modules_json.get_module_version(module.component_name, module.repo_url, module.org) if version is None: diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index faeb84fc35..c0e41d1b42 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -6,7 +6,7 @@ import shutil import tempfile from pathlib import Path -from typing import Any, List, Optional, Tuple, Union +from typing import Dict, List, NotRequired, Optional, Tuple, TypedDict, Union import git import questionary @@ -27,6 +27,19 @@ log = logging.getLogger(__name__) +class ModulesJsonModuleEntry(TypedDict): + branch: str + git_sha: str + installed_by: List[str] + patch: NotRequired[str] + + +class ModulesJsonType(TypedDict): + name: str + homePage: str + repos: Dict[str, Dict[str, Dict[str, Dict[str, ModulesJsonModuleEntry]]]] + + class ModulesJson: """ An object for handling a 'modules.json' file in a pipeline @@ -43,10 +56,10 @@ def __init__(self, pipeline_dir: Union[str, Path]): self.modules_dir = self.directory / "modules" self.subworkflows_dir = self.directory / "subworkflows" self.modules_json_path = self.directory / "modules.json" - self.modules_json = None + self.modules_json: Optional[ModulesJsonType] = None self.pipeline_modules = None self.pipeline_subworkflows = None - self.pipeline_components = None + self.pipeline_components: Optional[Dict[str, List[Tuple[str, str]]]] = None def __str__(self): if self.modules_json is None: @@ -56,7 +69,7 @@ def __str__(self): def __repr__(self): return self.__str__() - def create(self): + def create(self) -> None: """ Creates the modules.json file from the modules and subworkflows installed in the pipeline directory @@ -66,7 +79,7 @@ def create(self): pipeline_config = nf_core.utils.fetch_wf_config(self.directory) pipeline_name = pipeline_config.get("manifest.name", "") pipeline_url = pipeline_config.get("manifest.homePage", "") - new_modules_json = {"name": pipeline_name.strip("'"), "homePage": pipeline_url.strip("'"), "repos": {}} + new_modules_json = ModulesJsonType(name=pipeline_name, homePage=pipeline_url, repos={}) if not self.modules_dir.exists(): if rich.prompt.Confirm.ask( @@ -79,7 +92,6 @@ def create(self): # Get repositories repos, _ = self.get_pipeline_module_repositories("modules", self.modules_dir) - # Get all module/subworkflow names in the repos repo_module_names = self.get_component_names_from_repo(repos, self.modules_dir) repo_subworkflow_names = self.get_component_names_from_repo(repos, self.subworkflows_dir) @@ -105,7 +117,9 @@ def create(self): self.modules_json = new_modules_json self.dump() - def get_component_names_from_repo(self, repos, directory): + def get_component_names_from_repo( + self, repos: Dict[str, Dict[str, Dict[str, Dict[str, Dict[str, str | List[str]]]]]], directory: Path + ) -> List[Tuple[str, List[str], str]]: """ Get component names from repositories in a pipeline. 
@@ -122,8 +136,8 @@ def get_component_names_from_repo(self, repos, directory): components = ( repo_url, [ - str(Path(component_name).relative_to(directory / modules_repo.repo_path)) - for component_name, _, file_names in os.walk(directory / modules_repo.repo_path) + str(component_name.relative_to(directory / modules_repo.repo_path)) + for component_name, _, file_names in Path.walk(directory / modules_repo.repo_path) if "main.nf" in file_names ], modules_repo.repo_path, @@ -131,7 +145,9 @@ def get_component_names_from_repo(self, repos, directory): names.append(components) return names - def get_pipeline_module_repositories(self, component_type, directory, repos=None): + def get_pipeline_module_repositories( + self, component_type: str, directory: Path, repos: Optional[Dict] = None + ) -> Tuple[Dict[str, Dict[str, Dict[str, Dict[str, Dict[str, Union[str, List[str]]]]]]], Dict[Path, Path]]: """ Finds all module repositories in the modules and subworkflows directory. Ignores the local modules/subworkflows. @@ -153,6 +169,7 @@ def get_pipeline_module_repositories(self, component_type, directory, repos=None # The function might rename some directories, keep track of them renamed_dirs = {} # Check if there are any untracked repositories + dirs_not_covered = self.dir_tree_uncovered(directory, [Path(ModulesRepo(url).repo_path) for url in repos]) if len(dirs_not_covered) > 0: log.info(f"Found custom {component_type[:-1]} repositories when creating 'modules.json'") @@ -245,8 +262,12 @@ def dir_tree_uncovered(self, components_directory, repos): return dirs_not_covered def determine_branches_and_shas( - self, component_type: str, install_dir: Union[str, Path], remote_url: str, components: List[Path] - ) -> dict[Path, dict[str, Any]]: + self, + component_type: str, + install_dir: Union[str, Path], + remote_url: str, + components: List[str], + ) -> Dict[str, ModulesJsonModuleEntry]: """ Determines what branch and commit sha each module/subworkflow in the pipeline belongs to @@ -273,10 +294,10 @@ def determine_branches_and_shas( available_branches = ModulesRepo.get_remote_branches(remote_url) sb_local = [] dead_components = [] - repo_entry = {} + repo_entry: Dict[str, ModulesJsonModuleEntry] = {} for component in sorted(components): modules_repo = default_modules_repo - component_path = repo_path / component + component_path = Path(repo_path, component) correct_commit_sha = None tried_branches = {default_modules_repo.branch} found_sha = False @@ -286,16 +307,16 @@ def determine_branches_and_shas( if patch_file.is_file(): temp_module_dir = self.try_apply_patch_reverse(component, install_dir, patch_file, component_path) correct_commit_sha = self.find_correct_commit_sha( - component_type, str(component), temp_module_dir, modules_repo + component_type, component, temp_module_dir, modules_repo ) else: correct_commit_sha = self.find_correct_commit_sha( - component_type, str(component), component_path, modules_repo + component_type, component, component_path, modules_repo ) if correct_commit_sha is None: # Check in the old path correct_commit_sha = self.find_correct_commit_sha( - component_type, str(component), repo_path / component_type / component, modules_repo + component_type, component, repo_path / component_type / component, modules_repo ) if correct_commit_sha is None: log.info( @@ -328,7 +349,7 @@ def determine_branches_and_shas( else: found_sha = True break - if found_sha: + if found_sha and correct_commit_sha is not None: repo_entry[component] = { "branch": modules_repo.branch, "git_sha": 
correct_commit_sha, @@ -337,7 +358,7 @@ def determine_branches_and_shas( # Clean up the modules/subworkflows we were unable to find the sha for for component in sb_local: - log.debug(f"Moving {component_type[:-1]} '{Path(install_dir, component)}' to 'local' directory") + log.debug(f"Moving {component_type[:-1]} '{Path(install_dir, str(component))}' to 'local' directory") self.move_component_to_local(component_type, component, str(install_dir)) for component in dead_components: @@ -408,14 +429,14 @@ def move_component_to_local(self, component_type: str, component: Union[str, Pat to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}" shutil.move(current_path, local_dir / to_name) - def unsynced_components(self) -> Tuple[List[Path], List[Path], dict]: + def unsynced_components(self) -> Tuple[List[str], List[str], dict]: """ Compute the difference between the modules/subworkflows in the directory and the modules/subworkflows in the 'modules.json' file. This is done by looking at all directories containing a 'main.nf' file Returns: - (untrack_dirs ([ Path ]), missing_installation (dict)): Directories that are not tracked + (untrack_dirs ([ str ]), missing_installation (dict)): Directories that are not tracked by the modules.json file, and modules/subworkflows in the modules.json where the installation directory is missing """ @@ -443,7 +464,7 @@ def unsynced_components(self) -> Tuple[List[Path], List[Path], dict]: return untracked_dirs_modules, untracked_dirs_subworkflows, missing_installation - def parse_dirs(self, dirs: List[Path], missing_installation: dict, component_type: str) -> Tuple[List[Path], dict]: + def parse_dirs(self, dirs: List[Path], missing_installation: Dict, component_type: str) -> Tuple[List[str], Dict]: """ Parse directories and check if they are tracked in the modules.json file @@ -461,9 +482,10 @@ def parse_dirs(self, dirs: List[Path], missing_installation: dict, component_typ for dir_ in dirs: # Check if the module/subworkflows directory exists in modules.json install_dir = dir_.parts[0] - component = Path(*dir_.parts[1:]) + component = "/".join(dir_.parts[1:]) component_in_file = False git_url = "" + for repo in missing_installation: if component_type in missing_installation[repo]: if install_dir in missing_installation[repo][component_type]: @@ -564,7 +586,7 @@ def reinstall_repo(self, install_dir, remote_url, module_entries): failed_to_install.append(module) return failed_to_install - def check_up_to_date(self) -> bool: + def check_up_to_date(self): """ Checks whether the modules and subworkflows installed in the directory are consistent with the entries in the 'modules.json' file and vice versa. 
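The determine_branches_and_shas logic above is easiest to review with its control flow isolated: for every component it keeps trying remote branches until one of them contains a matching commit, and gives up once all branches are exhausted. A stripped-down sketch of that search, with a stubbed predicate in place of the real per-branch git queries (sha_found_on is not an nf-core function):

    # Stripped-down sketch of the branch search in determine_branches_and_shas.
    # sha_found_on is a stand-in predicate, not part of nf-core.
    from typing import Callable, Iterable, Optional, Set

    def find_branch_for(
        component: str,
        default_branch: str,
        available_branches: Iterable[str],
        sha_found_on: Callable[[str, str], bool],
    ) -> Optional[str]:
        tried: Set[str] = {default_branch}
        branch = default_branch
        while True:
            if sha_found_on(component, branch):
                return branch  # the real code records the SHA and breaks here
            untried = set(available_branches) - tried
            if not untried:
                return None  # the component is treated as dead or locally modified
            branch = untried.pop()  # retry with another remote branch
            tried.add(branch)

    # A component whose commits only exist on a "dev" branch:
    assert find_branch_for("fastqc", "master", {"master", "dev"}, lambda c, b: b == "dev") == "dev"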
@@ -715,14 +737,12 @@ def update(
             self.modules_json["repos"][remote_url][component_type] = {repo_name: {}}
         repo_component_entry = self.modules_json["repos"][remote_url][component_type][repo_name]
         if component_name not in repo_component_entry:
-            repo_component_entry[component_name] = {}
+            repo_component_entry[component_name] = {"branch": "", "git_sha": "", "installed_by": []}
         repo_component_entry[component_name]["git_sha"] = component_version
         repo_component_entry[component_name]["branch"] = branch
         try:
             if installed_by not in repo_component_entry[component_name]["installed_by"] and installed_by is not None:
                 repo_component_entry[component_name]["installed_by"].append(installed_by)
-        except KeyError:
-            repo_component_entry[component_name]["installed_by"] = [installed_by]
         finally:
             new_installed_by = repo_component_entry[component_name]["installed_by"] + list(installed_by_log)
             repo_component_entry[component_name]["installed_by"] = sorted([*set(new_installed_by)])
@@ -912,7 +932,7 @@ def get_modules_json(self) -> dict:
         if self.modules_json is None:
             self.load()
         assert self.modules_json is not None  # mypy
-        return copy.deepcopy(self.modules_json)
+        return copy.deepcopy(self.modules_json)  # type: ignore

     def get_component_version(self, component_type, component_name, repo_url, install_dir):
         """
@@ -938,7 +958,7 @@ def get_component_version(self, component_type, component_name, repo_url, install_dir):
             .get("git_sha", None)
         )

-    def get_module_version(self, module_name, repo_url, install_dir):
+    def get_module_version(self, module_name: str, repo_url: str, install_dir: str) -> Optional[str]:
         """
         Returns the version of a module

@@ -953,14 +973,11 @@ def get_module_version(self, module_name, repo_url, install_dir):
         if self.modules_json is None:
             self.load()
         assert self.modules_json is not None  # mypy
-        return (
-            self.modules_json.get("repos", {})
-            .get(repo_url, {})
-            .get("modules", {})
-            .get(install_dir, {})
-            .get(module_name, {})
-            .get("git_sha", None)
-        )
+        try:
+            sha = self.modules_json["repos"][repo_url]["modules"][install_dir][module_name]["git_sha"]
+        except KeyError:
+            sha = None
+        return sha

     def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir):
         """
@@ -986,7 +1003,7 @@ def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir):
             .get("git_sha", None)
         )

-    def get_all_components(self, component_type: str) -> dict[str, Tuple[(str, str)]]:
+    def get_all_components(self, component_type: str) -> dict[str, List[Tuple[(str, str)]]]:
         """
         Retrieves all pipeline modules/subworkflows that are reported in the modules.json
@@ -1002,8 +1019,8 @@ def get_all_components(self, component_type: str) -> dict[str, List[Tuple[(str, str)]]]:
             self.pipeline_components = {}
             for repo, repo_entry in self.modules_json.get("repos", {}).items():
                 if component_type in repo_entry:
-                    for dir, components in repo_entry[component_type].items():
-                        self.pipeline_components[repo] = [(dir, m) for m in components]
+                    for directory, components in repo_entry[component_type].items():
+                        self.pipeline_components[repo] = [(directory, m) for m in components]

         return self.pipeline_components

@@ -1072,7 +1089,9 @@ def get_installed_by_entries(self, component_type, name):

         return installed_by_entries

-    def get_component_branch(self, component_type, component, repo_url, install_dir):
+    def get_component_branch(
+        self, component_type: str, component: Union[str, Path], repo_url: str, install_dir: str
+    ) -> str:
         """
         Gets the branch from which the module/subworkflow was installed
@@ -1084,14 +1103,10 @@ def get_component_branch(self, component_type, component, repo_url, install_dir)
         if self.modules_json is None:
             self.load()
         assert self.modules_json is not None  # mypy
-        branch = (
-            self.modules_json["repos"]
-            .get(repo_url, {})
-            .get(component_type, {})
-            .get(install_dir, {})
-            .get(component, {})
-            .get("branch")
-        )
+        try:
+            branch = self.modules_json["repos"][repo_url][component_type][install_dir][str(component)]["branch"]
+        except (KeyError, TypeError):
+            branch = None
         if branch is None:
             raise LookupError(
                 f"Could not find branch information for component '{Path(install_dir, component)}'."
@@ -1112,7 +1127,7 @@ def dump(self, run_prettier: bool = False) -> None:
             with open(self.modules_json_path, "w") as fh:
                 json.dump(self.modules_json, fh, indent=4)

-    def resolve_missing_installation(self, missing_installation, component_type):
+    def resolve_missing_installation(self, missing_installation: Dict, component_type: str) -> None:
         missing_but_in_mod_json = [
             f"'{component_type}/{install_dir}/{component}'"
             for repo_url, contents in missing_installation.items()
@@ -1168,7 +1183,7 @@ def resolve_missing_from_modules_json(self, missing_from_modules_json, component_type):
         # Get tuples of components that miss installation and their install directory
         def components_with_repos():
-            for dir in missing_from_modules_json:
+            for directory in missing_from_modules_json:
                 for repo_url in repos:
                     modules_repo = ModulesRepo(repo_url)
                     paths_in_directory = []
                     )
                     for dir_name, _, _ in os.walk(repo_url_path):
                         if component_type == "modules":
-                            if len(Path(dir).parts) > 1:  # The module name is TOOL/SUBTOOL
+                            if len(Path(directory).parts) > 1:  # The module name is TOOL/SUBTOOL
                                 paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:])))
                                 pass
                         paths_in_directory.append(Path(dir_name).parts[-1])
-                    if dir in paths_in_directory:
-                        yield (modules_repo.repo_path, dir)
+                    if directory in paths_in_directory:
+                        yield (modules_repo.repo_path, directory)

         # Add all components into a dictionary with install directories
         repos_with_components = {}
diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py
index a979ae45b8..daa7b59811 100644
--- a/nf_core/modules/modules_repo.py
+++ b/nf_core/modules/modules_repo.py
@@ -1,6 +1,7 @@
 import logging
 import os
 import shutil
+from pathlib import Path
 from typing import Optional

 import git
@@ -72,8 +73,8 @@ def __init__(
             self.verify_branch()

         # Convenience variable
-        self.modules_dir = os.path.join(self.local_repo_dir, "modules", self.repo_path)
-        self.subworkflows_dir = os.path.join(self.local_repo_dir, "subworkflows", self.repo_path)
+        self.modules_dir = Path(self.local_repo_dir, "modules", self.repo_path)
+        self.subworkflows_dir = Path(self.local_repo_dir, "subworkflows", self.repo_path)

         self.avail_module_names = None

diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py
index 4b69d4af87..33e7f0a54a 100644
--- a/nf_core/synced_repo.py
+++ b/nf_core/synced_repo.py
@@ -4,7 +4,7 @@
 import shutil
 from configparser import NoOptionError, NoSectionError
 from pathlib import Path
-from typing import Dict, Optional, Union
+from typing import Dict, Iterable, Optional, Union

 import git
 from git.exc import GitCommandError
@@ -121,28 +121,32 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa
             remote_url = NF_CORE_MODULES_REMOTE

         self.remote_url = remote_url
+        self.fullname = None
+        self.local_repo_dir = None
         self.repo = None
         # TODO: SyncedRepo doesn't have this method and both the ModulesRepo and
        # the WorkflowRepo define their own including custom init methods. This needs
        # fixing.
        self.setup_local_repo(remote_url, branch, hide_progress)

+        if self.local_repo_dir is None:
+            raise ValueError("Repository not initialized")
+        else:
+            config_fn, repo_config = load_tools_config(self.local_repo_dir)
+            try:
+                self.repo_path = repo_config["org_path"]
+            except KeyError:
+                raise UserWarning(f"'org_path' key not present in {config_fn.name}")
-        config_fn, repo_config = load_tools_config(self.local_repo_dir)
-        try:
-            self.repo_path = repo_config["org_path"]
-        except KeyError:
-            raise UserWarning(f"'org_path' key not present in {config_fn.name}")

-        # Verify that the repo seems to be correctly configured
-        if self.repo_path != NF_CORE_MODULES_NAME or self.branch:
-            self.verify_branch()
+            # Verify that the repo seems to be correctly configured
+            if self.repo_path != NF_CORE_MODULES_NAME or self.branch:
+                self.verify_branch()

-        # Convenience variable
-        self.modules_dir = os.path.join(self.local_repo_dir, "modules", self.repo_path)
-        self.subworkflows_dir = os.path.join(self.local_repo_dir, "subworkflows", self.repo_path)
+            # Convenience variable
+            self.modules_dir = Path(self.local_repo_dir, "modules", self.repo_path)
+            self.subworkflows_dir = Path(self.local_repo_dir, "subworkflows", self.repo_path)

-        self.avail_module_names = None
+            self.avail_module_names = None

     def setup_local_repo(self, remote_url, branch, hide_progress):
         pass
@@ -361,7 +365,9 @@ def ensure_git_user_config(self, default_name: str, default_email: str) -> None:
         if not user_email:
             git_config.set_value("user", "email", default_email)

-    def get_component_git_log(self, component_name: Union[str, Path], component_type: str, depth: Optional[int] = None):
+    def get_component_git_log(
+        self, component_name: Union[str, Path], component_type: str, depth: Optional[int] = None
+    ) -> Iterable[Dict[str, str]]:
         """
         Fetches the commit history of the requested module/subworkflow since a given date.
The default value is not arbitrary - it is the last time the structure of the nf-core/modules repository was had an @@ -373,35 +379,32 @@ def get_component_git_log(self, component_name: Union[str, Path], component_type Returns: ( dict ): Iterator of commit SHAs and associated (truncated) message """ - if self.repo is None: raise ValueError("Repository not initialized") self.checkout_branch() component_path = Path(component_type, self.repo_path, component_name) - commits_new = self.repo.iter_commits(max_count=depth, paths=component_path) - if not commits_new: - raise ValueError(f"Could not find any commits for '{component_name}' in '{self.remote_url}'") - else: - commits_new = [ - {"git_sha": commit.hexsha, "trunc_message": commit.message.splitlines()[0]} for commit in commits_new - ] - commits_old = [] + commits_new_iter = self.repo.iter_commits(max_count=depth, paths=component_path) + commits_old_iter = [] if component_type == "modules": # Grab commits also from previous modules structure - component_path = Path("modules", component_name) - commits_old = self.repo.iter_commits(max_count=depth, paths=component_path) - commits_old = [ - {"git_sha": commit.hexsha, "trunc_message": commit.message.splitlines()[0]} for commit in commits_old - ] + old_component_path = Path("modules", component_name) + commits_old_iter = self.repo.iter_commits(max_count=depth, paths=old_component_path) + + commits_old = [{"git_sha": commit.hexsha, "trunc_message": commit.message} for commit in commits_old_iter] + commits_new = [{"git_sha": commit.hexsha, "trunc_message": commit.message} for commit in commits_new_iter] commits = iter(commits_new + commits_old) + return commits def get_latest_component_version(self, component_name, component_type): """ Returns the latest commit in the repository """ - return list(self.get_component_git_log(component_name, component_type, depth=1))[0]["git_sha"] + try: + return list(self.get_component_git_log(component_name, component_type, depth=1))[0]["git_sha"] + except UserWarning: + return None def sha_exists_on_branch(self, sha): """ diff --git a/nf_core/utils.py b/nf_core/utils.py index d1e9ccfe95..4d0566b1af 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -30,6 +30,7 @@ import rich.markup import yaml from packaging.version import Version +from pydantic import BaseModel, ValidationError from rich.live import Live from rich.spinner import Spinner @@ -175,9 +176,9 @@ def _load_conda_environment(self) -> bool: log.debug("No conda `environment.yml` file found.") return False - def _fp(self, fn): + def _fp(self, fn: Union[str, Path]) -> Path: """Convenience function to get full path to a file in the pipeline""" - return os.path.join(self.wf_path, fn) + return Path(self.wf_path, fn) def list_files(self) -> List[Path]: """Get a list of all files in the pipeline""" @@ -191,9 +192,8 @@ def list_files(self) -> List[Path]: files.append(full_fn) else: log.debug(f"`git ls-files` returned '{full_fn}' but could not open it!") - except subprocess.CalledProcessError as e: + except subprocess.CalledProcessError: # Failed, so probably not initialised as a git repository - just a list of all files - log.debug(f"Couldn't call 'git ls-files': {e}") files = [] for file_path in self.wf_path.rglob("*"): if file_path.is_file(): @@ -1041,7 +1041,26 @@ def get_repo_releases_branches(pipeline, wfs): DEPRECATED_CONFIG_PATHS = [".nf-core-lint.yml", ".nf-core-lint.yaml"] -def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, dict]: +class NFCoreTemplateConfig(BaseModel): + org: str 
+    name: str
+    description: str
+    author: str
+    version: Optional[str] = None
+    force: Optional[bool] = None
+    outdir: Optional[str] = None
+    skip_features: Optional[list] = None
+    is_nfcore: Optional[bool] = None
+
+
+class NFCoreYamlConfig(BaseModel):
+    nf_core_version: str
+    repository_type: str
+    org_path: str
+    template: NFCoreTemplateConfig
+
+
+def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, NFCoreYamlConfig]:
     """
     Parse the nf-core.yml configuration file
@@ -1059,21 +1078,26 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, dict]:
     if config_fn is None:
         depr_path = get_first_available_path(directory, DEPRECATED_CONFIG_PATHS)
         if depr_path:
-            log.error(
-                f"Deprecated `{depr_path.name}` file found! The file will not be loaded. "
-                f"Please rename the file to `{CONFIG_PATHS[0]}`."
+            raise AssertionError(
+                f"Deprecated `{depr_path.name}` file found! Please rename the file to `{CONFIG_PATHS[0]}`."
             )
         else:
-            log.debug(f"No tools config file found: {CONFIG_PATHS[0]}")
-            return Path(directory, CONFIG_PATHS[0]), {}
-
-    with open(config_fn) as fh:
+            raise AssertionError(f"Could not find a config file in the directory '{directory}'")
+    with open(str(config_fn)) as fh:
         tools_config = yaml.safe_load(fh)

     # If the file is empty
-    tools_config = tools_config or {}
+    if tools_config is None:
+        raise AssertionError(f"Config file '{config_fn}' is empty")
+
+    # Check for required fields
+    try:
+        nf_core_yaml_config = NFCoreYamlConfig(**tools_config)
+    except ValidationError as e:
+        raise AssertionError(f"Config file '{config_fn}' is invalid: {e}")

     log.debug("Using config file: %s", config_fn)
-    return config_fn, tools_config
+    return config_fn, nf_core_yaml_config


 def determine_base_dir(directory: Union[Path, str] = ".") -> Path:

From ff1a49520f69d228acead56fc232760ded321fe3 Mon Sep 17 00:00:00 2001
From: mashehu
Date: Wed, 24 Jul 2024 14:21:12 +0200
Subject: [PATCH 32/89] avoid circular import due to modules_json being
 imported

---
 nf_core/modules/__init__.py            | 12 -----
 nf_core/modules/lint/module_version.py |  5 +-
 tests/modules/test_bump_versions.py    | 10 ++--
 tests/modules/test_create.py           | 16 +++---
 tests/modules/test_info.py             | 10 ++--
 tests/modules/test_install.py          |  6 +--
 tests/modules/test_lint.py             | 69 +++++++++++++-------------
 tests/modules/test_list.py             | 26 +++++-----
 tests/modules/test_modules_json.py     |  4 +-
 tests/modules/test_patch.py            | 29 ++++++-----
 tests/modules/test_update.py           | 20 +++++---
 tests/test_subworkflows.py             | 43 +---------------
 12 files changed, 105 insertions(+), 145 deletions(-)

diff --git a/nf_core/modules/__init__.py b/nf_core/modules/__init__.py
index 6be871ece8..e69de29bb2 100644
--- a/nf_core/modules/__init__.py
+++ b/nf_core/modules/__init__.py
@@ -1,12 +0,0 @@
-from .bump_versions import ModuleVersionBumper
-from .create import ModuleCreate
-from .info import ModuleInfo
-from .install import ModuleInstall
-from .lint import ModuleLint
-from .list import ModuleList
-from .modules_json import ModulesJson
-from .modules_repo import ModulesRepo
-from .modules_utils import ModuleExceptionError
-from .patch import ModulePatch
-from .remove import ModuleRemove
-from .update import ModuleUpdate
diff --git a/nf_core/modules/lint/module_version.py b/nf_core/modules/lint/module_version.py
index d0ef17a44d..207d5e9418 100644
--- a/nf_core/modules/lint/module_version.py
+++ b/nf_core/modules/lint/module_version.py
@@ -6,11 +6,12 @@
 from pathlib import Path

 import nf_core
+import nf_core.modules.lint
 import nf_core.modules.modules_repo
 import
nf_core.modules.modules_utils from nf_core.modules.modules_utils import NFCoreComponent @@ -40,8 +41,8 @@ def module_version(module_lint_object: "nf_core.modules.lint.ModuleLint", module ) modules_repo = nf_core.modules.modules_repo.ModulesRepo(remote_url=module.repo_url, branch=module.branch) - module_git_log = modules_repo.get_component_git_log(module.component_name, "modules") - if version == next(module_git_log)["git_sha"]: + module_git_log = list(modules_repo.get_component_git_log(module.component_name, "modules")) + if version == module_git_log[0]["git_sha"]: module.passed.append(("module_version", "Module is the latest version", module.component_dir)) else: module.warned.append(("module_version", "New version available", module.component_dir)) diff --git a/tests/modules/test_bump_versions.py b/tests/modules/test_bump_versions.py index 29e030668d..d46b8747c8 100644 --- a/tests/modules/test_bump_versions.py +++ b/tests/modules/test_bump_versions.py @@ -3,7 +3,7 @@ import pytest -import nf_core.modules +import nf_core.modules.bump_versions from nf_core.modules.modules_utils import ModuleExceptionError from ..test_modules import TestModules @@ -19,19 +19,19 @@ def test_modules_bump_versions_single_module(self): new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content) with open(env_yml_path, "w") as fh: fh.write(new_content) - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + version_bumper = nf_core.modules.bump_versions.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) version_bumper.bump_versions(module="bpipe/test") assert len(version_bumper.failed) == 0 def test_modules_bump_versions_all_modules(self): """Test updating all modules""" - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + version_bumper = nf_core.modules.bump_versions.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) version_bumper.bump_versions(all_modules=True) assert len(version_bumper.failed) == 0 def test_modules_bump_versions_fail(self): """Fail updating a module with wrong name""" - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + version_bumper = nf_core.modules.bump_versions.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) with pytest.raises(ModuleExceptionError) as excinfo: version_bumper.bump_versions(module="no/module") assert "Could not find the specified module:" in str(excinfo.value) @@ -45,6 +45,6 @@ def test_modules_bump_versions_fail_unknown_version(self): new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content) with open(env_yml_path, "w") as fh: fh.write(new_content) - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + version_bumper = nf_core.modules.bump_versions.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) version_bumper.bump_versions(module="bpipe/test") assert "Conda package had unknown version" in version_bumper.failed[0][0] diff --git a/tests/modules/test_create.py b/tests/modules/test_create.py index c847501830..219f869997 100644 --- a/tests/modules/test_create.py +++ b/tests/modules/test_create.py @@ -9,7 +9,7 @@ import yaml from git.repo import Repo -import nf_core.modules +import nf_core.modules.create from tests.utils import ( GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL, @@ -26,7 +26,7 @@ def test_modules_create_succeed(self): with responses.RequestsMock() as rsps: mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7") mock_biocontainers_api_calls(rsps, 
"trim-galore", "0.6.7") - module_create = nf_core.modules.ModuleCreate( + module_create = nf_core.modules.create.ModuleCreate( self.pipeline_dir, "trimgalore", "@author", "process_single", True, True, conda_name="trim-galore" ) with requests_cache.disabled(): @@ -38,7 +38,7 @@ def test_modules_create_fail_exists(self): with responses.RequestsMock() as rsps: mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7") mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7") - module_create = nf_core.modules.ModuleCreate( + module_create = nf_core.modules.create.ModuleCreate( self.pipeline_dir, "trimgalore", "@author", "process_single", False, False, conda_name="trim-galore" ) with requests_cache.disabled(): @@ -53,7 +53,7 @@ def test_modules_create_nfcore_modules(self): with responses.RequestsMock() as rsps: mock_anaconda_api_calls(rsps, "fastqc", "0.11.9") mock_biocontainers_api_calls(rsps, "fastqc", "0.11.9") - module_create = nf_core.modules.ModuleCreate( + module_create = nf_core.modules.create.ModuleCreate( self.nfcore_modules, "fastqc", "@author", "process_low", False, False ) with requests_cache.disabled(): @@ -68,7 +68,7 @@ def test_modules_create_nfcore_modules_subtool(self): with responses.RequestsMock() as rsps: mock_anaconda_api_calls(rsps, "star", "2.8.10a") mock_biocontainers_api_calls(rsps, "star", "2.8.10a") - module_create = nf_core.modules.ModuleCreate( + module_create = nf_core.modules.create.ModuleCreate( self.nfcore_modules, "star/index", "@author", "process_medium", False, False ) with requests_cache.disabled(): @@ -94,7 +94,7 @@ def test_modules_migrate(self, mock_rich_ask): # Create a module with --migrate-pytest mock_rich_ask.return_value = True - module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create = nf_core.modules.create.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) module_create.create() with open(module_dir / "main.nf") as fh: @@ -128,7 +128,7 @@ def test_modules_migrate_no_delete(self, mock_rich_ask): # Create a module with --migrate-pytest mock_rich_ask.return_value = False - module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create = nf_core.modules.create.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) module_create.create() # Check that pytest folder is not deleted @@ -157,7 +157,7 @@ def test_modules_migrate_symlink(self, mock_rich_ask): # Create a module with --migrate-pytest mock_rich_ask.return_value = True - module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create = nf_core.modules.create.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) module_create.create() # Check that symlink is deleted diff --git a/tests/modules/test_info.py b/tests/modules/test_info.py index 8906854044..8e60bed316 100644 --- a/tests/modules/test_info.py +++ b/tests/modules/test_info.py @@ -1,6 +1,6 @@ from rich.console import Console -import nf_core.modules +import nf_core.modules.info from ..test_modules import TestModules from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL @@ -9,7 +9,7 @@ class TestModulesCreate(TestModules): def test_modules_info_remote(self): """Test getting info about a remote module""" - mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "fastqc") + mods_info = nf_core.modules.info.ModuleInfo(self.pipeline_dir, "fastqc") mods_info_output = mods_info.get_component_info() console = 
Console(record=True) console.print(mods_info_output) @@ -21,7 +21,7 @@ def test_modules_info_remote(self): def test_modules_info_remote_gitlab(self): """Test getting info about a module in the remote gitlab repo""" - mods_info = nf_core.modules.ModuleInfo( + mods_info = nf_core.modules.info.ModuleInfo( self.pipeline_dir, "fastqc", remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH ) mods_info_output = mods_info.get_component_info() @@ -37,7 +37,7 @@ def test_modules_info_remote_gitlab(self): def test_modules_info_local(self): """Test getting info about a locally installed module""" self.mods_install.install("trimgalore") - mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "trimgalore") + mods_info = nf_core.modules.info.ModuleInfo(self.pipeline_dir, "trimgalore") mods_info_output = mods_info.get_component_info() console = Console(record=True) console.print(mods_info_output) @@ -50,7 +50,7 @@ def test_modules_info_local(self): def test_modules_info_in_modules_repo(self): """Test getting info about a module in the modules repo""" - mods_info = nf_core.modules.ModuleInfo(self.nfcore_modules, "fastqc") + mods_info = nf_core.modules.info.ModuleInfo(self.nfcore_modules, "fastqc") mods_info.local = True mods_info_output = mods_info.get_component_info() console = Console(record=True) diff --git a/tests/modules/test_install.py b/tests/modules/test_install.py index cfdaac47ed..8f7ac0a1dc 100644 --- a/tests/modules/test_install.py +++ b/tests/modules/test_install.py @@ -19,14 +19,14 @@ class TestModulesCreate(TestModules): def test_modules_install_nopipeline(self): """Test installing a module - no pipeline given""" - self.mods_install.directory = None + self.pipeline_dir = None assert self.mods_install.install("foo") is False @with_temporary_folder def test_modules_install_emptypipeline(self, tmpdir): """Test installing a module - empty dir given""" - os.mkdir(os.path.join(tmpdir, "nf-core-pipe")) - self.mods_install.directory = os.path.join(tmpdir, "nf-core-pipe") + Path(tmpdir, "nf-core-pipe").mkdir() + self.mods_install.directory = Path(tmpdir, "nf-core-pipe") with pytest.raises(UserWarning) as excinfo: self.mods_install.install("foo") assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) diff --git a/tests/modules/test_lint.py b/tests/modules/test_lint.py index fc9871db2e..dfe288a6ee 100644 --- a/tests/modules/test_lint.py +++ b/tests/modules/test_lint.py @@ -6,8 +6,9 @@ import yaml from git.repo import Repo -import nf_core.modules -from nf_core.modules.lint import main_nf +import nf_core.modules.lint +import nf_core.modules.patch +from nf_core.modules.lint.main_nf import check_container_link_line, check_process_labels from nf_core.utils import set_wd from ..test_modules import TestModules @@ -160,7 +161,7 @@ class TestModulesCreate(TestModules): def _setup_patch(self, pipeline_dir: Union[str, Path], modify_module: bool): - install_obj = nf_core.modules.ModuleInstall( + install_obj = nf_core.modules.install.ModuleInstall( pipeline_dir, prompt=False, force=False, @@ -180,7 +181,7 @@ def _setup_patch(self, pipeline_dir: Union[str, Path], modify_module: bool): def test_modules_lint_trimgalore(self): """Test linting the TrimGalore! 
module""" self.mods_install.install("trimgalore") - module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir) module_lint.lint(print_results=False, module="trimgalore") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -191,11 +192,11 @@ def test_modules_lint_empty(self): self.mods_remove.remove("fastqc", force=True) self.mods_remove.remove("multiqc", force=True) with pytest.raises(LookupError): - nf_core.modules.ModuleLint(directory=self.pipeline_dir) + nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir) def test_modules_lint_new_modules(self): """lint a new module""" - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, all_modules=True) assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -206,13 +207,13 @@ def test_modules_lint_no_gitlab(self): self.mods_remove.remove("fastqc", force=True) self.mods_remove.remove("multiqc", force=True) with pytest.raises(LookupError): - nf_core.modules.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) + nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) def test_modules_lint_gitlab_modules(self): """Lint modules from a different remote""" self.mods_install_gitlab.install("fastqc") self.mods_install_gitlab.install("multiqc") - module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) module_lint.lint(print_results=False, all_modules=True) assert len(module_lint.failed) == 2 assert len(module_lint.passed) > 0 @@ -221,7 +222,7 @@ def test_modules_lint_gitlab_modules(self): def test_modules_lint_multiple_remotes(self): """Lint modules from a different remote""" self.mods_install_gitlab.install("multiqc") - module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) module_lint.lint(print_results=False, all_modules=True) assert len(module_lint.failed) == 1 assert len(module_lint.passed) > 0 @@ -230,12 +231,12 @@ def test_modules_lint_multiple_remotes(self): def test_modules_lint_registry(self): """Test linting the samtools module and alternative registry""" assert self.mods_install.install("samtools/sort") - module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir, registry="public.ecr.aws") + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir, registry="public.ecr.aws") module_lint.lint(print_results=False, module="samtools/sort") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 - module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir) module_lint.lint(print_results=False, module="samtools/sort") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -248,13 +249,13 @@ def 
test_modules_lint_patched_modules(self): self._setup_patch(str(self.pipeline_dir), True) # Create a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) patch_obj.patch(BISMARK_ALIGN) # change temporarily working directory to the pipeline directory # to avoid error from try_apply_patch() during linting with set_wd(self.pipeline_dir): - module_lint = nf_core.modules.ModuleLint( + module_lint = nf_core.modules.lint.ModuleLint( directory=self.pipeline_dir, remote_url=GITLAB_URL, branch=PATCH_BRANCH, @@ -273,7 +274,7 @@ def test_modules_lint_check_process_labels(self): for test_case in PROCESS_LABEL_TEST_CASES: process, passed, warned, failed = test_case mocked_ModuleLint = MockModuleLint() - main_nf.check_process_labels(mocked_ModuleLint, process.splitlines()) + check_process_labels(mocked_ModuleLint, process.splitlines()) assert len(mocked_ModuleLint.passed) == passed assert len(mocked_ModuleLint.warned) == warned assert len(mocked_ModuleLint.failed) == failed @@ -284,7 +285,7 @@ def test_modules_lint_check_url(self): mocked_ModuleLint = MockModuleLint() for line in process.splitlines(): if line.strip(): - main_nf.check_container_link_line(mocked_ModuleLint, line, registry="quay.io") + check_container_link_line(mocked_ModuleLint, line, registry="quay.io") assert ( len(mocked_ModuleLint.passed) == passed @@ -298,7 +299,7 @@ def test_modules_lint_check_url(self): def test_modules_lint_snapshot_file(self): """Test linting a module with a snapshot file""" - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -315,7 +316,7 @@ def test_modules_lint_snapshot_file_missing_fail(self): "tests", "main.nf.test.snap", ).unlink() - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path( self.nfcore_modules, @@ -359,7 +360,7 @@ def test_modules_lint_snapshot_file_not_needed(self): "w", ) as fh: fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -377,7 +378,7 @@ def test_modules_environment_yml_file_doesnt_exists(self): "environment.yml.bak", ) ) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path( self.nfcore_modules, @@ -403,7 +404,7 @@ def test_modules_environment_yml_file_doesnt_exists(self): def test_modules_environment_yml_file_sorted_correctly(self): """Test linting a module with a correctly sorted environment.yml file""" - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") 
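
The hunks in this test file repeat one pattern: construct a `ModuleLint`, lint a single module, then assert on the `failed`/`passed` lists. A minimal sketch of that pattern, with the checkout path as a placeholder:

    # Lint one module in a local clone of nf-core/modules (path is illustrative).
    import nf_core.modules.lint

    module_lint = nf_core.modules.lint.ModuleLint(directory="path/to/modules-clone")
    module_lint.lint(print_results=False, module="bpipe/test")
    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
    assert len(module_lint.passed) > 0
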
assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -438,7 +439,7 @@ def test_modules_environment_yml_file_sorted_incorrectly(self): "w", ) as fh: fh.write(yaml_content) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # we fix the sorting on the fly, so this should pass assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -471,7 +472,7 @@ def test_modules_environment_yml_file_not_array(self): "w", ) as fh: fh.write(yaml.dump(yaml_content)) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -504,7 +505,7 @@ def test_modules_environment_yml_file_name_mismatch(self): "w", ) as fh: fh.write(yaml.dump(yaml_content)) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes yaml_content["name"] = "bpipe_test" @@ -536,7 +537,7 @@ def test_modules_meta_yml_incorrect_licence_field(self): "w", ) as fh: fh.write(yaml.dump(meta_yml)) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes @@ -559,7 +560,7 @@ def test_modules_meta_yml_input_mismatch(self): main_nf_new = main_nf.replace("path bam", "path bai") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf_new) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf) @@ -578,7 +579,7 @@ def test_modules_meta_yml_output_mismatch(self): main_nf_new = main_nf.replace("emit: bam", "emit: bai") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf_new) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf) @@ -625,7 +626,7 @@ def test_modules_meta_yml_incorrect_name(self): "w", ) as fh: fh.write(yaml.dump(environment_yml)) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes @@ -659,7 +660,7 @@ def test_modules_missing_test_dir(self): Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests").rename( Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak") 
) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak").rename( Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests") @@ -690,7 +691,7 @@ def test_modules_missing_test_main_nf(self): "main.nf.test.bak", ) ) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path( self.nfcore_modules, @@ -719,7 +720,7 @@ def test_modules_missing_test_main_nf(self): def test_modules_unused_pytest_files(self): """Test linting a nf-test module with files still present in `tests/modules/`""" Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").mkdir(parents=True, exist_ok=True) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").rmdir() assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -735,7 +736,7 @@ def test_nftest_failing_linting(self): self.nfcore_modules = Path(tmp_dir, "modules-test") Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_NFTEST_BRANCH) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="kallisto/quant") assert len(module_lint.failed) == 3, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -774,7 +775,7 @@ def test_modules_absent_version(self): "w", ) as fh: fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") with open( Path( @@ -812,7 +813,7 @@ def test_modules_empty_file_in_snapshot(self): with open(snap_file, "w") as fh: json.dump(snap, fh) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -841,7 +842,7 @@ def test_modules_empty_file_in_stub_snapshot(self): with open(snap_file, "w") as fh: json.dump(snap, fh) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 diff --git a/tests/modules/test_list.py b/tests/modules/test_list.py index 81484cc8f0..fdbb61f69e 100644 --- a/tests/modules/test_list.py +++ b/tests/modules/test_list.py @@ -4,7 +4,7 @@ import yaml from rich.console import Console -import nf_core.modules +import nf_core.modules.list from ..test_modules import TestModules from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL @@ -13,7 +13,7 @@ class 
TestModulesCreate(TestModules): def test_modules_list_remote(self): """Test listing available modules""" - mods_list = nf_core.modules.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(None, remote=True) listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) @@ -22,7 +22,9 @@ def test_modules_list_remote(self): def test_modules_list_remote_gitlab(self): """Test listing the modules in the remote gitlab repo""" - mods_list = nf_core.modules.ModuleList(None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH) + mods_list = nf_core.modules.list.ModuleList( + None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH + ) listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) @@ -31,7 +33,7 @@ def test_modules_list_remote_gitlab(self): def test_modules_list_pipeline(self): """Test listing locally installed modules""" - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) @@ -42,7 +44,7 @@ def test_modules_list_pipeline(self): def test_modules_install_and_list_pipeline(self): """Test listing locally installed modules""" self.mods_install.install("trimgalore") - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) @@ -52,7 +54,7 @@ def test_modules_install_and_list_pipeline(self): def test_modules_install_gitlab_and_list_pipeline(self): """Test listing locally installed modules""" self.mods_install_gitlab.install("fastqc") - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) @@ -61,7 +63,7 @@ def test_modules_install_gitlab_and_list_pipeline(self): def test_modules_list_local_json(self): """Test listing locally installed modules as JSON""" - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) listed_mods = mods_list.list_components(print_json=True) listed_mods = json.loads(listed_mods) assert "fastqc" in listed_mods @@ -69,7 +71,7 @@ def test_modules_list_local_json(self): def test_modules_list_remote_json(self): """Test listing available modules as JSON""" - mods_list = nf_core.modules.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(None, remote=True) listed_mods = mods_list.list_components(print_json=True) listed_mods = json.loads(listed_mods) assert "fastqc" in listed_mods @@ -77,7 +79,7 @@ def test_modules_list_remote_json(self): def test_modules_list_with_one_keyword(self): """Test listing available modules with one keyword""" - mods_list = nf_core.modules.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(None, remote=True) listed_mods = mods_list.list_components(keywords=["qc"]) console = Console(record=True) console.print(listed_mods) @@ -86,7 +88,7 @@ def test_modules_list_with_one_keyword(self): def test_modules_list_with_keywords(self): """Test listing available modules with multiple keywords""" - mods_list = 
nf_core.modules.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(None, remote=True) listed_mods = mods_list.list_components(keywords=["fastq", "qc"]) console = Console(record=True) console.print(listed_mods) @@ -95,7 +97,7 @@ def test_modules_list_with_keywords(self): def test_modules_list_with_unused_keyword(self): """Test listing available modules with an unused keyword""" - mods_list = nf_core.modules.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(None, remote=True) with self.assertLogs(level="INFO") as log: listed_mods = mods_list.list_components(keywords=["you_will_never_find_me"]) self.assertIn("No available", log.output[0]) @@ -116,7 +118,7 @@ def test_modules_list_in_wrong_repo_fail(self): yaml.safe_dump(nf_core_yml, fh) # expect error logged with self.assertLogs(level="ERROR") as log: - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) listed_mods = mods_list.list_components() self.assertIn("must be run from a pipeline directory", log.output[0]) # expect empty list diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py index 319b5ad657..2ab058fa70 100644 --- a/tests/modules/test_modules_json.py +++ b/tests/modules/test_modules_json.py @@ -36,7 +36,7 @@ def test_mod_json_update(self): mod_json_obj = ModulesJson(self.pipeline_dir) # Update the modules.json file mod_repo_obj = ModulesRepo() - mod_json_obj.update("modules", mod_repo_obj, "MODULE_NAME", "GIT_SHA", "modules", write_file=False) + mod_json_obj.update("modules", mod_repo_obj, "MODULE_NAME", "GIT_SHA", ["modules"], write_file=False) mod_json = mod_json_obj.get_modules_json() assert "MODULE_NAME" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"] @@ -155,7 +155,7 @@ def test_mod_json_up_to_date_reinstall_fails(self): mod_json_obj = ModulesJson(self.pipeline_dir) # Update the fastqc module entry to an invalid git_sha - mod_json_obj.update("modules", ModulesRepo(), "fastqc", "INVALID_GIT_SHA", "modules", write_file=True) + mod_json_obj.update("modules", ModulesRepo(), "fastqc", "INVALID_GIT_SHA", ["modules"], write_file=True) # Remove the fastqc module fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py index 230bb1ce8f..c3eb94d374 100644 --- a/tests/modules/test_patch.py +++ b/tests/modules/test_patch.py @@ -6,7 +6,10 @@ import pytest import nf_core.components.components_command -import nf_core.modules +import nf_core.components.patch +import nf_core.modules.modules_json +import nf_core.modules.patch +import nf_core.modules.update from ..test_modules import TestModules from ..utils import GITLAB_URL @@ -29,7 +32,7 @@ def setup_patch(pipeline_dir, modify_module): - install_obj = nf_core.modules.ModuleInstall( + install_obj = nf_core.modules.install.ModuleInstall( pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH, sha=ORG_SHA ) @@ -66,7 +69,7 @@ def test_create_patch_no_change(self): setup_patch(self.pipeline_dir, False) # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) with pytest.raises(UserWarning): patch_obj.patch(BISMARK_ALIGN) @@ -84,7 
+87,7 @@ def test_create_patch_change(self): setup_patch(self.pipeline_dir, True) # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) patch_obj.patch(BISMARK_ALIGN) module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) @@ -119,7 +122,7 @@ def test_create_patch_try_apply_successful(self): module_path = Path(self.pipeline_dir, module_relpath) # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) patch_obj.patch(BISMARK_ALIGN) patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" @@ -132,7 +135,7 @@ def test_create_patch_try_apply_successful(self): "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) - update_obj = nf_core.modules.ModuleUpdate( + update_obj = nf_core.modules.update.ModuleUpdate( self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH ) # Install the new files @@ -187,7 +190,7 @@ def test_create_patch_try_apply_failed(self): module_path = Path(self.pipeline_dir, module_relpath) # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) patch_obj.patch(BISMARK_ALIGN) patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" @@ -200,7 +203,7 @@ def test_create_patch_try_apply_failed(self): "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) - update_obj = nf_core.modules.ModuleUpdate( + update_obj = nf_core.modules.update.ModuleUpdate( self.pipeline_dir, sha=FAIL_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH ) # Install the new files @@ -226,7 +229,7 @@ def test_create_patch_update_success(self): module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) patch_obj.patch(BISMARK_ALIGN) patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" @@ -240,7 +243,7 @@ def test_create_patch_update_success(self): ) # Update the module - update_obj = nf_core.modules.ModuleUpdate( + update_obj = nf_core.modules.update.ModuleUpdate( self.pipeline_dir, sha=SUCCEED_SHA, show_diff=False, @@ -287,7 +290,7 @@ def test_create_patch_update_fail(self): module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) patch_obj.patch(BISMARK_ALIGN) patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" @@ -304,7 +307,7 @@ def test_create_patch_update_fail(self): with open(module_path / patch_fn) as fh: patch_contents = fh.read() - update_obj = nf_core.modules.ModuleUpdate( + update_obj = nf_core.modules.update.ModuleUpdate( self.pipeline_dir, sha=FAIL_SHA, show_diff=False, @@ -339,7 +342,7 @@ def test_remove_patch(self): setup_patch(self.pipeline_dir, True) # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) 
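
These patch tests all drive the same two-step flow: `ModulePatch` records local edits as a `.diff` next to the module, and `ModuleUpdate` later tries to re-apply that diff on top of freshly fetched files. Roughly, with `GITLAB_URL`, `PATCH_BRANCH` and `SUCCEED_SHA` standing in for the test fixtures, and the final `update()` call assumed from the surrounding tests:

    # Sketch of the patch-then-update flow; the constants are test fixtures.
    import nf_core.modules.patch
    import nf_core.modules.update

    patch_obj = nf_core.modules.patch.ModulePatch(pipeline_dir, GITLAB_URL, PATCH_BRANCH)
    patch_obj.patch("bismark/align")  # writes modules/<repo>/bismark/align/bismark-align.diff

    update_obj = nf_core.modules.update.ModuleUpdate(
        pipeline_dir, sha=SUCCEED_SHA, show_diff=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH
    )
    update_obj.update("bismark/align")  # assumed entry point; re-applies the patch after updating
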
patch_obj.patch(BISMARK_ALIGN) module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) diff --git a/tests/modules/test_update.py b/tests/modules/test_update.py index a33aac3775..1f81eab487 100644 --- a/tests/modules/test_update.py +++ b/tests/modules/test_update.py @@ -167,9 +167,10 @@ def test_update_with_config_fixed_version(self): # Fix the trimgalore version in the .nf-core.yml to an old version update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": OLD_TRIMGALORE_SHA}}} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update all modules in the pipeline update_obj = ModuleUpdate( @@ -191,9 +192,10 @@ def test_update_with_config_dont_update(self): # Set the trimgalore field to no update in the .nf-core.yml update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": False}}} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update all modules in the pipeline update_obj = ModuleUpdate( @@ -219,9 +221,10 @@ def test_update_with_config_fix_all(self): # Fix the version of all nf-core modules in the .nf-core.yml to an old version update_config = {GITLAB_URL: OLD_TRIMGALORE_SHA} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update all modules in the pipeline update_obj = ModuleUpdate( @@ -242,9 +245,10 @@ def test_update_with_config_no_updates(self): # Fix the version of all nf-core modules in the .nf-core.yml to an old version update_config = {GITLAB_URL: False} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update all modules in the pipeline update_obj = ModuleUpdate( diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index af581fc9ab..7c18ab0a2d 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -1,14 +1,13 @@ """Tests covering the subworkflows commands""" import json -import os -import shutil import unittest from pathlib import Path import pytest import nf_core.modules +import nf_core.modules.install import nf_core.pipelines.create.create import nf_core.subworkflows @@ -104,49 +103,11 @@ def setUp(self): force=False, sha="8c343b3c8a0925949783dc547666007c245c235b", ) - self.mods_install = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=False, force=True) + self.mods_install = nf_core.modules.install.ModuleInstall(self.pipeline_dir, prompt=False, force=True) # Set up remove objects 
self.subworkflow_remove = nf_core.subworkflows.SubworkflowRemove(self.pipeline_dir) - def tearDown(self): - """Clean up temporary files and folders""" - - if os.path.exists(self.tmp_dir): - shutil.rmtree(self.tmp_dir) - @pytest.fixture(autouse=True) def _use_caplog(self, caplog): self.caplog = caplog - - # ################################################ - # # Test of the individual subworkflow commands. # - # ################################################ - - # from .subworkflows.list import ( # type: ignore[misc] - # test_subworkflows_install_and_list_subworkflows, - # test_subworkflows_install_gitlab_and_list_subworkflows, - # test_subworkflows_list_remote, - # test_subworkflows_list_remote_gitlab, - # ) - # from .subworkflows.remove import ( # type: ignore[misc] - # test_subworkflows_remove_included_subworkflow, - # test_subworkflows_remove_one_of_two_subworkflow, - # test_subworkflows_remove_subworkflow, - # test_subworkflows_remove_subworkflow_keep_installed_module, - # ) - # from .subworkflows.update import ( # type: ignore[misc] - # test_install_and_update, - # test_install_at_hash_and_update, - # test_install_at_hash_and_update_and_save_diff_limit_output, - # test_install_at_hash_and_update_and_save_diff_to_file, - # test_install_at_hash_and_update_limit_output, - # test_update_all, - # test_update_all_linked_components_from_subworkflow, - # test_update_all_subworkflows_from_module, - # test_update_change_of_included_modules, - # test_update_with_config_dont_update, - # test_update_with_config_fix_all, - # test_update_with_config_fixed_version, - # test_update_with_config_no_updates, - # ) From 0cf5f8b2f6f36d7904d95368c2ae2964bcbc09af Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 09:08:04 +0200 Subject: [PATCH 33/89] remove broken __repr__ (breaks because not all parameters are initialized) --- nf_core/components/lint/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index ada3ee30c2..f5ce3ddb7f 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -45,7 +45,6 @@ def __init__(self, component: NFCoreComponent, lint_test: str, message: str, fil self.component_name: str = component.component_name -@rich.repr.auto class ComponentLint(ComponentCommand): """ An object for linting modules and subworkflows either in a clone of the 'nf-core/modules' From 879e808b37f5bf7ed4e15aaef111ba025779a08f Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 13:27:14 +0200 Subject: [PATCH 34/89] add types --- nf_core/components/components_command.py | 1 - nf_core/components/components_utils.py | 16 +- nf_core/components/create.py | 21 +-- nf_core/components/info.py | 14 +- nf_core/components/install.py | 4 + nf_core/components/lint/__init__.py | 12 +- nf_core/components/nfcore_component.py | 1 + nf_core/modules/bump_versions.py | 10 +- nf_core/modules/modules_json.py | 7 +- nf_core/pipelines/create/create.py | 50 +++--- nf_core/pipelines/create/utils.py | 20 +-- nf_core/pipelines/lint/__init__.py | 24 +-- nf_core/pipelines/lint/modules_json.py | 9 +- nf_core/subworkflows/lint/main_nf.py | 14 +- nf_core/synced_repo.py | 33 ++-- tests/pipelines/test_sync.py | 186 ++++++++++------------- tests/subworkflows/test_lint.py | 42 +++-- 17 files changed, 232 insertions(+), 232 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 8d200021c2..13a6fed338 100644 ---
a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -38,7 +38,6 @@ def __init__( self.hide_progress = hide_progress self.no_prompts = no_prompts self._configure_repo_and_paths() - self.repo_type: Optional[str] = None def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 743efd386d..e4e2ff092b 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -27,7 +27,9 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P # Figure out the repository type from the .nf-core.yml config file if we can config_fn, tools_config = nf_core.utils.load_tools_config(base_dir) - repo_type = tools_config.get("repository_type", None) + if config_fn is None: + raise UserWarning(f"Could not find a config file in directory: {base_dir}") + repo_type = getattr(tools_config, "repository_type", None) or None # If not set, prompt the user if not repo_type and use_prompt: @@ -55,13 +57,11 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P # Check if it's a valid answer if repo_type not in ["pipeline", "modules"]: raise UserWarning(f"Invalid repository type: '{repo_type}'") - + org: str = "" # Check for org if modules repo - if repo_type == "pipeline": - org = "" - elif repo_type == "modules": - org = tools_config.get("org_path", None) - if org is None: + if repo_type == "modules": + org = getattr(tools_config, "org_path", "") or "" + if org == "": log.warning("Organisation path not defined in %s [key: org_path]", config_fn.name) org = questionary.text( "What is the organisation path under which modules and subworkflows are stored?", @@ -70,7 +70,7 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P ).unsafe_ask() log.info("To avoid this prompt in the future, add the 'org_path' key to a root '%s' file.", config_fn.name) if rich.prompt.Confirm.ask("[bold][blue]?[/] Would you like me to add this config now?", default=True): - with open(config_fn, "a+") as fh: + with open(str(config_fn), "a+") as fh: fh.write(f"org_path: {org}\n") log.info(f"Config added to '{config_fn.name}'") diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 413790099d..fdcf402b47 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -62,7 +62,7 @@ def __init__( self.not_empty_template = not empty_template self.migrate_pytest = migrate_pytest - def create(self): + def create(self) -> bool: """ Create a new DSL2 module or subworkflow from the nf-core template. 
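
A detail worth noting in the `components_utils.py` hunk above: `tools_config` is now a typed object whose fields may legitimately exist but be `None`, which is why the code switches to the `getattr(obj, "attr", default) or default` idiom rather than plain dictionary lookups. A self-contained illustration:

    # `getattr(...) or default` guards against both a missing attribute and
    # an attribute that exists but is set to None.
    class ToolsConfig:
        org_path = None  # present, but unset

    cfg = ToolsConfig()
    org = getattr(cfg, "org_path", "") or ""  # -> "" although org_path is None
    lint = getattr(cfg, "lint", {}) or {}     # -> {} for the missing attribute
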
@@ -106,12 +106,12 @@ def create(self): ``` """ - if self.component_type == "modules": # Check modules directory structure self.check_modules_structure() # Check whether the given directory is a nf-core pipeline or a clone of nf-core/modules + log.info(f"Repository type: [blue]{self.repo_type}") if self.directory != ".": log.info(f"Base directory: '{self.directory}'") @@ -161,7 +161,7 @@ def create(self): self.org_alphabet = not_alphabet.sub("", self.org) # Create component template with jinja2 - self._render_template() + assert self._render_template() log.info(f"Created component template: '{self.component_name}'") if self.migrate_pytest: @@ -171,7 +171,9 @@ def create(self): self._print_and_delete_pytest_files() new_files = [str(path) for path in self.file_paths.values()] + log.info("Created following files:\n " + "\n ".join(new_files)) + return True def _get_bioconda_tool(self): """ @@ -265,7 +267,7 @@ def _get_module_structure_components(self): default=True, ) - def _render_template(self): + def _render_template(self) -> Optional[bool]: """ Create new module/subworkflow files with Jinja2. """ @@ -288,7 +290,7 @@ def _render_template(self): # Write output to the target file log.debug(f"Writing output to: '{dest_fn}'") dest_fn.parent.mkdir(exist_ok=True, parents=True) - with open(dest_fn, "w") as fh: + with open(str(dest_fn), "w") as fh: log.debug(f"Writing output to: '{dest_fn}'") fh.write(rendered_output) @@ -297,6 +299,7 @@ def _render_template(self): Path(nf_core.__file__).parent / f"{self.component_type[:-1]}-template" / template_fn ).stat() dest_fn.chmod(template_stat.st_mode) + return True def _collect_name_prompt(self): """ @@ -340,7 +343,7 @@ def _collect_name_prompt(self): elif self.component_type == "subworkflows": self.component = rich.prompt.Prompt.ask("[violet]Name of subworkflow").strip() - def _get_component_dirs(self): + def _get_component_dirs(self) -> Dict[str, Path]: """Given a directory and a tool/subtool or subworkflow, set the file paths and check if they already exist Returns dict: keys are relative paths to template files, vals are target paths. 
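
For orientation, the `_render_template()` method annotated in the previous hunk reduces to a standard Jinja2 render-to-file loop that also preserves the template's permission bits; the loader path, template name and variables below are placeholders:

    from pathlib import Path

    import jinja2

    # Render one template file and copy it, plus its mode bits, into place.
    env = jinja2.Environment(loader=jinja2.FileSystemLoader("module-template"), keep_trailing_newline=True)
    rendered = env.get_template("main.nf").render(component_name="fastqc", author="@me")

    dest_fn = Path("modules/local/fastqc/main.nf")
    dest_fn.parent.mkdir(exist_ok=True, parents=True)
    dest_fn.write_text(rendered)
    dest_fn.chmod(Path("module-template", "main.nf").stat().st_mode)  # keep the template's mode
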
@@ -372,9 +375,8 @@ def _get_component_dirs(self): # Set file paths file_paths["main.nf"] = component_file - if self.repo_type == "modules": + elif self.repo_type == "modules": component_dir = Path(self.directory, self.component_type, self.org, self.component_dir) - # Check if module/subworkflow directories exist already if component_dir.exists() and not self.force_overwrite and not self.migrate_pytest: raise UserWarning( @@ -403,7 +405,6 @@ def _get_component_dirs(self): raise UserWarning( f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{self.component_name}'" ) - # Set file paths # For modules - can be tool/ or tool/subtool/ so can't do in template directory structure file_paths["main.nf"] = component_dir / "main.nf" @@ -411,6 +412,8 @@ def _get_component_dirs(self): if self.component_type == "modules": file_paths["environment.yml"] = component_dir / "environment.yml" file_paths["tests/main.nf.test.j2"] = component_dir / "tests" / "main.nf.test" + else: + raise ValueError("`repo_type` not set correctly") return file_paths diff --git a/nf_core/components/info.py b/nf_core/components/info.py index d07486f638..a296fcaccf 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -136,17 +136,17 @@ def init_mod_name(self, component): ).unsafe_ask() else: if self.repo_type == "pipeline": + assert self.modules_json is not None # mypy # check if the module is locally installed local_paths = self.modules_json.get_all_components(self.component_type).get( self.modules_repo.remote_url ) # type: ignore - if local_paths is None: - raise LookupError(f"No {self.component_type[:-1]} installed from {self.modules_repo.remote_url}") - for directory, comp in local_paths: - if comp == component: - component_base_path = Path(self.directory, self.component_type) - self.local_path = Path(component_base_path, directory, component) - break + if local_paths is not None: + for directory, comp in local_paths: + if comp == component: + component_base_path = Path(self.directory, self.component_type) + self.local_path = Path(component_base_path, directory, component) + break if self.local_path: self.local = True diff --git a/nf_core/components/install.py b/nf_core/components/install.py index aa8aac81e6..8f47913283 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -69,8 +69,12 @@ def install(self, component: str, silent: bool = False) -> bool: # Verify SHA if not self.modules_repo.verify_sha(self.prompt, self.sha): return False + + # verify self.modules_repo entries: if self.modules_repo is None: return False + if self.modules_repo.repo_path is None: + return False # Check and verify component name diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index f5ce3ddb7f..e2475ef62c 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -75,6 +75,11 @@ def __init__( self.passed: List[LintResult] = [] self.warned: List[LintResult] = [] self.failed: List[LintResult] = [] + self.all_local_components: List[NFCoreComponent] = [] + + self.lint_config = None + self.modules_json = None + if self.component_type == "modules": self.lint_tests = self.get_all_module_lint_tests(self.repo_type == "pipeline") else: @@ -107,7 +112,7 @@ def __init__( f"No {self.component_type} from {self.modules_repo.remote_url} installed in pipeline." 
) local_component_dir = Path(self.directory, self.component_type, "local") - self.all_local_components = [] + if local_component_dir.exists(): self.all_local_components = [ NFCoreComponent( @@ -122,7 +127,7 @@ def __init__( for comp in self.get_local_components() ] self.config = nf_core.utils.fetch_wf_config(Path(self.directory), cache_config=True) - else: + elif self.repo_type == "modules": component_dir = Path( self.directory, self.default_modules_path if self.component_type == "modules" else self.default_subworkflows_path, @@ -146,9 +151,6 @@ def __init__( self.registry = registry log.debug(f"Registry set to {self.registry}") - self.lint_config = None - self.modules_json = None - def __repr__(self) -> str: return f"ComponentLint({self.component_type}, {self.directory})" diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 0c63141c73..129871d68c 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -55,6 +55,7 @@ def __init__( self.git_sha: Optional[str] = None self.is_patched: bool = False self.branch: Optional[str] = None + self.workflow_name: Optional[str] = None if remote_component: # Initialize the important files diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index fb0dc7d50d..2d8854e3c0 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -7,7 +7,7 @@ import os import re from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import List, Optional, Tuple, Union import questionary import yaml @@ -22,13 +22,13 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.utils import custom_yaml_dumper, rich_force_colors +from nf_core.utils import NFCoreYamlConfig, custom_yaml_dumper, rich_force_colors from nf_core.utils import plural_s as _s log = logging.getLogger(__name__) -class ModuleVersionBumper(ComponentCommand): # type: ignore[misc] +class ModuleVersionBumper(ComponentCommand): def __init__( self, pipeline_dir: Union[str, Path], @@ -43,7 +43,7 @@ def __init__( self.failed: List[Tuple[str, str]] = [] self.ignored: List[Tuple[str, str]] = [] self.show_up_to_date: Optional[bool] = None - self.tools_config: Dict[str, Any] = {} + self.tools_config: Optional[NFCoreYamlConfig] def bump_versions( self, module: Union[str, None] = None, all_modules: bool = False, show_uptodate: bool = False @@ -160,7 +160,7 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: return False # Don't update if blocked in blacklist - self.bump_versions_config = self.tools_config.get("bump-versions", {}) + self.bump_versions_config = getattr(self.tools_config, "bump-versions", {}) or {} if module.component_name in self.bump_versions_config: config_version = self.bump_versions_config[module.component_name] if not config_version: diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index c0e41d1b42..02ce6fa5a2 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -922,7 +922,7 @@ def module_present(self, module_name, repo_url, install_dir): install_dir, {} ) - def get_modules_json(self) -> dict: + def get_modules_json(self) -> ModulesJsonType: """ Returns a copy of the loaded modules.json @@ -932,7 +932,7 @@ def get_modules_json(self) -> dict: if self.modules_json is None: self.load() assert self.modules_json is not None # mypy - 
return copy.deepcopy(self.modules_json) # type: ignore + return copy.deepcopy(self.modules_json) def get_component_version(self, component_type, component_name, repo_url, install_dir): """ @@ -1182,6 +1182,7 @@ def resolve_missing_from_modules_json(self, missing_from_modules_json, component repos, _ = self.get_pipeline_module_repositories(component_type, self.modules_dir, tracked_repos) # Get tuples of components that miss installation and their install directory + def components_with_repos(): for directory in missing_from_modules_json: for repo_url in repos: @@ -1197,7 +1198,7 @@ def components_with_repos(): paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) pass paths_in_directory.append(Path(dir_name).parts[-1]) - if dir in paths_in_directory: + if directory in paths_in_directory: yield (modules_repo.repo_path, directory) # Add all components into a dictionary with install directories diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index aecba94236..b7d86c5bd7 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -8,9 +8,10 @@ import re import shutil from pathlib import Path -from typing import Optional, Union +from typing import Dict, List, Optional, Union import git +import git.config import jinja2 import yaml @@ -61,12 +62,15 @@ def __init__( self.config = template_config elif from_config_file: # Try reading config file - _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else ".") - # Obtain a CreateConfig object from `.nf-core.yml` config file - if "template" in config_yml: - self.config = CreateConfig(**config_yml["template"]) - else: - raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") + try: + _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else Path().cwd()) + # Obtain a CreateConfig object from `.nf-core.yml` config file + if config_yml is not None and getattr(config_yml, "template", None) is not None: + self.config = CreateConfig(**config_yml["template"]) + else: + raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") + except (FileNotFoundError, UserWarning): + log.debug("The '.nf-core.yml' configuration file was not found.") elif (name and description and author) or ( template_config and (isinstance(template_config, str) or isinstance(template_config, Path)) ): @@ -191,7 +195,10 @@ def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): skip_paths (list): List of template areas which contain paths to skip. 
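
The `from_config_file` branch above boils down to reading `.nf-core.yml` and validating its `template` section through the pydantic model; a reduced sketch, assuming the file lives in the current directory:

    import yaml

    from nf_core.pipelines.create.utils import CreateConfig

    # Read the pipeline-level config and build a validated CreateConfig from it.
    with open(".nf-core.yml") as fh:
        config_yml = yaml.safe_load(fh) or {}

    if config_yml.get("template") is not None:
        config = CreateConfig(**config_yml["template"])
    else:
        raise UserWarning("The template configuration was not provided in '.nf-core.yml'.")
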
""" # Try reading config file - _, config_yml = nf_core.utils.load_tools_config(pipeline_dir) + try: + _, config_yml = nf_core.utils.load_tools_config(pipeline_dir) + except UserWarning: + config_yml = None # Define the different template areas, and what actions to take for each # if they are skipped @@ -230,13 +237,13 @@ def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): jinja_params["name_docker"] = jinja_params["name"].replace(jinja_params["org"], jinja_params["prefix_nodash"]) jinja_params["logo_light"] = f"{jinja_params['name_noslash']}_logo_light.png" jinja_params["logo_dark"] = f"{jinja_params['name_noslash']}_logo_dark.png" - - if ( - "lint" in config_yml - and "nextflow_config" in config_yml["lint"] - and "manifest.name" in config_yml["lint"]["nextflow_config"] - ): - return jinja_params, skip_paths + if config_yml is not None: + if ( + hasattr(config_yml, "lint") + and hasattr(config_yml["lint"], "nextflow_config") + and hasattr(config_yml["lint"]["nextflow_config"], "manifest.name") + ): + return jinja_params, skip_paths # Check that the pipeline name matches the requirements if not re.match(r"^[a-z]+$", jinja_params["short_name"]): @@ -417,7 +424,7 @@ def fix_linting(self): """ # Create a lint config short_name = self.jinja_params["short_name"] - lint_config = { + lint_config: Dict[str, List[str]] = { "files_exist": [ "CODE_OF_CONDUCT.md", f"assets/nf-core-{short_name}_logo_light.png", @@ -503,9 +510,10 @@ def fix_linting(self): # Add the lint content to the preexisting nf-core config config_fn, nf_core_yml = nf_core.utils.load_tools_config(self.outdir) - nf_core_yml["lint"] = lint_config - with open(self.outdir / config_fn, "w") as fh: - yaml.dump(nf_core_yml, fh, default_flow_style=False, sort_keys=False) + if config_fn is not None and nf_core_yml is not None: + nf_core_yml.lint = lint_config + with open(self.outdir / config_fn, "w") as fh: + yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) run_prettier_on_file(os.path.join(self.outdir, config_fn)) @@ -531,9 +539,9 @@ def git_init_pipeline(self): Raises: UserWarning: if Git default branch is set to 'dev' or 'TEMPLATE'. 
""" - default_branch = self.default_branch + default_branch: Optional[str] = self.default_branch try: - default_branch = default_branch or git.config.GitConfigParser().get_value("init", "defaultBranch") + default_branch = default_branch or str(git.config.GitConfigParser().get_value("init", "defaultBranch")) except configparser.Error: log.debug("Could not read init.defaultBranch") if default_branch in ["dev", "TEMPLATE"]: diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index c15d61e266..88994c9361 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -3,9 +3,9 @@ from contextvars import ContextVar from logging import LogRecord from pathlib import Path -from typing import Any, Dict, Iterator, Optional, Union +from typing import Any, Dict, Iterator, Union -from pydantic import BaseModel, ConfigDict, ValidationError, ValidationInfo, field_validator +from pydantic import ConfigDict, ValidationError, ValidationInfo, field_validator from rich.logging import RichHandler from textual import on from textual._context import active_app @@ -16,6 +16,8 @@ from textual.widget import Widget from textual.widgets import Button, Input, Markdown, RichLog, Static, Switch +from nf_core.utils import NFCoreTemplateConfig + # Use ContextVar to define a context on the model initialization _init_context_var: ContextVar = ContextVar("_init_context_var", default={}) @@ -33,19 +35,9 @@ def init_context(value: Dict[str, Any]) -> Iterator[None]: NFCORE_PIPELINE_GLOBAL: bool = True -class CreateConfig(BaseModel): +class CreateConfig(NFCoreTemplateConfig): """Pydantic model for the nf-core create config.""" - org: Optional[str] = None - name: Optional[str] = None - description: Optional[str] = None - author: Optional[str] = None - version: Optional[str] = None - force: Optional[bool] = True - outdir: Optional[str] = None - skip_features: Optional[list] = None - is_nfcore: Optional[bool] = None - model_config = ConfigDict(extra="allow") def __init__(self, /, **data: Any) -> None: @@ -103,7 +95,7 @@ class TextInput(Static): and validation messages. """ - def __init__(self, field_id, placeholder, description, default=None, password=None, **kwargs) -> None: + def __init__(self, field_id, placeholder, description, default="", password=False, **kwargs) -> None: """Initialise the widget with our values. Pass on kwargs upstream for standard usage.""" diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index 93f6523709..ed833d3219 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -151,15 +151,15 @@ def _get_all_lint_tests(release_mode): "nfcore_yml", ] + (["version_consistency"] if release_mode else []) - def _load(self): + def _load(self) -> bool: """Load information about the pipeline into the PipelineLint object""" # Load everything using the parent object super()._load() # Load lint object specific stuff - self._load_lint_config() + return self._load_lint_config() - def _load_lint_config(self): + def _load_lint_config(self) -> bool: """Parse a pipeline lint config file. Load the '.nf-core.yml' config file and extract @@ -168,14 +168,19 @@ def _load_lint_config(self): Add parsed config to the `self.lint_config` class attribute. 
""" _, tools_config = nf_core.utils.load_tools_config(self.wf_path) - self.lint_config = tools_config.get("lint", {}) + self.lint_config = getattr(tools_config, "lint", {}) or {} + is_correct = True # Check if we have any keys that don't match lint test names - for k in self.lint_config: - if k not in self.lint_tests: - log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") + if self.lint_config is not None: + for k in self.lint_config: + if k not in self.lint_tests: + log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") + is_correct = False - def _lint_pipeline(self): + return is_correct + + def _lint_pipeline(self) -> None: """Main linting function. Takes the pipeline directory as the primary input and iterates through @@ -240,7 +245,8 @@ def _lint_pipeline(self): "Running lint checks", total=len(self.lint_tests), test_name=self.lint_tests[0] ) for test_name in self.lint_tests: - if self.lint_config.get(test_name, {}) is False: + lint_test = self.lint_config.get(test_name, {}) if self.lint_config is not None else {} + if lint_test is False: log.debug(f"Skipping lint test '{test_name}'") self.ignored.append((test_name, test_name)) continue diff --git a/nf_core/pipelines/lint/modules_json.py b/nf_core/pipelines/lint/modules_json.py index dd0a59d558..5ce2054036 100644 --- a/nf_core/pipelines/lint/modules_json.py +++ b/nf_core/pipelines/lint/modules_json.py @@ -1,9 +1,10 @@ from pathlib import Path +from typing import Dict, List -from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_json import ModulesJson, ModulesJsonType -def modules_json(self): +def modules_json(self) -> Dict[str, List[str]]: """Make sure all modules described in the ``modules.json`` file are actually installed Every module installed from ``nf-core/modules`` must have an entry in the ``modules.json`` file @@ -18,10 +19,10 @@ def modules_json(self): # Load pipeline modules and modules.json _modules_json = ModulesJson(self.wf_path) _modules_json.load() - modules_json_dict = _modules_json.modules_json + modules_json_dict: ModulesJsonType | None = _modules_json.modules_json modules_dir = Path(self.wf_path, "modules") - if _modules_json: + if _modules_json and modules_json_dict is not None: all_modules_passed = True for repo in modules_json_dict["repos"].keys(): diff --git a/nf_core/subworkflows/lint/main_nf.py b/nf_core/subworkflows/lint/main_nf.py index c73559502c..edca32bf30 100644 --- a/nf_core/subworkflows/lint/main_nf.py +++ b/nf_core/subworkflows/lint/main_nf.py @@ -4,12 +4,14 @@ import logging import re -from typing import List +from typing import List, Tuple + +from nf_core.components.nfcore_component import NFCoreComponent log = logging.getLogger(__name__) -def main_nf(_, subworkflow): +def main_nf(_, subworkflow: NFCoreComponent) -> Tuple[List[str], List[str]]: """ Lint a ``main.nf`` subworkflow file @@ -25,8 +27,8 @@ def main_nf(_, subworkflow): * The subworkflow emits a software version """ - inputs = [] - outputs = [] + inputs: List[str] = [] + outputs: List[str] = [] # Read the lines directly from the subworkflow lines = None @@ -38,7 +40,7 @@ def main_nf(_, subworkflow): subworkflow.passed.append(("main_nf_exists", "Subworkflow file exists", subworkflow.main_nf)) except FileNotFoundError: subworkflow.failed.append(("main_nf_exists", "Subworkflow file does not exist", subworkflow.main_nf)) - return + return inputs, outputs # Go through subworkflow main.nf file and switch state according to current section # Perform section-specific linting @@ 
-199,7 +201,7 @@ def check_subworkflow_section(self, lines: List[str]) -> List[str]: return includes -def check_workflow_section(self, lines): +def check_workflow_section(self, lines: List[str]) -> None: """Lint the workflow definition of a subworkflow before Specifically checks that the name is all capital letters diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 33e7f0a54a..8af0ee9a08 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -4,7 +4,7 @@ import shutil from configparser import NoOptionError, NoSectionError from pathlib import Path -from typing import Dict, Iterable, Optional, Union +from typing import Dict, Iterable, List, Optional, Union import git from git.exc import GitCommandError @@ -129,14 +129,16 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa # the WorkflowRepo define their own including custom init methods. This needs # fixing. self.setup_local_repo(remote_url, branch, hide_progress) + if self.local_repo_dir is None: raise ValueError("Repository not initialized") else: config_fn, repo_config = load_tools_config(self.local_repo_dir) - try: - self.repo_path = repo_config["org_path"] - except KeyError: - raise UserWarning(f"'org_path' key not present in {config_fn.name}") + if config_fn is not None and repo_config is not None: + try: + self.repo_path = repo_config.org_path + except KeyError: + raise UserWarning(f"'org_path' key not present in {config_fn.name}") # Verify that the repo seems to be correctly configured if self.repo_path != NF_CORE_MODULES_NAME or self.branch: @@ -148,6 +150,9 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa self.avail_module_names = None + def __repr__(self) -> str: + return f"SyncedRepo({self.remote_url}, {self.branch})" + def setup_local_repo(self, remote_url, branch, hide_progress): pass @@ -402,8 +407,12 @@ def get_latest_component_version(self, component_name, component_type): Returns the latest commit in the repository """ try: - return list(self.get_component_git_log(component_name, component_type, depth=1))[0]["git_sha"] - except UserWarning: + git_logs = list(self.get_component_git_log(component_name, component_type, depth=1)) + if not git_logs: + return None + return git_logs[0]["git_sha"] + except Exception as e: + log.debug(f"Could not get latest version of {component_name}: {e}") return None def sha_exists_on_branch(self, sha): @@ -437,7 +446,9 @@ def get_commit_info(self, sha): return message, date raise LookupError(f"Commit '{sha}' not found in the '{self.remote_url}'") - def get_avail_components(self, component_type, checkout=True, commit=None): + def get_avail_components( + self, component_type: str, checkout: bool = True, commit: Optional[str] = None + ) -> List[str]: """ Gets the names of the modules/subworkflows in the repository. 
They are detected by checking which directories have a 'main.nf' file @@ -456,9 +467,9 @@ def get_avail_components(self, component_type, checkout=True, commit=None): directory = self.subworkflows_dir # Module/Subworkflow directories are characterized by having a 'main.nf' file avail_component_names = [ - os.path.relpath(dirpath, start=directory) - for dirpath, _, file_names in os.walk(directory) - if "main.nf" in file_names + str(Path(dirpath).relative_to(directory)) + for dirpath, _, files in Path.walk(directory) + if "main.nf" in files ] return avail_component_names diff --git a/tests/pipelines/test_sync.py b/tests/pipelines/test_sync.py index d7b73c7ffb..b6955e6714 100644 --- a/tests/pipelines/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -2,50 +2,104 @@ import json import os -import shutil -import tempfile -import unittest from pathlib import Path +from typing import Dict, List, Union from unittest import mock import git import pytest +import yaml import nf_core.pipelines.create.create import nf_core.pipelines.sync +from nf_core.utils import NFCoreYamlConfig +from ..test_pipelines import TestPipelines from ..utils import with_temporary_folder -class TestModules(unittest.TestCase): +class MockResponse: + def __init__(self, data: Union[Dict, List[Dict]], status_code: int, url: str): + self.url: str = url + self.status_code: int = status_code + self.from_cache: bool = False + self.reason: str = "Mocked response" + self.data: Union[Dict, List[Dict]] = data + self.content: str = json.dumps(data) + self.headers: Dict[str, str] = {"content-encoding": "test", "connection": "fake"} + + def json(self): + return self.data + + +def mocked_requests_get(url) -> MockResponse: + """Helper function to emulate POST requests responses from the web""" + + url_template = "https://api.github.com/repos/{}/response/" + if url == Path(url_template.format("no_existing_pr"), "pulls?head=TEMPLATE&base=None"): + return MockResponse([], 200, url) + if url == Path(url_template.format("list_prs"), "pulls"): + response_data = [ + { + "state": "closed", + "head": {"ref": "nf-core-template-merge-2"}, + "base": {"ref": "master"}, + "html_url": "pr_url", + } + ] + [ + { + "state": "open", + "head": {"ref": f"nf-core-template-merge-{branch_no}"}, + "base": {"ref": "master"}, + "html_url": "pr_url", + } + for branch_no in range(3, 7) + ] + return MockResponse(response_data, 200, url) + + return MockResponse([{"html_url": url}], 404, url) + + +def mocked_requests_patch(url: str, data: str, **kwargs) -> MockResponse: + """Helper function to emulate POST requests responses from the web""" + + if url == "url_to_update_pr": + return MockResponse({"html_url": "great_success"}, 200, url) + # convert data to dict + response = json.loads(data) + response["patch_url"] = url + return MockResponse(response, 404, url) + + +def mocked_requests_post(url, **kwargs): + """Helper function to emulate POST requests responses from the web""" + + if url == "https://api.github.com/repos/no_existing_pr/response/pulls": + return MockResponse({"html_url": "great_success"}, 201, url) + + return MockResponse({}, 404, url) + + +class TestModules(TestPipelines): """Class for modules tests""" def setUp(self): - """Create a new pipeline to test""" - self.tmp_dir = tempfile.mkdtemp() - self.pipeline_dir = os.path.join(self.tmp_dir, "testpipeline") - default_branch = "master" - self.create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testing", - "test pipeline", - "tester", - outdir=self.pipeline_dir, - default_branch=default_branch, - ) - 
self.create_obj.init_pipeline() - self.remote_path = os.path.join(self.tmp_dir, "remote_repo") + super().setUp() + self.remote_path = Path(self.tmp_dir, "remote_repo") self.remote_repo = git.Repo.init(self.remote_path, bare=True) if self.remote_repo.active_branch.name != "master": - self.remote_repo.active_branch.rename(default_branch) - - def tearDown(self): - if os.path.exists(self.tmp_dir): - shutil.rmtree(self.tmp_dir) + self.remote_repo.active_branch.rename("master") @with_temporary_folder - def test_inspect_sync_dir_notgit(self, tmp_dir): + def test_inspect_sync_dir_notgit(self, tmp_dir: str): """Try syncing an empty directory""" + nf_core_yml_path = Path(tmp_dir, ".nf-core.yml") + nf_core_yml = NFCoreYamlConfig(repository_type="pipeline") + + with open(nf_core_yml_path, "w") as fh: + yaml.dump(nf_core_yml.model_dump(), fh) + psync = nf_core.pipelines.sync.PipelineSync(tmp_dir) with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() @@ -227,88 +281,6 @@ def test_push_merge_branch_without_create_branch(self): psync.push_merge_branch() assert exc_info.value.args[0].startswith(f"Could not push branch '{psync.merge_branch}'") - def mocked_requests_get(url, **kwargs): - """Helper function to emulate POST requests responses from the web""" - - class MockResponse: - def __init__(self, data, status_code): - self.url = kwargs.get("url") - self.status_code = status_code - self.from_cache = False - self.reason = "Mocked response" - self.data = data - self.content = json.dumps(data) - self.headers = {"content-encoding": "test", "connection": "fake"} - - def json(self): - return self.data - - url_template = "https://api.github.com/repos/{}/response/" - if url == os.path.join(url_template.format("no_existing_pr"), "pulls?head=TEMPLATE&base=None"): - response_data = [] - return MockResponse(response_data, 200) - if url == os.path.join(url_template.format("list_prs"), "pulls"): - response_data = [ - { - "state": "closed", - "head": {"ref": "nf-core-template-merge-2"}, - "base": {"ref": "master"}, - "html_url": "pr_url", - } - ] + [ - { - "state": "open", - "head": {"ref": f"nf-core-template-merge-{branch_no}"}, - "base": {"ref": "master"}, - "html_url": "pr_url", - } - for branch_no in range(3, 7) - ] - return MockResponse(response_data, 200) - - return MockResponse({"html_url": url}, 404) - - def mocked_requests_patch(url, **kwargs): - """Helper function to emulate POST requests responses from the web""" - - class MockResponse: - def __init__(self, data, status_code): - self.url = kwargs.get("url") - self.status_code = status_code - self.from_cache = False - self.reason = "Mocked" - self.content = json.dumps(data) - self.headers = {"content-encoding": "test", "connection": "fake"} - - if url == "url_to_update_pr": - response_data = {"html_url": "great_success"} - return MockResponse(response_data, 200) - - return MockResponse({"patch_url": url}, 404) - - def mocked_requests_post(url, **kwargs): - """Helper function to emulate POST requests responses from the web""" - - class MockResponse: - def __init__(self, data, status_code): - self.url = kwargs.get("url") - self.status_code = status_code - self.from_cache = False - self.reason = "Mocked" - self.data = data - self.content = json.dumps(data) - self.headers = {"content-encoding": "test", "connection": "fake"} - - def json(self): - return self.data - - if url == "https://api.github.com/repos/no_existing_pr/response/pulls": - response_data = {"html_url": "great_success"} - return 
MockResponse(response_data, 201) - - response_data = {} - return MockResponse(response_data, 404) - @mock.patch("nf_core.utils.gh_api.get", side_effect=mocked_requests_get) @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) def test_make_pull_request_success(self, mock_post, mock_get): @@ -354,7 +326,7 @@ def test_close_open_template_merge_prs(self, mock_get): prs = mock_get(f"https://api.github.com/repos/{psync.gh_repo}/pulls").data for pr in prs: - if pr["state"] == "open": + if pr.get("state", None) == "open": mock_close_open_pr.assert_any_call(pr) @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) @@ -368,7 +340,7 @@ def test_close_open_pr(self, mock_patch, mock_post): psync.gh_username = "bad_url" psync.gh_repo = "bad_url/response" os.environ["GITHUB_AUTH_TOKEN"] = "test" - pr = { + pr: Dict[str, Union[str, Dict[str, str]]] = { "state": "open", "head": {"ref": "nf-core-template-merge-3"}, "base": {"ref": "master"}, diff --git a/tests/subworkflows/test_lint.py b/tests/subworkflows/test_lint.py index 38bcc2b2c0..2693008707 100644 --- a/tests/subworkflows/test_lint.py +++ b/tests/subworkflows/test_lint.py @@ -291,10 +291,6 @@ def test_subworkflows_absent_version(self): with open(snap_file, "w") as fh: fh.write(new_content) - import ipdb - - ipdb.set_trace() - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") assert len(subworkflow_lint.failed) == 0 @@ -312,31 +308,33 @@ def test_subworkflows_missing_test_dir(self): test_dir_copy = shutil.copytree(test_dir, test_dir.parent / "tests_copy") shutil.rmtree(test_dir) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 0 + assert len(subworkflow_lint.failed) == 1 assert len(subworkflow_lint.passed) > 0 assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}" - assert any([x.lint_test == "test_dir_versions" for x in subworkflow_lint.warned]) + assert any([x.lint_test == "test_dir_exists" for x in subworkflow_lint.failed]) # cleanup shutil.copytree(test_dir_copy, test_dir) - def test_subworkflows_missing_main_nf(self): - """Test linting a nf-test subworkflow if the main.nf file is missing""" - main_nf = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf") - main_nf_copy = shutil.copy(main_nf, main_nf.parent / "main_nf_copy") - main_nf.unlink() - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - assert subworkflow_lint.failed[0].lint_test == "main_nf_exists" - - # cleanup - shutil.copy(main_nf_copy, main_nf) + # There are many steps before the actual main_nf linting where we rely on the main_nf file to exist, so this test is not possible for now + # def test_subworkflows_missing_main_nf(self): + # """Test linting a nf-test subworkflow if the main.nf file is missing""" + + # subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) + # 
main_nf = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf") + # main_nf_copy = shutil.copy(main_nf, main_nf.parent / "main_nf_copy") + # main_nf.unlink() + # subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + # assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + # assert len(subworkflow_lint.passed) > 0 + # assert len(subworkflow_lint.warned) >= 0 + # assert subworkflow_lint.failed[0].lint_test == "main_nf_exists" + + # # cleanup + # shutil.copy(main_nf_copy, main_nf) + # shutil.rmtree(Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow_backup")) def test_subworkflows_empty_file_in_snapshot(self): """Test linting a nf-test subworkflow with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)""" From 178146f75baf6322e36a752c234c628dde9953c5 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 13:34:26 +0200 Subject: [PATCH 35/89] fix export of pydantic models --- nf_core/components/components_command.py | 5 +- nf_core/components/update.py | 52 +++++----- nf_core/modules/modules_repo.py | 1 + nf_core/pipelines/create/create.py | 15 +-- nf_core/pipelines/lint_utils.py | 10 +- nf_core/utils.py | 50 ++++++---- tests/pipelines/lint/test_nextflow_config.py | 15 ++- tests/subworkflows/test_update.py | 20 ++-- tests/test_modules.py | 44 ++++----- tests/test_utils.py | 99 +++++++------------- tests/utils.py | 29 +++++- 11 files changed, 182 insertions(+), 158 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 13a6fed338..a6b46d1b61 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -181,7 +181,10 @@ def load_lint_config(self) -> None: Add parsed config to the `self.lint_config` class attribute. """ _, tools_config = nf_core.utils.load_tools_config(self.directory) - self.lint_config = tools_config.get("lint", {}) + if tools_config is None: + raise UserWarning("Could not load `.nf-core.yml` file.") + else: + self.lint_config = tools_config.get("lint", {}) def check_modules_structure(self) -> None: """ diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 9b24b6c0c9..eb15f976bf 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -97,11 +97,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr updated = [] _, tool_config = nf_core.utils.load_tools_config(self.directory) - self.update_config = tool_config.get("update", {}) - - if self.update_config is None: - raise UserWarning("Could not find '.nf-core.yml' file in pipeline directory") - + self.update_config = getattr(tool_config, "update", {}) or {} self._parameter_checks() # Check modules directory structure @@ -396,27 +392,26 @@ def get_single_component_info(self, component): sha = self.sha config_entry = None - if self.update_config is None: - raise UserWarning("Could not find '.nf-core.yml' file in pipeline directory") - if any( - [ - entry.count("/") == 1 - and (entry.endswith("modules") or entry.endswith("subworkflows")) - and not (entry.endswith(".git") or entry.endswith(".git/")) - for entry in self.update_config.keys() - ] - ): - raise UserWarning( - "Your '.nf-core.yml' file format is outdated. 
" - "The format should be of the form:\n" - "update:\n :\n :\n :" - ) - if isinstance(self.update_config.get(self.modules_repo.remote_url, {}), str): - # If the repo entry is a string, it's the sha to update to - config_entry = self.update_config.get(self.modules_repo.remote_url, {}) - elif component in self.update_config.get(self.modules_repo.remote_url, {}).get(install_dir, {}): - # If the component to update is in .nf-core.yml config file - config_entry = self.update_config[self.modules_repo.remote_url][install_dir].get(component) + if self.update_config is not None: + if any( + [ + entry.count("/") == 1 + and (entry.endswith("modules") or entry.endswith("subworkflows")) + and not (entry.endswith(".git") or entry.endswith(".git/")) + for entry in self.update_config.keys() + ] + ): + raise UserWarning( + "Your '.nf-core.yml' file format is outdated. " + "The format should be of the form:\n" + "update:\n :\n :\n :" + ) + if isinstance(self.update_config.get(self.modules_repo.remote_url, {}), str): + # If the repo entry is a string, it's the sha to update to + config_entry = self.update_config.get(self.modules_repo.remote_url, {}) + elif component in self.update_config.get(self.modules_repo.remote_url, {}).get(install_dir, {}): + # If the component to update is in .nf-core.yml config file + config_entry = self.update_config[self.modules_repo.remote_url][install_dir].get(component) if config_entry is not None and config_entry is not True: if config_entry is False: log.warn( @@ -481,6 +476,7 @@ def get_all_components_info(self, branch=None): components_info = {} # Loop through all the modules/subworkflows in the pipeline # and check if they have an entry in the '.nf-core.yml' file + for repo_name, components in self.modules_json.get_all_components(self.component_type).items(): if isinstance(self.update_config, dict) and ( repo_name not in self.update_config or self.update_config[repo_name] is True @@ -630,10 +626,8 @@ def get_all_components_info(self, branch=None): overridden_repos.append(repo_name) elif isinstance(self.update_config, dict) and self.update_config[repo_name] is False: skipped_repos.append(repo_name) - elif not isinstance(self.update_config, dict): - raise UserWarning("`.nf-core.yml` is not correctly formatted.") else: - raise UserWarning(f"Repo '{repo_name}' has an invalid entry in '.nf-core.yml'") + log.debug(f"no update config for {repo_name} in `.nf-core.yml`") if skipped_repos: skipped_str = "', '".join(skipped_repos) diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index daa7b59811..7d576d4aef 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -63,6 +63,7 @@ def __init__( self.setup_local_repo(remote_url, branch, hide_progress) config_fn, repo_config = load_tools_config(self.local_repo_dir) + assert config_fn is not None and repo_config is not None # mypy try: self.repo_path = repo_config["org_path"] except KeyError: diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index b7d86c5bd7..deeb5554af 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -376,13 +376,14 @@ def render_template(self): if self.config: config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) - with open(config_fn, "w") as fh: - config_yml.update(template=self.config.model_dump()) - # convert posix path to string for yaml dump - config_yml["template"]["outdir"] = str(config_yml["template"]["outdir"]) - yaml.safe_dump(config_yml, fh) - 
diff --git a/nf_core/pipelines/lint_utils.py b/nf_core/pipelines/lint_utils.py
index 4ccf790076..ccab76295f 100644
--- a/nf_core/pipelines/lint_utils.py
+++ b/nf_core/pipelines/lint_utils.py
@@ -110,9 +110,13 @@ def ignore_file(lint_name: str, file_path: Path, dir_path: Path) -> List[List[st
     passed: List[str] = []
     failed: List[str] = []
     ignored: List[str] = []
-    _, lint_conf = nf_core.utils.load_tools_config(dir_path)
-    lint_conf = lint_conf.get("lint", {})
-    ignore_entry: List[str] | bool = lint_conf.get(lint_name, [])
+    _, pipeline_conf = nf_core.utils.load_tools_config(dir_path)
+    lint_conf = getattr(pipeline_conf, "lint", None)
+
+    if lint_conf is None:
+        ignore_entry: List[str] = []
+    else:
+        ignore_entry = lint_conf.get(lint_name, [])
     full_path = dir_path / file_path
     # Return a failed status if we can't find the file
     if not full_path.is_file():
diff --git a/nf_core/utils.py b/nf_core/utils.py
index 4d0566b1af..6794cf04fe 100644
--- a/nf_core/utils.py
+++ b/nf_core/utils.py
@@ -19,7 +19,7 @@
 import time
 from contextlib import contextmanager
 from pathlib import Path
-from typing import Dict, Generator, List, Optional, Tuple, Union
+from typing import Any, Dict, Generator, List, Optional, Tuple, Union

 import git
 import prompt_toolkit.styles
@@ -1042,25 +1042,37 @@ def get_repo_releases_branches(pipeline, wfs):


 class NFCoreTemplateConfig(BaseModel):
-    org: str
-    name: str
-    description: str
-    author: str
-    version: Optional[str]
-    force: Optional[bool]
-    outdir: Optional[str]
-    skip_features: Optional[list]
-    is_nfcore: Optional[bool]
+    org: Optional[str] = None
+    name: Optional[str] = None
+    description: Optional[str] = None
+    author: Optional[str] = None
+    version: Optional[str] = None
+    force: Optional[bool] = None
+    outdir: Optional[str] = None
+    skip_features: Optional[list] = None
+    is_nfcore: Optional[bool] = None
+
+
+LintConfigType = Optional[Dict[str, Union[List[str], List[Dict[str, List[str]]], bool]]]


 class NFCoreYamlConfig(BaseModel):
-    nf_core_version: str
     repository_type: str
-    org_path: str
-    template: NFCoreTemplateConfig
+    nf_core_version: Optional[str] = None
+    org_path: Optional[str] = None
+    lint: LintConfigType = None
+    template: Optional[NFCoreTemplateConfig] = None
+    bump_version: Optional[Dict[str, bool]] = None
+    update: Optional[Dict[str, Union[str, bool, Dict[str, Union[str, Dict[str, Union[str, bool]]]]]]] = None
+
+    def __getitem__(self, item: str) -> Any:
+        return getattr(self, item)
+
+    def get(self, item: str, default: Any = None) -> Any:
+        return getattr(self, item, default)


-def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, NFCoreYamlConfig]:
+def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path], Optional[NFCoreYamlConfig]]:
     """
     Parse the nf-core.yml configuration file

@@ -1078,11 +1090,12 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, NFCoreYa
     if config_fn 
is None: depr_path = get_first_available_path(directory, DEPRECATED_CONFIG_PATHS) if depr_path: - raise AssertionError( + raise UserWarning( f"Deprecated `{depr_path.name}` file found! Please rename the file to `{CONFIG_PATHS[0]}`." ) else: - raise AssertionError(f"Could not find a config file in the directory '{directory}'") + log.debug(f"Could not find a config file in the directory '{directory}'") + return Path(directory, CONFIG_PATHS[0]), None with open(str(config_fn)) as fh: tools_config = yaml.safe_load(fh) @@ -1094,7 +1107,10 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, NFCoreYa try: nf_core_yaml_config = NFCoreYamlConfig(**tools_config) except ValidationError as e: - raise AssertionError(f"Config file '{config_fn}' is invalid: {e}") + error_message = f"Config file '{config_fn}' is invalid" + for error in e.errors(): + error_message += f"\n{error['loc'][0]}: {error['msg']}" + raise AssertionError(error_message) log.debug("Using config file: %s", config_fn) return config_fn, nf_core_yaml_config diff --git a/tests/pipelines/lint/test_nextflow_config.py b/tests/pipelines/lint/test_nextflow_config.py index 01173aec34..3cc9355452 100644 --- a/tests/pipelines/lint/test_nextflow_config.py +++ b/tests/pipelines/lint/test_nextflow_config.py @@ -2,8 +2,11 @@ import re from pathlib import Path +import yaml + import nf_core.pipelines.create.create import nf_core.pipelines.lint +from nf_core.utils import NFCoreYamlConfig from ..test_lint import TestLint @@ -124,11 +127,13 @@ def test_allow_params_reference_in_main_nf(self): def test_default_values_ignored(self): """Test ignoring linting of default values.""" # Add max_cpus to the ignore list - nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write( - "repository_type: pipeline\nlint:\n nextflow_config:\n - config_defaults:\n - params.max_cpus\n" - ) + nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" + nf_core_yml = NFCoreYamlConfig( + repository_type="pipeline", lint={"nextflow_config": [{"config_defaults": ["params.max_cpus"]}]} + ) + with open(nf_core_yml_path, "w") as f: + yaml.dump(nf_core_yml.model_dump(), f) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj.load_pipeline_config() lint_obj._load_lint_config() diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py index d3b2433572..7b17a621bc 100644 --- a/tests/subworkflows/test_update.py +++ b/tests/subworkflows/test_update.py @@ -156,9 +156,10 @@ def test_update_with_config_fixed_version(self): # Fix the subworkflow version in the .nf-core.yml to an old version update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": OLD_SUBWORKFLOWS_SHA}}} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update all subworkflows in the pipeline update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) @@ -186,9 +187,10 @@ def test_update_with_config_dont_update(self): # Set the fastq_align_bowtie2 field to no update in the .nf-core.yml update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": False}}} config_fn, tools_config = 
nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update all modules in the pipeline update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) @@ -216,9 +218,10 @@ def test_update_with_config_fix_all(self): # Fix the version of all nf-core subworkflows in the .nf-core.yml to an old version update_config = {NF_CORE_MODULES_REMOTE: OLD_SUBWORKFLOWS_SHA} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update fastq_align_bowtie2 update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=False, update_deps=True, show_diff=False) @@ -246,9 +249,10 @@ def test_update_with_config_no_updates(self): # Set all repository updates to False update_config = {NF_CORE_MODULES_REMOTE: False} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update all subworkflows in the pipeline update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) diff --git a/tests/test_modules.py b/tests/test_modules.py index 13bf32f971..9ce74fd4e1 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -1,8 +1,6 @@ """Tests covering the modules commands""" import json -import os -import shutil import unittest from pathlib import Path @@ -12,7 +10,13 @@ import yaml import nf_core.modules +import nf_core.modules.create +import nf_core.modules.install +import nf_core.modules.modules_repo +import nf_core.modules.remove import nf_core.pipelines.create.create +from nf_core import __version__ +from nf_core.utils import NFCoreYamlConfig from .utils import ( GITLAB_BRANCH_TEST_BRANCH, @@ -34,24 +38,28 @@ def create_modules_repo_dummy(tmp_dir): Path(root_dir, "modules", "nf-core").mkdir(parents=True) Path(root_dir, "tests", "modules", "nf-core").mkdir(parents=True) Path(root_dir, "tests", "config").mkdir(parents=True) + + nf_core_yml = NFCoreYamlConfig(nf_core_version=__version__, repository_type="modules", org_path="nf-core") with open(Path(root_dir, ".nf-core.yml"), "w") as fh: - fh.writelines(["repository_type: modules", "\n", "org_path: nf-core", "\n"]) + yaml.dump(nf_core_yml.model_dump(), fh) # mock biocontainers and anaconda response with responses.RequestsMock() as rsps: mock_anaconda_api_calls(rsps, "bpipe", "0.9.12--hdfd78af_0") mock_biocontainers_api_calls(rsps, "bpipe", "0.9.12--hdfd78af_0") # bpipe is a valid package on bioconda that is very unlikely to ever be added to nf-core/modules - module_create = nf_core.modules.ModuleCreate(root_dir, "bpipe/test", "@author", "process_single", False, False) + module_create = nf_core.modules.create.ModuleCreate( + root_dir, "bpipe/test", "@author", "process_single", False, False + ) with requests_cache.disabled(): - 
module_create.create() + assert module_create.create() # Remove doi from meta.yml which makes lint fail meta_yml_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml") - with open(meta_yml_path) as fh: + with open(str(meta_yml_path)) as fh: meta_yml = yaml.safe_load(fh) del meta_yml["tools"][0]["bpipe"]["doi"] - with open(meta_yml_path, "w") as fh: + with open(str(meta_yml_path), "w") as fh: yaml.dump(meta_yml, fh) # Add dummy content to main.nf.test.snap test_snap_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap") @@ -102,8 +110,8 @@ def setUp(self): self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() # Set up install objects - self.mods_install = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=False, force=True) - self.mods_install_old = nf_core.modules.ModuleInstall( + self.mods_install = nf_core.modules.install.ModuleInstall(self.pipeline_dir, prompt=False, force=True) + self.mods_install_old = nf_core.modules.install.ModuleInstall( self.pipeline_dir, prompt=False, force=False, @@ -111,21 +119,21 @@ def setUp(self): remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH, ) - self.mods_install_trimgalore = nf_core.modules.ModuleInstall( + self.mods_install_trimgalore = nf_core.modules.install.ModuleInstall( self.pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH, ) - self.mods_install_gitlab = nf_core.modules.ModuleInstall( + self.mods_install_gitlab = nf_core.modules.install.ModuleInstall( self.pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH, ) - self.mods_install_gitlab_old = nf_core.modules.ModuleInstall( + self.mods_install_gitlab_old = nf_core.modules.install.ModuleInstall( self.pipeline_dir, prompt=False, force=False, @@ -135,8 +143,8 @@ def setUp(self): ) # Set up remove objects - self.mods_remove = nf_core.modules.ModuleRemove(self.pipeline_dir) - self.mods_remove_gitlab = nf_core.modules.ModuleRemove( + self.mods_remove = nf_core.modules.remove.ModuleRemove(self.pipeline_dir) + self.mods_remove_gitlab = nf_core.modules.remove.ModuleRemove( self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH, @@ -145,15 +153,9 @@ def setUp(self): # Set up the nf-core/modules repo dummy self.nfcore_modules = create_modules_repo_dummy(self.tmp_dir) - def tearDown(self): - """Clean up temporary files and folders""" - - if os.path.exists(self.tmp_dir): - shutil.rmtree(self.tmp_dir) - def test_modulesrepo_class(self): """Initialise a modules repo object""" - modrepo = nf_core.modules.ModulesRepo() + modrepo = nf_core.modules.modules_repo.ModulesRepo() assert modrepo.repo_path == "nf-core" assert modrepo.branch == "master" diff --git a/tests/test_utils.py b/tests/test_utils.py index 860cba5ba6..0d012716a9 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,9 +1,6 @@ """Tests covering for utility functions.""" import os -import shutil -import tempfile -import unittest from pathlib import Path from unittest import mock @@ -14,6 +11,7 @@ import nf_core.pipelines.list import nf_core.utils +from .test_pipelines import TestPipelines from .utils import with_temporary_folder TEST_DATA_DIR = Path(__file__).parent / "data" @@ -28,31 +26,9 @@ def test_strip_ansi_codes(): assert stripped == "ls examplefile.zip" -class TestUtils(unittest.TestCase): +class TestUtils(TestPipelines): """Class for utils tests""" - def setUp(self): - """Function that runs at start of tests for common resources - - 
Use nf_core.pipelines.create() to make a pipeline that we can use for testing - """ - self.tmp_dir = tempfile.mkdtemp() - self.test_pipeline_dir = os.path.join(self.tmp_dir, "nf-core-testpipeline") - self.create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", - "This is a test pipeline", - "Test McTestFace", - no_git=True, - outdir=self.test_pipeline_dir, - ) - self.create_obj.init_pipeline() - # Base Pipeline object on this directory - self.pipeline_obj = nf_core.utils.Pipeline(self.test_pipeline_dir) - - def tearDown(self): - if os.path.exists(self.tmp_dir): - shutil.rmtree(self.tmp_dir) - def test_check_if_outdated_1(self): current_version = "1.0" remote_version = "2.0" @@ -110,7 +86,7 @@ def testload_pipeline_config(self): def test_list_files_git(self): """Test listing pipeline files using `git ls`""" files = self.pipeline_obj.list_files() - assert Path(self.test_pipeline_dir, "main.nf") in files + assert Path(self.pipeline_dir, "main.nf") in files @with_temporary_folder def test_list_files_no_git(self, tmpdir): @@ -193,46 +169,39 @@ def test_get_repo_releases_branches_not_exists_slash(self): with pytest.raises(AssertionError): nf_core.utils.get_repo_releases_branches("made-up/pipeline", wfs) - -def test_validate_file_md5(): - # MD5(test) = d8e8fca2dc0f896fd7cb4cb0031ba249 - test_file = TEST_DATA_DIR / "test.txt" - test_file_md5 = "d8e8fca2dc0f896fd7cb4cb0031ba249" - different_md5 = "9e7b964750cf0bb08ee960fce356b6d6" - non_hex_string = "s" - assert nf_core.utils.validate_file_md5(test_file, test_file_md5) - with pytest.raises(IOError): - nf_core.utils.validate_file_md5(test_file, different_md5) - with pytest.raises(ValueError): - nf_core.utils.validate_file_md5(test_file, non_hex_string) - - -def test_nested_setitem(): - d = {"a": {"b": {"c": "value"}}} - nf_core.utils.nested_setitem(d, ["a", "b", "c"], "value new") - assert d["a"]["b"]["c"] == "value new" - assert d == {"a": {"b": {"c": "value new"}}} - - -def test_nested_delitem(): - d = {"a": {"b": {"c": "value"}}} - nf_core.utils.nested_delitem(d, ["a", "b", "c"]) - assert "c" not in d["a"]["b"] - assert d == {"a": {"b": {}}} - - -def test_set_wd(): - with tempfile.TemporaryDirectory() as tmpdirname: - with nf_core.utils.set_wd(tmpdirname): + def test_validate_file_md5(self): + # MD5(test) = d8e8fca2dc0f896fd7cb4cb0031ba249 + test_file = TEST_DATA_DIR / "test.txt" + test_file_md5 = "d8e8fca2dc0f896fd7cb4cb0031ba249" + different_md5 = "9e7b964750cf0bb08ee960fce356b6d6" + non_hex_string = "s" + assert nf_core.utils.validate_file_md5(test_file, test_file_md5) + with pytest.raises(IOError): + nf_core.utils.validate_file_md5(test_file, different_md5) + with pytest.raises(ValueError): + nf_core.utils.validate_file_md5(test_file, non_hex_string) + + def test_nested_setitem(self): + d = {"a": {"b": {"c": "value"}}} + nf_core.utils.nested_setitem(d, ["a", "b", "c"], "value new") + assert d["a"]["b"]["c"] == "value new" + assert d == {"a": {"b": {"c": "value new"}}} + + def test_nested_delitem(self): + d = {"a": {"b": {"c": "value"}}} + nf_core.utils.nested_delitem(d, ["a", "b", "c"]) + assert "c" not in d["a"]["b"] + assert d == {"a": {"b": {}}} + + def test_set_wd(self): + with nf_core.utils.set_wd(self.tmp_dir): context_wd = Path().resolve() - assert context_wd == Path(tmpdirname).resolve() + assert context_wd == Path(self.tmp_dir).resolve() assert context_wd != Path().resolve() - -def test_set_wd_revert_on_raise(): - wd_before_context = Path().resolve() - with tempfile.TemporaryDirectory() as tmpdirname: + def 
test_set_wd_revert_on_raise(self): + wd_before_context = Path().resolve() with pytest.raises(Exception): - with nf_core.utils.set_wd(tmpdirname): + with nf_core.utils.set_wd(self.tmp_dir): raise Exception - assert wd_before_context == Path().resolve() + assert wd_before_context == Path().resolve() diff --git a/tests/utils.py b/tests/utils.py index 90c4ae0418..1d5a8a115d 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -9,9 +9,12 @@ from typing import Any, Callable, Tuple import responses +import yaml import nf_core.modules import nf_core.pipelines.create.create +from nf_core import __version__ +from nf_core.utils import NFCoreTemplateConfig, NFCoreYamlConfig TEST_DATA_DIR = Path(__file__).parent / "data" OLD_TRIMGALORE_SHA = "9b7a3bdefeaad5d42324aa7dd50f87bea1b04386" @@ -95,7 +98,7 @@ def mock_biocontainers_api_calls(rsps: responses.RequestsMock, module: str, vers rsps.get(biocontainers_api_url, json=biocontainers_mock, status=200) -def create_tmp_pipeline() -> Tuple[Path, Path, str, Path]: +def create_tmp_pipeline(no_git: bool = False) -> Tuple[Path, Path, str, Path]: """Create a new Pipeline for testing""" tmp_dir = Path(tempfile.TemporaryDirectory().name) @@ -103,9 +106,31 @@ def create_tmp_pipeline() -> Tuple[Path, Path, str, Path]: template_dir = root_repo_dir / "nf_core" / "pipeline-template" pipeline_name = "mypipeline" pipeline_dir = tmp_dir / pipeline_name + pipeline_dir.mkdir(parents=True) + + nf_core_yml = NFCoreYamlConfig( + nf_core_version=__version__, + repository_type="modules", + org_path="nf-core", + lint=None, + template=NFCoreTemplateConfig( + name="mypipeline", + author="me", + description="it is mine", + org="nf-core", + version=None, + force=True, + is_nfcore=None, + skip_features=None, + outdir=None, + ), + bump_version=None, + ) + with open(str(Path(pipeline_dir, ".nf-core.yml")), "w") as fh: + yaml.dump(nf_core_yml.model_dump(), fh) nf_core.pipelines.create.create.PipelineCreate( - pipeline_name, "it is mine", "me", no_git=False, outdir=pipeline_dir + pipeline_name, "it is mine", "me", no_git=no_git, outdir=pipeline_dir, force=True ).init_pipeline() # return values to instance variables for later use in test methods From d0a968ceb3ffe85e87303cc0d63838dbbc6a71b9 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 13:35:06 +0200 Subject: [PATCH 36/89] fix tests --- nf_core/modules/lint/__init__.py | 4 +- nf_core/pipelines/lint/files_exist.py | 2 +- nf_core/pipelines/lint/nfcore_yml.py | 3 +- nf_core/pipelines/lint/schema_description.py | 2 +- nf_core/pipelines/lint/template_strings.py | 2 +- nf_core/pipelines/sync.py | 45 ++++++++++--------- .../subworkflows/lint/subworkflow_tests.py | 11 +++-- tests/modules/test_modules_json.py | 2 +- tests/pipelines/test_lint.py | 4 +- 9 files changed, 39 insertions(+), 36 deletions(-) diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 2b10b4df5a..cea75d8f2e 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -228,7 +228,7 @@ def lint_module( # TODO: consider unifying modules and subworkflows lint_module() function and add it to the ComponentLint class # Only check the main script in case of a local module if local: - self.main_nf(mod, fix_version, self.registry, progress_bar) + self.main_nf(mod, fix_version, registry, progress_bar) self.passed += [LintResult(mod, *m) for m in mod.passed] warned = [LintResult(mod, *m) for m in (mod.warned + mod.failed)] if not self.fail_warned: @@ -245,7 +245,7 @@ def lint_module( for test_name in self.lint_tests: 
if test_name == "main_nf": - getattr(self, test_name)(mod, fix_version, self.registry, progress_bar) + getattr(self, test_name)(mod, fix_version, registry, progress_bar) else: getattr(self, test_name)(mod) diff --git a/nf_core/pipelines/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py index edad62aab8..ad0605dcf7 100644 --- a/nf_core/pipelines/lint/files_exist.py +++ b/nf_core/pipelines/lint/files_exist.py @@ -205,7 +205,7 @@ def files_exist(self) -> Dict[str, List[str]]: ] # Remove files that should be ignored according to the linting config - ignore_files = self.lint_config.get("files_exist", []) + ignore_files = self.lint_config.get("files_exist", []) if self.lint_config is not None else [] def pf(file_path: Union[str, Path]) -> Path: return Path(self.wf_path, file_path) diff --git a/nf_core/pipelines/lint/nfcore_yml.py b/nf_core/pipelines/lint/nfcore_yml.py index f23b2f1a84..e0d5fb2005 100644 --- a/nf_core/pipelines/lint/nfcore_yml.py +++ b/nf_core/pipelines/lint/nfcore_yml.py @@ -27,8 +27,7 @@ def nfcore_yml(self) -> Dict[str, List[str]]: ignored: List[str] = [] # Remove field that should be ignored according to the linting config - ignore_configs = self.lint_config.get(".nf-core", []) - + ignore_configs = self.lint_config.get(".nf-core", []) if self.lint_config is not None else [] try: with open(Path(self.wf_path, ".nf-core.yml")) as fh: content = fh.read() diff --git a/nf_core/pipelines/lint/schema_description.py b/nf_core/pipelines/lint/schema_description.py index 82165b6e7d..d617e40949 100644 --- a/nf_core/pipelines/lint/schema_description.py +++ b/nf_core/pipelines/lint/schema_description.py @@ -24,7 +24,7 @@ def schema_description(self): self.schema_obj.load_lint_schema() # Get parameters that should be ignored according to the linting config - ignore_params = self.lint_config.get("schema_description", []) + ignore_params = self.lint_config.get("schema_description", []) if self.lint_config is not None else [] # Get ungrouped params if "properties" in self.schema_obj.schema.keys(): diff --git a/nf_core/pipelines/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py index 90c47203f6..37a1f64daf 100644 --- a/nf_core/pipelines/lint/template_strings.py +++ b/nf_core/pipelines/lint/template_strings.py @@ -38,7 +38,7 @@ def template_strings(self): failed = [] ignored = [] # Files that should be ignored according to the linting config - ignore_files = self.lint_config.get("template_strings", []) + ignore_files = self.lint_config.get("template_strings", []) if self.lint_config is not None else [] files = self.list_files() # Loop through files, searching for string diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index a309fa8c3b..b1da99a626 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -10,6 +10,7 @@ import git import questionary import requests +import requests.auth import requests_cache import rich import yaml @@ -86,23 +87,23 @@ def __init__( self.pr_url = "" self.config_yml_path, self.config_yml = nf_core.utils.load_tools_config(self.pipeline_dir) - + assert self.config_yml_path is not None # mypy # Throw deprecation warning if template_yaml_path is set if template_yaml_path is not None: log.warning( f"The `template_yaml_path` argument is deprecated. Saving pipeline creation settings in .nf-core.yml instead. Please remove {template_yaml_path} file." 
) - if "template" in self.config_yml: + if getattr(self.config_yml, "template", None) is not None: overwrite_template = questionary.confirm( f"A template section already exists in '{self.config_yml_path}'. Do you want to overwrite?", style=nf_core.utils.nfcore_question_style, default=False, ).unsafe_ask() - if overwrite_template or "template" not in self.config_yml: + if overwrite_template or getattr(self.config_yml, "template", None) is None: with open(template_yaml_path) as f: - self.config_yml["template"] = yaml.safe_load(f) + self.config_yml.template = yaml.safe_load(f) with open(self.config_yml_path, "w") as fh: - yaml.safe_dump(self.config_yml, fh) + yaml.safe_dump(self.config_yml.model_dump(), fh) log.info(f"Saved pipeline creation settings to '{self.config_yml_path}'") raise SystemExit( f"Please commit your changes and delete the {template_yaml_path} file. Then run the sync command again." @@ -259,11 +260,12 @@ def make_template_pipeline(self): # Only show error messages from pipeline creation logging.getLogger("nf_core.pipelines.create").setLevel(logging.ERROR) - + assert self.config_yml_path is not None + assert self.config_yml is not None # Re-write the template yaml info from .nf-core.yml config - if "template" in self.config_yml: + if getattr(self.config_yml, "template", None) is not None: with open(self.config_yml_path, "w") as config_path: - yaml.safe_dump(self.config_yml, config_path) + yaml.safe_dump(self.config_yml.model_dump(), config_path) try: nf_core.pipelines.create.create.PipelineCreate( @@ -411,21 +413,24 @@ def close_open_template_merge_prs(self): return False for pr in list_prs_json: - log.debug(f"Looking at PR from '{pr['head']['ref']}': {pr['html_url']}") - # Ignore closed PRs - if pr["state"] != "open": - log.debug(f"Ignoring PR as state not open ({pr['state']}): {pr['html_url']}") - continue + if isinstance(pr, int): + log.debug(f"Incorrect PR format: {pr}") + else: + log.debug(f"Looking at PR from '{pr['head']['ref']}': {pr['html_url']}") + # Ignore closed PRs + if pr["state"] != "open": + log.debug(f"Ignoring PR as state not open ({pr['state']}): {pr['html_url']}") + continue - # Don't close the new PR that we just opened - if pr["head"]["ref"] == self.merge_branch: - continue + # Don't close the new PR that we just opened + if pr["head"]["ref"] == self.merge_branch: + continue - # PR is from an automated branch and goes to our target base - if pr["head"]["ref"].startswith("nf-core-template-merge-") and pr["base"]["ref"] == self.from_branch: - self.close_open_pr(pr) + # PR is from an automated branch and goes to our target base + if pr["head"]["ref"].startswith("nf-core-template-merge-") and pr["base"]["ref"] == self.from_branch: + self.close_open_pr(pr) - def close_open_pr(self, pr): + def close_open_pr(self, pr) -> bool: """Given a PR API response, add a comment and close.""" log.debug(f"Attempting to close PR: '{pr['html_url']}'") diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index af39334744..7ca825f04f 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -27,14 +27,14 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): repo_dir = subworkflow.component_dir.parts[ : subworkflow.component_dir.parts.index(subworkflow.component_name.split("/")[0]) ][-1] - test_dir = Path( + pytest_dir = Path( subworkflow.base_dir, "tests", "subworkflows", repo_dir, subworkflow.component_name, ) - pytest_main_nf = Path(test_dir, "main.nf") + 
pytest_main_nf = Path(pytest_dir, "main.nf") is_pytest = pytest_main_nf.is_file() log.debug(f"{pytest_main_nf} is pytest: {is_pytest}") if subworkflow.nftest_testdir.is_dir(): @@ -265,8 +265,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): # Check that the old test directory does not exist if not is_pytest: - old_test_dir = Path(subworkflow.base_dir, "tests", "subworkflows", subworkflow.component_name) - if old_test_dir.is_dir(): - subworkflow.failed.append(("test_old_test_dir", "old test directory exists", old_test_dir)) + if pytest_dir.is_dir(): + subworkflow.failed.append(("test_old_test_dir", "old test directory exists", pytest_dir)) else: - subworkflow.passed.append(("test_old_test_dir", "old test directory does not exist", old_test_dir)) + subworkflow.passed.append(("test_old_test_dir", "old test directory does not exist", pytest_dir)) diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py index 2ab058fa70..845f9c3e34 100644 --- a/tests/modules/test_modules_json.py +++ b/tests/modules/test_modules_json.py @@ -220,7 +220,7 @@ def test_mod_json_with_empty_modules_value(self): mod_json_obj.create() # Create modules.json explicitly to get correct module sha mod_json_orig = mod_json_obj.get_modules_json() mod_json = copy.deepcopy(mod_json_orig) - mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"] = "" + mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"] = {} # save the altered module.json and load it again to check if it will fix itself mod_json_obj.modules_json = mod_json mod_json_obj.dump() diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index ab8bcf6b24..9ca29d249f 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -47,7 +47,7 @@ def test_init_pipeline_lint(self): def test_load_lint_config_not_found(self): """Try to load a linting config file that doesn't exist""" - self.lint_obj._load_lint_config() + assert self.lint_obj._load_lint_config() assert self.lint_obj.lint_config == {} def test_load_lint_config_ignore_all_tests(self): @@ -58,7 +58,7 @@ def test_load_lint_config_ignore_all_tests(self): lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) # Make a config file listing all test names - config_dict = {"lint": {test_name: False for test_name in lint_obj.lint_tests}} + config_dict = {"repository_type": "pipeline", "lint": {test_name: False for test_name in lint_obj.lint_tests}} with open(Path(new_pipeline, ".nf-core.yml"), "w") as fh: yaml.dump(config_dict, fh) From 33f461aa5a3a888dd0914796c99b3201b2a06f3e Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 13:35:26 +0200 Subject: [PATCH 37/89] migrate to pathlib --- nf_core/modules/modules_json.py | 2 +- nf_core/pipelines/create/create.py | 2 +- tests/pipelines/test_create.py | 11 +++++++---- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 02ce6fa5a2..01785c6d65 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1192,7 +1192,7 @@ def components_with_repos(): self.modules_dir, modules_repo.repo_path, ) - for dir_name, _, _ in os.walk(repo_url_path): + for dir_name, _, _ in Path.walk(repo_url_path): if component_type == "modules": if len(Path(directory).parts) > 1: # The module name is TOOL/SUBTOOL paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index deeb5554af..f032db7f69 100644 --- 
a/nf_core/pipelines/create/create.py
+++ b/nf_core/pipelines/create/create.py
@@ -516,7 +516,7 @@ def fix_linting(self):
         with open(self.outdir / config_fn, "w") as fh:
             yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False)

-        run_prettier_on_file(os.path.join(self.outdir, config_fn))
+        run_prettier_on_file(Path(self.outdir, config_fn))

     def make_pipeline_logo(self):
         """Fetch a logo for the new pipeline from the nf-core website"""
diff --git a/tests/pipelines/test_create.py b/tests/pipelines/test_create.py
index bb27445e70..35cff5cab6 100644
--- a/tests/pipelines/test_create.py
+++ b/tests/pipelines/test_create.py
@@ -2,6 +2,7 @@

 import os
 import unittest
+from pathlib import Path

 import git
 import yaml
@@ -22,7 +23,8 @@ def setUp(self):
         self.pipeline_version = "1.0.0"
         self.default_branch = "default"

-    def test_pipeline_creation(self):
+    @with_temporary_folder
+    def test_pipeline_creation(self, tmp_path):
         pipeline = nf_core.pipelines.create.create.PipelineCreate(
             name=self.pipeline_name,
             description=self.pipeline_description,
@@ -30,6 +32,7 @@ def test_pipeline_creation(self):
             version=self.pipeline_version,
             no_git=False,
             force=True,
+            outdir=tmp_path,
             default_branch=self.default_branch,
         )

@@ -51,10 +54,10 @@ def test_pipeline_creation_initiation(self, tmp_path):
             default_branch=self.default_branch,
         )
         pipeline.init_pipeline()
-        assert os.path.isdir(os.path.join(pipeline.outdir, ".git"))
+        assert Path(pipeline.outdir, ".git").is_dir()
         assert f" {self.default_branch}\n" in git.Repo.init(pipeline.outdir).git.branch()
-        assert not os.path.exists(os.path.join(pipeline.outdir, "pipeline_template.yml"))
-        with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh:
+        assert not Path(pipeline.outdir, "pipeline_template.yml").exists()
+        with open(Path(pipeline.outdir, ".nf-core.yml")) as fh:
             assert "template" in fh.read()

     @with_temporary_folder
From 1473611b9025ec4221e91fdbe0709723c59962b8 Mon Sep 17 00:00:00 2001
From: mashehu
Date: Thu, 25 Jul 2024 13:35:44 +0200
Subject: [PATCH 38/89] change import strategy for lint tests

---
 nf_core/subworkflows/lint/__init__.py | 21 ++++++++++++++-------
 1 file changed, 14 insertions(+), 7 deletions(-)

diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py
index a073710884..b366ddfb51 100644
--- a/nf_core/subworkflows/lint/__init__.py
+++ b/nf_core/subworkflows/lint/__init__.py
@@ -19,6 +19,14 @@

 log = logging.getLogger(__name__)

+# Import lint functions
+from .main_nf import main_nf  # type: ignore[misc]
+from .meta_yml import meta_yml  # type: ignore[misc]
+from .subworkflow_changes import subworkflow_changes  # type: ignore[misc]
+from .subworkflow_tests import subworkflow_tests  # type: ignore[misc]
+from .subworkflow_todos import subworkflow_todos  # type: ignore[misc]
+from .subworkflow_version import subworkflow_version  # type: ignore[misc]
+

 class SubworkflowLint(ComponentLint):
     """
@@ -26,13 +34,12 @@ class SubworkflowLint(ComponentLint):
     repository or in any nf-core pipeline directory
     """

-    # Import lint functions
-    from .main_nf import main_nf  # type: ignore[misc]
-    from .meta_yml import meta_yml  # type: ignore[misc]
-    from .subworkflow_changes import subworkflow_changes  # type: ignore[misc]
-    from .subworkflow_tests import subworkflow_tests  # type: ignore[misc]
-    from .subworkflow_todos import subworkflow_todos  # type: ignore[misc]
-    from .subworkflow_version import subworkflow_version  # type: ignore[misc]
+    main_nf = main_nf
+    meta_yml = meta_yml
+    subworkflow_changes = subworkflow_changes
+    subworkflow_tests = subworkflow_tests
+    subworkflow_todos = subworkflow_todos
+    subworkflow_version = subworkflow_version

     def __init__(
         self,
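For readers unfamiliar with the trick in the diff above: a function bound as a class attribute becomes an ordinary method, so instances of SubworkflowLint can still call self.main_nf(...) exactly as before, while the imports now live at module level where linters and type checkers can see them. A self-contained sketch with hypothetical names:

    def lint_component(self) -> str:
        """Module-level function, standing in for an imported lint test."""
        return f"linting {self.component}"

    class ExampleLint:
        # Binding the module-level function as a class attribute turns it
        # into a normal method on the class.
        lint_component = lint_component

        def __init__(self, component: str):
            self.component = component

    assert ExampleLint("bpipe/test").lint_component() == "linting bpipe/test"
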
From 9dc61d61059bb2cb2b2bed4c7001251b01e863df Mon Sep 17 00:00:00 2001
From: mashehu
Date: Thu, 25 Jul 2024 13:37:36 +0200
Subject: [PATCH 39/89] migrate to pathlib

---
 nf_core/components/components_command.py      | 40 +++++++++++--------
 nf_core/components/list.py                    |  3 +-
 nf_core/modules/modules_repo.py               | 10 +++--
 nf_core/pipelines/lint/actions_awsfulltest.py |  9 +++--
 nf_core/pipelines/lint/actions_awstest.py     |  6 +--
 nf_core/pipelines/lint/actions_ci.py          |  6 +--
 .../lint/actions_schema_validation.py         |  7 ++--
 nf_core/pipelines/lint/files_unchanged.py     |  8 ++--
 nf_core/pipelines/lint/merge_markers.py       | 32 +++++++--------
 nf_core/pipelines/lint/modules_structure.py   |  3 +-
 nf_core/pipelines/lint/nextflow_config.py     | 19 ++++-----
 nf_core/pipelines/lint/pipeline_todos.py      | 20 +++++-----
 nf_core/pipelines/lint/readme.py              |  6 +--
 13 files changed, 90 insertions(+), 79 deletions(-)
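The diffs below repeat a small set of substitutions. A rough sketch of the main equivalences (not from the patch itself), assuming Python >= 3.12, since pathlib.Path.walk() was only added in 3.12 and older interpreters must keep os.walk():

    import os
    from pathlib import Path

    directory = Path("modules")

    # os.path style (works on every supported Python version)
    old = [
        os.path.relpath(dirpath, start=directory)
        for dirpath, _, files in os.walk(directory)
        if "main.nf" in files
    ]

    # pathlib style used in this commit (Path.walk() needs Python >= 3.12)
    new = [
        str(Path(dirpath).relative_to(directory))
        for dirpath, _, files in Path.walk(directory)
        if "main.nf" in files
    ]

    assert old == new
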
""" - try: if self.directory: - self.directory, self.repo_type, self.org = get_repo_info(self.directory, use_prompt=not self.no_prompts) + self.directory, self.repo_type, self.org = get_repo_info( + Path(self.directory), use_prompt=not self.no_prompts + ) else: self.repo_type = None self.org = "" + except UserWarning: if nf_dir_req: raise self.repo_type = None self.org = "" + self.default_modules_path = Path("modules", self.org) self.default_tests_path = Path("tests", "modules", self.org) self.default_subworkflows_path = Path("subworkflows", self.org) @@ -82,8 +86,8 @@ def get_components_clone_modules(self) -> List[str]: elif self.component_type == "subworkflows": component_base_path = Path(self.directory, self.default_subworkflows_path) return [ - str(Path(dir).relative_to(component_base_path)) - for dir, _, files in os.walk(component_base_path) + str(Path(directory).relative_to(component_base_path)) + for directory, _, files in Path.walk(component_base_path) if "main.nf" in files ] @@ -91,12 +95,12 @@ def has_valid_directory(self) -> bool: """Check that we were given a pipeline or clone of nf-core/modules""" if self.repo_type == "modules": return True - if self.directory is None or not os.path.exists(self.directory): + if self.directory is None or not Path(self.directory).exists(): log.error(f"Could not find directory: {self.directory}") return False - main_nf = os.path.join(self.directory, "main.nf") - nf_config = os.path.join(self.directory, "nextflow.config") - if not os.path.exists(main_nf) and not os.path.exists(nf_config): + main_nf = Path(self.directory, "main.nf") + nf_config = Path(self.directory, "nextflow.config") + if not main_nf.exists() and not nf_config.exists(): if Path(self.directory).resolve().parts[-1].startswith("nf-core"): raise UserWarning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.directory}'") log.warning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.directory}'") @@ -104,8 +108,8 @@ def has_valid_directory(self) -> bool: def has_modules_file(self) -> None: """Checks whether a module.json file has been created and creates one if it is missing""" - modules_json_path = os.path.join(self.directory, "modules.json") - if not os.path.exists(modules_json_path): + modules_json_path = Path(self.directory, "modules.json") + if not modules_json_path.exists(): log.info("Creating missing 'module.json' file.") ModulesJson(self.directory).create() @@ -122,10 +126,10 @@ def clear_component_dir(self, component_name: str, component_dir: Union[str, Pat try: shutil.rmtree(component_dir) # remove all empty directories - for dir_path, dir_names, filenames in os.walk(self.directory, topdown=False): + for dir_path, dir_names, filenames in Path.walk(Path(self.directory), top_down=False): if not dir_names and not filenames: try: - os.rmdir(dir_path) + dir_path.rmdir() except OSError: pass else: @@ -152,7 +156,9 @@ def components_from_repo(self, install_dir: str) -> List[str]: raise LookupError(f"Nothing installed from {install_dir} in pipeline") return [ - str(Path(dir_path).relative_to(repo_dir)) for dir_path, _, files in os.walk(repo_dir) if "main.nf" in files + str(Path(dir_path).relative_to(repo_dir)) + for dir_path, _, files in Path.walk(repo_dir) + if "main.nf" in files ] def install_component_files( @@ -196,7 +202,7 @@ def check_modules_structure(self) -> None: """ if self.repo_type == "pipeline": wrong_location_modules: List[Path] = [] - for directory, _, files in os.walk(Path(self.directory, "modules")): + for directory, _, files in 
Path.walk(Path(self.directory, "modules")): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.directory, "modules")) parts = module_path.parts diff --git a/nf_core/components/list.py b/nf_core/components/list.py index 67468b4a5a..0a6b654467 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -1,5 +1,6 @@ import json import logging +from pathlib import Path from typing import Dict, List, Optional, Union, cast import rich.table @@ -15,7 +16,7 @@ class ComponentList(ComponentCommand): def __init__( self, component_type: str, - pipeline_dir: str, + pipeline_dir: Union[str, Path], remote: bool = True, remote_url: Optional[str] = None, branch: Optional[str] = None, diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 7d576d4aef..bc92072e3b 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -63,16 +63,18 @@ def __init__( self.setup_local_repo(remote_url, branch, hide_progress) config_fn, repo_config = load_tools_config(self.local_repo_dir) - assert config_fn is not None and repo_config is not None # mypy + if config_fn is None or repo_config is None: + raise UserWarning(f"Could not find a configuration file in {self.local_repo_dir}") try: - self.repo_path = repo_config["org_path"] + self.repo_path = repo_config.org_path except KeyError: raise UserWarning(f"'org_path' key not present in {config_fn.name}") # Verify that the repo seems to be correctly configured if self.repo_path != NF_CORE_MODULES_NAME or self.branch: self.verify_branch() - + if self.repo_path is None: + raise UserWarning(f"Could not find the org_path in the configuration file: {config_fn.name}") # Convenience variable self.modules_dir = Path(self.local_repo_dir, "modules", self.repo_path) self.subworkflows_dir = Path(self.local_repo_dir, "subworkflows", self.repo_path) @@ -96,7 +98,7 @@ def setup_local_repo(self, remote, branch, hide_progress=True, in_cache=False): branch (str): name of branch to use Sets self.repo """ - self.local_repo_dir = os.path.join(NFCORE_DIR if not in_cache else NFCORE_CACHE_DIR, self.fullname) + self.local_repo_dir = Path(NFCORE_DIR if not in_cache else NFCORE_CACHE_DIR, self.fullname) try: if not os.path.exists(self.local_repo_dir): try: diff --git a/nf_core/pipelines/lint/actions_awsfulltest.py b/nf_core/pipelines/lint/actions_awsfulltest.py index 4cf3bece28..7ea167f6c9 100644 --- a/nf_core/pipelines/lint/actions_awsfulltest.py +++ b/nf_core/pipelines/lint/actions_awsfulltest.py @@ -1,9 +1,10 @@ -import os +from pathlib import Path +from typing import Dict, List import yaml -def actions_awsfulltest(self): +def actions_awsfulltest(self) -> Dict[str, List[str]]: """Checks the GitHub Actions awsfulltest is valid. In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on full size datasets on AWS. 
@@ -29,8 +30,8 @@ def actions_awsfulltest(self): warned = [] failed = [] - fn = os.path.join(self.wf_path, ".github", "workflows", "awsfulltest.yml") - if os.path.isfile(fn): + fn = Path(self.wf_path, ".github", "workflows", "awsfulltest.yml") + if fn.is_file(): try: with open(fn) as fh: wf = yaml.safe_load(fh) diff --git a/nf_core/pipelines/lint/actions_awstest.py b/nf_core/pipelines/lint/actions_awstest.py index 7c55998944..7e4c0fc497 100644 --- a/nf_core/pipelines/lint/actions_awstest.py +++ b/nf_core/pipelines/lint/actions_awstest.py @@ -1,4 +1,4 @@ -import os +from pathlib import Path import yaml @@ -22,8 +22,8 @@ def actions_awstest(self): * Must be turned on for ``workflow_dispatch``. """ - fn = os.path.join(self.wf_path, ".github", "workflows", "awstest.yml") - if not os.path.isfile(fn): + fn = Path(self.wf_path, ".github", "workflows", "awstest.yml") + if not fn.is_file(): return {"ignored": [f"'awstest.yml' workflow not found: `{fn}`"]} try: diff --git a/nf_core/pipelines/lint/actions_ci.py b/nf_core/pipelines/lint/actions_ci.py index a3e7d54b66..74f433ef80 100644 --- a/nf_core/pipelines/lint/actions_ci.py +++ b/nf_core/pipelines/lint/actions_ci.py @@ -1,4 +1,4 @@ -import os +from pathlib import Path import yaml @@ -40,10 +40,10 @@ def actions_ci(self): """ passed = [] failed = [] - fn = os.path.join(self.wf_path, ".github", "workflows", "ci.yml") + fn = Path(self.wf_path, ".github", "workflows", "ci.yml") # Return an ignored status if we can't find the file - if not os.path.isfile(fn): + if not fn.is_file(): return {"ignored": ["'.github/workflows/ci.yml' not found"]} try: diff --git a/nf_core/pipelines/lint/actions_schema_validation.py b/nf_core/pipelines/lint/actions_schema_validation.py index b4be42b54c..a057d80589 100644 --- a/nf_core/pipelines/lint/actions_schema_validation.py +++ b/nf_core/pipelines/lint/actions_schema_validation.py @@ -1,6 +1,5 @@ -import glob import logging -import os +from pathlib import Path from typing import Any, Dict, List import jsonschema @@ -26,7 +25,7 @@ def actions_schema_validation(self) -> Dict[str, List[str]]: logging.getLogger("nf_core.pipelines.schema").setLevel(logging.ERROR) # Get all workflow files - action_workflows = glob.glob(os.path.join(self.wf_path, ".github/workflows/*.y*ml")) + action_workflows = list(Path(self.wf_path).glob(".github/workflows/*.y*ml")) # Load the GitHub workflow schema r = requests.get("https://json.schemastore.org/github-workflow", allow_redirects=True) @@ -40,7 +39,7 @@ def actions_schema_validation(self) -> Dict[str, List[str]]: # Validate all workflows against the schema for wf_path in action_workflows: - wf = os.path.basename(wf_path) + wf = wf_path.name # load workflow try: diff --git a/nf_core/pipelines/lint/files_unchanged.py b/nf_core/pipelines/lint/files_unchanged.py index bafd8aec78..300b3674b2 100644 --- a/nf_core/pipelines/lint/files_unchanged.py +++ b/nf_core/pipelines/lint/files_unchanged.py @@ -112,7 +112,8 @@ def files_unchanged(self) -> Dict[str, Union[List[str], bool]]: logging.getLogger("nf_core.pipelines.create").setLevel(logging.ERROR) # Generate a new pipeline with nf-core create that we can compare to - tmp_dir = tempfile.mkdtemp() + tmp_dir = Path(tempfile.TemporaryDirectory().name) + tmp_dir.mkdir(parents=True) # Create a template.yaml file for the pipeline creation template_yaml = { @@ -123,10 +124,11 @@ def files_unchanged(self) -> Dict[str, Union[List[str], bool]]: } template_yaml_path = Path(tmp_dir, "template.yaml") + with open(template_yaml_path, "w") as fh: 
yaml.dump(template_yaml, fh, default_flow_style=False) - test_pipeline_dir = os.path.join(tmp_dir, f"{prefix}-{short_name}") + test_pipeline_dir = Path(tmp_dir, f"{prefix}-{short_name}") create_obj = nf_core.pipelines.create.create.PipelineCreate( None, None, None, no_git=True, outdir=test_pipeline_dir, template_config=template_yaml_path ) @@ -141,7 +143,7 @@ def _tf(file_path: Union[str, Path]) -> Path: """Helper function - get file path for template file""" return Path(test_pipeline_dir, file_path) - ignore_files = self.lint_config.get("files_unchanged", []) + ignore_files = self.lint_config.get("files_unchanged", []) if self.lint_config is not None else [] # Files that must be completely unchanged from template for files in files_exact: diff --git a/nf_core/pipelines/lint/merge_markers.py b/nf_core/pipelines/lint/merge_markers.py index d57b63fd19..208c9f4bcc 100644 --- a/nf_core/pipelines/lint/merge_markers.py +++ b/nf_core/pipelines/lint/merge_markers.py @@ -1,6 +1,6 @@ import fnmatch import logging -import os +from pathlib import Path import nf_core.utils @@ -35,36 +35,36 @@ def merge_markers(self): failed = [] ignored = [] - ignored_config = self.lint_config.get("merge_markers", []) + ignored_config = self.lint_config.get("merge_markers", []) if self.lint_config is not None else [] ignore = [".git"] - if os.path.isfile(os.path.join(self.wf_path, ".gitignore")): - with open(os.path.join(self.wf_path, ".gitignore"), encoding="latin1") as fh: + if Path(self.wf_path, ".gitignore").is_file(): + with open(Path(self.wf_path, ".gitignore"), encoding="latin1") as fh: for line in fh: - ignore.append(os.path.basename(line.strip().rstrip("/"))) - for root, dirs, files in os.walk(self.wf_path, topdown=True): + ignore.append(Path(line.strip().rstrip("/")).name) + for root, dirs, files in Path.walk(self.wf_path, top_down=True): # Ignore files for i_base in ignore: - i = os.path.join(root, i_base) - dirs[:] = [d for d in dirs if not fnmatch.fnmatch(os.path.join(root, d), i)] - files[:] = [f for f in files if not fnmatch.fnmatch(os.path.join(root, f), i)] + i = str(Path(root, i_base)) + dirs[:] = [d for d in dirs if not fnmatch.fnmatch(str(Path(root, d)), i)] + files[:] = [f for f in files if not fnmatch.fnmatch(str(Path(root, f)), i)] for fname in files: # File ignored in config - if os.path.relpath(os.path.join(root, fname), self.wf_path) in ignored_config: - ignored.append(f"Ignoring file `{os.path.join(root, fname)}`") + if str(Path(root, fname).relative_to(self.wf_path)) in ignored_config: + ignored.append(f"Ignoring file `{Path(root, fname)}`") continue # Skip binary files - if nf_core.utils.is_file_binary(os.path.join(root, fname)): + if nf_core.utils.is_file_binary(Path(root, fname)): continue try: - with open(os.path.join(root, fname), encoding="latin1") as fh: + with open(Path(root, fname), encoding="latin1") as fh: for line in fh: if ">>>>>>>" in line: - failed.append(f"Merge marker '>>>>>>>' in `{os.path.join(root, fname)}`: {line[:30]}") + failed.append(f"Merge marker '>>>>>>>' in `{Path(root, fname)}`: {line[:30]}") if "<<<<<<<" in line: - failed.append(f"Merge marker '<<<<<<<' in `{os.path.join(root, fname)}`: {line[:30]}") + failed.append(f"Merge marker '<<<<<<<' in `{Path(root, fname)}`: {line[:30]}") except FileNotFoundError: - log.debug(f"Could not open file {os.path.join(root, fname)} in merge_markers lint test") + log.debug(f"Could not open file {Path(root, fname)} in merge_markers lint test") if len(failed) == 0: passed.append("No merge markers found in pipeline files") return 
{"passed": passed, "failed": failed, "ignored": ignored} diff --git a/nf_core/pipelines/lint/modules_structure.py b/nf_core/pipelines/lint/modules_structure.py index 9d9b4c9fc0..fd29942ed6 100644 --- a/nf_core/pipelines/lint/modules_structure.py +++ b/nf_core/pipelines/lint/modules_structure.py @@ -1,5 +1,4 @@ import logging -import os from pathlib import Path log = logging.getLogger(__name__) @@ -20,7 +19,7 @@ def modules_structure(self): modules/nf-core/modules/TOOL/SUBTOOL """ wrong_location_modules = [] - for directory, _, files in os.walk(Path(self.wf_path, "modules")): + for directory, _, files in Path.walk(Path(self.wf_path, "modules")): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.wf_path, "modules")) parts = module_path.parts diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index f62100a70a..96323af94d 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -1,14 +1,14 @@ import logging -import os import re from pathlib import Path +from typing import Dict, List, Optional, Union from nf_core.pipelines.schema import PipelineSchema log = logging.getLogger(__name__) -def nextflow_config(self): +def nextflow_config(self) -> Dict[str, List[str]]: """Checks the pipeline configuration for required variables. All nf-core pipelines are required to be configured with a minimal set of variable @@ -173,7 +173,7 @@ def nextflow_config(self): ] # Remove field that should be ignored according to the linting config - ignore_configs = self.lint_config.get("nextflow_config", []) + ignore_configs = self.lint_config.get("nextflow_config", []) if self.lint_config is not None else [] for cfs in config_fail: for cf in cfs: @@ -205,12 +205,13 @@ def nextflow_config(self): failed.append(f"Config variable (incorrectly) found: {self._wrap_quotes(cf)}") # Check and warn if the process configuration is done with deprecated syntax + process_with_deprecated_syntax = list( set( [ - re.search(r"^(process\.\$.*?)\.+.*$", ck).group(1) + match.group(1) for ck in self.nf_config.keys() - if re.match(r"^(process\.\$.*?)\.+.*$", ck) + if (match := re.match(r"^(process\.\$.*?)\.+.*$", ck)) is not None ] ) ) @@ -313,7 +314,7 @@ def nextflow_config(self): r'System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")', r"}", ] - path = os.path.join(self.wf_path, "nextflow.config") + path = Path(self.wf_path, "nextflow.config") i = 0 with open(path) as f: for line in f: @@ -335,7 +336,7 @@ def nextflow_config(self): ) # Check for the availability of the "test" configuration profile by parsing nextflow.config - with open(os.path.join(self.wf_path, "nextflow.config")) as f: + with open(Path(self.wf_path, "nextflow.config")) as f: content = f.read() # Remove comments @@ -379,8 +380,8 @@ def nextflow_config(self): if param in ignore_defaults: ignored.append(f"Config default ignored: {param}") elif param in self.nf_config.keys(): - config_default = None - schema_default = None + config_default: Optional[Union[str, float, int]] = None + schema_default: Optional[Union[str, float, int]] = None if schema.schema_types[param_name] == "boolean": schema_default = str(schema.schema_defaults[param_name]).lower() config_default = str(self.nf_config[param]).lower() diff --git a/nf_core/pipelines/lint/pipeline_todos.py b/nf_core/pipelines/lint/pipeline_todos.py index ba6ec79150..530d853764 100644 --- a/nf_core/pipelines/lint/pipeline_todos.py +++ 
b/nf_core/pipelines/lint/pipeline_todos.py @@ -1,6 +1,6 @@ import fnmatch import logging -import os +from pathlib import Path log = logging.getLogger(__name__) @@ -39,19 +39,19 @@ def pipeline_todos(self, root_dir=None): root_dir = self.wf_path ignore = [".git"] - if os.path.isfile(os.path.join(root_dir, ".gitignore")): - with open(os.path.join(root_dir, ".gitignore"), encoding="latin1") as fh: + if Path(root_dir, ".gitignore").is_file(): + with open(Path(root_dir, ".gitignore"), encoding="latin1") as fh: for line in fh: - ignore.append(os.path.basename(line.strip().rstrip("/"))) - for root, dirs, files in os.walk(root_dir, topdown=True): + ignore.append(Path(line.strip().rstrip("/")).name) + for root, dirs, files in Path.walk(root_dir, top_down=True): # Ignore files for i_base in ignore: - i = os.path.join(root, i_base) - dirs[:] = [d for d in dirs if not fnmatch.fnmatch(os.path.join(root, d), i)] - files[:] = [f for f in files if not fnmatch.fnmatch(os.path.join(root, f), i)] + i = str(Path(root, i_base)) + dirs[:] = [d for d in dirs if not fnmatch.fnmatch(str(Path(root, d)), i)] + files[:] = [f for f in files if not fnmatch.fnmatch(str(Path(root, f)), i)] for fname in files: try: - with open(os.path.join(root, fname), encoding="latin1") as fh: + with open(Path(root, fname), encoding="latin1") as fh: for line in fh: if "TODO nf-core" in line: line = ( @@ -63,7 +63,7 @@ def pipeline_todos(self, root_dir=None): .strip() ) warned.append(f"TODO string in `{fname}`: _{line}_") - file_paths.append(os.path.join(root, fname)) + file_paths.append(Path(root, fname)) except FileNotFoundError: log.debug(f"Could not open file {fname} in pipeline_todos lint test") diff --git a/nf_core/pipelines/lint/readme.py b/nf_core/pipelines/lint/readme.py index cade9ca3ea..4c16243690 100644 --- a/nf_core/pipelines/lint/readme.py +++ b/nf_core/pipelines/lint/readme.py @@ -1,5 +1,5 @@ -import os import re +from pathlib import Path def readme(self): @@ -29,9 +29,9 @@ def readme(self): failed = [] # Remove field that should be ignored according to the linting config - ignore_configs = self.lint_config.get("readme", []) + ignore_configs = self.lint_config.get("readme", []) if self.lint_config is not None else [] - with open(os.path.join(self.wf_path, "README.md")) as fh: + with open(Path(self.wf_path, "README.md")) as fh: content = fh.read() if "nextflow_badge" not in ignore_configs: From 0a3cb5ffaea2026f55e6291f1f4f1ea73dc3cc4a Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 14:09:35 +0200 Subject: [PATCH 40/89] fix mypy linting --- nf_core/modules/bump_versions.py | 2 +- nf_core/pipelines/create/create.py | 13 +++++++------ tests/pipelines/test_sync.py | 2 +- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 2d8854e3c0..6546cccc9a 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -76,7 +76,7 @@ def bump_versions( ) # Get list of all modules - _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.directory) + _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(Path(self.directory)) # Load the .nf-core.yml config _, self.tools_config = nf_core.utils.load_tools_config(self.directory) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index f032db7f69..be07c0c290 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -8,7 +8,7 @@ import re import shutil from pathlib import 
Path -from typing import Dict, List, Optional, Union +from typing import Dict, List, Optional, Union, cast import git import git.config @@ -21,6 +21,7 @@ from nf_core.pipelines.create.utils import CreateConfig from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file +from nf_core.utils import LintConfigType log = logging.getLogger(__name__) @@ -57,7 +58,7 @@ def __init__( from_config_file: bool = False, default_branch: Optional[str] = None, is_interactive: bool = False, - ): + ) -> None: if isinstance(template_config, CreateConfig): self.config = template_config elif from_config_file: @@ -418,13 +419,13 @@ def remove_nf_core_in_bug_report_template(self): run_prettier_on_file(bug_report_path) - def fix_linting(self): + def fix_linting(self) -> None: """ Updates the .nf-core.yml with linting configurations for a customized pipeline. """ # Create a lint config - short_name = self.jinja_params["short_name"] + short_name: str = self.jinja_params["short_name"] lint_config: Dict[str, List[str]] = { "files_exist": [ "CODE_OF_CONDUCT.md", @@ -512,7 +513,7 @@ def fix_linting(self): # Add the lint content to the preexisting nf-core config config_fn, nf_core_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and nf_core_yml is not None: - nf_core_yml.lint = lint_config + nf_core_yml.lint = cast(LintConfigType, lint_config) with open(self.outdir / config_fn, "w") as fh: yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) @@ -534,7 +535,7 @@ def make_pipeline_logo(self): force=bool(self.force), ) - def git_init_pipeline(self): + def git_init_pipeline(self) -> None: """Initialises the new pipeline as a Git repository and submits first commit. Raises: diff --git a/tests/pipelines/test_sync.py b/tests/pipelines/test_sync.py index b6955e6714..ffbe75510b 100644 --- a/tests/pipelines/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -331,7 +331,7 @@ def test_close_open_template_merge_prs(self, mock_get): @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) @mock.patch("nf_core.utils.gh_api.patch", side_effect=mocked_requests_patch) - def test_close_open_pr(self, mock_patch, mock_post): + def test_close_open_pr(self, mock_patch, mock_post) -> None: psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() From b7b45dfab771a2a84ed05d368719f204133ec8a0 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 14:09:47 +0200 Subject: [PATCH 41/89] fix circular import --- nf_core/__main__.py | 2 +- nf_core/components/components_utils.py | 16 +++++++++++++--- nf_core/components/info.py | 2 +- nf_core/components/install.py | 2 +- nf_core/modules/modules_json.py | 8 ++------ nf_core/modules/modules_repo.py | 6 +----- nf_core/synced_repo.py | 9 ++++----- tests/modules/test_modules_json.py | 4 ++-- tests/modules/test_update.py | 2 +- tests/subworkflows/test_update.py | 2 +- 10 files changed, 27 insertions(+), 26 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index f33c63e87c..b86f8f4bad 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -52,7 +52,7 @@ subworkflows_test, subworkflows_update, ) -from nf_core.modules.modules_repo import NF_CORE_MODULES_REMOTE +from nf_core.components.components_utils import NF_CORE_MODULES_REMOTE from nf_core.pipelines.download import DownloadError from nf_core.utils import check_if_outdated, nfcore_logo, rich_force_colors, setup_nfcore_dir diff --git 
a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index e4e2ff092b..4e9c0ac601 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -1,16 +1,23 @@ import logging import re from pathlib import Path -from typing import List, Optional, Tuple, Union +from typing import TYPE_CHECKING, List, Optional, Tuple, Union import questionary import rich.prompt +if TYPE_CHECKING: + from nf_core.modules.modules_repo import ModulesRepo + import nf_core.utils -from nf_core.modules.modules_repo import ModulesRepo log = logging.getLogger(__name__) +# Constants for the nf-core/modules repo used throughout the module files +NF_CORE_MODULES_NAME = "nf-core" +NF_CORE_MODULES_REMOTE = "https://github.com/nf-core/modules.git" +NF_CORE_MODULES_DEFAULT_BRANCH = "master" + def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[Path, Optional[str], str]: """ @@ -82,7 +89,10 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P def prompt_component_version_sha( - component_name: str, component_type: str, modules_repo: ModulesRepo, installed_sha: Optional[str] = None + component_name: str, + component_type: str, + modules_repo: "ModulesRepo", + installed_sha: Optional[str] = None, ) -> str: """ Creates an interactive questionary prompt for selecting the module/subworkflow version diff --git a/nf_core/components/info.py b/nf_core/components/info.py index a296fcaccf..55a95593f9 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -15,8 +15,8 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.components.components_utils import NF_CORE_MODULES_REMOTE from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_REMOTE log = logging.getLogger(__name__) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 8f47913283..f2849f85bc 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -12,11 +12,11 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand from nf_core.components.components_utils import ( + NF_CORE_MODULES_NAME, get_components_to_install, prompt_component_version_sha, ) from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME log = logging.getLogger(__name__) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 01785c6d65..e9b4aa1025 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -14,12 +14,8 @@ from git.exc import GitCommandError import nf_core.utils -from nf_core.components.components_utils import get_components_to_install -from nf_core.modules.modules_repo import ( - NF_CORE_MODULES_NAME, - NF_CORE_MODULES_REMOTE, - ModulesRepo, -) +from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, get_components_to_install +from nf_core.modules.modules_repo import ModulesRepo from nf_core.pipelines.lint_utils import dump_json_with_prettier from .modules_differ import ModulesDiffer diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index bc92072e3b..5b50205484 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -12,16 +12,12 @@ import nf_core.modules.modules_json import nf_core.modules.modules_utils +from nf_core.components.components_utils import 
NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.synced_repo import RemoteProgressbar, SyncedRepo from nf_core.utils import NFCORE_CACHE_DIR, NFCORE_DIR, load_tools_config log = logging.getLogger(__name__) -# Constants for the nf-core/modules repo used throughout the module files -NF_CORE_MODULES_NAME = "nf-core" -NF_CORE_MODULES_REMOTE = "https://github.com/nf-core/modules.git" -NF_CORE_MODULES_DEFAULT_BRANCH = "master" - class ModulesRepo(SyncedRepo): """ diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 8af0ee9a08..8efdd0e488 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -9,15 +9,14 @@ import git from git.exc import GitCommandError +from nf_core.components.components_utils import ( + NF_CORE_MODULES_NAME, + NF_CORE_MODULES_REMOTE, +) from nf_core.utils import load_tools_config log = logging.getLogger(__name__) -# Constants for the nf-core/modules repo used throughout the module files -NF_CORE_MODULES_NAME = "nf-core" -NF_CORE_MODULES_REMOTE = "https://github.com/nf-core/modules.git" -NF_CORE_MODULES_DEFAULT_BRANCH = "master" - class RemoteProgressbar(git.RemoteProgress): """ diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py index 845f9c3e34..b2cac99e6d 100644 --- a/tests/modules/test_modules_json.py +++ b/tests/modules/test_modules_json.py @@ -3,13 +3,13 @@ import shutil from pathlib import Path -from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import ( +from nf_core.components.components_utils import ( NF_CORE_MODULES_DEFAULT_BRANCH, NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, ModulesRepo, ) +from nf_core.modules.modules_json import ModulesJson from nf_core.modules.patch import ModulePatch from ..test_modules import TestModules diff --git a/tests/modules/test_update.py b/tests/modules/test_update.py index 1f81eab487..6c8eacc666 100644 --- a/tests/modules/test_update.py +++ b/tests/modules/test_update.py @@ -8,9 +8,9 @@ import yaml import nf_core.utils +from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.modules.install import ModuleInstall from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.modules.patch import ModulePatch from nf_core.modules.update import ModuleUpdate diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py index 7b17a621bc..153038cd1d 100644 --- a/tests/subworkflows/test_update.py +++ b/tests/subworkflows/test_update.py @@ -8,8 +8,8 @@ import yaml import nf_core.utils +from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.modules.update import ModuleUpdate from nf_core.subworkflows.update import SubworkflowUpdate From c6227a7cac8a909e54a46423a9626159ec6589b8 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 14:17:18 +0200 Subject: [PATCH 42/89] fix module command imports --- nf_core/commands_modules.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/nf_core/commands_modules.py b/nf_core/commands_modules.py index 3d96d332b0..b93bd7bcb0 100644 --- a/nf_core/commands_modules.py +++ b/nf_core/commands_modules.py @@ -13,7 +13,7 @@ def modules_list_remote(ctx, keywords, json): """ List modules in a remote GitHub repo [dim i](e.g 
[link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. """ - from nf_core.modules import ModuleList + from nf_core.modules.list import ModuleList try: module_list = ModuleList( @@ -33,7 +33,7 @@ def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-b """ List modules installed locally in a pipeline """ - from nf_core.modules import ModuleList + from nf_core.modules.list import ModuleList try: module_list = ModuleList( @@ -55,7 +55,7 @@ def modules_install(ctx, tool, dir, prompt, force, sha): Fetches and installs module files from a remote repo e.g. nf-core/modules. """ - from nf_core.modules import ModuleInstall + from nf_core.modules.install import ModuleInstall try: module_install = ModuleInstall( @@ -93,7 +93,7 @@ def modules_update( Fetches and updates module files from a remote repo e.g. nf-core/modules. """ - from nf_core.modules import ModuleUpdate + from nf_core.modules.update import ModuleUpdate try: module_install = ModuleUpdate( @@ -125,7 +125,7 @@ def modules_patch(ctx, tool, dir, remove): Checks if a module has been modified locally and creates a patch file describing how the module has changed from the remote version """ - from nf_core.modules import ModulePatch + from nf_core.modules.patch import ModulePatch try: module_patch = ModulePatch( @@ -147,7 +147,7 @@ def modules_remove(ctx, dir, tool): """ Remove a module from a pipeline. """ - from nf_core.modules import ModuleRemove + from nf_core.modules.remove import ModuleRemove try: module_remove = ModuleRemove( @@ -194,7 +194,7 @@ def modules_create( elif no_meta: has_meta = False - from nf_core.modules import ModuleCreate + from nf_core.modules.create import ModuleCreate # Run function try: @@ -257,7 +257,7 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, nf-core/modules repository. """ from nf_core.components.lint import LintExceptionError - from nf_core.modules import ModuleLint + from nf_core.modules.lint import ModuleLint try: module_lint = ModuleLint( @@ -302,7 +302,7 @@ def modules_info(ctx, tool, dir): will print this usage info. If not, usage from the remote modules repo will be shown. """ - from nf_core.modules import ModuleInfo + from nf_core.modules.info import ModuleInfo try: module_info = ModuleInfo( From 847c683d22a44cb20129afb4b63bf94d529badb9 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 14:20:10 +0200 Subject: [PATCH 43/89] fix: cannot import name 'NotRequired' --- requirements-dev.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements-dev.txt b/requirements-dev.txt index 82087edcba..aa43ee3fe3 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -14,6 +14,7 @@ types-Markdown types-PyYAML types-requests types-setuptools +typing_extensions >=4.0.0 pytest-asyncio pytest-textual-snapshot==0.4.0 pytest-workflow>=2.0.0 From e7ac7812422e1d76fc67898c72058b6decb23322 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 16:12:12 +0200 Subject: [PATCH 44/89] more dir-> directory conversions --- nf_core/__main__.py | 135 ++++++++++++++++++++-------------- nf_core/commands_pipelines.py | 29 ++++---- 2 files changed, 95 insertions(+), 69 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index b86f8f4bad..91af979586 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -126,7 +126,7 @@ # because they are actually preliminary, but intended program terminations. 
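The hunk below widens this hook so that UserWarning is also treated as an expected termination. Stripped of the rich-traceback fallback, the mechanism is essentially the following self-contained sketch (simplified: the real hook also whitelists DownloadError and prints a colored traceback in the else branch, and is_pipeline_directory here is a hypothetical stand-in for the real check in nf_core.utils):

import logging
import sys

log = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)

def selective_traceback_hook(exctype, value, traceback):
    if exctype in {UserWarning}:  # expected, user-facing terminations
        log.error(value)          # one clean log line, no stack trace
    else:
        sys.__excepthook__(exctype, value, traceback)  # real bugs keep the traceback

sys.excepthook = selective_traceback_hook

def is_pipeline_directory(directory: str) -> None:
    # Hypothetical stand-in: the real check inspects main.nf / nextflow.config.
    raise UserWarning(f"'{directory}' does not look like an nf-core pipeline")

is_pipeline_directory(".")  # exits with status 1 after a single ERROR line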
# (Custom exceptions are cleaner than `sys.exit(1)`, which we used before) def selective_traceback_hook(exctype, value, traceback): - if exctype in {DownloadError}: # extend set as needed + if exctype in {DownloadError, UserWarning}: # extend set as needed log.error(value) else: # print the colored traceback for all other exceptions with rich as usual @@ -278,6 +278,7 @@ def command_pipelines_create(ctx, name, description, author, version, force, out @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory [dim]\[default: current working directory][/]", @@ -331,7 +332,7 @@ def command_pipelines_create(ctx, name, description, author, version, force, out @click.pass_context def command_pipelines_lint( ctx, - dir, + directory, release, fix, key, @@ -345,7 +346,7 @@ def command_pipelines_lint( """ Check pipeline code against nf-core guidelines. """ - pipelines_lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by) + pipelines_lint(ctx, directory, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by) # nf-core pipelines download @@ -584,6 +585,7 @@ def command_pipelines_list(ctx, keywords, sort, json, show_archived): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -610,11 +612,13 @@ def command_pipelines_list(ctx, keywords, sort, json, show_archived): @click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -def command_pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +def command_pipelines_sync( + ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr +): """ Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. """ - pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) + pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr) # nf-core pipelines bump-version @@ -624,6 +628,7 @@ def command_pipelines_sync(ctx, dir, from_branch, pull_request, github_repositor @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -635,11 +640,11 @@ def command_pipelines_sync(ctx, dir, from_branch, pull_request, github_repositor default=False, help="Bump required nextflow version instead of pipeline version", ) -def command_pipelines_bump_version(ctx, new_version, dir, nextflow): +def command_pipelines_bump_version(ctx, new_version, directory, nextflow): """ Update nf-core pipeline version number with `nf-core pipelines bump-version`. 
""" - pipelines_bump_version(ctx, new_version, dir, nextflow) + pipelines_bump_version(ctx, new_version, directory, nextflow) # nf-core pipelines create-logo @@ -680,11 +685,11 @@ def command_pipelines_bump_version(ctx, new_version, dir, nextflow): default=False, help="Overwrite any files if they already exist", ) -def command_pipelines_create_logo(logo_text, dir, name, theme, width, format, force): +def command_pipelines_create_logo(logo_text, directory, name, theme, width, format, force): """ Generate a logo with the nf-core logo template. """ - pipelines_create_logo(logo_text, dir, name, theme, width, format, force) + pipelines_create_logo(logo_text, directory, name, theme, width, format, force) # nf-core pipelines schema subcommands @@ -715,6 +720,7 @@ def command_pipelines_schema_validate(pipeline, params): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -735,11 +741,11 @@ def command_pipelines_schema_validate(pipeline, params): default="https://nf-co.re/pipeline_schema_builder", help="Customise the builder URL (for development work)", ) -def command_pipelines_schema_build(dir, no_prompts, web_only, url): +def command_pipelines_schema_build(directory, no_prompts, web_only, url): """ Interactively build a pipeline schema from Nextflow params. """ - pipelines_schema_build(dir, no_prompts, web_only, url) + pipelines_schema_build(directory, no_prompts, web_only, url) # nf-core pipelines schema lint @@ -864,15 +870,16 @@ def command_modules_list_remote(ctx, keywords, json): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def command_modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin +def command_modules_list_local(ctx, keywords, json, directory): # pylint: disable=redefined-builtin """ List modules installed locally in a pipeline """ - modules_list_local(ctx, keywords, json, dir) + modules_list_local(ctx, keywords, json, directory) # nf-core modules install @@ -882,6 +889,7 @@ def command_modules_list_local(ctx, keywords, json, dir): # pylint: disable=red @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -901,11 +909,11 @@ def command_modules_list_local(ctx, keywords, json, dir): # pylint: disable=red help="Force reinstallation of module if it already exists", ) @click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA") -def command_modules_install(ctx, tool, dir, prompt, force, sha): +def command_modules_install(ctx, tool, directory, prompt, force, sha): """ Install DSL2 modules within a pipeline. """ - modules_install(ctx, tool, dir, prompt, force, sha) + modules_install(ctx, tool, directory, prompt, force, sha) # nf-core modules update @@ -992,16 +1000,17 @@ def command_modules_update( @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", ) @click.option("-r", "--remove", is_flag=True, default=False) -def command_modules_patch(ctx, tool, dir, remove): +def command_modules_patch(ctx, tool, directory, remove): """ Create a patch file for minor changes in a module """ - modules_patch(ctx, tool, dir, remove) + modules_patch(ctx, tool, directory, remove) # nf-core modules remove @@ -1011,15 +1020,16 @@ def command_modules_patch(ctx, tool, dir, remove): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -def command_modules_remove(ctx, dir, tool): +def command_modules_remove(ctx, directory, tool): """ Remove a module from a pipeline. """ - modules_remove(ctx, dir, tool) + modules_remove(ctx, directory, tool) # nf-core modules create @@ -1092,7 +1102,7 @@ def command_modules_remove(ctx, dir, tool): def command_modules_create( ctx, tool, - dir, + directory, author, label, meta, @@ -1109,7 +1119,7 @@ def command_modules_create( modules_create( ctx, tool, - dir, + directory, author, label, meta, @@ -1129,6 +1139,7 @@ def command_modules_create( @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", @@ -1154,11 +1165,11 @@ def command_modules_create( default=None, help="Run tests with a specific profile", ) -def command_modules_test(ctx, tool, dir, no_prompts, update, once, profile): +def command_modules_test(ctx, tool, directory, no_prompts, update, once, profile): """ Run nf-test for a module. """ - modules_test(ctx, tool, dir, no_prompts, update, once, profile) + modules_test(ctx, tool, directory, no_prompts, update, once, profile) # nf-core modules lint @@ -1168,6 +1179,7 @@ def command_modules_test(ctx, tool, dir, no_prompts, update, once, profile): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", @@ -1204,11 +1216,11 @@ def command_modules_test(ctx, tool, dir, no_prompts, update, once, profile): is_flag=True, help="Fix the module version if a newer version is available", ) -def command_modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version): +def command_modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version): """ Lint one or more modules in a directory. """ - modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version) + modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version) # nf-core modules info @@ -1218,15 +1230,16 @@ def command_modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def command_modules_info(ctx, tool, dir): +def command_modules_info(ctx, tool, directory): """ Show developer usage information about a given module. 
""" - modules_info(ctx, tool, dir) + modules_info(ctx, tool, directory) # nf-core modules bump-versions @@ -1236,18 +1249,19 @@ def command_modules_info(ctx, tool, dir): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", ) @click.option("-a", "--all", is_flag=True, help="Run on all modules") @click.option("-s", "--show-all", is_flag=True, help="Show up-to-date modules in results too") -def command_modules_bump_versions(ctx, tool, dir, all, show_all): +def command_modules_bump_versions(ctx, tool, directory, all, show_all): """ Bump versions for one or more modules in a clone of the nf-core/modules repo. """ - modules_bump_versions(ctx, tool, dir, all, show_all) + modules_bump_versions(ctx, tool, directory, all, show_all) # nf-core subworkflows click command @@ -1313,11 +1327,11 @@ def subworkflows(ctx, git_remote, branch, no_pull): default=False, help="Migrate a module with pytest tests to nf-test", ) -def command_subworkflows_create(ctx, subworkflow, dir, author, force, migrate_pytest): +def command_subworkflows_create(ctx, subworkflow, directory, author, force, migrate_pytest): """ Create a new subworkflow from the nf-core template. """ - subworkflows_create(ctx, subworkflow, dir, author, force, migrate_pytest) + subworkflows_create(ctx, subworkflow, directory, author, force, migrate_pytest) # nf-core subworkflows test @@ -1327,6 +1341,7 @@ def command_subworkflows_create(ctx, subworkflow, dir, author, force, migrate_py @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", @@ -1352,11 +1367,11 @@ def command_subworkflows_create(ctx, subworkflow, dir, author, force, migrate_py default=None, help="Run tests with a specific profile", ) -def command_subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile): +def command_subworkflows_test(ctx, subworkflow, directory, no_prompts, update, once, profile): """ Run nf-test for a subworkflow. """ - subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile) + subworkflows_test(ctx, subworkflow, directory, no_prompts, update, once, profile) # nf-core subworkflows list subcommands @@ -1389,15 +1404,16 @@ def command_subworkflows_list_remote(ctx, keywords, json): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def command_subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin +def command_subworkflows_list_local(ctx, keywords, json, directory): # pylint: disable=redefined-builtin """ List subworkflows installed locally in a pipeline """ - subworkflows_list_local(ctx, keywords, json, dir) + subworkflows_list_local(ctx, keywords, json, directory) # nf-core subworkflows lint @@ -1407,6 +1423,7 @@ def command_subworkflows_list_local(ctx, keywords, json, dir): # pylint: disabl @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", @@ -1438,11 +1455,11 @@ def command_subworkflows_list_local(ctx, keywords, json, dir): # pylint: disabl help="Sort lint output by subworkflow or test name.", show_default=True, ) -def command_subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by): +def command_subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by): """ Lint one or more subworkflows in a directory. 
""" - subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by) + subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by) # nf-core subworkflows info @@ -1452,15 +1469,16 @@ def command_subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_wa @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def command_subworkflows_info(ctx, subworkflow, dir): +def command_subworkflows_info(ctx, subworkflow, directory): """ Show developer usage information about a given subworkflow. """ - subworkflows_info(ctx, subworkflow, dir) + subworkflows_info(ctx, subworkflow, directory) # nf-core subworkflows install @@ -1470,6 +1488,7 @@ def command_subworkflows_info(ctx, subworkflow, dir): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -1495,11 +1514,11 @@ def command_subworkflows_info(ctx, subworkflow, dir): metavar="", help="Install subworkflow at commit SHA", ) -def command_subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): +def command_subworkflows_install(ctx, subworkflow, directory, prompt, force, sha): """ Install DSL2 subworkflow within a pipeline. """ - subworkflows_install(ctx, subworkflow, dir, prompt, force, sha) + subworkflows_install(ctx, subworkflow, directory, prompt, force, sha) # nf-core subworkflows remove @@ -1509,15 +1528,16 @@ def command_subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -def command_subworkflows_remove(ctx, dir, subworkflow): +def command_subworkflows_remove(ctx, directory, subworkflow): """ Remove a subworkflow from a pipeline. """ - subworkflows_remove(ctx, dir, subworkflow) + subworkflows_remove(ctx, directory, subworkflow) # nf-core subworkflows update @@ -1527,6 +1547,7 @@ def command_subworkflows_remove(ctx, dir, subworkflow): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -1587,7 +1608,7 @@ def command_subworkflows_remove(ctx, dir, subworkflow): def command_subworkflows_update( ctx, subworkflow, - dir, + directory, force, prompt, sha, @@ -1601,7 +1622,7 @@ def command_subworkflows_update( Update DSL2 subworkflow within a pipeline. """ subworkflows_update( - ctx, subworkflow, dir, force, prompt, sha, install_all, preview, save_diff, update_deps, limit_output + ctx, subworkflow, directory, force, prompt, sha, install_all, preview, save_diff, update_deps, limit_output ) @@ -1636,6 +1657,7 @@ def command_schema_validate(pipeline, params): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -1656,14 +1678,14 @@ def command_schema_validate(pipeline, params): default="https://nf-co.re/pipeline_schema_builder", help="Customise the builder URL (for development work)", ) -def command_schema_build(dir, no_prompts, web_only, url): +def command_schema_build(directory, no_prompts, web_only, url): """ Use `nf-core pipelines schema build` instead. """ log.warning( "The `[magenta]nf-core schema build[/]` command is deprecated. 
Use `[magenta]nf-core pipelines schema build[/]` instead." ) - pipelines_schema_build(dir, no_prompts, web_only, url) + pipelines_schema_build(directory, no_prompts, web_only, url) # nf-core schema lint (deprecated) @@ -1764,14 +1786,14 @@ def command_schema_docs(schema_path, output, format, force, columns): default=False, help="Overwrite any files if they already exist", ) -def command_create_logo(logo_text, dir, name, theme, width, format, force): +def command_create_logo(logo_text, directory, name, theme, width, format, force): """ Use `nf-core pipelines create-logo` instead. """ log.warning( "The `[magenta]nf-core create-logo[/]` command is deprecated. Use `[magenta]nf-core pipelines screate-logo[/]` instead." ) - pipelines_create_logo(logo_text, dir, name, theme, width, format, force) + pipelines_create_logo(logo_text, directory, name, theme, width, format, force) # nf-core sync (deprecated) @@ -1779,6 +1801,7 @@ def command_create_logo(logo_text, dir, name, theme, width, format, force): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -1805,14 +1828,14 @@ def command_create_logo(logo_text, dir, name, theme, width, format, force): @click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -def command_sync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +def command_sync(directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ Use `nf-core pipelines sync` instead. """ log.warning( "The `[magenta]nf-core sync[/]` command is deprecated. Use `[magenta]nf-core pipelines sync[/]` instead." ) - pipelines_sync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) + pipelines_sync(directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr) # nf-core bump-version (deprecated) @@ -1822,6 +1845,7 @@ def command_sync(dir, from_branch, pull_request, github_repository, username, te @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -1833,14 +1857,14 @@ def command_sync(dir, from_branch, pull_request, github_repository, username, te default=False, help="Bump required nextflow version instead of pipeline version", ) -def command_bump_version(ctx, new_version, dir, nextflow): +def command_bump_version(ctx, new_version, directory, nextflow): """ Use `nf-core pipelines bump-version` instead. """ log.warning( "The `[magenta]nf-core bump-version[/]` command is deprecated. Use `[magenta]nf-core pipelines bump-version[/]` instead." ) - pipelines_bump_version(ctx, new_version, dir, nextflow) + pipelines_bump_version(ctx, new_version, directory, nextflow) # nf-core list (deprecated) @@ -2086,6 +2110,7 @@ def command_download( @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory [dim]\[default: current working directory][/]", @@ -2139,7 +2164,7 @@ def command_download( @click.pass_context def command_lint( ctx, - dir, + directory, release, fix, key, @@ -2156,7 +2181,7 @@ def command_lint( log.warning( "The `[magenta]nf-core lint[/]` command is deprecated. 
Use `[magenta]nf-core pipelines lint[/]` instead." ) - pipelines_lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by) + pipelines_lint(ctx, directory, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by) # nf-core create (deprecated) diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index 432a36aaee..c586534b22 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -60,7 +60,7 @@ def pipelines_create(ctx, name, description, author, version, force, outdir, tem # nf-core pipelines bump-version -def pipelines_bump_version(ctx, new_version, dir, nextflow): +def pipelines_bump_version(ctx, new_version, directory, nextflow): """ Update nf-core pipeline version number. @@ -78,10 +78,10 @@ def pipelines_bump_version(ctx, new_version, dir, nextflow): try: # Check if pipeline directory contains necessary files - is_pipeline_directory(dir) + is_pipeline_directory(directory) # Make a pipeline object and load config etc - pipeline_obj = Pipeline(dir) + pipeline_obj = Pipeline(directory) pipeline_obj._load() # Bump the pipeline version number @@ -97,7 +97,7 @@ def pipelines_bump_version(ctx, new_version, dir, nextflow): # nf-core pipelines lint def pipelines_lint( ctx, - dir, + directory, release, fix, key, @@ -123,7 +123,7 @@ def pipelines_lint( # Check if pipeline directory is a pipeline try: - is_pipeline_directory(dir) + is_pipeline_directory(directory) except UserWarning as e: log.error(e) sys.exit(1) @@ -131,7 +131,7 @@ def pipelines_lint( # Run the lint tests! try: lint_obj, module_lint_obj, subworkflow_lint_obj = run_linting( - dir, + directory, release, fix, key, @@ -279,7 +279,7 @@ def pipelines_list(ctx, keywords, sort, json, show_archived): # nf-core pipelines sync -def pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +def pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. @@ -295,12 +295,13 @@ def pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, usern from nf_core.pipelines.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError from nf_core.utils import is_pipeline_directory - # Check if pipeline directory contains necessary files - is_pipeline_directory(dir) - - # Sync the given pipeline dir - sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) try: + # Check if pipeline directory contains necessary files + is_pipeline_directory(directory) + # Sync the given pipeline dir + sync_obj = PipelineSync( + directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr + ) sync_obj.sync() except (SyncExceptionError, PullRequestExceptionError) as e: log.error(e) @@ -360,7 +361,7 @@ def pipelines_schema_validate(pipeline, params): # nf-core pipelines schema build -def pipelines_schema_build(dir, no_prompts, web_only, url): +def pipelines_schema_build(directory, no_prompts, web_only, url): """ Interactively build a pipeline schema from Nextflow params. 
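The directory parameters threaded through these command functions are the receiving end of the click-level change applied throughout __main__.py above: passing an extra bare string to click.option overrides the Python parameter name, so the CLI flag stays --dir while the callback receives directory and no longer shadows the dir builtin. A minimal sketch of the pattern (hypothetical command, not taken from this diff):

import click

@click.command()
@click.option(
    "-d",
    "--dir",
    "directory",  # the bare string sets the callback parameter name
    type=click.Path(exists=True),
    default=".",
)
def lint(directory):
    """Lint a pipeline directory."""
    click.echo(f"Linting {directory}")

if __name__ == "__main__":
    lint()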
@@ -376,7 +377,7 @@ def pipelines_schema_build(dir, no_prompts, web_only, url): try: schema_obj = PipelineSchema() - if schema_obj.build_schema(dir, no_prompts, web_only, url) is False: + if schema_obj.build_schema(directory, no_prompts, web_only, url) is False: sys.exit(1) except (UserWarning, AssertionError) as e: log.error(e) From 5a8ae1240e646413d3c483cb8d67f8d1581495cb Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 16:18:50 +0200 Subject: [PATCH 45/89] fix pydantic warnings --- nf_core/pipelines/create/create.py | 28 ++++++++++++---------------- nf_core/pipelines/sync.py | 2 +- nf_core/utils.py | 8 ++++++-- 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index be07c0c290..a2df9aeefc 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -21,7 +21,7 @@ from nf_core.pipelines.create.utils import CreateConfig from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file -from nf_core.utils import LintConfigType +from nf_core.utils import LintConfigType, NFCoreTemplateConfig log = logging.getLogger(__name__) @@ -111,7 +111,7 @@ def __init__( self.is_interactive = is_interactive self.force = self.config.force if self.config.outdir is None: - self.config.outdir = os.getcwd() + self.config.outdir = str(Path.cwd()) if self.config.outdir == ".": self.outdir = Path(self.config.outdir, self.jinja_params["name_noslash"]).absolute() else: @@ -289,13 +289,13 @@ def render_template(self): log.info("Use -f / --force to overwrite existing files") raise UserWarning(f"Output directory '{self.outdir}' exists!") else: - os.makedirs(self.outdir) + self.outdir.mkdir(parents=True, exist_ok=True) # Run jinja2 for each file in the template folder env = jinja2.Environment( loader=jinja2.PackageLoader("nf_core", "pipeline-template"), keep_trailing_newline=True ) - template_dir = os.path.join(os.path.dirname(nf_core.__file__), "pipeline-template") + template_dir = Path(nf_core.__file__).parent / "pipeline-template" object_attrs = self.jinja_params object_attrs["nf_core_version"] = nf_core.__version__ @@ -310,26 +310,24 @@ def render_template(self): } # Set the paths to skip according to customization - for template_fn_path_obj in template_files: - template_fn_path = str(template_fn_path_obj) - + for template_fn_path in template_files: # Skip files that are in the self.skip_paths list for skip_path in self.skip_paths: - if os.path.relpath(template_fn_path, template_dir).startswith(skip_path): + if str(template_fn_path.relative_to(template_dir)).startswith(skip_path): break else: - if os.path.isdir(template_fn_path): + if template_fn_path.is_dir(): continue - if any([s in template_fn_path for s in ignore_strs]): + if any([s in str(template_fn_path) for s in ignore_strs]): log.debug(f"Ignoring '{template_fn_path}' in jinja2 template creation") continue # Set up vars and directories - template_fn = os.path.relpath(template_fn_path, template_dir) + template_fn = template_fn_path.relative_to(template_dir) output_path = self.outdir / template_fn if template_fn in rename_files: output_path = self.outdir / rename_files[template_fn] - os.makedirs(os.path.dirname(output_path), exist_ok=True) + output_path.parent.mkdir(parents=True, exist_ok=True) try: # Just copy binary files @@ -338,7 +336,7 @@ def render_template(self): # Got this far - render the template log.debug(f"Rendering template file: '{template_fn}'") - j_template = 
env.get_template(template_fn) + j_template = env.get_template(str(template_fn)) rendered_output = j_template.render(object_attrs) # Write to the pipeline output file @@ -379,9 +377,7 @@ def render_template(self): config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and config_yml is not None: with open(str(config_fn), "w") as fh: - config_yml.template = self.config.model_dump() - # convert posix path to string for yaml dump - config_yml["template"]["outdir"] = str(config_yml["template"]["outdir"]) + config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) yaml.safe_dump(config_yml.model_dump(), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") run_prettier_on_file(self.outdir / config_fn) diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index b1da99a626..d044be84e4 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -274,7 +274,7 @@ def make_template_pipeline(self): version=self.wf_config["manifest.version"].strip('"').strip("'"), no_git=True, force=True, - outdir=self.pipeline_dir, + outdir=str(self.pipeline_dir), author=self.wf_config["manifest.author"].strip('"').strip("'"), ).init_pipeline() except Exception as err: diff --git a/nf_core/utils.py b/nf_core/utils.py index 6794cf04fe..0beb8c7366 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1052,6 +1052,12 @@ class NFCoreTemplateConfig(BaseModel): skip_features: Optional[list] = None is_nfcore: Optional[bool] = None + def __getitem__(self, item: str) -> Any: + return getattr(self, item) + + def get(self, item: str, default: Any = None) -> Any: + return getattr(self, item, default) + LintConfigType = Optional[Dict[str, Union[List[str], List[Dict[str, List[str]]], bool]]] @@ -1086,7 +1092,6 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] tools_config = {} config_fn = get_first_available_path(directory, CONFIG_PATHS) - if config_fn is None: depr_path = get_first_available_path(directory, DEPRECATED_CONFIG_PATHS) if depr_path: @@ -1102,7 +1107,6 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] # If the file is empty if tools_config is None: raise AssertionError(f"Config file '{config_fn}' is empty") - # Check for required fields try: nf_core_yaml_config = NFCoreYamlConfig(**tools_config) From ac1ea23974df7182c02a2619a2363010590c1b23 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 16:30:53 +0200 Subject: [PATCH 46/89] fix notrequired not found --- nf_core/modules/modules_json.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index e9b4aa1025..f992271741 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -6,12 +6,13 @@ import shutil import tempfile from pathlib import Path -from typing import Dict, List, NotRequired, Optional, Tuple, TypedDict, Union +from typing import Dict, List, Optional, Tuple, Union import git import questionary import rich.prompt from git.exc import GitCommandError +from typing_extensions import NotRequired, TypedDict # for py<3.11 import nf_core.utils from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, get_components_to_install From 79727c0b430f495468011233ec3b761d275a7be7 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 17:28:53 +0200 Subject: [PATCH 47/89] fix pydantic type --- nf_core/pipelines/create/create.py | 1 + 
1 file changed, 1 insertion(+) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index a2df9aeefc..127b1f607a 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -378,6 +378,7 @@ def render_template(self): if config_fn is not None and config_yml is not None: with open(str(config_fn), "w") as fh: config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) + config_yml.template.outdir = str(config_yml.template.outdir) yaml.safe_dump(config_yml.model_dump(), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") run_prettier_on_file(self.outdir / config_fn) From ee67c5fdd7f764f1f16f10f28e75f6f1f45941f5 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 17:31:34 +0200 Subject: [PATCH 48/89] fix before model creation --- nf_core/pipelines/create/create.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 127b1f607a..acbe7efd5c 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -377,8 +377,9 @@ def render_template(self): config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and config_yml is not None: with open(str(config_fn), "w") as fh: + self.config.outdir = str(self.config.outdir) config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) - config_yml.template.outdir = str(config_yml.template.outdir) + yaml.safe_dump(config_yml.model_dump(), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") run_prettier_on_file(self.outdir / config_fn) From dae348e4de483f5b24f92bcc41ddb1b843f688bf Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 17:33:33 +0200 Subject: [PATCH 49/89] fix modules repo import error --- tests/modules/test_modules_json.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py index b2cac99e6d..0368c146c4 100644 --- a/tests/modules/test_modules_json.py +++ b/tests/modules/test_modules_json.py @@ -7,9 +7,9 @@ NF_CORE_MODULES_DEFAULT_BRANCH, NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, - ModulesRepo, ) from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_repo import ModulesRepo from nf_core.modules.patch import ModulePatch from ..test_modules import TestModules From b53a4b82436f0313fa5c0896e9edd869c3043084 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 09:30:22 +0200 Subject: [PATCH 50/89] fix incorrect type name --- nf_core/modules/modules_differ.py | 6 +++--- nf_core/modules/modules_json.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index 36d927f084..e310b3bf8b 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -4,7 +4,7 @@ import logging import os from pathlib import Path -from typing import List, Union +from typing import Dict, List, Union from rich.console import Console from rich.syntax import Syntax @@ -296,7 +296,7 @@ def print_diff( console.print(Syntax("".join(diff), "diff", theme="ansi_dark", padding=1)) @staticmethod - def per_file_patch(patch_fn: Union[str, Path]) -> dict[str, List[str]]: + def per_file_patch(patch_fn: Union[str, Path]) -> Dict[str, List[str]]: """ Splits a patch file for several files into one patch per file. 
@@ -453,7 +453,7 @@ def try_apply_single_patch(file_lines, patch, reverse=False): @staticmethod def try_apply_patch( module: str, repo_path: Union[str, Path], patch_path: Union[str, Path], module_dir: Path, reverse: bool = False - ) -> dict[str, List[str]]: + ) -> Dict[str, List[str]]: """ Try applying a full patch file to a module diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index f992271741..4f3578b02b 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1000,7 +1000,7 @@ def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir): .get("git_sha", None) ) - def get_all_components(self, component_type: str) -> dict[str, List[Tuple[(str, str)]]]: + def get_all_components(self, component_type: str) -> Dict[str, List[Tuple[(str, str)]]]: """ Retrieves all pipeline modules/subworkflows that are reported in the modules.json From bdbdd17a24c8aee70339f22d0bc0994c2f65fe4e Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 11:50:17 +0200 Subject: [PATCH 51/89] fix types --- nf_core/pipelines/create/create.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index acbe7efd5c..5e57d27c3b 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -81,8 +81,11 @@ def __init__( else: raise UserWarning("The template configuration was not provided.") + if self.config.outdir is None: + self.config.outdir = str(Path.cwd()) + self.jinja_params, skip_paths = self.obtain_jinja_params_dict( - self.config.skip_features or [], self.config.outdir + self.config.skip_features or [], Path(self.config.outdir) ) skippable_paths = { @@ -110,8 +113,7 @@ def __init__( self.default_branch = default_branch self.is_interactive = is_interactive self.force = self.config.force - if self.config.outdir is None: - self.config.outdir = str(Path.cwd()) + if self.config.outdir == ".": self.outdir = Path(self.config.outdir, self.jinja_params["name_noslash"]).absolute() else: @@ -184,7 +186,7 @@ def update_config(self, organisation, version, force, outdir): if self.config.is_nfcore is None: self.config.is_nfcore = self.config.org == "nf-core" - def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): + def obtain_jinja_params_dict(self, features_to_skip: List[str], pipeline_dir: Union[str, Path]): """Creates a dictionary of parameters for the new pipeline. 
Args: @@ -379,7 +381,6 @@ def render_template(self): with open(str(config_fn), "w") as fh: self.config.outdir = str(self.config.outdir) config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) - yaml.safe_dump(config_yml.model_dump(), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") run_prettier_on_file(self.outdir / config_fn) From 98c3fb0178a4548fcab1db5055cedd1586f0c915 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 11:54:07 +0200 Subject: [PATCH 52/89] fix type definition for older python version --- nf_core/modules/modules_json.py | 2 +- nf_core/pipelines/lint/modules_json.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 4f3578b02b..9f17add6ae 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -115,7 +115,7 @@ def create(self) -> None: self.dump() def get_component_names_from_repo( - self, repos: Dict[str, Dict[str, Dict[str, Dict[str, Dict[str, str | List[str]]]]]], directory: Path + self, repos: Dict[str, Dict[str, Dict[str, Dict[str, Dict[str, Union[str, List[str]]]]]]], directory: Path ) -> List[Tuple[str, List[str], str]]: """ Get component names from repositories in a pipeline. diff --git a/nf_core/pipelines/lint/modules_json.py b/nf_core/pipelines/lint/modules_json.py index 5ce2054036..2b7c266848 100644 --- a/nf_core/pipelines/lint/modules_json.py +++ b/nf_core/pipelines/lint/modules_json.py @@ -1,5 +1,5 @@ from pathlib import Path -from typing import Dict, List +from typing import Dict, List, Union from nf_core.modules.modules_json import ModulesJson, ModulesJsonType @@ -19,7 +19,7 @@ def modules_json(self) -> Dict[str, List[str]]: # Load pipeline modules and modules.json _modules_json = ModulesJson(self.wf_path) _modules_json.load() - modules_json_dict: ModulesJsonType | None = _modules_json.modules_json + modules_json_dict: Union[ModulesJsonType, None] = _modules_json.modules_json modules_dir = Path(self.wf_path, "modules") if _modules_json and modules_json_dict is not None: From 7b883eb1f01bac9249428a3821c9a4454dc715ca Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 12:11:41 +0200 Subject: [PATCH 53/89] fix incorrect types and missing cli option names --- nf_core/__main__.py | 6 +++--- nf_core/pipelines/create/create.py | 9 +++++---- nf_core/pipelines/sync.py | 21 +++++++++++---------- 3 files changed, 19 insertions(+), 17 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 91af979586..d0aad63e80 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1036,7 +1036,7 @@ def command_modules_remove(ctx, directory, tool): @modules.command("create") @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") -@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") +@click.option("-d", "--dir", "directory", type=click.Path(exists=True), default=".", metavar="") @click.option( "-a", "--author", @@ -1306,7 +1306,7 @@ def subworkflows(ctx, git_remote, branch, no_pull): @subworkflows.command("create") @click.pass_context @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name") -@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") +@click.option("-d", "--dir", "directory", type=click.Path(exists=True), default=".", metavar="") @click.option( "-a", "--author", @@ -1751,7 +1751,7 @@ def command_schema_docs(schema_path, output, format, 
force, columns): # nf-core create-logo (deprecated) @nf_core_cli.command("create-logo", deprecated=True, hidden=True) @click.argument("logo-text", metavar="") -@click.option("-d", "--dir", type=click.Path(), default=".", help="Directory to save the logo in.") +@click.option("-d", "--dir", "directory", type=click.Path(), default=".", help="Directory to save the logo in.") @click.option( "-n", "--name", diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 5e57d27c3b..15eed46dce 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -278,7 +278,7 @@ def init_pipeline(self): "https://nf-co.re/docs/tutorials/adding_a_pipeline/overview#join-the-community[/link]" ) - def render_template(self): + def render_template(self) -> None: """Runs Jinja to create a new nf-core pipeline.""" log.info(f"Creating new pipeline: '{self.name}'") @@ -306,7 +306,7 @@ def render_template(self): template_files += list(Path(template_dir).glob("*")) ignore_strs = [".pyc", "__pycache__", ".pyo", ".pyd", ".DS_Store", ".egg"] short_name = self.jinja_params["short_name"] - rename_files = { + rename_files: Dict[str, str] = { "workflows/pipeline.nf": f"workflows/{short_name}.nf", "subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf": f"subworkflows/local/utils_nfcore_{short_name}_pipeline/main.nf", } @@ -327,8 +327,9 @@ def render_template(self): # Set up vars and directories template_fn = template_fn_path.relative_to(template_dir) output_path = self.outdir / template_fn - if template_fn in rename_files: - output_path = self.outdir / rename_files[template_fn] + + if str(template_fn) in rename_files: + output_path = self.outdir / rename_files[str(template_fn)] output_path.parent.mkdir(parents=True, exist_ok=True) try: diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index d044be84e4..741bdd751a 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -6,6 +6,7 @@ import re import shutil from pathlib import Path +from typing import Dict, Optional, Union import git import questionary @@ -61,24 +62,24 @@ class PipelineSync: def __init__( self, - pipeline_dir, - from_branch=None, - make_pr=False, - gh_repo=None, - gh_username=None, - template_yaml_path=None, - force_pr=False, + pipeline_dir: Union[str, Path], + from_branch: Optional[str] = None, + make_pr: bool = False, + gh_repo: Optional[str] = None, + gh_username: Optional[str] = None, + template_yaml_path: Optional[str] = None, + force_pr: bool = False, ): """Initialise syncing object""" - self.pipeline_dir = Path(pipeline_dir).resolve() + self.pipeline_dir: Path = Path(pipeline_dir).resolve() self.from_branch = from_branch self.original_branch = None self.original_merge_branch = f"nf-core-template-merge-{nf_core.__version__}" self.merge_branch = self.original_merge_branch self.made_changes = False self.make_pr = make_pr - self.gh_pr_returned_data = {} + self.gh_pr_returned_data: Dict = {} self.required_config_vars = ["manifest.name", "manifest.description", "manifest.version", "manifest.author"] self.force_pr = force_pr @@ -87,7 +88,7 @@ def __init__( self.pr_url = "" self.config_yml_path, self.config_yml = nf_core.utils.load_tools_config(self.pipeline_dir) - assert self.config_yml_path is not None # mypy + assert self.config_yml_path is not None and self.config_yml is not None # mypy # Throw deprecation warning if template_yaml_path is set if template_yaml_path is not None: log.warning( From 0fe3910c54251b005a0cbc493de556d865310c1e Mon Sep 17 00:00:00 2001 
From: mashehu Date: Fri, 26 Jul 2024 12:28:30 +0200 Subject: [PATCH 54/89] fix create-logo cli command --- nf_core/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index d0aad63e80..11ab574ccc 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -650,7 +650,7 @@ def command_pipelines_bump_version(ctx, new_version, directory, nextflow): # nf-core pipelines create-logo @pipelines.command("create-logo") @click.argument("logo-text", metavar="") -@click.option("-d", "--dir", type=click.Path(), default=".", help="Directory to save the logo in.") +@click.option("-d", "--dir", "directory", type=click.Path(), default=".", help="Directory to save the logo in.") @click.option( "-n", "--name", From ce325366e04551b46d109b572e95320c7dc39023 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 14:47:53 +0200 Subject: [PATCH 55/89] add types, include review comments, add pydantic mypy plugin --- .pre-commit-config.yaml | 1 + mypy.ini | 1 + nf_core/components/components_command.py | 6 +++++- nf_core/components/components_utils.py | 2 +- nf_core/components/info.py | 2 +- nf_core/components/lint/__init__.py | 5 +++-- 6 files changed, 12 insertions(+), 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bcf7ff65ce..f763fa6658 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -28,3 +28,4 @@ repos: - types-jsonschema - types-Markdown - types-setuptools + - pydantic diff --git a/mypy.ini b/mypy.ini index c48aa5884b..5a95223162 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,3 +1,4 @@ [mypy] warn_unused_configs = True ignore_missing_imports = true +plugins = pydantic.mypy diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 4440dc32a4..91e14d0606 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -254,7 +254,11 @@ def check_patch_paths(self, patch_path: Path, module_name: str) -> None: # Update path in modules.json if the file is in the correct format modules_json = ModulesJson(self.directory) modules_json.load() - if modules_json.has_git_url_and_modules() and modules_json.modules_json is not None: + if ( + modules_json.has_git_url_and_modules() + and self.modules_repo.repo_path is not None + and modules_json.modules_json is not None + ): modules_json.modules_json["repos"][self.modules_repo.remote_url]["modules"][ self.modules_repo.repo_path ][module_name]["patch"] = str(patch_path.relative_to(Path(self.directory).resolve())) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 4e9c0ac601..3d64dc1bb6 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -77,7 +77,7 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P ).unsafe_ask() log.info("To avoid this prompt in the future, add the 'org_path' key to a root '%s' file.", config_fn.name) if rich.prompt.Confirm.ask("[bold][blue]?[/] Would you like me to add this config now?", default=True): - with open(str(config_fn), "a+") as fh: + with open(config_fn, "a+") as fh: fh.write(f"org_path: {org}\n") log.info(f"Config added to '{config_fn.name}'") diff --git a/nf_core/components/info.py b/nf_core/components/info.py index 55a95593f9..726586b5b7 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -107,7 +107,7 @@ def init_mod_name(self, component): elif self.repo_type == "pipeline": assert 
self.modules_json is not None # mypy all_components = self.modules_json.get_all_components(self.component_type).get( - self.modules_repo.remote_url, [] + self.modules_repo.remote_url, {} ) components = [ component if directory == self.modules_repo.repo_path else f"{directory}/{component}" diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index e2475ef62c..ddf5e1e165 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -22,6 +22,7 @@ from nf_core.components.nfcore_component import NFCoreComponent from nf_core.modules.modules_json import ModulesJson from nf_core.pipelines.lint_utils import console +from nf_core.utils import LintConfigType from nf_core.utils import plural_s as _s log = logging.getLogger(__name__) @@ -77,8 +78,8 @@ def __init__( self.failed: List[LintResult] = [] self.all_local_components: List[NFCoreComponent] = [] - self.lint_config = None - self.modules_json = None + self.lint_config: Optional[LintConfigType] = None + self.modules_json: Optional[ModulesJson] = None if self.component_type == "modules": self.lint_tests = self.get_all_module_lint_tests(self.repo_type == "pipeline") From 8eeaf28402013b4f54af693f80836de92c8d2cf7 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 14:50:35 +0200 Subject: [PATCH 56/89] fix mypy error --- nf_core/modules/lint/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index cea75d8f2e..fcf2d7d066 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -238,7 +238,7 @@ def lint_module( # Otherwise run all the lint tests else: - if self.repo_type == "pipeline" and self.modules_json: + if self.repo_type == "pipeline" and self.modules_json and mod.repo_url: # Set correct sha version = self.modules_json.get_module_version(mod.component_name, mod.repo_url, mod.org) mod.git_sha = version From 95de96b7bddf192cd5ef3ea75218236ec73e02c4 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 15:27:31 +0200 Subject: [PATCH 57/89] allow dashes in pipeline short name (why didn't this fail before?) --- nf_core/pipelines/create/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 15eed46dce..dcb6d2c994 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -249,7 +249,7 @@ def obtain_jinja_params_dict(self, features_to_skip: List[str], pipeline_dir: Un return jinja_params, skip_paths # Check that the pipeline name matches the requirements - if not re.match(r"^[a-z]+$", jinja_params["short_name"]): + if not re.match(r"^[a-z-]+$", jinja_params["short_name"]): if jinja_params["is_nfcore"]: raise UserWarning("[red]Invalid workflow name: must be lowercase without punctuation.") else: From 1b6f231ebc41665a7cad108fe4fdbc7b38d218f0 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 15:27:59 +0200 Subject: [PATCH 58/89] Revert "allow dashes in pipeline short name (why didn't this fail before?)" This reverts commit 95de96b7bddf192cd5ef3ea75218236ec73e02c4.
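An aside on the pair of commits above: PATCH 57 widened the short-name check from `^[a-z]+$` to `^[a-z-]+$` (a literal `-` inside the character class admits dashes), and PATCH 58 reverts it straight away, restoring the lowercase-letters-only rule. A quick sketch of how the two anchored patterns behave:

import re

STRICT = re.compile(r"^[a-z]+$")      # rule restored by the revert
WITH_DASH = re.compile(r"^[a-z-]+$")  # the briefly-introduced variant

for name in ("rnaseq", "rna-seq", "RNAseq"):
    print(name, bool(STRICT.match(name)), bool(WITH_DASH.match(name)))
# rnaseq True True / rna-seq False True / RNAseq False False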
--- nf_core/pipelines/create/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index dcb6d2c994..15eed46dce 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -249,7 +249,7 @@ def obtain_jinja_params_dict(self, features_to_skip: List[str], pipeline_dir: Un return jinja_params, skip_paths # Check that the pipeline name matches the requirements - if not re.match(r"^[a-z-]+$", jinja_params["short_name"]): + if not re.match(r"^[a-z]+$", jinja_params["short_name"]): if jinja_params["is_nfcore"]: raise UserWarning("[red]Invalid workflow name: must be lowercase without punctuation.") else: From 5186ac71523c0fe16978c3857325365f02d1093b Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 16:25:41 +0200 Subject: [PATCH 59/89] use path.walk correctly --- nf_core/components/components_command.py | 8 ++++---- nf_core/modules/modules_json.py | 4 ++-- nf_core/pipelines/lint/merge_markers.py | 2 +- nf_core/pipelines/lint/modules_structure.py | 2 +- nf_core/pipelines/lint/pipeline_todos.py | 2 +- nf_core/synced_repo.py | 2 +- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 91e14d0606..5cde00ef69 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -87,7 +87,7 @@ def get_components_clone_modules(self) -> List[str]: component_base_path = Path(self.directory, self.default_subworkflows_path) return [ str(Path(directory).relative_to(component_base_path)) - for directory, _, files in Path.walk(component_base_path) + for directory, _, files in Path(component_base_path).walk() if "main.nf" in files ] @@ -126,7 +126,7 @@ def clear_component_dir(self, component_name: str, component_dir: Union[str, Pat try: shutil.rmtree(component_dir) # remove all empty directories - for dir_path, dir_names, filenames in Path.walk(Path(self.directory), top_down=False): + for dir_path, dir_names, filenames in Path(self.directory).walk(top_down=False): if not dir_names and not filenames: try: dir_path.rmdir() @@ -157,7 +157,7 @@ def components_from_repo(self, install_dir: str) -> List[str]: return [ str(Path(dir_path).relative_to(repo_dir)) - for dir_path, _, files in Path.walk(repo_dir) + for dir_path, _, files in Path(repo_dir).walk() if "main.nf" in files ] @@ -202,7 +202,7 @@ def check_modules_structure(self) -> None: """ if self.repo_type == "pipeline": wrong_location_modules: List[Path] = [] - for directory, _, files in Path.walk(Path(self.directory, "modules")): + for directory, _, files in Path(self.directory, "modules").walk(): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.directory, "modules")) parts = module_path.parts diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 9f17add6ae..3f4fb46f2d 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -134,7 +134,7 @@ def get_component_names_from_repo( repo_url, [ str(component_name.relative_to(directory / modules_repo.repo_path)) - for component_name, _, file_names in Path.walk(directory / modules_repo.repo_path) + for component_name, _, file_names in Path(directory / modules_repo.repo_path).walk() if "main.nf" in file_names ], modules_repo.repo_path, @@ -1189,7 +1189,7 @@ def components_with_repos(): self.modules_dir, modules_repo.repo_path, ) - for dir_name, _, _ in 
Path.walk(repo_url_path): + for dir_name, _, _ in repo_url_path.walk(): if component_type == "modules": if len(Path(directory).parts) > 1: # The module name is TOOL/SUBTOOL paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) diff --git a/nf_core/pipelines/lint/merge_markers.py b/nf_core/pipelines/lint/merge_markers.py index 208c9f4bcc..4e2f73fb85 100644 --- a/nf_core/pipelines/lint/merge_markers.py +++ b/nf_core/pipelines/lint/merge_markers.py @@ -42,7 +42,7 @@ def merge_markers(self): with open(Path(self.wf_path, ".gitignore"), encoding="latin1") as fh: for line in fh: ignore.append(Path(line.strip().rstrip("/")).name) - for root, dirs, files in Path.walk(self.wf_path, top_down=True): + for root, dirs, files in Path(self.wf_path).walk(top_down=True): # Ignore files for i_base in ignore: i = str(Path(root, i_base)) diff --git a/nf_core/pipelines/lint/modules_structure.py b/nf_core/pipelines/lint/modules_structure.py index fd29942ed6..34cdc89561 100644 --- a/nf_core/pipelines/lint/modules_structure.py +++ b/nf_core/pipelines/lint/modules_structure.py @@ -19,7 +19,7 @@ def modules_structure(self): modules/nf-core/modules/TOOL/SUBTOOL """ wrong_location_modules = [] - for directory, _, files in Path.walk(Path(self.wf_path, "modules")): + for directory, _, files in Path(self.wf_path, "modules").walk(): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.wf_path, "modules")) parts = module_path.parts diff --git a/nf_core/pipelines/lint/pipeline_todos.py b/nf_core/pipelines/lint/pipeline_todos.py index 530d853764..0a21d0d05a 100644 --- a/nf_core/pipelines/lint/pipeline_todos.py +++ b/nf_core/pipelines/lint/pipeline_todos.py @@ -43,7 +43,7 @@ def pipeline_todos(self, root_dir=None): with open(Path(root_dir, ".gitignore"), encoding="latin1") as fh: for line in fh: ignore.append(Path(line.strip().rstrip("/")).name) - for root, dirs, files in Path.walk(root_dir, top_down=True): + for root, dirs, files in Path(root_dir).walk(top_down=True): # # Ignore files for i_base in ignore: i = str(Path(root, i_base)) diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 8efdd0e488..22cdcd1577 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -467,7 +467,7 @@ def get_avail_components( # Module/Subworkflow directories are characterized by having a 'main.nf' file avail_component_names = [ str(Path(dirpath).relative_to(directory)) - for dirpath, _, files in Path.walk(directory) + for dirpath, _, files in Path(directory).walk() if "main.nf" in files ] return avail_component_names From 873398727b6dd8e6a1046f6a91c45bad8fd26825 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 16:37:19 +0200 Subject: [PATCH 60/89] Revert "use path.walk correctly" This reverts commit 5186ac71523c0fe16978c3857325365f02d1093b. 
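For context on PATCH 59-61: `pathlib.Path.walk()` only exists on Python 3.12+, so both the unbound `Path.walk(p)` and the bound `Path(p).walk()` spellings break on the older interpreters tools supports. `os.walk()` yields the same `(dirpath, dirnames, filenames)` triples on every supported version, with `dirpath` arriving as a string that can be wrapped in `Path()` where needed, which is the pattern the next patch settles on. A small self-contained sketch (the helper name is illustrative):

import os
from pathlib import Path
from typing import List


def components_with_main_nf(base: Path) -> List[str]:
    # portable replacement for Path(base).walk(); dirpath arrives as a str
    return [
        str(Path(dirpath).relative_to(base))
        for dirpath, _dirnames, filenames in os.walk(base)
        if "main.nf" in filenames
    ]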
--- nf_core/components/components_command.py | 8 ++++---- nf_core/modules/modules_json.py | 4 ++-- nf_core/pipelines/lint/merge_markers.py | 2 +- nf_core/pipelines/lint/modules_structure.py | 2 +- nf_core/pipelines/lint/pipeline_todos.py | 2 +- nf_core/synced_repo.py | 2 +- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 5cde00ef69..91e14d0606 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -87,7 +87,7 @@ def get_components_clone_modules(self) -> List[str]: component_base_path = Path(self.directory, self.default_subworkflows_path) return [ str(Path(directory).relative_to(component_base_path)) - for directory, _, files in Path(component_base_path).walk() + for directory, _, files in Path.walk(component_base_path) if "main.nf" in files ] @@ -126,7 +126,7 @@ def clear_component_dir(self, component_name: str, component_dir: Union[str, Pat try: shutil.rmtree(component_dir) # remove all empty directories - for dir_path, dir_names, filenames in Path(self.directory).walk(top_down=False): + for dir_path, dir_names, filenames in Path.walk(Path(self.directory), top_down=False): if not dir_names and not filenames: try: dir_path.rmdir() @@ -157,7 +157,7 @@ def components_from_repo(self, install_dir: str) -> List[str]: return [ str(Path(dir_path).relative_to(repo_dir)) - for dir_path, _, files in Path(repo_dir).walk() + for dir_path, _, files in Path.walk(repo_dir) if "main.nf" in files ] @@ -202,7 +202,7 @@ def check_modules_structure(self) -> None: """ if self.repo_type == "pipeline": wrong_location_modules: List[Path] = [] - for directory, _, files in Path(self.directory, "modules").walk(): + for directory, _, files in Path.walk(Path(self.directory, "modules")): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.directory, "modules")) parts = module_path.parts diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 3f4fb46f2d..9f17add6ae 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -134,7 +134,7 @@ def get_component_names_from_repo( repo_url, [ str(component_name.relative_to(directory / modules_repo.repo_path)) - for component_name, _, file_names in Path(directory / modules_repo.repo_path).walk() + for component_name, _, file_names in Path.walk(directory / modules_repo.repo_path) if "main.nf" in file_names ], modules_repo.repo_path, @@ -1189,7 +1189,7 @@ def components_with_repos(): self.modules_dir, modules_repo.repo_path, ) - for dir_name, _, _ in repo_url_path.walk(): + for dir_name, _, _ in Path.walk(repo_url_path): if component_type == "modules": if len(Path(directory).parts) > 1: # The module name is TOOL/SUBTOOL paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) diff --git a/nf_core/pipelines/lint/merge_markers.py b/nf_core/pipelines/lint/merge_markers.py index 4e2f73fb85..208c9f4bcc 100644 --- a/nf_core/pipelines/lint/merge_markers.py +++ b/nf_core/pipelines/lint/merge_markers.py @@ -42,7 +42,7 @@ def merge_markers(self): with open(Path(self.wf_path, ".gitignore"), encoding="latin1") as fh: for line in fh: ignore.append(Path(line.strip().rstrip("/")).name) - for root, dirs, files in Path(self.wf_path).walk(top_down=True): + for root, dirs, files in Path.walk(self.wf_path, top_down=True): # Ignore files for i_base in ignore: i = str(Path(root, i_base)) diff --git a/nf_core/pipelines/lint/modules_structure.py 
b/nf_core/pipelines/lint/modules_structure.py index 34cdc89561..fd29942ed6 100644 --- a/nf_core/pipelines/lint/modules_structure.py +++ b/nf_core/pipelines/lint/modules_structure.py @@ -19,7 +19,7 @@ def modules_structure(self): modules/nf-core/modules/TOOL/SUBTOOL """ wrong_location_modules = [] - for directory, _, files in Path(self.wf_path, "modules").walk(): + for directory, _, files in Path.walk(Path(self.wf_path, "modules")): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.wf_path, "modules")) parts = module_path.parts diff --git a/nf_core/pipelines/lint/pipeline_todos.py b/nf_core/pipelines/lint/pipeline_todos.py index 0a21d0d05a..530d853764 100644 --- a/nf_core/pipelines/lint/pipeline_todos.py +++ b/nf_core/pipelines/lint/pipeline_todos.py @@ -43,7 +43,7 @@ def pipeline_todos(self, root_dir=None): with open(Path(root_dir, ".gitignore"), encoding="latin1") as fh: for line in fh: ignore.append(Path(line.strip().rstrip("/")).name) - for root, dirs, files in Path(root_dir).walk(top_down=True): # + for root, dirs, files in Path.walk(root_dir, top_down=True): # Ignore files for i_base in ignore: i = str(Path(root, i_base)) diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 22cdcd1577..8efdd0e488 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -467,7 +467,7 @@ def get_avail_components( # Module/Subworkflow directories are characterized by having a 'main.nf' file avail_component_names = [ str(Path(dirpath).relative_to(directory)) - for dirpath, _, files in Path(directory).walk() + for dirpath, _, files in Path.walk(directory) if "main.nf" in files ] return avail_component_names From 1679594f5a52c0926f7f40800fcf43c445f8515a Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 16:41:59 +0200 Subject: [PATCH 61/89] switch back to os.walk() --- nf_core/components/components_command.py | 13 ++++++------- nf_core/modules/modules_json.py | 4 ++-- nf_core/pipelines/lint/merge_markers.py | 3 ++- nf_core/pipelines/lint/modules_structure.py | 3 ++- nf_core/pipelines/lint/pipeline_todos.py | 3 ++- nf_core/synced_repo.py | 4 +--- 6 files changed, 15 insertions(+), 15 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 91e14d0606..ada8b532e5 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -1,5 +1,6 @@ import logging import mmap +import os import shutil from pathlib import Path from typing import Dict, List, Optional, Union @@ -87,7 +88,7 @@ def get_components_clone_modules(self) -> List[str]: component_base_path = Path(self.directory, self.default_subworkflows_path) return [ str(Path(directory).relative_to(component_base_path)) - for directory, _, files in Path.walk(component_base_path) + for directory, _, files in os.walk(component_base_path) if "main.nf" in files ] @@ -126,10 +127,10 @@ def clear_component_dir(self, component_name: str, component_dir: Union[str, Pat try: shutil.rmtree(component_dir) # remove all empty directories - for dir_path, dir_names, filenames in Path.walk(Path(self.directory), top_down=False): + for dir_path, dir_names, filenames in os.walk(Path(self.directory), topdown=False): if not dir_names and not filenames: try: - dir_path.rmdir() + Path(dir_path).rmdir() except OSError: pass else: @@ -156,9 +157,7 @@ def components_from_repo(self, install_dir: str) -> List[str]: raise LookupError(f"Nothing installed from {install_dir} in pipeline") return [ - 
str(Path(dir_path).relative_to(repo_dir)) - for dir_path, _, files in Path.walk(repo_dir) - if "main.nf" in files + str(Path(dir_path).relative_to(repo_dir)) for dir_path, _, files in os.walk(repo_dir) if "main.nf" in files ] def install_component_files( @@ -202,7 +201,7 @@ def check_modules_structure(self) -> None: """ if self.repo_type == "pipeline": wrong_location_modules: List[Path] = [] - for directory, _, files in Path.walk(Path(self.directory, "modules")): + for directory, _, files in os.walk(Path(self.directory, "modules")): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.directory, "modules")) parts = module_path.parts diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 9f17add6ae..4f86bc8302 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -134,7 +134,7 @@ def get_component_names_from_repo( repo_url, [ str(component_name.relative_to(directory / modules_repo.repo_path)) - for component_name, _, file_names in Path.walk(directory / modules_repo.repo_path) + for component_name, _, file_names in os.walk(directory / modules_repo.repo_path) if "main.nf" in file_names ], modules_repo.repo_path, @@ -1189,7 +1189,7 @@ def components_with_repos(): self.modules_dir, modules_repo.repo_path, ) - for dir_name, _, _ in Path.walk(repo_url_path): + for dir_name, _, _ in os.walk(repo_url_path): if component_type == "modules": if len(Path(directory).parts) > 1: # The module name is TOOL/SUBTOOL paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) diff --git a/nf_core/pipelines/lint/merge_markers.py b/nf_core/pipelines/lint/merge_markers.py index 208c9f4bcc..1c3d70a76f 100644 --- a/nf_core/pipelines/lint/merge_markers.py +++ b/nf_core/pipelines/lint/merge_markers.py @@ -1,5 +1,6 @@ import fnmatch import logging +import os from pathlib import Path import nf_core.utils @@ -42,7 +43,7 @@ def merge_markers(self): with open(Path(self.wf_path, ".gitignore"), encoding="latin1") as fh: for line in fh: ignore.append(Path(line.strip().rstrip("/")).name) - for root, dirs, files in Path.walk(self.wf_path, top_down=True): + for root, dirs, files in os.walk(self.wf_path, topdown=True): # Ignore files for i_base in ignore: i = str(Path(root, i_base)) diff --git a/nf_core/pipelines/lint/modules_structure.py b/nf_core/pipelines/lint/modules_structure.py index fd29942ed6..9d9b4c9fc0 100644 --- a/nf_core/pipelines/lint/modules_structure.py +++ b/nf_core/pipelines/lint/modules_structure.py @@ -1,4 +1,5 @@ import logging +import os from pathlib import Path log = logging.getLogger(__name__) @@ -19,7 +20,7 @@ def modules_structure(self): modules/nf-core/modules/TOOL/SUBTOOL """ wrong_location_modules = [] - for directory, _, files in Path.walk(Path(self.wf_path, "modules")): + for directory, _, files in os.walk(Path(self.wf_path, "modules")): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.wf_path, "modules")) parts = module_path.parts diff --git a/nf_core/pipelines/lint/pipeline_todos.py b/nf_core/pipelines/lint/pipeline_todos.py index 530d853764..0535069f9a 100644 --- a/nf_core/pipelines/lint/pipeline_todos.py +++ b/nf_core/pipelines/lint/pipeline_todos.py @@ -1,5 +1,6 @@ import fnmatch import logging +import os from pathlib import Path log = logging.getLogger(__name__) @@ -43,7 +44,7 @@ def pipeline_todos(self, root_dir=None): with open(Path(root_dir, ".gitignore"), encoding="latin1") as fh: for line in fh: ignore.append(Path(line.strip().rstrip("/")).name) - for root, dirs, files in 
Path.walk(root_dir, top_down=True): + for root, dirs, files in os.walk(root_dir, topdown=True): # Ignore files for i_base in ignore: i = str(Path(root, i_base)) diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 8efdd0e488..e2a76ccaeb 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -466,9 +466,7 @@ def get_avail_components( directory = self.subworkflows_dir # Module/Subworkflow directories are characterized by having a 'main.nf' file avail_component_names = [ - str(Path(dirpath).relative_to(directory)) - for dirpath, _, files in Path.walk(directory) - if "main.nf" in files + str(Path(dirpath).relative_to(directory)) for dirpath, _, files in os.walk(directory) if "main.nf" in files ] return avail_component_names From 3c0433e68fc33be6dd35dfbb88b3989e103b6ad6 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 22:43:37 +0200 Subject: [PATCH 62/89] fix sync --- nf_core/modules/modules_json.py | 2 +- nf_core/pipelines/create/create.py | 5 ++--- nf_core/pipelines/sync.py | 7 ++----- nf_core/utils.py | 2 ++ 4 files changed, 7 insertions(+), 9 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 4f86bc8302..39d70b7d78 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -133,7 +133,7 @@ def get_component_names_from_repo( components = ( repo_url, [ - str(component_name.relative_to(directory / modules_repo.repo_path)) + str(Path(component_name).relative_to(directory / modules_repo.repo_path)) for component_name, _, file_names in os.walk(directory / modules_repo.repo_path) if "main.nf" in file_names ], diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 15eed46dce..9b64b53446 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -53,7 +53,7 @@ def __init__( no_git: bool = False, force: bool = False, outdir: Optional[Union[Path, str]] = None, - template_config: Optional[Union[str, CreateConfig, Path]] = None, + template_config: Optional[CreateConfig] = None, organisation: str = "nf-core", from_config_file: bool = False, default_branch: Optional[str] = None, @@ -67,7 +67,7 @@ def __init__( _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else Path().cwd()) # Obtain a CreateConfig object from `.nf-core.yml` config file if config_yml is not None and getattr(config_yml, "template", None) is not None: - self.config = CreateConfig(**config_yml["template"]) + self.config = CreateConfig(**config_yml["template"].model_dump()) else: raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") except (FileNotFoundError, UserWarning): @@ -261,7 +261,6 @@ def obtain_jinja_params_dict(self, features_to_skip: List[str], pipeline_dir: Un def init_pipeline(self): """Creates the nf-core pipeline.""" - # Make the new pipeline self.render_template() diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 741bdd751a..efc7212b4d 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -270,13 +270,10 @@ def make_template_pipeline(self): try: nf_core.pipelines.create.create.PipelineCreate( - name=self.wf_config["manifest.name"].strip('"').strip("'"), - description=self.wf_config["manifest.description"].strip('"').strip("'"), - version=self.wf_config["manifest.version"].strip('"').strip("'"), + outdir=str(self.pipeline_dir), + from_config_file=True, no_git=True, force=True, - outdir=str(self.pipeline_dir), - 
author=self.wf_config["manifest.author"].strip('"').strip("'"), ).init_pipeline() except Exception as err: # Reset to where you were to prevent git getting messed up. diff --git a/nf_core/utils.py b/nf_core/utils.py index 0beb8c7366..44eafca3cc 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1053,6 +1053,8 @@ class NFCoreTemplateConfig(BaseModel): is_nfcore: Optional[bool] = None def __getitem__(self, item: str) -> Any: + if self is None: + return None return getattr(self, item) def get(self, item: str, default: Any = None) -> Any: From 4d6930be2da27623f5e1644785cb6bcf1a994d74 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 22:46:57 +0200 Subject: [PATCH 63/89] fix types --- nf_core/pipelines/create/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 9b64b53446..d5c230e048 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -53,7 +53,7 @@ def __init__( no_git: bool = False, force: bool = False, outdir: Optional[Union[Path, str]] = None, - template_config: Optional[CreateConfig] = None, + template_config: Optional[Union[CreateConfig, str, Path]] = None, organisation: str = "nf-core", from_config_file: bool = False, default_branch: Optional[str] = None, From 26b1cd13c36bc2d1234b8f97e6141bf9c0c82c5c Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 11:44:02 +0200 Subject: [PATCH 64/89] set force true in sync to create a template file --- nf_core/pipelines/sync.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index efc7212b4d..fced35dc20 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -263,8 +263,12 @@ def make_template_pipeline(self): logging.getLogger("nf_core.pipelines.create").setLevel(logging.ERROR) assert self.config_yml_path is not None assert self.config_yml is not None + # Re-write the template yaml info from .nf-core.yml config - if getattr(self.config_yml, "template", None) is not None: + if self.config_yml.template is not None: + # Set force true in config to overwrite existing files + + self.config_yml.template.force = True with open(self.config_yml_path, "w") as config_path: yaml.safe_dump(self.config_yml.model_dump(), config_path) @@ -275,6 +279,14 @@ def make_template_pipeline(self): no_git=True, force=True, ).init_pipeline() + + # set force to false to avoid overwriting files in the future + if self.config_yml.template is not None: + # Set force true in config to overwrite existing files + self.config_yml.template.force = False + with open(self.config_yml_path, "w") as config_path: + yaml.safe_dump(self.config_yml.model_dump(), config_path) + except Exception as err: # Reset to where you were to prevent git getting messed up. 
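# Editorial sketch, not part of the patch: PATCH 64 flips config_yml.template.force
# to True before regenerating the template and writes it back to False afterwards.
# If the reset should also happen when init_pipeline() raises, a try/finally keeps
# the flag from sticking; regenerate_with_force is a hypothetical stand-in for the
# sync code path, not an nf-core function.
import yaml

def regenerate_with_force(config_yml, config_yml_path, run_create):
    config_yml.template.force = True
    try:
        with open(config_yml_path, "w") as fh:
            yaml.safe_dump(config_yml.model_dump(), fh)
        run_create()  # e.g. PipelineCreate(..., force=True).init_pipeline()
    finally:
        config_yml.template.force = False
        with open(config_yml_path, "w") as fh:
            yaml.safe_dump(config_yml.model_dump(), fh)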
self.repo.git.reset("--hard") From 59a3eb38c2f7603ed63c73ea6c1589a9b73a95c1 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 12:33:14 +0200 Subject: [PATCH 65/89] fix missing initialization in download.py --- nf_core/pipelines/download.py | 10 ++++++---- nf_core/utils.py | 14 ++++++++------ 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 797909636c..704fe91b2f 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -11,7 +11,7 @@ import textwrap from datetime import datetime from pathlib import Path -from typing import List, Optional, Tuple +from typing import Any, Dict, List, Optional, Tuple from zipfile import ZipFile import git @@ -24,6 +24,7 @@ from packaging.version import Version import nf_core +import nf_core.modules.modules_utils import nf_core.pipelines.list import nf_core.utils from nf_core.synced_repo import RemoteProgressbar, SyncedRepo @@ -131,6 +132,7 @@ def __init__( self.compress_type = compress_type self.force = force self.platform = platform + self.fullname: Optional[str] = None # if flag is not specified, do not assume deliberate choice and prompt config inclusion interactively. # this implies that non-interactive "no" choice is only possible implicitly (e.g. with --platform or if prompt is suppressed by !stderr.is_interactive). # only alternative would have been to make it a parameter with argument, e.g. -d="yes" or -d="no". @@ -161,8 +163,8 @@ def __init__( # allows to specify a container library / registry or a respective mirror to download images from self.parallel_downloads = parallel_downloads - self.wf_revisions = {} - self.wf_branches = {} + self.wf_revisions = [] + self.wf_branches: Dict[str, Any] = {} self.wf_sha = {} self.wf_download_url = {} self.nf_config = {} @@ -339,7 +341,7 @@ def prompt_pipeline_name(self): stderr.print("Specify the name of a nf-core pipeline or a GitHub repository name (user/repo).") self.pipeline = nf_core.utils.prompt_remote_pipeline_name(self.wfs) - def prompt_revision(self): + def prompt_revision(self) -> None: """ Prompt for pipeline revision / branch Prompt user for revision tag if '--revision' was not set diff --git a/nf_core/utils.py b/nf_core/utils.py index 44eafca3cc..80324fc9a8 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -903,7 +903,9 @@ def prompt_remote_pipeline_name(wfs): raise AssertionError(f"Not able to find pipeline '{pipeline}'") -def prompt_pipeline_release_branch(wf_releases, wf_branches, multiple=False): +def prompt_pipeline_release_branch( + wf_releases: List[Dict[str, Any]], wf_branches: Dict[str, Any], multiple: bool = False +) -> tuple[Any, list[str]]: """Prompt for pipeline release / branch Args: @@ -912,18 +914,18 @@ def prompt_pipeline_release_branch(wf_releases, wf_branches, multiple=False): multiple (bool): Allow selection of multiple releases & branches (for Seqera Platform) Returns: - choice (str): Selected release / branch name + choice (questionary.Choice or bool): Selected release / branch or False if no releases / branches available """ # Prompt user for release tag, tag_set will contain all available. 
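# Editorial sketch, not part of the patch: the hunk below builds styled
# questionary.Choice entries. A Choice title may be a list of (style, text)
# tuples, which is how release tags get a coloured "[release]" suffix here.
import questionary

tag = "3.0.1"  # illustrative value
tag_display = [("fg:ansiblue", f"{tag} "), ("class:choice-default", "[release]")]
release_choice = questionary.Choice(title=tag_display, value=tag)
# answer = questionary.select("Select release/branch:", choices=[release_choice]).unsafe_ask()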
- choices = [] - tag_set = [] + choices: List[questionary.Choice] = [] + tag_set: List[str] = [] # Releases if len(wf_releases) > 0: for tag in map(lambda release: release.get("tag_name"), wf_releases): tag_display = [("fg:ansiblue", f"{tag} "), ("class:choice-default", "[release]")] choices.append(questionary.Choice(title=tag_display, value=tag)) - tag_set.append(tag) + tag_set.append(str(tag)) # Branches for branch in wf_branches.keys(): @@ -932,7 +934,7 @@ def prompt_pipeline_release_branch(wf_releases, wf_branches, multiple=False): tag_set.append(branch) if len(choices) == 0: - return False + return [], [] if multiple: return ( From fec536fd1ece3ce042a7fa32c9f05a7ddaa6ad06 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Mon, 29 Jul 2024 13:35:40 +0200 Subject: [PATCH 66/89] Apply suggestions from code review --- CHANGELOG.md | 2 +- nf_core/commands_pipelines.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2c47c1b887..15e92781cb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,7 +13,7 @@ - Fix linting fail on nfcore_external_java_deps if nf_schema is used ([#2976](https://github.com/nf-core/tools/pull/2976)) - Conda module linting: Include package name in log file ([#3014](https://github.com/nf-core/tools/pull/3014)) -- Rrestructure pipeline tests and move pipeline linting into subfolder ([#3070](https://github.com/nf-core/tools/pull/3070)) +- Restructure pipeline tests and move pipeline linting into subfolder ([#3070](https://github.com/nf-core/tools/pull/3070)) ### Download diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index c586534b22..4b6fa75f3e 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -284,7 +284,7 @@ def pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. To keep nf-core pipelines up to date with improvements in the main - template, we use a method of w that uses a special + template, we use a method of synchronisation that uses a special git branch called [cyan i]TEMPLATE[/]. 
This command updates the [cyan i]TEMPLATE[/] branch with the latest version of From 7f03f9e807c537c448f75d69002625b0768b4931 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 14:12:24 +0200 Subject: [PATCH 67/89] fix mypy warnings --- nf_core/pipelines/create/utils.py | 12 +++++++++--- nf_core/pipelines/create_logo.py | 6 +++--- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 88994c9361..e62ef328e4 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -124,10 +124,16 @@ def compose(self) -> ComposeResult: @on(Input.Submitted) def show_invalid_reasons(self, event: Union[Input.Changed, Input.Submitted]) -> None: """Validate the text input and show errors if invalid.""" - if not event.validation_result.is_valid: - self.query_one(".validation_msg").update("\n".join(event.validation_result.failure_descriptions)) + val_msg = self.query_one(".validation_msg") + if not isinstance(val_msg, Static): + raise ValueError("Validation message not found.") + + if event.validation_result is not None and not event.validation_result.is_valid: + # check that val_msg is instance of Static + if isinstance(val_msg, Static): + val_msg.update("\n".join(event.validation_result.failure_descriptions)) else: - self.query_one(".validation_msg").update("") + val_msg.update("") class ValidateConfig(Validator): diff --git a/nf_core/pipelines/create_logo.py b/nf_core/pipelines/create_logo.py index f49e98e93c..6619b910b5 100644 --- a/nf_core/pipelines/create_logo.py +++ b/nf_core/pipelines/create_logo.py @@ -1,6 +1,6 @@ import logging from pathlib import Path -from typing import Union +from typing import Optional, Union from PIL import Image, ImageDraw, ImageFont @@ -59,11 +59,11 @@ def create_logo( return logo_path # cache file cache_path = Path(NFCORE_CACHE_DIR, "logo", cache_name) - img = None + img: Optional[Image.Image] = None if cache_path.is_file(): log.debug(f"Logo already exists in cache at: {cache_path}. 
Reusing this file.") img = Image.open(str(cache_path)) - if not img: + if img is None: log.debug(f"Creating logo for {text}") # make sure the figure fits the text From d2020f511c0f5992b6919e6832d3bcd5cd3bfe75 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 14:19:36 +0200 Subject: [PATCH 68/89] set default value for self.directory and more dir->directory switches --- nf_core/commands_modules.py | 38 ++++++++++++------------ nf_core/commands_subworkflows.py | 34 ++++++++++----------- nf_core/components/components_command.py | 12 ++++---- nf_core/components/list.py | 4 ++- nf_core/modules/list.py | 11 ++++++- nf_core/subworkflows/list.py | 11 ++++++- tests/modules/test_list.py | 18 +++++------ tests/subworkflows/test_list.py | 2 +- 8 files changed, 73 insertions(+), 57 deletions(-) diff --git a/nf_core/commands_modules.py b/nf_core/commands_modules.py index b93bd7bcb0..a889305055 100644 --- a/nf_core/commands_modules.py +++ b/nf_core/commands_modules.py @@ -17,7 +17,7 @@ def modules_list_remote(ctx, keywords, json): try: module_list = ModuleList( - None, + ".", True, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -29,7 +29,7 @@ def modules_list_remote(ctx, keywords, json): sys.exit(1) -def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin +def modules_list_local(ctx, keywords, json, directory): # pylint: disable=redefined-builtin """ List modules installed locally in a pipeline """ @@ -37,7 +37,7 @@ def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-b try: module_list = ModuleList( - dir, + directory, False, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -49,7 +49,7 @@ def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-b sys.exit(1) -def modules_install(ctx, tool, dir, prompt, force, sha): +def modules_install(ctx, tool, directory, prompt, force, sha): """ Install DSL2 modules within a pipeline. @@ -59,7 +59,7 @@ def modules_install(ctx, tool, dir, prompt, force, sha): try: module_install = ModuleInstall( - dir, + directory, force, prompt, sha, @@ -118,7 +118,7 @@ def modules_update( sys.exit(1) -def modules_patch(ctx, tool, dir, remove): +def modules_patch(ctx, tool, directory, remove): """ Create a patch file for minor changes in a module @@ -129,7 +129,7 @@ def modules_patch(ctx, tool, dir, remove): try: module_patch = ModulePatch( - dir, + directory, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], @@ -143,7 +143,7 @@ def modules_patch(ctx, tool, dir, remove): sys.exit(1) -def modules_remove(ctx, dir, tool): +def modules_remove(ctx, directory, tool): """ Remove a module from a pipeline. """ @@ -151,7 +151,7 @@ def modules_remove(ctx, dir, tool): try: module_remove = ModuleRemove( - dir, + directory, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], @@ -165,7 +165,7 @@ def modules_remove(ctx, dir, tool): def modules_create( ctx, tool, - dir, + directory, author, label, meta, @@ -199,7 +199,7 @@ def modules_create( # Run function try: module_create = ModuleCreate( - dir, + directory, tool, author, label, @@ -219,7 +219,7 @@ def modules_create( sys.exit(1) -def modules_test(ctx, tool, dir, no_prompts, update, once, profile): +def modules_test(ctx, tool, directory, no_prompts, update, once, profile): """ Run nf-test for a module. 
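# Editorial sketch, not part of the patch: in click, an extra bare string after
# the flag names ("-d", "--dir", "directory") sets the name of the function
# parameter the value is passed to. That is what lets the CLI keep -d/--dir
# while the callbacks receive `directory`, avoiding the shadowed builtin dir().
# The command below is illustrative, not an nf-core command.
import click

@click.command()
@click.option("-d", "--dir", "directory", type=click.Path(exists=True), default=".")
def example(directory):
    click.echo(f"Using directory: {directory}")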
@@ -231,7 +231,7 @@ def modules_test(ctx, tool, dir, no_prompts, update, once, profile): module_tester = ComponentsTest( component_type="modules", component_name=tool, - directory=dir, + directory=directory, no_prompts=no_prompts, update=update, once=once, @@ -246,7 +246,7 @@ def modules_test(ctx, tool, dir, no_prompts, update, once, profile): sys.exit(1) -def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version): +def modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version): """ Lint one or more modules in a directory. @@ -261,7 +261,7 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, try: module_lint = ModuleLint( - dir, + directory, fail_warned=fail_warned, registry=ctx.params["registry"], remote_url=ctx.obj["modules_repo_url"], @@ -290,7 +290,7 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sys.exit(1) -def modules_info(ctx, tool, dir): +def modules_info(ctx, tool, directory): """ Show developer usage information about a given module. @@ -306,7 +306,7 @@ def modules_info(ctx, tool, dir): try: module_info = ModuleInfo( - dir, + directory, tool, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -318,7 +318,7 @@ def modules_info(ctx, tool, dir): sys.exit(1) -def modules_bump_versions(ctx, tool, dir, all, show_all): +def modules_bump_versions(ctx, tool, directory, all, show_all): """ Bump versions for one or more modules in a clone of the nf-core/modules repo. @@ -328,7 +328,7 @@ def modules_bump_versions(ctx, tool, dir, all, show_all): try: version_bumper = ModuleVersionBumper( - dir, + directory, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], diff --git a/nf_core/commands_subworkflows.py b/nf_core/commands_subworkflows.py index a3abce3f85..36fd998a68 100644 --- a/nf_core/commands_subworkflows.py +++ b/nf_core/commands_subworkflows.py @@ -10,7 +10,7 @@ stdout = rich.console.Console(force_terminal=rich_force_colors()) -def subworkflows_create(ctx, subworkflow, dir, author, force, migrate_pytest): +def subworkflows_create(ctx, subworkflow, directory, author, force, migrate_pytest): """ Create a new subworkflow from the nf-core template. @@ -24,7 +24,7 @@ def subworkflows_create(ctx, subworkflow, dir, author, force, migrate_pytest): # Run function try: - subworkflow_create = SubworkflowCreate(dir, subworkflow, author, force, migrate_pytest) + subworkflow_create = SubworkflowCreate(directory, subworkflow, author, force, migrate_pytest) subworkflow_create.create() except UserWarning as e: log.critical(e) @@ -34,7 +34,7 @@ def subworkflows_create(ctx, subworkflow, dir, author, force, migrate_pytest): sys.exit(1) -def subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile): +def subworkflows_test(ctx, subworkflow, directory, no_prompts, update, once, profile): """ Run nf-test for a subworkflow. 
@@ -46,7 +46,7 @@ def subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile): sw_tester = ComponentsTest( component_type="subworkflows", component_name=subworkflow, - directory=dir, + directory=directory, no_prompts=no_prompts, update=update, once=once, @@ -69,7 +69,7 @@ def subworkflows_list_remote(ctx, keywords, json): try: subworkflow_list = SubworkflowList( - None, + ".", True, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -82,7 +82,7 @@ def subworkflows_list_remote(ctx, keywords, json): sys.exit(1) -def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin +def subworkflows_list_local(ctx, keywords, json, directory): # pylint: disable=redefined-builtin """ List subworkflows installed locally in a pipeline """ @@ -90,7 +90,7 @@ def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefi try: subworkflow_list = SubworkflowList( - dir, + directory, False, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -102,7 +102,7 @@ def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefi sys.exit(1) -def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by): +def subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by): """ Lint one or more subworkflows in a directory. @@ -117,7 +117,7 @@ def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, lo try: subworkflow_lint = SubworkflowLint( - dir, + directory, fail_warned=fail_warned, registry=ctx.params["registry"], remote_url=ctx.obj["modules_repo_url"], @@ -145,7 +145,7 @@ def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, lo sys.exit(1) -def subworkflows_info(ctx, subworkflow, dir): +def subworkflows_info(ctx, subworkflow, directory): """ Show developer usage information about a given subworkflow. @@ -161,7 +161,7 @@ def subworkflows_info(ctx, subworkflow, dir): try: subworkflow_info = SubworkflowInfo( - dir, + directory, subworkflow, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -173,7 +173,7 @@ def subworkflows_info(ctx, subworkflow, dir): sys.exit(1) -def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): +def subworkflows_install(ctx, subworkflow, directory, prompt, force, sha): """ Install DSL2 subworkflow within a pipeline. @@ -183,7 +183,7 @@ def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): try: subworkflow_install = SubworkflowInstall( - dir, + directory, force, prompt, sha, @@ -199,7 +199,7 @@ def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): sys.exit(1) -def subworkflows_remove(ctx, dir, subworkflow): +def subworkflows_remove(ctx, directory, subworkflow): """ Remove a subworkflow from a pipeline. 
""" @@ -207,7 +207,7 @@ def subworkflows_remove(ctx, dir, subworkflow): try: module_remove = SubworkflowRemove( - dir, + directory, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], @@ -221,7 +221,7 @@ def subworkflows_remove(ctx, dir, subworkflow): def subworkflows_update( ctx, subworkflow, - dir, + directory, force, prompt, sha, @@ -240,7 +240,7 @@ def subworkflows_update( try: subworkflow_install = SubworkflowUpdate( - dir, + directory, force, prompt, sha, diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index ada8b532e5..69f067f8fe 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -22,9 +22,7 @@ class ComponentCommand: def __init__( self, component_type: str, - directory: Union[ - str, Path - ], # TODO: This is actually None sometimes (e.g. in test_modules_list_remote), need to rewrite the logic here to handle these cases elegantly, for example setting a default path + directory: Union[str, Path] = ".", remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -34,11 +32,11 @@ def __init__( """ Initialise the ComponentClass object """ - self.component_type = component_type - self.directory = directory + self.component_type: str = component_type + self.directory: Path = Path(directory) self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress) - self.hide_progress = hide_progress - self.no_prompts = no_prompts + self.hide_progress: bool = hide_progress + self.no_prompts: bool = no_prompts self._configure_repo_and_paths() def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: diff --git a/nf_core/components/list.py b/nf_core/components/list.py index 0a6b654467..ded035c897 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -25,7 +25,9 @@ def __init__( super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.remote = remote - def list_components(self, keywords: Optional[List[str]] = None, print_json=False) -> Union[rich.table.Table, str]: + def list_components( + self, keywords: Optional[List[str]] = None, print_json: bool = False + ) -> Union[rich.table.Table, str]: keywords = keywords or [] """ Get available modules/subworkflows names from GitHub tree for repo diff --git a/nf_core/modules/list.py b/nf_core/modules/list.py index c7dc943f9e..68da570f67 100644 --- a/nf_core/modules/list.py +++ b/nf_core/modules/list.py @@ -1,4 +1,6 @@ import logging +from pathlib import Path +from typing import Optional, Union from nf_core.components.list import ComponentList @@ -6,5 +8,12 @@ class ModuleList(ComponentList): - def __init__(self, pipeline_dir, remote=True, remote_url=None, branch=None, no_pull=False): + def __init__( + self, + pipeline_dir: Union[str, Path] = ".", + remote: bool = True, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + ): super().__init__("modules", pipeline_dir, remote, remote_url, branch, no_pull) diff --git a/nf_core/subworkflows/list.py b/nf_core/subworkflows/list.py index ddf144ee00..9e84d6cbe0 100644 --- a/nf_core/subworkflows/list.py +++ b/nf_core/subworkflows/list.py @@ -1,4 +1,6 @@ import logging +from pathlib import Path +from typing import Optional, Union from nf_core.components.list import ComponentList @@ -6,5 +8,12 @@ class SubworkflowList(ComponentList): - def __init__(self, pipeline_dir, remote=True, remote_url=None, branch=None, no_pull=False): + def __init__( 
+ self, + pipeline_dir: Union[str, Path] = ".", + remote: bool = True, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + ) -> None: super().__init__("subworkflows", pipeline_dir, remote, remote_url, branch, no_pull) diff --git a/tests/modules/test_list.py b/tests/modules/test_list.py index fdbb61f69e..a170f6d6fc 100644 --- a/tests/modules/test_list.py +++ b/tests/modules/test_list.py @@ -13,7 +13,7 @@ class TestModulesCreate(TestModules): def test_modules_list_remote(self): """Test listing available modules""" - mods_list = nf_core.modules.list.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(remote=True) listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) @@ -22,9 +22,7 @@ def test_modules_list_remote(self): def test_modules_list_remote_gitlab(self): """Test listing the modules in the remote gitlab repo""" - mods_list = nf_core.modules.list.ModuleList( - None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH - ) + mods_list = nf_core.modules.list.ModuleList(remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH) listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) @@ -64,22 +62,22 @@ def test_modules_install_gitlab_and_list_pipeline(self): def test_modules_list_local_json(self): """Test listing locally installed modules as JSON""" mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_components(print_json=True) + listed_mods = str(mods_list.list_components(print_json=True)) listed_mods = json.loads(listed_mods) assert "fastqc" in listed_mods assert "multiqc" in listed_mods def test_modules_list_remote_json(self): """Test listing available modules as JSON""" - mods_list = nf_core.modules.list.ModuleList(None, remote=True) - listed_mods = mods_list.list_components(print_json=True) + mods_list = nf_core.modules.list.ModuleList(remote=True) + listed_mods: str = str(mods_list.list_components(print_json=True)) listed_mods = json.loads(listed_mods) assert "fastqc" in listed_mods assert "multiqc" in listed_mods def test_modules_list_with_one_keyword(self): """Test listing available modules with one keyword""" - mods_list = nf_core.modules.list.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(remote=True) listed_mods = mods_list.list_components(keywords=["qc"]) console = Console(record=True) console.print(listed_mods) @@ -88,7 +86,7 @@ def test_modules_list_with_one_keyword(self): def test_modules_list_with_keywords(self): """Test listing available modules with multiple keywords""" - mods_list = nf_core.modules.list.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(remote=True) listed_mods = mods_list.list_components(keywords=["fastq", "qc"]) console = Console(record=True) console.print(listed_mods) @@ -97,7 +95,7 @@ def test_modules_list_with_keywords(self): def test_modules_list_with_unused_keyword(self): """Test listing available modules with an unused keyword""" - mods_list = nf_core.modules.list.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(remote=True) with self.assertLogs(level="INFO") as log: listed_mods = mods_list.list_components(keywords=["you_will_never_find_me"]) self.assertIn("No available", log.output[0]) diff --git a/tests/subworkflows/test_list.py b/tests/subworkflows/test_list.py index 5e4e6feb0a..aa1c4de7af 100644 --- a/tests/subworkflows/test_list.py +++ 
b/tests/subworkflows/test_list.py @@ -9,7 +9,7 @@ class TestSubworkflowsList(TestSubworkflows): def test_subworkflows_list_remote(self): """Test listing available subworkflows""" - subworkflows_list = nf_core.subworkflows.SubworkflowList(None, remote=True) + subworkflows_list = nf_core.subworkflows.SubworkflowList(remote=True) listed_subworkflows = subworkflows_list.list_components() console = Console(record=True) console.print(listed_subworkflows) From ebdb3981fec56235822f07d19dca7bc2c206e7ee Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 14:26:37 +0200 Subject: [PATCH 69/89] remove unnecessary checks --- nf_core/components/components_command.py | 2 +- nf_core/components/components_utils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 69f067f8fe..7d43b387fc 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -94,7 +94,7 @@ def has_valid_directory(self) -> bool: """Check that we were given a pipeline or clone of nf-core/modules""" if self.repo_type == "modules": return True - if self.directory is None or not Path(self.directory).exists(): + if not Path(self.directory).exists(): log.error(f"Could not find directory: {self.directory}") return False main_nf = Path(self.directory, "main.nf") diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 3d64dc1bb6..632ae7b2aa 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -26,7 +26,7 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P """ # Verify that the pipeline dir exists - if directory is None or not Path(directory).is_dir(): + if not Path(directory).is_dir(): raise UserWarning(f"Could not find directory: {directory}") # Try to find the root directory From 15a9071886fda734d182a2ae10a2dd78074c3d48 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 14:33:20 +0200 Subject: [PATCH 70/89] remove unnecessary Path conversions --- nf_core/components/components_command.py | 12 +++++------- nf_core/components/components_test.py | 2 +- nf_core/components/lint/__init__.py | 12 +++++------- nf_core/modules/bump_versions.py | 2 +- 4 files changed, 12 insertions(+), 16 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 7d43b387fc..4e739fd6b8 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -49,9 +49,7 @@ def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ try: if self.directory: - self.directory, self.repo_type, self.org = get_repo_info( - Path(self.directory), use_prompt=not self.no_prompts - ) + self.directory, self.repo_type, self.org = get_repo_info(self.directory, use_prompt=not self.no_prompts) else: self.repo_type = None self.org = "" @@ -94,13 +92,13 @@ def has_valid_directory(self) -> bool: """Check that we were given a pipeline or clone of nf-core/modules""" if self.repo_type == "modules": return True - if not Path(self.directory).exists(): + if not self.directory.exists(): log.error(f"Could not find directory: {self.directory}") return False main_nf = Path(self.directory, "main.nf") nf_config = Path(self.directory, "nextflow.config") if not main_nf.exists() and not nf_config.exists(): - if Path(self.directory).resolve().parts[-1].startswith("nf-core"): + if 
self.directory.resolve().parts[-1].startswith("nf-core"): raise UserWarning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.directory}'") log.warning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.directory}'") return True @@ -125,7 +123,7 @@ def clear_component_dir(self, component_name: str, component_dir: Union[str, Pat try: shutil.rmtree(component_dir) # remove all empty directories - for dir_path, dir_names, filenames in os.walk(Path(self.directory), topdown=False): + for dir_path, dir_names, filenames in os.walk(self.directory, topdown=False): if not dir_names and not filenames: try: Path(dir_path).rmdir() @@ -258,7 +256,7 @@ def check_patch_paths(self, patch_path: Path, module_name: str) -> None: ): modules_json.modules_json["repos"][self.modules_repo.remote_url]["modules"][ self.modules_repo.repo_path - ][module_name]["patch"] = str(patch_path.relative_to(Path(self.directory).resolve())) + ][module_name]["patch"] = str(patch_path.relative_to(self.directory.resolve())) modules_json.dump() def check_if_in_include_stmts(self, component_path: str) -> Dict[str, List[Dict[str, Union[int, str]]]]: diff --git a/nf_core/components/components_test.py b/nf_core/components/components_test.py index f9b891004a..57c0034ba4 100644 --- a/nf_core/components/components_test.py +++ b/nf_core/components/components_test.py @@ -93,7 +93,7 @@ def run(self) -> None: os.environ["NFT_DIFF_ARGS"] = ( "--line-numbers --expand-tabs=2" # taken from https://code.askimed.com/nf-test/docs/assertions/snapshots/#snapshot-differences ) - with nf_core.utils.set_wd(Path(self.directory)): + with nf_core.utils.set_wd(self.directory): self.check_snapshot_stability() if len(self.errors) > 0: errors = "\n - ".join(self.errors) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index ddf5e1e165..be6b225a0a 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -104,7 +104,7 @@ def __init__( repo_url, Path(self.directory, self.component_type, org, comp), self.repo_type, - Path(self.directory), + self.directory, self.component_type, ) ) @@ -121,20 +121,20 @@ def __init__( None, Path(local_component_dir, comp), self.repo_type, - Path(self.directory), + self.directory, self.component_type, remote_component=False, ) for comp in self.get_local_components() ] - self.config = nf_core.utils.fetch_wf_config(Path(self.directory), cache_config=True) + self.config = nf_core.utils.fetch_wf_config(self.directory, cache_config=True) elif self.repo_type == "modules": component_dir = Path( self.directory, self.default_modules_path if self.component_type == "modules" else self.default_subworkflows_path, ) self.all_remote_components = [ - NFCoreComponent(m, None, component_dir / m, self.repo_type, Path(self.directory), self.component_type) + NFCoreComponent(m, None, component_dir / m, self.repo_type, self.directory, self.component_type) for m in self.get_components_clone_modules() ] self.all_local_components = [] @@ -142,9 +142,7 @@ def __init__( raise LookupError(f"No {self.component_type} in '{self.component_type}' directory") # This could be better, perhaps glob for all nextflow.config files in? 
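# (Hedged aside, not diff content: with self.directory now guaranteed to be a
#  Path, the Path(...) wrappers removed in this patch are no-ops and "/" joins
#  paths directly.)
# from pathlib import Path
# directory = Path("pipeline")
# assert Path(directory) == directory                     # redundant re-wrap
# assert directory / "tests" / "config" == Path("pipeline/tests/config")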
- self.config = nf_core.utils.fetch_wf_config( - Path(self.directory).joinpath("tests", "config"), cache_config=True - ) + self.config = nf_core.utils.fetch_wf_config(self.directory / "tests" / "config", cache_config=True) if registry is None: self.registry = self.config.get("docker.registry", "quay.io") diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 6546cccc9a..2d8854e3c0 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -76,7 +76,7 @@ def bump_versions( ) # Get list of all modules - _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(Path(self.directory)) + _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.directory) # Load the .nf-core.yml config _, self.tools_config = nf_core.utils.load_tools_config(self.directory) From fa00b1b61e0315177bee09c3d2e8f486cd4ae86e Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 14:42:13 +0200 Subject: [PATCH 71/89] fix types for 3.8 --- nf_core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 80324fc9a8..86ed09555c 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -905,7 +905,7 @@ def prompt_remote_pipeline_name(wfs): def prompt_pipeline_release_branch( wf_releases: List[Dict[str, Any]], wf_branches: Dict[str, Any], multiple: bool = False -) -> tuple[Any, list[str]]: +) -> Tuple[Any, List[str]]: """Prompt for pipeline release / branch Args: From e7f06bea17e9c29bb40a1b7f8e54818ab308a264 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 14:46:10 +0200 Subject: [PATCH 72/89] limit `component` to type string --- nf_core/components/install.py | 9 +++++---- nf_core/modules/modules_json.py | 14 ++++++-------- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index f2849f85bc..9b42ba77db 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -1,7 +1,10 @@ import logging import os from pathlib import Path -from typing import List, Optional, Union +from typing import TYPE_CHECKING, List, Optional, Union + +if TYPE_CHECKING: + from nf_core.modules.modules_repo import ModulesRepo import questionary from rich.console import Console @@ -180,9 +183,7 @@ def install_included_components(self, subworkflow_dir): self.component_type = original_component_type self.installed_by = original_installed - def collect_and_verify_name( - self, component: Optional[str], modules_repo: "nf_core.modules.modules_repo.ModulesRepo" - ) -> str: + def collect_and_verify_name(self, component: Optional[str], modules_repo: ModulesRepo) -> str: """ Collect component name. Check that the supplied name is an available module/subworkflow. 
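The `TYPE_CHECKING` guard introduced above is the standard way to keep an import visible to mypy while avoiding a runtime circular import. A minimal sketch of the pattern, with hypothetical module names:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:                    # True for type checkers, False at runtime
        from mypkg.repo import Repo      # would be circular if imported eagerly

    def checkout(repo: "Repo") -> None:  # string annotation, no runtime import needed
        repo.checkout()

(Patch 74 below swaps this particular guard back to a plain module import, but the pattern itself is sound.)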
diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 39d70b7d78..ff4922dc02 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -355,7 +355,7 @@ def determine_branches_and_shas( # Clean up the modules/subworkflows we were unable to find the sha for for component in sb_local: - log.debug(f"Moving {component_type[:-1]} '{Path(install_dir, str(component))}' to 'local' directory") + log.debug(f"Moving {component_type[:-1]} '{Path(install_dir, component)}' to 'local' directory") self.move_component_to_local(component_type, component, str(install_dir)) for component in dead_components: @@ -398,13 +398,13 @@ def find_correct_commit_sha( return commit_sha return None - def move_component_to_local(self, component_type: str, component: Union[str, Path], repo_name: str): + def move_component_to_local(self, component_type: str, component: str, repo_name: str): """ Move a module/subworkflow to the 'local' directory Args: component_type (str): The type of component, either 'modules' or 'subworkflows' - component (Union[str,Path]): The name of the module/subworkflow + component (str): The name of the module/subworkflow repo_name (str): The name of the repository the module resides in """ if component_type == "modules": @@ -418,7 +418,7 @@ def move_component_to_local(self, component_type: str, component: Union[str, Pat if not local_dir.exists(): local_dir.mkdir() - to_name = str(component) + to_name = component # Check if there is already a subdirectory with the name while (local_dir / to_name).exists(): # Add a time suffix to the path to make it unique @@ -1086,9 +1086,7 @@ def get_installed_by_entries(self, component_type, name): return installed_by_entries - def get_component_branch( - self, component_type: str, component: Union[str, Path], repo_url: str, install_dir: str - ) -> str: + def get_component_branch(self, component_type: str, component: str, repo_url: str, install_dir: str) -> str: """ Gets the branch from which the module/subworkflow was installed @@ -1101,7 +1099,7 @@ def get_component_branch( self.load() assert self.modules_json is not None # mypy try: - branch = self.modules_json["repos"][repo_url][component_type][install_dir][str(component)]["branch"] + branch = self.modules_json["repos"][repo_url][component_type][install_dir][component]["branch"] except (KeyError, TypeError): branch = None if branch is None: From ae7d912369630daaf2e43ddcd636bdb8d8cfb2d4 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 14:59:04 +0200 Subject: [PATCH 73/89] handle more type warnings --- nf_core/modules/modules_json.py | 10 ++++++++-- nf_core/modules/modules_repo.py | 2 +- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index ff4922dc02..19649a57cd 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -42,7 +42,7 @@ class ModulesJson: An object for handling a 'modules.json' file in a pipeline """ - def __init__(self, pipeline_dir: Union[str, Path]): + def __init__(self, pipeline_dir: Union[str, Path]) -> None: """ Initialise the object. 
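A hedged aside on why this patch pins `component` to `str`: `modules.json` entries are keyed by plain strings, and a `Path` never compares equal to the equivalent string, so a `Path` key silently misses the entry (hence the `except (KeyError, TypeError)` fallback above). Illustrative only:

    from pathlib import Path

    entries = {"fastqc": {"branch": "master"}}     # shape loosely mirrors modules.json
    assert entries.get("fastqc") is not None
    assert entries.get(Path("fastqc")) is None     # Path("fastqc") != "fastqc"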
@@ -130,6 +130,10 @@ def get_component_names_from_repo( names = [] for repo_url in repos: modules_repo = ModulesRepo(repo_url) + if modules_repo is None: + raise UserWarning(f"Could not find module repository for '{repo_url}' in '{directory}'") + if modules_repo.repo_path is None: + raise UserWarning(f"Could not find module repository path for '{repo_url}' in '{directory}'") components = ( repo_url, [ @@ -195,6 +199,8 @@ def get_pipeline_module_repositories( # Verify that there is a directory corresponding the remote nrepo_name = ModulesRepo(nrepo_remote).repo_path + if nrepo_name is None: + raise UserWarning(f"Could not find the repository name for '{nrepo_remote}'") if not (directory / nrepo_name).exists(): log.info( "The provided remote does not seem to correspond to a local directory. " @@ -416,7 +422,7 @@ def move_component_to_local(self, component_type: str, component: str, repo_name current_path = directory / repo_name / component local_dir = directory / "local" if not local_dir.exists(): - local_dir.mkdir() + local_dir.mkdir(parents=True) to_name = component # Check if there is already a subdirectory with the name diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 5b50205484..357fc49cc5 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -40,7 +40,7 @@ def __init__( branch: Optional[str] = None, no_pull: bool = False, hide_progress: bool = False, - ): + ) -> None: """ Initializes the object and clones the git repository if it is not already present """ From d7587dc1577602ab26d53dd34a438aedfb4c8a1e Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 16:18:32 +0200 Subject: [PATCH 74/89] fix import errors, handle outdir as string --- nf_core/components/info.py | 17 +++++++++-------- nf_core/components/install.py | 10 +++++----- nf_core/pipelines/create/create.py | 3 +-- nf_core/utils.py | 12 ++++++++++-- 4 files changed, 25 insertions(+), 17 deletions(-) diff --git a/nf_core/components/info.py b/nf_core/components/info.py index 726586b5b7..98f8be5272 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -1,7 +1,7 @@ import logging import os from pathlib import Path -from typing import Dict, Optional, Union +from typing import Dict, List, Optional, Tuple, Union import questionary import yaml @@ -83,14 +83,14 @@ def __init__( self.modules_json = None self.component = self.init_mod_name(component_name) - def _configure_repo_and_paths(self, nf_dir_req=False): + def _configure_repo_and_paths(self, nf_dir_req=False) -> None: """ Override the default with nf_dir_req set to False to allow info to be run from anywhere and still return remote info """ return super()._configure_repo_and_paths(nf_dir_req) - def init_mod_name(self, component): + def init_mod_name(self, component: Optional[str]) -> str: """ Makes sure that we have a module/subworkflow name before proceeding. 
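The `raise UserWarning` guards added above double as type narrowing: after the `is None` check, mypy treats the value as non-optional for the rest of the function. A minimal sketch of the idiom:

    from typing import Optional

    def require_repo_path(repo_path: Optional[str]) -> str:
        if repo_path is None:
            raise UserWarning("Could not find module repository path")
        return repo_path       # mypy has narrowed Optional[str] to str here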
@@ -106,9 +106,10 @@ def init_mod_name(self, component): components = self.get_components_clone_modules() elif self.repo_type == "pipeline": assert self.modules_json is not None # mypy - all_components = self.modules_json.get_all_components(self.component_type).get( - self.modules_repo.remote_url, {} - ) + all_components: List[Tuple[str, str]] = self.modules_json.get_all_components( + self.component_type + ).get(self.modules_repo.remote_url, []) + components = [ component if directory == self.modules_repo.repo_path else f"{directory}/{component}" for directory, component in all_components @@ -169,7 +170,7 @@ def get_component_info(self): return self.generate_component_info_help() - def get_local_yaml(self): + def get_local_yaml(self) -> Optional[Dict]: """Attempt to get the meta.yml file from a locally installed module/subworkflow. Returns: @@ -316,7 +317,7 @@ def generate_component_info_help(self): ) # Print include statement - if self.local_path: + if self.local_path and self.modules_repo.repo_path is not None: install_folder = Path(self.directory, self.component_type, self.modules_repo.repo_path) component_name = "_".join(self.component.upper().split("/")) renderables.append( diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 9b42ba77db..a7be737bf9 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -1,16 +1,14 @@ import logging import os from pathlib import Path -from typing import TYPE_CHECKING, List, Optional, Union - -if TYPE_CHECKING: - from nf_core.modules.modules_repo import ModulesRepo +from typing import List, Optional, Union import questionary from rich.console import Console from rich.syntax import Syntax import nf_core.components +import nf_core.modules.modules_repo import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command import ComponentCommand @@ -183,7 +181,9 @@ def install_included_components(self, subworkflow_dir): self.component_type = original_component_type self.installed_by = original_installed - def collect_and_verify_name(self, component: Optional[str], modules_repo: ModulesRepo) -> str: + def collect_and_verify_name( + self, component: Optional[str], modules_repo: "nf_core.modules.modules_repo.ModulesRepo" + ) -> str: """ Collect component name. Check that the supplied name is an available module/subworkflow. 
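A hedged sketch of the comprehension in the hunk above: components installed from the default organisation keep their short name, while components from any other install directory get an "org/name" prefix. Assumed toy values:

    repo_path = "nf-core"                          # hypothetical default org path
    all_components = [("nf-core", "fastqc"), ("my-org", "bwa/align")]
    components = [
        comp if directory == repo_path else f"{directory}/{comp}"
        for directory, comp in all_components
    ]
    assert components == ["fastqc", "my-org/bwa/align"]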
diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index d5c230e048..899507847e 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -83,9 +83,8 @@ def __init__( if self.config.outdir is None: self.config.outdir = str(Path.cwd()) - self.jinja_params, skip_paths = self.obtain_jinja_params_dict( - self.config.skip_features or [], Path(self.config.outdir) + self.config.skip_features or [], str(self.config.outdir) ) skippable_paths = { diff --git a/nf_core/utils.py b/nf_core/utils.py index 86ed09555c..c1cc7c1301 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -30,7 +30,7 @@ import rich.markup import yaml from packaging.version import Version -from pydantic import BaseModel, ValidationError +from pydantic import BaseModel, ValidationError, field_validator from rich.live import Live from rich.spinner import Spinner @@ -1050,10 +1050,18 @@ class NFCoreTemplateConfig(BaseModel): author: Optional[str] = None version: Optional[str] = None force: Optional[bool] = None - outdir: Optional[str] = None + outdir: Optional[Union[str, Path]] = None skip_features: Optional[list] = None is_nfcore: Optional[bool] = None + # convert outdir to str + @field_validator("outdir") + @classmethod + def outdir_to_str(cls, v: Optional[Union[str, Path]]) -> Optional[str]: + if v is not None: + v = str(v) + return v + def __getitem__(self, item: str) -> Any: if self is None: return None From 5979b9d1f4ee8fa157eb1798ec3a7226238f00cb Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 17:24:52 +0200 Subject: [PATCH 75/89] add error message and resolve circular import --- nf_core/components/components_utils.py | 2 +- nf_core/components/install.py | 13 +++++++++---- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 632ae7b2aa..9289d90004 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -63,7 +63,7 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P # Check if it's a valid answer if repo_type not in ["pipeline", "modules"]: - raise UserWarning(f"Invalid repository type: '{repo_type}'") + raise UserWarning(f"Invalid repository type: '{repo_type}', must be 'pipeline' or 'modules'") org: str = "" # Check for org if modules repo if repo_type == "modules": diff --git a/nf_core/components/install.py b/nf_core/components/install.py index a7be737bf9..6130a4cf72 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -8,7 +8,6 @@ from rich.syntax import Syntax import nf_core.components -import nf_core.modules.modules_repo import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command import ComponentCommand @@ -69,12 +68,18 @@ def install(self, component: str, silent: bool = False) -> bool: # Verify SHA if not self.modules_repo.verify_sha(self.prompt, self.sha): + err_msg = f"SHA '{self.sha}' is not a valid commit SHA for the repository '{self.modules_repo.remote_url}'" + log.error(err_msg) return False # verify self.modules_repo entries: if self.modules_repo is None: + err_msg = "Could not find a valid modules repository." + log.error(err_msg) return False if self.modules_repo.repo_path is None: + err_msg = "Could not find a valid modules repository path." 
+ log.error(err_msg) return False # Check and verify component name @@ -200,10 +205,10 @@ def collect_and_verify_name( # Check that the supplied name is an available module/subworkflow if component and component not in modules_repo.get_avail_components(self.component_type, commit=self.sha): - log.error( - f"{self.component_type[:-1].title()} '{component}' not found in list of available {self.component_type}." - ) log.info(f"Use the command 'nf-core {self.component_type} list' to view available software") + raise SystemError( + f"{self.component_type[:-1].title()} '{component}' not found in available {self.component_type}" + ) if not modules_repo.component_exists(component, self.component_type, commit=self.sha): warn_msg = f"{self.component_type[:-1].title()} '{component}' not found in remote '{modules_repo.remote_url}' ({modules_repo.branch})" From 24a3ece90f28b9efe1336a5768b55dc6dc62b184 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 17:45:45 +0200 Subject: [PATCH 76/89] actually test installation with a correct subworkflow --- tests/subworkflows/test_install.py | 46 ++++++++++++++++-------------- 1 file changed, 25 insertions(+), 21 deletions(-) diff --git a/tests/subworkflows/test_install.py b/tests/subworkflows/test_install.py index af1ad92415..e6ba9439a4 100644 --- a/tests/subworkflows/test_install.py +++ b/tests/subworkflows/test_install.py @@ -17,26 +17,6 @@ class TestSubworkflowsInstall(TestSubworkflows): - def test_subworkflow_install_nopipeline(self): - """Test installing a subworkflow - no pipeline given""" - assert self.subworkflow_install.directory is not None - self.subworkflow_install.directory = Path("non_existent_dir") - assert self.subworkflow_install.install("foo") is False - - @with_temporary_folder - def test_subworkflows_install_emptypipeline(self, tmpdir): - """Test installing a subworkflow - empty dir given""" - - Path(tmpdir, "nf-core-pipe").mkdir(exist_ok=True) - self.subworkflow_install.directory = Path(tmpdir, "nf-core-pipe") - with pytest.raises(UserWarning) as excinfo: - self.subworkflow_install.install("foo") - assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) - - def test_subworkflows_install_nosubworkflow(self): - """Test installing a subworkflow - unrecognised subworkflow given""" - assert self.subworkflow_install.install("foo") is False - def test_subworkflows_install_bam_sort_stats_samtools(self): """Test installing a subworkflow - bam_sort_stats_samtools""" assert self.subworkflow_install.install("bam_sort_stats_samtools") is not False @@ -57,6 +37,28 @@ def test_subworkflows_install_bam_sort_stats_samtools(self): assert samtools_idxstats_path.exists() assert samtools_flagstat_path.exists() + def test_subworkflow_install_nopipeline(self): + """Test installing a subworkflow - no pipeline given""" + assert self.subworkflow_install.directory is not None + self.subworkflow_install.directory = Path("non_existent_dir") + assert self.subworkflow_install.install("bam_stats_samtools") is False + + @with_temporary_folder + def test_subworkflows_install_emptypipeline(self, tmpdir): + """Test installing a subworkflow - empty dir given""" + + Path(tmpdir, "nf-core-pipe").mkdir(exist_ok=True) + self.subworkflow_install.directory = Path(tmpdir, "nf-core-pipe") + with pytest.raises(UserWarning) as excinfo: + self.subworkflow_install.install("bam_stats_samtools") + assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) + + def test_subworkflows_install_nosubworkflow(self): + """Test installing 
a subworkflow - unrecognised subworkflow given""" + with pytest.raises(SystemError) as excinfo: + self.subworkflow_install.install("foo") + assert "Subworkflow 'foo' not found in available subworkflows" in str(excinfo.value) + def test_subworkflows_install_bam_sort_stats_samtools_twice(self): """Test installing a subworkflow - bam_sort_stats_samtools already there""" self.subworkflow_install.install("bam_sort_stats_samtools") @@ -76,7 +78,9 @@ def test_subworkflows_install_different_branch_fail(self): """Test installing a subworkflow from a different branch""" install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) # The bam_stats_samtools subworkflow does not exists in the branch-test branch - assert install_obj.install("bam_stats_samtools") is False + with pytest.raises(Exception) as excinfo: + install_obj.install("bam_stats_samtools") + assert "Subworkflow 'bam_stats_samtools' not found in available subworkflows" in str(excinfo.value) def test_subworkflows_install_tracking(self): """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" From 05def5932e12de9aa8fa2d07d763a54cd552038c Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 22:21:13 +0200 Subject: [PATCH 77/89] fix list and info command --- nf_core/components/components_command.py | 10 ++++------ nf_core/components/lint/__init__.py | 21 +++++++++++++++------ nf_core/components/list.py | 2 +- nf_core/modules/bump_versions.py | 13 ++++++++----- nf_core/subworkflows/list.py | 2 +- nf_core/utils.py | 2 ++ tests/subworkflows/test_list.py | 2 +- 7 files changed, 32 insertions(+), 20 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 4e739fd6b8..bff2467383 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -37,6 +37,8 @@ def __init__( self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress) self.hide_progress: bool = hide_progress self.no_prompts: bool = no_prompts + self.repo_type: Optional[str] = None + self.org: str = "" self._configure_repo_and_paths() def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: @@ -50,15 +52,11 @@ def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: try: if self.directory: self.directory, self.repo_type, self.org = get_repo_info(self.directory, use_prompt=not self.no_prompts) - else: - self.repo_type = None - self.org = "" - except UserWarning: if nf_dir_req: raise - self.repo_type = None - self.org = "" + except FileNotFoundError: + raise self.default_modules_path = Path("modules", self.org) self.default_tests_path = Path("tests", "modules", self.org) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index be6b225a0a..d0f6685365 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -86,6 +86,11 @@ def __init__( else: self.lint_tests = self.get_all_subworkflow_lint_tests(self.repo_type == "pipeline") + if self.repo_type is None: + raise LookupError( + "Could not determine repository type. 
Please check the repository type in the nf-core.yml" + ) + if self.repo_type == "pipeline": modules_json = ModulesJson(self.directory) modules_json.check_up_to_date() @@ -128,6 +133,8 @@ def __init__( for comp in self.get_local_components() ] self.config = nf_core.utils.fetch_wf_config(self.directory, cache_config=True) + self._set_registry(registry) + elif self.repo_type == "modules": component_dir = Path( self.directory, @@ -143,16 +150,18 @@ def __init__( # This could be better, perhaps glob for all nextflow.config files in? self.config = nf_core.utils.fetch_wf_config(self.directory / "tests" / "config", cache_config=True) - - if registry is None: - self.registry = self.config.get("docker.registry", "quay.io") - else: - self.registry = registry - log.debug(f"Registry set to {self.registry}") + self._set_registry(registry) def __repr__(self) -> str: return f"ComponentLint({self.component_type}, {self.directory})" + def _set_registry(self, registry) -> None: + if registry is None: + self.registry = self.config.get("docker.registry", "quay.io") + else: + self.registry = registry + log.debug(f"Registry set to {self.registry}") + @staticmethod def get_all_module_lint_tests(is_pipeline): if is_pipeline: diff --git a/nf_core/components/list.py b/nf_core/components/list.py index ded035c897..65a28db716 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -16,7 +16,7 @@ class ComponentList(ComponentCommand): def __init__( self, component_type: str, - pipeline_dir: Union[str, Path], + pipeline_dir: Union[str, Path] = ".", remote: bool = True, remote_url: Optional[str] = None, branch: Optional[str] = None, diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 2d8854e3c0..d98eac7cd6 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -245,12 +245,15 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: fh.write(content) # change version in environment.yml - with open(str(module.environment_yml)) as fh: + if not module.environment_yml: + log.error(f"Could not read `environment.yml` of {module.component_name} module.") + return False + with open(module.environment_yml) as fh: env_yml = yaml.safe_load(fh) env_yml["dependencies"][0] = re.sub( bioconda_packages[0], f"bioconda::{bioconda_tool_name}={last_ver}", env_yml["dependencies"][0] ) - with open(str(module.environment_yml), "w") as fh: + with open(module.environment_yml, "w") as fh: yaml.dump(env_yml, fh, default_flow_style=False, Dumper=custom_yaml_dumper()) self.updated.append( @@ -271,11 +274,11 @@ def get_bioconda_version(self, module: NFCoreComponent) -> List[str]: """ # Check whether file exists and load it bioconda_packages = [] - try: - with open(str(module.environment_yml)) as fh: + if module.environment_yml is not None and module.environment_yml.exists(): + with open(module.environment_yml) as fh: env_yml = yaml.safe_load(fh) bioconda_packages = env_yml.get("dependencies", []) - except FileNotFoundError: + else: log.error(f"Could not read `environment.yml` of {module.component_name} module.") return bioconda_packages diff --git a/nf_core/subworkflows/list.py b/nf_core/subworkflows/list.py index 9e84d6cbe0..5f849a1f9c 100644 --- a/nf_core/subworkflows/list.py +++ b/nf_core/subworkflows/list.py @@ -10,7 +10,7 @@ class SubworkflowList(ComponentList): def __init__( self, - pipeline_dir: Union[str, Path] = ".", + pipeline_dir: Union[str, Path], remote: bool = True, remote_url: Optional[str] = None, branch: Optional[str] = None, diff --git 
a/nf_core/utils.py b/nf_core/utils.py index c1cc7c1301..a95eea4b69 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1113,6 +1113,8 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] else: log.debug(f"Could not find a config file in the directory '{directory}'") return Path(directory, CONFIG_PATHS[0]), None + if not Path(config_fn).is_file(): + raise FileNotFoundError(f"No `.nf-core.yml` file found in the directory '{directory}'") with open(str(config_fn)) as fh: tools_config = yaml.safe_load(fh) diff --git a/tests/subworkflows/test_list.py b/tests/subworkflows/test_list.py index aa1c4de7af..1ae8f5fff6 100644 --- a/tests/subworkflows/test_list.py +++ b/tests/subworkflows/test_list.py @@ -19,7 +19,7 @@ def test_subworkflows_list_remote(self): def test_subworkflows_list_remote_gitlab(self): """Test listing the subworkflows in the remote gitlab repo""" subworkflows_list = nf_core.subworkflows.SubworkflowList( - None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + remote=True, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH ) listed_subworkflows = subworkflows_list.list_components() console = Console(record=True) From 8de38715468b084a00a9627c89c5e1ec77bf52f4 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 09:21:01 +0200 Subject: [PATCH 78/89] exclude click from rich traceback --- nf_core/pipelines/create/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 56e25bf1d5..1dd9902fb1 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -2,6 +2,7 @@ import logging +import click from textual.app import App from textual.widgets import Button @@ -23,6 +24,7 @@ show_time=False, show_path=False, markup=True, + tracebacks_suppress=[click], ) logging.basicConfig( level="INFO", From 8fe4be6520995812a8d819b3cfe213055d8f6864 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 10:16:43 +0200 Subject: [PATCH 79/89] give nicer message on install failure, fix install test --- nf_core/__main__.py | 2 +- nf_core/components/install.py | 21 +++++++++++++++++---- tests/modules/test_install.py | 4 +++- 3 files changed, 21 insertions(+), 6 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 11ab574ccc..1e5cb210a5 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -126,7 +126,7 @@ # because they are actually preliminary, but intended program terminations. 
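# (Hedged sketch, not diff content: the sys.excepthook pattern this hunk extends,
#  with assumed simplified names.)
# import sys
# def selective_hook(exctype, value, tb):
#     if exctype in {UserWarning, ValueError}:       # expected terminations: log only
#         print(f"ERROR: {value}")
#     else:
#         sys.__excepthook__(exctype, value, tb)     # unexpected: full traceback
# sys.excepthook = selective_hook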
# (Custom exceptions are cleaner than `sys.exit(1)`, which we used before) def selective_traceback_hook(exctype, value, traceback): - if exctype in {DownloadError, UserWarning}: # extend set as needed + if exctype in {DownloadError, UserWarning, ValueError}: # extend set as needed log.error(value) else: # print the colored traceback for all other exceptions with rich as usual diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 6130a4cf72..5bdcd1ebd6 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -4,7 +4,10 @@ from typing import List, Optional, Union import questionary +from rich import print from rich.console import Console +from rich.markdown import Markdown +from rich.panel import Panel from rich.syntax import Syntax import nf_core.components @@ -160,7 +163,7 @@ def install(self, component: str, silent: bool = False) -> bool: ) if self.component_type == "subworkflows": subworkflow_config = Path(install_folder, component, "nextflow.config").relative_to(self.directory) - if os.path.isfile(subworkflow_config): + if subworkflow_config.is_file(): log.info("Add the following config statement to use this subworkflow:") Console().print( Syntax(f"includeConfig '{subworkflow_config}'", "groovy", theme="ansi_dark", padding=1) @@ -205,11 +208,21 @@ def collect_and_verify_name( # Check that the supplied name is an available module/subworkflow if component and component not in modules_repo.get_avail_components(self.component_type, commit=self.sha): - log.info(f"Use the command 'nf-core {self.component_type} list' to view available software") - raise SystemError( - f"{self.component_type[:-1].title()} '{component}' not found in available {self.component_type}" + log.error(f"{self.component_type[:-1].title()} '{component}' not found in available {self.component_type}") + print( + Panel( + Markdown( + f"Use the command `nf-core {self.component_type} list` to view available {self.component_type}." 
+ ), + title="info", + title_align="left", + style="blue", + padding=1, + ) ) + raise ValueError + if not modules_repo.component_exists(component, self.component_type, commit=self.sha): warn_msg = f"{self.component_type[:-1].title()} '{component}' not found in remote '{modules_repo.remote_url}' ({modules_repo.branch})" log.warning(warn_msg) diff --git a/tests/modules/test_install.py b/tests/modules/test_install.py index 8f7ac0a1dc..5f45d1b5b4 100644 --- a/tests/modules/test_install.py +++ b/tests/modules/test_install.py @@ -55,7 +55,9 @@ def test_modules_install_different_branch_fail(self): """Test installing a module from a different branch""" install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) # The FastQC module does not exists in the branch-test branch - assert install_obj.install("fastqc") is False + with pytest.raises(Exception) as excinfo: + install_obj.install("fastqc") + assert "Module 'fastqc' not found in available module" in str(excinfo.value) def test_modules_install_different_branch_succeed(self): """Test installing a module from a different branch""" From 6bc7361043ffe2e34685383a8b5af6fef6f4b36f Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 10:48:56 +0200 Subject: [PATCH 80/89] use self.registry --- nf_core/modules/lint/__init__.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index fcf2d7d066..b75f7e7570 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -200,13 +200,12 @@ def lint_modules( for mod in modules: progress_bar.update(lint_progress, advance=1, test_name=mod.component_name) - self.lint_module(mod, progress_bar, registry=registry, local=local, fix_version=fix_version) + self.lint_module(mod, progress_bar, local=local, fix_version=fix_version) def lint_module( self, mod: NFCoreComponent, progress_bar: rich.progress.Progress, - registry: str, local: bool = False, fix_version: bool = False, ): @@ -228,7 +227,7 @@ def lint_module( # TODO: consider unifying modules and subworkflows lint_module() function and add it to the ComponentLint class # Only check the main script in case of a local module if local: - self.main_nf(mod, fix_version, registry, progress_bar) + self.main_nf(mod, fix_version, self.registry, progress_bar) self.passed += [LintResult(mod, *m) for m in mod.passed] warned = [LintResult(mod, *m) for m in (mod.warned + mod.failed)] if not self.fail_warned: @@ -245,7 +244,7 @@ def lint_module( for test_name in self.lint_tests: if test_name == "main_nf": - getattr(self, test_name)(mod, fix_version, registry, progress_bar) + getattr(self, test_name)(mod, fix_version, self.registry, progress_bar) else: getattr(self, test_name)(mod) From e4717cccccce91402535bd9ff49a619ee81ff323 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 10:53:39 +0200 Subject: [PATCH 81/89] move subworkflows main_nf linting closer to the modules version --- nf_core/subworkflows/lint/main_nf.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nf_core/subworkflows/lint/main_nf.py b/nf_core/subworkflows/lint/main_nf.py index edca32bf30..3ad3f34864 100644 --- a/nf_core/subworkflows/lint/main_nf.py +++ b/nf_core/subworkflows/lint/main_nf.py @@ -31,8 +31,9 @@ def main_nf(_, subworkflow: NFCoreComponent) -> Tuple[List[str], List[str]]: outputs: List[str] = [] # Read the lines directly from the subworkflow - lines = None - if lines is None: + lines: List[str] = [] + + 
if len(lines) == 0: try: # Check whether file exists and load it with open(subworkflow.main_nf) as fh: From b022862f0c1bce3dbc0188f4aa3135e2b8a71812 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 14:49:54 +0200 Subject: [PATCH 82/89] fix handling of missing dir in list command --- nf_core/components/components_command.py | 2 ++ nf_core/components/list.py | 11 ++++++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index bff2467383..f25fb33a6f 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -51,6 +51,8 @@ def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ try: if self.directory: + if self.directory == Path(".") and not nf_dir_req: + self.no_prompts = True self.directory, self.repo_type, self.org = get_repo_info(self.directory, use_prompt=not self.no_prompts) except UserWarning: if nf_dir_req: diff --git a/nf_core/components/list.py b/nf_core/components/list.py index 65a28db716..05a8f71120 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -22,8 +22,17 @@ def __init__( branch: Optional[str] = None, no_pull: bool = False, ) -> None: - super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.remote = remote + super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) + + def _configure_repo_and_paths(self, nf_dir_req=True) -> None: + """ + Override the default with nf_dir_req set to False to allow + info to be run from anywhere and still return remote info + """ + if self.remote: + nf_dir_req = False + return super()._configure_repo_and_paths(nf_dir_req) def list_components( self, keywords: Optional[List[str]] = None, print_json: bool = False From e9bf654aec84fc749b915e5e834e45ec2d035deb Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 14:50:52 +0200 Subject: [PATCH 83/89] remove unnecessary string conversion --- nf_core/components/create.py | 2 +- nf_core/components/nfcore_component.py | 4 ++-- nf_core/modules/lint/environment_yml.py | 2 +- nf_core/modules/lint/meta_yml.py | 4 ++-- nf_core/modules/lint/module_tests.py | 1 + nf_core/modules/modules_differ.py | 2 +- nf_core/pipelines/create_logo.py | 4 ++-- nf_core/utils.py | 2 +- 8 files changed, 11 insertions(+), 10 deletions(-) diff --git a/nf_core/components/create.py b/nf_core/components/create.py index fdcf402b47..c9af6d1adb 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -290,7 +290,7 @@ def _render_template(self) -> Optional[bool]: # Write output to the target file log.debug(f"Writing output to: '{dest_fn}'") dest_fn.parent.mkdir(exist_ok=True, parents=True) - with open(str(dest_fn), "w") as fh: + with open(dest_fn, "w") as fh: log.debug(f"Writing output to: '{dest_fn}'") fh.write(rendered_output) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 129871d68c..db3196be91 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -163,7 +163,7 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st def get_inputs_from_main_nf(self) -> None: """Collect all inputs from the main.nf file.""" inputs: List[str] = [] - with open(str(self.main_nf)) as f: + with open(self.main_nf) as f: data = f.read() # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path 
bar or path foo # regex matches:
@@ -191,7 +191,7 @@ def get_inputs_from_main_nf(self) -> None:
 
     def get_outputs_from_main_nf(self):
         outputs = []
-        with open(str(self.main_nf)) as f:
+        with open(self.main_nf) as f:
             data = f.read()
         # get output values from main.nf after "output:". the names are always after "emit:"
         if "output:" not in data:
diff --git a/nf_core/modules/lint/environment_yml.py b/nf_core/modules/lint/environment_yml.py
index e34b9d5856..341b9cd730 100644
--- a/nf_core/modules/lint/environment_yml.py
+++ b/nf_core/modules/lint/environment_yml.py
@@ -25,7 +25,7 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent)
     if module.environment_yml is None:
         raise LintExceptionError("Module does not have an `environment.yml` file")
     try:
-        with open(str(module.environment_yml)) as fh:
+        with open(module.environment_yml) as fh:
             env_yml = yaml.safe_load(fh)
 
         module.passed.append(("environment_yml_exists", "Module's `environment.yml` exists", module.environment_yml))
diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py
index 32110b7131..4a0ef6e01e 100644
--- a/nf_core/modules/lint/meta_yml.py
+++ b/nf_core/modules/lint/meta_yml.py
@@ -43,7 +43,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None
         module.get_outputs_from_main_nf()
     # Check if we have a patch file, get original file in that case
     meta_yaml = None
-    if module.is_patched:
+    if module.is_patched and module_lint_object.modules_repo.repo_path is not None:
         lines = ModulesDiffer.try_apply_patch(
             module.component_name,
             module_lint_object.modules_repo.repo_path,
@@ -57,7 +57,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None
         raise LintExceptionError("Module does not have a `meta.yml` file")
     if meta_yaml is None:
         try:
-            with open(str(module.meta_yml)) as fh:
+            with open(module.meta_yml) as fh:
                 meta_yaml = yaml.safe_load(fh)
             module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml))
         except FileNotFoundError:
diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py
index 9301db81ee..6722c12129 100644
--- a/nf_core/modules/lint/module_tests.py
+++ b/nf_core/modules/lint/module_tests.py
@@ -73,6 +73,7 @@ def module_tests(_, module: NFCoreComponent):
         with open(module.nftest_main_nf) as fh:
             if "snapshot(" in fh.read():
                 snap_file = module.nftest_testdir / "main.nf.test.snap"
+
                 if snap_file.is_file():
                     module.passed.append(
                         (
diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py
index e310b3bf8b..f6b334eb6d 100644
--- a/nf_core/modules/modules_differ.py
+++ b/nf_core/modules/modules_differ.py
@@ -307,7 +307,7 @@ def per_file_patch(patch_fn: Union[str, Path]) -> Dict[str, List[str]]:
             dict[str, str]: A dictionary indexed by the filenames with the file patches as values
         """
-        with open(str(patch_fn)) as fh:
+        with open(patch_fn) as fh:
             lines = fh.readlines()
 
         patches = {}
diff --git a/nf_core/pipelines/create_logo.py b/nf_core/pipelines/create_logo.py
index 6619b910b5..c54d8f2085 100644
--- a/nf_core/pipelines/create_logo.py
+++ b/nf_core/pipelines/create_logo.py
@@ -62,7 +62,7 @@ def create_logo(
     img: Optional[Image.Image] = None
     if cache_path.is_file():
         log.debug(f"Logo already exists in cache at: {cache_path}. Reusing this file.")
-        img = Image.open(str(cache_path))
+        img = Image.open(cache_path)
 
     if img is None:
         log.debug(f"Creating logo for {text}")
@@ -81,7 +81,7 @@ def create_logo(
             template_fn = "nf-core-repo-logo-base-darkbg.png"
 
         template_path = assets / template_fn
-        img = Image.open(str(template_path))
+        img = Image.open(template_path)
 
         # get the height of the template image
         height = img.size[1]
diff --git a/nf_core/utils.py b/nf_core/utils.py
index a95eea4b69..c12ec191fb 100644
--- a/nf_core/utils.py
+++ b/nf_core/utils.py
@@ -1115,7 +1115,7 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path]
             return Path(directory, CONFIG_PATHS[0]), None
     if not Path(config_fn).is_file():
         raise FileNotFoundError(f"No `.nf-core.yml` file found in the directory '{directory}'")
-    with open(str(config_fn)) as fh:
+    with open(config_fn) as fh:
         tools_config = yaml.safe_load(fh)
 
     # If the file is empty

From d4c06cb17fce07eedf00b2104e5be6fc5fc5548c Mon Sep 17 00:00:00 2001
From: mashehu
Date: Tue, 30 Jul 2024 14:51:05 +0200
Subject: [PATCH 84/89] fix tests

---
 nf_core/modules/lint/main_nf.py     |  9 +++++----
 nf_core/pipelines/refgenie.py       | 14 +++++++++-----
 tests/modules/test_install.py       | 15 ++++++---------
 tests/modules/test_list.py          |  6 +++---
 tests/pipelines/test_params_file.py | 18 +++++++++---------
 tests/subworkflows/test_install.py  |  5 +++--
 6 files changed, 35 insertions(+), 32 deletions(-)

diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py
index 985a92fa1f..358d6a75a4 100644
--- a/nf_core/modules/lint/main_nf.py
+++ b/nf_core/modules/lint/main_nf.py
@@ -43,8 +43,8 @@ def main_nf(
         of ``software`` and ``prefix``
     """
 
-    inputs = []
-    outputs = []
+    inputs: List[str] = []
+    outputs: List[str] = []
 
     # Check if we have a patch file affecting the 'main.nf' file
     # otherwise read the lines directly from the module
@@ -58,7 +58,7 @@ def main_nf(
             reverse=True,
         ).get("main.nf", [""])
 
-    if not lines:
+    if len(lines) == 0:
         try:
             # Check whether file exists and load it
             with open(module.main_nf) as fh:
@@ -66,9 +66,10 @@ def main_nf(
                 module.passed.append(("main_nf_exists", "Module file exists", module.main_nf))
         except FileNotFoundError:
             module.failed.append(("main_nf_exists", "Module file does not exist", module.main_nf))
+            raise FileNotFoundError(f"Module file does not exist: {module.main_nf}")
 
     deprecated_i = ["initOptions", "saveFiles", "getSoftwareName", "getProcessName", "publishDir"]
-    if lines is not None:
+    if len(lines) > 0:
         lines_j = "\n".join(lines)
     else:
         lines_j = ""
diff --git a/nf_core/pipelines/refgenie.py b/nf_core/pipelines/refgenie.py
index 19ef4b5121..f2eb09f330 100644
--- a/nf_core/pipelines/refgenie.py
+++ b/nf_core/pipelines/refgenie.py
@@ -181,14 +181,18 @@ def update_config(rgc):
         log.info("Could not determine path to 'refgenie_genomes.config' file.")
         return False
 
+    if refgenie_genomes_config_file is None:
+        log.info("Could not determine path to 'refgenie_genomes.config' file.")
+        return False
+
     # Save the updated genome config
-    try:
-        with open(str(refgenie_genomes_config_file), "w+") as fh:
-            fh.write(refgenie_genomes)
-        log.info(f"Updated nf-core genomes config: {refgenie_genomes_config_file}")
-    except FileNotFoundError:
+    if not Path(refgenie_genomes_config_file).is_file():
         log.warning(f"Could not write to {refgenie_genomes_config_file}")
         return False
+    else:
+        with open(refgenie_genomes_config_file, "w+") as fh:
+            fh.write(refgenie_genomes)
+        log.info(f"Updated nf-core genomes config: {refgenie_genomes_config_file}")
 
     # Add include statement to NXF_HOME/config
     if nxf_home:
diff --git a/tests/modules/test_install.py b/tests/modules/test_install.py
index 5f45d1b5b4..92d30a494f 100644
--- a/tests/modules/test_install.py
+++ b/tests/modules/test_install.py
@@ -1,4 +1,3 @@
-import os
 from pathlib import Path
 
 import pytest
@@ -17,30 +16,28 @@
 
 
 class TestModulesCreate(TestModules):
-    def test_modules_install_nopipeline(self):
-        """Test installing a module - no pipeline given"""
-        self.pipeline_dir = None
-        assert self.mods_install.install("foo") is False
-
     @with_temporary_folder
     def test_modules_install_emptypipeline(self, tmpdir):
         """Test installing a module - empty dir given"""
         Path(tmpdir, "nf-core-pipe").mkdir()
         self.mods_install.directory = Path(tmpdir, "nf-core-pipe")
         with pytest.raises(UserWarning) as excinfo:
-            self.mods_install.install("foo")
+            self.mods_install.install("fastp")
         assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value)
 
     def test_modules_install_nomodule(self):
         """Test installing a module - unrecognised module given"""
-        assert self.mods_install.install("foo") is False
+        with pytest.raises(ValueError) as excinfo:
+            self.mods_install.install("foo")
+        assert excinfo.typename == "ValueError"
+        assert "Module 'foo' not found in available modules" in self.caplog.text
 
     def test_modules_install_trimgalore(self):
         """Test installing a module - TrimGalore!"""
         assert self.mods_install.install("trimgalore") is not False
         assert self.mods_install.directory is not None
         module_path = Path(self.mods_install.directory, "modules", "nf-core", "trimgalore")
-        assert os.path.exists(module_path)
+        assert module_path.exists()
 
     def test_modules_install_trimgalore_twice(self):
         """Test installing a module - TrimGalore! already there"""
diff --git a/tests/modules/test_list.py b/tests/modules/test_list.py
index a170f6d6fc..282f044c1b 100644
--- a/tests/modules/test_list.py
+++ b/tests/modules/test_list.py
@@ -10,10 +10,10 @@ from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL
 
 
-class TestModulesCreate(TestModules):
+class TestModulesList(TestModules):
     def test_modules_list_remote(self):
         """Test listing available modules"""
-        mods_list = nf_core.modules.list.ModuleList(remote=True)
+        mods_list = nf_core.modules.list.ModuleList()
         listed_mods = mods_list.list_components()
         console = Console(record=True)
         console.print(listed_mods)
@@ -22,7 +22,7 @@ def test_modules_list_remote(self):
 
     def test_modules_list_remote_gitlab(self):
         """Test listing the modules in the remote gitlab repo"""
-        mods_list = nf_core.modules.list.ModuleList(remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH)
+        mods_list = nf_core.modules.list.ModuleList(remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH)
         listed_mods = mods_list.list_components()
         console = Console(record=True)
         console.print(listed_mods)
diff --git a/tests/pipelines/test_params_file.py b/tests/pipelines/test_params_file.py
index 7e3e4b4f42..22a6182acd 100644
--- a/tests/pipelines/test_params_file.py
+++ b/tests/pipelines/test_params_file.py
@@ -20,15 +20,15 @@ def setup_class(cls):
         # Create a test pipeline in temp directory
         cls.tmp_dir = tempfile.mkdtemp()
-        cls.template_dir = os.path.join(cls.tmp_dir, "wf")
+        cls.template_dir = Path(cls.tmp_dir, "wf")
         create_obj = nf_core.pipelines.create.create.PipelineCreate(
             "testpipeline", "a description", "Me", outdir=cls.template_dir, no_git=True
         )
         create_obj.init_pipeline()
 
-        cls.template_schema = os.path.join(cls.template_dir, "nextflow_schema.json")
+        cls.template_schema = Path(cls.template_dir, "nextflow_schema.json")
         cls.params_template_builder = ParamsFileBuilder(cls.template_dir)
-        cls.invalid_template_schema = os.path.join(cls.template_dir, "nextflow_schema_invalid.json")
+        cls.invalid_template_schema = Path(cls.template_dir, "nextflow_schema_invalid.json")
 
         # Remove the allOf section to make the schema invalid
         with open(cls.template_schema) as fh:
@@ -40,14 +40,14 @@ def setup_class(cls):
 
     @classmethod
     def teardown_class(cls):
-        if os.path.exists(cls.tmp_dir):
+        if Path(cls.tmp_dir).exists():
             shutil.rmtree(cls.tmp_dir)
 
     def test_build_template(self):
-        outfile = os.path.join(self.tmp_dir, "params-file.yml")
-        self.params_template_builder.write_params_file(outfile)
+        outfile = Path(self.tmp_dir, "params-file.yml")
+        self.params_template_builder.write_params_file(str(outfile))
 
-        assert os.path.exists(outfile)
+        assert outfile.exists()
 
         with open(outfile) as fh:
             out = fh.read()
@@ -56,9 +56,9 @@ def test_build_template(self):
 
     def test_build_template_invalid_schema(self, caplog):
         """Build a schema from a template"""
-        outfile = os.path.join(self.tmp_dir, "params-file-invalid.yml")
+        outfile = Path(self.tmp_dir, "params-file-invalid.yml")
         builder = ParamsFileBuilder(self.invalid_template_schema)
-        res = builder.write_params_file(outfile)
+        res = builder.write_params_file(str(outfile))
 
         assert res is False
         assert "Pipeline schema file is invalid" in caplog.text
diff --git a/tests/subworkflows/test_install.py b/tests/subworkflows/test_install.py
index e6ba9439a4..00ba888414 100644
--- a/tests/subworkflows/test_install.py
+++ b/tests/subworkflows/test_install.py
@@ -55,9 +55,10 @@ def test_subworkflows_install_emptypipeline(self, tmpdir):
 
     def test_subworkflows_install_nosubworkflow(self):
         """Test installing a subworkflow - unrecognised subworkflow given"""
-        with pytest.raises(SystemError) as excinfo:
+        with pytest.raises(ValueError) as excinfo:
             self.subworkflow_install.install("foo")
-        assert "Subworkflow 'foo' not found in available subworkflows" in str(excinfo.value)
+        assert excinfo.typename == "ValueError"
+        assert "Subworkflow 'foo' not found in available subworkflows" in self.caplog.text
 
     def test_subworkflows_install_bam_sort_stats_samtools_twice(self):
         """Test installing a subworkflow - bam_sort_stats_samtools already there"""

From d57c815b83213154179be3b3e390557a6404d3b5 Mon Sep 17 00:00:00 2001
From: mashehu
Date: Tue, 30 Jul 2024 15:30:46 +0200
Subject: [PATCH 85/89] add type hints to utils functions

---
 nf_core/components/update.py    |  4 +--
 nf_core/modules/modules_json.py |  2 +-
 nf_core/pipelines/download.py   |  2 +-
 nf_core/utils.py                | 48 ++++++++++++++++-----------------
 tests/modules/test_list.py      |  2 +-
 5 files changed, 29 insertions(+), 29 deletions(-)

diff --git a/nf_core/components/update.py b/nf_core/components/update.py
index eb15f976bf..3e4694adc8 100644
--- a/nf_core/components/update.py
+++ b/nf_core/components/update.py
@@ -755,7 +755,7 @@ def move_files_from_tmp_dir(self, component: str, install_folder: str, repo_path
             config_files = [f for f in pipeline_files if str(f).endswith(".config")]
             for config_file in config_files:
                 log.debug(f"Moving '{component}/{config_file}' to updated component")
-                shutil.move(pipeline_path / config_file, temp_component_dir / config_file)
+                shutil.move(str(pipeline_path / config_file), temp_component_dir / config_file)
                 files.append(temp_component_dir / config_file)
 
         else:
@@ -772,7 +772,7 @@ def move_files_from_tmp_dir(self, component: str, install_folder: str, repo_path
                 log.debug(f"Moving '{file}' to updated component")
                 dest = Path(pipeline_path, file)
                 dest.parent.mkdir(parents=True, exist_ok=True)
-                shutil.move(path, dest)
+                shutil.move(str(path), dest)
 
         log.info(f"Updating '{repo_path}/{component}'")
         log.debug(f"Updating {self.component_type[:-1]} '{component}' to {new_version} from {repo_path}")
diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py
index 19649a57cd..536589d81e 100644
--- a/nf_core/modules/modules_json.py
+++ b/nf_core/modules/modules_json.py
@@ -430,7 +430,7 @@ def move_component_to_local(self, component_type: str, component: str, repo_name
             # Add a time suffix to the path to make it unique
             # (do it again and again if it didn't work out...)
             to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}"
-        shutil.move(current_path, local_dir / to_name)
+        shutil.move(str(current_path), local_dir / to_name)
 
     def unsynced_components(self) -> Tuple[List[str], List[str], dict]:
         """
diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py
index 704fe91b2f..97453b127e 100644
--- a/nf_core/pipelines/download.py
+++ b/nf_core/pipelines/download.py
@@ -1388,7 +1388,7 @@ def singularity_pull_image(
         # where the output of 'singularity pull' is first generated before being copied to the NXF_SINGULARITY_CACHDIR.
         # if not defined by the Singularity administrators, then use the temporary directory to avoid storing the images in the work directory.
         if os.environ.get("SINGULARITY_CACHEDIR") is None:
-            os.environ["SINGULARITY_CACHEDIR"] = NFCORE_CACHE_DIR
+            os.environ["SINGULARITY_CACHEDIR"] = str(NFCORE_CACHE_DIR)
 
         # Sometimes, container still contain an explicit library specification, which
         # resulted in attempted pulls e.g. from docker://quay.io/quay.io/qiime2/core:2022.11
diff --git a/nf_core/utils.py b/nf_core/utils.py
index c12ec191fb..ea850a5d24 100644
--- a/nf_core/utils.py
+++ b/nf_core/utils.py
@@ -19,7 +19,7 @@
 import time
 from contextlib import contextmanager
 from pathlib import Path
-from typing import Any, Dict, Generator, List, Optional, Tuple, Union
+from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
 
 import git
 import prompt_toolkit.styles
@@ -66,11 +66,11 @@
     ]
 )
 
-NFCORE_CACHE_DIR = os.path.join(
-    os.environ.get("XDG_CACHE_HOME", os.path.join(os.getenv("HOME") or "", ".cache")),
+NFCORE_CACHE_DIR = Path(
+    os.environ.get("XDG_CACHE_HOME", Path(os.getenv("HOME") or "", ".cache")),
     "nfcore",
 )
-NFCORE_DIR = os.path.join(os.environ.get("XDG_CONFIG_HOME", os.path.join(os.getenv("HOME") or "", ".config")), "nfcore")
+NFCORE_DIR = Path(os.environ.get("XDG_CONFIG_HOME", os.path.join(os.getenv("HOME") or "", ".config")), "nfcore")
 
 
 def fetch_remote_version(source_url):
@@ -358,17 +358,17 @@ def run_cmd(executable: str, cmd: str) -> Union[Tuple[bytes, bytes], None]:
     )
 
 
-def setup_nfcore_dir():
+def setup_nfcore_dir() -> bool:
    """Creates a directory for files that need to be kept between sessions

    Currently only used for keeping local copies of modules repos
    """
-    if not os.path.exists(NFCORE_DIR):
-        os.makedirs(NFCORE_DIR)
-        return True
+    if not NFCORE_DIR.exists():
+        NFCORE_DIR.mkdir(parents=True)
+    return True
 
 
-def setup_requests_cachedir() -> dict:
+def setup_requests_cachedir() -> Dict[str, Union[Path, datetime.timedelta, str]]:
     """Sets up local caching for faster remote HTTP requests.
 
     Caching directory will be set up in the user's home directory under
@@ -377,10 +377,10 @@ def setup_requests_cachedir() -> dict:
     Uses requests_cache monkey patching.
     Also returns the config dict so that we can use the same setup with a Session.
     """
-    pyversion = ".".join(str(v) for v in sys.version_info[0:3])
-    cachedir = setup_nfcore_cachedir(f"cache_{pyversion}")
-    config = {
-        "cache_name": os.path.join(cachedir, "github_info"),
+    pyversion: str = ".".join(str(v) for v in sys.version_info[0:3])
+    cachedir: Path = setup_nfcore_cachedir(f"cache_{pyversion}")
+    config: Dict[str, Union[Path, datetime.timedelta, str]] = {
+        "cache_name": Path(cachedir, "github_info"),
         "expire_after": datetime.timedelta(hours=1),
         "backend": "sqlite",
     }
@@ -403,7 +403,7 @@ def setup_nfcore_cachedir(cache_fn: Union[str, Path]) -> Path:
     return cachedir
 
 
-def wait_cli_function(poll_func, refresh_per_second=20):
+def wait_cli_function(poll_func: Callable[[], bool], refresh_per_second: int = 20) -> None:
     """
     Display a command-line spinner while calling a function repeatedly.
 
@@ -427,7 +427,7 @@ def wait_cli_function(poll_func, refresh_per_second=20):
             raise AssertionError("Cancelled!")
 
 
-def poll_nfcore_web_api(api_url, post_data=None):
+def poll_nfcore_web_api(api_url: str, post_data: Optional[Dict] = None) -> Dict:
     """
     Poll the nf-core website API
 
@@ -448,7 +448,7 @@ def poll_nfcore_web_api(api_url, post_data=None):
         raise AssertionError(f"Could not connect to URL: {api_url}")
     else:
         if response.status_code != 200 and response.status_code != 301:
-            log.debug(f"Response content:\n{response.content}")
+            log.debug(f"Response content:\n{response.content.decode()}")
             raise AssertionError(
                 f"Could not access remote API results: {api_url} (HTML {response.status_code} Error)"
             )
@@ -460,7 +460,7 @@ def poll_nfcore_web_api(api_url, post_data=None):
         if "status" not in web_response:
             raise AssertionError()
     except (json.decoder.JSONDecodeError, AssertionError, TypeError):
-        log.debug(f"Response content:\n{response.content}")
+        log.debug(f"Response content:\n{response.content.decode()}")
         raise AssertionError(
             f"nf-core website API results response not recognised: {api_url}\n "
             "See verbose log for full response"
@@ -476,14 +476,14 @@ class GitHubAPISession(requests_cache.CachedSession):
     such as automatically setting up GitHub authentication if we can.
     """
 
-    def __init__(self):  # pylint: disable=super-init-not-called
-        self.auth_mode = None
-        self.return_ok = [200, 201]
-        self.return_retry = [403]
-        self.return_unauthorised = [401]
-        self.has_init = False
+    def __init__(self) -> None:
+        self.auth_mode: Optional[str] = None
+        self.return_ok: List[int] = [200, 201]
+        self.return_retry: List[int] = [403]
+        self.return_unauthorised: List[int] = [401]
+        self.has_init: bool = False
 
-    def lazy_init(self):
+    def lazy_init(self) -> None:
         """
         Initialise the object.
diff --git a/tests/modules/test_list.py b/tests/modules/test_list.py
index 282f044c1b..3e92a33ab4 100644
--- a/tests/modules/test_list.py
+++ b/tests/modules/test_list.py
@@ -67,7 +67,7 @@ def test_modules_list_local_json(self):
         assert "fastqc" in listed_mods
         assert "multiqc" in listed_mods
 
-    def test_modules_list_remote_json(self):
+    def test_modules_list_remote_json(self) -> None:
         """Test listing available modules as JSON"""
         mods_list = nf_core.modules.list.ModuleList(remote=True)
         listed_mods: str = str(mods_list.list_components(print_json=True))

From 495e05acc406b64e1d21e008aed92b443c5fb40e Mon Sep 17 00:00:00 2001
From: mashehu
Date: Tue, 30 Jul 2024 15:46:39 +0200
Subject: [PATCH 86/89] fix refgenie tests

---
 nf_core/pipelines/refgenie.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/nf_core/pipelines/refgenie.py b/nf_core/pipelines/refgenie.py
index f2eb09f330..426ca5eb7d 100644
--- a/nf_core/pipelines/refgenie.py
+++ b/nf_core/pipelines/refgenie.py
@@ -186,13 +186,13 @@ def update_config(rgc):
         return False
 
     # Save the updated genome config
-    if not Path(refgenie_genomes_config_file).is_file():
-        log.warning(f"Could not write to {refgenie_genomes_config_file}")
-        return False
-    else:
+    try:
         with open(refgenie_genomes_config_file, "w+") as fh:
             fh.write(refgenie_genomes)
         log.info(f"Updated nf-core genomes config: {refgenie_genomes_config_file}")
+    except FileNotFoundError:
+        log.info(f"Could not write to {refgenie_genomes_config_file}")
+        return False
 
     # Add include statement to NXF_HOME/config
     if nxf_home:

From 4c07bdfef5542de71954dbaedba6293e71bfcc9d Mon Sep 17 00:00:00 2001
From: mashehu
Date: Tue, 30 Jul 2024 15:57:15 +0200
Subject: [PATCH 87/89] fix tests

---
 nf_core/subworkflows/list.py |  2 +-
 nf_core/utils.py             | 10 ++++++++--
 2 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/nf_core/subworkflows/list.py b/nf_core/subworkflows/list.py
index 5f849a1f9c..9e84d6cbe0 100644
--- a/nf_core/subworkflows/list.py
+++ b/nf_core/subworkflows/list.py
@@ -10,7 +10,7 @@ class SubworkflowList(ComponentList):
     def __init__(
         self,
-        pipeline_dir: Union[str, Path],
+        pipeline_dir: Union[str, Path] = ".",
         remote: bool = True,
         remote_url: Optional[str] = None,
         branch: Optional[str] = None,
diff --git a/nf_core/utils.py b/nf_core/utils.py
index ea850a5d24..48b7eae294 100644
--- a/nf_core/utils.py
+++ b/nf_core/utils.py
@@ -448,7 +448,10 @@ def poll_nfcore_web_api(api_url: str, post_data: Optional[Dict] = None) -> Dict:
         raise AssertionError(f"Could not connect to URL: {api_url}")
     else:
         if response.status_code != 200 and response.status_code != 301:
-            log.debug(f"Response content:\n{response.content.decode()}")
+            response_content = response.content
+            if isinstance(response_content, bytes):
+                response_content = response_content.decode()
+            log.debug(f"Response content:\n{response_content}")
             raise AssertionError(
                 f"Could not access remote API results: {api_url} (HTML {response.status_code} Error)"
             )
@@ -460,7 +463,10 @@ def poll_nfcore_web_api(api_url: str, post_data: Optional[Dict] = None) -> Dict:
         if "status" not in web_response:
             raise AssertionError()
     except (json.decoder.JSONDecodeError, AssertionError, TypeError):
-        log.debug(f"Response content:\n{response.content.decode()}")
+        response_content = response.content
+        if isinstance(response_content, bytes):
+            response_content = response_content.decode()
+        log.debug(f"Response content:\n{response_content}")
         raise AssertionError(
             f"nf-core website API results response not recognised: {api_url}\n "
             "See verbose log for full response"

From 4f755380032ce01d6b1222036ae68172976e40f5 Mon Sep 17 00:00:00 2001
From: mashehu
Date: Mon, 5 Aug 2024 08:27:26 +0200
Subject: [PATCH 88/89] update ruff

---
 .github/workflows/changelog.py    | 9 +++++----
 .pre-commit-config.yaml           | 2 +-
 nf_core/modules/modules_differ.py | 2 +-
 3 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/changelog.py b/.github/workflows/changelog.py
index 471665e4b1..24130e65c4 100644
--- a/.github/workflows/changelog.py
+++ b/.github/workflows/changelog.py
@@ -1,3 +1,4 @@
+#! /usr/bin/env python3
 """
 Taken from https://github.com/MultiQC/MultiQC/blob/main/.github/workflows/changelog.py
 and updated for nf-core
@@ -18,7 +19,7 @@
 import re
 import sys
 from pathlib import Path
-from typing import List
+from typing import List, Tuple
 
 REPO_URL = "https://github.com/nf-core/tools"
@@ -32,7 +33,7 @@
 assert pr_number, pr_number
 
 # Trim the PR number added when GitHub squashes commits, e.g. "Template: Updated (#2026)"
-pr_title = pr_title.removesuffix(f" (#{pr_number})")
+pr_title = pr_title.removesuffix(f" (#{pr_number})")  # type: ignore
 
 changelog_path = workspace_path / "CHANGELOG.md"
@@ -50,7 +51,7 @@
     sys.exit(0)
 
 
-def _determine_change_type(pr_title) -> tuple[str, str]:
+def _determine_change_type(pr_title) -> Tuple[str, str]:
     """
     Determine the type of the PR: Template, Download, Linting, Modules, Subworkflows, or General
     Returns a tuple of the section name and the module info.
@@ -85,7 +86,7 @@
 pr_link = f"([#{pr_number}]({REPO_URL}/pull/{pr_number}))"
 
 # Handle manual changelog entries through comments.
-if comment := comment.removeprefix("@nf-core-bot changelog").strip():
+if comment := comment.removeprefix("@nf-core-bot changelog").strip():  # type: ignore
     print(f"Adding manual changelog entry: {comment}")
     pr_title = comment
     new_lines = [
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f763fa6658..c83cb5a4b7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.5.1
+    rev: v0.5.6
     hooks:
       - id: ruff # linter
         args: [--fix, --exit-non-zero-on-fix] # sort imports and fix
diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py
index f6b334eb6d..b6d7f0d0fa 100644
--- a/nf_core/modules/modules_differ.py
+++ b/nf_core/modules/modules_differ.py
@@ -312,7 +312,7 @@ def per_file_patch(patch_fn: Union[str, Path]) -> Dict[str, List[str]]:
 
     patches = {}
     i = 0
-    patch_lines: list[str] = []
+    patch_lines: List[str] = []
     key = "preamble"
     while i < len(lines):
         line = lines[i]

From d4d231444d9a342624b7fb8c2154ba07424bf34d Mon Sep 17 00:00:00 2001
From: mashehu
Date: Mon, 12 Aug 2024 10:22:26 +0200
Subject: [PATCH 89/89] reset lintconfigtype

---
 nf_core/utils.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/nf_core/utils.py b/nf_core/utils.py
index 9b3e56da5f..d0546a5c5e 100644
--- a/nf_core/utils.py
+++ b/nf_core/utils.py
@@ -19,7 +19,7 @@
 import time
 from contextlib import contextmanager
 from pathlib import Path
-from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, TypedDict, Union
+from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
 
 import git
 import prompt_toolkit.styles
@@ -1077,11 +1077,7 @@ def get(self, item: str, default: Any = None) -> Any:
         return getattr(self, item, default)
 
 
-class LintConfigType(TypedDict):
-    files_exist: Optional[Union[List, List[str], List[Dict[str, List[str]]]]]
-    files_unchanged: Optional[Union[List[str], List[Dict[str, List[str]]]]]
-    nextflow_config: Optional[Union[List[str], List[Dict[str, List[str]]]]]
-    multiqc_config: Optional[Union[List[str], bool]]
+LintConfigType = Optional[Dict[str, Union[List[str], List[Dict[str, List[str]]], bool]]]
 
 
 class NFCoreYamlConfig(BaseModel):
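
[Editorial aside, not part of the patches: the relaxed LintConfigType alias above trades the old per-key TypedDict for a plain dict whose values may be a list of strings, a list of str-to-list mappings, or a bool. A hedged illustration of the shapes it admits; the keys mirror the removed TypedDict fields, the values are invented for the example:]

    # Each value matches one arm of the Union in the alias above.
    lint_config: LintConfigType = {
        "files_exist": ["CODE_OF_CONDUCT.md"],          # List[str]
        "files_unchanged": [{"LICENSE": ["nf-core"]}],  # List[Dict[str, List[str]]]
        "multiqc_config": False,                        # bool
    }
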