diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index e64b466c0f..e9e98d457c 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -57,7 +57,7 @@ We aim to write function docstrings according to the [Google Python style-guide] You can find this documentation here: [https://nf-co.re/tools-docs/](https://nf-co.re/tools-docs/) If you would like to test the documentation, you can install Sphinx locally by following Sphinx's [installation instruction](https://www.sphinx-doc.org/en/master/usage/installation.html). -Once done, you can run `make clean` and then `make html` in the root directory of `nf-core tools`. +Once done, you can run `make clean` and then `make html` in the `docs/api` directory of `nf-core tools`. The HTML will then be generated in `docs/api/_build/html`. ## Tests diff --git a/.gitignore b/.gitignore index 77ce81a93b..e11134949a 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ .coverage .pytest_cache +docs/api/_build # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/docs/api/_src/bump_version.rst b/docs/api/_src/api/bump_version.rst similarity index 100% rename from docs/api/_src/bump_version.rst rename to docs/api/_src/api/bump_version.rst diff --git a/docs/api/_src/create.rst b/docs/api/_src/api/create.rst similarity index 100% rename from docs/api/_src/create.rst rename to docs/api/_src/api/create.rst diff --git a/docs/api/_src/download.rst b/docs/api/_src/api/download.rst similarity index 100% rename from docs/api/_src/download.rst rename to docs/api/_src/api/download.rst diff --git a/docs/api/_src/api/index.rst b/docs/api/_src/api/index.rst new file mode 100644 index 0000000000..4ddec5ecbe --- /dev/null +++ b/docs/api/_src/api/index.rst @@ -0,0 +1,9 @@ +API Reference +============= + +.. 
toctree:: + :maxdepth: 2 + :caption: Tests: + :glob: + + * diff --git a/docs/api/_src/launch.rst b/docs/api/_src/api/launch.rst similarity index 89% rename from docs/api/_src/launch.rst rename to docs/api/_src/api/launch.rst index 416c2c99ee..060d97009e 100644 --- a/docs/api/_src/launch.rst +++ b/docs/api/_src/api/launch.rst @@ -1,5 +1,5 @@ nf_core.launch -============ +============== .. automodule:: nf_core.launch :members: diff --git a/docs/api/_src/licences.rst b/docs/api/_src/api/licences.rst similarity index 100% rename from docs/api/_src/licences.rst rename to docs/api/_src/api/licences.rst diff --git a/docs/api/_src/api/lint.rst b/docs/api/_src/api/lint.rst new file mode 100644 index 0000000000..8e0cfdff97 --- /dev/null +++ b/docs/api/_src/api/lint.rst @@ -0,0 +1,14 @@ +nf_core.lint +============ + +.. seealso:: See the `Lint Tests <../lint_tests/index.html>`_ docs for information about specific linting functions. + +.. automodule:: nf_core.lint + :members: run_linting + :undoc-members: + :show-inheritance: + +.. autoclass:: nf_core.lint.PipelineLint + :members: _lint_pipeline + :private-members: _print_results, _get_results_md, _save_json_results, _wrap_quotes, _strip_ansi_codes + :show-inheritance: diff --git a/docs/api/_src/list.rst b/docs/api/_src/api/list.rst similarity index 100% rename from docs/api/_src/list.rst rename to docs/api/_src/api/list.rst diff --git a/docs/api/_src/modules.rst b/docs/api/_src/api/modules.rst similarity index 88% rename from docs/api/_src/modules.rst rename to docs/api/_src/api/modules.rst index 44c341175e..6bb6e0547d 100644 --- a/docs/api/_src/modules.rst +++ b/docs/api/_src/api/modules.rst @@ -1,5 +1,5 @@ nf_core.modules -============ +=============== .. 
automodule:: nf_core.modules :members: diff --git a/docs/api/_src/schema.rst b/docs/api/_src/api/schema.rst similarity index 89% rename from docs/api/_src/schema.rst rename to docs/api/_src/api/schema.rst index e1cefb98d9..d2d346c28c 100644 --- a/docs/api/_src/schema.rst +++ b/docs/api/_src/api/schema.rst @@ -1,5 +1,5 @@ nf_core.schema -============ +============== .. automodule:: nf_core.schema :members: diff --git a/docs/api/_src/sync.rst b/docs/api/_src/api/sync.rst similarity index 100% rename from docs/api/_src/sync.rst rename to docs/api/_src/api/sync.rst diff --git a/docs/api/_src/utils.rst b/docs/api/_src/api/utils.rst similarity index 100% rename from docs/api/_src/utils.rst rename to docs/api/_src/api/utils.rst diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py index 4766da0c6e..d863a80d28 100644 --- a/docs/api/_src/conf.py +++ b/docs/api/_src/conf.py @@ -74,7 +74,8 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = "nature" +# html_theme = "nature" +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the diff --git a/docs/api/_src/index.rst b/docs/api/_src/index.rst index facd9f13bf..9236b45331 100644 --- a/docs/api/_src/index.rst +++ b/docs/api/_src/index.rst @@ -7,12 +7,19 @@ Welcome to nf-core tools API documentation! =========================================== .. toctree:: + :hidden: :maxdepth: 2 :caption: Contents: :glob: - * + lint_tests/index.rst + api/index.rst +This documentation is for the ``nf-core/tools`` package. + +Primarily, it describes the different `code lint tests `_ +run by ``nf-core lint`` (typically visited by a developer when their pipeline fails a given +test), and also reference for the ``nf_core`` `Python package API `_. 
Indices and tables ================== diff --git a/docs/api/_src/lint.rst b/docs/api/_src/lint.rst deleted file mode 100644 index 532801c551..0000000000 --- a/docs/api/_src/lint.rst +++ /dev/null @@ -1,8 +0,0 @@ -nf_core.lint -============ - -.. automodule:: nf_core.lint - :members: - :undoc-members: - :show-inheritance: - :private-members: diff --git a/docs/api/_src/lint_tests/actions_awsfulltest.rst b/docs/api/_src/lint_tests/actions_awsfulltest.rst new file mode 100644 index 0000000000..daf414a1b7 --- /dev/null +++ b/docs/api/_src/lint_tests/actions_awsfulltest.rst @@ -0,0 +1,4 @@ +actions_awsfulltest +=================== + +.. automethod:: nf_core.lint.PipelineLint.actions_awsfulltest diff --git a/docs/api/_src/lint_tests/actions_awstest.rst b/docs/api/_src/lint_tests/actions_awstest.rst new file mode 100644 index 0000000000..b27c830285 --- /dev/null +++ b/docs/api/_src/lint_tests/actions_awstest.rst @@ -0,0 +1,4 @@ +actions_awstest +=============== + +.. automethod:: nf_core.lint.PipelineLint.actions_awstest diff --git a/docs/api/_src/lint_tests/actions_branch_protection.rst b/docs/api/_src/lint_tests/actions_branch_protection.rst new file mode 100644 index 0000000000..5b89242cf5 --- /dev/null +++ b/docs/api/_src/lint_tests/actions_branch_protection.rst @@ -0,0 +1,4 @@ +actions_branch_protection +========================= + +.. automethod:: nf_core.lint.PipelineLint.actions_branch_protection diff --git a/docs/api/_src/lint_tests/actions_ci.rst b/docs/api/_src/lint_tests/actions_ci.rst new file mode 100644 index 0000000000..28bf91cce5 --- /dev/null +++ b/docs/api/_src/lint_tests/actions_ci.rst @@ -0,0 +1,4 @@ +actions_ci +========== + +.. automethod:: nf_core.lint.PipelineLint.actions_ci diff --git a/docs/api/_src/lint_tests/actions_lint.rst b/docs/api/_src/lint_tests/actions_lint.rst new file mode 100644 index 0000000000..3974bea714 --- /dev/null +++ b/docs/api/_src/lint_tests/actions_lint.rst @@ -0,0 +1,4 @@ +actions_lint +============ + +.. 
automethod:: nf_core.lint.PipelineLint.actions_lint diff --git a/docs/api/_src/lint_tests/conda_dockerfile.rst b/docs/api/_src/lint_tests/conda_dockerfile.rst new file mode 100644 index 0000000000..eaa8e2fd92 --- /dev/null +++ b/docs/api/_src/lint_tests/conda_dockerfile.rst @@ -0,0 +1,4 @@ +conda_dockerfile +================ + +.. automethod:: nf_core.lint.PipelineLint.conda_dockerfile diff --git a/docs/api/_src/lint_tests/conda_env_yaml.rst b/docs/api/_src/lint_tests/conda_env_yaml.rst new file mode 100644 index 0000000000..7764f401cc --- /dev/null +++ b/docs/api/_src/lint_tests/conda_env_yaml.rst @@ -0,0 +1,6 @@ +conda_env_yaml +============== + +.. automethod:: nf_core.lint.PipelineLint.conda_env_yaml +.. automethod:: nf_core.lint.PipelineLint._anaconda_package +.. automethod:: nf_core.lint.PipelineLint._pip_package diff --git a/docs/api/_src/lint_tests/cookiecutter_strings.rst b/docs/api/_src/lint_tests/cookiecutter_strings.rst new file mode 100644 index 0000000000..9fe30cae48 --- /dev/null +++ b/docs/api/_src/lint_tests/cookiecutter_strings.rst @@ -0,0 +1,4 @@ +cookiecutter_strings +==================== + +.. automethod:: nf_core.lint.PipelineLint.cookiecutter_strings diff --git a/docs/api/_src/lint_tests/files_exist.rst b/docs/api/_src/lint_tests/files_exist.rst new file mode 100644 index 0000000000..04b87f3277 --- /dev/null +++ b/docs/api/_src/lint_tests/files_exist.rst @@ -0,0 +1,4 @@ +files_exist +=========== + +.. automethod:: nf_core.lint.PipelineLint.files_exist diff --git a/docs/api/_src/lint_tests/index.rst b/docs/api/_src/lint_tests/index.rst new file mode 100644 index 0000000000..641c85d9e7 --- /dev/null +++ b/docs/api/_src/lint_tests/index.rst @@ -0,0 +1,9 @@ +Lint tests +============================================ + +.. 
toctree:: + :maxdepth: 2 + :caption: Tests: + :glob: + + * diff --git a/docs/api/_src/lint_tests/licence.rst b/docs/api/_src/lint_tests/licence.rst new file mode 100644 index 0000000000..0073569b67 --- /dev/null +++ b/docs/api/_src/lint_tests/licence.rst @@ -0,0 +1,4 @@ +licence +======= + +.. automethod:: nf_core.lint.PipelineLint.licence diff --git a/docs/api/_src/lint_tests/nextflow_config.rst b/docs/api/_src/lint_tests/nextflow_config.rst new file mode 100644 index 0000000000..68fe8708e7 --- /dev/null +++ b/docs/api/_src/lint_tests/nextflow_config.rst @@ -0,0 +1,4 @@ +nextflow_config +=============== + +.. automethod:: nf_core.lint.PipelineLint.nextflow_config diff --git a/docs/api/_src/lint_tests/pipeline_name_conventions.rst b/docs/api/_src/lint_tests/pipeline_name_conventions.rst new file mode 100644 index 0000000000..8a63f9759a --- /dev/null +++ b/docs/api/_src/lint_tests/pipeline_name_conventions.rst @@ -0,0 +1,4 @@ +pipeline_name_conventions +========================= + +.. automethod:: nf_core.lint.PipelineLint.pipeline_name_conventions diff --git a/docs/api/_src/lint_tests/pipeline_todos.rst b/docs/api/_src/lint_tests/pipeline_todos.rst new file mode 100644 index 0000000000..259cc693e2 --- /dev/null +++ b/docs/api/_src/lint_tests/pipeline_todos.rst @@ -0,0 +1,4 @@ +pipeline_todos +============== + +.. automethod:: nf_core.lint.PipelineLint.pipeline_todos diff --git a/docs/api/_src/lint_tests/readme.rst b/docs/api/_src/lint_tests/readme.rst new file mode 100644 index 0000000000..dca8a32d11 --- /dev/null +++ b/docs/api/_src/lint_tests/readme.rst @@ -0,0 +1,4 @@ +readme +====== + +.. automethod:: nf_core.lint.PipelineLint.readme diff --git a/docs/api/_src/lint_tests/schema_lint.rst b/docs/api/_src/lint_tests/schema_lint.rst new file mode 100644 index 0000000000..7d9697c8e9 --- /dev/null +++ b/docs/api/_src/lint_tests/schema_lint.rst @@ -0,0 +1,4 @@ +schema_lint +=========== + +.. 
automethod:: nf_core.lint.PipelineLint.schema_lint diff --git a/docs/api/_src/lint_tests/schema_params.rst b/docs/api/_src/lint_tests/schema_params.rst new file mode 100644 index 0000000000..0997774c50 --- /dev/null +++ b/docs/api/_src/lint_tests/schema_params.rst @@ -0,0 +1,4 @@ +schema_params +============= + +.. automethod:: nf_core.lint.PipelineLint.schema_params diff --git a/docs/api/_src/lint_tests/version_consistency.rst b/docs/api/_src/lint_tests/version_consistency.rst new file mode 100644 index 0000000000..f0b334fc1c --- /dev/null +++ b/docs/api/_src/lint_tests/version_consistency.rst @@ -0,0 +1,4 @@ +version_consistency +=================== + +.. automethod:: nf_core.lint.PipelineLint.version_consistency diff --git a/docs/api/make_lint_rst.py b/docs/api/make_lint_rst.py new file mode 100644 index 0000000000..48305a9f58 --- /dev/null +++ b/docs/api/make_lint_rst.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python + +import fnmatch +import os +import nf_core.lint + +docs_basedir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "_src", "lint_tests") + +# Get list of existing .rst files +existing_docs = [] +for fn in os.listdir(docs_basedir): + if fnmatch.fnmatch(fn, "*.rst") and not fnmatch.fnmatch(fn, "index.rst"): + existing_docs.append(os.path.join(docs_basedir, fn)) + +# Make .rst file for each test name +lint_obj = nf_core.lint.PipelineLint("", True) +rst_template = """{0} +{1} + +.. 
automethod:: nf_core.lint.PipelineLint.{0} +""" + +for test_name in lint_obj.lint_tests: + fn = os.path.join(docs_basedir, "{}.rst".format(test_name)) + if os.path.exists(fn): + existing_docs.remove(fn) + else: + with open(fn, "w") as fh: + fh.write(rst_template.format(test_name, len(test_name) * "=")) + +for fn in existing_docs: + os.remove(fn) diff --git a/docs/api/requirements.txt b/docs/api/requirements.txt new file mode 100644 index 0000000000..e2d91f9b36 --- /dev/null +++ b/docs/api/requirements.txt @@ -0,0 +1,2 @@ +Sphinx>=3.3.1 +sphinx_rtd_theme>=0.5.0 diff --git a/docs/lint_errors.md b/docs/lint_errors.md deleted file mode 100644 index 1bc6546a20..0000000000 --- a/docs/lint_errors.md +++ /dev/null @@ -1,409 +0,0 @@ -# Linting Errors - -This page contains detailed descriptions of the tests done by the [nf-core/tools](https://github.com/nf-core/tools) package. Linting errors should show URLs next to any failures that link to the relevant heading below. - -## Error #1 - File not found / must be removed ## {#1} - -nf-core pipelines should adhere to a common file structure for consistency. - -The lint test looks for the following required files: - -* `nextflow.config` - * The main nextflow config file -* `nextflow_schema.json` - * A JSON schema describing pipeline parameters, generated using `nf-core schema build` -* Continuous integration tests with [GitHub Actions](https://github.com/features/actions) - * GitHub Actions workflows for CI of your pipeline (`.github/workflows/ci.yml`), branch protection (`.github/workflows/branch.yml`) and nf-core best practice linting (`.github/workflows/linting.yml`) -* `LICENSE`, `LICENSE.md`, `LICENCE.md` or `LICENCE.md` - * The MIT licence. Copy from [here](https://raw.githubusercontent.com/nf-core/tools/master/LICENSE). 
-* `README.md` - * A well written readme file in markdown format -* `CHANGELOG.md` - * A markdown file listing the changes for each pipeline release -* `docs/README.md`, `docs/output.md` and `docs/usage.md` - * A `docs` directory with an index `README.md`, usage and output documentation - -The following files are suggested but not a hard requirement. If they are missing they trigger a warning: - -* `main.nf` - * It's recommended that the main workflow script is called `main.nf` -* `environment.yml` - * A conda environment file describing the required software -* `Dockerfile` - * A docker build script to generate a docker image with the required software -* `conf/base.config` - * A `conf` directory with at least one config called `base.config` -* `.github/workflows/awstest.yml` and `.github/workflows/awsfulltest.yml` - * GitHub workflow scripts used for automated tests on AWS - -The following files will cause a failure if the _are_ present (to fix, delete them): - -* `Singularity` - * As we are relying on [Docker Hub](https://https://hub.docker.com/) instead of Singularity - and all containers are automatically pulled from there, repositories should not - have a `Singularity` file present. -* `parameters.settings.json` - * The syntax for pipeline schema has changed - old `parameters.settings.json` should be - deleted and new `nextflow_schema.json` files created instead. -* `bin/markdown_to_html.r` - * The old markdown to HTML conversion script, now replaced by `markdown_to_html.py` -* `.github/workflows/push_dockerhub.yml` - * The old dockerhub build script, now split into `.github/workflows/push_dockerhub_dev.yml` and `.github/workflows/push_dockerhub_release.yml` - -## Error #2 - Docker file check failed ## {#2} - -DSL1 pipelines should have a file called `Dockerfile` in their root directory. -The file is used for automated docker image builds. This test checks that the file -exists and contains at least the string `FROM` (`Dockerfile`). 
- -Some pipelines, especially DSL2, may not have a `Dockerfile`. In this case a warning -will be generated which can be safely ignored. - -## Error #3 - Licence check failed ## {#3} - -nf-core pipelines must ship with an open source [MIT licence](https://choosealicense.com/licenses/mit/). - -This test fails if the following conditions are not met: - -* No licence file found - * `LICENSE`, `LICENSE.md`, `LICENCE.md` or `LICENCE.md` -* Licence file contains fewer than 4 lines of text -* File does not contain the string `without restriction` -* Licence contains template placeholders - * `[year]`, `[fullname]`, ``, ``, `` or `` - -## Error #4 - Nextflow config check failed ## {#4} - -nf-core pipelines are required to be configured with a minimal set of variable -names. This test fails or throws warnings if required variables are not set. - -> **Note:** These config variables must be set in `nextflow.config` or another config -> file imported from there. Any variables set in nextflow script files (eg. `main.nf`) -> are not checked and will be assumed to be missing. - -The following variables fail the test if missing: - -* `params.outdir` - * A directory in which all pipeline results should be saved -* `manifest.name` - * The pipeline name. Should begin with `nf-core/` -* `manifest.description` - * A description of the pipeline -* `manifest.version` - * The version of this pipeline. This should correspond to a [GitHub release](https://help.github.com/articles/creating-releases/). - * If `--release` is set when running `nf-core lint`, the version number must not contain the string `dev` - * If `--release` is _not_ set, the version should end in `dev` (warning triggered if not) -* `manifest.nextflowVersion` - * The minimum version of Nextflow required to run the pipeline. - * Should be `>=` or `!>=` and a version number, eg. 
`manifest.nextflowVersion = '>=0.31.0'` (see [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html#scope-manifest)) - * `>=` warns about old versions but tries to run anyway, `!>=` fails for old versions. Only use the latter if you _know_ that the pipeline will certainly fail before this version. - * This should correspond to the `NXF_VER` version tested by GitHub Actions. -* `manifest.homePage` - * The homepage for the pipeline. Should be the nf-core GitHub repository URL, - so beginning with `https://github.com/nf-core/` -* `timeline.enabled`, `trace.enabled`, `report.enabled`, `dag.enabled` - * The nextflow timeline, trace, report and DAG should be enabled by default (set to `true`) -* `process.cpus`, `process.memory`, `process.time` - * Default CPUs, memory and time limits for tasks -* `params.input` - * Input parameter to specify input data, specify this to avoid a warning - * Typical usage: - * `params.input`: Input data that is not NGS sequencing data - -The following variables throw warnings if missing: - -* `manifest.mainScript` - * The filename of the main pipeline script (recommended to be `main.nf`) -* `timeline.file`, `trace.file`, `report.file`, `dag.file` - * Default filenames for the timeline, trace and report - * Should be set to a results folder, eg: `${params.outdir}/pipeline_info/trace.[workflowname].txt"` - * The DAG file path should end with `.svg` - * If Graphviz is not installed, Nextflow will generate a `.dot` file instead -* `process.container` - * Docker Hub handle for a single default container for use by all processes. - * Must specify a tag that matches the pipeline version number if set. - * If the pipeline version number contains the string `dev`, the DockerHub tag must be `:dev` - -The following variables are depreciated and fail the test if they are still present: - -* `params.version` - * The old method for specifying the pipeline version. 
Replaced by `manifest.version` -* `params.nf_required_version` - * The old method for specifying the minimum Nextflow version. Replaced by `manifest.nextflowVersion` -* `params.container` - * The old method for specifying the dockerhub container address. Replaced by `process.container` -* `igenomesIgnore` - * Changed to `igenomes_ignore` - * The `snake_case` convention should now be used when defining pipeline parameters - -Process-level configuration syntax is checked and fails if uses the old Nextflow syntax, for example: -`process.$fastqc` instead of `process withName:'fastqc'`. - -## Error #5 - Continuous Integration configuration ## {#5} - -nf-core pipelines must have CI testing with GitHub Actions. - -### GitHub Actions CI - -There are 4 main GitHub Actions CI test files: `ci.yml`, `linting.yml`, `branch.yml` and `awstests.yml`, and they can all be found in the `.github/workflows/` directory. -You can always add steps to the workflows to suit your needs, but to ensure that the `nf-core lint` tests pass, keep the steps indicated here. - -This test will fail if the following requirements are not met in these files: - -1. `ci.yml`: Contains all the commands required to test the pipeline - * Must be triggered on the following events: - - ```yaml - on: - push: - branches: - - dev - pull_request: - release: - types: [published] - ``` - - * The minimum Nextflow version specified in the pipeline's `nextflow.config` has to match that defined by `nxf_ver` in the test matrix: - - ```yaml - strategy: - matrix: - # Nextflow versions: check pipeline minimum and current latest - nxf_ver: ['19.10.0', ''] - ``` - - * The `Docker` container for the pipeline must be tagged appropriately for: - * Development pipelines: `docker tag nfcore/:dev nfcore/:dev` - * Released pipelines: `docker tag nfcore/:dev nfcore/:` - - ```yaml - - name: Build new docker image - if: env.GIT_DIFF - run: docker build --no-cache . 
-t nfcore/:1.0.0 - - - name: Pull docker image - if: ${{ !env.GIT_DIFF }} - run: | - docker pull nfcore/:dev - docker tag nfcore/:dev nfcore/:1.0.0 - ``` - -2. `linting.yml`: Specifies the commands to lint the pipeline repository using `nf-core lint` and `markdownlint` - * Must be turned on for `push` and `pull_request`. - * Must have the command `nf-core -l lint_log.txt lint ${GITHUB_WORKSPACE}`. - * Must have the command `markdownlint ${GITHUB_WORKSPACE} -c ${GITHUB_WORKSPACE}/.github/markdownlint.yml`. - -3. `branch.yml`: Ensures that pull requests to the protected `master` branch are coming from the correct branch when a PR is opened against the _nf-core_ repository. - * Must be turned on for `pull_request` to `master`. - - ```yaml - on: - pull_request: - branches: - - master - ``` - - * Checks that PRs to the protected nf-core repo `master` branch can only come from an nf-core `dev` branch or a fork `patch` branch: - - ```yaml - steps: - # PRs to the nf-core repo master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches - - name: Check PRs - if: github.repository == 'nf-core/' - run: | - { [[ ${{github.event.pull_request.head.repo.full_name}} == nf-core/ ]] && [[ $GITHUB_HEAD_REF = "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]] - ``` - - * For branch protection in repositories outside of _nf-core_, you can add an additional step to this workflow. Keep the _nf-core_ branch protection step, to ensure that the `nf-core lint` tests pass. 
Here's an example: - - ```yaml - steps: - # PRs are only ok if coming from an nf-core `dev` branch or a fork `patch` branch - - name: Check PRs - if: github.repository == 'nf-core/' - run: | - { [[ ${{github.event.pull_request.head.repo.full_name}} == nf-core/ ]] && [[ $GITHUB_HEAD_REF = "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]] - - name: Check PRs in another repository - if: github.repository == '/' - run: | - { [[ ${{github.event.pull_request.head.repo.full_name}} == / ]] && [[ $GITHUB_HEAD_REF = "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]] - ``` - -4. `awstest.yml`: Triggers tests on AWS batch. As running tests on AWS incurs costs, they should be only triggered on `workflow_dispatch`. -This allows for manual triggering of the workflow when testing on AWS is desired. -You can trigger the tests by going to the `Actions` tab on the pipeline GitHub repository and selecting the `nf-core AWS test` workflow on the left. - * Must not be turned on for `push` or `pull_request`. - * Must be turned on for `workflow_dispatch`. - -### GitHub Actions AWS full tests - -Additionally, we provide the possibility of testing the pipeline on full size datasets on AWS. -This should ensure that the pipeline runs as expected on AWS and provide a resource estimation. -The GitHub Actions workflow is `awsfulltest.yml`, and it can be found in the `.github/workflows/` directory. -This workflow incurrs higher AWS costs, therefore it should only be triggered for releases (`workflow_run` - after the docker hub release workflow) and `workflow_dispatch`. -You can trigger the tests by going to the `Actions` tab on the pipeline GitHub repository and selecting the `nf-core AWS full size tests` workflow on the left. -For tests on full data prior to release, [Nextflow Tower](https://tower.nf) launch feature can be employed. - -`awsfulltest.yml`: Triggers full sized tests run on AWS batch after releasing. - -* Must be turned on `workflow_dispatch`. 
-* Must be turned on for `workflow_run` with `workflows: ["nf-core Docker push (release)"]` and `types: [completed]` -* Should run the profile `test_full` that should be edited to provide the links to full-size datasets. If it runs the profile `test` a warning is given. - -## Error #6 - Repository `README.md` tests ## {#6} - -The `README.md` files for a project are very important and must meet some requirements: - -* Nextflow badge - * If no Nextflow badge is found, a warning is given - * If a badge is found but the version doesn't match the minimum version in the config file, the test fails - * Example badge code: - - ```markdown - [![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A50.27.6-brightgreen.svg)](https://www.nextflow.io/) - ``` - -* Bioconda badge - * If your pipeline contains a file called `environment.yml`, a bioconda badge is required - * Required badge code: - - ```markdown - [![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/) - ``` - -## Error #7 - Pipeline and container version numbers ## {#7} - -> This test only runs when `--release` is set or `$GITHUB_REF` is equal to `master` - -These tests look at `process.container` and `$GITHUB_REF` only if they are set. - -* Container name must have a tag specified (eg. 
`nfcore/pipeline:version`) -* Container tag / `$GITHUB_REF` must contain only numbers and dots -* Tags and `$GITHUB_REF` must all match one another - -## Error #8 - Conda environment tests ## {#8} - -> These tests only run when your pipeline has a root file called `environment.yml` - -* The environment `name` must match the pipeline name and version - * The pipeline name is defined in the config variable `manifest.name` - * Replace the slash with a hyphen as environment names shouldn't contain that character - * Example: For `nf-core/test` version 1.4, the conda environment name should be `nf-core-test-1.4` - -Each dependency is checked using the [Anaconda API service](https://api.anaconda.org/docs). -Dependency sublists are ignored with the exception of `- pip`: these packages are also checked -for pinned version numbers and checked using the [PyPI JSON API](https://wiki.python.org/moin/PyPIJSON). - -Note that conda dependencies with pinned channels (eg. `conda-forge::openjdk`) are fine -and should be handled by the linting properly. - -Each dependency can have the following lint failures and warnings: - -* (Test failure) Dependency does not have a pinned version number, eg. `toolname=1.6.8` -* (Test failure) The package cannot be found on any of the listed conda channels (or PyPI if `pip`) -* (Test failure) The package version cannot be found on anaconda cloud (or on PyPi, for `pip` dependencies) -* (Test warning) A newer version of the package is available - -> NB: Conda package versions should be pinned with one equals sign (`toolname=1.1`), pip with two (`toolname==1.2`) - -## Error #9 - Dockerfile for use with Conda environments ## {#9} - -> This test only runs if there is both `environment.yml` -> and `Dockerfile` present in the workflow. - -If a workflow has a conda `environment.yml` file (see above), the `Dockerfile` should use this -to create the container. 
Such `Dockerfile`s can usually be very short, eg: - -```Dockerfile -FROM nfcore/base:1.11 -MAINTAINER Rocky Balboa -LABEL authors="your@email.com" \ - description="Docker image containing all requirements for the nf-core mypipeline pipeline" - -COPY environment.yml / -RUN conda env create --quiet -f /environment.yml && conda clean -a -RUN conda env export --name nf-core-mypipeline-1.0 > nf-core-mypipeline-1.0.yml -ENV PATH /opt/conda/envs/nf-core-mypipeline-1.0/bin:$PATH -``` - -To enforce this minimal `Dockerfile` and check for common copy+paste errors, we require -that the above template is used. -Failures are generated if the `FROM`, `COPY` and `RUN` statements above are not present. -These lines must be an exact copy of the above example. - -Note that the base `nfcore/base` image should be tagged to the most recent release. -The linting tool compares the tag against the currently installed version. - -Additional lines and different metadata can be added without causing the test to fail. - -## Error #10 - Template TODO statement found ## {#10} - -The nf-core workflow template contains a number of comment lines with the following format: - -```groovy -// TODO nf-core: Make some kind of change to the workflow here -``` - -This lint test runs through all files in the pipeline and searches for these lines. - -## Error #11 - Pipeline name ## {#11} - -_..removed.._ - -## Error #12 - Pipeline name ## {#12} - -In order to ensure consistent naming, pipeline names should contain only lower case, alphanumeric characters. Otherwise a warning is displayed. - -## Error #13 - Pipeline name ## {#13} - -The `nf-core create` pipeline template uses [cookiecutter](https://github.com/cookiecutter/cookiecutter) behind the scenes. -This check fails if any cookiecutter template variables such as `{{ cookiecutter.pipeline_name }}` are fouund in your pipeline code. 
-Finding a placeholder like this means that something was probably copied and pasted from the template without being properly rendered for your pipeline. - -## Error #14 - Pipeline schema syntax ## {#14} - -Pipelines should have a `nextflow_schema.json` file that describes the different pipeline parameters (eg. `params.something`, `--something`). - -* Schema should be valid JSON files -* Schema should adhere to [JSONSchema](https://json-schema.org/), Draft 7. -* Parameters can be described in two places: - * As `properties` in the top-level schema object - * As `properties` within subschemas listed in a top-level `definitions` objects -* The schema must describe at least one parameter -* There must be no duplicate parameter IDs across the schema and definition subschema -* All subschema in `definitions` must be referenced in the top-level `allOf` key -* The top-level `allOf` key must not describe any non-existent definitions -* Core top-level schema attributes should exist and be set as follows: - * `$schema`: `https://json-schema.org/draft-07/schema` - * `$id`: URL to the raw schema file, eg. 
`https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json` - * `title`: `YOURPIPELINE pipeline parameters` - * `description`: The piepline config `manifest.description` - -For example, an _extremely_ minimal schema could look like this: - -```json -{ - "$schema": "https://json-schema.org/draft-07/schema", - "$id": "https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json", - "title": "YOURPIPELINE pipeline parameters", - "description": "This pipeline is for testing", - "properties": { - "first_param": { "type": "string" } - }, - "definitions": { - "my_first_group": { - "properties": { - "second_param": { "type": "string" } - } - } - }, - "allOf": [{"$ref": "#/definitions/my_first_group"}] -} -``` - -## Error #15 - Schema config check ## {#15} - -The `nextflow_schema.json` pipeline schema should describe every flat parameter returned from the `nextflow config` command (params that are objects or more complex structures are ignored). -Missing parameters result in a lint failure. - -If any parameters are found in the schema that were not returned from `nextflow config` a warning is given. diff --git a/nf_core/__main__.py b/nf_core/__main__.py index ec3c1aa2a6..c4d7d8642f 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -527,26 +527,15 @@ def bump_version(pipeline_dir, new_version, nextflow): As well as the pipeline version, you can also change the required version of Nextflow. 
""" - - # First, lint the pipeline to check everything is in order - log.info("Running nf-core lint tests") - - # Run the lint tests - try: - lint_obj = nf_core.lint.PipelineLint(pipeline_dir) - lint_obj.lint_pipeline() - except AssertionError as e: - log.error("Please fix lint errors before bumping versions") - return - if len(lint_obj.failed) > 0: - log.error("Please fix lint errors before bumping versions") - return + # Make a pipeline object and load config etc + pipeline_obj = nf_core.utils.Pipeline(pipeline_dir) + pipeline_obj._load() # Bump the pipeline version number if not nextflow: - nf_core.bump_version.bump_pipeline_version(lint_obj, new_version) + nf_core.bump_version.bump_pipeline_version(pipeline_obj, new_version) else: - nf_core.bump_version.bump_nextflow_version(lint_obj, new_version) + nf_core.bump_version.bump_nextflow_version(pipeline_obj, new_version) @nf_core_cli.command("sync", help_priority=10) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 60434f9f55..28e3f9eeaa 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -7,161 +7,208 @@ import logging import os import re +import rich.console import sys +import nf_core.utils log = logging.getLogger(__name__) +stderr = rich.console.Console(file=sys.stderr, force_terminal=nf_core.utils.rich_force_colors()) -def bump_pipeline_version(lint_obj, new_version): +def bump_pipeline_version(pipeline_obj, new_version): """Bumps a pipeline version number. Args: - lint_obj (nf_core.lint.PipelineLint): A `PipelineLint` object that holds information + pipeline_obj (nf_core.utils.Pipeline): A `Pipeline` object that holds information about the pipeline contents and build files. new_version (str): The new version tag for the pipeline. Semantic versioning only. 
""" + # Collect the old and new version numbers - current_version = lint_obj.config.get("manifest.version", "").strip(" '\"") + current_version = pipeline_obj.nf_config.get("manifest.version", "").strip(" '\"") if new_version.startswith("v"): log.warning("Stripping leading 'v' from new version number") new_version = new_version[1:] if not current_version: - log.error("Could not find config variable manifest.version") + log.error("Could not find config variable 'manifest.version'") sys.exit(1) - log.info( - "Changing version number:\n Current version number is '{}'\n New version number will be '{}'".format( - current_version, new_version - ) - ) - - # Update nextflow.config - nfconfig_pattern = r"version\s*=\s*[\'\"]?{}[\'\"]?".format(current_version.replace(".", r"\.")) - nfconfig_newstr = "version = '{}'".format(new_version) - update_file_version("nextflow.config", lint_obj, nfconfig_pattern, nfconfig_newstr) + log.info("Changing version number from '{}' to '{}'".format(current_version, new_version)) - # Update container tag + # nextflow.config - workflow manifest version + # nextflow.config - process container manifest version docker_tag = "dev" if new_version.replace(".", "").isdigit(): docker_tag = new_version else: log.info("New version contains letters. Setting docker tag to 'dev'") - nfconfig_pattern = r"container\s*=\s*[\'\"]nfcore/{}:(?:{}|dev)[\'\"]".format( - lint_obj.pipeline_name.lower(), current_version.replace(".", r"\.") - ) - nfconfig_newstr = "container = 'nfcore/{}:{}'".format(lint_obj.pipeline_name.lower(), docker_tag) - update_file_version("nextflow.config", lint_obj, nfconfig_pattern, nfconfig_newstr) - # Update GitHub Actions CI image tag (build) - nfconfig_pattern = r"docker build --no-cache . -t nfcore/{name}:(?:{tag}|dev)".format( - name=lint_obj.pipeline_name.lower(), tag=current_version.replace(".", r"\.") - ) - nfconfig_newstr = "docker build --no-cache . 
-t nfcore/{name}:{tag}".format( - name=lint_obj.pipeline_name.lower(), tag=docker_tag - ) update_file_version( - os.path.join(".github", "workflows", "ci.yml"), lint_obj, nfconfig_pattern, nfconfig_newstr, allow_multiple=True + "nextflow.config", + pipeline_obj, + [ + ( + r"version\s*=\s*[\'\"]?{}[\'\"]?".format(current_version.replace(".", r"\.")), + "version = '{}'".format(new_version), + ), + ( + r"container\s*=\s*[\'\"]nfcore/{}:(?:{}|dev)[\'\"]".format( + pipeline_obj.pipeline_name.lower(), current_version.replace(".", r"\.") + ), + "container = 'nfcore/{}:{}'".format(pipeline_obj.pipeline_name.lower(), docker_tag), + ), + ], ) - # Update GitHub Actions CI image tag (pull) - nfconfig_pattern = r"docker tag nfcore/{name}:dev nfcore/{name}:(?:{tag}|dev)".format( - name=lint_obj.pipeline_name.lower(), tag=current_version.replace(".", r"\.") - ) - nfconfig_newstr = "docker tag nfcore/{name}:dev nfcore/{name}:{tag}".format( - name=lint_obj.pipeline_name.lower(), tag=docker_tag - ) + # .github/workflows/ci.yml - docker build image tag + # .github/workflows/ci.yml - docker tag image update_file_version( - os.path.join(".github", "workflows", "ci.yml"), lint_obj, nfconfig_pattern, nfconfig_newstr, allow_multiple=True + os.path.join(".github", "workflows", "ci.yml"), + pipeline_obj, + [ + ( + r"docker build --no-cache . -t nfcore/{name}:(?:{tag}|dev)".format( + name=pipeline_obj.pipeline_name.lower(), tag=current_version.replace(".", r"\.") + ), + "docker build --no-cache . 
-t nfcore/{name}:{tag}".format( + name=pipeline_obj.pipeline_name.lower(), tag=docker_tag + ), + ), + ( + r"docker tag nfcore/{name}:dev nfcore/{name}:(?:{tag}|dev)".format( + name=pipeline_obj.pipeline_name.lower(), tag=current_version.replace(".", r"\.") + ), + "docker tag nfcore/{name}:dev nfcore/{name}:{tag}".format( + name=pipeline_obj.pipeline_name.lower(), tag=docker_tag + ), + ), + ], ) - if "environment.yml" in lint_obj.files: - # Update conda environment.yml - nfconfig_pattern = r"name: nf-core-{}-{}".format( - lint_obj.pipeline_name.lower(), current_version.replace(".", r"\.") - ) - nfconfig_newstr = "name: nf-core-{}-{}".format(lint_obj.pipeline_name.lower(), new_version) - update_file_version("environment.yml", lint_obj, nfconfig_pattern, nfconfig_newstr) + # environment.yml - environment name + update_file_version( + "environment.yml", + pipeline_obj, + [ + ( + r"name: nf-core-{}-{}".format(pipeline_obj.pipeline_name.lower(), current_version.replace(".", r"\.")), + "name: nf-core-{}-{}".format(pipeline_obj.pipeline_name.lower(), new_version), + ) + ], + ) - # Update Dockerfile ENV PATH and RUN conda env create - nfconfig_pattern = r"nf-core-{}-{}".format(lint_obj.pipeline_name.lower(), current_version.replace(".", r"\.")) - nfconfig_newstr = "nf-core-{}-{}".format(lint_obj.pipeline_name.lower(), new_version) - update_file_version("Dockerfile", lint_obj, nfconfig_pattern, nfconfig_newstr, allow_multiple=True) + # Dockerfile - ENV PATH and RUN conda env create + update_file_version( + "Dockerfile", + pipeline_obj, + [ + ( + r"nf-core-{}-{}".format(pipeline_obj.pipeline_name.lower(), current_version.replace(".", r"\.")), + "nf-core-{}-{}".format(pipeline_obj.pipeline_name.lower(), new_version), + ) + ], + ) -def bump_nextflow_version(lint_obj, new_version): +def bump_nextflow_version(pipeline_obj, new_version): """Bumps the required Nextflow version number of a pipeline. 
 Args: - lint_obj (nf_core.lint.PipelineLint): A `PipelineLint` object that holds information + pipeline_obj (nf_core.utils.Pipeline): A `Pipeline` object that holds information about the pipeline contents and build files. new_version (str): The new version tag for the required Nextflow version. """ - # Collect the old and new version numbers - current_version = lint_obj.config.get("manifest.nextflowVersion", "").strip(" '\"") - current_version = re.sub(r"[^0-9\.]", "", current_version) - new_version = re.sub(r"[^0-9\.]", "", new_version) + + # Collect the old and new version numbers - strip leading non-numeric characters (>=) + current_version = pipeline_obj.nf_config.get("manifest.nextflowVersion", "").strip(" '\"") + current_version = re.sub(r"^[^0-9\.]*", "", current_version) + new_version = re.sub(r"^[^0-9\.]*", "", new_version) if not current_version: - log.error("Could not find config variable manifest.nextflowVersion") + log.error("Could not find config variable 'manifest.nextflowVersion'") sys.exit(1) - log.info( - "Changing version number:\n Current version number is '{}'\n New version number will be '{}'".format( - current_version, new_version - ) - ) + log.info("Changing Nextflow version number from '{}' to '{}'".format(current_version, new_version)) - # Update nextflow.config - nfconfig_pattern = r"nextflowVersion\s*=\s*[\'\"]?>={}[\'\"]?".format(current_version.replace(".", r"\.")) - nfconfig_newstr = "nextflowVersion = '>={}'".format(new_version) - update_file_version("nextflow.config", lint_obj, nfconfig_pattern, nfconfig_newstr) + # nextflow.config - manifest minimum nextflowVersion + update_file_version( + "nextflow.config", + pipeline_obj, + [ + ( + r"nextflowVersion\s*=\s*[\'\"]?>={}[\'\"]?".format(current_version.replace(".", r"\.")), + "nextflowVersion = '>={}'".format(new_version), + ) + ], + ) - # Update GitHub Actions CI - nfconfig_pattern = r"nxf_ver: \[[\'\"]?{}[\'\"]?, ''\]".format(current_version.replace(".", r"\.")) - nfconfig_newstr =
 "nxf_ver: ['{}', '']".format(new_version) + # .github/workflows/ci.yml - Nextflow version matrix update_file_version( - os.path.join(".github", "workflows", "ci.yml"), lint_obj, nfconfig_pattern, nfconfig_newstr, True + os.path.join(".github", "workflows", "ci.yml"), + pipeline_obj, + [ + ( + r"nxf_ver: \[[\'\"]?{}[\'\"]?, ''\]".format(current_version.replace(".", r"\.")), + "nxf_ver: ['{}', '']".format(new_version), + ) + ], ) - # Update README badge - nfconfig_pattern = r"nextflow-%E2%89%A5{}-brightgreen.svg".format(current_version.replace(".", r"\.")) - nfconfig_newstr = "nextflow-%E2%89%A5{}-brightgreen.svg".format(new_version) - update_file_version("README.md", lint_obj, nfconfig_pattern, nfconfig_newstr, True) + # README.md - Nextflow version badge + update_file_version( + "README.md", + pipeline_obj, + [ + ( + r"nextflow-%E2%89%A5{}-brightgreen.svg".format(current_version.replace(".", r"\.")), + "nextflow-%E2%89%A5{}-brightgreen.svg".format(new_version), + ) + ], + ) -def update_file_version(filename, lint_obj, pattern, newstr, allow_multiple=False): +def update_file_version(filename, pipeline_obj, patterns): """Updates the version number in a requested file. Args: filename (str): File to scan. - lint_obj (nf_core.lint.PipelineLint): A PipelineLint object that holds information + pipeline_obj (nf_core.utils.Pipeline): A `Pipeline` object that holds information about the pipeline contents and build files. pattern (str): Regex pattern to apply. newstr (str): The replaced string. - allow_multiple (bool): Replace all pattern hits, not only the first. Defaults to False. Raises: - SyntaxError, if the version number cannot be found. + ValueError, if the version number cannot be found.
""" # Load the file - fn = os.path.join(lint_obj.path, filename) + fn = pipeline_obj._fp(filename) content = "" - with open(fn, "r") as fh: - content = fh.read() - - # Check that we have exactly one match - matches_pattern = re.findall("^.*{}.*$".format(pattern), content, re.MULTILINE) - if len(matches_pattern) == 0: - raise SyntaxError("Could not find version number in {}: '{}'".format(filename, pattern)) - if len(matches_pattern) > 1 and not allow_multiple: - raise SyntaxError("Found more than one version number in {}: '{}'".format(filename, pattern)) - - # Replace the match - new_content = re.sub(pattern, newstr, content) - matches_newstr = re.findall("^.*{}.*$".format(newstr), new_content, re.MULTILINE) - - log.info( - "Updating version in {}\n".format(filename) - + "[red] - {}\n".format("\n - ".join(matches_pattern).strip()) - + "[green] + {}\n".format("\n + ".join(matches_newstr).strip()) - ) + try: + with open(fn, "r") as fh: + content = fh.read() + except FileNotFoundError: + log.warning("File not found: '{}'".format(fn)) + return + + replacements = [] + for pattern in patterns: + + # Check that we have a match + matches_pattern = re.findall("^.*{}.*$".format(pattern[0]), content, re.MULTILINE) + if len(matches_pattern) == 0: + log.error("Could not find version number in {}: '{}'".format(filename, pattern)) + continue + + # Replace the match + content = re.sub(pattern[0], pattern[1], content) + matches_newstr = re.findall("^.*{}.*$".format(pattern[1]), content, re.MULTILINE) + + # Save for logging + replacements.append((matches_pattern, matches_newstr)) + + log.info("Updated version in '{}'".format(filename)) + for replacement in replacements: + for idx, matched in enumerate(replacement[0]): + stderr.print(" [red] - {}".format(matched.strip()), highlight=False) + stderr.print(" [green] + {}".format(replacement[1][idx].strip()), highlight=False) + stderr.print("\n") with open(fn, "w") as fh: - fh.write(new_content) + fh.write(content) diff --git 
a/nf_core/download.py b/nf_core/download.py index db570231e4..0586fb9cc7 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -46,7 +46,7 @@ def __init__(self, pipeline, release=None, singularity=False, outdir=None, compr self.wf_name = None self.wf_sha = None self.wf_download_url = None - self.config = dict() + self.nf_config = dict() self.containers = list() def download_workflow(self): @@ -255,10 +255,10 @@ def find_container_images(self): """ Find container image names for workflow """ # Use linting code to parse the pipeline nextflow config - self.config = nf_core.utils.fetch_wf_config(os.path.join(self.outdir, "workflow")) + self.nf_config = nf_core.utils.fetch_wf_config(os.path.join(self.outdir, "workflow")) # Find any config variables that look like a container - for k, v in self.config.items(): + for k, v in self.nf_config.items(): if k.startswith("process.") and k.endswith(".container"): self.containers.append(v.strip('"').strip("'")) diff --git a/nf_core/licences.py b/nf_core/licences.py index 08e3ac8b42..2b90de838a 100644 --- a/nf_core/licences.py +++ b/nf_core/licences.py @@ -14,7 +14,8 @@ import rich.console import rich.table -import nf_core.lint +import nf_core.utils +import nf_core.lint.conda_env_yaml log = logging.getLogger(__name__) @@ -52,38 +53,37 @@ def run_licences(self): def get_environment_file(self): """Get the conda environment file for the pipeline""" if os.path.exists(self.pipeline): - env_filename = os.path.join(self.pipeline, "environment.yml") - if not os.path.exists(self.pipeline): - raise LookupError("Pipeline {} exists, but no environment.yml file found".format(self.pipeline)) - with open(env_filename, "r") as fh: - self.conda_config = yaml.safe_load(fh) + pipeline_obj = nf_core.utils.Pipeline(self.pipeline) + pipeline_obj._load() + if pipeline_obj._fp("environment.yml") not in pipeline_obj.files: + raise LookupError("No `environment.yml` file found") + self.conda_config = pipeline_obj.conda_config else: env_url = 
"https://raw.githubusercontent.com/nf-core/{}/master/environment.yml".format(self.pipeline) log.debug("Fetching environment.yml file: {}".format(env_url)) response = requests.get(env_url) # Check that the pipeline exists if response.status_code == 404: - raise LookupError("Couldn't find pipeline nf-core/{}".format(self.pipeline)) + raise LookupError("Couldn't find pipeline conda file: {}".format(env_url)) self.conda_config = yaml.safe_load(response.text) def fetch_conda_licences(self): """Fetch package licences from Anaconda and PyPi.""" - lint_obj = nf_core.lint.PipelineLint(self.pipeline) - lint_obj.conda_config = self.conda_config # Check conda dependency list - deps = lint_obj.conda_config.get("dependencies", []) + deps = self.conda_config.get("dependencies", []) + deps_data = {} log.info("Fetching licence information for {} tools".format(len(deps))) for dep in deps: try: if isinstance(dep, str): - lint_obj.check_anaconda_package(dep) + deps_data[dep] = nf_core.lint.conda_env_yaml._anaconda_package(self.conda_config, dep) elif isinstance(dep, dict): - lint_obj.check_pip_package(dep) + deps_data[dep] = nf_core.lint.conda_env_yaml._pip_package(dep) except ValueError: log.error("Couldn't get licence information for {}".format(dep)) - for dep, data in lint_obj.conda_package_info.items(): + for dep, data in deps_data.items(): try: depname, depver = dep.split("=", 1) licences = set() diff --git a/nf_core/lint.py b/nf_core/lint.py deleted file mode 100755 index e057bc38b6..0000000000 --- a/nf_core/lint.py +++ /dev/null @@ -1,1495 +0,0 @@ -#!/usr/bin/env python -"""Linting policy for nf-core pipeline projects. - -Tests Nextflow-based pipelines to check that they adhere to -the nf-core community guidelines. 
-""" - -from rich.console import Console -from rich.markdown import Markdown -from rich.table import Table -import datetime -import fnmatch -import git -import io -import json -import logging -import os -import re -import requests -import rich -import rich.progress -import subprocess -import textwrap - -import click -import requests -import yaml - -import nf_core.utils -import nf_core.schema - -log = logging.getLogger(__name__) - -# Set up local caching for requests to speed up remote queries -nf_core.utils.setup_requests_cachedir() - -# Don't pick up debug logs from the requests package -logging.getLogger("requests").setLevel(logging.WARNING) -logging.getLogger("urllib3").setLevel(logging.WARNING) - - -def run_linting(pipeline_dir, release_mode=False, show_passed=False, md_fn=None, json_fn=None): - """Runs all nf-core linting checks on a given Nextflow pipeline project - in either `release` mode or `normal` mode (default). Returns an object - of type :class:`PipelineLint` after finished. - - Args: - pipeline_dir (str): The path to the Nextflow pipeline root directory - release_mode (bool): Set this to `True`, if the linting should be run in the `release` mode. - See :class:`PipelineLint` for more information. - - Returns: - An object of type :class:`PipelineLint` that contains all the linting results. 
- """ - - # Create the lint object - lint_obj = PipelineLint(pipeline_dir) - - # Run the linting tests - try: - lint_obj.lint_pipeline(release_mode) - except AssertionError as e: - log.critical("Critical error: {}".format(e)) - log.info("Stopping tests...") - return lint_obj - - # Print the results - lint_obj.print_results(show_passed) - - # Save results to Markdown file - if md_fn is not None: - log.info("Writing lint results to {}".format(md_fn)) - markdown = lint_obj.get_results_md() - with open(md_fn, "w") as fh: - fh.write(markdown) - - # Save results to JSON file - if json_fn is not None: - lint_obj.save_json_results(json_fn) - - # Exit code - if len(lint_obj.failed) > 0: - if release_mode: - log.info("Reminder: Lint tests were run in --release mode.") - - return lint_obj - - -class PipelineLint(object): - """Object to hold linting information and results. - All objects attributes are set, after the :func:`PipelineLint.lint_pipeline` function was called. - - Args: - path (str): The path to the nf-core pipeline directory. - - Attributes: - conda_config (dict): The parsed conda configuration file content (`environment.yml`). - conda_package_info (dict): The conda package(s) information, based on the API requests to Anaconda cloud. - config (dict): The Nextflow pipeline configuration file content. - dockerfile (list): A list of lines (str) from the parsed Dockerfile. - failed (list): A list of tuples of the form: `(, )` - files (list): A list of files found during the linting process. - minNextflowVersion (str): The minimum required Nextflow version to run the pipeline. - passed (list): A list of tuples of the form: `(, )` - path (str): Path to the pipeline directory. - pipeline_name (str): The pipeline name, without the `nf-core` tag, for example `hlatyping`. - release_mode (bool): `True`, if you the to linting was run in release mode, `False` else. 
- warned (list): A list of tuples of the form: `(, )` - - **Attribute specifications** - - Some of the more complex attributes of a PipelineLint object. - - * `conda_config`:: - - # Example - { - 'name': 'nf-core-hlatyping', - 'channels': ['bioconda', 'conda-forge'], - 'dependencies': ['optitype=1.3.2', 'yara=0.9.6'] - } - - * `conda_package_info`:: - - # See https://api.anaconda.org/package/bioconda/bioconda-utils as an example. - { - : - } - - * `config`: Produced by calling Nextflow with :code:`nextflow config -flat `. Here is an example from - the `nf-core/hlatyping `_ pipeline:: - - process.container = 'nfcore/hlatyping:1.1.1' - params.help = false - params.outdir = './results' - params.bam = false - params.single_end = false - params.seqtype = 'dna' - params.solver = 'glpk' - params.igenomes_base = './iGenomes' - params.clusterOptions = false - ... - """ - - def __init__(self, path): - """ Initialise linting object """ - self.release_mode = False - self.version = nf_core.__version__ - self.path = path - self.git_sha = None - self.files = [] - self.config = {} - self.pipeline_name = None - self.minNextflowVersion = None - self.dockerfile = [] - self.conda_config = {} - self.conda_package_info = {} - self.schema_obj = None - self.passed = [] - self.warned = [] - self.failed = [] - - try: - repo = git.Repo(self.path) - self.git_sha = repo.head.object.hexsha - except: - pass - - # Overwrite if we have the last commit from the PR - otherwise we get a merge commit hash - if os.environ.get("GITHUB_PR_COMMIT", "") != "": - self.git_sha = os.environ["GITHUB_PR_COMMIT"] - - def lint_pipeline(self, release_mode=False): - """Main linting function. - - Takes the pipeline directory as the primary input and iterates through - the different linting checks in order. Collects any warnings or errors - and returns summary at completion. Raises an exception if there is a - critical error that makes the rest of the tests pointless (eg. no - pipeline script). 
Results from this function are printed by the main script. - - Args: - release_mode (boolean): Activates the release mode, which checks for - consistent version tags of containers. Default is `False`. - - Returns: - dict: Summary of test result messages structured as follows:: - - { - 'pass': [ - ( test-id (int), message (string) ), - ( test-id (int), message (string) ) - ], - 'warn': [(id, msg)], - 'fail': [(id, msg)], - } - - Raises: - If a critical problem is found, an ``AssertionError`` is raised. - """ - log.info("Testing pipeline: [magenta]{}".format(self.path)) - if self.release_mode: - log.info("Including --release mode tests") - check_functions = [ - "check_files_exist", - "check_licence", - "check_docker", - "check_nextflow_config", - "check_actions_branch_protection", - "check_actions_ci", - "check_actions_lint", - "check_actions_awstest", - "check_actions_awsfulltest", - "check_readme", - "check_conda_env_yaml", - "check_conda_dockerfile", - "check_pipeline_todos", - "check_pipeline_name", - "check_cookiecutter_strings", - "check_schema_lint", - "check_schema_params", - ] - if release_mode: - self.release_mode = True - check_functions.extend(["check_version_consistency"]) - - progress = rich.progress.Progress( - "[bold blue]{task.description}", - rich.progress.BarColumn(bar_width=None), - "[magenta]{task.completed} of {task.total}[reset] ยป [bold yellow]{task.fields[func_name]}", - transient=True, - ) - with progress: - lint_progress = progress.add_task( - "Running lint checks", total=len(check_functions), func_name=check_functions[0] - ) - for fun_name in check_functions: - progress.update(lint_progress, advance=1, func_name=fun_name) - log.debug("Running lint test: {}".format(fun_name)) - getattr(self, fun_name)() - if len(self.failed) > 0: - log.critical("Found test failures in `{}`, halting lint run.".format(fun_name)) - break - - def check_files_exist(self): - """Checks a given pipeline directory for required files. 
- - Iterates through the pipeline's directory content and checkmarks files - for presence. - Files that **must** be present:: - - 'nextflow.config', - 'nextflow_schema.json', - ['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling - 'README.md', - 'CHANGELOG.md', - 'docs/README.md', - 'docs/output.md', - 'docs/usage.md', - '.github/workflows/branch.yml', - '.github/workflows/ci.yml', - '.github/workflows/linting.yml' - - Files that *should* be present:: - - 'main.nf', - 'environment.yml', - 'Dockerfile', - 'conf/base.config', - '.github/workflows/awstest.yml', - '.github/workflows/awsfulltest.yml' - - Files that *must not* be present:: - - 'Singularity', - 'parameters.settings.json', - 'bin/markdown_to_html.r', - '.github/workflows/push_dockerhub.yml' - - Files that *should not* be present:: - - '.travis.yml' - - Raises: - An AssertionError if neither `nextflow.config` or `main.nf` found. - """ - - # NB: Should all be files, not directories - # List of lists. Passes if any of the files in the sublist are found. - files_fail = [ - ["nextflow.config"], - ["nextflow_schema.json"], - ["LICENSE", "LICENSE.md", "LICENCE", "LICENCE.md"], # NB: British / American spelling - ["README.md"], - ["CHANGELOG.md"], - [os.path.join("docs", "README.md")], - [os.path.join("docs", "output.md")], - [os.path.join("docs", "usage.md")], - [os.path.join(".github", "workflows", "branch.yml")], - [os.path.join(".github", "workflows", "ci.yml")], - [os.path.join(".github", "workflows", "linting.yml")], - ] - files_warn = [ - ["main.nf"], - ["environment.yml"], - ["Dockerfile"], - [os.path.join("conf", "base.config")], - [os.path.join(".github", "workflows", "awstest.yml")], - [os.path.join(".github", "workflows", "awsfulltest.yml")], - ] - - # List of strings. Dails / warns if any of the strings exist. 
- files_fail_ifexists = [ - "Singularity", - "parameters.settings.json", - os.path.join("bin", "markdown_to_html.r"), - os.path.join(".github", "workflows", "push_dockerhub.yml"), - ] - files_warn_ifexists = [".travis.yml"] - - def pf(file_path): - return os.path.join(self.path, file_path) - - # First - critical files. Check that this is actually a Nextflow pipeline - if not os.path.isfile(pf("nextflow.config")) and not os.path.isfile(pf("main.nf")): - self.failed.append((1, "File not found: nextflow.config or main.nf")) - raise AssertionError("Neither nextflow.config or main.nf found! Is this a Nextflow pipeline?") - - # Files that cause an error if they don't exist - for files in files_fail: - if any([os.path.isfile(pf(f)) for f in files]): - self.passed.append((1, "File found: {}".format(self._wrap_quotes(files)))) - self.files.extend(files) - else: - self.failed.append((1, "File not found: {}".format(self._wrap_quotes(files)))) - - # Files that cause a warning if they don't exist - for files in files_warn: - if any([os.path.isfile(pf(f)) for f in files]): - self.passed.append((1, "File found: {}".format(self._wrap_quotes(files)))) - self.files.extend(files) - else: - self.warned.append((1, "File not found: {}".format(self._wrap_quotes(files)))) - - # Files that cause an error if they exist - for file in files_fail_ifexists: - if os.path.isfile(pf(file)): - self.failed.append((1, "File must be removed: {}".format(self._wrap_quotes(file)))) - else: - self.passed.append((1, "File not found check: {}".format(self._wrap_quotes(file)))) - - # Files that cause a warning if they exist - for file in files_warn_ifexists: - if os.path.isfile(pf(file)): - self.warned.append((1, "File should be removed: {}".format(self._wrap_quotes(file)))) - else: - self.passed.append((1, "File not found check: {}".format(self._wrap_quotes(file)))) - - # Load and parse files for later - if "environment.yml" in self.files: - with open(os.path.join(self.path, "environment.yml"), "r") as fh: 
- self.conda_config = yaml.safe_load(fh) - - def check_docker(self): - """Checks that Dockerfile contains the string ``FROM``.""" - if "Dockerfile" not in self.files: - return - - fn = os.path.join(self.path, "Dockerfile") - content = "" - with open(fn, "r") as fh: - content = fh.read() - - # Implicitly also checks if empty. - if "FROM " in content: - self.passed.append((2, "Dockerfile check passed")) - self.dockerfile = [line.strip() for line in content.splitlines()] - return - - self.failed.append((2, "Dockerfile check failed")) - - def check_licence(self): - """Checks licence file is MIT. - - Currently the checkpoints are: - * licence file must be long enough (4 or more lines) - * licence contains the string *without restriction* - * licence doesn't have any placeholder variables - """ - for l in ["LICENSE", "LICENSE.md", "LICENCE", "LICENCE.md"]: - fn = os.path.join(self.path, l) - if os.path.isfile(fn): - content = "" - with open(fn, "r") as fh: - content = fh.read() - - # needs at least copyright, permission, notice and "as-is" lines - nl = content.count("\n") - if nl < 4: - self.failed.append((3, "Number of lines too small for a valid MIT license file: {}".format(fn))) - return - - # determine whether this is indeed an MIT - # license. Most variations actually don't contain the - # string MIT Searching for 'without restriction' - # instead (a crutch). 
- if not "without restriction" in content: - self.failed.append((3, "Licence file did not look like MIT: {}".format(fn))) - return - - # check for placeholders present in - # - https://choosealicense.com/licenses/mit/ - # - https://opensource.org/licenses/MIT - # - https://en.wikipedia.org/wiki/MIT_License - placeholders = {"[year]", "[fullname]", "", "", "", ""} - if any([ph in content for ph in placeholders]): - self.failed.append((3, "Licence file contains placeholders: {}".format(fn))) - return - - self.passed.append((3, "Licence check passed")) - return - - self.failed.append((3, "Couldn't find MIT licence file")) - - def check_nextflow_config(self): - """Checks a given pipeline for required config variables. - - At least one string in each list must be present for fail and warn. - Any config in config_fail_ifdefined results in a failure. - - Uses ``nextflow config -flat`` to parse pipeline ``nextflow.config`` - and print all config variables. - NB: Does NOT parse contents of main.nf / nextflow script - """ - - # Fail tests if these are missing - config_fail = [ - ["manifest.name"], - ["manifest.nextflowVersion"], - ["manifest.description"], - ["manifest.version"], - ["manifest.homePage"], - ["timeline.enabled"], - ["trace.enabled"], - ["report.enabled"], - ["dag.enabled"], - ["process.cpus"], - ["process.memory"], - ["process.time"], - ["params.outdir"], - ["params.input"], - ] - # Throw a warning if these are missing - config_warn = [ - ["manifest.mainScript"], - ["timeline.file"], - ["trace.file"], - ["report.file"], - ["dag.file"], - ["process.container"], - ] - # Old depreciated vars - fail if present - config_fail_ifdefined = [ - "params.version", - "params.nf_required_version", - "params.container", - "params.singleEnd", - "params.igenomesIgnore", - ] - - # Get the nextflow config for this pipeline - self.config = nf_core.utils.fetch_wf_config(self.path) - for cfs in config_fail: - for cf in cfs: - if cf in self.config.keys(): - self.passed.append((4, 
"Config variable found: {}".format(self._wrap_quotes(cf)))) - break - else: - self.failed.append((4, "Config variable not found: {}".format(self._wrap_quotes(cfs)))) - for cfs in config_warn: - for cf in cfs: - if cf in self.config.keys(): - self.passed.append((4, "Config variable found: {}".format(self._wrap_quotes(cf)))) - break - else: - self.warned.append((4, "Config variable not found: {}".format(self._wrap_quotes(cfs)))) - for cf in config_fail_ifdefined: - if cf not in self.config.keys(): - self.passed.append((4, "Config variable (correctly) not found: {}".format(self._wrap_quotes(cf)))) - else: - self.failed.append((4, "Config variable (incorrectly) found: {}".format(self._wrap_quotes(cf)))) - - # Check and warn if the process configuration is done with deprecated syntax - process_with_deprecated_syntax = list( - set( - [ - re.search(r"^(process\.\$.*?)\.+.*$", ck).group(1) - for ck in self.config.keys() - if re.match(r"^(process\.\$.*?)\.+.*$", ck) - ] - ) - ) - for pd in process_with_deprecated_syntax: - self.warned.append((4, "Process configuration is done with deprecated_syntax: {}".format(pd))) - - # Check the variables that should be set to 'true' - for k in ["timeline.enabled", "report.enabled", "trace.enabled", "dag.enabled"]: - if self.config.get(k) == "true": - self.passed.append((4, "Config `{}` had correct value: `{}`".format(k, self.config.get(k)))) - else: - self.failed.append((4, "Config `{}` did not have correct value: `{}`".format(k, self.config.get(k)))) - - # Check that the pipeline name starts with nf-core - try: - assert self.config.get("manifest.name", "").strip("'\"").startswith("nf-core/") - except (AssertionError, IndexError): - self.failed.append( - ( - 4, - "Config `manifest.name` did not begin with `nf-core/`:\n {}".format( - self.config.get("manifest.name", "").strip("'\"") - ), - ) - ) - else: - self.passed.append((4, "Config `manifest.name` began with `nf-core/`")) - self.pipeline_name = self.config.get("manifest.name", 
"").strip("'").replace("nf-core/", "") - - # Check that the homePage is set to the GitHub URL - try: - assert self.config.get("manifest.homePage", "").strip("'\"").startswith("https://github.com/nf-core/") - except (AssertionError, IndexError): - self.failed.append( - ( - 4, - "Config variable `manifest.homePage` did not begin with https://github.com/nf-core/:\n {}".format( - self.config.get("manifest.homePage", "").strip("'\"") - ), - ) - ) - else: - self.passed.append((4, "Config variable `manifest.homePage` began with https://github.com/nf-core/")) - - # Check that the DAG filename ends in `.svg` - if "dag.file" in self.config: - if self.config["dag.file"].strip("'\"").endswith(".svg"): - self.passed.append((4, "Config `dag.file` ended with `.svg`")) - else: - self.failed.append((4, "Config `dag.file` did not end with `.svg`")) - - # Check that the minimum nextflowVersion is set properly - if "manifest.nextflowVersion" in self.config: - if self.config.get("manifest.nextflowVersion", "").strip("\"'").lstrip("!").startswith(">="): - self.passed.append((4, "Config variable `manifest.nextflowVersion` started with >= or !>=")) - # Save self.minNextflowVersion for convenience - nextflowVersionMatch = re.search(r"[0-9\.]+(-edge)?", self.config.get("manifest.nextflowVersion", "")) - if nextflowVersionMatch: - self.minNextflowVersion = nextflowVersionMatch.group(0) - else: - self.minNextflowVersion = None - else: - self.failed.append( - ( - 4, - "Config `manifest.nextflowVersion` did not start with `>=` or `!>=` : `{}`".format( - self.config.get("manifest.nextflowVersion", "") - ).strip("\"'"), - ) - ) - - # Check that the process.container name is pulling the version tag or :dev - if self.config.get("process.container"): - container_name = "{}:{}".format( - self.config.get("manifest.name").replace("nf-core", "nfcore").strip("'"), - self.config.get("manifest.version", "").strip("'"), - ) - if "dev" in self.config.get("manifest.version", "") or not 
self.config.get("manifest.version"): - container_name = "{}:dev".format( - self.config.get("manifest.name").replace("nf-core", "nfcore").strip("'") - ) - try: - assert self.config.get("process.container", "").strip("'") == container_name - except AssertionError: - if self.release_mode: - self.failed.append( - ( - 4, - "Config `process.container` looks wrong. Should be `{}` but is `{}`".format( - container_name, self.config.get("process.container", "").strip("'") - ), - ) - ) - else: - self.warned.append( - ( - 4, - "Config `process.container` looks wrong. Should be `{}` but is `{}`".format( - container_name, self.config.get("process.container", "").strip("'") - ), - ) - ) - else: - self.passed.append((4, "Config `process.container` looks correct: `{}`".format(container_name))) - - # Check that the pipeline version contains `dev` - if not self.release_mode and "manifest.version" in self.config: - if self.config["manifest.version"].strip(" '\"").endswith("dev"): - self.passed.append( - (4, "Config `manifest.version` ends in `dev`: `{}`".format(self.config["manifest.version"])) - ) - else: - self.warned.append( - ( - 4, - "Config `manifest.version` should end in `dev`: `{}`".format(self.config["manifest.version"]), - ) - ) - elif "manifest.version" in self.config: - if "dev" in self.config["manifest.version"]: - self.failed.append( - ( - 4, - "Config `manifest.version` should not contain `dev` for a release: `{}`".format( - self.config["manifest.version"] - ), - ) - ) - else: - self.passed.append( - ( - 4, - "Config `manifest.version` does not contain `dev` for release: `{}`".format( - self.config["manifest.version"] - ), - ) - ) - - def check_actions_branch_protection(self): - """Checks that the GitHub Actions branch protection workflow is valid. - - Makes sure PRs can only come from nf-core dev or 'patch' of a fork. 
- """ - fn = os.path.join(self.path, ".github", "workflows", "branch.yml") - if os.path.isfile(fn): - with open(fn, "r") as fh: - branchwf = yaml.safe_load(fh) - - # Check that the action is turned on for PRs to master - try: - # Yaml 'on' parses as True - super weird - assert "master" in branchwf[True]["pull_request_target"]["branches"] - except (AssertionError, KeyError): - self.failed.append( - (5, "GitHub Actions 'branch' workflow should be triggered for PRs to master: `{}`".format(fn)) - ) - else: - self.passed.append( - (5, "GitHub Actions 'branch' workflow is triggered for PRs to master: `{}`".format(fn)) - ) - - # Check that PRs are only ok if coming from an nf-core `dev` branch or a fork `patch` branch - steps = branchwf.get("jobs", {}).get("test", {}).get("steps", []) - for step in steps: - has_name = step.get("name", "").strip() == "Check PRs" - has_if = step.get("if", "").strip() == "github.repository == 'nf-core/{}'".format( - self.pipeline_name.lower() - ) - # Don't use .format() as the squiggly brackets get ridiculous - has_run = step.get( - "run", "" - ).strip() == '{ [[ ${{github.event.pull_request.head.repo.full_name}} == nf-core/PIPELINENAME ]] && [[ $GITHUB_HEAD_REF = "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]]'.replace( - "PIPELINENAME", self.pipeline_name.lower() - ) - if has_name and has_if and has_run: - self.passed.append( - ( - 5, - "GitHub Actions 'branch' workflow looks good: `{}`".format(fn), - ) - ) - break - else: - self.failed.append( - ( - 5, - "Couldn't find GitHub Actions 'branch' check for PRs to master: `{}`".format(fn), - ) - ) - - def check_actions_ci(self): - """Checks that the GitHub Actions CI workflow is valid - - Makes sure tests run with the required nextflow version. 
- """ - fn = os.path.join(self.path, ".github", "workflows", "ci.yml") - if os.path.isfile(fn): - with open(fn, "r") as fh: - ciwf = yaml.safe_load(fh) - - # Check that the action is turned on for the correct events - try: - expected = {"push": {"branches": ["dev"]}, "pull_request": None, "release": {"types": ["published"]}} - # NB: YAML dict key 'on' is evaluated to a Python dict key True - assert ciwf[True] == expected - except (AssertionError, KeyError, TypeError): - self.failed.append( - ( - 5, - "GitHub Actions CI is not triggered on expected events: `{}`".format(fn), - ) - ) - else: - self.passed.append((5, "GitHub Actions CI is triggered on expected events: `{}`".format(fn))) - - # Check that we're pulling the right docker image and tagging it properly - if self.config.get("process.container", ""): - docker_notag = re.sub(r":(?:[\.\d]+|dev)$", "", self.config.get("process.container", "").strip("\"'")) - docker_withtag = self.config.get("process.container", "").strip("\"'") - - # docker build - docker_build_cmd = "docker build --no-cache . -t {}".format(docker_withtag) - try: - steps = ciwf["jobs"]["test"]["steps"] - assert any([docker_build_cmd in step["run"] for step in steps if "run" in step.keys()]) - except (AssertionError, KeyError, TypeError): - self.failed.append( - ( - 5, - "CI is not building the correct docker image. Should be: `{}`".format(docker_build_cmd), - ) - ) - else: - self.passed.append((5, "CI is building the correct docker image: `{}`".format(docker_build_cmd))) - - # docker pull - docker_pull_cmd = "docker pull {}:dev".format(docker_notag) - try: - steps = ciwf["jobs"]["test"]["steps"] - assert any([docker_pull_cmd in step["run"] for step in steps if "run" in step.keys()]) - except (AssertionError, KeyError, TypeError): - self.failed.append( - (5, "CI is not pulling the correct docker image. 
Should be: `{}`".format(docker_pull_cmd)) - ) - else: - self.passed.append((5, "CI is pulling the correct docker image: {}".format(docker_pull_cmd))) - - # docker tag - docker_tag_cmd = "docker tag {}:dev {}".format(docker_notag, docker_withtag) - try: - steps = ciwf["jobs"]["test"]["steps"] - assert any([docker_tag_cmd in step["run"] for step in steps if "run" in step.keys()]) - except (AssertionError, KeyError, TypeError): - self.failed.append( - (5, "CI is not tagging docker image correctly. Should be: `{}`".format(docker_tag_cmd)) - ) - else: - self.passed.append((5, "CI is tagging docker image correctly: {}".format(docker_tag_cmd))) - - # Check that we are testing the minimum nextflow version - try: - matrix = ciwf["jobs"]["test"]["strategy"]["matrix"]["nxf_ver"] - assert any([self.minNextflowVersion in matrix]) - except (KeyError, TypeError): - self.failed.append((5, "Continuous integration does not check minimum NF version: `{}`".format(fn))) - except AssertionError: - self.failed.append((5, "Minimum NF version different in CI and pipelines manifest: `{}`".format(fn))) - else: - self.passed.append((5, "Continuous integration checks minimum NF version: `{}`".format(fn))) - - def check_actions_lint(self): - """Checks that the GitHub Actions lint workflow is valid - - Makes sure ``nf-core lint`` and ``markdownlint`` runs. 
- """ - fn = os.path.join(self.path, ".github", "workflows", "linting.yml") - if os.path.isfile(fn): - with open(fn, "r") as fh: - lintwf = yaml.safe_load(fh) - - # Check that the action is turned on for push and pull requests - try: - assert "push" in lintwf[True] - assert "pull_request" in lintwf[True] - except (AssertionError, KeyError, TypeError): - self.failed.append( - (5, "GitHub Actions linting workflow must be triggered on PR and push: `{}`".format(fn)) - ) - else: - self.passed.append((5, "GitHub Actions linting workflow is triggered on PR and push: `{}`".format(fn))) - - # Check that the Markdown linting runs - Markdownlint_cmd = "markdownlint ${GITHUB_WORKSPACE} -c ${GITHUB_WORKSPACE}/.github/markdownlint.yml" - try: - steps = lintwf["jobs"]["Markdown"]["steps"] - assert any([Markdownlint_cmd in step["run"] for step in steps if "run" in step.keys()]) - except (AssertionError, KeyError, TypeError): - self.failed.append((5, "Continuous integration must run Markdown lint Tests: `{}`".format(fn))) - else: - self.passed.append((5, "Continuous integration runs Markdown lint Tests: `{}`".format(fn))) - - # Check that the nf-core linting runs - nfcore_lint_cmd = "nf-core -l lint_log.txt lint ${GITHUB_WORKSPACE}" - try: - steps = lintwf["jobs"]["nf-core"]["steps"] - assert any([nfcore_lint_cmd in step["run"] for step in steps if "run" in step.keys()]) - except (AssertionError, KeyError, TypeError): - self.failed.append((5, "Continuous integration must run nf-core lint Tests: `{}`".format(fn))) - else: - self.passed.append((5, "Continuous integration runs nf-core lint Tests: `{}`".format(fn))) - - def check_actions_awstest(self): - """Checks the GitHub Actions awstest is valid. - - Makes sure it is triggered only on ``push`` to ``master``. 
- """ - fn = os.path.join(self.path, ".github", "workflows", "awstest.yml") - if os.path.isfile(fn): - with open(fn, "r") as fh: - wf = yaml.safe_load(fh) - - # Check that the action is only turned on for workflow_dispatch - try: - assert "workflow_dispatch" in wf[True] - assert "push" not in wf[True] - assert "pull_request" not in wf[True] - except (AssertionError, KeyError, TypeError): - self.failed.append( - ( - 5, - "GitHub Actions AWS test should be triggered on workflow_dispatch and not on push or PRs: `{}`".format( - fn - ), - ) - ) - else: - self.passed.append((5, "GitHub Actions AWS test is triggered on workflow_dispatch: `{}`".format(fn))) - - def check_actions_awsfulltest(self): - """Checks the GitHub Actions awsfulltest is valid. - - Makes sure it is triggered only on ``release`` and workflow_dispatch. - """ - fn = os.path.join(self.path, ".github", "workflows", "awsfulltest.yml") - if os.path.isfile(fn): - with open(fn, "r") as fh: - wf = yaml.safe_load(fh) - - aws_profile = "-profile test " - - # Check that the action is only turned on for published releases - try: - assert "workflow_run" in wf[True] - assert wf[True]["workflow_run"]["workflows"] == ["nf-core Docker push (release)"] - assert wf[True]["workflow_run"]["types"] == ["completed"] - assert "workflow_dispatch" in wf[True] - except (AssertionError, KeyError, TypeError): - self.failed.append( - ( - 5, - "GitHub Actions AWS full test should be triggered only on published release and workflow_dispatch: `{}`".format( - fn - ), - ) - ) - else: - self.passed.append( - ( - 5, - "GitHub Actions AWS full test is triggered only on published release and workflow_dispatch: `{}`".format( - fn - ), - ) - ) - - # Warn if `-profile test` is still unchanged - try: - steps = wf["jobs"]["run-awstest"]["steps"] - assert any([aws_profile in step["run"] for step in steps if "run" in step.keys()]) - except (AssertionError, KeyError, TypeError): - self.passed.append((5, "GitHub Actions AWS full test should test full 
datasets: `{}`".format(fn))) - else: - self.warned.append((5, "GitHub Actions AWS full test should test full datasets: `{}`".format(fn))) - - def check_readme(self): - """Checks the repository README file for errors. - - Currently just checks the badges at the top of the README. - """ - with open(os.path.join(self.path, "README.md"), "r") as fh: - content = fh.read() - - # Check that there is a readme badge showing the minimum required version of Nextflow - # and that it has the correct version - nf_badge_re = r"\[!\[Nextflow\]\(https://img\.shields\.io/badge/nextflow-%E2%89%A5([\d\.]+)-brightgreen\.svg\)\]\(https://www\.nextflow\.io/\)" - match = re.search(nf_badge_re, content) - if match: - nf_badge_version = match.group(1).strip("'\"") - try: - assert nf_badge_version == self.minNextflowVersion - except (AssertionError, KeyError): - self.failed.append( - ( - 6, - "README Nextflow minimum version badge does not match config. Badge: `{}`, Config: `{}`".format( - nf_badge_version, self.minNextflowVersion - ), - ) - ) - else: - self.passed.append( - ( - 6, - "README Nextflow minimum version badge matched config. Badge: `{}`, Config: `{}`".format( - nf_badge_version, self.minNextflowVersion - ), - ) - ) - else: - self.warned.append((6, "README did not have a Nextflow minimum version badge.")) - - # Check that we have a bioconda badge if we have a bioconda environment file - if "environment.yml" in self.files: - bioconda_badge = "[![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/)" - if bioconda_badge in content: - self.passed.append((6, "README had a bioconda badge")) - else: - self.warned.append((6, "Found a bioconda environment.yml file but no badge in the README")) - - def check_version_consistency(self): - """Checks container tags versions. - - Runs on ``process.container`` (if set) and ``$GITHUB_REF`` (if a GitHub Actions release). 
- - Checks that: - * the container has a tag - * the version numbers are numeric - * the version numbers are the same as one-another - """ - versions = {} - # Get the version definitions - # Get version from nextflow.config - versions["manifest.version"] = self.config.get("manifest.version", "").strip(" '\"") - - # Get version from the docker slug - if self.config.get("process.container", "") and not ":" in self.config.get("process.container", ""): - self.failed.append( - ( - 7, - "Docker slug seems not to have " - "a version tag: {}".format(self.config.get("process.container", "")), - ) - ) - return - - # Get config container slugs, (if set; one container per workflow) - if self.config.get("process.container", ""): - versions["process.container"] = self.config.get("process.container", "").strip(" '\"").split(":")[-1] - if self.config.get("process.container", ""): - versions["process.container"] = self.config.get("process.container", "").strip(" '\"").split(":")[-1] - - # Get version from the GITHUB_REF env var if this is a release - if ( - os.environ.get("GITHUB_REF", "").startswith("refs/tags/") - and os.environ.get("GITHUB_REPOSITORY", "") != "nf-core/tools" - ): - versions["GITHUB_REF"] = os.path.basename(os.environ["GITHUB_REF"].strip(" '\"")) - - # Check if they are all numeric - for v_type, version in versions.items(): - if not version.replace(".", "").isdigit(): - self.failed.append((7, "{} was not numeric: {}!".format(v_type, version))) - return - - # Check if they are consistent - if len(set(versions.values())) != 1: - self.failed.append( - ( - 7, - "The versioning is not consistent between container, release tag " - "and config. Found {}".format(", ".join(["{} = {}".format(k, v) for k, v in versions.items()])), - ) - ) - return - - self.passed.append((7, "Version tags are numeric and consistent between container, release tag and config.")) - - def check_conda_env_yaml(self): - """Checks that the conda environment file is valid. 
- - Checks that: - * a name is given and is consistent with the pipeline name - * check that dependency versions are pinned - * dependency versions are the latest available - """ - if "environment.yml" not in self.files: - return - - # Check that the environment name matches the pipeline name - pipeline_version = self.config.get("manifest.version", "").strip(" '\"") - expected_env_name = "nf-core-{}-{}".format(self.pipeline_name.lower(), pipeline_version) - if self.conda_config["name"] != expected_env_name: - self.failed.append( - ( - 8, - "Conda environment name is incorrect ({}, should be {})".format( - self.conda_config["name"], expected_env_name - ), - ) - ) - else: - self.passed.append((8, "Conda environment name was correct ({})".format(expected_env_name))) - - # Check conda dependency list - for dep in self.conda_config.get("dependencies", []): - if isinstance(dep, str): - # Check that each dependency has a version number - try: - assert dep.count("=") in [1, 2] - except AssertionError: - self.failed.append((8, "Conda dep did not have pinned version number: `{}`".format(dep))) - else: - self.passed.append((8, "Conda dep had pinned version number: `{}`".format(dep))) - - try: - depname, depver = dep.split("=")[:2] - self.check_anaconda_package(dep) - except ValueError: - pass - else: - # Check that required version is available at all - if depver not in self.conda_package_info[dep].get("versions"): - self.failed.append((8, "Conda dep had unknown version: {}".format(dep))) - continue # No need to test for latest version, continue linting - # Check version is latest available - last_ver = self.conda_package_info[dep].get("latest_version") - if last_ver is not None and last_ver != depver: - self.warned.append((8, "Conda dep outdated: `{}`, `{}` available".format(dep, last_ver))) - else: - self.passed.append((8, "Conda package is the latest available: `{}`".format(dep))) - - elif isinstance(dep, dict): - for pip_dep in dep.get("pip", []): - # Check that each pip 
dependency has a version number - try: - assert pip_dep.count("=") == 2 - except AssertionError: - self.failed.append((8, "Pip dependency did not have pinned version number: {}".format(pip_dep))) - else: - self.passed.append((8, "Pip dependency had pinned version number: {}".format(pip_dep))) - - try: - pip_depname, pip_depver = pip_dep.split("==", 1) - self.check_pip_package(pip_dep) - except ValueError: - pass - else: - # Check, if PyPi package version is available at all - if pip_depver not in self.conda_package_info[pip_dep].get("releases").keys(): - self.failed.append((8, "PyPi package had an unknown version: {}".format(pip_depver))) - continue # No need to test latest version, if not available - last_ver = self.conda_package_info[pip_dep].get("info").get("version") - if last_ver is not None and last_ver != pip_depver: - self.warned.append( - ( - 8, - "PyPi package is not latest available: {}, {} available".format( - pip_depver, last_ver - ), - ) - ) - else: - self.passed.append((8, "PyPi package is latest available: {}".format(pip_depver))) - - def check_anaconda_package(self, dep): - """Query conda package information. - - Sends a HTTP GET request to the Anaconda remote API. - - Args: - dep (str): A conda package name. - - Raises: - A ValueError, if the package name can not be resolved. - """ - # Check if each dependency is the latest available version - depname, depver = dep.split("=", 1) - dep_channels = self.conda_config.get("channels", []) - # 'defaults' isn't actually a channel name. 
See https://docs.anaconda.com/anaconda/user-guide/tasks/using-repositories/ - if "defaults" in dep_channels: - dep_channels.remove("defaults") - dep_channels.extend(["main", "anaconda", "r", "free", "archive", "anaconda-extras"]) - if "::" in depname: - dep_channels = [depname.split("::")[0]] - depname = depname.split("::")[1] - for ch in dep_channels: - anaconda_api_url = "https://api.anaconda.org/package/{}/{}".format(ch, depname) - try: - response = requests.get(anaconda_api_url, timeout=10) - except (requests.exceptions.Timeout): - self.warned.append((8, "Anaconda API timed out: {}".format(anaconda_api_url))) - raise ValueError - except (requests.exceptions.ConnectionError): - self.warned.append((8, "Could not connect to Anaconda API")) - raise ValueError - else: - if response.status_code == 200: - dep_json = response.json() - self.conda_package_info[dep] = dep_json - return - elif response.status_code != 404: - self.warned.append( - ( - 8, - "Anaconda API returned unexpected response code `{}` for: {}\n{}".format( - response.status_code, anaconda_api_url, response - ), - ) - ) - raise ValueError - elif response.status_code == 404: - log.debug("Could not find {} in conda channel {}".format(dep, ch)) - else: - # We have looped through each channel and had a 404 response code on everything - self.failed.append((8, "Could not find Conda dependency using the Anaconda API: {}".format(dep))) - raise ValueError - - def check_pip_package(self, dep): - """Query PyPi package information. - - Sends a HTTP GET request to the PyPi remote API. - - Args: - dep (str): A PyPi package name. - - Raises: - A ValueError, if the package name can not be resolved or the connection timed out. 
- """ - pip_depname, pip_depver = dep.split("=", 1) - pip_api_url = "https://pypi.python.org/pypi/{}/json".format(pip_depname) - try: - response = requests.get(pip_api_url, timeout=10) - except (requests.exceptions.Timeout): - self.warned.append((8, "PyPi API timed out: {}".format(pip_api_url))) - raise ValueError - except (requests.exceptions.ConnectionError): - self.warned.append((8, "PyPi API Connection error: {}".format(pip_api_url))) - raise ValueError - else: - if response.status_code == 200: - pip_dep_json = response.json() - self.conda_package_info[dep] = pip_dep_json - else: - self.failed.append((8, "Could not find pip dependency using the PyPi API: {}".format(dep))) - raise ValueError - - def check_conda_dockerfile(self): - """Checks the Docker build file. - - Checks that: - * a name is given and is consistent with the pipeline name - * dependency versions are pinned - * dependency versions are the latest available - """ - if "environment.yml" not in self.files or "Dockerfile" not in self.files or len(self.dockerfile) == 0: - return - - expected_strings = [ - "COPY environment.yml /", - "RUN conda env create --quiet -f /environment.yml && conda clean -a", - "RUN conda env export --name {} > {}.yml".format(self.conda_config["name"], self.conda_config["name"]), - "ENV PATH /opt/conda/envs/{}/bin:$PATH".format(self.conda_config["name"]), - ] - - if "dev" not in self.version: - expected_strings.append("FROM nfcore/base:{}".format(self.version)) - - difference = set(expected_strings) - set(self.dockerfile) - if not difference: - self.passed.append((9, "Found all expected strings in Dockerfile file")) - else: - for missing in difference: - self.failed.append((9, "Could not find Dockerfile file string: {}".format(missing))) - - def check_pipeline_todos(self): - """ Go through all template files looking for the string 'TODO nf-core:' """ - ignore = [".git"] - if os.path.isfile(os.path.join(self.path, ".gitignore")): - with io.open(os.path.join(self.path, 
".gitignore"), "rt", encoding="latin1") as fh: - for l in fh: - ignore.append(os.path.basename(l.strip().rstrip("/"))) - for root, dirs, files in os.walk(self.path): - # Ignore files - for i in ignore: - dirs = [d for d in dirs if not fnmatch.fnmatch(os.path.join(root, d), i)] - files = [f for f in files if not fnmatch.fnmatch(os.path.join(root, f), i)] - for fname in files: - with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh: - for l in fh: - if "TODO nf-core" in l: - l = ( - l.replace("", "") - .replace("# TODO nf-core: ", "") - .replace("// TODO nf-core: ", "") - .replace("TODO nf-core: ", "") - .strip() - ) - self.warned.append((10, "TODO string in `{}`: _{}_".format(fname, l))) - - def check_pipeline_name(self): - """Check whether pipeline name adheres to lower case/no hyphen naming convention""" - - if self.pipeline_name.islower() and self.pipeline_name.isalnum(): - self.passed.append((12, "Name adheres to nf-core convention")) - if not self.pipeline_name.islower(): - self.warned.append((12, "Naming does not adhere to nf-core conventions: Contains uppercase letters")) - if not self.pipeline_name.isalnum(): - self.warned.append( - (12, "Naming does not adhere to nf-core conventions: Contains non alphanumeric characters") - ) - - def check_cookiecutter_strings(self): - """ - Look for the string 'cookiecutter' in all pipeline files. - Finding it probably means that there has been a copy+paste error from the template. 
- """ - try: - # First, try to get the list of files using git - git_ls_files = subprocess.check_output(["git", "ls-files"], cwd=self.path).splitlines() - list_of_files = [os.path.join(self.path, s.decode("utf-8")) for s in git_ls_files] - except subprocess.CalledProcessError as e: - # Failed, so probably not initialised as a git repository - just a list of all files - log.debug("Couldn't call 'git ls-files': {}".format(e)) - list_of_files = [] - for subdir, dirs, files in os.walk(self.path): - for file in files: - list_of_files.append(os.path.join(subdir, file)) - - # Loop through files, searching for string - num_matches = 0 - num_files = 0 - for fn in list_of_files: - num_files += 1 - try: - with io.open(fn, "r", encoding="latin1") as fh: - lnum = 0 - for l in fh: - lnum += 1 - cc_matches = re.findall(r"{{\s*cookiecutter[^}]*}}", l) - if len(cc_matches) > 0: - for cc_match in cc_matches: - self.failed.append( - ( - 13, - "Found a cookiecutter template string in `{}` L{}: {}".format( - fn, lnum, cc_match - ), - ) - ) - num_matches += 1 - except FileNotFoundError as e: - log.warn("`git ls-files` returned '{}' but could not open it!".format(fn)) - if num_matches == 0: - self.passed.append((13, "Did not find any cookiecutter template strings ({} files)".format(num_files))) - - def check_schema_lint(self): - """ Lint the pipeline schema """ - - # Only show error messages from schema - logging.getLogger("nf_core.schema").setLevel(logging.ERROR) - - # Lint the schema - self.schema_obj = nf_core.schema.PipelineSchema() - self.schema_obj.get_schema_path(self.path) - try: - self.schema_obj.load_lint_schema() - self.passed.append((14, "Schema lint passed")) - except AssertionError as e: - self.failed.append((14, "Schema lint failed: {}".format(e))) - - # Check the title and description - gives warnings instead of fail - if self.schema_obj.schema is not None: - try: - self.schema_obj.validate_schema_title_description() - self.passed.append((14, "Schema title + description 
lint passed")) - except AssertionError as e: - self.warned.append((14, e)) - - def check_schema_params(self): - """ Check that the schema describes all flat params in the pipeline """ - - # First, get the top-level config options for the pipeline - # Schema object already created in the previous test - self.schema_obj.get_schema_path(self.path) - self.schema_obj.get_wf_params() - self.schema_obj.no_prompts = True - - # Remove any schema params not found in the config - removed_params = self.schema_obj.remove_schema_notfound_configs() - - # Add schema params found in the config but not the schema - added_params = self.schema_obj.add_schema_found_configs() - - if len(removed_params) > 0: - for param in removed_params: - self.warned.append((15, "Schema param `{}` not found from nextflow config".format(param))) - - if len(added_params) > 0: - for param in added_params: - self.failed.append( - (15, "Param `{}` from `nextflow config` not found in nextflow_schema.json".format(param)) - ) - - if len(removed_params) == 0 and len(added_params) == 0: - self.passed.append((15, "Schema matched params returned from nextflow config")) - - def print_results(self, show_passed=False): - - log.debug("Printing final results") - console = Console(force_terminal=nf_core.utils.rich_force_colors()) - - # Helper function to format test links nicely - def format_result(test_results, table): - """ - Given an list of error message IDs and the message texts, return a nicely formatted - string for the terminal with appropriate ASCII colours. 
- """ - for eid, msg in test_results: - table.add_row( - Markdown("[https://nf-co.re/errors#{0}](https://nf-co.re/errors#{0}): {1}".format(eid, msg)) - ) - return table - - def _s(some_list): - if len(some_list) > 1: - return "s" - return "" - - # Table of passed tests - if len(self.passed) > 0 and show_passed: - table = Table(style="green", box=rich.box.ROUNDED) - table.add_column( - r"\[โœ”] {} Test{} Passed".format(len(self.passed), _s(self.passed)), - no_wrap=True, - ) - table = format_result(self.passed, table) - console.print(table) - - # Table of warning tests - if len(self.warned) > 0: - table = Table(style="yellow", box=rich.box.ROUNDED) - table.add_column(r"\[!] {} Test Warning{}".format(len(self.warned), _s(self.warned)), no_wrap=True) - table = format_result(self.warned, table) - console.print(table) - - # Table of failing tests - if len(self.failed) > 0: - table = Table(style="red", box=rich.box.ROUNDED) - table.add_column( - r"\[โœ—] {} Test{} Failed".format(len(self.failed), _s(self.failed)), - no_wrap=True, - ) - table = format_result(self.failed, table) - console.print(table) - - # Summary table - - table = Table(box=rich.box.ROUNDED) - table.add_column("[bold green]LINT RESULTS SUMMARY".format(len(self.passed)), no_wrap=True) - table.add_row( - r"\[โœ”] {:>3} Test{} Passed".format(len(self.passed), _s(self.passed)), - style="green", - ) - table.add_row(r"\[!] 
{:>3} Test Warning{}".format(len(self.warned), _s(self.warned)), style="yellow") - table.add_row(r"\[โœ—] {:>3} Test{} Failed".format(len(self.failed), _s(self.failed)), style="red") - console.print(table) - - def get_results_md(self): - """ - Function to create a markdown file suitable for posting in a GitHub comment - """ - # Overall header - overall_result = "Passed :white_check_mark:" - if len(self.failed) > 0: - overall_result = "Failed :x:" - - # List of tests for details - test_failure_count = "" - test_failures = "" - if len(self.failed) > 0: - test_failure_count = "\n-| โŒ {:3d} tests failed |-".format(len(self.failed)) - test_failures = "### :x: Test failures:\n\n{}\n\n".format( - "\n".join( - [ - "* [Test #{0}](https://nf-co.re/errors#{0}) - {1}".format(eid, self._strip_ansi_codes(msg, "`")) - for eid, msg in self.failed - ] - ) - ) - - test_warning_count = "" - test_warnings = "" - if len(self.warned) > 0: - test_warning_count = "\n!| โ— {:3d} tests had warnings |!".format(len(self.warned)) - test_warnings = "### :heavy_exclamation_mark: Test warnings:\n\n{}\n\n".format( - "\n".join( - [ - "* [Test #{0}](https://nf-co.re/errors#{0}) - {1}".format(eid, self._strip_ansi_codes(msg, "`")) - for eid, msg in self.warned - ] - ) - ) - - test_passe_count = "" - test_passes = "" - if len(self.passed) > 0: - test_passed_count = "\n+| โœ… {:3d} tests passed |+".format(len(self.passed)) - test_passes = "### :white_check_mark: Tests passed:\n\n{}\n\n".format( - "\n".join( - [ - "* [Test #{0}](https://nf-co.re/errors#{0}) - {1}".format(eid, self._strip_ansi_codes(msg, "`")) - for eid, msg in self.passed - ] - ) - ) - - now = datetime.datetime.now() - - markdown = textwrap.dedent( - """ - #### `nf-core lint` overall result: {} - - {} - - ```diff{}{}{} - ``` - -
- - {}{}{}### Run details: - - * nf-core/tools version {} - * Run at `{}` - -
- """ - ).format( - overall_result, - "Posted for pipeline commit {}".format(self.git_sha[:7]) if self.git_sha is not None else "", - test_passed_count, - test_warning_count, - test_failure_count, - test_failures, - test_warnings, - test_passes, - nf_core.__version__, - now.strftime("%Y-%m-%d %H:%M:%S"), - ) - - return markdown - - def save_json_results(self, json_fn): - """ - Function to dump lint results to a JSON file for downstream use - """ - - log.info("Writing lint results to {}".format(json_fn)) - now = datetime.datetime.now() - results = { - "nf_core_tools_version": nf_core.__version__, - "date_run": now.strftime("%Y-%m-%d %H:%M:%S"), - "tests_pass": [[idx, self._strip_ansi_codes(msg)] for idx, msg in self.passed], - "tests_warned": [[idx, self._strip_ansi_codes(msg)] for idx, msg in self.warned], - "tests_failed": [[idx, self._strip_ansi_codes(msg)] for idx, msg in self.failed], - "num_tests_pass": len(self.passed), - "num_tests_warned": len(self.warned), - "num_tests_failed": len(self.failed), - "has_tests_pass": len(self.passed) > 0, - "has_tests_warned": len(self.warned) > 0, - "has_tests_failed": len(self.failed) > 0, - "markdown_result": self.get_results_md(), - } - with open(json_fn, "w") as fh: - json.dump(results, fh, indent=4) - - def _wrap_quotes(self, files): - if not isinstance(files, list): - files = [files] - bfiles = ["`{}`".format(f) for f in files] - return " or ".join(bfiles) - - def _strip_ansi_codes(self, string, replace_with=""): - # https://stackoverflow.com/a/14693789/713980 - ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])") - return ansi_escape.sub(replace_with, string) diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py new file mode 100644 index 0000000000..f43ed92c5f --- /dev/null +++ b/nf_core/lint/__init__.py @@ -0,0 +1,451 @@ +#!/usr/bin/env python +"""Linting policy for nf-core pipeline projects. + +Tests Nextflow-based pipelines to check that they adhere to +the nf-core community guidelines. 
+""" + +from rich.console import Console +from rich.markdown import Markdown +from rich.table import Table +import datetime +import git +import json +import logging +import os +import re +import rich +import rich.progress +import subprocess +import textwrap +import yaml + +import nf_core.utils + +log = logging.getLogger(__name__) + + +def run_linting(pipeline_dir, release_mode=False, show_passed=False, md_fn=None, json_fn=None): + """Runs all nf-core linting checks on a given Nextflow pipeline project + in either `release` mode or `normal` mode (default). Returns an object + of type :class:`PipelineLint` after finished. + + Args: + pipeline_dir (str): The path to the Nextflow pipeline root directory + release_mode (bool): Set this to `True`, if the linting should be run in the `release` mode. + See :class:`PipelineLint` for more information. + + Returns: + An object of type :class:`PipelineLint` that contains all the linting results. + """ + + # Create the lint object + lint_obj = PipelineLint(pipeline_dir, release_mode) + + # Load the various pipeline configs + lint_obj._load_lint_config() + lint_obj._load_pipeline_config() + lint_obj._load_conda_environment() + lint_obj._list_files() + + # Run the linting tests + try: + lint_obj._lint_pipeline() + except AssertionError as e: + log.critical("Critical error: {}".format(e)) + log.info("Stopping tests...") + return lint_obj + + # Print the results + lint_obj._print_results(show_passed) + + # Save results to Markdown file + if md_fn is not None: + log.info("Writing lint results to {}".format(md_fn)) + markdown = lint_obj._get_results_md() + with open(md_fn, "w") as fh: + fh.write(markdown) + + # Save results to JSON file + if json_fn is not None: + lint_obj._save_json_results(json_fn) + + # Reminder about --release mode flag if we had failures + if len(lint_obj.failed) > 0: + if release_mode: + log.info("Reminder: Lint tests were run in --release mode.") + + return lint_obj + + +class 
class PipelineLint(nf_core.utils.Pipeline):
    """Object to hold linting information and results.

    Inherits :class:`nf_core.utils.Pipeline` class.

    Use the :func:`PipelineLint._lint_pipeline` function to run lint tests.

    Args:
        path (str): The path to the nf-core pipeline directory.

    Attributes:
        failed (list): A list of tuples of the form: ``(<test_name>, <reason>)``
        ignored (list): A list of tuples of the form: ``(<test_name>, <reason>)``
        lint_config (dict): The parsed nf-core linting config for this pipeline
        passed (list): A list of tuples of the form: ``(<test_name>, <reason>)``
        release_mode (bool): `True` if the linting was run in release mode, `False` otherwise.
        warned (list): A list of tuples of the form: ``(<test_name>, <reason>)``
    """

    # Each lint test is implemented in its own sibling module and bound onto
    # the class here, so lint tests can be written/maintained independently.
    from .files_exist import files_exist
    from .licence import licence
    from .nextflow_config import nextflow_config
    from .actions_branch_protection import actions_branch_protection
    from .actions_ci import actions_ci
    from .actions_lint import actions_lint
    from .actions_awstest import actions_awstest
    from .actions_awsfulltest import actions_awsfulltest
    from .readme import readme
    from .version_consistency import version_consistency
    from .conda_env_yaml import conda_env_yaml, _anaconda_package, _pip_package
    from .conda_dockerfile import conda_dockerfile
    from .pipeline_todos import pipeline_todos
    from .pipeline_name_conventions import pipeline_name_conventions
    from .cookiecutter_strings import cookiecutter_strings
    from .schema_lint import schema_lint
    from .schema_params import schema_params

    def __init__(self, wf_path, release_mode=False):
        """ Initialise linting object """

        # Initialise the parent object
        super().__init__(wf_path)

        self.failed = []  # (test_name, message) tuples for failed tests
        self.ignored = []  # (test_name, message) tuples for skipped tests
        self.lint_config = {}  # parsed .nf-core-lint.yml config, if present
        self.passed = []  # (test_name, message) tuples for passing tests
        self.release_mode = release_mode
        self.warned = []  # (test_name, message) tuples for warnings
        # Ordered list of lint test method names to run
        self.lint_tests = [
            "files_exist",
            "licence",
            "nextflow_config",
            "actions_branch_protection",
            "actions_ci",
            "actions_lint",
            "actions_awstest",
            "actions_awsfulltest",
            "readme",
            "conda_env_yaml",
            "conda_dockerfile",
            "pipeline_todos",
            "pipeline_name_conventions",
            "cookiecutter_strings",
            "schema_lint",
            "schema_params",
        ]
        if self.release_mode:
            # version_consistency only makes sense when cutting a release
            self.lint_tests.extend(["version_consistency"])

    def _load(self):
        """Load information about the pipeline into the PipelineLint object"""
        # Load everything using the parent object
        super()._load()

        # Load lint object specific stuff
        self._load_lint_config()

    def _load_lint_config(self):
        """Parse a pipeline lint config file.

        Look for a file called either `.nf-core-lint.yml` or
        `.nf-core-lint.yaml` in the pipeline root directory and parse it.
        (`.yml` takes precedence).

        Add parsed config to the `self.lint_config` class attribute.
        """
        config_fn = os.path.join(self.wf_path, ".nf-core-lint.yml")

        # Pick up the file if it's .yaml instead of .yml
        if not os.path.isfile(config_fn):
            config_fn = os.path.join(self.wf_path, ".nf-core-lint.yaml")

        # Load the YAML
        try:
            with open(config_fn, "r") as fh:
                self.lint_config = yaml.safe_load(fh)
        except FileNotFoundError:
            # No lint config is perfectly fine - everything runs with defaults
            log.debug("No lint config file found: {}".format(config_fn))

        # Check if we have any keys that don't match lint test names
        for k in self.lint_config:
            if k not in self.lint_tests:
                log.warning("Found unrecognised test name '{}' in pipeline lint config".format(k))
+ """ + log.info("Testing pipeline: [magenta]{}".format(self.wf_path)) + if self.release_mode: + log.info("Including --release mode tests") + + progress = rich.progress.Progress( + "[bold blue]{task.description}", + rich.progress.BarColumn(bar_width=None), + "[magenta]{task.completed} of {task.total}[reset] ยป [bold yellow]{task.fields[func_name]}", + transient=True, + ) + with progress: + lint_progress = progress.add_task( + "Running lint checks", total=len(self.lint_tests), func_name=self.lint_tests[0] + ) + for fun_name in self.lint_tests: + if self.lint_config.get(fun_name, {}) is False: + log.debug("Skipping lint test '{}'".format(fun_name)) + self.ignored.append((fun_name, fun_name)) + continue + progress.update(lint_progress, advance=1, func_name=fun_name) + log.debug("Running lint test: {}".format(fun_name)) + test_results = getattr(self, fun_name)() + for test in test_results.get("passed", []): + self.passed.append((fun_name, test)) + for test in test_results.get("ignored", []): + self.ignored.append((fun_name, test)) + for test in test_results.get("warned", []): + self.warned.append((fun_name, test)) + for test in test_results.get("failed", []): + self.failed.append((fun_name, test)) + + def _print_results(self, show_passed=False): + """Print linting results to the command line. + + Uses the ``rich`` library to print a set of formatted tables to the command line + summarising the linting results. + """ + + log.debug("Printing final results") + console = Console(force_terminal=nf_core.utils.rich_force_colors()) + + # Helper function to format test links nicely + def format_result(test_results, table): + """ + Given an list of error message IDs and the message texts, return a nicely formatted + string for the terminal with appropriate ASCII colours. 
+ """ + for eid, msg in test_results: + table.add_row(Markdown("[{0}](https://nf-co.re/errors#{0}): {1}".format(eid, msg))) + return table + + def _s(some_list): + if len(some_list) > 1: + return "s" + return "" + + # Table of passed tests + if len(self.passed) > 0 and show_passed: + table = Table(style="green", box=rich.box.ROUNDED) + table.add_column( + r"\[โœ”] {} Test{} Passed".format(len(self.passed), _s(self.passed)), + no_wrap=True, + ) + table = format_result(self.passed, table) + console.print(table) + + # Table of ignored tests + if len(self.ignored) > 0: + table = Table(style="grey58", box=rich.box.ROUNDED) + table.add_column(r"\[?] {} Test{} Ignored".format(len(self.ignored), _s(self.ignored)), no_wrap=True) + table = format_result(self.ignored, table) + console.print(table) + + # Table of warning tests + if len(self.warned) > 0: + table = Table(style="yellow", box=rich.box.ROUNDED) + table.add_column(r"\[!] {} Test Warning{}".format(len(self.warned), _s(self.warned)), no_wrap=True) + table = format_result(self.warned, table) + console.print(table) + + # Table of failing tests + if len(self.failed) > 0: + table = Table(style="red", box=rich.box.ROUNDED) + table.add_column( + r"\[โœ—] {} Test{} Failed".format(len(self.failed), _s(self.failed)), + no_wrap=True, + ) + table = format_result(self.failed, table) + console.print(table) + + # Summary table + table = Table(box=rich.box.ROUNDED) + table.add_column("[bold green]LINT RESULTS SUMMARY".format(len(self.passed)), no_wrap=True) + table.add_row( + r"\[โœ”] {:>3} Test{} Passed".format(len(self.passed), _s(self.passed)), + style="green", + ) + table.add_row(r"\[?] {:>3} Test{} Ignored".format(len(self.ignored), _s(self.ignored)), style="grey58") + table.add_row(r"\[!] 
{:>3} Test Warning{}".format(len(self.warned), _s(self.warned)), style="yellow") + table.add_row(r"\[โœ—] {:>3} Test{} Failed".format(len(self.failed), _s(self.failed)), style="red") + console.print(table) + + def _get_results_md(self): + """ + Create a markdown file suitable for posting in a GitHub comment. + + Returns: + markdown (str): Formatting markdown content + """ + # Overall header + overall_result = "Passed :white_check_mark:" + if len(self.failed) > 0: + overall_result = "Failed :x:" + + # List of tests for details + test_failure_count = "" + test_failures = "" + if len(self.failed) > 0: + test_failure_count = "\n-| โŒ {:3d} tests failed |-".format(len(self.failed)) + test_failures = "### :x: Test failures:\n\n{}\n\n".format( + "\n".join( + [ + "* [{0}](https://nf-co.re/errors#{0}) - {1}".format(eid, self._strip_ansi_codes(msg, "`")) + for eid, msg in self.failed + ] + ) + ) + + test_ignored_count = "" + test_ignored = "" + if len(self.ignored) > 0: + test_ignored_count = "\n#| โ” {:3d} tests had warnings |#".format(len(self.ignored)) + test_ignored = "### :grey_question: Tests ignored:\n\n{}\n\n".format( + "\n".join( + [ + "* [{0}](https://nf-co.re/errors#{0}) - {1}".format(eid, self._strip_ansi_codes(msg, "`")) + for eid, msg in self.ignored + ] + ) + ) + + test_warning_count = "" + test_warnings = "" + if len(self.warned) > 0: + test_warning_count = "\n!| โ— {:3d} tests had warnings |!".format(len(self.warned)) + test_warnings = "### :heavy_exclamation_mark: Test warnings:\n\n{}\n\n".format( + "\n".join( + [ + "* [{0}](https://nf-co.re/errors#{0}) - {1}".format(eid, self._strip_ansi_codes(msg, "`")) + for eid, msg in self.warned + ] + ) + ) + + test_passed_count = "" + test_passes = "" + if len(self.passed) > 0: + test_passed_count = "\n+| โœ… {:3d} tests passed |+".format(len(self.passed)) + test_passes = "### :white_check_mark: Tests passed:\n\n{}\n\n".format( + "\n".join( + [ + "* [{0}](https://nf-co.re/errors#{0}) - {1}".format(eid, 
self._strip_ansi_codes(msg, "`")) + for eid, msg in self.passed + ] + ) + ) + + now = datetime.datetime.now() + + markdown = textwrap.dedent( + """ + #### `nf-core lint` overall result: {} + + {} + + ```diff{}{}{}{} + ``` + +
+ + {}{}{}{}### Run details: + + * nf-core/tools version {} + * Run at `{}` + +
+ """ + ).format( + overall_result, + "Posted for pipeline commit {}".format(self.git_sha[:7]) if self.git_sha is not None else "", + test_passed_count, + test_ignored_count, + test_warning_count, + test_failure_count, + test_failures, + test_warnings, + test_ignored, + test_passes, + nf_core.__version__, + now.strftime("%Y-%m-%d %H:%M:%S"), + ) + + return markdown + + def _save_json_results(self, json_fn): + """ + Function to dump lint results to a JSON file for downstream use + + Arguments: + json_fn (str): File path to write JSON to. + """ + + log.info("Writing lint results to {}".format(json_fn)) + now = datetime.datetime.now() + results = { + "nf_core_tools_version": nf_core.__version__, + "date_run": now.strftime("%Y-%m-%d %H:%M:%S"), + "tests_pass": [[idx, self._strip_ansi_codes(msg)] for idx, msg in self.passed], + "tests_ignored": [[idx, self._strip_ansi_codes(msg)] for idx, msg in self.ignored], + "tests_warned": [[idx, self._strip_ansi_codes(msg)] for idx, msg in self.warned], + "tests_failed": [[idx, self._strip_ansi_codes(msg)] for idx, msg in self.failed], + "num_tests_pass": len(self.passed), + "num_tests_ignored": len(self.ignored), + "num_tests_warned": len(self.warned), + "num_tests_failed": len(self.failed), + "has_tests_pass": len(self.passed) > 0, + "has_tests_ignored": len(self.ignored) > 0, + "has_tests_warned": len(self.warned) > 0, + "has_tests_failed": len(self.failed) > 0, + "markdown_result": self._get_results_md(), + } + with open(json_fn, "w") as fh: + json.dump(results, fh, indent=4) + + def _wrap_quotes(self, files): + """Helper function to take a list of filenames and format with markdown. 
    def _wrap_quotes(self, files):
        """Helper function to take a list of filenames and format with markdown.

        Args:
            files (list): List of filenames, eg::

                ['foo', 'bar', 'baz']

        Returns:
            markdown (str): Formatted string of paths separated by word ``or``, eg::

                `foo` or `bar` or `baz`
        """
        # Accept a single filename as well as a list of filenames
        if not isinstance(files, list):
            files = [files]
        bfiles = ["`{}`".format(f) for f in files]
        return " or ".join(bfiles)

    def _strip_ansi_codes(self, string, replace_with=""):
        """Strip ANSI colouring codes from a string to return plain text.

        Solution found on Stack Overflow: https://stackoverflow.com/a/14693789/713980

        Args:
            string (str): Input text, possibly containing ANSI escape sequences.
            replace_with (str): Text to substitute for each escape sequence
                (default: empty string, i.e. remove them).

        Returns:
            str: The input with all ANSI escape sequences replaced.
        """
        ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
        return ansi_escape.sub(replace_with, string)
def actions_awsfulltest(self):
    """Checks the GitHub Actions awsfulltest is valid.

    In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on full size datasets on AWS.
    This should ensure that the pipeline runs as expected on AWS and provide a resource estimation.

    The GitHub Actions workflow is called ``awsfulltest.yml``, and it can be found in the ``.github/workflows/`` directory.

    .. warning:: This workflow incurs AWS costs, therefore it should only be triggered for pipeline releases:
        ``workflow_run`` (after the docker hub release workflow) and ``workflow_dispatch``.

    .. note:: You can manually trigger the AWS tests by going to the `Actions` tab on the pipeline GitHub repository and selecting the
        `nf-core AWS full size tests` workflow on the left.

    .. tip:: For tests on full data prior to release, the `Nextflow Tower <https://tower.nf>`_ launch feature can be employed.

    The ``.github/workflows/awsfulltest.yml`` file is tested for the following:

    * Must be turned on for ``workflow_dispatch``.
    * Must be turned on for ``workflow_run`` with ``workflows: ["nf-core Docker push (release)"]`` and ``types: [completed]``
    * Should run the profile ``test_full`` that should be edited to provide the links to full-size datasets. If it runs the profile ``test``, a warning is given.
    """
    fn = os.path.join(self.wf_path, ".github", "workflows", "awsfulltest.yml")

    # Consistency fix: report an ignored result when the workflow file is
    # missing (previously this silently returned all-empty result lists),
    # matching the behaviour of the other GitHub Actions lint tests.
    if not os.path.isfile(fn):
        return {"ignored": ["'awsfulltest.yml' workflow not found: `{}`".format(fn)]}

    passed = []
    warned = []
    failed = []

    with open(fn, "r") as fh:
        wf = yaml.safe_load(fh)

    aws_profile = "-profile test "

    # Check that the action is only turned on for published releases
    # NB: the YAML key `on` parses as the Python boolean True
    try:
        assert "workflow_run" in wf[True]
        assert wf[True]["workflow_run"]["workflows"] == ["nf-core Docker push (release)"]
        assert wf[True]["workflow_run"]["types"] == ["completed"]
        assert "workflow_dispatch" in wf[True]
    except (AssertionError, KeyError, TypeError):
        failed.append("`.github/workflows/awsfulltest.yml` is not triggered correctly")
    else:
        passed.append("`.github/workflows/awsfulltest.yml` is triggered correctly")

    # Warn if `-profile test` is still being used (full tests should use `test_full`)
    try:
        steps = wf["jobs"]["run-awstest"]["steps"]
        assert any([aws_profile in step["run"] for step in steps if "run" in step.keys()])
    except (AssertionError, KeyError, TypeError):
        passed.append("`.github/workflows/awsfulltest.yml` does not use `-profile test`")
    else:
        warned.append("`.github/workflows/awsfulltest.yml` should test full datasets, not `-profile test`")

    return {"passed": passed, "warned": warned, "failed": failed}
def actions_awstest(self):
    """Checks that the GitHub Actions AWS test workflow is valid.

    Beyond the small GitHub Actions test datasets, pipelines can also be
    tested on AWS. Because running on AWS costs money, the workflow must not
    fire automatically - it should only be triggerable by hand.

    .. note:: The tests can be launched from the `Actions` tab of the pipeline
        GitHub repository by selecting the `nf-core AWS test` workflow.

    The ``.github/workflows/awstest.yml`` file is tested for the following:

    * Must be turned on for ``workflow_dispatch``.
    * Must *not* be turned on for ``push`` or ``pull_request``.
    """
    wf_fn = os.path.join(self.wf_path, ".github", "workflows", "awstest.yml")
    if not os.path.isfile(wf_fn):
        return {"ignored": ["'awstest.yml' workflow not found: `{}`".format(wf_fn)]}

    with open(wf_fn, "r") as handle:
        workflow = yaml.safe_load(handle)

    # The YAML key `on` is parsed by PyYAML as the Python boolean True.
    # Require a manual trigger only; any automatic trigger is a failure.
    try:
        triggers = workflow[True]
        assert "workflow_dispatch" in triggers
        assert "push" not in triggers
        assert "pull_request" not in triggers
    except (AssertionError, KeyError, TypeError):
        return {"failed": ["'.github/workflows/awstest.yml' is not triggered correctly"]}

    return {"passed": ["'.github/workflows/awstest.yml' is triggered correctly"]}
def actions_branch_protection(self):
    """Checks that the GitHub Actions branch protection workflow is valid.

    A common error when making pull-requests to nf-core repositories is to open the
    PR against the default branch: ``master``. This branch should only have stable
    code from the latest release, so development PRs nearly always go to ``dev`` instead.
    We want ``master`` to be the default branch so that people pull this when running workflows.

    The only time that PRs against ``master`` are allowed is when they come from a branch
    on the main nf-core repo called ``dev`` or a fork with a branch called ``patch``.

    The GitHub Actions ``.github/workflows/branch.yml`` workflow checks pull requests
    opened against ``master`` to ensure that they are coming from an allowed branch
    and throws an error if not. It also posts a comment to the PR explaining the failure
    and how to resolve it.

    Specifically, the lint test checks that:

    * The workflow is triggered for the ``pull_request_target`` event against ``master``:

      .. code-block:: yaml

         on:
           pull_request_target:
             branches:
               - master

      .. note:: The event ``pull_request_target`` is used and not ``pull_request`` so that
          it runs on the repo `receiving` the PR and has permissions to post a comment.
          The ``github.event`` object that we want is still confusingly called ``pull_request`` though.

    * The code that checks PRs to the protected nf-core repo ``master`` branch can only come from an nf-core ``dev`` branch or a fork ``patch`` branch:

      .. code-block:: yaml

         steps:
           # PRs to the nf-core repo master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches
           - name: Check PRs
             if: github.repository == 'nf-core/<pipeline_name>'
             run: |
               { [[ ${{github.event.pull_request.head.repo.full_name}} == nf-core/<pipeline_name> ]] && [[ $GITHUB_HEAD_REF = "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]]

      .. tip:: For branch protection in repositories outside of `nf-core`, you can add an additional step to this workflow.
          Keep the `nf-core` branch protection step, to ensure that the ``nf-core lint`` tests pass. It should just be ignored
          if you're working outside of `nf-core`. Here's an example of how this code could look:

          .. code-block:: yaml

             steps:
               # Usual nf-core branch check, looked for by the nf-core lint test
               - name: Check PRs
                 if: github.repository == 'nf-core/<pipeline_name>'
                 run: |
                   { [[ ${{github.event.pull_request.head.repo.full_name}} == nf-core/<pipeline_name> ]] && [[ $GITHUB_HEAD_REF = "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]]

               ##### Your custom code: Check PRs in your own repository
               - name: Check PRs in another repository
                 if: github.repository == '<repo_name>/<pipeline_name>'
                 run: |
                   { [[ ${{github.event.pull_request.head.repo.full_name}} == <repo_name>/<pipeline_name> ]] && [[ $GITHUB_HEAD_REF = "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]]
    """

    passed = []
    failed = []

    fn = os.path.join(self.wf_path, ".github", "workflows", "branch.yml")
    if not os.path.isfile(fn):
        return {"ignored": ["Could not find branch.yml workflow: {}".format(fn)]}

    with open(fn, "r") as fh:
        branchwf = yaml.safe_load(fh)

    # Check that the action is turned on for PRs to master
    try:
        # Yaml 'on' parses as True - super weird
        assert "master" in branchwf[True]["pull_request_target"]["branches"]
    except (AssertionError, KeyError):
        failed.append("GitHub Actions 'branch.yml' workflow should be triggered for PRs to master")
    else:
        passed.append("GitHub Actions 'branch.yml' workflow is triggered for PRs to master")

    # Check that PRs are only ok if coming from an nf-core `dev` branch or a fork `patch` branch
    steps = branchwf.get("jobs", {}).get("test", {}).get("steps", [])
    for step in steps:
        has_name = step.get("name", "").strip() == "Check PRs"
        has_if = step.get("if", "").strip() == "github.repository == 'nf-core/{}'".format(self.pipeline_name.lower())
        # Don't use .format() as the squiggly brackets get ridiculous
        has_run = step.get(
            "run", ""
        ).strip() == '{ [[ ${{github.event.pull_request.head.repo.full_name}} == nf-core/PIPELINENAME ]] && [[ $GITHUB_HEAD_REF = "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]]'.replace(
            "PIPELINENAME", self.pipeline_name.lower()
        )
        if has_name and has_if and has_run:
            passed.append("GitHub Actions 'branch.yml' workflow looks good")
            break
    # Break wasn't called - didn't find proper step
    else:
        failed.append("Couldn't find GitHub Actions 'branch.yml' check for PRs to master")

    return {"passed": passed, "failed": failed}
def actions_ci(self):
    """Checks that the GitHub Actions pipeline CI (Continuous Integration) workflow is valid.

    The ``.github/workflows/ci.yml`` GitHub Actions workflow runs the pipeline on a minimal test
    dataset using ``-profile test`` to check that no breaking changes have been introduced.
    Final result files are not checked, just that the pipeline exits successfully.

    This lint test checks this GitHub Actions workflow file for the following:

    * Workflow must be triggered on the following events:

      .. code-block:: yaml

         on:
           push:
             branches:
               - dev
           pull_request:
           release:
             types: [published]

    * The minimum Nextflow version specified in the pipeline's ``nextflow.config`` matches that defined by ``nxf_ver`` in the test matrix:

      .. code-block:: yaml

         strategy:
           matrix:
             # Nextflow versions: check pipeline minimum and current latest
             nxf_ver: ['19.10.0', '']

      .. note:: These ``matrix`` variables run the test workflow twice, varying the ``nxf_ver`` variable each time.
          This is used in the ``nextflow run`` commands to test the pipeline with both the latest available version
          of the pipeline (``''``) and the stated minimum required version.

    * The `Docker` container for the pipeline must use the correct pipeline version number:

      * Development pipelines:

        .. code-block:: bash

           docker tag nfcore/<pipeline_name>:dev nfcore/<pipeline_name>:dev

      * Released pipelines:

        .. code-block:: bash

           docker tag nfcore/<pipeline_name>:dev nfcore/<pipeline_name>:<tag>

      * Complete example for a released pipeline called *nf-core/example* with version number ``1.0.0``:

        .. code-block:: yaml

           - name: Build new docker image
             if: env.GIT_DIFF
             run: docker build --no-cache . -t nfcore/example:1.0.0

           - name: Pull docker image
             if: ${{ !env.GIT_DIFF }}
             run: |
               docker pull nfcore/example:dev
               docker tag nfcore/example:dev nfcore/example:1.0.0
    """
    passed = []
    failed = []
    fn = os.path.join(self.wf_path, ".github", "workflows", "ci.yml")

    # Return an ignored status if we can't find the file
    if not os.path.isfile(fn):
        return {"ignored": ["'.github/workflows/ci.yml' not found"]}

    with open(fn, "r") as fh:
        ciwf = yaml.safe_load(fh)

    # Check that the action is turned on for the correct events
    try:
        expected = {"push": {"branches": ["dev"]}, "pull_request": None, "release": {"types": ["published"]}}
        # NB: YAML dict key 'on' is evaluated to a Python dict key True
        assert ciwf[True] == expected
    except (AssertionError, KeyError, TypeError):
        failed.append("'.github/workflows/ci.yml' is not triggered on expected events")
    else:
        passed.append("'.github/workflows/ci.yml' is triggered on expected events")

    # Check that we're pulling the right docker image and tagging it properly
    if self.nf_config.get("process.container", ""):
        # Strip any :<version> / :dev tag to get the bare image name
        docker_notag = re.sub(r":(?:[\.\d]+|dev)$", "", self.nf_config.get("process.container", "").strip("\"'"))
        docker_withtag = self.nf_config.get("process.container", "").strip("\"'")

        # docker build
        docker_build_cmd = "docker build --no-cache . -t {}".format(docker_withtag)
        try:
            steps = ciwf["jobs"]["test"]["steps"]
            assert any([docker_build_cmd in step["run"] for step in steps if "run" in step.keys()])
        except (AssertionError, KeyError, TypeError):
            failed.append("CI is not building the correct docker image. Should be: `{}`".format(docker_build_cmd))
        else:
            passed.append("CI is building the correct docker image: `{}`".format(docker_build_cmd))

        # docker pull
        docker_pull_cmd = "docker pull {}:dev".format(docker_notag)
        try:
            steps = ciwf["jobs"]["test"]["steps"]
            assert any([docker_pull_cmd in step["run"] for step in steps if "run" in step.keys()])
        except (AssertionError, KeyError, TypeError):
            failed.append("CI is not pulling the correct docker image. Should be: `{}`".format(docker_pull_cmd))
        else:
            passed.append("CI is pulling the correct docker image: {}".format(docker_pull_cmd))

        # docker tag
        docker_tag_cmd = "docker tag {}:dev {}".format(docker_notag, docker_withtag)
        try:
            steps = ciwf["jobs"]["test"]["steps"]
            assert any([docker_tag_cmd in step["run"] for step in steps if "run" in step.keys()])
        except (AssertionError, KeyError, TypeError):
            failed.append("CI is not tagging docker image correctly. Should be: `{}`".format(docker_tag_cmd))
        else:
            passed.append("CI is tagging docker image correctly: {}".format(docker_tag_cmd))

    # Check that we are testing the minimum nextflow version
    try:
        matrix = ciwf["jobs"]["test"]["strategy"]["matrix"]["nxf_ver"]
        # Idiom fix: was `assert any([self.minNextflowVersion in matrix])`,
        # a needless single-element any() around a plain membership test.
        assert self.minNextflowVersion in matrix
    except (KeyError, TypeError):
        failed.append("'.github/workflows/ci.yml' does not check minimum NF version")
    except AssertionError:
        failed.append("Minimum NF version in '.github/workflows/ci.yml' different to pipeline's manifest")
    else:
        passed.append("'.github/workflows/ci.yml' checks minimum NF version")

    return {"passed": passed, "failed": failed}
def actions_lint(self):
    """Checks that the GitHub Actions *linting* workflow is valid.

    This linting test checks that the GitHub Actions ``.github/workflows/linting.yml`` workflow
    correctly runs the ``nf-core lint``, ``markdownlint`` and ``yamllint`` commands.
    These three commands all check code syntax and code-style.
    Yes that's right - this is a lint test that checks that lint tests are running. Meta.

    This lint test checks this GitHub Actions workflow file for the following:

    * That the workflow is triggered on the ``push`` and ``pull_request`` events, eg:

      .. code-block:: yaml

         on:
           push:
           pull_request:

    * That the workflow has a step that runs ``nf-core lint``, eg:

      .. code-block:: yaml

         jobs:
           nf-core:
             steps:
               - run: nf-core -l lint_log.txt lint ${GITHUB_WORKSPACE} --markdown lint_results.md

    * That the workflow has a step that runs ``markdownlint``, eg:

      .. code-block:: yaml

         jobs:
           Markdown:
             steps:
               - run: markdownlint ${GITHUB_WORKSPACE} -c ${GITHUB_WORKSPACE}/.github/markdownlint.yml

    * That the workflow has a step that runs ``yamllint``, eg:

      .. code-block:: yaml

         jobs:
           YAML:
             steps:
               - run: yamllint $(find ${GITHUB_WORKSPACE} -type f -name "*.yml")

    .. warning:: These are minimal examples of the commands and YAML structure and are not complete
        enough to be copied into the workflow file.
    """
    fn = os.path.join(self.wf_path, ".github", "workflows", "linting.yml")

    # Consistency fix: report an ignored result when the workflow file is
    # missing (previously this silently returned all-empty result lists),
    # matching the behaviour of the other GitHub Actions lint tests.
    if not os.path.isfile(fn):
        return {"ignored": ["'linting.yml' workflow not found: `{}`".format(fn)]}

    passed = []
    warned = []
    failed = []

    with open(fn, "r") as fh:
        lintwf = yaml.safe_load(fh)

    # Check that the action is turned on for push and pull requests
    # NB: the YAML key `on` parses as the Python boolean True
    try:
        assert "push" in lintwf[True]
        assert "pull_request" in lintwf[True]
    except (AssertionError, KeyError, TypeError):
        failed.append("GitHub Actions linting workflow must be triggered on PR and push: `{}`".format(fn))
    else:
        passed.append("GitHub Actions linting workflow is triggered on PR and push: `{}`".format(fn))

    # Check that the nf-core linting runs
    nfcore_lint_cmd = "nf-core -l lint_log.txt lint ${GITHUB_WORKSPACE} --markdown lint_results.md"
    try:
        steps = lintwf["jobs"]["nf-core"]["steps"]
        assert any([nfcore_lint_cmd in step["run"] for step in steps if "run" in step.keys()])
    except (AssertionError, KeyError, TypeError):
        failed.append("Continuous integration must run nf-core lint Tests: `{}`".format(fn))
    else:
        passed.append("Continuous integration runs nf-core lint Tests: `{}`".format(fn))

    # Check that the Markdown linting runs
    markdownlint_cmd = "markdownlint ${GITHUB_WORKSPACE} -c ${GITHUB_WORKSPACE}/.github/markdownlint.yml"
    try:
        steps = lintwf["jobs"]["Markdown"]["steps"]
        assert any([markdownlint_cmd in step["run"] for step in steps if "run" in step.keys()])
    except (AssertionError, KeyError, TypeError):
        failed.append("Continuous integration must run Markdown lint Tests: `{}`".format(fn))
    else:
        passed.append("Continuous integration runs Markdown lint Tests: `{}`".format(fn))

    # Check that the YAML linting runs
    # (this comment previously said "Markdown" - copy-paste error)
    yamllint_cmd = 'yamllint $(find ${GITHUB_WORKSPACE} -type f -name "*.yml")'
    try:
        steps = lintwf["jobs"]["YAML"]["steps"]
        assert any([yamllint_cmd in step["run"] for step in steps if "run" in step.keys()])
    except (AssertionError, KeyError, TypeError):
        failed.append("Continuous integration must run YAML lint Tests: `{}`".format(fn))
    else:
        passed.append("Continuous integration runs YAML lint Tests: `{}`".format(fn))

    return {"passed": passed, "warned": warned, "failed": failed}
def conda_dockerfile(self):
    """Checks the Dockerfile for use with Conda environments.

    .. note:: This test only runs if there is both an ``environment.yml``
       and ``Dockerfile`` present in the pipeline root directory.

    When a workflow has a conda ``environment.yml`` file, the ``Dockerfile``
    should use it to build the container image. This test checks that the
    Dockerfile contains the expected ``COPY`` / ``RUN`` / ``ENV`` lines for the
    pipeline's conda environment and, for released versions of nf-core/tools,
    that the ``FROM nfcore/base:VERSION`` tag matches the installed tools
    version. Extra lines in the Dockerfile do not cause a failure.

    Returns:
        dict: A single-key dict — ``ignored``, ``failed`` or ``passed`` messages.
    """
    # Only relevant when the pipeline ships both a conda env file and a Dockerfile
    if self._fp("environment.yml") not in self.files or self._fp("Dockerfile") not in self.files:
        return {"ignored": ["No `environment.yml` / `Dockerfile` file found - skipping conda_dockerfile test"]}

    env_name = self.conda_config["name"]
    required_lines = [
        "COPY environment.yml /",
        "RUN conda env create --quiet -f /environment.yml && conda clean -a",
        "RUN conda env export --name {} > {}.yml".format(env_name, env_name),
        "ENV PATH /opt/conda/envs/{}/bin:$PATH".format(env_name),
    ]
    # Only pin-check the base image tag when running a released nf-core/tools
    if "dev" not in nf_core.__version__:
        required_lines.append("FROM nfcore/base:{}".format(nf_core.__version__))

    with open(os.path.join(self.wf_path, "Dockerfile"), "r") as fh:
        dockerfile_lines = fh.read().splitlines()

    missing = [line for line in required_lines if line not in dockerfile_lines]
    if missing:
        return {"failed": ["Could not find Dockerfile file string: `{}`".format(line) for line in missing]}
    return {"passed": ["Found all expected strings in Dockerfile file"]}
def conda_env_yaml(self):
    """Checks that the conda environment file is valid.

    .. note:: This test is ignored if there is not an ``environment.yml``
       file present in the pipeline root directory.

    DSL1 nf-core pipelines use a single Conda environment to manage all software
    dependencies for a workflow. This can be used directly with ``-profile conda``
    and is also used in the ``Dockerfile`` to build a docker image.

    Specifically, this lint test makes sure that:

    * The environment ``name`` matches the pipeline name and version, with the
      slash replaced by a hyphen (eg. ``nf-core-test-1.4``)
    * Every conda dependency has a pinned version number (``toolname=1.1``,
      optionally with a build string)
    * Every pip dependency (in a ``- pip:`` sublist) is pinned with ``==``
    * Each pinned version exists, going through all conda channels listed in the
      file via the Anaconda API (`<https://api.anaconda.org>`_), or the
      PyPI JSON API (`<https://pypi.org/pypi/{package}/json>`_) for pip packages
    * A warning is reported if a newer version is available
    """
    passed = []
    warned = []
    failed = []

    if os.path.join(self.wf_path, "environment.yml") not in self.files:
        return {"ignored": ["No `environment.yml` file found - skipping conda_env_yaml test"]}

    # Check that the environment name matches the pipeline name + version
    pipeline_version = self.nf_config.get("manifest.version", "").strip(" '\"")
    expected_env_name = "nf-core-{}-{}".format(self.pipeline_name.lower(), pipeline_version)
    if self.conda_config["name"] != expected_env_name:
        failed.append(
            "Conda environment name is incorrect ({}, should be {})".format(
                self.conda_config["name"], expected_env_name
            )
        )
    else:
        passed.append("Conda environment name was correct ({})".format(expected_env_name))

    # Check conda dependency list
    for dep in self.conda_config.get("dependencies", []):
        if isinstance(dep, str):
            # Conda pins use a single '=' (two when a build string is included).
            # Plain 'if' instead of assert: asserts are stripped under 'python -O'.
            if dep.count("=") in [1, 2]:
                passed.append("Conda dep had pinned version number: `{}`".format(dep))
            else:
                failed.append("Conda dep did not have pinned version number: `{}`".format(dep))

            try:
                depname, depver = dep.split("=")[:2]
                self.conda_package_info[dep] = _anaconda_package(self.conda_config, dep)
            except LookupError as e:
                # Transient API problem - warn rather than fail. str(e) so the
                # results lists hold strings, not exception objects.
                warned.append(str(e))
            except ValueError as e:
                failed.append(str(e))
            else:
                # Check that the required version is available at all
                if depver not in self.conda_package_info[dep].get("versions"):
                    failed.append("Conda dep had unknown version: {}".format(dep))
                    continue  # No need to test for latest version, continue linting
                # Check version is latest available
                last_ver = self.conda_package_info[dep].get("latest_version")
                if last_ver is not None and last_ver != depver:
                    warned.append("Conda dep outdated: `{}`, `{}` available".format(dep, last_ver))
                else:
                    passed.append("Conda package is the latest available: `{}`".format(dep))

        elif isinstance(dep, dict):
            for pip_dep in dep.get("pip", []):
                # Pip pins use '==', which contains two '=' characters
                if pip_dep.count("=") == 2:
                    passed.append("Pip dependency had pinned version number: {}".format(pip_dep))
                else:
                    failed.append("Pip dependency did not have pinned version number: {}".format(pip_dep))

                try:
                    pip_depname, pip_depver = pip_dep.split("==", 1)
                    # BUG FIX: key on pip_dep (a string), not dep - here dep is the
                    # {'pip': [...]} dict, which is unhashable and raised TypeError;
                    # the reads below also use conda_package_info[pip_dep].
                    self.conda_package_info[pip_dep] = _pip_package(pip_dep)
                except LookupError as e:
                    warned.append(str(e))
                except ValueError as e:
                    failed.append(str(e))
                else:
                    # Check if the PyPI package version is available at all
                    if pip_depver not in self.conda_package_info[pip_dep].get("releases").keys():
                        failed.append("PyPi package had an unknown version: {}".format(pip_depver))
                        continue  # No need to test latest version, if not available
                    last_ver = self.conda_package_info[pip_dep].get("info").get("version")
                    if last_ver is not None and last_ver != pip_depver:
                        warned.append(
                            "PyPi package is not latest available: {}, {} available".format(pip_depver, last_ver)
                        )
                    else:
                        passed.append("PyPi package is latest available: {}".format(pip_depver))

    return {"passed": passed, "warned": warned, "failed": failed}
See https://docs.anaconda.com/anaconda/user-guide/tasks/using-repositories/ + if "defaults" in dep_channels: + dep_channels.remove("defaults") + dep_channels.extend(["main", "anaconda", "r", "free", "archive", "anaconda-extras"]) + if "::" in depname: + dep_channels = [depname.split("::")[0]] + depname = depname.split("::")[1] + for ch in dep_channels: + anaconda_api_url = "https://api.anaconda.org/package/{}/{}".format(ch, depname) + try: + response = requests.get(anaconda_api_url, timeout=10) + except (requests.exceptions.Timeout): + raise LookupError("Anaconda API timed out: {}".format(anaconda_api_url)) + except (requests.exceptions.ConnectionError): + raise LookupError("Could not connect to Anaconda API") + else: + if response.status_code == 200: + return response.json() + elif response.status_code != 404: + raise LookupError( + "Anaconda API returned unexpected response code `{}` for: {}\n{}".format( + response.status_code, anaconda_api_url, response + ) + ) + elif response.status_code == 404: + log.debug("Could not find `{}` in conda channel `{}`".format(dep, ch)) + else: + # We have looped through each channel and had a 404 response code on everything + raise ValueError( + "Could not find Conda dependency using the Anaconda API: `{}` (<{}>)".format(dep, anaconda_api_url) + ) + + +def _pip_package(dep): + """Query PyPi package information. + + Sends a HTTP GET request to the PyPi remote API. + + Args: + dep (str): A PyPi package name. 
+ + Raises: + A LookupError, if the connection fails or times out + A ValueError, if the package name can not be found + """ + pip_depname, pip_depver = dep.split("=", 1) + pip_api_url = "https://pypi.python.org/pypi/{}/json".format(pip_depname) + try: + response = requests.get(pip_api_url, timeout=10) + except (requests.exceptions.Timeout): + raise LookupError("PyPi API timed out: {}".format(pip_api_url)) + except (requests.exceptions.ConnectionError): + raise LookupError("PyPi API Connection error: {}".format(pip_api_url)) + else: + if response.status_code == 200: + return response.json() + else: + raise ValueError("Could not find pip dependency using the PyPi API: `{}`".format(dep)) diff --git a/nf_core/lint/cookiecutter_strings.py b/nf_core/lint/cookiecutter_strings.py new file mode 100644 index 0000000000..2819963c41 --- /dev/null +++ b/nf_core/lint/cookiecutter_strings.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python + +import io +import os +import re + + +def cookiecutter_strings(self): + """Check for 'cookiecutter' placeholders. + + The ``nf-core create`` pipeline template uses + `cookiecutter `_ behind the scenes. + + This lint test fails if any cookiecutter template variables such as + ``{{ cookiecutter.pipeline_name }}`` are found in your pipeline code. + + Finding a placeholder like this means that something was probably copied and pasted + from the template without being properly rendered for your pipeline. 
+ """ + passed = [] + failed = [] + + # Loop through files, searching for string + num_matches = 0 + for fn in self.files: + with io.open(fn, "r", encoding="latin1") as fh: + lnum = 0 + for l in fh: + lnum += 1 + cc_matches = re.findall(r"{{\s*cookiecutter[^}]*}}", l) + if len(cc_matches) > 0: + for cc_match in cc_matches: + failed.append("Found a cookiecutter template string in `{}` L{}: {}".format(fn, lnum, cc_match)) + num_matches += 1 + if num_matches == 0: + passed.append("Did not find any cookiecutter template strings ({} files)".format(len(self.files))) + + return {"passed": passed, "failed": failed} diff --git a/nf_core/lint/files_exist.py b/nf_core/lint/files_exist.py new file mode 100644 index 0000000000..6c95b276e0 --- /dev/null +++ b/nf_core/lint/files_exist.py @@ -0,0 +1,142 @@ +#!/usr/bin/env python + +import os +import yaml + + +def files_exist(self): + """Checks a given pipeline directory for required files. + + Iterates through the pipeline's directory content and checkmarks files + for presence. + + .. note:: + This test raises an ``AssertionError`` if neither ``nextflow.config`` or ``main.nf`` are found. + If these files are not found then this cannot be a Nextflow pipeline and something has gone badly wrong. + All lint tests are stopped immediately with a critical error message. 
def files_exist(self):
    """Checks a given pipeline directory for required files.

    Iterates through the pipeline's directory content and checks files for presence.

    .. note::
        This test raises an ``AssertionError`` if neither ``nextflow.config`` nor
        ``main.nf`` are found: in that case this cannot be a Nextflow pipeline and
        all linting is stopped immediately with a critical error.

    Files that **must** be present (any one alternative in a group suffices):
    ``nextflow.config``, ``nextflow_schema.json``, a licence file
    (``LICENSE`` / ``LICENSE.md`` / ``LICENCE`` / ``LICENCE.md``), ``README.md``,
    ``CHANGELOG.md``, ``docs/README.md``, ``docs/output.md``, ``docs/usage.md``,
    ``.github/workflows/branch.yml``, ``.github/workflows/ci.yml``,
    ``.github/workflows/linting.yml``.

    Files that *should* be present (warning only): ``main.nf``, ``environment.yml``,
    ``Dockerfile``, ``conf/base.config``, ``.github/workflows/awstest.yml``,
    ``.github/workflows/awsfulltest.yml``.

    Files that **must not** be present: ``Singularity``, ``parameters.settings.json``,
    ``bin/markdown_to_html.r``, ``.github/workflows/push_dockerhub.yml``.

    Files that *should not* be present (warning only): ``.travis.yml``.

    Entries listed under ``files_exist`` in the lint config are skipped and
    reported as ignored.
    """
    passed = []
    warned = []
    failed = []
    ignored = []

    # NB: Should all be files, not directories.
    # List of lists: a group passes if ANY of its alternatives is found.
    files_fail = [
        ["nextflow.config"],
        ["nextflow_schema.json"],
        ["LICENSE", "LICENSE.md", "LICENCE", "LICENCE.md"],  # NB: British / American spelling
        ["README.md"],
        ["CHANGELOG.md"],
        [os.path.join("docs", "README.md")],
        [os.path.join("docs", "output.md")],
        [os.path.join("docs", "usage.md")],
        [os.path.join(".github", "workflows", "branch.yml")],
        [os.path.join(".github", "workflows", "ci.yml")],
        [os.path.join(".github", "workflows", "linting.yml")],
    ]
    files_warn = [
        ["main.nf"],
        ["environment.yml"],
        ["Dockerfile"],
        [os.path.join("conf", "base.config")],
        [os.path.join(".github", "workflows", "awstest.yml")],
        [os.path.join(".github", "workflows", "awsfulltest.yml")],
    ]

    # Plain strings: fails / warns if the file exists
    files_fail_ifexists = [
        "Singularity",
        "parameters.settings.json",
        os.path.join("bin", "markdown_to_html.r"),
        os.path.join(".github", "workflows", "push_dockerhub.yml"),
    ]
    files_warn_ifexists = [".travis.yml"]

    # Files to skip, according to the linting config
    ignore_files = self.lint_config.get("files_exist", [])

    def _fullpath(relpath):
        return os.path.join(self.wf_path, relpath)

    # Critical sanity check - bail out entirely if this is not a Nextflow pipeline
    if not os.path.isfile(_fullpath("nextflow.config")) and not os.path.isfile(_fullpath("main.nf")):
        failed.append("File not found: nextflow.config or main.nf")
        raise AssertionError("Neither nextflow.config or main.nf found! Is this a Nextflow pipeline?")

    # Required files - error if a whole group is missing
    for alternatives in files_fail:
        if any(f in ignore_files for f in alternatives):
            continue
        if any(os.path.isfile(_fullpath(f)) for f in alternatives):
            passed.append("File found: {}".format(self._wrap_quotes(alternatives)))
        else:
            failed.append("File not found: {}".format(self._wrap_quotes(alternatives)))

    # Recommended files - warn if a whole group is missing
    for alternatives in files_warn:
        if any(f in ignore_files for f in alternatives):
            continue
        if any(os.path.isfile(_fullpath(f)) for f in alternatives):
            passed.append("File found: {}".format(self._wrap_quotes(alternatives)))
        else:
            warned.append("File not found: {}".format(self._wrap_quotes(alternatives)))

    # Forbidden files - error if present
    for forbidden in files_fail_ifexists:
        if forbidden in ignore_files:
            continue
        if os.path.isfile(_fullpath(forbidden)):
            failed.append("File must be removed: {}".format(self._wrap_quotes(forbidden)))
        else:
            passed.append("File not found check: {}".format(self._wrap_quotes(forbidden)))

    # Discouraged files - warn if present
    for discouraged in files_warn_ifexists:
        if discouraged in ignore_files:
            continue
        if os.path.isfile(_fullpath(discouraged)):
            warned.append("File should be removed: {}".format(self._wrap_quotes(discouraged)))
        else:
            passed.append("File not found check: {}".format(self._wrap_quotes(discouraged)))

    # Report everything that the lint config told us to skip
    for skipped in ignore_files:
        ignored.append("File is ignored: {}".format(self._wrap_quotes(skipped)))

    return {"passed": passed, "warned": warned, "failed": failed, "ignored": ignored}
def licence(self):
    """Checks the pipeline licence file.

    All nf-core pipelines must ship with an open source MIT licence
    (https://choosealicense.com/licenses/mit/).

    This test fails if any of the following conditions are met:

    * The licence file contains fewer than 4 lines of text
    * The file does not contain the string *"without restriction"*
    * The licence contains unfilled template placeholders: ``[year]``,
      ``[fullname]``, ``<YEAR>``, ``<COPYRIGHT HOLDER>``, ``<year>`` or
      ``<copyright holders>``

    .. note::
        The lint test looks in any of the following filenames:
        ``LICENSE``, ``LICENSE.md``, ``LICENCE`` or ``LICENCE.md``
        *(British / American spellings)*
    """
    passed = []
    warned = []
    failed = []

    for licence_basename in ["LICENSE", "LICENSE.md", "LICENCE", "LICENCE.md"]:
        fn = os.path.join(self.wf_path, licence_basename)
        if not os.path.isfile(fn):
            continue
        with open(fn, "r") as fh:
            content = fh.read()

        # Needs at least copyright, permission, notice and "as-is" lines
        if content.count("\n") < 4:
            failed.append("Number of lines too small for a valid MIT license file: {}".format(fn))

        # Most MIT variations don't actually contain the string "MIT", so
        # search for 'without restriction' instead (a crutch).
        if "without restriction" not in content:
            failed.append("Licence file did not look like MIT: {}".format(fn))

        # Check for placeholders present in the common MIT templates:
        # - https://choosealicense.com/licenses/mit/
        # - https://opensource.org/licenses/MIT
        # - https://en.wikipedia.org/wiki/MIT_License
        # BUG FIX: the set previously contained empty strings (stripped
        # angle-bracket tokens); '"" in content' is always True, so every
        # licence file was flagged as containing placeholders.
        placeholders = {"[year]", "[fullname]", "<YEAR>", "<COPYRIGHT HOLDER>", "<year>", "<copyright holders>"}
        if any(ph in content for ph in placeholders):
            failed.append("Licence file contains placeholders: {}".format(fn))

        # Only report success when no problems were found
        if not failed:
            passed.append("Licence check passed")

    return {"passed": passed, "warned": warned, "failed": failed}
def nextflow_config(self):
    """Checks the pipeline configuration for required variables.

    All nf-core pipelines are required to be configured with a minimal set of
    variable names. This test fails or throws warnings if required variables
    are not set.

    .. note:: These config variables must be set in ``nextflow.config`` or another
       config file imported from there. Any variables set in nextflow script files
       (eg. ``main.nf``) are not checked and will be assumed to be missing.

    **Fails if missing:** ``params.outdir``, ``params.input``, ``manifest.name``
    (must begin with ``nf-core/``), ``manifest.description``, ``manifest.version``
    (should end in ``dev`` unless ``--release`` is set, in which case it must not
    contain ``dev``), ``manifest.nextflowVersion`` (must start with ``>=`` or
    ``!>=``), ``manifest.homePage`` (must begin with
    ``https://github.com/nf-core/``), ``timeline.enabled``, ``trace.enabled``,
    ``report.enabled``, ``dag.enabled`` (all must be ``true``), ``process.cpus``,
    ``process.memory`` and ``process.time``.

    **Warns if missing:** ``manifest.mainScript``, ``timeline.file``,
    ``trace.file``, ``report.file``, ``dag.file`` (should end in ``.svg``) and
    ``process.container`` (tag must match the pipeline version, or ``:dev``).

    **Fails if still present (deprecated):** ``params.version``,
    ``params.nf_required_version``, ``params.container``, ``params.singleEnd``,
    ``params.igenomesIgnore`` and the old process-level configuration syntax
    (``process.$name`` instead of ``process withName:'name'``).

    Config names listed under ``nextflow_config`` in the lint config are skipped
    and reported as ignored.
    """
    passed = []
    warned = []
    failed = []
    ignored = []

    # Fail tests if these are missing (lists of alternatives - any one suffices)
    config_fail = [
        ["manifest.name"],
        ["manifest.nextflowVersion"],
        ["manifest.description"],
        ["manifest.version"],
        ["manifest.homePage"],
        ["timeline.enabled"],
        ["trace.enabled"],
        ["report.enabled"],
        ["dag.enabled"],
        ["process.cpus"],
        ["process.memory"],
        ["process.time"],
        ["params.outdir"],
        ["params.input"],
    ]
    # Throw a warning if these are missing
    config_warn = [
        ["manifest.mainScript"],
        ["timeline.file"],
        ["trace.file"],
        ["report.file"],
        ["dag.file"],
        ["process.container"],
    ]
    # Old deprecated vars - fail if present
    config_fail_ifdefined = [
        "params.version",
        "params.nf_required_version",
        "params.container",
        "params.singleEnd",
        "params.igenomesIgnore",
    ]

    # Remove field that should be ignored according to the linting config
    ignore_configs = self.lint_config.get("nextflow_config", [])

    for cfs in config_fail:
        # BUG FIX: skip ignored groups entirely (previously an ignored-only group
        # fell through the for-else and was reported as failed). Matches the
        # ignore handling in files_exist.
        if any(cf in ignore_configs for cf in cfs):
            continue
        for cf in cfs:
            if cf in self.nf_config.keys():
                passed.append("Config variable found: {}".format(self._wrap_quotes(cf)))
                break
        else:
            failed.append("Config variable not found: {}".format(self._wrap_quotes(cfs)))
    for cfs in config_warn:
        if any(cf in ignore_configs for cf in cfs):
            continue
        for cf in cfs:
            if cf in self.nf_config.keys():
                passed.append("Config variable found: {}".format(self._wrap_quotes(cf)))
                break
        else:
            warned.append("Config variable not found: {}".format(self._wrap_quotes(cfs)))
    for cf in config_fail_ifdefined:
        if cf in ignore_configs:
            continue
        if cf not in self.nf_config.keys():
            passed.append("Config variable (correctly) not found: {}".format(self._wrap_quotes(cf)))
        else:
            failed.append("Config variable (incorrectly) found: {}".format(self._wrap_quotes(cf)))

    # Check and warn if the process configuration is done with deprecated syntax
    process_with_deprecated_syntax = list(
        set(
            [
                re.search(r"^(process\.\$.*?)\.+.*$", ck).group(1)
                for ck in self.nf_config.keys()
                if re.match(r"^(process\.\$.*?)\.+.*$", ck)
            ]
        )
    )
    for pd in process_with_deprecated_syntax:
        warned.append("Process configuration is done with deprecated_syntax: {}".format(pd))

    # Check the variables that should be set to 'true'
    for k in ["timeline.enabled", "report.enabled", "trace.enabled", "dag.enabled"]:
        if self.nf_config.get(k) == "true":
            passed.append("Config ``{}`` had correct value: ``{}``".format(k, self.nf_config.get(k)))
        else:
            failed.append("Config ``{}`` did not have correct value: ``{}``".format(k, self.nf_config.get(k)))

    # Check that the pipeline name starts with nf-core
    if self.nf_config.get("manifest.name", "").strip("'\"").startswith("nf-core/"):
        passed.append("Config ``manifest.name`` began with ``nf-core/``")
    else:
        failed.append(
            "Config ``manifest.name`` did not begin with ``nf-core/``:\n    {}".format(
                self.nf_config.get("manifest.name", "").strip("'\"")
            )
        )

    # Check that the homePage is set to the GitHub URL
    if self.nf_config.get("manifest.homePage", "").strip("'\"").startswith("https://github.com/nf-core/"):
        passed.append("Config variable ``manifest.homePage`` began with https://github.com/nf-core/")
    else:
        failed.append(
            "Config variable ``manifest.homePage`` did not begin with https://github.com/nf-core/:\n    {}".format(
                self.nf_config.get("manifest.homePage", "").strip("'\"")
            )
        )

    # Check that the DAG filename ends in ``.svg``
    if "dag.file" in self.nf_config:
        if self.nf_config["dag.file"].strip("'\"").endswith(".svg"):
            passed.append("Config ``dag.file`` ended with ``.svg``")
        else:
            failed.append("Config ``dag.file`` did not end with ``.svg``")

    # Check that the minimum nextflowVersion is set properly
    if "manifest.nextflowVersion" in self.nf_config:
        if self.nf_config.get("manifest.nextflowVersion", "").strip("\"'").lstrip("!").startswith(">="):
            passed.append("Config variable ``manifest.nextflowVersion`` started with >= or !>=")
        else:
            # BUG FIX: .strip() previously applied to the whole message instead
            # of the config value inside the format call.
            failed.append(
                "Config ``manifest.nextflowVersion`` did not start with ``>=`` or ``!>=`` : ``{}``".format(
                    self.nf_config.get("manifest.nextflowVersion", "").strip("\"'")
                )
            )

    # Check that the process.container name is pulling the version tag or :dev
    if self.nf_config.get("process.container"):
        container_name = "{}:{}".format(
            self.nf_config.get("manifest.name").replace("nf-core", "nfcore").strip("'"),
            self.nf_config.get("manifest.version", "").strip("'"),
        )
        if "dev" in self.nf_config.get("manifest.version", "") or not self.nf_config.get("manifest.version"):
            container_name = "{}:dev".format(
                self.nf_config.get("manifest.name").replace("nf-core", "nfcore").strip("'")
            )
        if self.nf_config.get("process.container", "").strip("'") == container_name:
            passed.append("Config ``process.container`` looks correct: ``{}``".format(container_name))
        else:
            mismatch_msg = "Config ``process.container`` looks wrong. Should be ``{}`` but is ``{}``".format(
                container_name, self.nf_config.get("process.container", "").strip("'")
            )
            # A wrong container tag only blocks releases
            if self.release_mode:
                failed.append(mismatch_msg)
            else:
                warned.append(mismatch_msg)

    # Check whether the pipeline version contains ``dev`` as appropriate
    if not self.release_mode and "manifest.version" in self.nf_config:
        if self.nf_config["manifest.version"].strip(" '\"").endswith("dev"):
            passed.append(
                "Config ``manifest.version`` ends in ``dev``: ``{}``".format(self.nf_config["manifest.version"])
            )
        else:
            warned.append(
                "Config ``manifest.version`` should end in ``dev``: ``{}``".format(self.nf_config["manifest.version"])
            )
    elif "manifest.version" in self.nf_config:
        if "dev" in self.nf_config["manifest.version"]:
            failed.append(
                "Config ``manifest.version`` should not contain ``dev`` for a release: ``{}``".format(
                    self.nf_config["manifest.version"]
                )
            )
        else:
            passed.append(
                "Config ``manifest.version`` does not contain ``dev`` for release: ``{}``".format(
                    self.nf_config["manifest.version"]
                )
            )

    for config in ignore_configs:
        ignored.append("Config ignored: {}".format(self._wrap_quotes(config)))
    return {"passed": passed, "warned": warned, "failed": failed, "ignored": ignored}
Should be ``{}`` but is ``{}``".format( + container_name, self.nf_config.get("process.container", "").strip("'") + ) + ) + else: + passed.append("Config ``process.container`` looks correct: ``{}``".format(container_name)) + + # Check that the pipeline version contains ``dev`` + if not self.release_mode and "manifest.version" in self.nf_config: + if self.nf_config["manifest.version"].strip(" '\"").endswith("dev"): + passed.append( + "Config ``manifest.version`` ends in ``dev``: ``{}``".format(self.nf_config["manifest.version"]) + ) + else: + warned.append( + "Config ``manifest.version`` should end in ``dev``: ``{}``".format(self.nf_config["manifest.version"]) + ) + elif "manifest.version" in self.nf_config: + if "dev" in self.nf_config["manifest.version"]: + failed.append( + "Config ``manifest.version`` should not contain ``dev`` for a release: ``{}``".format( + self.nf_config["manifest.version"] + ) + ) + else: + passed.append( + "Config ``manifest.version`` does not contain ``dev`` for release: ``{}``".format( + self.nf_config["manifest.version"] + ) + ) + + for config in ignore_configs: + ignored.append("Config ignored: {}".format(self._wrap_quotes(config))) + return {"passed": passed, "warned": warned, "failed": failed, "ignored": ignored} diff --git a/nf_core/lint/pipeline_name_conventions.py b/nf_core/lint/pipeline_name_conventions.py new file mode 100644 index 0000000000..e1ecad0be2 --- /dev/null +++ b/nf_core/lint/pipeline_name_conventions.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python + + +def pipeline_name_conventions(self): + """Checks that the pipeline name adheres to nf-core conventions. + + In order to ensure consistent naming, pipeline names should contain only lower case, alphanumeric characters. + Otherwise a warning is displayed. + + .. warning:: + DockerHub is very picky about image names and doesn't even allow hyphens (we are ``nfcore``). + This is a large part of why we set this rule. 
def pipeline_todos(self):
    """Check for nf-core *TODO* lines.

    The nf-core workflow template contains a number of comment lines to help
    developers of new pipelines know where they need to edit files and add
    content. They typically look like::

        // TODO nf-core: Make some kind of change to the workflow here

    or in markdown::

        <!-- TODO nf-core: Make some kind of change to the docs here -->

    This lint test runs through all files in the pipeline (skipping anything
    matched by ``.gitignore`` entries) and throws a warning for every such line.
    """
    passed = []
    warned = []
    failed = []

    # Build the ignore list from .gitignore (basenames only), always skipping .git
    ignore = [".git"]
    gitignore_path = os.path.join(self.wf_path, ".gitignore")
    if os.path.isfile(gitignore_path):
        with io.open(gitignore_path, "rt", encoding="latin1") as fh:
            for line in fh:
                ignore.append(os.path.basename(line.strip().rstrip("/")))

    for root, dirs, files in os.walk(self.wf_path):
        # BUG FIX (x2): match fnmatch against the entry NAME, not the joined path
        # (a basename pattern like '.git' never matches 'root/.git'), and assign
        # to dirs[:] in place so os.walk actually prunes ignored directories -
        # rebinding the local 'dirs' had no effect on the traversal.
        dirs[:] = [d for d in dirs if not any(fnmatch.fnmatch(d, i) for i in ignore)]
        files = [f for f in files if not any(fnmatch.fnmatch(f, i) for i in ignore)]
        for fname in files:
            with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh:
                for line in fh:
                    if "TODO nf-core" in line:
                        # Strip comment markers (HTML '<!-- -->' markers restored;
                        # they had been garbled into no-op replace("", "") calls)
                        line = (
                            line.replace("<!--", "")
                            .replace("-->", "")
                            .replace("# TODO nf-core: ", "")
                            .replace("// TODO nf-core: ", "")
                            .replace("TODO nf-core: ", "")
                            .strip()
                        )
                        warned.append("TODO string in `{}`: _{}_".format(fname, line))

    return {"passed": passed, "warned": warned, "failed": failed}
note:: These badges are a markdown image ``![alt-text]()`` *inside* a markdown link ``[markdown image]()``, so a bit fiddly to write. + """ + passed = [] + warned = [] + failed = [] + + with open(os.path.join(self.wf_path, "README.md"), "r") as fh: + content = fh.read() + + # Check that there is a readme badge showing the minimum required version of Nextflow + # and that it has the correct version + nf_badge_re = r"\[!\[Nextflow\]\(https://img\.shields\.io/badge/nextflow-%E2%89%A5([\d\.]+)-brightgreen\.svg\)\]\(https://www\.nextflow\.io/\)" + match = re.search(nf_badge_re, content) + if match: + nf_badge_version = match.group(1).strip("'\"") + try: + assert nf_badge_version == self.minNextflowVersion + except (AssertionError, KeyError): + failed.append( + "README Nextflow minimum version badge does not match config. Badge: `{}`, Config: `{}`".format( + nf_badge_version, self.minNextflowVersion + ) + ) + else: + passed.append( + "README Nextflow minimum version badge matched config. Badge: `{}`, Config: `{}`".format( + nf_badge_version, self.minNextflowVersion + ) + ) + else: + warned.append("README did not have a Nextflow minimum version badge.") + + # Check that we have a bioconda badge if we have a bioconda environment file + if os.path.join(self.wf_path, "environment.yml") in self.files: + bioconda_badge = "[![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/)" + if bioconda_badge in content: + passed.append("README had a bioconda badge") + else: + warned.append("Found a bioconda environment.yml file but no badge in the README") + + return {"passed": passed, "warned": warned, "failed": failed} diff --git a/nf_core/lint/schema_lint.py b/nf_core/lint/schema_lint.py new file mode 100644 index 0000000000..8e6984f3b3 --- /dev/null +++ b/nf_core/lint/schema_lint.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python + +import logging +import nf_core.schema + + +def schema_lint(self): + """Pipeline schema 
syntax + + Pipelines should have a ``nextflow_schema.json`` file that describes the different + pipeline parameters (eg. ``params.something``, ``--something``). + + .. tip:: Reminder: you should generally never need to edit this JSON file by hand. + The ``nf-core schema build`` command can create *and edit* the file for you + to keep it up to date, with a friendly user-interface for customisation. + + The lint test checks the schema for the following: + + * Schema should be a valid JSON file + * Schema should adhere to `JSONSchema `_, Draft 7. + * Parameters can be described in two places: + + * As ``properties`` in the top-level schema object + * As ``properties`` within subschemas listed in a top-level ``definitions`` objects + + * The schema must describe at least one parameter + * There must be no duplicate parameter IDs across the schema and definition subschema + * All subschema in ``definitions`` must be referenced in the top-level ``allOf`` key + * The top-level ``allOf`` key must not describe any non-existent definitions + * Core top-level schema attributes should exist and be set as follows: + + * ``$schema``: ``https://json-schema.org/draft-07/schema`` + * ``$id``: URL to the raw schema file, eg. ``https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json`` + * ``title``: ``YOURPIPELINE pipeline parameters`` + * ``description``: The pipeline config ``manifest.description`` + + For example, an *extremely* minimal schema could look like this: + + .. code-block:: json + + { + "$schema": "https://json-schema.org/draft-07/schema", + "$id": "https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json", + "title": "YOURPIPELINE pipeline parameters", + "description": "This pipeline is for testing", + "properties": { + "first_param": { "type": "string" } + }, + "definitions": { + "my_first_group": { + "properties": { + "second_param": { "type": "string" } + } + } + }, + "allOf": [{"$ref": "#/definitions/my_first_group"}] + } + + .. 
tip:: You can check your pipeline schema without having to run the entire pipeline lint + by running ``nf-core schema lint`` instead of ``nf-core lint`` + """ + passed = [] + warned = [] + failed = [] + + # Only show error messages from schema + logging.getLogger("nf_core.schema").setLevel(logging.ERROR) + + # Lint the schema + self.schema_obj = nf_core.schema.PipelineSchema() + self.schema_obj.get_schema_path(self.wf_path) + try: + self.schema_obj.load_lint_schema() + passed.append("Schema lint passed") + except AssertionError as e: + failed.append("Schema lint failed: {}".format(e)) + + # Check the title and description - gives warnings instead of fail + if self.schema_obj.schema is not None: + try: + self.schema_obj.validate_schema_title_description() + passed.append("Schema title + description lint passed") + except AssertionError as e: + warned.append(e) + + return {"passed": passed, "warned": warned, "failed": failed} diff --git a/nf_core/lint/schema_params.py b/nf_core/lint/schema_params.py new file mode 100644 index 0000000000..580e9129d8 --- /dev/null +++ b/nf_core/lint/schema_params.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +import nf_core.schema + + +def schema_params(self): + """Check that the schema describes all flat params in the pipeline. + + The ``nextflow_schema.json`` pipeline schema should describe every flat parameter + returned from the ``nextflow config`` command (params that are objects or more complex structures are ignored). 
+ + * Failure: If parameters are found in ``nextflow_schema.json`` that are not in ``nextflow_schema.json`` + * Warning: If parameters are found in ``nextflow_schema.json`` that are not in ``nextflow_schema.json`` + """ + passed = [] + warned = [] + failed = [] + + # First, get the top-level config options for the pipeline + # Schema object already created in the `schema_lint` test + self.schema_obj.get_schema_path(self.wf_path) + self.schema_obj.get_wf_params() + self.schema_obj.no_prompts = True + + # Remove any schema params not found in the config + removed_params = self.schema_obj.remove_schema_notfound_configs() + + # Add schema params found in the config but not the schema + added_params = self.schema_obj.add_schema_found_configs() + + if len(removed_params) > 0: + for param in removed_params: + warned.append("Schema param `{}` not found from nextflow config".format(param)) + + if len(added_params) > 0: + for param in added_params: + failed.append("Param `{}` from `nextflow config` not found in nextflow_schema.json".format(param)) + + if len(removed_params) == 0 and len(added_params) == 0: + passed.append("Schema matched params returned from nextflow config") + + return {"passed": passed, "warned": warned, "failed": failed} diff --git a/nf_core/lint/version_consistency.py b/nf_core/lint/version_consistency.py new file mode 100644 index 0000000000..fbd90394a4 --- /dev/null +++ b/nf_core/lint/version_consistency.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python + + +def version_consistency(self): + """Pipeline and container version number consistency. + + .. note:: This test only runs when the ``--release`` flag is set for ``nf-core lint``, + or ``$GITHUB_REF`` is equal to ``master``. + + This lint fetches the pipeline version number from three possible locations: + + * The pipeline config, ``manifest.version`` + * The docker container in the pipeline config, ``process.container`` + + * Some pipelines may not have this set on a pipeline level. 
If it is not found, it is ignored. + + * ``$GITHUB_REF``, if it looks like a release tag (``refs/tags/``) + + The test then checks that: + + * The container name has a tag specified (eg. ``nfcore/pipeline:version``) + * The pipeline version number is numeric (contains only numbers and dots) + * That the version numbers all match one another + """ + passed = [] + failed = [] + + # Get the version definitions + # Get version from nextflow.config + versions = {} + versions["manifest.version"] = self.nf_config.get("manifest.version", "").strip(" '\"") + + # Get version from the docker tag + if self.nf_config.get("process.container", "") and not ":" in self.nf_config.get("process.container", ""): + failed.append( + "Docker slug seems not to have a version tag: {}".format(self.nf_config.get("process.container", "")) + ) + + # Get config container tag (if set; one container per workflow) + if self.nf_config.get("process.container", ""): + versions["process.container"] = self.nf_config.get("process.container", "").strip(" '\"").split(":")[-1] + + # Get version from the $GITHUB_REF env var if this is a release + if ( + os.environ.get("GITHUB_REF", "").startswith("refs/tags/") + and os.environ.get("GITHUB_REPOSITORY", "") != "nf-core/tools" + ): + versions["GITHUB_REF"] = os.path.basename(os.environ["GITHUB_REF"].strip(" '\"")) + + # Check if they are all numeric + for v_type, version in versions.items(): + if not version.replace(".", "").isdigit(): + failed.append("{} was not numeric: {}!".format(v_type, version)) + + # Check if they are consistent + if len(set(versions.values())) != 1: + failed.append( + "The versioning is not consistent between container, release tag " + "and config. 
Found {}".format(", ".join(["{} = {}".format(k, v) for k, v in versions.items()])) + ) + + passed.append("Version tags are numeric and consistent between container, release tag and config.") + + return {"passed": passed, "failed": failed} diff --git a/nf_core/utils.py b/nf_core/utils.py index 2e6388db31..40fb7225cb 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -3,10 +3,13 @@ Common utility functions for the nf-core python package. """ import nf_core + +from distutils import version import datetime import errno -import json +import git import hashlib +import json import logging import os import re @@ -15,7 +18,7 @@ import subprocess import sys import time -from distutils import version +import yaml log = logging.getLogger(__name__) @@ -52,6 +55,98 @@ def rich_force_colors(): return None +class Pipeline(object): + """Object to hold information about a local pipeline. + + Args: + path (str): The path to the nf-core pipeline directory. + + Attributes: + conda_config (dict): The parsed conda configuration file content (``environment.yml``). + conda_package_info (dict): The conda package(s) information, based on the API requests to Anaconda cloud. + nf_config (dict): The Nextflow pipeline configuration file content. + files (list): A list of files found during the linting process. + git_sha (str): The git sha for the repo commit / current GitHub pull-request (`$GITHUB_PR_COMMIT`) + minNextflowVersion (str): The minimum required Nextflow version to run the pipeline. + wf_path (str): Path to the pipeline directory. + pipeline_name (str): The pipeline name, without the `nf-core` tag, for example `hlatyping`. 
+ schema_obj (obj): A :class:`PipelineSchema` object + """ + + def __init__(self, wf_path): + """ Initialise pipeline object """ + self.conda_config = {} + self.conda_package_info = {} + self.nf_config = {} + self.files = [] + self.git_sha = None + self.minNextflowVersion = None + self.wf_path = wf_path + self.pipeline_name = None + self.schema_obj = None + + try: + repo = git.Repo(self.wf_path) + self.git_sha = repo.head.object.hexsha + except: + log.debug("Could not find git hash for pipeline: {}".format(self.wf_path)) + + # Overwrite if we have the last commit from the PR - otherwise we get a merge commit hash + if os.environ.get("GITHUB_PR_COMMIT", "") != "": + self.git_sha = os.environ["GITHUB_PR_COMMIT"] + + def _load(self): + """Run core load functions""" + self._list_files() + self._load_pipeline_config() + self._load_conda_environment() + + def _list_files(self): + """Get a list of all files in the pipeline""" + try: + # First, try to get the list of files using git + git_ls_files = subprocess.check_output(["git", "ls-files"], cwd=self.wf_path).splitlines() + self.files = [] + for fn in git_ls_files: + full_fn = os.path.join(self.wf_path, fn.decode("utf-8")) + if os.path.isfile(full_fn): + self.files.append(full_fn) + else: + log.warning("`git ls-files` returned '{}' but could not open it!".format(full_fn)) + except subprocess.CalledProcessError as e: + # Failed, so probably not initialised as a git repository - just a list of all files + log.debug("Couldn't call 'git ls-files': {}".format(e)) + self.files = [] + for subdir, dirs, files in os.walk(self.wf_path): + for fn in files: + self.files.append(os.path.join(subdir, fn)) + + def _load_pipeline_config(self): + """Get the nextflow config for this pipeline + + Once loaded, set a few convienence reference class attributes + """ + self.nf_config = fetch_wf_config(self.wf_path) + + self.pipeline_name = self.nf_config.get("manifest.name", "").strip("'").replace("nf-core/", "") + + nextflowVersionMatch = 
re.search(r"[0-9\.]+(-edge)?", self.nf_config.get("manifest.nextflowVersion", "")) + if nextflowVersionMatch: + self.minNextflowVersion = nextflowVersionMatch.group(0) + + def _load_conda_environment(self): + """Try to load the pipeline environment.yml file, if it exists""" + try: + with open(os.path.join(self.wf_path, "environment.yml"), "r") as fh: + self.conda_config = yaml.safe_load(fh) + except FileNotFoundError: + log.debug("No conda `environment.yml` file found.") + + def _fp(self, fn): + """Convenience function to get full path to a file in the pipeline""" + return os.path.join(self.wf_path, fn) + + def fetch_wf_config(wf_path): """Uses Nextflow to retrieve the the configuration variables from a Nextflow workflow. diff --git a/tests/lint_examples/failing_example/Dockerfile b/tests/lint/__init__.py similarity index 100% rename from tests/lint_examples/failing_example/Dockerfile rename to tests/lint/__init__.py diff --git a/tests/lint/actions_awsfulltest.py b/tests/lint/actions_awsfulltest.py new file mode 100644 index 0000000000..767715340e --- /dev/null +++ b/tests/lint/actions_awsfulltest.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python + +import os +import yaml +import nf_core.lint + + +def test_actions_awsfulltest_warn(self): + """Lint test: actions_awsfulltest - WARN""" + self.lint_obj._load() + results = self.lint_obj.actions_awsfulltest() + assert results["passed"] == ["`.github/workflows/awsfulltest.yml` is triggered correctly"] + assert results["warned"] == ["`.github/workflows/awsfulltest.yml` should test full datasets, not `-profile test`"] + assert len(results.get("failed", [])) == 0 + assert len(results.get("ignored", [])) == 0 + + +def test_actions_awsfulltest_pass(self): + """Lint test: actions_awsfulltest - PASS""" + + # Edit .github/workflows/awsfulltest.yml to use -profile test_full + new_pipeline = self._make_pipeline_copy() + with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "r") as fh: + awsfulltest_yml = 
#!/usr/bin/env python
"""Tests for the actions_awsfulltest / actions_awstest / actions_branch_protection lint checks."""

import os

import yaml

import nf_core.lint


def test_actions_awsfulltest_warn(self):
    """Lint test: actions_awsfulltest - WARN"""
    self.lint_obj._load()
    results = self.lint_obj.actions_awsfulltest()
    assert results["passed"] == ["`.github/workflows/awsfulltest.yml` is triggered correctly"]
    assert results["warned"] == ["`.github/workflows/awsfulltest.yml` should test full datasets, not `-profile test`"]
    assert len(results.get("failed", [])) == 0
    assert len(results.get("ignored", [])) == 0


def test_actions_awsfulltest_pass(self):
    """Lint test: actions_awsfulltest - PASS"""

    # Edit .github/workflows/awsfulltest.yml to use -profile test_full
    new_pipeline = self._make_pipeline_copy()
    with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "r") as fh:
        awsfulltest_yml = fh.read()
    awsfulltest_yml = awsfulltest_yml.replace("-profile test ", "-profile test_full ")
    with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh:
        fh.write(awsfulltest_yml)

    # Make lint object
    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
    lint_obj._load()

    results = lint_obj.actions_awsfulltest()
    assert results["passed"] == [
        "`.github/workflows/awsfulltest.yml` is triggered correctly",
        "`.github/workflows/awsfulltest.yml` does not use `-profile test`",
    ]
    assert len(results.get("warned", [])) == 0
    assert len(results.get("failed", [])) == 0
    assert len(results.get("ignored", [])) == 0


def test_actions_awsfulltest_fail(self):
    """Lint test: actions_awsfulltest - FAIL"""

    # Remove the workflow_run trigger from .github/workflows/awsfulltest.yml
    # so that the workflow is no longer triggered correctly.
    new_pipeline = self._make_pipeline_copy()
    with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "r") as fh:
        awsfulltest_yml = yaml.safe_load(fh)
    # PyYAML parses the unquoted `on:` key as boolean True
    del awsfulltest_yml[True]["workflow_run"]
    with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh:
        yaml.dump(awsfulltest_yml, fh)

    # Make lint object
    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
    lint_obj._load()

    results = lint_obj.actions_awsfulltest()
    assert results["failed"] == ["`.github/workflows/awsfulltest.yml` is not triggered correctly"]
    assert results["warned"] == ["`.github/workflows/awsfulltest.yml` should test full datasets, not `-profile test`"]
    assert len(results.get("passed", [])) == 0
    assert len(results.get("ignored", [])) == 0


def test_actions_awstest_pass(self):
    """Lint test: actions_awstest - PASS"""
    self.lint_obj._load()
    results = self.lint_obj.actions_awstest()
    assert results["passed"] == ["'.github/workflows/awstest.yml' is triggered correctly"]
    assert len(results.get("warned", [])) == 0
    assert len(results.get("failed", [])) == 0
    assert len(results.get("ignored", [])) == 0


def test_actions_awstest_fail(self):
    """Lint test: actions_awstest - FAIL"""

    # Edit .github/workflows/awstest.yml to trigger the workflow on push,
    # which the lint check must flag as an incorrect trigger.
    new_pipeline = self._make_pipeline_copy()
    with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh:
        awstest_yml = yaml.safe_load(fh)
    # PyYAML parses the unquoted `on:` key as boolean True
    awstest_yml[True]["push"] = ["master"]
    with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh:
        yaml.dump(awstest_yml, fh)

    # Make lint object
    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
    lint_obj._load()

    results = lint_obj.actions_awstest()
    assert results["failed"] == ["'.github/workflows/awstest.yml' is not triggered correctly"]
    assert len(results.get("warned", [])) == 0
    assert len(results.get("passed", [])) == 0
    assert len(results.get("ignored", [])) == 0


def test_actions_branch_protection_pass(self):
    """Lint test: actions_branch_protection - PASS"""
    self.lint_obj._load()
    results = self.lint_obj.actions_branch_protection()
    assert results["passed"] == [
        "GitHub Actions 'branch.yml' workflow is triggered for PRs to master",
        "GitHub Actions 'branch.yml' workflow looks good",
    ]
    assert len(results.get("warned", [])) == 0
    assert len(results.get("failed", [])) == 0
    assert len(results.get("ignored", [])) == 0


def test_actions_branch_protection_fail(self):
    """Lint test: actions_branch_protection - FAIL"""

    # Edit .github/workflows/branch.yml: wrong trigger branch, no check steps
    new_pipeline = self._make_pipeline_copy()
    with open(os.path.join(new_pipeline, ".github", "workflows", "branch.yml"), "r") as fh:
        branch_yml = yaml.safe_load(fh)
    branch_yml[True] = {"push": ["dev"]}
    branch_yml["jobs"]["test"]["steps"] = []
    with open(os.path.join(new_pipeline, ".github", "workflows", "branch.yml"), "w") as fh:
        yaml.dump(branch_yml, fh)

    # Make lint object
    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
    lint_obj._load()

    results = lint_obj.actions_branch_protection()
    assert results["failed"] == [
        "GitHub Actions 'branch.yml' workflow should be triggered for PRs to master",
        "Couldn't find GitHub Actions 'branch.yml' check for PRs to master",
    ]
    assert len(results.get("warned", [])) == 0
    assert len(results.get("passed", [])) == 0
    assert len(results.get("ignored", [])) == 0


def test_actions_branch_protection_ignore(self):
    """Lint test: actions_branch_protection - IGNORE"""

    # Delete .github/workflows/branch.yml entirely
    new_pipeline = self._make_pipeline_copy()
    branch_fn = os.path.join(new_pipeline, ".github", "workflows", "branch.yml")
    os.remove(branch_fn)

    # Make lint object
    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
    lint_obj._load()

    results = lint_obj.actions_branch_protection()
    assert results["ignored"] == ["Could not find branch.yml workflow: {}".format(branch_fn)]
    assert len(results.get("warned", [])) == 0
    assert len(results.get("passed", [])) == 0
    assert len(results.get("failed", [])) == 0
results["passed"] == [ + "'.github/workflows/ci.yml' is triggered on expected events", + "CI is building the correct docker image: `docker build --no-cache . -t nfcore/testpipeline:dev`", + "CI is pulling the correct docker image: docker pull nfcore/testpipeline:dev", + "CI is tagging docker image correctly: docker tag nfcore/testpipeline:dev nfcore/testpipeline:dev", + "'.github/workflows/ci.yml' checks minimum NF version", + ] + assert len(results.get("warned", [])) == 0 + assert len(results.get("failed", [])) == 0 + assert len(results.get("ignored", [])) == 0 + + +def test_actions_ci_fail_wrong_nf(self): + """Lint test: actions_ci - FAIL - wrong minimum version of Nextflow tested""" + self.lint_obj._load() + self.lint_obj.minNextflowVersion = "1.2.3" + results = self.lint_obj.actions_ci() + assert results["failed"] == ["Minimum NF version in '.github/workflows/ci.yml' different to pipeline's manifest"] + + +def test_actions_ci_fail_wrong_docker_ver(self): + """Lint test: actions_actions_ci - FAIL - wrong pipeline version used for docker commands""" + + self.lint_obj._load() + self.lint_obj.nf_config["process.container"] = "'nfcore/tools:0.4'" + results = self.lint_obj.actions_ci() + assert results["failed"] == [ + "CI is not building the correct docker image. Should be: `docker build --no-cache . -t nfcore/tools:0.4`", + "CI is not pulling the correct docker image. Should be: `docker pull nfcore/tools:dev`", + "CI is not tagging docker image correctly. Should be: `docker tag nfcore/tools:dev nfcore/tools:0.4`", + ] + + +def test_actions_ci_fail_wrong_trigger(self): + """Lint test: actions_actions_ci - FAIL - workflow triggered incorrectly, NF ver not checked at all""" + + # Edit .github/workflows/actions_ci.yml to mess stuff up! 
+ new_pipeline = self._make_pipeline_copy() + with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml"), "r") as fh: + ci_yml = yaml.safe_load(fh) + ci_yml[True]["push"] = ["dev", "patch"] + ci_yml["jobs"]["test"]["strategy"]["matrix"] = {"nxf_versionnn": ["foo", ""]} + with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml"), "w") as fh: + yaml.dump(ci_yml, fh) + + # Make lint object + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + + results = lint_obj.actions_ci() + assert results["failed"] == [ + "'.github/workflows/ci.yml' is not triggered on expected events", + "'.github/workflows/ci.yml' does not check minimum NF version", + ] diff --git a/tests/lint/files_exist.py b/tests/lint/files_exist.py new file mode 100644 index 0000000000..b66d40ee88 --- /dev/null +++ b/tests/lint/files_exist.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python + +import os +import yaml +import nf_core.lint + + +def test_files_exist_missing_config(self): + """Lint test: critical files missing FAIL""" + new_pipeline = self._make_pipeline_copy() + + os.remove(os.path.join(new_pipeline, "nextflow.config")) + + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + + results = lint_obj.files_exist() + assert results["failed"] == ["File not found: `nextflow.config`"] + + +def test_files_exist_missing_main(self): + """Check if missing main issues warning""" + new_pipeline = self._make_pipeline_copy() + + os.remove(os.path.join(new_pipeline, "main.nf")) + + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + + results = lint_obj.files_exist() + assert results["warned"] == ["File not found: `main.nf`"] + + +def test_files_exist_depreciated_file(self): + """Check whether depreciated file issues warning""" + new_pipeline = self._make_pipeline_copy() + + nf = os.path.join(new_pipeline, "parameters.settings.json") + os.system("touch {}".format(nf)) + + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + 
lint_obj._load() + + results = lint_obj.files_exist() + assert results["failed"] == ["File must be removed: `parameters.settings.json`"] + + +def test_files_exist_pass(self): + """Lint check should pass if all files are there""" + + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + + results = lint_obj.files_exist() + assert results["failed"] == [] diff --git a/tests/lint/licence.py b/tests/lint/licence.py new file mode 100644 index 0000000000..97969994b4 --- /dev/null +++ b/tests/lint/licence.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python + +import os +import yaml +import nf_core.lint + + +def test_licence_pass(self): + """Lint test: check a valid MIT licence""" + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + + results = lint_obj.licence() + assert results["passed"] == ["Licence check passed"] + + +def test_licence_fail(self): + """Lint test: invalid MIT licence""" + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + + fh = open(os.path.join(new_pipeline, "LICENSE"), "a") + fh.write("[year]") + fh.close() + + results = lint_obj.licence() + assert results["failed"] == ["Licence file contains placeholders: {}".format(os.path.join(new_pipeline, "LICENSE"))] diff --git a/tests/lint_examples/critical_example/LICENSE b/tests/lint_examples/critical_example/LICENSE deleted file mode 100644 index d13cc4b26a..0000000000 --- a/tests/lint_examples/critical_example/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -The MIT License (MIT) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to 
whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/tests/lint_examples/failing_example/.github/workflows/awsfulltest.yml b/tests/lint_examples/failing_example/.github/workflows/awsfulltest.yml deleted file mode 100644 index 0563e646e4..0000000000 --- a/tests/lint_examples/failing_example/.github/workflows/awsfulltest.yml +++ /dev/null @@ -1,41 +0,0 @@ -name: nf-core AWS full size tests -# This workflow is triggered on push to the master branch. 
-# It runs the -profile 'test_full' on AWS batch - -on: - push: - branches: - - master - -jobs: - run-awstest: - name: Run AWS tests - if: github.repository == 'nf-core/tools' - runs-on: ubuntu-latest - steps: - - name: Setup Miniconda - uses: goanpeca/setup-miniconda@v1.0.2 - with: - auto-update-conda: true - python-version: 3.7 - - name: Install awscli - run: conda install -c conda-forge awscli - - name: Start AWS batch job - # TODO nf-core: You can customise AWS full pipeline tests as required - # Add full size test data (but still relatively small datasets for few samples) - # on the `test_full.config` test runs with only one set of parameters - # Then specify `-profile test_full` instead of `-profile test` on the AWS batch command - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - TOWER_ACCESS_TOKEN: ${{ secrets.AWS_TOWER_TOKEN }} - AWS_JOB_DEFINITION: ${{ secrets.AWS_JOB_DEFINITION }} - AWS_JOB_QUEUE: ${{ secrets.AWS_JOB_QUEUE }} - AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }} - run: | - aws batch submit-job \ - --region eu-west-1 \ - --job-name nf-core-tools \ - --job-queue $AWS_JOB_QUEUE \ - --job-definition $AWS_JOB_DEFINITION \ - --container-overrides '{"command": ["nf-core/tools", "-r '"${GITHUB_SHA}"' -profile test --outdir s3://nf-core-awsmegatests/tools/results-'"${GITHUB_SHA}"' -w s3://nf-core-awsmegatests/tools/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}]}' \ No newline at end of file diff --git a/tests/lint_examples/failing_example/.github/workflows/awstest.yml b/tests/lint_examples/failing_example/.github/workflows/awstest.yml deleted file mode 100644 index a4bf436da0..0000000000 --- a/tests/lint_examples/failing_example/.github/workflows/awstest.yml +++ /dev/null @@ -1,42 +0,0 @@ -name: nf-core AWS tests -# This workflow is triggered on push to the master branch. 
-# It runs the -profile 'test' on AWS batch - -on: - push: - branches: - - master - - dev - pull_request: - -jobs: - run-awstest: - name: Run AWS tests - if: github.repository == 'nf-core/tools' - runs-on: ubuntu-latest - steps: - - name: Setup Miniconda - uses: goanpeca/setup-miniconda@v1.0.2 - with: - auto-update-conda: true - python-version: 3.7 - - name: Install awscli - run: conda install -c conda-forge awscli - - name: Start AWS batch job - # TODO nf-core: You can customise CI pipeline run tests as required - # For example: adding multiple test runs with different parameters - # Remember that you can parallelise this by using strategy.matrix - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - TOWER_ACCESS_TOKEN: ${{ secrets.AWS_TOWER_TOKEN }} - AWS_JOB_DEFINITION: ${{ secrets.AWS_JOB_DEFINITION }} - AWS_JOB_QUEUE: ${{ secrets.AWS_JOB_QUEUE }} - AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }} - run: | - aws batch submit-job \ - --region eu-west-1 \ - --job-name nf-core-tools \ - --job-queue $AWS_JOB_QUEUE \ - --job-definition $AWS_JOB_DEFINITION \ - --container-overrides '{"command": ["nf-core/tools", "-r '"${GITHUB_SHA}"' -profile test --outdir s3://nf-core-awsmegatests/tools/results-'"${GITHUB_SHA}"' -w s3://nf-core-awsmegatests/tools/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}]}' \ No newline at end of file diff --git a/tests/lint_examples/failing_example/.github/workflows/branch.yml b/tests/lint_examples/failing_example/.github/workflows/branch.yml deleted file mode 100644 index 05e345fd20..0000000000 --- a/tests/lint_examples/failing_example/.github/workflows/branch.yml +++ /dev/null @@ -1,10 +0,0 @@ -name: nf-core branch protection -jobs: - test: - runs-on: ubuntu-18.04 - steps: - # PRs are only ok if coming from an nf-core `dev` branch or a fork `patch` branch - - name: Check PRs - run: bad example - - name: 
Check sth - run: still bad example \ No newline at end of file diff --git a/tests/lint_examples/failing_example/.github/workflows/ci.yml b/tests/lint_examples/failing_example/.github/workflows/ci.yml deleted file mode 100644 index eab6f83518..0000000000 --- a/tests/lint_examples/failing_example/.github/workflows/ci.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: nf-core CI -# This workflow is triggered on pushes and PRs to the repository. -# It runs the pipeline with the minimal test dataset to check that it completes without any syntax errors -on: - -jobs: - test: - runs-on: ubuntu-18.04 - steps: - - uses: actions/checkout@v1 - - name: Install Nextflow - run: | - - name: Pull container - run: | - - name: Run test - run: | diff --git a/tests/lint_examples/failing_example/.github/workflows/linting.yml b/tests/lint_examples/failing_example/.github/workflows/linting.yml deleted file mode 100644 index 0c774d0fee..0000000000 --- a/tests/lint_examples/failing_example/.github/workflows/linting.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: nf-core linting -# This workflow is triggered on pushes and PRs to the repository. 
-# It runs the `nf-core lint` and markdown lint tests to ensure that the code meets the nf-core guidelines -on: - -jobs: - Markdown: - runs-on: ubuntu-18.04 - steps: - - uses: actions/checkout@v1 - - uses: actions/setup-node@v1 - with: - node-version: '10' - - name: Install markdownlint - run: | - npm install -g markdownlint-cli - - name: Run Markdownlint - run: | - nf-core: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v1 - - name: Install Nextflow - run: | - wget -qO- get.nextflow.io | bash - sudo mv nextflow /usr/local/bin/ - - uses: actions/setup-python@v1 - with: - python-version: '3.6' - architecture: 'x64' - - name: Install pip - run: | - sudo apt install python3-pip - pip install --upgrade pip - - name: Install nf-core tools - run: | - pip install nf-core - - name: Run nf-core lint - run: | - \ No newline at end of file diff --git a/tests/lint_examples/failing_example/.travis.yml b/tests/lint_examples/failing_example/.travis.yml deleted file mode 100644 index 85ae7e25aa..0000000000 --- a/tests/lint_examples/failing_example/.travis.yml +++ /dev/null @@ -1,2 +0,0 @@ -script: - - "echo This doesn't do anything useful" diff --git a/tests/lint_examples/failing_example/LICENSE.md b/tests/lint_examples/failing_example/LICENSE.md deleted file mode 100644 index 32a22d6a96..0000000000 --- a/tests/lint_examples/failing_example/LICENSE.md +++ /dev/null @@ -1 +0,0 @@ -# This is a bad licence file diff --git a/tests/lint_examples/failing_example/README.md b/tests/lint_examples/failing_example/README.md deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/lint_examples/failing_example/Singularity b/tests/lint_examples/failing_example/Singularity deleted file mode 100644 index 02e88c8045..0000000000 --- a/tests/lint_examples/failing_example/Singularity +++ /dev/null @@ -1 +0,0 @@ -Nothing to be found here \ No newline at end of file diff --git a/tests/lint_examples/failing_example/environment.yml 
b/tests/lint_examples/failing_example/environment.yml deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/lint_examples/failing_example/main.nf b/tests/lint_examples/failing_example/main.nf deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/lint_examples/failing_example/nextflow.config b/tests/lint_examples/failing_example/nextflow.config deleted file mode 100644 index 38dc8ee1b6..0000000000 --- a/tests/lint_examples/failing_example/nextflow.config +++ /dev/null @@ -1,14 +0,0 @@ -manifest.homePage = 'https://nf-co.re/pipelines' -manifest.name = 'pipelines' -manifest.nextflowVersion = '0.30.1' -manifest.version = '0.4dev' - -dag.file = "dag.html" - -params.container = 'pipelines:latest' - -process { - $deprecatedSyntax { - cpu = 1 - } -} diff --git a/tests/lint_examples/license_incomplete_example/LICENSE b/tests/lint_examples/license_incomplete_example/LICENSE deleted file mode 100644 index 7e6c6575b6..0000000000 --- a/tests/lint_examples/license_incomplete_example/LICENSE +++ /dev/null @@ -1,7 +0,0 @@ -Copyright 1984 - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/tests/lint_examples/minimalworkingexample/.github/workflows/awsfulltest.yml b/tests/lint_examples/minimalworkingexample/.github/workflows/awsfulltest.yml deleted file mode 100644 index 2045d5014e..0000000000 --- a/tests/lint_examples/minimalworkingexample/.github/workflows/awsfulltest.yml +++ /dev/null @@ -1,38 +0,0 @@ -name: nf-core AWS full size tests -# This workflow is triggered on push to the master branch. -# It runs the -profile 'test_full' on AWS batch - -on: - workflow_run: - workflows: ["nf-core Docker push (release)"] - types: [completed] - workflow_dispatch: - -jobs: - run-awstest: - name: Run AWS tests - if: github.repository == 'nf-core/tools' - runs-on: ubuntu-latest - steps: - - name: Setup Miniconda - uses: goanpeca/setup-miniconda@v1.0.2 - with: - auto-update-conda: true - python-version: 3.7 - - name: Install awscli - run: conda install -c conda-forge awscli - - name: Start AWS batch job - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - TOWER_ACCESS_TOKEN: ${{ secrets.AWS_TOWER_TOKEN }} - AWS_JOB_DEFINITION: ${{ secrets.AWS_JOB_DEFINITION }} - AWS_JOB_QUEUE: ${{ secrets.AWS_JOB_QUEUE }} - AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }} - run: | - aws batch submit-job \ - --region eu-west-1 \ - --job-name nf-core-tools \ - --job-queue $AWS_JOB_QUEUE \ - --job-definition $AWS_JOB_DEFINITION \ - --container-overrides '{"command": ["nf-core/tools", "-r '"${GITHUB_SHA}"' -profile test_full --outdir s3://nf-core-awsmegatests/tools/results-'"${GITHUB_SHA}"' -w s3://nf-core-awsmegatests/tools/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}]}' 
diff --git a/tests/lint_examples/minimalworkingexample/.github/workflows/awstest.yml b/tests/lint_examples/minimalworkingexample/.github/workflows/awstest.yml deleted file mode 100644 index 2347f7d019..0000000000 --- a/tests/lint_examples/minimalworkingexample/.github/workflows/awstest.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: nf-core AWS tests -# This workflow is triggered on push to the master branch. -# It runs the -profile 'test' on AWS batch - -on: - workflow_dispatch: - -jobs: - run-awstest: - name: Run AWS tests - if: github.repository == 'nf-core/tools' - runs-on: ubuntu-latest - steps: - - name: Setup Miniconda - uses: goanpeca/setup-miniconda@v1.0.2 - with: - auto-update-conda: true - python-version: 3.7 - - name: Install awscli - run: conda install -c conda-forge awscli - - name: Start AWS batch job - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - TOWER_ACCESS_TOKEN: ${{ secrets.AWS_TOWER_TOKEN }} - AWS_JOB_DEFINITION: ${{ secrets.AWS_JOB_DEFINITION }} - AWS_JOB_QUEUE: ${{ secrets.AWS_JOB_QUEUE }} - AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }} - run: | - aws batch submit-job \ - --region eu-west-1 \ - --job-name nf-core-tools \ - --job-queue $AWS_JOB_QUEUE \ - --job-definition $AWS_JOB_DEFINITION \ - --container-overrides '{"command": ["nf-core/tools", "-r '"${GITHUB_SHA}"' -profile test --outdir s3://nf-core-awsmegatests/tools/results-'"${GITHUB_SHA}"' -w s3://nf-core-awsmegatests/tools/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}]}' \ No newline at end of file diff --git a/tests/lint_examples/minimalworkingexample/.github/workflows/branch.yml b/tests/lint_examples/minimalworkingexample/.github/workflows/branch.yml deleted file mode 100644 index 1d1305cab8..0000000000 --- a/tests/lint_examples/minimalworkingexample/.github/workflows/branch.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: nf-core branch 
protection -# This workflow is triggered on PRs to master branch on the repository -# It fails when someone tries to make a PR against the nf-core `master` branch instead of `dev` -on: - pull_request_target: - branches: [master] - -jobs: - test: - runs-on: ubuntu-latest - steps: - # PRs to the nf-core repo master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches - - name: Check PRs - if: github.repository == 'nf-core/tools' - run: | - { [[ ${{github.event.pull_request.head.repo.full_name}} == nf-core/tools ]] && [[ $GITHUB_HEAD_REF = "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]] diff --git a/tests/lint_examples/minimalworkingexample/.github/workflows/ci.yml b/tests/lint_examples/minimalworkingexample/.github/workflows/ci.yml deleted file mode 100644 index ea6d955d02..0000000000 --- a/tests/lint_examples/minimalworkingexample/.github/workflows/ci.yml +++ /dev/null @@ -1,87 +0,0 @@ -name: nf-core CI -# This workflow is triggered on releases and pull-requests. -# It runs the pipeline with the minimal test dataset to check that it completes without any syntax errors -on: - push: - branches: - - dev - pull_request: - release: - types: [published] - -jobs: - test: - name: Run workflow tests - # Only run on push if this is the nf-core dev branch (merged PRs) - if: ${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/tools') }} - runs-on: ubuntu-latest - env: - NXF_VER: ${{ matrix.nxf_ver }} - NXF_ANSI_LOG: false - strategy: - matrix: - # Nextflow versions: check pipeline minimum and current latest - nxf_ver: ['20.04.0', ''] - steps: - - name: Check out pipeline code - uses: actions/checkout@v2 - - - name: Check if Dockerfile or Conda environment changed - uses: technote-space/get-diff-action@v1 - with: - PREFIX_FILTER: | - Dockerfile - environment.yml - - - name: Build new docker image - if: env.GIT_DIFF - run: docker build --no-cache . 
-t nfcore/tools:0.4 - - - name: Pull docker image - if: ${{ !env.GIT_DIFF }} - run: | - docker pull nfcore/tools:dev - docker tag nfcore/tools:dev nfcore/tools:0.4 - - - name: Install Nextflow - env: - CAPSULE_LOG: none - run: | - wget -qO- get.nextflow.io | bash - sudo mv nextflow /usr/local/bin/ - - - name: Run pipeline with test data - run: | - nextflow run ${GITHUB_WORKSPACE} -profile test,docker - - push_dockerhub: - name: Push new Docker image to Docker Hub - runs-on: ubuntu-latest - # Only run if the tests passed - needs: test - # Only run for the nf-core repo, for releases and merged PRs - if: ${{ github.repository == 'nf-core/tools' && (github.event_name == 'release' || github.event_name == 'push') }} - env: - DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - DOCKERHUB_PASS: ${{ secrets.DOCKERHUB_PASS }} - steps: - - name: Check out pipeline code - uses: actions/checkout@v2 - - - name: Build new docker image - run: docker build --no-cache . -t nfcore/tools:latest - - - name: Push Docker image to DockerHub (dev) - if: ${{ github.event_name == 'push' }} - run: | - echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin - docker tag nfcore/tools:latest nfcore/tools:dev - docker push nfcore/tools:dev - - - name: Push Docker image to DockerHub (release) - if: ${{ github.event_name == 'release' }} - run: | - echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin - docker push nfcore/tools:latest - docker tag nfcore/tools:latest nfcore/tools:${{ github.event.release.tag_name }} - docker push nfcore/tools:${{ github.event.release.tag_name }} diff --git a/tests/lint_examples/minimalworkingexample/.github/workflows/linting.yml b/tests/lint_examples/minimalworkingexample/.github/workflows/linting.yml deleted file mode 100644 index 6bf4fccf02..0000000000 --- a/tests/lint_examples/minimalworkingexample/.github/workflows/linting.yml +++ /dev/null @@ -1,46 +0,0 @@ -name: nf-core linting -# This workflow is triggered 
on pushes and PRs to the repository. -# It runs the `nf-core lint` and markdown lint tests to ensure that the code meets the nf-core guidelines -on: - push: - pull_request: - release: - types: [published] - -jobs: - Markdown: - runs-on: ubuntu-18.04 - steps: - - uses: actions/checkout@v1 - - uses: actions/setup-node@v1 - with: - node-version: '10' - - name: Install markdownlint - run: | - npm install -g markdownlint-cli - - name: Run Markdownlint - run: | - markdownlint ${GITHUB_WORKSPACE} -c ${GITHUB_WORKSPACE}/.github/markdownlint.yml - nf-core: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v1 - - name: Install Nextflow - env: - CAPSULE_LOG: none - run: | - wget -qO- get.nextflow.io | bash - sudo mv nextflow /usr/local/bin/ - - uses: actions/setup-python@v1 - with: - python-version: '3.6' - architecture: 'x64' - - name: Install pip - run: | - sudo apt install python3-pip - pip install --upgrade pip - - name: Install nf-core tools - run: | - pip install nf-core - - name: Run nf-core lint - run: nf-core -l lint_log.txt lint ${GITHUB_WORKSPACE} diff --git a/tests/lint_examples/minimalworkingexample/CHANGELOG.md b/tests/lint_examples/minimalworkingexample/CHANGELOG.md deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/lint_examples/minimalworkingexample/Dockerfile b/tests/lint_examples/minimalworkingexample/Dockerfile deleted file mode 100644 index d5c8005c47..0000000000 --- a/tests/lint_examples/minimalworkingexample/Dockerfile +++ /dev/null @@ -1,8 +0,0 @@ -FROM nfcore/base:1.11 -LABEL authors="Phil Ewels phil.ewels@scilifelab.se" \ - description="Docker image containing all requirements for the nf-core tools pipeline" - -COPY environment.yml / -RUN conda env create --quiet -f /environment.yml && conda clean -a -RUN conda env export --name nf-core-tools-0.4 > nf-core-tools-0.4.yml -ENV PATH /opt/conda/envs/nf-core-tools-0.4/bin:$PATH diff --git a/tests/lint_examples/minimalworkingexample/LICENSE 
b/tests/lint_examples/minimalworkingexample/LICENSE deleted file mode 100644 index ba37e5dbb3..0000000000 --- a/tests/lint_examples/minimalworkingexample/LICENSE +++ /dev/null @@ -1,7 +0,0 @@ -Copyright 1984 me-myself-and-I - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/tests/lint_examples/minimalworkingexample/README.md b/tests/lint_examples/minimalworkingexample/README.md deleted file mode 100644 index ae26ae11c7..0000000000 --- a/tests/lint_examples/minimalworkingexample/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# The pipeline readme file - -[![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A520.04.0-brightgreen.svg)](https://www.nextflow.io/) - -[![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/) diff --git a/tests/lint_examples/minimalworkingexample/conf/base.config b/tests/lint_examples/minimalworkingexample/conf/base.config deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/lint_examples/minimalworkingexample/docs/README.md b/tests/lint_examples/minimalworkingexample/docs/README.md deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/lint_examples/minimalworkingexample/docs/output.md b/tests/lint_examples/minimalworkingexample/docs/output.md deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/lint_examples/minimalworkingexample/docs/usage.md b/tests/lint_examples/minimalworkingexample/docs/usage.md deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/lint_examples/minimalworkingexample/environment.yml b/tests/lint_examples/minimalworkingexample/environment.yml deleted file mode 100644 index c40b9fce5a..0000000000 --- a/tests/lint_examples/minimalworkingexample/environment.yml +++ /dev/null @@ -1,13 +0,0 @@ -# You can use this file to create a conda environment for this pipeline: -# conda env create -f environment.yml -name: nf-core-tools-0.4 -channels: - - conda-forge - - bioconda - - defaults -dependencies: - - conda-forge::openjdk=8.0.144 - - conda-forge::markdown=3.1.1=py_0 - - fastqc=0.11.7 - - pip: - - multiqc==1.4 diff --git a/tests/lint_examples/minimalworkingexample/main.nf b/tests/lint_examples/minimalworkingexample/main.nf deleted file mode 
100644 index e69de29bb2..0000000000 diff --git a/tests/lint_examples/minimalworkingexample/nextflow.config b/tests/lint_examples/minimalworkingexample/nextflow.config deleted file mode 100644 index 5bd148751e..0000000000 --- a/tests/lint_examples/minimalworkingexample/nextflow.config +++ /dev/null @@ -1,42 +0,0 @@ - -params { - outdir = './results' - input = "data/*.fastq" - single_end = false - custom_config_version = 'master' - custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" -} - -process { - container = 'nfcore/tools:0.4' - cpus = 1 - memory = 2.GB - time = 14.h -} - -timeline { - enabled = true - file = "timeline.html" -} -report { - enabled = true - file = "report.html" -} -trace { - enabled = true - file = "trace.txt" -} -dag { - enabled = true - file = "dag.svg" -} - -manifest { - name = 'nf-core/tools' - author = 'Phil Ewels' - homePage = 'https://github.com/nf-core/tools' - description = 'Minimal working example pipeline' - mainScript = 'main.nf' - nextflowVersion = '>=20.04.0' - version = '0.4' -} diff --git a/tests/lint_examples/minimalworkingexample/nextflow_schema.json b/tests/lint_examples/minimalworkingexample/nextflow_schema.json deleted file mode 100644 index 9340e60113..0000000000 --- a/tests/lint_examples/minimalworkingexample/nextflow_schema.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema", - "$id": "https://raw.githubusercontent.com/nf-core/tools/master/nextflow_schema.json", - "title": "nf-core/tools pipeline parameters", - "description": "Minimal working example pipeline", - "type": "object", - "properties": { - "outdir": { - "type": "string", - "default": "'./results'" - }, - "input": { - "type": "string", - "default": "'data/*.fastq'" - }, - "single_end": { - "type": "string", - "default": "false" - }, - "custom_config_version": { - "type": "string", - "default": "'master'" - }, - "custom_config_base": { - "type": "string", - "default": 
"'https://raw.githubusercontent.com/nf-core/configs/master'" - } - } -} diff --git a/tests/lint_examples/minimalworkingexample/tests/run_test.sh b/tests/lint_examples/minimalworkingexample/tests/run_test.sh deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/lint_examples/missing_license_example/README.md b/tests/lint_examples/missing_license_example/README.md deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/lint_examples/wrong_license_example/LICENSE b/tests/lint_examples/wrong_license_example/LICENSE deleted file mode 100644 index a9ed694801..0000000000 --- a/tests/lint_examples/wrong_license_example/LICENSE +++ /dev/null @@ -1,9 +0,0 @@ -Copyright 1984 me-myself-and-I - -this is a bad license - -that has more than - -four lines - -but is acutally no license file \ No newline at end of file diff --git a/tests/test_bump_version.py b/tests/test_bump_version.py index 20a7f44da5..74e9dfddf0 100644 --- a/tests/test_bump_version.py +++ b/tests/test_bump_version.py @@ -2,63 +2,102 @@ """Some tests covering the bump_version code. 
""" import os -import pytest -import nf_core.lint, nf_core.bump_version +import tempfile +import yaml -WD = os.path.dirname(__file__) -PATH_WORKING_EXAMPLE = os.path.join(WD, "lint_examples/minimalworkingexample") +import nf_core.bump_version +import nf_core.create +import nf_core.utils -@pytest.mark.datafiles(PATH_WORKING_EXAMPLE) -def test_working_bump_pipeline_version(datafiles): +def test_bump_pipeline_version(datafiles): """ Test that making a release with the working example files works """ - lint_obj = nf_core.lint.PipelineLint(str(datafiles)) - lint_obj.pipeline_name = "tools" - lint_obj.config["manifest.version"] = "0.4" - lint_obj.files = ["nextflow.config", "Dockerfile", "environment.yml"] - nf_core.bump_version.bump_pipeline_version(lint_obj, "1.1") + # Get a workflow and configs + test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + create_obj = nf_core.create.PipelineCreate( + "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir + ) + create_obj.init_pipeline() + pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) + pipeline_obj._load() + + # Bump the version number + nf_core.bump_version.bump_pipeline_version(pipeline_obj, "1.1") + new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) + + # Check nextflow.config + new_pipeline_obj._load_pipeline_config() + assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1" + assert new_pipeline_obj.nf_config["process.container"].strip("'\"") == "nfcore/testpipeline:1.1" + + # Check .github/workflows/ci.yml + with open(new_pipeline_obj._fp(".github/workflows/ci.yml")) as fh: + ci_yaml = yaml.safe_load(fh) + assert ci_yaml["jobs"]["test"]["steps"][2]["run"] == "docker build --no-cache . 
-t nfcore/testpipeline:1.1" + assert "docker tag nfcore/testpipeline:dev nfcore/testpipeline:1.1" in ci_yaml["jobs"]["test"]["steps"][3]["run"] + + # Check environment.yml + with open(new_pipeline_obj._fp("environment.yml")) as fh: + conda_env = yaml.safe_load(fh) + assert conda_env["name"] == "nf-core-testpipeline-1.1" + + # Check Dockerfile + with open(new_pipeline_obj._fp("Dockerfile")) as fh: + dockerfile = fh.read().splitlines() + assert "ENV PATH /opt/conda/envs/nf-core-testpipeline-1.1/bin:$PATH" in dockerfile + assert "RUN conda env export --name nf-core-testpipeline-1.1 > nf-core-testpipeline-1.1.yml" in dockerfile -@pytest.mark.datafiles(PATH_WORKING_EXAMPLE) def test_dev_bump_pipeline_version(datafiles): """ Test that making a release works with a dev name and a leading v """ - lint_obj = nf_core.lint.PipelineLint(str(datafiles)) - lint_obj.pipeline_name = "tools" - lint_obj.config["manifest.version"] = "0.4" - lint_obj.files = ["nextflow.config", "Dockerfile", "environment.yml"] - nf_core.bump_version.bump_pipeline_version(lint_obj, "v1.2dev") - - -@pytest.mark.datafiles(PATH_WORKING_EXAMPLE) -@pytest.mark.xfail(raises=SyntaxError, strict=True) -def test_pattern_not_found(datafiles): - """ Test that making a release raises and error if a pattern isn't found """ - lint_obj = nf_core.lint.PipelineLint(str(datafiles)) - lint_obj.pipeline_name = "tools" - lint_obj.config["manifest.version"] = "0.5" - lint_obj.files = ["nextflow.config", "Dockerfile", "environment.yml"] - nf_core.bump_version.bump_pipeline_version(lint_obj, "1.2dev") - - -@pytest.mark.datafiles(PATH_WORKING_EXAMPLE) -@pytest.mark.xfail(raises=SyntaxError, strict=True) -def test_multiple_patterns_found(datafiles): - """ Test that making a release raises if a version number is found twice """ - lint_obj = nf_core.lint.PipelineLint(str(datafiles)) - with open(os.path.join(str(datafiles), "nextflow.config"), "a") as nfcfg: - nfcfg.write("manifest.version = '0.4'") - lint_obj.pipeline_name = 
"tools" - lint_obj.config["manifest.version"] = "0.4" - lint_obj.files = ["nextflow.config", "Dockerfile", "environment.yml"] - nf_core.bump_version.bump_pipeline_version(lint_obj, "1.2dev") - - -@pytest.mark.datafiles(PATH_WORKING_EXAMPLE) -def test_successfull_nextflow_version_bump(datafiles): - lint_obj = nf_core.lint.PipelineLint(str(datafiles)) - lint_obj.pipeline_name = "tools" - lint_obj.config["manifest.nextflowVersion"] = "20.04.0" - nf_core.bump_version.bump_nextflow_version(lint_obj, "0.40") - lint_obj_new = nf_core.lint.PipelineLint(str(datafiles)) - lint_obj_new.check_nextflow_config() - assert lint_obj_new.config["manifest.nextflowVersion"] == "'>=0.40'" + # Get a workflow and configs + test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + create_obj = nf_core.create.PipelineCreate( + "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir + ) + create_obj.init_pipeline() + pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) + pipeline_obj._load() + + # Bump the version number + nf_core.bump_version.bump_pipeline_version(pipeline_obj, "v1.2dev") + new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) + + # Check the pipeline config + new_pipeline_obj._load_pipeline_config() + assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.2dev" + assert new_pipeline_obj.nf_config["process.container"].strip("'\"") == "nfcore/testpipeline:dev" + + +def test_bump_nextflow_version(datafiles): + # Get a workflow and configs + test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + create_obj = nf_core.create.PipelineCreate( + "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir + ) + create_obj.init_pipeline() + pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) + pipeline_obj._load() + + # Bump the version number + nf_core.bump_version.bump_nextflow_version(pipeline_obj, "19.10.3-edge") + new_pipeline_obj = 
nf_core.utils.Pipeline(test_pipeline_dir) + + # Check nextflow.config + new_pipeline_obj._load_pipeline_config() + assert new_pipeline_obj.nf_config["manifest.nextflowVersion"].strip("'\"") == ">=19.10.3-edge" + + # Check .github/workflows/ci.yml + with open(new_pipeline_obj._fp(".github/workflows/ci.yml")) as fh: + ci_yaml = yaml.safe_load(fh) + assert ci_yaml["jobs"]["test"]["strategy"]["matrix"]["nxf_ver"][0] == "19.10.3-edge" + + # Check README.md + with open(new_pipeline_obj._fp("README.md")) as fh: + readme = fh.read().splitlines() + assert ( + "[![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A5{}-brightgreen.svg)](https://www.nextflow.io/)".format( + "19.10.3-edge" + ) + in readme + ) diff --git a/tests/test_create.py b/tests/test_create.py index 8d527891d3..2b2e18fba7 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -6,31 +6,29 @@ import tempfile import unittest -WD = os.path.dirname(__file__) -PIPELINE_NAME = "nf-core/test" -PIPELINE_DESCRIPTION = "just for 4w3s0m3 tests" -PIPELINE_AUTHOR = "Chuck Norris" -PIPELINE_VERSION = "1.0.0" - class NfcoreCreateTest(unittest.TestCase): def setUp(self): - self.tmppath = tempfile.mkdtemp() + self.pipeline_name = "nf-core/test" + self.pipeline_description = "just for 4w3s0m3 tests" + self.pipeline_author = "Chuck Norris" + self.pipeline_version = "1.0.0" + self.pipeline = nf_core.create.PipelineCreate( - name=PIPELINE_NAME, - description=PIPELINE_DESCRIPTION, - author=PIPELINE_AUTHOR, - new_version=PIPELINE_VERSION, + name=self.pipeline_name, + description=self.pipeline_description, + author=self.pipeline_author, + new_version=self.pipeline_version, no_git=False, force=True, - outdir=self.tmppath, + outdir=tempfile.mkdtemp(), ) def test_pipeline_creation(self): - assert self.pipeline.name == PIPELINE_NAME - assert self.pipeline.description == PIPELINE_DESCRIPTION - assert self.pipeline.author == PIPELINE_AUTHOR - assert self.pipeline.new_version == PIPELINE_VERSION + assert self.pipeline.name 
== self.pipeline_name + assert self.pipeline.description == self.pipeline_description + assert self.pipeline.author == self.pipeline_author + assert self.pipeline.new_version == self.pipeline_version def test_pipeline_creation_initiation(self): self.pipeline.init_pipeline() diff --git a/tests/test_download.py b/tests/test_download.py index fe10592aa6..cdf707ad93 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -2,6 +2,7 @@ """Tests for the download subcommand of nf-core tools """ +import nf_core.create import nf_core.utils from nf_core.download import DownloadWorkflow @@ -13,8 +14,6 @@ import tempfile import unittest -PATH_WORKING_EXAMPLE = os.path.join(os.path.dirname(__file__), "lint_examples/minimalworkingexample") - class DownloadTest(unittest.TestCase): @@ -108,9 +107,15 @@ def test_download_configs(self): # def test_wf_use_local_configs(self): # Get a workflow and configs + test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + create_obj = nf_core.create.PipelineCreate( + "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir + ) + create_obj.init_pipeline() + test_outdir = tempfile.mkdtemp() download_obj = DownloadWorkflow(pipeline="dummy", release="1.2.0", outdir=test_outdir) - shutil.copytree(PATH_WORKING_EXAMPLE, os.path.join(test_outdir, "workflow")) + shutil.copytree(test_pipeline_dir, os.path.join(test_outdir, "workflow")) download_obj.download_configs() # Test the function diff --git a/tests/test_lint.py b/tests/test_lint.py index 4680901278..0b9eb21789 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -1,529 +1,97 @@ #!/usr/bin/env python """Some tests covering the linting code. -Provide example wokflow directory contents like: - - --tests - |--lint_examples - | |--missing_license - | | |... - | |--missing_config - | | |.... - | |... 
- |--test_lint.py """ +import fnmatch import json import mock import os import pytest import requests +import shutil +import subprocess import tempfile import unittest import yaml +import nf_core.create import nf_core.lint -def listfiles(path): - files_found = [] - for (_, _, files) in os.walk(path): - files_found.extend(files) - return files_found - - -def pf(wd, path): - return os.path.join(wd, path) - - -WD = os.path.dirname(__file__) -PATH_CRITICAL_EXAMPLE = pf(WD, "lint_examples/critical_example") -PATH_FAILING_EXAMPLE = pf(WD, "lint_examples/failing_example") -PATH_WORKING_EXAMPLE = pf(WD, "lint_examples/minimalworkingexample") -PATH_MISSING_LICENSE_EXAMPLE = pf(WD, "lint_examples/missing_license_example") -PATHS_WRONG_LICENSE_EXAMPLE = [ - pf(WD, "lint_examples/wrong_license_example"), - pf(WD, "lint_examples/license_incomplete_example"), -] - -# The maximum sum of passed tests currently possible -MAX_PASS_CHECKS = 85 -# The additional tests passed for releases -ADD_PASS_RELEASE = 1 - -# The minimal working example expects a development release version -if "dev" not in nf_core.__version__: - nf_core.__version__ = "{}dev".format(nf_core.__version__) - - class TestLint(unittest.TestCase): """Class for lint tests""" - def assess_lint_status(self, lint_obj, **expected): - """Little helper function for assessing the lint - object status lists""" - for list_type, expect in expected.items(): - observed = len(getattr(lint_obj, list_type)) - oberved_list = yaml.safe_dump(getattr(lint_obj, list_type)) - self.assertEqual( - observed, - expect, - "Expected {} tests in '{}', but found {}.\n{}".format( - expect, list_type.upper(), observed, oberved_list - ), - ) - - def test_call_lint_pipeline_pass(self): - """Test the main execution function of PipelineLint (pass) - This should not result in any exception for the minimal - working example""" - old_nfcore_version = nf_core.__version__ - nf_core.__version__ = "1.11" - lint_obj = 
nf_core.lint.run_linting(PATH_WORKING_EXAMPLE, False) - nf_core.__version__ = old_nfcore_version - expectations = {"failed": 0, "warned": 5, "passed": MAX_PASS_CHECKS - 1} - self.assess_lint_status(lint_obj, **expectations) - - @pytest.mark.xfail(raises=AssertionError, strict=True) - def test_call_lint_pipeline_fail(self): - """Test the main execution function of PipelineLint (fail) - This should fail after the first test and halt execution""" - lint_obj = nf_core.lint.run_linting(PATH_FAILING_EXAMPLE, False) - expectations = {"failed": 4, "warned": 2, "passed": 7} - self.assess_lint_status(lint_obj, **expectations) - - def test_call_lint_pipeline_release(self): - """Test the main execution function of PipelineLint when running with --release""" - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.version = "1.11" - lint_obj.lint_pipeline(release_mode=True) - expectations = {"failed": 0, "warned": 4, "passed": MAX_PASS_CHECKS + ADD_PASS_RELEASE} - self.assess_lint_status(lint_obj, **expectations) - - def test_failing_dockerfile_example(self): - """Tests for empty Dockerfile""" - lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) - lint_obj.files = ["Dockerfile"] - lint_obj.check_docker() - self.assess_lint_status(lint_obj, failed=1) - - def test_critical_missingfiles_example(self): - """Tests for missing nextflow config and main.nf files""" - lint_obj = nf_core.lint.run_linting(PATH_CRITICAL_EXAMPLE, False) - assert len(lint_obj.failed) == 1 - - def test_failing_missingfiles_example(self): - """Tests for missing files like Dockerfile or LICENSE""" - lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) - lint_obj.check_files_exist() - expectations = {"failed": 6, "warned": 2, "passed": 14} - self.assess_lint_status(lint_obj, **expectations) - - def test_mit_licence_example_pass(self): - """Tests that MIT test works with good MIT licences""" - good_lint_obj = nf_core.lint.PipelineLint(PATH_CRITICAL_EXAMPLE) - 
good_lint_obj.check_licence() - expectations = {"failed": 0, "warned": 0, "passed": 1} - self.assess_lint_status(good_lint_obj, **expectations) - - def test_mit_license_example_with_failed(self): - """Tests that MIT test works with bad MIT licences""" - bad_lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) - bad_lint_obj.check_licence() - expectations = {"failed": 1, "warned": 0, "passed": 0} - self.assess_lint_status(bad_lint_obj, **expectations) - - def test_config_variable_example_pass(self): - """Tests that config variable existence test works with good pipeline example""" - good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - good_lint_obj.check_nextflow_config() - expectations = {"failed": 0, "warned": 1, "passed": 34} - self.assess_lint_status(good_lint_obj, **expectations) - - def test_config_variable_example_with_failed(self): - """Tests that config variable existence test fails with bad pipeline example""" - bad_lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) - bad_lint_obj.check_nextflow_config() - expectations = {"failed": 19, "warned": 6, "passed": 10} - self.assess_lint_status(bad_lint_obj, **expectations) - - @pytest.mark.xfail(raises=AssertionError, strict=True) - def test_config_variable_error(self): - """Tests that config variable existence test falls over nicely with nextflow can't run""" - bad_lint_obj = nf_core.lint.PipelineLint("/non/existant/path") - bad_lint_obj.check_nextflow_config() - - def test_actions_wf_branch_pass(self): - """Tests that linting for GitHub Actions workflow for branch protection works for a good example""" - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.pipeline_name = "tools" - lint_obj.check_actions_branch_protection() - expectations = {"failed": 0, "warned": 0, "passed": 2} - self.assess_lint_status(lint_obj, **expectations) - - def test_actions_wf_branch_fail(self): - """Tests that linting for GitHub Actions workflow for branch protection fails for a bad 
example""" - lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) - lint_obj.pipeline_name = "tools" - lint_obj.check_actions_branch_protection() - expectations = {"failed": 2, "warned": 0, "passed": 0} - self.assess_lint_status(lint_obj, **expectations) - - def test_actions_wf_ci_pass(self): - """Tests that linting for GitHub Actions CI workflow works for a good example""" - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.minNextflowVersion = "20.04.0" - lint_obj.pipeline_name = "tools" - lint_obj.config["process.container"] = "'nfcore/tools:0.4'" - lint_obj.check_actions_ci() - expectations = {"failed": 0, "warned": 0, "passed": 5} - self.assess_lint_status(lint_obj, **expectations) - - def test_actions_wf_ci_fail(self): - """Tests that linting for GitHub Actions CI workflow fails for a bad example""" - lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) - lint_obj.minNextflowVersion = "20.04.0" - lint_obj.pipeline_name = "tools" - lint_obj.config["process.container"] = "'nfcore/tools:0.4'" - lint_obj.check_actions_ci() - expectations = {"failed": 5, "warned": 0, "passed": 0} - self.assess_lint_status(lint_obj, **expectations) - - def test_actions_wf_ci_fail_wrong_NF_version(self): - """Tests that linting for GitHub Actions CI workflow fails for a bad NXF version""" - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.minNextflowVersion = "0.28.0" - lint_obj.pipeline_name = "tools" - lint_obj.config["process.container"] = "'nfcore/tools:0.4'" - lint_obj.check_actions_ci() - expectations = {"failed": 1, "warned": 0, "passed": 4} - self.assess_lint_status(lint_obj, **expectations) - - def test_actions_wf_lint_pass(self): - """Tests that linting for GitHub Actions linting wf works for a good example""" - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.check_actions_lint() - expectations = {"failed": 0, "warned": 0, "passed": 3} - self.assess_lint_status(lint_obj, **expectations) - - def 
test_actions_wf_lint_fail(self): - """Tests that linting for GitHub Actions linting wf fails for a bad example""" - lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) - lint_obj.check_actions_lint() - expectations = {"failed": 3, "warned": 0, "passed": 0} - self.assess_lint_status(lint_obj, **expectations) - - def test_actions_wf_awstest_pass(self): - """Tests that linting for GitHub Actions AWS test wf works for a good example""" - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.check_actions_awstest() - expectations = {"failed": 0, "warned": 0, "passed": 1} - self.assess_lint_status(lint_obj, **expectations) - - def test_actions_wf_awstest_fail(self): - """Tests that linting for GitHub Actions AWS test wf fails for a bad example""" - lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) - lint_obj.check_actions_awstest() - expectations = {"failed": 1, "warned": 0, "passed": 0} - self.assess_lint_status(lint_obj, **expectations) - - def test_actions_wf_awsfulltest_pass(self): - """Tests that linting for GitHub Actions AWS full test wf works for a good example""" - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.check_actions_awsfulltest() - expectations = {"failed": 0, "warned": 0, "passed": 2} - self.assess_lint_status(lint_obj, **expectations) - - def test_actions_wf_awsfulltest_fail(self): - """Tests that linting for GitHub Actions AWS full test wf fails for a bad example""" - lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) - lint_obj.check_actions_awsfulltest() - expectations = {"failed": 1, "warned": 1, "passed": 0} - self.assess_lint_status(lint_obj, **expectations) - - def test_wrong_license_examples_with_failed(self): - """Tests for checking the license test behavior""" - for example in PATHS_WRONG_LICENSE_EXAMPLE: - lint_obj = nf_core.lint.PipelineLint(example) - lint_obj.check_licence() - expectations = {"failed": 1, "warned": 0, "passed": 0} - self.assess_lint_status(lint_obj, 
**expectations) - - def test_missing_license_example(self): - """Tests for missing license behavior""" - lint_obj = nf_core.lint.PipelineLint(PATH_MISSING_LICENSE_EXAMPLE) - lint_obj.check_licence() - expectations = {"failed": 1, "warned": 0, "passed": 0} - self.assess_lint_status(lint_obj, **expectations) - - def test_readme_pass(self): - """Tests that the pipeline README file checks work with a good example""" - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.minNextflowVersion = "20.04.0" - lint_obj.files = ["environment.yml"] - lint_obj.check_readme() - expectations = {"failed": 0, "warned": 0, "passed": 2} - self.assess_lint_status(lint_obj, **expectations) - - def test_readme_warn(self): - """Tests that the pipeline README file checks fail """ - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.minNextflowVersion = "0.28.0" - lint_obj.check_readme() - expectations = {"failed": 1, "warned": 0, "passed": 0} - self.assess_lint_status(lint_obj, **expectations) - - def test_readme_fail(self): - """Tests that the pipeline README file checks give warnings with a bad example""" - lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) - lint_obj.files = ["environment.yml"] - lint_obj.check_readme() - expectations = {"failed": 0, "warned": 2, "passed": 0} - self.assess_lint_status(lint_obj, **expectations) - - def test_dockerfile_pass(self): - """Tests if a valid Dockerfile passes the lint checks""" - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.files = ["Dockerfile"] - lint_obj.check_docker() - expectations = {"failed": 0, "warned": 0, "passed": 1} - self.assess_lint_status(lint_obj, **expectations) - - def test_version_consistency_pass(self): - """Tests the workflow version and container version sucessfully""" - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.config["manifest.version"] = "0.4" - lint_obj.config["process.container"] = "nfcore/tools:0.4" - 
lint_obj.check_version_consistency() - expectations = {"failed": 0, "warned": 0, "passed": 1} - self.assess_lint_status(lint_obj, **expectations) - - def test_version_consistency_with_env_fail(self): - """Tests the behaviour, when a git activity is a release - and simulate wrong release tag""" - os.environ["GITHUB_REF"] = "refs/tags/0.5" - os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.config["manifest.version"] = "0.4" - lint_obj.config["process.container"] = "nfcore/tools:0.4" - lint_obj.check_version_consistency() - expectations = {"failed": 1, "warned": 0, "passed": 0} - self.assess_lint_status(lint_obj, **expectations) - - def test_version_consistency_with_numeric_fail(self): - """Tests the behaviour, when a git activity is a release - and simulate wrong release tag""" - os.environ["GITHUB_REF"] = "refs/tags/0.5dev" - os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.config["manifest.version"] = "0.4" - lint_obj.config["process.container"] = "nfcore/tools:0.4" - lint_obj.check_version_consistency() - expectations = {"failed": 1, "warned": 0, "passed": 0} - self.assess_lint_status(lint_obj, **expectations) - - def test_version_consistency_with_no_docker_version_fail(self): - """Tests the behaviour, when a git activity is a release - and simulate wrong missing docker version tag""" - os.environ["GITHUB_REF"] = "refs/tags/0.4" - os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.config["manifest.version"] = "0.4" - lint_obj.config["process.container"] = "nfcore/tools" - lint_obj.check_version_consistency() - expectations = {"failed": 1, "warned": 0, "passed": 0} - self.assess_lint_status(lint_obj, **expectations) - - def test_version_consistency_with_env_pass(self): - """Tests the behaviour, when a git activity is a release - 
and simulate correct release tag""" - os.environ["GITHUB_REF"] = "refs/tags/0.4" - os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.config["manifest.version"] = "0.4" - lint_obj.config["process.container"] = "nfcore/tools:0.4" - lint_obj.check_version_consistency() - expectations = {"failed": 0, "warned": 0, "passed": 1} - self.assess_lint_status(lint_obj, **expectations) - - def test_conda_env_pass(self): - """ Tests the conda environment config checks with a working example """ - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.files = ["environment.yml"] - with open(os.path.join(PATH_WORKING_EXAMPLE, "environment.yml"), "r") as fh: - lint_obj.conda_config = yaml.safe_load(fh) - lint_obj.pipeline_name = "tools" - lint_obj.config["manifest.version"] = "0.4" - lint_obj.check_conda_env_yaml() - expectations = {"failed": 0, "warned": 4, "passed": 5} - self.assess_lint_status(lint_obj, **expectations) - - def test_conda_env_fail(self): - """ Tests the conda environment config fails with a bad example """ - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.files = ["environment.yml"] - with open(os.path.join(PATH_WORKING_EXAMPLE, "environment.yml"), "r") as fh: - lint_obj.conda_config = yaml.safe_load(fh) - lint_obj.conda_config["dependencies"] = ["fastqc", "multiqc=0.9", "notapackaage=0.4"] - lint_obj.pipeline_name = "not_tools" - lint_obj.config["manifest.version"] = "0.23" - lint_obj.check_conda_env_yaml() - expectations = {"failed": 3, "warned": 1, "passed": 2} - self.assess_lint_status(lint_obj, **expectations) - - @mock.patch("requests.get") - @pytest.mark.xfail(raises=ValueError, strict=True) - def test_conda_env_timeout(self, mock_get): - """ Tests the conda environment handles API timeouts """ - # Define the behaviour of the request get mock - mock_get.side_effect = requests.exceptions.Timeout() - # Now do the test - lint_obj = 
nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.conda_config["channels"] = ["bioconda"] - lint_obj.check_anaconda_package("multiqc=1.6") - - def test_conda_env_skip(self): - """ Tests the conda environment config is skipped when not needed """ - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.check_conda_env_yaml() - expectations = {"failed": 0, "warned": 0, "passed": 0} - self.assess_lint_status(lint_obj, **expectations) - - def test_conda_dockerfile_pass(self): - """ Tests the conda Dockerfile test works with a working example """ - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.version = "1.11" - lint_obj.files = ["environment.yml", "Dockerfile"] - with open(os.path.join(PATH_WORKING_EXAMPLE, "Dockerfile"), "r") as fh: - lint_obj.dockerfile = fh.read().splitlines() - lint_obj.conda_config["name"] = "nf-core-tools-0.4" - lint_obj.check_conda_dockerfile() - expectations = {"failed": 0, "warned": 0, "passed": 1} - self.assess_lint_status(lint_obj, **expectations) - - def test_conda_dockerfile_fail(self): - """ Tests the conda Dockerfile test fails with a bad example """ - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.version = "1.11" - lint_obj.files = ["environment.yml", "Dockerfile"] - lint_obj.conda_config["name"] = "nf-core-tools-0.4" - lint_obj.dockerfile = ["fubar"] - lint_obj.check_conda_dockerfile() - expectations = {"failed": 5, "warned": 0, "passed": 0} - self.assess_lint_status(lint_obj, **expectations) - - def test_conda_dockerfile_skip(self): - """ Tests the conda Dockerfile test is skipped when not needed """ - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.check_conda_dockerfile() - expectations = {"failed": 0, "warned": 0, "passed": 0} - self.assess_lint_status(lint_obj, **expectations) - - def test_pip_no_version_fail(self): - """ Tests the pip dependency version definition is present """ - lint_obj = 
nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.files = ["environment.yml"] - lint_obj.pipeline_name = "tools" - lint_obj.config["manifest.version"] = "0.4" - lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc"]}]} - lint_obj.check_conda_env_yaml() - expectations = {"failed": 1, "warned": 0, "passed": 1} - self.assess_lint_status(lint_obj, **expectations) - - def test_pip_package_not_latest_warn(self): - """ Tests the pip dependency version definition is present """ - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.files = ["environment.yml"] - lint_obj.pipeline_name = "tools" - lint_obj.config["manifest.version"] = "0.4" - lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.4"]}]} - lint_obj.check_conda_env_yaml() - expectations = {"failed": 0, "warned": 1, "passed": 2} - self.assess_lint_status(lint_obj, **expectations) - - @mock.patch("requests.get") - def test_pypi_timeout_warn(self, mock_get): - """Tests the PyPi connection and simulates a request timeout, which should - return in an addiional warning in the linting""" - # Define the behaviour of the request get mock - mock_get.side_effect = requests.exceptions.Timeout() - # Now do the test - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.files = ["environment.yml"] - lint_obj.pipeline_name = "tools" - lint_obj.config["manifest.version"] = "0.4" - lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.5"]}]} - lint_obj.check_conda_env_yaml() - expectations = {"failed": 0, "warned": 1, "passed": 2} - self.assess_lint_status(lint_obj, **expectations) - - @mock.patch("requests.get") - def test_pypi_connection_error_warn(self, mock_get): - """Tests the PyPi connection and simulates a connection error, which should - result in an additional warning, as we cannot test if dependent module is latest""" - # Define the behaviour of the request get mock 
- mock_get.side_effect = requests.exceptions.ConnectionError() - # Now do the test - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.files = ["environment.yml"] - lint_obj.pipeline_name = "tools" - lint_obj.config["manifest.version"] = "0.4" - lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.5"]}]} - lint_obj.check_conda_env_yaml() - expectations = {"failed": 0, "warned": 1, "passed": 2} - self.assess_lint_status(lint_obj, **expectations) - - def test_pip_dependency_fail(self): - """ Tests the PyPi API package information query """ - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.files = ["environment.yml"] - lint_obj.pipeline_name = "tools" - lint_obj.config["manifest.version"] = "0.4" - lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["notpresent==1.5"]}]} - lint_obj.check_conda_env_yaml() - expectations = {"failed": 1, "warned": 0, "passed": 2} - self.assess_lint_status(lint_obj, **expectations) - - def test_conda_dependency_fails(self): - """Tests that linting fails, if conda dependency - package version is not available on Anaconda. + def setUp(self): + """Function that runs at start of tests for common resources + + Use nf_core.create() to make a pipeline that we can use for testing """ - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.files = ["environment.yml"] - lint_obj.pipeline_name = "tools" - lint_obj.config["manifest.version"] = "0.4" - lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": ["openjdk=0.0.0"]} - lint_obj.check_conda_env_yaml() - expectations = {"failed": 1, "warned": 0, "passed": 2} - self.assess_lint_status(lint_obj, **expectations) - - def test_pip_dependency_fails(self): - """Tests that linting fails, if conda dependency - package version is not available on Anaconda. 
+ self.test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + self.create_obj = nf_core.create.PipelineCreate( + "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir + ) + self.create_obj.init_pipeline() + # Base lint object on this directory + self.lint_obj = nf_core.lint.PipelineLint(self.test_pipeline_dir) + + def _make_pipeline_copy(self): + """Make a copy of the test pipeline that can be edited + + Returns: Path to new temp directory with pipeline""" + new_pipeline = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + shutil.copytree(self.test_pipeline_dir, new_pipeline) + return new_pipeline + + ########################## + # CORE lint.py FUNCTIONS # + ########################## + def test_run_linting_function(self): + """Run the master run_linting() function in lint.py + + We don't really check any of this code as it's just a series of function calls + and we're testing each of those individually. This is mostly to check for syntax errors.""" + lint_obj = nf_core.lint.run_linting(self.test_pipeline_dir, False) + + def test_init_PipelineLint(self): + """Simply create a PipelineLint object. 
+ + This checks that all of the lint test imports are working properly, + we also check that the git sha was found and that the release flag works properly """ - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.files = ["environment.yml"] - lint_obj.pipeline_name = "tools" - lint_obj.config["manifest.version"] = "0.4" - lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==0.0"]}]} - lint_obj.check_conda_env_yaml() - expectations = {"failed": 1, "warned": 0, "passed": 2} - self.assess_lint_status(lint_obj, **expectations) - - def test_pipeline_name_pass(self): - """Tests pipeline name good pipeline example: lower case, no punctuation""" - # good_lint_obj = nf_core.lint.run_linting(PATH_WORKING_EXAMPLE) - good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - good_lint_obj.pipeline_name = "tools" - good_lint_obj.check_pipeline_name() - expectations = {"failed": 0, "warned": 0, "passed": 1} - self.assess_lint_status(good_lint_obj, **expectations) - - def test_pipeline_name_critical(self): - """Tests that warning is returned for pipeline not adhering to naming convention""" - critical_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - critical_lint_obj.pipeline_name = "Tools123" - critical_lint_obj.check_pipeline_name() - expectations = {"failed": 0, "warned": 1, "passed": 0} - self.assess_lint_status(critical_lint_obj, **expectations) + lint_obj = nf_core.lint.PipelineLint(self.test_pipeline_dir, True) + + # Tests that extra test is added for release mode + assert "version_consistency" in lint_obj.lint_tests + + # Tests that parent nf_core.utils.Pipeline class __init__() is working to find git hash + assert len(lint_obj.git_sha) > 0 + + def test_load_lint_config_not_found(self): + """Try to load a linting config file that doesn't exist""" + self.lint_obj._load_lint_config() + assert self.lint_obj.lint_config == {} + + def test_load_lint_config_ignore_all_tests(self): + """Try to load a 
linting config file that ignores all tests""" + # Make a copy of the test pipeline and create a lint object + new_pipeline = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + shutil.copytree(self.test_pipeline_dir, new_pipeline) + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + + # Make a config file listing all test names + config_dict = {test_name: False for test_name in lint_obj.lint_tests} + with open(os.path.join(new_pipeline, ".nf-core-lint.yml"), "w") as fh: + yaml.dump(config_dict, fh) + + # Load the new lint config file and check + lint_obj._load_lint_config() + assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) + + # Try running linting and make sure that all tests are ignored + lint_obj._lint_pipeline() + assert len(lint_obj.passed) == 0 + assert len(lint_obj.warned) == 0 + assert len(lint_obj.failed) == 0 + assert len(lint_obj.ignored) == len(lint_obj.lint_tests) def test_json_output(self): """ @@ -549,41 +117,434 @@ def test_json_output(self): "has_tests_failed": false } """ - # Don't run testing, just fake some testing results - lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) - lint_obj.passed.append((1, "This test passed")) - lint_obj.passed.append((2, "This test also passed")) - lint_obj.warned.append((2, "This test gave a warning")) - tmpdir = tempfile.mkdtemp() - json_fn = os.path.join(tmpdir, "lint_results.json") - lint_obj.save_json_results(json_fn) + self.lint_obj.passed.append(("test_one", "This test passed")) + self.lint_obj.passed.append(("test_two", "This test also passed")) + self.lint_obj.warned.append(("test_three", "This test gave a warning")) + + # Make a temp dir for the JSON output + json_fn = os.path.join(tempfile.mkdtemp(), "lint_results.json") + self.lint_obj._save_json_results(json_fn) + + # Load created JSON file and check its contents with open(json_fn, "r") as fh: saved_json = json.load(fh) assert saved_json["num_tests_pass"] == 2 assert saved_json["num_tests_warned"] == 1 
+ assert saved_json["num_tests_ignored"] == 0 assert saved_json["num_tests_failed"] == 0 assert saved_json["has_tests_pass"] assert saved_json["has_tests_warned"] + assert not saved_json["has_tests_ignored"] assert not saved_json["has_tests_failed"] - def mock_gh_get_comments(**kwargs): - """ Helper function to emulate requests responses from the web """ - - class MockResponse: - def __init__(self, url): - self.status_code = 200 - self.url = url - - def json(self): - if self.url == "existing_comment": - return [ - { - "user": {"login": "github-actions[bot]"}, - "body": "\n#### `nf-core lint` overall result", - "url": "https://github.com", - } - ] - else: - return [] - - return MockResponse(kwargs["url"]) + def test_wrap_quotes(self): + md = self.lint_obj._wrap_quotes(["one", "two", "three"]) + assert md == "`one` or `two` or `three`" + + def test_strip_ansi_codes(self): + """Check that we can make rich text strings plain + + String prints ls examplefile.zip, where examplefile.zip is red bold text + """ + stripped = self.lint_obj._strip_ansi_codes("ls \x1b[00m\x1b[01;31mexamplefile.zip\x1b[00m\x1b[01;31m") + assert stripped == "ls examplefile.zip" + + def test_sphinx_rst_files(self): + """Check that we have .rst files for all lint module code, + and that there are no unexpected files (eg. 
deleted lint tests)""" + + docs_basedir = os.path.join( + os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "docs", "api", "_src", "lint_tests" + ) + + # Get list of existing .rst files + existing_docs = [] + for fn in os.listdir(docs_basedir): + if fnmatch.fnmatch(fn, "*.rst") and not fnmatch.fnmatch(fn, "index.rst"): + existing_docs.append(os.path.join(docs_basedir, fn)) + + # Check .rst files against each test name + lint_obj = nf_core.lint.PipelineLint("", True) + for test_name in lint_obj.lint_tests: + fn = os.path.join(docs_basedir, "{}.rst".format(test_name)) + assert os.path.exists(fn), "Could not find lint docs .rst file: {}".format(fn) + existing_docs.remove(fn) + + # Check that we have no remaining .rst files that we didn't expect + assert len(existing_docs) == 0, "Unexpected lint docs .rst files found: {}".format(", ".join(existing_docs)) + + ####################### + # SPECIFIC LINT TESTS # + ####################### + from lint.actions_awsfulltest import ( + test_actions_awsfulltest_warn, + test_actions_awsfulltest_pass, + test_actions_awsfulltest_fail, + ) + from lint.actions_awstest import test_actions_awstest_pass, test_actions_awstest_fail + from lint.files_exist import ( + test_files_exist_missing_config, + test_files_exist_missing_main, + test_files_exist_depreciated_file, + test_files_exist_pass, + ) + from lint.licence import test_licence_pass, test_licence_fail + from lint.actions_branch_protection import ( + test_actions_branch_protection_pass, + test_actions_branch_protection_fail, + test_actions_branch_protection_ignore, + ) + from lint.actions_ci import ( + test_actions_ci_pass, + test_actions_ci_fail_wrong_nf, + test_actions_ci_fail_wrong_docker_ver, + test_actions_ci_fail_wrong_trigger, + ) + + +# def test_critical_missingfiles_example(self): +# """Tests for missing nextflow config and main.nf files""" +# lint_obj = nf_core.lint.run_linting(PATH_CRITICAL_EXAMPLE, False) +# assert len(lint_obj.failed) == 1 +# +# def 
test_failing_missingfiles_example(self): +# """Tests for missing files like Dockerfile or LICENSE""" +# lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) +# lint_obj.check_files_exist() +# expectations = {"failed": 6, "warned": 2, "passed": 14} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_mit_licence_example_pass(self): +# """Tests that MIT test works with good MIT licences""" +# good_lint_obj = nf_core.lint.PipelineLint(PATH_CRITICAL_EXAMPLE) +# good_lint_obj.check_licence() +# expectations = {"failed": 0, "warned": 0, "passed": 1} +# self.assess_lint_status(good_lint_obj, **expectations) +# +# def test_mit_license_example_with_failed(self): +# """Tests that MIT test works with bad MIT licences""" +# bad_lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) +# bad_lint_obj.check_licence() +# expectations = {"failed": 1, "warned": 0, "passed": 0} +# self.assess_lint_status(bad_lint_obj, **expectations) +# +# def test_config_variable_example_pass(self): +# """Tests that config variable existence test works with good pipeline example""" +# good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# good_lint_obj.check_nextflow_config() +# expectations = {"failed": 0, "warned": 1, "passed": 34} +# self.assess_lint_status(good_lint_obj, **expectations) +# +# def test_config_variable_example_with_failed(self): +# """Tests that config variable existence test fails with bad pipeline example""" +# bad_lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) +# bad_lint_obj.check_nextflow_config() +# expectations = {"failed": 19, "warned": 6, "passed": 10} +# self.assess_lint_status(bad_lint_obj, **expectations) +# +# @pytest.mark.xfail(raises=AssertionError, strict=True) +# def test_config_variable_error(self): +# """Tests that config variable existence test falls over nicely with nextflow can't run""" +# bad_lint_obj = nf_core.lint.PipelineLint("/non/existant/path") +# bad_lint_obj.check_nextflow_config() +# +# def 
test_actions_wf_lint_pass(self): +# """Tests that linting for GitHub Actions linting wf works for a good example""" +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.check_actions_lint() +# expectations = {"failed": 0, "warned": 0, "passed": 3} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_actions_wf_lint_fail(self): +# """Tests that linting for GitHub Actions linting wf fails for a bad example""" +# lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) +# lint_obj.check_actions_lint() +# expectations = {"failed": 3, "warned": 0, "passed": 0} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_wrong_license_examples_with_failed(self): +# """Tests for checking the license test behavior""" +# for example in PATHS_WRONG_LICENSE_EXAMPLE: +# lint_obj = nf_core.lint.PipelineLint(example) +# lint_obj.check_licence() +# expectations = {"failed": 1, "warned": 0, "passed": 0} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_missing_license_example(self): +# """Tests for missing license behavior""" +# lint_obj = nf_core.lint.PipelineLint(PATH_MISSING_LICENSE_EXAMPLE) +# lint_obj.check_licence() +# expectations = {"failed": 1, "warned": 0, "passed": 0} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_readme_pass(self): +# """Tests that the pipeline README file checks work with a good example""" +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.minNextflowVersion = "20.04.0" +# lint_obj.files = ["environment.yml"] +# lint_obj.check_readme() +# expectations = {"failed": 0, "warned": 0, "passed": 2} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_readme_warn(self): +# """Tests that the pipeline README file checks fail """ +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.minNextflowVersion = "0.28.0" +# lint_obj.check_readme() +# expectations = {"failed": 1, "warned": 0, "passed": 0} +# 
self.assess_lint_status(lint_obj, **expectations) +# +# def test_readme_fail(self): +# """Tests that the pipeline README file checks give warnings with a bad example""" +# lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) +# lint_obj.files = ["environment.yml"] +# lint_obj.check_readme() +# expectations = {"failed": 0, "warned": 2, "passed": 0} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_dockerfile_pass(self): +# """Tests if a valid Dockerfile passes the lint checks""" +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.files = ["Dockerfile"] +# lint_obj.check_docker() +# expectations = {"failed": 0, "warned": 0, "passed": 1} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_version_consistency_pass(self): +# """Tests the workflow version and container version sucessfully""" +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.config["manifest.version"] = "0.4" +# lint_obj.config["process.container"] = "nfcore/tools:0.4" +# lint_obj.check_version_consistency() +# expectations = {"failed": 0, "warned": 0, "passed": 1} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_version_consistency_with_env_fail(self): +# """Tests the behaviour, when a git activity is a release +# and simulate wrong release tag""" +# os.environ["GITHUB_REF"] = "refs/tags/0.5" +# os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.config["manifest.version"] = "0.4" +# lint_obj.config["process.container"] = "nfcore/tools:0.4" +# lint_obj.check_version_consistency() +# expectations = {"failed": 1, "warned": 0, "passed": 0} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_version_consistency_with_numeric_fail(self): +# """Tests the behaviour, when a git activity is a release +# and simulate wrong release tag""" +# os.environ["GITHUB_REF"] = "refs/tags/0.5dev" +# 
os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.config["manifest.version"] = "0.4" +# lint_obj.config["process.container"] = "nfcore/tools:0.4" +# lint_obj.check_version_consistency() +# expectations = {"failed": 1, "warned": 0, "passed": 0} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_version_consistency_with_no_docker_version_fail(self): +# """Tests the behaviour, when a git activity is a release +# and simulate wrong missing docker version tag""" +# os.environ["GITHUB_REF"] = "refs/tags/0.4" +# os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.config["manifest.version"] = "0.4" +# lint_obj.config["process.container"] = "nfcore/tools" +# lint_obj.check_version_consistency() +# expectations = {"failed": 1, "warned": 0, "passed": 0} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_version_consistency_with_env_pass(self): +# """Tests the behaviour, when a git activity is a release +# and simulate correct release tag""" +# os.environ["GITHUB_REF"] = "refs/tags/0.4" +# os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.config["manifest.version"] = "0.4" +# lint_obj.config["process.container"] = "nfcore/tools:0.4" +# lint_obj.check_version_consistency() +# expectations = {"failed": 0, "warned": 0, "passed": 1} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_conda_env_pass(self): +# """ Tests the conda environment config checks with a working example """ +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.files = ["environment.yml"] +# with open(os.path.join(PATH_WORKING_EXAMPLE, "environment.yml"), "r") as fh: +# lint_obj.conda_config = yaml.safe_load(fh) +# lint_obj.pipeline_name = "tools" +# lint_obj.config["manifest.version"] = "0.4" +# 
lint_obj.check_conda_env_yaml() +# expectations = {"failed": 0, "warned": 4, "passed": 5} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_conda_env_fail(self): +# """ Tests the conda environment config fails with a bad example """ +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.files = ["environment.yml"] +# with open(os.path.join(PATH_WORKING_EXAMPLE, "environment.yml"), "r") as fh: +# lint_obj.conda_config = yaml.safe_load(fh) +# lint_obj.conda_config["dependencies"] = ["fastqc", "multiqc=0.9", "notapackaage=0.4"] +# lint_obj.pipeline_name = "not_tools" +# lint_obj.config["manifest.version"] = "0.23" +# lint_obj.check_conda_env_yaml() +# expectations = {"failed": 3, "warned": 1, "passed": 2} +# self.assess_lint_status(lint_obj, **expectations) +# +# @mock.patch("requests.get") +# @pytest.mark.xfail(raises=ValueError, strict=True) +# def test_conda_env_timeout(self, mock_get): +# """ Tests the conda environment handles API timeouts """ +# # Define the behaviour of the request get mock +# mock_get.side_effect = requests.exceptions.Timeout() +# # Now do the test +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.conda_config["channels"] = ["bioconda"] +# lint_obj.check_anaconda_package("multiqc=1.6") +# +# def test_conda_env_skip(self): +# """ Tests the conda environment config is skipped when not needed """ +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.check_conda_env_yaml() +# expectations = {"failed": 0, "warned": 0, "passed": 0} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_conda_dockerfile_pass(self): +# """ Tests the conda Dockerfile test works with a working example """ +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.version = "1.11" +# lint_obj.files = ["environment.yml", "Dockerfile"] +# with open(os.path.join(PATH_WORKING_EXAMPLE, "Dockerfile"), "r") as fh: +# lint_obj.dockerfile = fh.read().splitlines() +# 
lint_obj.conda_config["name"] = "nf-core-tools-0.4" +# lint_obj.check_conda_dockerfile() +# expectations = {"failed": 0, "warned": 0, "passed": 1} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_conda_dockerfile_fail(self): +# """ Tests the conda Dockerfile test fails with a bad example """ +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.version = "1.11" +# lint_obj.files = ["environment.yml", "Dockerfile"] +# lint_obj.conda_config["name"] = "nf-core-tools-0.4" +# lint_obj.dockerfile = ["fubar"] +# lint_obj.check_conda_dockerfile() +# expectations = {"failed": 5, "warned": 0, "passed": 0} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_conda_dockerfile_skip(self): +# """ Tests the conda Dockerfile test is skipped when not needed """ +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.check_conda_dockerfile() +# expectations = {"failed": 0, "warned": 0, "passed": 0} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_pip_no_version_fail(self): +# """ Tests the pip dependency version definition is present """ +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.files = ["environment.yml"] +# lint_obj.pipeline_name = "tools" +# lint_obj.config["manifest.version"] = "0.4" +# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc"]}]} +# lint_obj.check_conda_env_yaml() +# expectations = {"failed": 1, "warned": 0, "passed": 1} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_pip_package_not_latest_warn(self): +# """ Tests the pip dependency version definition is present """ +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.files = ["environment.yml"] +# lint_obj.pipeline_name = "tools" +# lint_obj.config["manifest.version"] = "0.4" +# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.4"]}]} +# lint_obj.check_conda_env_yaml() +# 
expectations = {"failed": 0, "warned": 1, "passed": 2} +# self.assess_lint_status(lint_obj, **expectations) +# +# @mock.patch("requests.get") +# def test_pypi_timeout_warn(self, mock_get): +# """Tests the PyPi connection and simulates a request timeout, which should +# result in an additional warning in the linting""" +# # Define the behaviour of the request get mock +# mock_get.side_effect = requests.exceptions.Timeout() +# # Now do the test +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.files = ["environment.yml"] +# lint_obj.pipeline_name = "tools" +# lint_obj.config["manifest.version"] = "0.4" +# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.5"]}]} +# lint_obj.check_conda_env_yaml() +# expectations = {"failed": 0, "warned": 1, "passed": 2} +# self.assess_lint_status(lint_obj, **expectations) +# +# @mock.patch("requests.get") +# def test_pypi_connection_error_warn(self, mock_get): +# """Tests the PyPi connection and simulates a connection error, which should +# result in an additional warning, as we cannot test if dependent module is latest""" +# # Define the behaviour of the request get mock +# mock_get.side_effect = requests.exceptions.ConnectionError() +# # Now do the test +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.files = ["environment.yml"] +# lint_obj.pipeline_name = "tools" +# lint_obj.config["manifest.version"] = "0.4" +# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.5"]}]} +# lint_obj.check_conda_env_yaml() +# expectations = {"failed": 0, "warned": 1, "passed": 2} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_pip_dependency_fail(self): +# """ Tests the PyPi API package information query """ +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.files = ["environment.yml"] +# lint_obj.pipeline_name = "tools" +# lint_obj.config["manifest.version"] = "0.4" +# 
lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["notpresent==1.5"]}]} +# lint_obj.check_conda_env_yaml() +# expectations = {"failed": 1, "warned": 0, "passed": 2} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_conda_dependency_fails(self): +# """Tests that linting fails, if conda dependency +# package version is not available on Anaconda. +# """ +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.files = ["environment.yml"] +# lint_obj.pipeline_name = "tools" +# lint_obj.config["manifest.version"] = "0.4" +# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": ["openjdk=0.0.0"]} +# lint_obj.check_conda_env_yaml() +# expectations = {"failed": 1, "warned": 0, "passed": 2} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_pip_dependency_fails(self): +# """Tests that linting fails, if pip dependency +# package version is not available on PyPI. +# """ +# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj.files = ["environment.yml"] +# lint_obj.pipeline_name = "tools" +# lint_obj.config["manifest.version"] = "0.4" +# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==0.0"]}]} +# lint_obj.check_conda_env_yaml() +# expectations = {"failed": 1, "warned": 0, "passed": 2} +# self.assess_lint_status(lint_obj, **expectations) +# +# def test_pipeline_name_pass(self): +# """Tests pipeline name good pipeline example: lower case, no punctuation""" +# # good_lint_obj = nf_core.lint.run_linting(PATH_WORKING_EXAMPLE) +# good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# good_lint_obj.pipeline_name = "tools" +# good_lint_obj.check_pipeline_name() +# expectations = {"failed": 0, "warned": 0, "passed": 1} +# self.assess_lint_status(good_lint_obj, **expectations) +# +# def test_pipeline_name_critical(self): +# """Tests that warning is returned for pipeline not adhering to naming convention""" +# 
critical_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# critical_lint_obj.pipeline_name = "Tools123" +# critical_lint_obj.check_pipeline_name() +# expectations = {"failed": 0, "warned": 1, "passed": 0} +# self.assess_lint_status(critical_lint_obj, **expectations) +# diff --git a/tests/test_utils.py b/tests/test_utils.py index b533abb7a1..ba983fc9e5 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -2,15 +2,30 @@ """ Tests covering for utility functions. """ +import nf_core.create import nf_core.utils import os +import tempfile import unittest class TestUtils(unittest.TestCase): """Class for utils tests""" + def setUp(self): + """Function that runs at start of tests for common resources + + Use nf_core.create() to make a pipeline that we can use for testing + """ + self.test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") + self.create_obj = nf_core.create.PipelineCreate( + "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir + ) + self.create_obj.init_pipeline() + # Base Pipeline object on this directory + self.pipeline_obj = nf_core.utils.Pipeline(self.test_pipeline_dir) + def test_check_if_outdated_1(self): current_version = "1.0" remote_version = "2.0" @@ -52,3 +67,28 @@ def test_rich_force_colours_true(self): os.environ.pop("FORCE_COLOR", None) os.environ.pop("PY_COLORS", None) assert nf_core.utils.rich_force_colors() is True + + def test_load_pipeline_config(self): + """Load the pipeline Nextflow config""" + self.pipeline_obj._load_pipeline_config() + assert self.pipeline_obj.nf_config["dag.enabled"] == "true" + + def test_load_conda_env(self): + """Load the pipeline Conda environment.yml file""" + self.pipeline_obj._load_conda_environment() + assert self.pipeline_obj.conda_config["channels"] == ["conda-forge", "bioconda", "defaults"] + + def test_list_files_git(self): + """Test listing pipeline files using `git ls`""" + self.pipeline_obj._list_files() + assert 
os.path.join(self.test_pipeline_dir, "main.nf") in self.pipeline_obj.files + + def test_list_files_no_git(self): + """Test listing pipeline files without `git-ls`""" + # Create directory with a test file + tmpdir = tempfile.mkdtemp() + tmp_fn = os.path.join(tmpdir, "testfile") + open(tmp_fn, "a").close() + pipeline_obj = nf_core.utils.Pipeline(tmpdir) + pipeline_obj._list_files() + assert tmp_fn in pipeline_obj.files