diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 00000000..ea27a584 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,27 @@ +{ + "name": "nfcore", + "image": "nfcore/gitpod:latest", + "remoteUser": "gitpod", + + // Configure tool-specific properties. + "customizations": { + // Configure properties specific to VS Code. + "vscode": { + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/opt/conda/bin/python", + "python.linting.enabled": true, + "python.linting.pylintEnabled": true, + "python.formatting.autopep8Path": "/opt/conda/bin/autopep8", + "python.formatting.yapfPath": "/opt/conda/bin/yapf", + "python.linting.flake8Path": "/opt/conda/bin/flake8", + "python.linting.pycodestylePath": "/opt/conda/bin/pycodestyle", + "python.linting.pydocstylePath": "/opt/conda/bin/pydocstyle", + "python.linting.pylintPath": "/opt/conda/bin/pylint" + }, + + // Add the IDs of extensions you want installed when the container is created. + "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"] + } + } +} diff --git a/.gitattributes b/.gitattributes index 050bb120..7a2dabc2 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,4 @@ *.config linguist-language=nextflow +*.nf.test linguist-language=nextflow modules/nf-core/** linguist-generated subworkflows/nf-core/** linguist-generated diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index de10fd6d..14713dbf 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -101,3 +101,19 @@ If you are using a new feature from core Nextflow, you may bump the minimum requ ### Images and figures For overview images and other documents we follow the nf-core [style guidelines and examples](https://nf-co.re/developers/design_guidelines). 
+ +## GitHub Codespaces + +This repo includes a devcontainer configuration which will create a GitHub Codespaces for Nextflow development! This is an online developer environment that runs in your browser, complete with VSCode and a terminal. + +To get started: + +- Open the repo in [Codespaces](https://github.com/nf-core/quantms/codespaces) +- Tools installed + - nf-core + - Nextflow + +Devcontainer specs: + +- [DevContainer config](.devcontainer/devcontainer.json) +- [Dockerfile](.devcontainer/Dockerfile) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 34a329f8..4ef83298 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -42,7 +42,7 @@ body: attributes: label: System information description: | - * Nextflow version _(eg. 21.10.3)_ + * Nextflow version _(eg. 22.10.1)_ * Hardware _(eg. HPC, Desktop, Cloud)_ * Executor _(eg. slurm, local, awsbatch)_ * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter or Charliecloud)_ diff --git a/.github/workflows/awstest.yml b/.github/workflows/awstest.yml index 43d1949b..42958b76 100644 --- a/.github/workflows/awstest.yml +++ b/.github/workflows/awstest.yml @@ -13,7 +13,6 @@ jobs: # Launch workflow using Tower CLI tool action - name: Launch workflow via tower uses: nf-core/tower-action@v3 - with: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 859d6bb1..4ec31aee 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,6 +11,10 @@ on: env: NXF_ANSI_LOG: false +concurrency: + group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" + cancel-in-progress: true + jobs: test: env: @@ -28,7 +32,7 @@ jobs: matrix: # Nextflow versions NXF_VER: - - "21.10.3" + - "22.10.1" - "latest-everything" test_profile: ["test_lfq", "test_dia", "test_localize", "test_tmt"] 
exec_profile: ["docker", "conda"] @@ -41,7 +45,7 @@ jobs: exec_profile: "conda" steps: - name: Check out pipeline code - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml index 8bf2def0..20f90708 100644 --- a/.github/workflows/fix-linting.yml +++ b/.github/workflows/fix-linting.yml @@ -24,7 +24,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install Prettier run: npm install -g prettier @prettier/plugin-php @@ -34,9 +34,9 @@ jobs: id: prettier_status run: | if prettier --check ${GITHUB_WORKSPACE}; then - echo "::set-output name=result::pass" + echo "result=pass" >> $GITHUB_OUTPUT else - echo "::set-output name=result::fail" + echo "result=fail" >> $GITHUB_OUTPUT fi - name: Run 'prettier --write' diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 8a5ce69b..858d622e 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -4,6 +4,8 @@ name: nf-core linting # that the code meets the nf-core guidelines. 
on: push: + branches: + - dev pull_request: release: types: [published] @@ -12,9 +14,9 @@ jobs: EditorConfig: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install editorconfig-checker run: npm install -g editorconfig-checker @@ -25,9 +27,9 @@ jobs: Prettier: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install Prettier run: npm install -g prettier @@ -38,7 +40,7 @@ jobs: PythonBlack: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Check code lints with Black uses: psf/black@stable @@ -69,12 +71,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out pipeline code - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: "3.7" architecture: "x64" @@ -97,7 +99,7 @@ jobs: - name: Upload linting log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: linting-logs path: | diff --git a/.github/workflows/linting_comment.yml b/.github/workflows/linting_comment.yml index 04758f61..0bbcd30f 100644 --- a/.github/workflows/linting_comment.yml +++ b/.github/workflows/linting_comment.yml @@ -18,7 +18,7 @@ jobs: - name: Get PR number id: pr_number - run: echo "::set-output name=pr_number::$(cat linting-logs/PR_number.txt)" + run: echo "pr_number=$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT - name: Post PR comment uses: marocchino/sticky-pull-request-comment@v2 diff --git a/.prettierignore b/.prettierignore index eb74a574..437d763d 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,5 +1,6 @@ email_template.html adaptivecard.json +slackreport.json .nextflow* work/ data/ @@ -8,3 
+9,4 @@ results/ testing/ testing* *.pyc +bin/ diff --git a/CHANGELOG.md b/CHANGELOG.md index cb13fc4e..243ab242 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -60,7 +60,7 @@ The pipeline is using Nextflow DSL2, each process will be run with its own [Bioc | `thermorawfileparser` | 1.3.4 | | `comet` | 2021010 | | `msgf+` | 2022.01.07 | -| `openms` | 2.8.0 | +| `openms` | 2.9.0 | | `sdrf-pipelines` | 0.0.22 | | `percolator` | 3.5 | | `pmultiqc` | 0.0.11 | diff --git a/CITATION.cff b/CITATION.cff deleted file mode 100644 index 017666c0..00000000 --- a/CITATION.cff +++ /dev/null @@ -1,56 +0,0 @@ -cff-version: 1.2.0 -message: "If you use `nf-core tools` in your work, please cite the `nf-core` publication" -authors: - - family-names: Ewels - given-names: Philip - - family-names: Peltzer - given-names: Alexander - - family-names: Fillinger - given-names: Sven - - family-names: Patel - given-names: Harshil - - family-names: Alneberg - given-names: Johannes - - family-names: Wilm - given-names: Andreas - - family-names: Garcia - given-names: Maxime Ulysse - - family-names: Di Tommaso - given-names: Paolo - - family-names: Nahnsen - given-names: Sven -title: "The nf-core framework for community-curated bioinformatics pipelines." 
-version: 2.4.1 -doi: 10.1038/s41587-020-0439-x -date-released: 2022-05-16 -url: https://github.com/nf-core/tools -prefered-citation: - type: article - authors: - - family-names: Ewels - given-names: Philip - - family-names: Peltzer - given-names: Alexander - - family-names: Fillinger - given-names: Sven - - family-names: Patel - given-names: Harshil - - family-names: Alneberg - given-names: Johannes - - family-names: Wilm - given-names: Andreas - - family-names: Garcia - given-names: Maxime Ulysse - - family-names: Di Tommaso - given-names: Paolo - - family-names: Nahnsen - given-names: Sven - doi: 10.1038/s41587-020-0439-x - journal: nature biotechnology - start: 276 - end: 278 - title: "The nf-core framework for community-curated bioinformatics pipelines." - issue: 3 - volume: 38 - year: 2020 - url: https://dx.doi.org/10.1038/s41587-020-0439-x diff --git a/README.md b/README.md index 2ae33648..83fb1370 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/quantms/results)[![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A522.10.1-23aa62.svg)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) @@ -74,7 +74,7 @@ A graphical overview of suggested routes through the pipeline depending on conte ## Quick 
Start -1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.10.3`) +1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=22.10.1`) 2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_ diff --git a/assets/slackreport.json b/assets/slackreport.json new file mode 100644 index 00000000..043d02f2 --- /dev/null +++ b/assets/slackreport.json @@ -0,0 +1,34 @@ +{ + "attachments": [ + { + "fallback": "Plain-text summary of the attachment.", + "color": "<% if (success) { %>good<% } else { %>danger<%} %>", + "author_name": "nf-core/quantms v${version} - ${runName}", + "author_icon": "https://www.nextflow.io/docs/latest/_static/favicon.ico", + "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors<% } %>", + "fields": [ + { + "title": "Command used to launch the workflow", + "value": "```${commandLine}```", + "short": false + } + <% + if (!success) { %> + , + { + "title": "Full error message", + "value": "```${errorReport}```", + "short": false + }, + { + "title": "Pipeline configuration", + "value": "<% out << summary.collect{ k,v -> k == "hook_url" ? "_${k}_: (_hidden_)" : ( ( v.class.toString().contains('Path') || ( v.class.toString().contains('String') && v.contains('/') ) ) ? "_${k}_: `${v}`" : (v.class.toString().contains('DateTime') ?
("_${k}_: " + v.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM))) : "_${k}_: ${v}") ) }.join(",\n") %>", + "short": false + } + <% } + %> + ], + "footer": "Completed at <% out << dateComplete.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM)) %> (duration: ${duration})" + } + ] +} diff --git a/bin/diann_convert.py b/bin/diann_convert.py index 971524da..77b50fd0 100755 --- a/bin/diann_convert.py +++ b/bin/diann_convert.py @@ -2,10 +2,10 @@ import os import re - import click import numpy as np import pandas as pd +import logging as log from pyopenms import AASequence, FASTAFile, ModificationsDB CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) @@ -82,17 +82,31 @@ def convert(ctx, folder, dia_params, diann_version, charge, missed_cleavages, qv break f.close() - pg = pd.read_csv(pg_matrix, sep="\t", header=0, dtype="str") - pr = pd.read_csv(pr_matrix, sep="\t", header=0, dtype="str") - report = pd.read_csv(diann_report, sep="\t", header=0, dtype="str") - - col = ["Q.Value", "Precursor.Normalised", "RT", "RT.Start", "Global.Q.Value", "Lib.Q.Value", "PG.MaxLFQ"] - for i in col: - report.loc[:, i] = report.loc[:, i].astype("float", errors="ignore") + remain_cols = [ + "File.Name", + "Run", + "Protein.Group", + "Protein.Names", + "Protein.Ids", + "First.Protein.Description", + "PG.MaxLFQ", + "RT.Start", + "Global.Q.Value", + "Lib.Q.Value", + "PEP", + "Precursor.Normalised", + "Precursor.Id", + "Q.Value", + "Modified.Sequence", + "Stripped.Sequence", + "Precursor.Charge", + "Precursor.Quantity", + "Global.PG.Q.Value", + ] + report = pd.read_csv(diann_report, sep="\t", header=0, usecols=remain_cols) # filter based on qvalue parameter for downstream analysiss report = report[report["Q.Value"] < qvalue_threshold] - report["Calculate.Precursor.Mz"] = report.apply( lambda x: calculate_mz(x["Stripped.Sequence"], x["Precursor.Charge"]), axis=1 ) @@ -120,6 +134,7 @@ def 
convert(ctx, folder, dia_params, diann_version, charge, missed_cleavages, qv ["Protein.Names", "Modified.Sequence", "Precursor.Charge", "Precursor.Quantity", "File.Name", "Run"] ] out_msstats.columns = ["ProteinName", "PeptideSequence", "PrecursorCharge", "Intensity", "Reference", "Run"] + out_msstats = out_msstats[out_msstats["Intensity"] != 0] out_msstats.loc[:, "PeptideSequence"] = out_msstats.apply( lambda x: AASequence.fromString(x["PeptideSequence"]).toString(), axis=1 ) @@ -131,19 +146,19 @@ def convert(ctx, folder, dia_params, diann_version, charge, missed_cleavages, qv out_msstats[["Fraction", "BioReplicate", "Condition"]] = out_msstats.apply( lambda x: query_expdesign_value(x["Run"], f_table, s_DataFrame), axis=1, result_type="expand" ) + out_msstats.to_csv(os.path.splitext(os.path.basename(exp_design))[0] + "_msstats_in.csv", sep=",", index=False) # Convert to Triqler out_triqler = pd.DataFrame() out_triqler = out_msstats[["ProteinName", "PeptideSequence", "PrecursorCharge", "Intensity", "Run", "Condition"]] + del out_msstats out_triqler.columns = ["proteins", "peptide", "charge", "intensity", "run", "condition"] + out_triqler = out_triqler[out_triqler["intensity"] != 0] out_triqler.loc[:, "searchScore"] = report["Q.Value"] out_triqler.loc[:, "searchScore"] = 1 - out_triqler["searchScore"] - - out_msstats = out_msstats[out_msstats["Intensity"] != 0] - out_msstats.to_csv(os.path.splitext(os.path.basename(exp_design))[0] + "_msstats_in.csv", sep=",", index=False) - out_triqler = out_triqler[out_triqler["intensity"] != 0] out_triqler.to_csv(os.path.splitext(os.path.basename(exp_design))[0] + "_triqler_in.tsv", sep="\t", index=False) + del out_triqler # Convert to mzTab if diann_version_id == "1.8.1": @@ -168,9 +183,22 @@ def convert(ctx, folder, dia_params, diann_version, charge, missed_cleavages, qv ) (MTD, database) = mztab_MTD(index_ref, dia_params, fasta, charge, missed_cleavages) + pg = pd.read_csv( + pg_matrix, + sep="\t", + header=0, + ) PRH = 
mztab_PRH(report, pg, index_ref, database, fasta_df) + del pg + pr = pd.read_csv( + pr_matrix, + sep="\t", + header=0, + ) PEH = mztab_PEH(report, pr, precursor_list, index_ref, database) + del pr PSH = mztab_PSH(report, folder, database) + del report MTD.loc["", :] = "" PRH.loc[len(PRH) + 1, :] = "" PEH.loc[len(PEH) + 1, :] = "" @@ -594,7 +622,6 @@ def mztab_PSH(report, folder, database): [ "Stripped.Sequence", "Protein.Ids", - "Genes", "Q.Value", "RT.Start", "Precursor.Charge", @@ -611,7 +638,6 @@ def mztab_PSH(report, folder, database): out_mztab_PSH.columns = [ "sequence", "accession", - "Genes", "search_engine_score[1]", "retention_time", "charge", @@ -661,7 +687,7 @@ def mztab_PSH(report, folder, database): out_mztab_PSH.loc[:, "PSH"] = "PSM" index = out_mztab_PSH.loc[:, "PSH"] - out_mztab_PSH.drop(["PSH", "Genes", "ms_run"], axis=1, inplace=True) + out_mztab_PSH.drop(["PSH", "ms_run"], axis=1, inplace=True) out_mztab_PSH.insert(0, "PSH", index) out_mztab_PSH.fillna("null", inplace=True) new_cols = [col for col in out_mztab_PSH.columns if not col.startswith("opt_")] + [ @@ -769,7 +795,7 @@ def match_in_report(report, target, max, flag, level): PEH_params = [] for i in range(1, max + 1): match = result[result["study_variable"] == i] - PEH_params.extend([match["Precursor.Normalised"].mean(), "null", "null", "null", match["RT"].mean()]) + PEH_params.extend([match["Precursor.Normalised"].mean(), "null", "null", "null", match["RT.Start"].mean()]) return tuple(PEH_params) @@ -823,7 +849,7 @@ def PEH_match_report(report, target): match = report[report["precursor.Index"] == target] ## Score at peptide level: the minimum of the respective precursor q-values (minimum of Q.Value per group) search_score = match["Q.Value"].min() - time = match["RT"].mean() + time = match["RT.Start"].mean() q_score = match["Global.Q.Value"].values[0] if match["Global.Q.Value"].values.size > 0 else np.nan spec_e = match["Lib.Q.Value"].values[0] if match["Lib.Q.Value"].values.size > 0 else 
np.nan mz = match["Calculate.Precursor.Mz"].mean() diff --git a/bin/mzml_statistics.py b/bin/mzml_statistics.py index 8affa68f..79d64b7e 100755 --- a/bin/mzml_statistics.py +++ b/bin/mzml_statistics.py @@ -6,8 +6,7 @@ import sys -def mzml_dataframe(mzml_folder): - +def mzml_dataframe(mzml_path): file_columns = [ "SpectrumID", "MSLevel", @@ -17,8 +16,6 @@ def mzml_dataframe(mzml_folder): "Retention_Time", "Exp_Mass_To_Charge", ] - mzml_paths = list(i for i in os.listdir(mzml_folder) if i.endswith(".mzML")) - mzml_count = 1 def parse_mzml(file_name, file_columns): info = [] @@ -45,20 +42,19 @@ def parse_mzml(file_name, file_columns): return pd.DataFrame(info, columns=file_columns) - for i in mzml_paths: - mzml_df = parse_mzml(mzml_folder + i, file_columns) - mzml_df.to_csv( - "{}_mzml_info.tsv".format(os.path.splitext(os.path.split(i)[1])[0]), - mode="a", - sep="\t", - index=False, - header=True, - ) + mzml_df = parse_mzml(mzml_path, file_columns) + mzml_df.to_csv( + "{}_mzml_info.tsv".format(os.path.splitext(os.path.split(mzml_path)[1])[0]), + mode="a", + sep="\t", + index=False, + header=True, + ) def main(): - mzmls_path = sys.argv[1] - mzml_dataframe(mzmls_path) + mzml_path = sys.argv[1] + mzml_dataframe(mzml_path) if __name__ == "__main__": diff --git a/conf/modules.config b/conf/modules.config index 330aa481..b00b1a16 100644 --- a/conf/modules.config +++ b/conf/modules.config @@ -253,7 +253,7 @@ process { // DIA-NN withName: '.*:DIA:.*' { - ext.when = { !params.enable_conda } + ext.when = { !(session.config.conda && session.config.conda.enabled) } publishDir = [ path: { "${params.outdir}/${task.process.tokenize(':')[-1].toLowerCase()}" }, mode: params.publish_dir_mode, diff --git a/docs/usage.md b/docs/usage.md index 89a30613..84cc751d 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -45,9 +45,9 @@ nextflow pull nf-core/quantms It is a good idea to specify a pipeline version when running the pipeline on your data.
This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. -First, go to the [nf-core/quantms releases page](https://github.com/nf-core/quantms/releases) and find the latest version number - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. +First, go to the [nf-core/quantms releases page](https://github.com/nf-core/quantms/releases) and find the latest pipeline version - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. Of course, you can switch to another version by changing the number after the `-r` flag. -This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. +This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. For example, at the bottom of the MultiQC reports. ## Core Nextflow arguments @@ -57,7 +57,7 @@ This version number will be logged in reports when you run the pipeline, so that Use this parameter to choose a configuration profile. Profiles can give configuration presets for different compute environments. -Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Conda) - see below. 
When using Biocontainers, most of these software packaging methods pull Docker containers from quay.io e.g [FastQC](https://quay.io/repository/biocontainers/fastqc) except for Singularity which directly downloads Singularity images via https hosted by the [Galaxy project](https://depot.galaxyproject.org/singularity/) and Conda which downloads and installs software locally from [Bioconda](https://bioconda.github.io/). +Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Conda) - see below. > We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported. @@ -66,8 +66,11 @@ The pipeline also dynamically loads configurations from [https://github.com/nf-c Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important! They are loaded in sequence, so later profiles can overwrite earlier profiles. -If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended. +If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended, since it can lead to different results on different machines dependent on the computer environment.
+- `test` + - A profile with a complete configuration for automated testing + - Includes links to test data so needs no other parameters - `docker` - A generic configuration profile to be used with [Docker](https://docker.com/) - `singularity` @@ -80,9 +83,6 @@ If `-profile` is not specified, the pipeline will run locally and expect all sof - A generic configuration profile to be used with [Charliecloud](https://hpc.github.io/charliecloud/) - `conda` - A generic configuration profile to be used with [Conda](https://conda.io/docs/). Please only use Conda as a last resort i.e. when it's not possible to run the pipeline with Docker, Singularity, Podman, Shifter or Charliecloud. -- `test` - - A profile with a complete configuration for automated testing - - Includes links to test data so needs no other parameters ### `-resume` @@ -133,7 +133,19 @@ Work dir: Tip: you can replicate the issue by changing to the process work dir and entering the command `bash .command.run` ``` -To bypass this error you would need to find exactly which resources are set by the `STAR_ALIGN` process. The quickest way is to search for `process STAR_ALIGN` in the [nf-core/rnaseq Github repo](https://github.com/nf-core/rnaseq/search?q=process+STAR_ALIGN). We have standardised the structure of Nextflow DSL2 pipelines such that all module files will be present in the `modules/` directory and so based on the search results the file we want is `modules/nf-core/software/star/align/main.nf`. If you click on the link to that file you will notice that there is a `label` directive at the top of the module that is set to [`label process_high`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/modules/nf-core/software/star/align/main.nf#L9). 
The [Nextflow `label`](https://www.nextflow.io/docs/latest/process.html#label) directive allows us to organise workflow processes in separate groups which can be referenced in a configuration file to select and configure subset of processes having similar computing requirements. The default values for the `process_high` label are set in the pipeline's [`base.config`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L33-L37) which in this case is defined as 72GB. Providing you haven't set any other standard nf-core parameters to **cap** the [maximum resources](https://nf-co.re/usage/configuration#max-resources) used by the pipeline then we can try and bypass the `STAR_ALIGN` process failure by creating a custom config file that sets at least 72GB of memory, in this case increased to 100GB. The custom config below can then be provided to the pipeline via the [`-c`](#-c) parameter as highlighted in previous sections. +#### For beginners + +As a first step to bypass this error, you could try to increase the amount of CPUs, memory, and time for the whole pipeline. Therefore you can try to increase the resources for the parameters `--max_cpus`, `--max_memory`, and `--max_time`. Based on the error above, you have to increase the amount of memory. Therefore you can go to the [parameter documentation of rnaseq](https://nf-co.re/rnaseq/3.9/parameters) and scroll down to the `show hidden parameter` button to get the default value for `--max_memory`. In this case 128GB, you can then try to run your pipeline again with `--max_memory 200GB -resume` to skip all processes that were already calculated. If you cannot increase the resources of the complete pipeline, you can try to adapt the resource for a single process as mentioned below. + +#### Advanced option on process level + +To bypass this error you would need to find exactly which resources are set by the `STAR_ALIGN` process.
The quickest way is to search for `process STAR_ALIGN` in the [nf-core/rnaseq Github repo](https://github.com/nf-core/rnaseq/search?q=process+STAR_ALIGN). +We have standardised the structure of Nextflow DSL2 pipelines such that all module files will be present in the `modules/` directory and so, based on the search results, the file we want is `modules/nf-core/star/align/main.nf`. +If you click on the link to that file you will notice that there is a `label` directive at the top of the module that is set to [`label process_high`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/modules/nf-core/software/star/align/main.nf#L9). +The [Nextflow `label`](https://www.nextflow.io/docs/latest/process.html#label) directive allows us to organise workflow processes in separate groups which can be referenced in a configuration file to select and configure subset of processes having similar computing requirements. +The default values for the `process_high` label are set in the pipeline's [`base.config`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L33-L37) which in this case is defined as 72GB. +Providing you haven't set any other standard nf-core parameters to **cap** the [maximum resources](https://nf-co.re/usage/configuration#max-resources) used by the pipeline then we can try and bypass the `STAR_ALIGN` process failure by creating a custom config file that sets at least 72GB of memory, in this case increased to 100GB. +The custom config below can then be provided to the pipeline via the [`-c`](#-c) parameter as highlighted in previous sections. ```nextflow process { @@ -147,7 +159,7 @@ process { > > If you get a warning suggesting that the process selector isn't recognised check that the process name has been specified correctly. 
-### Updating containers +### Updating containers (advanced users) The [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html) implementation of this pipeline uses one container per process which makes it much easier to maintain and update software dependencies. If for some reason you need to use a different version of a particular tool with the pipeline then you just need to identify the `process` name and override the Nextflow `container` definition for that process using the `withName` declaration. For example, in the [nf-core/viralrecon](https://nf-co.re/viralrecon) pipeline a tool called [Pangolin](https://github.com/cov-lineages/pangolin) has been used during the COVID-19 pandemic to assign lineages to SARS-CoV-2 genome sequenced samples. Given that the lineage assignments change quite frequently it doesn't make sense to re-release the nf-core/viralrecon everytime a new version of Pangolin has been released. However, you can override the default container used by the pipeline by creating a custom config file and passing it as a command-line argument via `-c custom.config`. diff --git a/lib/NfcoreSchema.groovy b/lib/NfcoreSchema.groovy index b3d092f8..33cd4f6e 100755 --- a/lib/NfcoreSchema.groovy +++ b/lib/NfcoreSchema.groovy @@ -46,7 +46,6 @@ class NfcoreSchema { 'quiet', 'syslog', 'v', - 'version', // Options for `nextflow run` command 'ansi', diff --git a/lib/NfcoreTemplate.groovy b/lib/NfcoreTemplate.groovy index 27feb009..25a0a74a 100755 --- a/lib/NfcoreTemplate.groovy +++ b/lib/NfcoreTemplate.groovy @@ -32,6 +32,25 @@ class NfcoreTemplate { } } + // + // Generate version string + // + public static String version(workflow) { + String version_string = "" + + if (workflow.manifest.version) { + def prefix_v = workflow.manifest.version[0] != 'v' ? 
'v' : '' + version_string += "${prefix_v}${workflow.manifest.version}" + } + + if (workflow.commitId) { + def git_shortsha = workflow.commitId.substring(0, 7) + version_string += "-g${git_shortsha}" + } + + return version_string + } + // // Construct and send completion email // @@ -61,7 +80,7 @@ class NfcoreTemplate { misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp def email_fields = [:] - email_fields['version'] = workflow.manifest.version + email_fields['version'] = NfcoreTemplate.version(workflow) email_fields['runName'] = workflow.runName email_fields['success'] = workflow.success email_fields['dateComplete'] = workflow.complete @@ -146,10 +165,10 @@ class NfcoreTemplate { } // - // Construct and send adaptive card - // https://adaptivecards.io + // Construct and send a notification to a web server as JSON + // e.g. Microsoft Teams and Slack // - public static void adaptivecard(workflow, params, summary_params, projectDir, log) { + public static void IM_notification(workflow, params, summary_params, projectDir, log) { def hook_url = params.hook_url def summary = [:] @@ -170,7 +189,7 @@ class NfcoreTemplate { misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp def msg_fields = [:] - msg_fields['version'] = workflow.manifest.version + msg_fields['version'] = NfcoreTemplate.version(workflow) msg_fields['runName'] = workflow.runName msg_fields['success'] = workflow.success msg_fields['dateComplete'] = workflow.complete @@ -178,13 +197,16 @@ class NfcoreTemplate { msg_fields['exitStatus'] = workflow.exitStatus msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None') msg_fields['errorReport'] = (workflow.errorReport ?: 'None') - msg_fields['commandLine'] = workflow.commandLine + msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "") msg_fields['projectDir'] = workflow.projectDir msg_fields['summary'] = summary << misc_fields // Render the JSON template def engine = new 
groovy.text.GStringTemplateEngine() - def hf = new File("$projectDir/assets/adaptivecard.json") + // Different JSON depending on the service provider + // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format + def json_path = hook_url.contains("hooks.slack.com") ? "slackreport.json" : "adaptivecard.json" + def hf = new File("$projectDir/assets/${json_path}") def json_template = engine.createTemplate(hf).make(msg_fields) def json_message = json_template.toString() @@ -209,7 +231,7 @@ class NfcoreTemplate { if (workflow.stats.ignoredCount == 0) { log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" + log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" } } else { log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" @@ -297,6 +319,7 @@ class NfcoreTemplate { // public static String logo(workflow, monochrome_logs) { Map colors = logColours(monochrome_logs) + String workflow_version = NfcoreTemplate.version(workflow) String.format( """\n ${dashedLine(monochrome_logs)} @@ -305,7 +328,7 @@ class NfcoreTemplate { ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} ${colors.green}`._,._,\'${colors.reset} - ${colors.purple} ${workflow.manifest.name} v${workflow.manifest.version}${colors.reset} + ${colors.purple} ${workflow.manifest.name} ${workflow_version}${colors.reset} ${dashedLine(monochrome_logs)} """.stripIndent() ) diff --git a/lib/Utils.groovy b/lib/Utils.groovy index 7378f26f..8d030f4e 100755 --- a/lib/Utils.groovy +++ b/lib/Utils.groovy @@ 
-38,7 +38,7 @@ class Utils { " You will need to set-up the conda-forge and bioconda channels correctly.\n" + " Please refer to https://bioconda.github.io/\n" + " The observed channel order is \n" + - " ${channels}" + + " ${channels}\n" + " but the following channel order is required:\n" + " ${required_channels_in_order}\n" + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy index 07ca4cf4..db89bb98 100755 --- a/lib/WorkflowMain.groovy +++ b/lib/WorkflowMain.groovy @@ -19,7 +19,7 @@ class WorkflowMain { } // - // Print help to screen if required + // Generate help string // public static String help(workflow, params, log) { def command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker" @@ -32,7 +32,7 @@ class WorkflowMain { } // - // Print parameter summary log to screen + // Generate parameter summary log string // public static String paramsSummaryLog(workflow, params, log) { def summary_log = '' @@ -53,20 +53,26 @@ class WorkflowMain { System.exit(0) } - // Validate workflow parameters via the JSON schema - if (params.validate_params) { - NfcoreSchema.validateParameters(workflow, params, log) + // Print workflow version and exit on --version + if (params.version) { + String workflow_version = NfcoreTemplate.version(workflow) + log.info "${workflow.manifest.name} ${workflow_version}" + System.exit(0) } // Print parameter summary log to screen - log.info paramsSummaryLog(workflow, params, log) + // Validate workflow parameters via the JSON schema + if (params.validate_params) { + NfcoreSchema.validateParameters(workflow, params, log) + } + // Check that a -profile or Nextflow config has been provided to run the pipeline NfcoreTemplate.checkConfigProvided(workflow, log) // Check that conda channels are set-up correctly - if (params.enable_conda) { + if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() 
>= 1) { Utils.checkCondaChannels(log) } @@ -96,6 +102,6 @@ class WorkflowMain { // check fasta database has been provided if (!params.database) { log.error "Please provide an fasta database to the pipeline e.g. '--database *.fasta'" - } + } } } diff --git a/modules.json b/modules.json index 6949afbd..011ee5f9 100644 --- a/modules.json +++ b/modules.json @@ -7,7 +7,13 @@ "nf-core": { "custom/dumpsoftwareversions": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905" + "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c", + "installed_by": ["modules"] + }, + "multiqc": { + "branch": "master", + "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c", + "installed_by": ["modules"] } } } diff --git a/modules/local/diannconvert/main.nf b/modules/local/diannconvert/main.nf index b23534b9..fc13cf80 100644 --- a/modules/local/diannconvert/main.nf +++ b/modules/local/diannconvert/main.nf @@ -2,7 +2,7 @@ process DIANNCONVERT { tag "$meta.experiment_id" label 'process_medium' - conda (params.enable_conda ? "conda-forge::pandas_schema conda-forge::lzstring bioconda::pmultiqc=0.0.19" : null) + conda "conda-forge::pandas_schema conda-forge::lzstring bioconda::pmultiqc=0.0.19" if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/pmultiqc:0.0.19--pyhdfd78af_0" } else { diff --git a/modules/local/generate_diann_cfg/main.nf b/modules/local/generate_diann_cfg/main.nf index dd586f64..3625f5d9 100644 --- a/modules/local/generate_diann_cfg/main.nf +++ b/modules/local/generate_diann_cfg/main.nf @@ -2,7 +2,7 @@ process GENERATE_DIANN_CFG { tag "$meta.experiment_id" label 'process_low' - conda (params.enable_conda ? 
"conda-forge::pandas_schema bioconda::sdrf-pipelines=0.0.22" : null) + conda "conda-forge::pandas_schema bioconda::sdrf-pipelines=0.0.22" if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/sdrf-pipelines:0.0.22--pyhdfd78af_0" } else { diff --git a/modules/local/individual_final_analysis/meta.yml b/modules/local/individual_final_analysis/meta.yml index 1dd4f176..906c8615 100644 --- a/modules/local/individual_final_analysis/meta.yml +++ b/modules/local/individual_final_analysis/meta.yml @@ -13,11 +13,11 @@ input: - diann_log: type: file description: DIA-NN log file - pattern: "*.log" - - library: + pattern: "assemble_empirical_library.log" + - empirical_library: type: file - description: Silico-predicted spectral library by deep leaning predictor in DIA-NN - pattern: "*.tsv" + description: An empirical spectral library from the .quant files. + pattern: "empirical_library.tsv" - mzML: type: file description: Spectra file in mzML format diff --git a/modules/local/msstats/main.nf b/modules/local/msstats/main.nf index 994eaa90..5a7db1f3 100644 --- a/modules/local/msstats/main.nf +++ b/modules/local/msstats/main.nf @@ -2,7 +2,7 @@ process MSSTATS { tag "$msstats_csv_input.Name" label 'process_medium' - conda (params.enable_conda ? "bioconda::bioconductor-msstats=4.2.0" : null) + conda "bioconda::bioconductor-msstats=4.2.0" if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/bioconductor-msstats:4.2.0--r41h619a076_1" } else { diff --git a/modules/local/msstatstmt/main.nf b/modules/local/msstatstmt/main.nf index 10cf304e..a9fd33d0 100644 --- a/modules/local/msstatstmt/main.nf +++ b/modules/local/msstatstmt/main.nf @@ -2,7 +2,7 @@ process MSSTATSTMT { tag "$msstatstmt_csv_input.Name" label 'process_medium' - conda (params.enable_conda ? 
"bioconda::bioconductor-msstatstmt=2.2.0" : null) + conda "bioconda::bioconductor-msstatstmt=2.2.0" if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/bioconductor-msstatstmt:2.2.0--r41hdfd78af_0" } else { diff --git a/modules/local/mzmlstatistics/main.nf b/modules/local/mzmlstatistics/main.nf index 324cae72..07a887c3 100644 --- a/modules/local/mzmlstatistics/main.nf +++ b/modules/local/mzmlstatistics/main.nf @@ -3,7 +3,7 @@ process MZMLSTATISTICS { // TODO could be easily parallelized label 'process_single_thread' - conda (params.enable_conda ? "bioconda::pyopenms=2.8.0" : null) + conda "bioconda::pyopenms=2.8.0" if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/pyopenms:2.8.0--py38hd8d5640_1" } else { @@ -11,7 +11,7 @@ process MZMLSTATISTICS { } input: - path("out/*") + path mzml_path output: path "*_mzml_info.tsv", emit: mzml_statistics @@ -22,7 +22,7 @@ process MZMLSTATISTICS { def args = task.ext.args ?: '' """ - mzml_statistics.py "./out/" \\ + mzml_statistics.py "${mzml_path}" \\ |& tee mzml_statistics.log cat <<-END_VERSIONS > versions.yml diff --git a/modules/local/mzmlstatistics/meta.yml b/modules/local/mzmlstatistics/meta.yml index 807be93d..d1fab0da 100644 --- a/modules/local/mzmlstatistics/meta.yml +++ b/modules/local/mzmlstatistics/meta.yml @@ -10,9 +10,10 @@ tools: homepage: https://github.com/bigbio/quantms documentation: https://github.com/bigbio/quantms/tree/readthedocs input: - - mzmls: - type: dir - description: mzML files directory + - mzml: + type: file + description: Spectra file in mzML format + pattern: "*.mzML" output: - mzml_statistics: type: file diff --git a/modules/local/openms/consensusid/main.nf b/modules/local/openms/consensusid/main.nf index 4848f912..79890551 100644 --- a/modules/local/openms/consensusid/main.nf +++ 
b/modules/local/openms/consensusid/main.nf @@ -5,10 +5,10 @@ process CONSENSUSID { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file), val(qval_score) diff --git a/modules/local/openms/decoydatabase/main.nf b/modules/local/openms/decoydatabase/main.nf index caffe665..79379403 100644 --- a/modules/local/openms/decoydatabase/main.nf +++ b/modules/local/openms/decoydatabase/main.nf @@ -2,10 +2,10 @@ process DECOYDATABASE { label 'process_very_low' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: path(db_for_decoy) diff --git a/modules/local/openms/epifany/main.nf b/modules/local/openms/epifany/main.nf index 78e983c0..da70b78b 100644 --- a/modules/local/openms/epifany/main.nf +++ b/modules/local/openms/epifany/main.nf @@ -4,10 +4,10 @@ process EPIFANY { publishDir "${params.outdir}" - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(consus_file) diff --git a/modules/local/openms/extractpsmfeatures/main.nf b/modules/local/openms/extractpsmfeatures/main.nf index 27e1d84e..2bc825af 100644 --- a/modules/local/openms/extractpsmfeatures/main.nf +++ b/modules/local/openms/extractpsmfeatures/main.nf @@ -4,10 +4,10 @@ process EXTRACTPSMFEATURES { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file) diff --git a/modules/local/openms/falsediscoveryrate/main.nf b/modules/local/openms/falsediscoveryrate/main.nf index 167611cf..3272614f 100644 --- a/modules/local/openms/falsediscoveryrate/main.nf +++ b/modules/local/openms/falsediscoveryrate/main.nf @@ -4,10 +4,10 @@ process FALSEDISCOVERYRATE { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file) diff --git a/modules/local/openms/filemerge/main.nf b/modules/local/openms/filemerge/main.nf index 03cff0ea..9bd76150 100644 --- a/modules/local/openms/filemerge/main.nf +++ b/modules/local/openms/filemerge/main.nf @@ -3,10 +3,10 @@ process FILEMERGE { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: file(id_map) diff --git a/modules/local/openms/idconflictresolver/main.nf b/modules/local/openms/idconflictresolver/main.nf index 2dc03116..07d60fe6 100644 --- a/modules/local/openms/idconflictresolver/main.nf +++ b/modules/local/openms/idconflictresolver/main.nf @@ -2,10 +2,10 @@ process IDCONFLICTRESOLVER { label 'process_low' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: path consus_file diff --git a/modules/local/openms/idfilter/main.nf b/modules/local/openms/idfilter/main.nf index ae1ba9de..9df9ecbc 100644 --- a/modules/local/openms/idfilter/main.nf +++ b/modules/local/openms/idfilter/main.nf @@ -4,10 +4,10 @@ process IDFILTER { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file) diff --git a/modules/local/openms/idmapper/main.nf b/modules/local/openms/idmapper/main.nf index 4dd2c53b..23d560c5 100644 --- a/modules/local/openms/idmapper/main.nf +++ b/modules/local/openms/idmapper/main.nf @@ -4,10 +4,10 @@ process IDMAPPER { label 'process_medium' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file), path(map_file) diff --git a/modules/local/openms/idpep/main.nf b/modules/local/openms/idpep/main.nf index ba6e18c0..356708fb 100644 --- a/modules/local/openms/idpep/main.nf +++ b/modules/local/openms/idpep/main.nf @@ -2,10 +2,10 @@ process IDPEP { tag "$meta.mzml_id" label 'process_very_low' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file) diff --git a/modules/local/openms/idscoreswitcher/main.nf b/modules/local/openms/idscoreswitcher/main.nf index 5f38a1d5..1849a772 100644 --- a/modules/local/openms/idscoreswitcher/main.nf +++ b/modules/local/openms/idscoreswitcher/main.nf @@ -3,10 +3,10 @@ process IDSCORESWITCHER { label 'process_very_low' label 'process_single_thread' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file), val(new_score) diff --git a/modules/local/openms/indexpeptides/main.nf b/modules/local/openms/indexpeptides/main.nf index bcf64de2..fee38870 100644 --- a/modules/local/openms/indexpeptides/main.nf +++ b/modules/local/openms/indexpeptides/main.nf @@ -2,10 +2,10 @@ process INDEXPEPTIDES { tag "$meta.mzml_id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file), path(database) diff --git a/modules/local/openms/isobaricanalyzer/main.nf b/modules/local/openms/isobaricanalyzer/main.nf index 6045ef73..ac766ceb 100644 --- a/modules/local/openms/isobaricanalyzer/main.nf +++ b/modules/local/openms/isobaricanalyzer/main.nf @@ -2,10 +2,10 @@ process ISOBARICANALYZER { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(mzml_file) diff --git a/modules/local/openms/msstatsconverter/main.nf b/modules/local/openms/msstatsconverter/main.nf index a27ffdaf..331503f0 100644 --- a/modules/local/openms/msstatsconverter/main.nf +++ b/modules/local/openms/msstatsconverter/main.nf @@ -2,10 +2,10 @@ process MSSTATSCONVERTER { tag "$exp_file.Name" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: path consensusXML diff --git a/modules/local/openms/mzmlindexing/main.nf b/modules/local/openms/mzmlindexing/main.nf index 9eb49d3b..efec9e7c 100644 --- a/modules/local/openms/mzmlindexing/main.nf +++ b/modules/local/openms/mzmlindexing/main.nf @@ -2,10 +2,10 @@ process MZMLINDEXING { tag "$meta.mzml_id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(mzmlfile) @@ -20,7 +20,7 @@ process MZMLINDEXING { def prefix = task.ext.prefix ?: "${meta.mzml_id}" """ - mkdir out + mkdir -p out FileConverter -in ${mzmlfile} -out out/${mzmlfile.baseName}.mzML |& tee ${mzmlfile.baseName}_mzmlindexing.log cat <<-END_VERSIONS > versions.yml diff --git a/modules/local/openms/openmspeakpicker/main.nf b/modules/local/openms/openmspeakpicker/main.nf index 56eeb181..d449039d 100644 --- a/modules/local/openms/openmspeakpicker/main.nf +++ b/modules/local/openms/openmspeakpicker/main.nf @@ -2,10 +2,10 @@ process OPENMSPEAKPICKER { tag "$meta.mzml_id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(mzml_file) diff --git a/modules/local/openms/proteininference/main.nf b/modules/local/openms/proteininference/main.nf index ccabdd2f..9ec251f1 100644 --- a/modules/local/openms/proteininference/main.nf +++ b/modules/local/openms/proteininference/main.nf @@ -1,10 +1,10 @@ process PROTEININFERENCE { label 'process_medium' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(consus_file) diff --git a/modules/local/openms/proteinquantifier/main.nf b/modules/local/openms/proteinquantifier/main.nf index 774178ab..8a22b6ad 100644 --- a/modules/local/openms/proteinquantifier/main.nf +++ b/modules/local/openms/proteinquantifier/main.nf @@ -2,10 +2,10 @@ process PROTEINQUANTIFIER { tag "${pro_quant_exp.baseName}" label 'process_medium' - conda (params.enable_conda ? "openms::openms=3.0.0dev" : null) + conda "openms::openms=2.9.0" container "${ workflow.containerEngine == 'docker' && !task.ext.singularity_pull_docker_container ? - 'ghcr.io/openms/openms-executables:latest' : - 'https://ftp.pride.ebi.ac.uk/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' : + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' }" input: diff --git a/modules/local/openms/proteomicslfq/main.nf b/modules/local/openms/proteomicslfq/main.nf index 7619093a..7511d911 100644 --- a/modules/local/openms/proteomicslfq/main.nf +++ b/modules/local/openms/proteomicslfq/main.nf @@ -2,10 +2,10 @@ process PROTEOMICSLFQ { tag "${expdes.baseName}" label 'process_high' - conda (params.enable_conda ? "openms::openms=3.0.0dev" : null) + conda "openms::openms=2.9.0" container "${ workflow.containerEngine == 'docker' && !task.ext.singularity_pull_docker_container ? 
- 'ghcr.io/openms/openms-executables:latest' : - 'https://ftp.pride.ebi.ac.uk/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' : + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' }" input: diff --git a/modules/local/openms/thirdparty/luciphoradapter/main.nf b/modules/local/openms/thirdparty/luciphoradapter/main.nf index 76e4da45..af79ea45 100644 --- a/modules/local/openms/thirdparty/luciphoradapter/main.nf +++ b/modules/local/openms/thirdparty/luciphoradapter/main.nf @@ -2,9 +2,9 @@ process LUCIPHORADAPTER { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms-thirdparty=2.8.0" : null) + conda "bioconda::openms-thirdparty=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.8.0--h9ee0642_0' : + 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.9.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" input: @@ -21,7 +21,7 @@ process LUCIPHORADAPTER { luciphor_jar = '' if (workflow.containerEngine || (task.executor == "awsbatch")) { luciphor_jar = "-executable \$(find /usr/local/share/luciphor2-*/luciphor2.jar -maxdepth 0)" - } else if (params.enable_conda) { + } else if (session.config.conda && session.config.conda.enabled) { luciphor_jar = "-executable \$(find \$CONDA_PREFIX/share/luciphor2-*/luciphor2.jar -maxdepth 0)" } diff --git a/modules/local/openms/thirdparty/percolator/main.nf b/modules/local/openms/thirdparty/percolator/main.nf index 998119a6..cd124452 100644 --- a/modules/local/openms/thirdparty/percolator/main.nf +++ b/modules/local/openms/thirdparty/percolator/main.nf @@ -2,9 +2,9 @@ process PERCOLATOR { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? 
"bioconda::openms-thirdparty=2.8.0" : null) + conda "bioconda::openms-thirdparty=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.8.0--h9ee0642_0' : + 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.9.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" input: diff --git a/modules/local/openms/thirdparty/searchenginecomet/main.nf b/modules/local/openms/thirdparty/searchenginecomet/main.nf index f7c47e7e..dc25c561 100644 --- a/modules/local/openms/thirdparty/searchenginecomet/main.nf +++ b/modules/local/openms/thirdparty/searchenginecomet/main.nf @@ -2,9 +2,9 @@ process SEARCHENGINECOMET { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms-thirdparty=2.8.0" : null) + conda "bioconda::openms-thirdparty=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.8.0--h9ee0642_0' : + 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.9.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" input: diff --git a/modules/local/openms/thirdparty/searchenginemsgf/main.nf b/modules/local/openms/thirdparty/searchenginemsgf/main.nf index b67f478b..49a35429 100644 --- a/modules/local/openms/thirdparty/searchenginemsgf/main.nf +++ b/modules/local/openms/thirdparty/searchenginemsgf/main.nf @@ -2,9 +2,9 @@ process SEARCHENGINEMSGF { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms-thirdparty=2.8.0" : null) + conda "bioconda::openms-thirdparty=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.8.0--h9ee0642_0' : + 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.9.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" input: @@ -20,7 +20,7 @@ process SEARCHENGINEMSGF { msgf_jar = '' if (workflow.containerEngine || (task.executor == "awsbatch")) { msgf_jar = "-executable \$(find /usr/local/share/msgf_plus-*/MSGFPlus.jar -maxdepth 0)" - } else if (params.enable_conda) { + } else if (session.config.conda && session.config.conda.enabled) { msgf_jar = "-executable \$(find \$CONDA_PREFIX/share/msgf_plus-*/MSGFPlus.jar -maxdepth 0)" } diff --git a/modules/local/pmultiqc/main.nf b/modules/local/pmultiqc/main.nf index 7db9298c..0cbf679f 100644 --- a/modules/local/pmultiqc/main.nf +++ b/modules/local/pmultiqc/main.nf @@ -1,7 +1,7 @@ process PMULTIQC { label 'process_high' - conda (params.enable_conda ? "conda-forge::pandas_schema conda-forge::lzstring bioconda::pmultiqc=0.0.19" : null) + conda "conda-forge::pandas_schema conda-forge::lzstring bioconda::pmultiqc=0.0.19" if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/pmultiqc:0.0.19--pyhdfd78af_0" } else { diff --git a/modules/local/preprocess_expdesign.nf b/modules/local/preprocess_expdesign.nf index 2b35d94d..e1c9652a 100644 --- a/modules/local/preprocess_expdesign.nf +++ b/modules/local/preprocess_expdesign.nf @@ -4,8 +4,7 @@ // accept different file endings already? process PREPROCESS_EXPDESIGN { - conda (params.enable_conda ? 
"bioconda::sdrf-pipelines=0.0.22 conda-forge::pandas" : null) - + conda "bioconda::sdrf-pipelines=0.0.22 conda-forge::pandas" label 'process_very_low' label 'process_single_thread' tag "$design.Name" diff --git a/modules/local/samplesheet_check.nf b/modules/local/samplesheet_check.nf index b37fb965..f14188f2 100644 --- a/modules/local/samplesheet_check.nf +++ b/modules/local/samplesheet_check.nf @@ -1,7 +1,9 @@ process SAMPLESHEET_CHECK { - conda (params.enable_conda ? "bioconda::sdrf-pipelines=0.0.22" : null) + tag "$input_file" + label 'process_single' + conda "bioconda::sdrf-pipelines=0.0.22" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/sdrf-pipelines:0.0.22--pyhdfd78af_0' : 'quay.io/biocontainers/sdrf-pipelines:0.0.22--pyhdfd78af_0' }" @@ -15,6 +17,9 @@ process SAMPLESHEET_CHECK { path "${input_file}", emit: checked_file path "versions.yml", emit: versions + when: + task.ext.when == null || task.ext.when + script: // This script is bundled with the pipeline, in nf-core/quantms/bin/ // TODO validate experimental design file def args = task.ext.args ?: '' diff --git a/modules/local/sdrfparsing/main.nf b/modules/local/sdrfparsing/main.nf index 1dacccdd..d93b919b 100644 --- a/modules/local/sdrfparsing/main.nf +++ b/modules/local/sdrfparsing/main.nf @@ -2,7 +2,7 @@ process SDRFPARSING { tag "$sdrf.Name" label 'process_low' - conda (params.enable_conda ? "conda-forge::pandas_schema bioconda::sdrf-pipelines=0.0.22" : null) + conda "conda-forge::pandas_schema bioconda::sdrf-pipelines=0.0.22" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/sdrf-pipelines:0.0.22--pyhdfd78af_0' : 'quay.io/biocontainers/sdrf-pipelines:0.0.22--pyhdfd78af_0' }" diff --git a/modules/local/thermorawfileparser/main.nf b/modules/local/thermorawfileparser/main.nf index 5ddfcfdd..f31d5fdf 100644 --- a/modules/local/thermorawfileparser/main.nf +++ b/modules/local/thermorawfileparser/main.nf @@ -4,7 +4,7 @@ process THERMORAWFILEPARSER { label 'process_single_thread' label 'error_retry' - conda (params.enable_conda ? "conda-forge::mono bioconda::thermorawfileparser=1.3.4" : null) + conda "conda-forge::mono bioconda::thermorawfileparser=1.3.4" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/thermorawfileparser:1.3.4--ha8f3691_0' : 'quay.io/biocontainers/thermorawfileparser:1.3.4--ha8f3691_0' }" diff --git a/modules/nf-core/custom/dumpsoftwareversions/main.nf b/modules/nf-core/custom/dumpsoftwareversions/main.nf index cebb6e05..3df21765 100644 --- a/modules/nf-core/custom/dumpsoftwareversions/main.nf +++ b/modules/nf-core/custom/dumpsoftwareversions/main.nf @@ -2,7 +2,7 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { label 'process_single' // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container - conda (params.enable_conda ? 'bioconda::multiqc=1.13' : null) + conda "bioconda::multiqc=1.13" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' : 'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }" diff --git a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py old mode 100644 new mode 100755 index 787bdb7b..e55b8d43 --- a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py +++ b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py @@ -1,5 +1,9 @@ #!/usr/bin/env python + +"""Provide functions to merge multiple versions.yml files.""" + + import platform from textwrap import dedent @@ -7,6 +11,7 @@ def _make_versions_html(versions): + """Generate a tabular HTML output of all versions for MultiQC.""" html = [ dedent( """\\ @@ -45,47 +50,53 @@ def _make_versions_html(versions): return "\\n".join(html) -versions_this_module = {} -versions_this_module["${task.process}"] = { - "python": platform.python_version(), - "yaml": yaml.__version__, -} - -with open("$versions") as f: - versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module - -# aggregate versions by the module name (derived from fully-qualified process name) -versions_by_module = {} -for process, process_versions in versions_by_process.items(): - module = process.split(":")[-1] - try: - if versions_by_module[module] != process_versions: - raise AssertionError( - "We assume that software versions are the same between all modules. " - "If you see this error-message it means you discovered an edge-case " - "and should open an issue in nf-core/tools. 
" - ) - except KeyError: - versions_by_module[module] = process_versions - -versions_by_module["Workflow"] = { - "Nextflow": "$workflow.nextflow.version", - "$workflow.manifest.name": "$workflow.manifest.version", -} - -versions_mqc = { - "id": "software_versions", - "section_name": "${workflow.manifest.name} Software Versions", - "section_href": "https://github.com/${workflow.manifest.name}", - "plot_type": "html", - "description": "are collected at run time from the software output.", - "data": _make_versions_html(versions_by_module), -} - -with open("software_versions.yml", "w") as f: - yaml.dump(versions_by_module, f, default_flow_style=False) -with open("software_versions_mqc.yml", "w") as f: - yaml.dump(versions_mqc, f, default_flow_style=False) - -with open("versions.yml", "w") as f: - yaml.dump(versions_this_module, f, default_flow_style=False) +def main(): + """Load all version files and generate merged output.""" + versions_this_module = {} + versions_this_module["${task.process}"] = { + "python": platform.python_version(), + "yaml": yaml.__version__, + } + + with open("$versions") as f: + versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module + + # aggregate versions by the module name (derived from fully-qualified process name) + versions_by_module = {} + for process, process_versions in versions_by_process.items(): + module = process.split(":")[-1] + try: + if versions_by_module[module] != process_versions: + raise AssertionError( + "We assume that software versions are the same between all modules. " + "If you see this error-message it means you discovered an edge-case " + "and should open an issue in nf-core/tools. 
" + ) + except KeyError: + versions_by_module[module] = process_versions + + versions_by_module["Workflow"] = { + "Nextflow": "$workflow.nextflow.version", + "$workflow.manifest.name": "$workflow.manifest.version", + } + + versions_mqc = { + "id": "software_versions", + "section_name": "${workflow.manifest.name} Software Versions", + "section_href": "https://github.com/${workflow.manifest.name}", + "plot_type": "html", + "description": "are collected at run time from the software output.", + "data": _make_versions_html(versions_by_module), + } + + with open("software_versions.yml", "w") as f: + yaml.dump(versions_by_module, f, default_flow_style=False) + with open("software_versions_mqc.yml", "w") as f: + yaml.dump(versions_mqc, f, default_flow_style=False) + + with open("versions.yml", "w") as f: + yaml.dump(versions_this_module, f, default_flow_style=False) + + +if __name__ == "__main__": + main() diff --git a/modules/nf-core/multiqc/main.nf b/modules/nf-core/multiqc/main.nf index a8159a57..68f66bea 100644 --- a/modules/nf-core/multiqc/main.nf +++ b/modules/nf-core/multiqc/main.nf @@ -1,7 +1,7 @@ process MULTIQC { label 'process_single' - conda (params.enable_conda ? 'bioconda::multiqc=1.13' : null) + conda "bioconda::multiqc=1.13" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' : 'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }" diff --git a/nextflow.config b/nextflow.config index 5b429fd3..ae64982c 100644 --- a/nextflow.config +++ b/nextflow.config @@ -202,10 +202,10 @@ params { monochrome_logs = false hook_url = null help = false + version = false validate_params = true show_hidden_params = false schema_ignore_params = 'modules' - enable_conda = false singularity_pull_docker_container = false @@ -239,8 +239,8 @@ try { profiles { debug { process.beforeScript = 'echo $HOSTNAME' } - ebicluster{ - params.enable_conda = true + ebicluster { + conda.enabled = true docker.enabled = false singularity.enabled = false conda.createTimeout = '1 h' @@ -248,7 +248,7 @@ profiles { process.executor = 'lsf' } conda { - params.enable_conda = true + conda.enabled = true docker.enabled = false singularity.enabled = false podman.enabled = false @@ -257,7 +257,7 @@ profiles { conda.useMamba = true } mamba { - params.enable_conda = true + conda.enabled = true conda.useMamba = true docker.enabled = false singularity.enabled = false @@ -273,6 +273,9 @@ profiles { shifter.enabled = false charliecloud.enabled = false } + arm { + docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64' + } singularity { singularity.enabled = true singularity.autoMounts = true @@ -357,11 +360,11 @@ dag { manifest { name = 'nf-core/quantms' - author = 'Yasset Perez-Riverol' + author = """Yasset Perez-Riverol""" homePage = 'https://github.com/nf-core/quantms' - description = 'Quantitative Mass Spectrometry nf-core workflow' + description = """Quantitative Mass Spectrometry nf-core workflow""" mainScript = 'main.nf' - nextflowVersion = '!>=21.10.3' + nextflowVersion = '!>=22.10.1' version = '1.1dev' doi = '' } diff --git a/nextflow_schema.json b/nextflow_schema.json index 38fb6569..8470e3d0 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -49,6 +49,13 @@ "fa_icon": "fas 
fa-file-invoice", "default": "mzML", "help_text": "If the above [`--root_folder`](#root_folder) was given to load local input files, this overwrites the file type/extension of\nthe filename as specified in the SDRF/design. Usually used in case you have an mzML-converted version of the files already. Needs to be\none of 'mzML' or 'raw' (the letter cases should match your files exactly)." + }, + "acquisition_method": { + "type": "string", + "description": "Proteomics data acquisition method", + "default": "dda", + "enum": ["dda", "dia"], + "fa_icon": "far fa-list-ol" } } }, @@ -1127,12 +1134,11 @@ "fa_icon": "fas fa-question-circle", "hidden": true }, - "acquisition_method": { - "type": "string", - "description": "Proteomics data acquisition method", - "default": "dda", - "enum": ["dda", "dia"], - "fa_icon": "far fa-list-ol" + "version": { + "type": "boolean", + "description": "Display version and exit.", + "fa_icon": "fas fa-question-circle", + "hidden": true }, "publish_dir_mode": { "type": "string", @@ -1175,7 +1181,7 @@ "type": "string", "description": "Incoming hook URL for messaging service", "fa_icon": "fas fa-people-group", - "help_text": "Incoming hook URL for messaging service. Currently, only MS Teams is supported.", + "help_text": "Incoming hook URL for messaging service. Currently, MS Teams and Slack are supported.", "hidden": true }, "multiqc_config": { @@ -1222,12 +1228,6 @@ "hidden": true, "help_text": "By default, parameters set as _hidden_ in the schema are not shown on the command line when a user runs with `--help`. Specifying this option will tell the pipeline to show all parameters." }, - "enable_conda": { - "type": "boolean", - "description": "Run this workflow with Conda. 
You can also use '-profile conda' instead of providing this parameter.", - "hidden": true, - "fa_icon": "fas fa-bacon" - }, "singularity_pull_docker_container": { "type": "boolean", "description": "This parameter force singularity to pull the contain from docker instead of using the singularity image", diff --git a/subworkflows/local/create_input_channel.nf b/subworkflows/local/create_input_channel.nf index 9140db8d..dd4ceb71 100644 --- a/subworkflows/local/create_input_channel.nf +++ b/subworkflows/local/create_input_channel.nf @@ -163,7 +163,7 @@ def create_meta_channel(LinkedHashMap row, is_sdrf, enzymes, files, wrapper) { exit 1 } } - }else if(params.enable_conda){ + }else if(session.config.conda && session.config.conda.enabled){ log.error "File in DIA mode found in input design and conda profile was chosen. DIA-NN currently doesn't support conda! Exiting. Please use the docker/singularity profile with a container." exit 1 } diff --git a/subworkflows/local/file_preparation.nf b/subworkflows/local/file_preparation.nf index 64007c4f..b9e0df21 100644 --- a/subworkflows/local/file_preparation.nf +++ b/subworkflows/local/file_preparation.nf @@ -58,7 +58,7 @@ workflow FILE_PREPARATION { mzml: it[1] }.set{ ch_mzml } - MZMLSTATISTICS( ch_mzml.mzml.collect() ) + MZMLSTATISTICS( ch_mzml.mzml ) ch_statistics = ch_statistics.mix(MZMLSTATISTICS.out.mzml_statistics.collect()) ch_versions = ch_versions.mix(MZMLSTATISTICS.out.version) diff --git a/workflows/quantms.nf b/workflows/quantms.nf index 52685f8e..b9c51946 100644 --- a/workflows/quantms.nf +++ b/workflows/quantms.nf @@ -193,7 +193,6 @@ workflow QUANTMS { ch_multiqc_quantms_logo ) multiqc_report = SUMMARYPIPELINE.out.ch_pmultiqc_report.toList() - ch_versions = ch_versions.mix(SUMMARYPIPELINE.out.versions) } @@ -209,7 +208,7 @@ workflow.onComplete { } NfcoreTemplate.summary(workflow, params, log) if (params.hook_url) { - NfcoreTemplate.adaptivecard(workflow, params, summary_params, projectDir, log) + 
NfcoreTemplate.IM_notification(workflow, params, summary_params, projectDir, log) } }