From 84d48486a3cb35a4bd578b9b0897ae457e6d32b1 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 8 Dec 2022 13:13:00 +0000 Subject: [PATCH 01/24] Template update for nf-core/tools version 2.7.1 --- .devcontainer/devcontainer.json | 27 +++++++++++++ .gitattributes | 1 + .github/CONTRIBUTING.md | 16 ++++++++ .github/ISSUE_TEMPLATE/bug_report.yml | 2 +- .github/workflows/ci.yml | 8 +++- .github/workflows/fix-linting.yml | 6 +-- .github/workflows/linting.yml | 18 +++++---- .github/workflows/linting_comment.yml | 2 +- .prettierignore | 2 + CITATION.cff | 56 --------------------------- README.md | 4 +- assets/slackreport.json | 34 ++++++++++++++++ docs/usage.md | 24 +++++++----- lib/NfcoreSchema.groovy | 1 - lib/NfcoreTemplate.groovy | 41 +++++++++++++++----- lib/WorkflowMain.groovy | 18 ++++++--- modules.json | 9 +++-- modules/local/samplesheet_check.nf | 4 ++ nextflow.config | 12 ++++-- nextflow_schema.json | 8 +++- workflows/quantms.nf | 11 +++--- 21 files changed, 193 insertions(+), 111 deletions(-) create mode 100644 .devcontainer/devcontainer.json delete mode 100644 CITATION.cff create mode 100644 assets/slackreport.json diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 00000000..ea27a584 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,27 @@ +{ + "name": "nfcore", + "image": "nfcore/gitpod:latest", + "remoteUser": "gitpod", + + // Configure tool-specific properties. + "customizations": { + // Configure properties specific to VS Code. + "vscode": { + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/opt/conda/bin/python", + "python.linting.enabled": true, + "python.linting.pylintEnabled": true, + "python.formatting.autopep8Path": "/opt/conda/bin/autopep8", + "python.formatting.yapfPath": "/opt/conda/bin/yapf", + "python.linting.flake8Path": "/opt/conda/bin/flake8", + "python.linting.pycodestylePath": "/opt/conda/bin/pycodestyle", + "python.linting.pydocstylePath": "/opt/conda/bin/pydocstyle", + "python.linting.pylintPath": "/opt/conda/bin/pylint" + }, + + // Add the IDs of extensions you want installed when the container is created. + "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"] + } + } +} diff --git a/.gitattributes b/.gitattributes index 050bb120..7a2dabc2 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,4 @@ *.config linguist-language=nextflow +*.nf.test linguist-language=nextflow modules/nf-core/** linguist-generated subworkflows/nf-core/** linguist-generated diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index de10fd6d..14713dbf 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -101,3 +101,19 @@ If you are using a new feature from core Nextflow, you may bump the minimum requ ### Images and figures For overview images and other documents we follow the nf-core [style guidelines and examples](https://nf-co.re/developers/design_guidelines). + +## GitHub Codespaces + +This repo includes a devcontainer configuration which will create a GitHub Codespaces for Nextflow development! This is an online developer environment that runs in your browser, complete with VSCode and a terminal. 
+ +To get started: + +- Open the repo in [Codespaces](https://github.com/nf-core/quantms/codespaces) +- Tools installed + - nf-core + - Nextflow + +Devcontainer specs: + +- [DevContainer config](.devcontainer/devcontainer.json) +- [Dockerfile](.devcontainer/Dockerfile) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 34a329f8..4ef83298 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -42,7 +42,7 @@ body: attributes: label: System information description: | - * Nextflow version _(eg. 21.10.3)_ + * Nextflow version _(eg. 22.10.1)_ * Hardware _(eg. HPC, Desktop, Cloud)_ * Executor _(eg. slurm, local, awsbatch)_ * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter or Charliecloud)_ diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a8f883a3..e7ebd9c4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,6 +11,10 @@ on: env: NXF_ANSI_LOG: false +concurrency: + group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" + cancel-in-progress: true + jobs: test: name: Run pipeline with test data @@ -20,11 +24,11 @@ jobs: strategy: matrix: NXF_VER: - - "21.10.3" + - "22.10.1" - "latest-everything" steps: - name: Check out pipeline code - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml index 8bf2def0..4f709862 100644 --- a/.github/workflows/fix-linting.yml +++ b/.github/workflows/fix-linting.yml @@ -24,7 +24,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install Prettier run: npm install -g prettier @prettier/plugin-php @@ -34,9 +34,9 @@ jobs: id: prettier_status run: | if prettier --check ${GITHUB_WORKSPACE}; then - echo "::set-output name=result::pass" + echo "name=result::pass" >> $GITHUB_OUTPUT else - echo "::set-output name=result::fail" + echo "name=result::fail" >> $GITHUB_OUTPUT fi - name: Run 'prettier --write' diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 8a5ce69b..858d622e 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -4,6 +4,8 @@ name: nf-core linting # that the code meets the nf-core guidelines. 
on: push: + branches: + - dev pull_request: release: types: [published] @@ -12,9 +14,9 @@ jobs: EditorConfig: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install editorconfig-checker run: npm install -g editorconfig-checker @@ -25,9 +27,9 @@ jobs: Prettier: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install Prettier run: npm install -g prettier @@ -38,7 +40,7 @@ jobs: PythonBlack: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Check code lints with Black uses: psf/black@stable @@ -69,12 +71,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out pipeline code - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: "3.7" architecture: "x64" @@ -97,7 +99,7 @@ jobs: - name: Upload linting log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: linting-logs path: | diff --git a/.github/workflows/linting_comment.yml b/.github/workflows/linting_comment.yml index 04758f61..39635186 100644 --- a/.github/workflows/linting_comment.yml +++ b/.github/workflows/linting_comment.yml @@ -18,7 +18,7 @@ jobs: - name: Get PR number id: pr_number - run: echo "::set-output name=pr_number::$(cat linting-logs/PR_number.txt)" + run: echo "name=pr_number::$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT - name: Post PR comment uses: marocchino/sticky-pull-request-comment@v2 diff --git a/.prettierignore b/.prettierignore index eb74a574..437d763d 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,5 +1,6 @@ email_template.html adaptivecard.json +slackreport.json .nextflow* work/ data/ @@ -8,3 +9,4 @@ results/ testing/ testing* *.pyc +bin/ diff --git a/CITATION.cff b/CITATION.cff deleted file mode 100644 index 017666c0..00000000 --- a/CITATION.cff +++ /dev/null @@ -1,56 +0,0 @@ -cff-version: 1.2.0 -message: "If you use `nf-core tools` in your work, please cite the `nf-core` publication" -authors: - - family-names: Ewels - given-names: Philip - - family-names: Peltzer - given-names: Alexander - - family-names: Fillinger - given-names: Sven - - family-names: Patel - given-names: Harshil - - family-names: Alneberg - given-names: Johannes - - family-names: Wilm - given-names: Andreas - - family-names: Garcia - given-names: Maxime Ulysse - - family-names: Di Tommaso - given-names: Paolo - - family-names: Nahnsen - given-names: Sven -title: "The nf-core framework for community-curated bioinformatics pipelines." 
-version: 2.4.1 -doi: 10.1038/s41587-020-0439-x -date-released: 2022-05-16 -url: https://github.com/nf-core/tools -prefered-citation: - type: article - authors: - - family-names: Ewels - given-names: Philip - - family-names: Peltzer - given-names: Alexander - - family-names: Fillinger - given-names: Sven - - family-names: Patel - given-names: Harshil - - family-names: Alneberg - given-names: Johannes - - family-names: Wilm - given-names: Andreas - - family-names: Garcia - given-names: Maxime Ulysse - - family-names: Di Tommaso - given-names: Paolo - - family-names: Nahnsen - given-names: Sven - doi: 10.1038/s41587-020-0439-x - journal: nature biotechnology - start: 276 - end: 278 - title: "The nf-core framework for community-curated bioinformatics pipelines." - issue: 3 - volume: 38 - year: 2020 - url: https://dx.doi.org/10.1038/s41587-020-0439-x diff --git a/README.md b/README.md index 9889fb6b..e33034b3 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/quantms/results)[![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A522.10.1-23aa62.svg)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) @@ -31,7 +31,7 @@ On release, automated continuous integration tests run the pipeline on a full-si ## Quick Start -1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.10.3`) +1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=22.10.1`) 2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/) (you can follow [this tutorial](https://singularity-tutorial.github.io/01-installation/)), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(you can use [`Conda`](https://conda.io/miniconda.html) both to install Nextflow itself and also to manage software within pipelines. Please only use it within pipelines as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_. 
diff --git a/assets/slackreport.json b/assets/slackreport.json new file mode 100644 index 00000000..043d02f2 --- /dev/null +++ b/assets/slackreport.json @@ -0,0 +1,34 @@ +{ + "attachments": [ + { + "fallback": "Plain-text summary of the attachment.", + "color": "<% if (success) { %>good<% } else { %>danger<%} %>", + "author_name": "sanger-tol/readmapping v${version} - ${runName}", + "author_icon": "https://www.nextflow.io/docs/latest/_static/favicon.ico", + "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors<% } %>", + "fields": [ + { + "title": "Command used to launch the workflow", + "value": "```${commandLine}```", + "short": false + } + <% + if (!success) { %> + , + { + "title": "Full error message", + "value": "```${errorReport}```", + "short": false + }, + { + "title": "Pipeline configuration", + "value": "<% out << summary.collect{ k,v -> k == "hook_url" ? "_${k}_: (_hidden_)" : ( ( v.class.toString().contains('Path') || ( v.class.toString().contains('String') && v.contains('/') ) ) ? "_${k}_: `${v}`" : (v.class.toString().contains('DateTime') ? ("_${k}_: " + v.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM))) : "_${k}_: ${v}") ) }.join(",\n") %>", + "short": false + } + <% } + %> + ], + "footer": "Completed at <% out << dateComplete.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM)) %> (duration: ${duration})" + } + ] +} diff --git a/docs/usage.md b/docs/usage.md index 6eea03c6..eeec4b5f 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -83,9 +83,9 @@ nextflow pull nf-core/quantms It is a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. -First, go to the [nf-core/quantms releases page](https://github.com/nf-core/quantms/releases) and find the latest version number - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. +First, go to the [nf-core/quantms releases page](https://github.com/nf-core/quantms/releases) and find the latest pipeline version - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. Of course, you can switch to another version by changing the number after the `-r` flag. -This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. +This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. For example, at the bottom of the MultiQC reports. ## Core Nextflow arguments @@ -95,7 +95,7 @@ This version number will be logged in reports when you run the pipeline, so that Use this parameter to choose a configuration profile. Profiles can give configuration presets for different compute environments. -Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Conda) - see below. 
When using Biocontainers, most of these software packaging methods pull Docker containers from quay.io e.g [FastQC](https://quay.io/repository/biocontainers/fastqc) except for Singularity which directly downloads Singularity images via https hosted by the [Galaxy project](https://depot.galaxyproject.org/singularity/) and Conda which downloads and installs software locally from [Bioconda](https://bioconda.github.io/).
+Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Conda) - see below.

 > We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported.

@@ -104,8 +104,11 @@ The pipeline also dynamically loads configurations from [https://github.com/nf-c

 Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important! They are loaded in sequence, so later profiles can overwrite earlier profiles.

-If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended.
+If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended, since it can lead to different results on different machines dependent on the computer environment.

+- `test`
+  - A profile with a complete configuration for automated testing
+  - Includes links to test data so needs no other parameters
 - `docker`
   - A generic configuration profile to be used with [Docker](https://docker.com/)
 - `singularity`
@@ -118,9 +121,6 @@ If `-profile` is not specified, the pipeline will run locally and expect all sof
   - A generic configuration profile to be used with [Charliecloud](https://hpc.github.io/charliecloud/)
 - `conda`
   - A generic configuration profile to be used with [Conda](https://conda.io/docs/). Please only use Conda as a last resort i.e. when it's not possible to run the pipeline with Docker, Singularity, Podman, Shifter or Charliecloud.
-- `test`
-  - A profile with a complete configuration for automated testing
-  - Includes links to test data so needs no other parameters

 ### `-resume`

@@ -169,8 +169,14 @@ Work dir:
 Tip: you can replicate the issue by changing to the process work dir and entering the command `bash .command.run`
 ```

+#### For beginners
+
+As a first step to bypass this error, you can try to increase the amount of CPUs, memory, and time for the whole pipeline via the parameters `--max_cpus`, `--max_memory`, and `--max_time`. Based on the error above, you have to increase the amount of memory. To find the default value for `--max_memory`, go to the [parameter documentation of rnaseq](https://nf-co.re/rnaseq/3.9/parameters) and scroll down to the `show hidden parameter` button; in this case it is 128GB. You can then try to run your pipeline again with `--max_memory 200GB -resume` to skip all processes that were already calculated. If you cannot increase the resources for the complete pipeline, you can try to adapt the resources for a single process as mentioned below.
+
+#### Advanced option on process level
+
 To bypass this error you would need to find exactly which resources are set by the `STAR_ALIGN` process.
The quickest way is to search for `process STAR_ALIGN` in the [nf-core/rnaseq Github repo](https://github.com/nf-core/rnaseq/search?q=process+STAR_ALIGN). -We have standardised the structure of Nextflow DSL2 pipelines such that all module files will be present in the `modules/` directory and so, based on the search results, the file we want is `modules/nf-core/software/star/align/main.nf`. +We have standardised the structure of Nextflow DSL2 pipelines such that all module files will be present in the `modules/` directory and so, based on the search results, the file we want is `modules/nf-core/star/align/main.nf`. If you click on the link to that file you will notice that there is a `label` directive at the top of the module that is set to [`label process_high`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/modules/nf-core/software/star/align/main.nf#L9). The [Nextflow `label`](https://www.nextflow.io/docs/latest/process.html#label) directive allows us to organise workflow processes in separate groups which can be referenced in a configuration file to select and configure subset of processes having similar computing requirements. The default values for the `process_high` label are set in the pipeline's [`base.config`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L33-L37) which in this case is defined as 72GB. @@ -189,7 +195,7 @@ process { > > If you get a warning suggesting that the process selector isn't recognised check that the process name has been specified correctly. -### Updating containers +### Updating containers (advanced users) The [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html) implementation of this pipeline uses one container per process which makes it much easier to maintain and update software dependencies. If for some reason you need to use a different version of a particular tool with the pipeline then you just need to identify the `process` name and override the Nextflow `container` definition for that process using the `withName` declaration. For example, in the [nf-core/viralrecon](https://nf-co.re/viralrecon) pipeline a tool called [Pangolin](https://github.com/cov-lineages/pangolin) has been used during the COVID-19 pandemic to assign lineages to SARS-CoV-2 genome sequenced samples. Given that the lineage assignments change quite frequently it doesn't make sense to re-release the nf-core/viralrecon everytime a new version of Pangolin has been released. However, you can override the default container used by the pipeline by creating a custom config file and passing it as a command-line argument via `-c custom.config`. diff --git a/lib/NfcoreSchema.groovy b/lib/NfcoreSchema.groovy index b3d092f8..33cd4f6e 100755 --- a/lib/NfcoreSchema.groovy +++ b/lib/NfcoreSchema.groovy @@ -46,7 +46,6 @@ class NfcoreSchema { 'quiet', 'syslog', 'v', - 'version', // Options for `nextflow run` command 'ansi', diff --git a/lib/NfcoreTemplate.groovy b/lib/NfcoreTemplate.groovy index 27feb009..25a0a74a 100755 --- a/lib/NfcoreTemplate.groovy +++ b/lib/NfcoreTemplate.groovy @@ -32,6 +32,25 @@ class NfcoreTemplate { } } + // + // Generate version string + // + public static String version(workflow) { + String version_string = "" + + if (workflow.manifest.version) { + def prefix_v = workflow.manifest.version[0] != 'v' ? 
'v' : '' + version_string += "${prefix_v}${workflow.manifest.version}" + } + + if (workflow.commitId) { + def git_shortsha = workflow.commitId.substring(0, 7) + version_string += "-g${git_shortsha}" + } + + return version_string + } + // // Construct and send completion email // @@ -61,7 +80,7 @@ class NfcoreTemplate { misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp def email_fields = [:] - email_fields['version'] = workflow.manifest.version + email_fields['version'] = NfcoreTemplate.version(workflow) email_fields['runName'] = workflow.runName email_fields['success'] = workflow.success email_fields['dateComplete'] = workflow.complete @@ -146,10 +165,10 @@ class NfcoreTemplate { } // - // Construct and send adaptive card - // https://adaptivecards.io + // Construct and send a notification to a web server as JSON + // e.g. Microsoft Teams and Slack // - public static void adaptivecard(workflow, params, summary_params, projectDir, log) { + public static void IM_notification(workflow, params, summary_params, projectDir, log) { def hook_url = params.hook_url def summary = [:] @@ -170,7 +189,7 @@ class NfcoreTemplate { misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp def msg_fields = [:] - msg_fields['version'] = workflow.manifest.version + msg_fields['version'] = NfcoreTemplate.version(workflow) msg_fields['runName'] = workflow.runName msg_fields['success'] = workflow.success msg_fields['dateComplete'] = workflow.complete @@ -178,13 +197,16 @@ class NfcoreTemplate { msg_fields['exitStatus'] = workflow.exitStatus msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None') msg_fields['errorReport'] = (workflow.errorReport ?: 'None') - msg_fields['commandLine'] = workflow.commandLine + msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "") msg_fields['projectDir'] = workflow.projectDir msg_fields['summary'] = summary << misc_fields // Render the JSON template def engine = new groovy.text.GStringTemplateEngine() - def hf = new File("$projectDir/assets/adaptivecard.json") + // Different JSON depending on the service provider + // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format + def json_path = hook_url.contains("hooks.slack.com") ? 
"slackreport.json" : "adaptivecard.json" + def hf = new File("$projectDir/assets/${json_path}") def json_template = engine.createTemplate(hf).make(msg_fields) def json_message = json_template.toString() @@ -209,7 +231,7 @@ class NfcoreTemplate { if (workflow.stats.ignoredCount == 0) { log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" + log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" } } else { log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" @@ -297,6 +319,7 @@ class NfcoreTemplate { // public static String logo(workflow, monochrome_logs) { Map colors = logColours(monochrome_logs) + String workflow_version = NfcoreTemplate.version(workflow) String.format( """\n ${dashedLine(monochrome_logs)} @@ -305,7 +328,7 @@ class NfcoreTemplate { ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} ${colors.green}`._,._,\'${colors.reset} - ${colors.purple} ${workflow.manifest.name} v${workflow.manifest.version}${colors.reset} + ${colors.purple} ${workflow.manifest.name} ${workflow_version}${colors.reset} ${dashedLine(monochrome_logs)} """.stripIndent() ) diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy index e8775029..2a680123 100755 --- a/lib/WorkflowMain.groovy +++ b/lib/WorkflowMain.groovy @@ -19,7 +19,7 @@ class WorkflowMain { } // - // Print help to screen if required + // Generate help string // public static String help(workflow, params, log) { def command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker" @@ -32,7 +32,7 @@ class WorkflowMain { } // - // Print parameter summary log to screen + // Generate parameter summary log string // public static String paramsSummaryLog(workflow, params, log) { def summary_log = '' @@ -53,15 +53,21 @@ class WorkflowMain { System.exit(0) } - // Validate workflow parameters via the JSON schema - if (params.validate_params) { - NfcoreSchema.validateParameters(workflow, params, log) + // Print workflow version and exit on --version + if (params.version) { + String workflow_version = NfcoreTemplate.version(workflow) + log.info "${workflow.manifest.name} ${workflow_version}" + System.exit(0) } // Print parameter summary log to screen - log.info paramsSummaryLog(workflow, params, log) + // Validate workflow parameters via the JSON schema + if (params.validate_params) { + NfcoreSchema.validateParameters(workflow, params, log) + } + // Check that a -profile or Nextflow config has been provided to run the pipeline NfcoreTemplate.checkConfigProvided(workflow, log) diff --git a/modules.json b/modules.json index d5da720c..f82d1dc4 100644 --- a/modules.json +++ b/modules.json @@ -7,15 +7,18 @@ "nf-core": { "custom/dumpsoftwareversions": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905" + "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "installed_by": ["modules"] }, "fastqc": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905" + "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "installed_by": ["modules"] }, "multiqc": { "branch": "master", - 
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905" + "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "installed_by": ["modules"] } } } diff --git a/modules/local/samplesheet_check.nf b/modules/local/samplesheet_check.nf index 16662be6..48578864 100644 --- a/modules/local/samplesheet_check.nf +++ b/modules/local/samplesheet_check.nf @@ -1,5 +1,6 @@ process SAMPLESHEET_CHECK { tag "$samplesheet" + label 'process_single' conda (params.enable_conda ? "conda-forge::python=3.8.3" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? @@ -13,6 +14,9 @@ process SAMPLESHEET_CHECK { path '*.csv' , emit: csv path "versions.yml", emit: versions + when: + task.ext.when == null || task.ext.when + script: // This script is bundled with the pipeline, in nf-core/quantms/bin/ """ check_samplesheet.py \\ diff --git a/nextflow.config b/nextflow.config index 5c52c756..a273adec 100644 --- a/nextflow.config +++ b/nextflow.config @@ -35,6 +35,7 @@ params { monochrome_logs = false hook_url = null help = false + version = false validate_params = true show_hidden_params = false schema_ignore_params = 'genomes' @@ -81,6 +82,7 @@ profiles { debug { process.beforeScript = 'echo $HOSTNAME' } conda { params.enable_conda = true + conda.enabled = true docker.enabled = false singularity.enabled = false podman.enabled = false @@ -89,6 +91,7 @@ profiles { } mamba { params.enable_conda = true + conda.enabled = true conda.useMamba = true docker.enabled = false singularity.enabled = false @@ -104,6 +107,9 @@ profiles { shifter.enabled = false charliecloud.enabled = false } + arm { + docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64' + } singularity { singularity.enabled = true singularity.autoMounts = true @@ -185,11 +191,11 @@ dag { manifest { name = 'nf-core/quantms' - author = 'Yasset Perez-Riverol' + author = """Yasset Perez-Riverol""" homePage = 'https://github.com/nf-core/quantms' - description = 'Quantitative Mass Spectrometry nf-core workflow' + description = """Quantitative Mass Spectrometry nf-core workflow""" mainScript = 'main.nf' - nextflowVersion = '!>=21.10.3' + nextflowVersion = '!>=22.10.1' version = '1.1dev' doi = '' } diff --git a/nextflow_schema.json b/nextflow_schema.json index 2e636991..1f918ca1 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -176,6 +176,12 @@ "fa_icon": "fas fa-question-circle", "hidden": true }, + "version": { + "type": "boolean", + "description": "Display version and exit.", + "fa_icon": "fas fa-question-circle", + "hidden": true + }, "publish_dir_mode": { "type": "string", "default": "copy", @@ -217,7 +223,7 @@ "type": "string", "description": "Incoming hook URL for messaging service", "fa_icon": "fas fa-people-group", - "help_text": "Incoming hook URL for messaging service. Currently, only MS Teams is supported.", + "help_text": "Incoming hook URL for messaging service. 
Currently, MS Teams and Slack are supported.", "hidden": true }, "multiqc_config": { diff --git a/workflows/quantms.nf b/workflows/quantms.nf index fca4fb0b..73fb0e03 100644 --- a/workflows/quantms.nf +++ b/workflows/quantms.nf @@ -82,7 +82,7 @@ workflow QUANTMS { ch_versions = ch_versions.mix(FASTQC.out.versions.first()) CUSTOM_DUMPSOFTWAREVERSIONS ( - ch_versions.unique().collectFile(name: 'collated_versions.yml') + ch_versions.unique{ it.text }.collectFile(name: 'collated_versions.yml') ) // @@ -102,12 +102,11 @@ workflow QUANTMS { MULTIQC ( ch_multiqc_files.collect(), - ch_multiqc_config.collect().ifEmpty([]), - ch_multiqc_custom_config.collect().ifEmpty([]), - ch_multiqc_logo.collect().ifEmpty([]) + ch_multiqc_config.toList(), + ch_multiqc_custom_config.toList(), + ch_multiqc_logo.toList() ) multiqc_report = MULTIQC.out.report.toList() - ch_versions = ch_versions.mix(MULTIQC.out.versions) } /* @@ -122,7 +121,7 @@ workflow.onComplete { } NfcoreTemplate.summary(workflow, params, log) if (params.hook_url) { - NfcoreTemplate.adaptivecard(workflow, params, summary_params, projectDir, log) + NfcoreTemplate.IM_notification(workflow, params, summary_params, projectDir, log) } } From a16aac6cb036056fede2802ed003fc52517a0608 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Fri, 16 Dec 2022 10:05:40 +0000 Subject: [PATCH 02/24] Update main.nf --- modules/local/openms/mzmlindexing/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms/mzmlindexing/main.nf b/modules/local/openms/mzmlindexing/main.nf index 9eb49d3b..c463209d 100644 --- a/modules/local/openms/mzmlindexing/main.nf +++ b/modules/local/openms/mzmlindexing/main.nf @@ -20,7 +20,7 @@ process MZMLINDEXING { def prefix = task.ext.prefix ?: "${meta.mzml_id}" """ - mkdir out + mkdir -p out FileConverter -in ${mzmlfile} -out out/${mzmlfile.baseName}.mzML |& tee ${mzmlfile.baseName}_mzmlindexing.log cat <<-END_VERSIONS > versions.yml From 78212845b46ed4daf5df8a8e5677cc46d513b5fc Mon Sep 17 00:00:00 2001 From: WangHong007 <88552471+WangHong007@users.noreply.github.com> Date: Sun, 18 Dec 2022 20:40:48 +0800 Subject: [PATCH 03/24] Parallel mzml_statistics --- bin/mzml_statistics.py | 25 ++++++++----------- .../local/individual_final_analysis/meta.yml | 8 +++--- modules/local/mzmlstatistics/main.nf | 6 ++--- modules/local/mzmlstatistics/meta.yml | 7 +++--- subworkflows/local/file_preparation.nf | 2 +- 5 files changed, 23 insertions(+), 25 deletions(-) diff --git a/bin/mzml_statistics.py b/bin/mzml_statistics.py index 8affa68f..f00fd2ff 100755 --- a/bin/mzml_statistics.py +++ b/bin/mzml_statistics.py @@ -6,7 +6,7 @@ import sys -def mzml_dataframe(mzml_folder): +def mzml_dataframe(mzml_path): file_columns = [ "SpectrumID", @@ -17,8 +17,6 @@ def mzml_dataframe(mzml_folder): "Retention_Time", "Exp_Mass_To_Charge", ] - mzml_paths = list(i for i in os.listdir(mzml_folder) if i.endswith(".mzML")) - mzml_count = 1 def parse_mzml(file_name, file_columns): info = [] @@ -45,20 +43,19 @@ def parse_mzml(file_name, file_columns): return pd.DataFrame(info, columns=file_columns) - for i in mzml_paths: - mzml_df = parse_mzml(mzml_folder + i, file_columns) - mzml_df.to_csv( - "{}_mzml_info.tsv".format(os.path.splitext(os.path.split(i)[1])[0]), - mode="a", - sep="\t", - index=False, - header=True, - ) + mzml_df = parse_mzml(mzml_path, file_columns) + mzml_df.to_csv( + "{}_mzml_info.tsv".format(os.path.splitext(os.path.split(mzml_path)[1])[0]), + mode="a", + sep="\t", + index=False, + header=True, + ) def main(): - 
mzmls_path = sys.argv[1] - mzml_dataframe(mzmls_path) + mzml_path = sys.argv[1] + mzml_dataframe(mzml_path) if __name__ == "__main__": diff --git a/modules/local/individual_final_analysis/meta.yml b/modules/local/individual_final_analysis/meta.yml index 1dd4f176..906c8615 100644 --- a/modules/local/individual_final_analysis/meta.yml +++ b/modules/local/individual_final_analysis/meta.yml @@ -13,11 +13,11 @@ input: - diann_log: type: file description: DIA-NN log file - pattern: "*.log" - - library: + pattern: "assemble_empirical_library.log" + - empirical_library: type: file - description: Silico-predicted spectral library by deep leaning predictor in DIA-NN - pattern: "*.tsv" + description: An empirical spectral library from the .quant files. + pattern: "empirical_library.tsv" - mzML: type: file description: Spectra file in mzML format diff --git a/modules/local/mzmlstatistics/main.nf b/modules/local/mzmlstatistics/main.nf index 324cae72..dec0744e 100644 --- a/modules/local/mzmlstatistics/main.nf +++ b/modules/local/mzmlstatistics/main.nf @@ -9,9 +9,9 @@ process MZMLSTATISTICS { } else { container "quay.io/biocontainers/pyopenms:2.8.0--py38hd8d5640_1" } - + input: - path("out/*") + path mzml_path output: path "*_mzml_info.tsv", emit: mzml_statistics @@ -22,7 +22,7 @@ process MZMLSTATISTICS { def args = task.ext.args ?: '' """ - mzml_statistics.py "./out/" \\ + mzml_statistics.py "${mzml_path}" \\ |& tee mzml_statistics.log cat <<-END_VERSIONS > versions.yml diff --git a/modules/local/mzmlstatistics/meta.yml b/modules/local/mzmlstatistics/meta.yml index 807be93d..d1fab0da 100644 --- a/modules/local/mzmlstatistics/meta.yml +++ b/modules/local/mzmlstatistics/meta.yml @@ -10,9 +10,10 @@ tools: homepage: https://github.com/bigbio/quantms documentation: https://github.com/bigbio/quantms/tree/readthedocs input: - - mzmls: - type: dir - description: mzML files directory + - mzml: + type: file + description: Spectra file in mzML format + pattern: "*.mzML" output: - mzml_statistics: type: file diff --git a/subworkflows/local/file_preparation.nf b/subworkflows/local/file_preparation.nf index 64007c4f..b9e0df21 100644 --- a/subworkflows/local/file_preparation.nf +++ b/subworkflows/local/file_preparation.nf @@ -58,7 +58,7 @@ workflow FILE_PREPARATION { mzml: it[1] }.set{ ch_mzml } - MZMLSTATISTICS( ch_mzml.mzml.collect() ) + MZMLSTATISTICS( ch_mzml.mzml ) ch_statistics = ch_statistics.mix(MZMLSTATISTICS.out.mzml_statistics.collect()) ch_versions = ch_versions.mix(MZMLSTATISTICS.out.version) From 43132b5200fcdc0f213c472e600bc375baeaee5b Mon Sep 17 00:00:00 2001 From: WangHong007 <88552471+WangHong007@users.noreply.github.com> Date: Sun, 18 Dec 2022 20:44:47 +0800 Subject: [PATCH 04/24] Update main.nf --- modules/local/mzmlstatistics/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mzmlstatistics/main.nf b/modules/local/mzmlstatistics/main.nf index dec0744e..4d21af5c 100644 --- a/modules/local/mzmlstatistics/main.nf +++ b/modules/local/mzmlstatistics/main.nf @@ -9,7 +9,7 @@ process MZMLSTATISTICS { } else { container "quay.io/biocontainers/pyopenms:2.8.0--py38hd8d5640_1" } - + input: path mzml_path From 7e9deae5e0f810c678445869a69a078d39ba205d Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 19 Dec 2022 12:07:09 +0000 Subject: [PATCH 05/24] Template update for nf-core/tools version 2.7.2 --- .github/workflows/fix-linting.yml | 4 +- .github/workflows/linting_comment.yml | 2 +- lib/WorkflowMain.groovy | 2 +- modules.json | 6 +- modules/local/samplesheet_check.nf | 
2 +- .../custom/dumpsoftwareversions/main.nf | 2 +- .../templates/dumpsoftwareversions.py | 99 ++++++++++--------- modules/nf-core/fastqc/main.nf | 40 +++----- modules/nf-core/multiqc/main.nf | 2 +- nextflow.config | 3 - nextflow_schema.json | 6 -- workflows/quantms.nf | 2 +- 12 files changed, 82 insertions(+), 88 deletions(-) mode change 100644 => 100755 modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml index 4f709862..20f90708 100644 --- a/.github/workflows/fix-linting.yml +++ b/.github/workflows/fix-linting.yml @@ -34,9 +34,9 @@ jobs: id: prettier_status run: | if prettier --check ${GITHUB_WORKSPACE}; then - echo "name=result::pass" >> $GITHUB_OUTPUT + echo "result=pass" >> $GITHUB_OUTPUT else - echo "name=result::fail" >> $GITHUB_OUTPUT + echo "result=fail" >> $GITHUB_OUTPUT fi - name: Run 'prettier --write' diff --git a/.github/workflows/linting_comment.yml b/.github/workflows/linting_comment.yml index 39635186..0bbcd30f 100644 --- a/.github/workflows/linting_comment.yml +++ b/.github/workflows/linting_comment.yml @@ -18,7 +18,7 @@ jobs: - name: Get PR number id: pr_number - run: echo "name=pr_number::$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT + run: echo "pr_number=$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT - name: Post PR comment uses: marocchino/sticky-pull-request-comment@v2 diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy index 2a680123..1d396a61 100755 --- a/lib/WorkflowMain.groovy +++ b/lib/WorkflowMain.groovy @@ -72,7 +72,7 @@ class WorkflowMain { NfcoreTemplate.checkConfigProvided(workflow, log) // Check that conda channels are set-up correctly - if (params.enable_conda) { + if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { Utils.checkCondaChannels(log) } diff --git a/modules.json b/modules.json index f82d1dc4..01d2039a 100644 --- a/modules.json +++ b/modules.json @@ -7,17 +7,17 @@ "nf-core": { "custom/dumpsoftwareversions": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c", "installed_by": ["modules"] }, "fastqc": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c", "installed_by": ["modules"] }, "multiqc": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c", "installed_by": ["modules"] } } diff --git a/modules/local/samplesheet_check.nf b/modules/local/samplesheet_check.nf index 48578864..72d941a3 100644 --- a/modules/local/samplesheet_check.nf +++ b/modules/local/samplesheet_check.nf @@ -2,7 +2,7 @@ process SAMPLESHEET_CHECK { tag "$samplesheet" label 'process_single' - conda (params.enable_conda ? "conda-forge::python=3.8.3" : null) + conda "conda-forge::python=3.8.3" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/python:3.8.3' : 'quay.io/biocontainers/python:3.8.3' }" diff --git a/modules/nf-core/custom/dumpsoftwareversions/main.nf b/modules/nf-core/custom/dumpsoftwareversions/main.nf index cebb6e05..3df21765 100644 --- a/modules/nf-core/custom/dumpsoftwareversions/main.nf +++ b/modules/nf-core/custom/dumpsoftwareversions/main.nf @@ -2,7 +2,7 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { label 'process_single' // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container - conda (params.enable_conda ? 'bioconda::multiqc=1.13' : null) + conda "bioconda::multiqc=1.13" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' : 'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }" diff --git a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py old mode 100644 new mode 100755 index 787bdb7b..e55b8d43 --- a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py +++ b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py @@ -1,5 +1,9 @@ #!/usr/bin/env python + +"""Provide functions to merge multiple versions.yml files.""" + + import platform from textwrap import dedent @@ -7,6 +11,7 @@ def _make_versions_html(versions): + """Generate a tabular HTML output of all versions for MultiQC.""" html = [ dedent( """\\ @@ -45,47 +50,53 @@ def _make_versions_html(versions): return "\\n".join(html) -versions_this_module = {} -versions_this_module["${task.process}"] = { - "python": platform.python_version(), - "yaml": yaml.__version__, -} - -with open("$versions") as f: - versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module - -# aggregate versions by the module name (derived from fully-qualified process name) -versions_by_module = {} -for process, process_versions in versions_by_process.items(): - module = process.split(":")[-1] - try: - if versions_by_module[module] != process_versions: - raise AssertionError( - "We assume that software versions are the same between all modules. " - "If you see this error-message it means you discovered an edge-case " - "and should open an issue in nf-core/tools. 
" - ) - except KeyError: - versions_by_module[module] = process_versions - -versions_by_module["Workflow"] = { - "Nextflow": "$workflow.nextflow.version", - "$workflow.manifest.name": "$workflow.manifest.version", -} - -versions_mqc = { - "id": "software_versions", - "section_name": "${workflow.manifest.name} Software Versions", - "section_href": "https://github.com/${workflow.manifest.name}", - "plot_type": "html", - "description": "are collected at run time from the software output.", - "data": _make_versions_html(versions_by_module), -} - -with open("software_versions.yml", "w") as f: - yaml.dump(versions_by_module, f, default_flow_style=False) -with open("software_versions_mqc.yml", "w") as f: - yaml.dump(versions_mqc, f, default_flow_style=False) - -with open("versions.yml", "w") as f: - yaml.dump(versions_this_module, f, default_flow_style=False) +def main(): + """Load all version files and generate merged output.""" + versions_this_module = {} + versions_this_module["${task.process}"] = { + "python": platform.python_version(), + "yaml": yaml.__version__, + } + + with open("$versions") as f: + versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module + + # aggregate versions by the module name (derived from fully-qualified process name) + versions_by_module = {} + for process, process_versions in versions_by_process.items(): + module = process.split(":")[-1] + try: + if versions_by_module[module] != process_versions: + raise AssertionError( + "We assume that software versions are the same between all modules. " + "If you see this error-message it means you discovered an edge-case " + "and should open an issue in nf-core/tools. " + ) + except KeyError: + versions_by_module[module] = process_versions + + versions_by_module["Workflow"] = { + "Nextflow": "$workflow.nextflow.version", + "$workflow.manifest.name": "$workflow.manifest.version", + } + + versions_mqc = { + "id": "software_versions", + "section_name": "${workflow.manifest.name} Software Versions", + "section_href": "https://github.com/${workflow.manifest.name}", + "plot_type": "html", + "description": "are collected at run time from the software output.", + "data": _make_versions_html(versions_by_module), + } + + with open("software_versions.yml", "w") as f: + yaml.dump(versions_by_module, f, default_flow_style=False) + with open("software_versions_mqc.yml", "w") as f: + yaml.dump(versions_mqc, f, default_flow_style=False) + + with open("versions.yml", "w") as f: + yaml.dump(versions_this_module, f, default_flow_style=False) + + +if __name__ == "__main__": + main() diff --git a/modules/nf-core/fastqc/main.nf b/modules/nf-core/fastqc/main.nf index 05730368..9ae58381 100644 --- a/modules/nf-core/fastqc/main.nf +++ b/modules/nf-core/fastqc/main.nf @@ -2,7 +2,7 @@ process FASTQC { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::fastqc=0.11.9" : null) + conda "bioconda::fastqc=0.11.9" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' : 'quay.io/biocontainers/fastqc:0.11.9--0' }" @@ -20,30 +20,22 @@ process FASTQC { script: def args = task.ext.args ?: '' - // Add soft-links to original FastQs for consistent naming in pipeline def prefix = task.ext.prefix ?: "${meta.id}" - if (meta.single_end) { - """ - [ ! 
-f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz - fastqc $args --threads $task.cpus ${prefix}.fastq.gz - - cat <<-END_VERSIONS > versions.yml - "${task.process}": - fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) - END_VERSIONS - """ - } else { - """ - [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz - [ ! -f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz - fastqc $args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz - - cat <<-END_VERSIONS > versions.yml - "${task.process}": - fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) - END_VERSIONS - """ - } + // Make list of old name and new name pairs to use for renaming in the bash while loop + def old_new_pairs = reads instanceof Path || reads.size() == 1 ? [[ reads, "${prefix}.${reads.extension}" ]] : reads.withIndex().collect { entry, index -> [ entry, "${prefix}_${index + 1}.${entry.extension}" ] } + def rename_to = old_new_pairs*.join(' ').join(' ') + def renamed_files = old_new_pairs.collect{ old_name, new_name -> new_name }.join(' ') + """ + printf "%s %s\\n" $rename_to | while read old_name new_name; do + [ -f "\${new_name}" ] || ln -s \$old_name \$new_name + done + fastqc $args --threads $task.cpus $renamed_files + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) + END_VERSIONS + """ stub: def prefix = task.ext.prefix ?: "${meta.id}" diff --git a/modules/nf-core/multiqc/main.nf b/modules/nf-core/multiqc/main.nf index a8159a57..68f66bea 100644 --- a/modules/nf-core/multiqc/main.nf +++ b/modules/nf-core/multiqc/main.nf @@ -1,7 +1,7 @@ process MULTIQC { label 'process_single' - conda (params.enable_conda ? 'bioconda::multiqc=1.13' : null) + conda "bioconda::multiqc=1.13" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' : 'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }" diff --git a/nextflow.config b/nextflow.config index a273adec..697d6bd3 100644 --- a/nextflow.config +++ b/nextflow.config @@ -39,7 +39,6 @@ params { validate_params = true show_hidden_params = false schema_ignore_params = 'genomes' - enable_conda = false // Config options @@ -81,7 +80,6 @@ try { profiles { debug { process.beforeScript = 'echo $HOSTNAME' } conda { - params.enable_conda = true conda.enabled = true docker.enabled = false singularity.enabled = false @@ -90,7 +88,6 @@ profiles { charliecloud.enabled = false } mamba { - params.enable_conda = true conda.enabled = true conda.useMamba = true docker.enabled = false diff --git a/nextflow_schema.json b/nextflow_schema.json index 1f918ca1..105463b2 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -263,12 +263,6 @@ "description": "Show all params when using `--help`", "hidden": true, "help_text": "By default, parameters set as _hidden_ in the schema are not shown on the command line when a user runs with `--help`. Specifying this option will tell the pipeline to show all parameters." - }, - "enable_conda": { - "type": "boolean", - "description": "Run this workflow with Conda. 
You can also use '-profile conda' instead of providing this parameter.", - "hidden": true, - "fa_icon": "fas fa-bacon" } } } diff --git a/workflows/quantms.nf b/workflows/quantms.nf index 73fb0e03..d4772113 100644 --- a/workflows/quantms.nf +++ b/workflows/quantms.nf @@ -82,7 +82,7 @@ workflow QUANTMS { ch_versions = ch_versions.mix(FASTQC.out.versions.first()) CUSTOM_DUMPSOFTWAREVERSIONS ( - ch_versions.unique{ it.text }.collectFile(name: 'collated_versions.yml') + ch_versions.unique().collectFile(name: 'collated_versions.yml') ) // From f905203fa6e8becd4145b93e0b83c22dc92b16d8 Mon Sep 17 00:00:00 2001 From: WangHong007 <88552471+WangHong007@users.noreply.github.com> Date: Tue, 3 Jan 2023 20:30:32 +0800 Subject: [PATCH 06/24] Reduce memory usage --- bin/diann_convert.py | 62 +++++++++++++++++++++++++++++++------------- 1 file changed, 44 insertions(+), 18 deletions(-) diff --git a/bin/diann_convert.py b/bin/diann_convert.py index 971524da..77b50fd0 100755 --- a/bin/diann_convert.py +++ b/bin/diann_convert.py @@ -2,10 +2,10 @@ import os import re - import click import numpy as np import pandas as pd +import logging as log from pyopenms import AASequence, FASTAFile, ModificationsDB CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) @@ -82,17 +82,31 @@ def convert(ctx, folder, dia_params, diann_version, charge, missed_cleavages, qv break f.close() - pg = pd.read_csv(pg_matrix, sep="\t", header=0, dtype="str") - pr = pd.read_csv(pr_matrix, sep="\t", header=0, dtype="str") - report = pd.read_csv(diann_report, sep="\t", header=0, dtype="str") - - col = ["Q.Value", "Precursor.Normalised", "RT", "RT.Start", "Global.Q.Value", "Lib.Q.Value", "PG.MaxLFQ"] - for i in col: - report.loc[:, i] = report.loc[:, i].astype("float", errors="ignore") + remain_cols = [ + "File.Name", + "Run", + "Protein.Group", + "Protein.Names", + "Protein.Ids", + "First.Protein.Description", + "PG.MaxLFQ", + "RT.Start", + "Global.Q.Value", + "Lib.Q.Value", + "PEP", + "Precursor.Normalised", + "Precursor.Id", + "Q.Value", + "Modified.Sequence", + "Stripped.Sequence", + "Precursor.Charge", + "Precursor.Quantity", + "Global.PG.Q.Value", + ] + report = pd.read_csv(diann_report, sep="\t", header=0, usecols=remain_cols) # filter based on qvalue parameter for downstream analysiss report = report[report["Q.Value"] < qvalue_threshold] - report["Calculate.Precursor.Mz"] = report.apply( lambda x: calculate_mz(x["Stripped.Sequence"], x["Precursor.Charge"]), axis=1 ) @@ -120,6 +134,7 @@ def convert(ctx, folder, dia_params, diann_version, charge, missed_cleavages, qv ["Protein.Names", "Modified.Sequence", "Precursor.Charge", "Precursor.Quantity", "File.Name", "Run"] ] out_msstats.columns = ["ProteinName", "PeptideSequence", "PrecursorCharge", "Intensity", "Reference", "Run"] + out_msstats = out_msstats[out_msstats["Intensity"] != 0] out_msstats.loc[:, "PeptideSequence"] = out_msstats.apply( lambda x: AASequence.fromString(x["PeptideSequence"]).toString(), axis=1 ) @@ -131,19 +146,19 @@ def convert(ctx, folder, dia_params, diann_version, charge, missed_cleavages, qv out_msstats[["Fraction", "BioReplicate", "Condition"]] = out_msstats.apply( lambda x: query_expdesign_value(x["Run"], f_table, s_DataFrame), axis=1, result_type="expand" ) + out_msstats.to_csv(os.path.splitext(os.path.basename(exp_design))[0] + "_msstats_in.csv", sep=",", index=False) # Convert to Triqler out_triqler = pd.DataFrame() out_triqler = out_msstats[["ProteinName", "PeptideSequence", "PrecursorCharge", "Intensity", "Run", "Condition"]] + del out_msstats 
out_triqler.columns = ["proteins", "peptide", "charge", "intensity", "run", "condition"] + out_triqler = out_triqler[out_triqler["intensity"] != 0] out_triqler.loc[:, "searchScore"] = report["Q.Value"] out_triqler.loc[:, "searchScore"] = 1 - out_triqler["searchScore"] - - out_msstats = out_msstats[out_msstats["Intensity"] != 0] - out_msstats.to_csv(os.path.splitext(os.path.basename(exp_design))[0] + "_msstats_in.csv", sep=",", index=False) - out_triqler = out_triqler[out_triqler["intensity"] != 0] out_triqler.to_csv(os.path.splitext(os.path.basename(exp_design))[0] + "_triqler_in.tsv", sep="\t", index=False) + del out_triqler # Convert to mzTab if diann_version_id == "1.8.1": @@ -168,9 +183,22 @@ def convert(ctx, folder, dia_params, diann_version, charge, missed_cleavages, qv ) (MTD, database) = mztab_MTD(index_ref, dia_params, fasta, charge, missed_cleavages) + pg = pd.read_csv( + pg_matrix, + sep="\t", + header=0, + ) PRH = mztab_PRH(report, pg, index_ref, database, fasta_df) + del pg + pr = pd.read_csv( + pr_matrix, + sep="\t", + header=0, + ) PEH = mztab_PEH(report, pr, precursor_list, index_ref, database) + del pr PSH = mztab_PSH(report, folder, database) + del report MTD.loc["", :] = "" PRH.loc[len(PRH) + 1, :] = "" PEH.loc[len(PEH) + 1, :] = "" @@ -594,7 +622,6 @@ def mztab_PSH(report, folder, database): [ "Stripped.Sequence", "Protein.Ids", - "Genes", "Q.Value", "RT.Start", "Precursor.Charge", @@ -611,7 +638,6 @@ def mztab_PSH(report, folder, database): out_mztab_PSH.columns = [ "sequence", "accession", - "Genes", "search_engine_score[1]", "retention_time", "charge", @@ -661,7 +687,7 @@ def mztab_PSH(report, folder, database): out_mztab_PSH.loc[:, "PSH"] = "PSM" index = out_mztab_PSH.loc[:, "PSH"] - out_mztab_PSH.drop(["PSH", "Genes", "ms_run"], axis=1, inplace=True) + out_mztab_PSH.drop(["PSH", "ms_run"], axis=1, inplace=True) out_mztab_PSH.insert(0, "PSH", index) out_mztab_PSH.fillna("null", inplace=True) new_cols = [col for col in out_mztab_PSH.columns if not col.startswith("opt_")] + [ @@ -769,7 +795,7 @@ def match_in_report(report, target, max, flag, level): PEH_params = [] for i in range(1, max + 1): match = result[result["study_variable"] == i] - PEH_params.extend([match["Precursor.Normalised"].mean(), "null", "null", "null", match["RT"].mean()]) + PEH_params.extend([match["Precursor.Normalised"].mean(), "null", "null", "null", match["RT.Start"].mean()]) return tuple(PEH_params) @@ -823,7 +849,7 @@ def PEH_match_report(report, target): match = report[report["precursor.Index"] == target] ## Score at peptide level: the minimum of the respective precursor q-values (minimum of Q.Value per group) search_score = match["Q.Value"].min() - time = match["RT"].mean() + time = match["RT.Start"].mean() q_score = match["Global.Q.Value"].values[0] if match["Global.Q.Value"].values.size > 0 else np.nan spec_e = match["Lib.Q.Value"].values[0] if match["Lib.Q.Value"].values.size > 0 else np.nan mz = match["Calculate.Precursor.Mz"].mean() From 3fd1e56f2dbbbdd7da8df223f0e2e35680f7ed78 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 07:54:06 +0000 Subject: [PATCH 07/24] openms moved to 2.9.0 --- CHANGELOG.md | 4 ++-- modules/local/openms/consensusid/main.nf | 6 +++--- modules/local/openms/decoydatabase/main.nf | 6 +++--- modules/local/openms/epifany/main.nf | 6 +++--- modules/local/openms/extractpsmfeatures/main.nf | 6 +++--- modules/local/openms/falsediscoveryrate/main.nf | 6 +++--- modules/local/openms/filemerge/main.nf | 6 +++--- 
modules/local/openms/idconflictresolver/main.nf | 6 +++--- modules/local/openms/idfilter/main.nf | 6 +++--- modules/local/openms/idmapper/main.nf | 6 +++--- modules/local/openms/idpep/main.nf | 6 +++--- modules/local/openms/idscoreswitcher/main.nf | 6 +++--- modules/local/openms/indexpeptides/main.nf | 6 +++--- modules/local/openms/isobaricanalyzer/main.nf | 6 +++--- modules/local/openms/msstatsconverter/main.nf | 6 +++--- modules/local/openms/mzmlindexing/main.nf | 6 +++--- modules/local/openms/openmspeakpicker/main.nf | 6 +++--- modules/local/openms/proteininference/main.nf | 6 +++--- modules/local/openms/proteinquantifier/main.nf | 2 +- modules/local/openms/proteomicslfq/main.nf | 2 +- modules/local/openms/thirdparty/luciphoradapter/main.nf | 4 ++-- modules/local/openms/thirdparty/percolator/main.nf | 4 ++-- modules/local/openms/thirdparty/searchenginecomet/main.nf | 4 ++-- modules/local/openms/thirdparty/searchenginemsgf/main.nf | 4 ++-- 24 files changed, 63 insertions(+), 63 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cb13fc4e..a30f4821 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -56,11 +56,11 @@ Initial release of nf-core/quantms, created with the [nf-core](https://nf-co.re/ The pipeline is using Nextflow DSL2, each process will be run with its own [Biocontainer](https://biocontainers.pro/#/registry). This means that on occasion it is entirely possible for the pipeline to be using different versions of the same tool. However, the overall software dependency changes compared to the last release have been listed below for reference. | Dependency | Version | -| --------------------- | ---------- | +| --------------------- |------------| | `thermorawfileparser` | 1.3.4 | | `comet` | 2021010 | | `msgf+` | 2022.01.07 | -| `openms` | 2.8.0 | +| `openms` | 2.9.0 | | `sdrf-pipelines` | 0.0.22 | | `percolator` | 3.5 | | `pmultiqc` | 0.0.11 | diff --git a/modules/local/openms/consensusid/main.nf b/modules/local/openms/consensusid/main.nf index 4848f912..33b82c4b 100644 --- a/modules/local/openms/consensusid/main.nf +++ b/modules/local/openms/consensusid/main.nf @@ -5,10 +5,10 @@ process CONSENSUSID { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file), val(qval_score) diff --git a/modules/local/openms/decoydatabase/main.nf b/modules/local/openms/decoydatabase/main.nf index caffe665..e4a4f63a 100644 --- a/modules/local/openms/decoydatabase/main.nf +++ b/modules/local/openms/decoydatabase/main.nf @@ -2,10 +2,10 @@ process DECOYDATABASE { label 'process_very_low' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: path(db_for_decoy) diff --git a/modules/local/openms/epifany/main.nf b/modules/local/openms/epifany/main.nf index 78e983c0..54b7ca49 100644 --- a/modules/local/openms/epifany/main.nf +++ b/modules/local/openms/epifany/main.nf @@ -4,10 +4,10 @@ process EPIFANY { publishDir "${params.outdir}" - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(consus_file) diff --git a/modules/local/openms/extractpsmfeatures/main.nf b/modules/local/openms/extractpsmfeatures/main.nf index 27e1d84e..9cebd9ea 100644 --- a/modules/local/openms/extractpsmfeatures/main.nf +++ b/modules/local/openms/extractpsmfeatures/main.nf @@ -4,10 +4,10 @@ process EXTRACTPSMFEATURES { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file) diff --git a/modules/local/openms/falsediscoveryrate/main.nf b/modules/local/openms/falsediscoveryrate/main.nf index 167611cf..cc160e52 100644 --- a/modules/local/openms/falsediscoveryrate/main.nf +++ b/modules/local/openms/falsediscoveryrate/main.nf @@ -4,10 +4,10 @@ process FALSEDISCOVERYRATE { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file) diff --git a/modules/local/openms/filemerge/main.nf b/modules/local/openms/filemerge/main.nf index 03cff0ea..6e8860b0 100644 --- a/modules/local/openms/filemerge/main.nf +++ b/modules/local/openms/filemerge/main.nf @@ -3,10 +3,10 @@ process FILEMERGE { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: file(id_map) diff --git a/modules/local/openms/idconflictresolver/main.nf b/modules/local/openms/idconflictresolver/main.nf index 2dc03116..cd71fc88 100644 --- a/modules/local/openms/idconflictresolver/main.nf +++ b/modules/local/openms/idconflictresolver/main.nf @@ -2,10 +2,10 @@ process IDCONFLICTRESOLVER { label 'process_low' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: path consus_file diff --git a/modules/local/openms/idfilter/main.nf b/modules/local/openms/idfilter/main.nf index ae1ba9de..221f10eb 100644 --- a/modules/local/openms/idfilter/main.nf +++ b/modules/local/openms/idfilter/main.nf @@ -4,10 +4,10 @@ process IDFILTER { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file) diff --git a/modules/local/openms/idmapper/main.nf b/modules/local/openms/idmapper/main.nf index 4dd2c53b..4674e086 100644 --- a/modules/local/openms/idmapper/main.nf +++ b/modules/local/openms/idmapper/main.nf @@ -4,10 +4,10 @@ process IDMAPPER { label 'process_medium' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file), path(map_file) diff --git a/modules/local/openms/idpep/main.nf b/modules/local/openms/idpep/main.nf index ba6e18c0..9e410793 100644 --- a/modules/local/openms/idpep/main.nf +++ b/modules/local/openms/idpep/main.nf @@ -2,10 +2,10 @@ process IDPEP { tag "$meta.mzml_id" label 'process_very_low' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file) diff --git a/modules/local/openms/idscoreswitcher/main.nf b/modules/local/openms/idscoreswitcher/main.nf index 5f38a1d5..f7493a63 100644 --- a/modules/local/openms/idscoreswitcher/main.nf +++ b/modules/local/openms/idscoreswitcher/main.nf @@ -3,10 +3,10 @@ process IDSCORESWITCHER { label 'process_very_low' label 'process_single_thread' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file), val(new_score) diff --git a/modules/local/openms/indexpeptides/main.nf b/modules/local/openms/indexpeptides/main.nf index bcf64de2..78a488af 100644 --- a/modules/local/openms/indexpeptides/main.nf +++ b/modules/local/openms/indexpeptides/main.nf @@ -2,10 +2,10 @@ process INDEXPEPTIDES { tag "$meta.mzml_id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(id_file), path(database) diff --git a/modules/local/openms/isobaricanalyzer/main.nf b/modules/local/openms/isobaricanalyzer/main.nf index 6045ef73..391fa46b 100644 --- a/modules/local/openms/isobaricanalyzer/main.nf +++ b/modules/local/openms/isobaricanalyzer/main.nf @@ -2,10 +2,10 @@ process ISOBARICANALYZER { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(mzml_file) diff --git a/modules/local/openms/msstatsconverter/main.nf b/modules/local/openms/msstatsconverter/main.nf index a27ffdaf..cf65dcec 100644 --- a/modules/local/openms/msstatsconverter/main.nf +++ b/modules/local/openms/msstatsconverter/main.nf @@ -2,10 +2,10 @@ process MSSTATSCONVERTER { tag "$exp_file.Name" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? 
"bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: path consensusXML diff --git a/modules/local/openms/mzmlindexing/main.nf b/modules/local/openms/mzmlindexing/main.nf index c463209d..c8ac3b40 100644 --- a/modules/local/openms/mzmlindexing/main.nf +++ b/modules/local/openms/mzmlindexing/main.nf @@ -2,10 +2,10 @@ process MZMLINDEXING { tag "$meta.mzml_id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(mzmlfile) diff --git a/modules/local/openms/openmspeakpicker/main.nf b/modules/local/openms/openmspeakpicker/main.nf index 56eeb181..ae1d6436 100644 --- a/modules/local/openms/openmspeakpicker/main.nf +++ b/modules/local/openms/openmspeakpicker/main.nf @@ -2,10 +2,10 @@ process OPENMSPEAKPICKER { tag "$meta.mzml_id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(mzml_file) diff --git a/modules/local/openms/proteininference/main.nf b/modules/local/openms/proteininference/main.nf index ccabdd2f..7a345d80 100644 --- a/modules/local/openms/proteininference/main.nf +++ b/modules/local/openms/proteininference/main.nf @@ -1,10 +1,10 @@ process PROTEININFERENCE { label 'process_medium' - conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' : - 'ghcr.io/openms/openms-executables:latest' }" + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" input: tuple val(meta), path(consus_file) diff --git a/modules/local/openms/proteinquantifier/main.nf b/modules/local/openms/proteinquantifier/main.nf index 774178ab..b10acf90 100644 --- a/modules/local/openms/proteinquantifier/main.nf +++ b/modules/local/openms/proteinquantifier/main.nf @@ -4,7 +4,7 @@ process PROTEINQUANTIFIER { conda (params.enable_conda ? "openms::openms=3.0.0dev" : null) container "${ workflow.containerEngine == 'docker' && !task.ext.singularity_pull_docker_container ? 
- 'ghcr.io/openms/openms-executables:latest' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' : 'https://ftp.pride.ebi.ac.uk/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' }" diff --git a/modules/local/openms/proteomicslfq/main.nf b/modules/local/openms/proteomicslfq/main.nf index 7619093a..637c6cf9 100644 --- a/modules/local/openms/proteomicslfq/main.nf +++ b/modules/local/openms/proteomicslfq/main.nf @@ -4,7 +4,7 @@ process PROTEOMICSLFQ { conda (params.enable_conda ? "openms::openms=3.0.0dev" : null) container "${ workflow.containerEngine == 'docker' && !task.ext.singularity_pull_docker_container ? - 'ghcr.io/openms/openms-executables:latest' : + 'quay.io/biocontainers/openms:2.9.0--h135471a_0' : 'https://ftp.pride.ebi.ac.uk/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' }" diff --git a/modules/local/openms/thirdparty/luciphoradapter/main.nf b/modules/local/openms/thirdparty/luciphoradapter/main.nf index 76e4da45..c3e6c0af 100644 --- a/modules/local/openms/thirdparty/luciphoradapter/main.nf +++ b/modules/local/openms/thirdparty/luciphoradapter/main.nf @@ -2,9 +2,9 @@ process LUCIPHORADAPTER { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms-thirdparty=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms-thirdparty=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.8.0--h9ee0642_0' : + 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.9.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" input: diff --git a/modules/local/openms/thirdparty/percolator/main.nf b/modules/local/openms/thirdparty/percolator/main.nf index 998119a6..2be83ff1 100644 --- a/modules/local/openms/thirdparty/percolator/main.nf +++ b/modules/local/openms/thirdparty/percolator/main.nf @@ -2,9 +2,9 @@ process PERCOLATOR { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms-thirdparty=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms-thirdparty=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.8.0--h9ee0642_0' : + 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.9.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" input: diff --git a/modules/local/openms/thirdparty/searchenginecomet/main.nf b/modules/local/openms/thirdparty/searchenginecomet/main.nf index f7c47e7e..39c92022 100644 --- a/modules/local/openms/thirdparty/searchenginecomet/main.nf +++ b/modules/local/openms/thirdparty/searchenginecomet/main.nf @@ -2,9 +2,9 @@ process SEARCHENGINECOMET { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms-thirdparty=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms-thirdparty=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.8.0--h9ee0642_0' : + 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.9.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" input: diff --git a/modules/local/openms/thirdparty/searchenginemsgf/main.nf b/modules/local/openms/thirdparty/searchenginemsgf/main.nf index b67f478b..a7fbc4e8 100644 --- a/modules/local/openms/thirdparty/searchenginemsgf/main.nf +++ b/modules/local/openms/thirdparty/searchenginemsgf/main.nf @@ -2,9 +2,9 @@ process SEARCHENGINEMSGF { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms-thirdparty=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms-thirdparty=2.9.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.8.0--h9ee0642_0' : + 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.9.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" input: From 6bff3a74d166dd9e8861ee1438297816f127a169 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 10:04:26 +0000 Subject: [PATCH 08/24] openms moved to 2.9.0 --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a30f4821..243ab242 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -56,7 +56,7 @@ Initial release of nf-core/quantms, created with the [nf-core](https://nf-co.re/ The pipeline is using Nextflow DSL2, each process will be run with its own [Biocontainer](https://biocontainers.pro/#/registry). This means that on occasion it is entirely possible for the pipeline to be using different versions of the same tool. However, the overall software dependency changes compared to the last release have been listed below for reference. | Dependency | Version | -| --------------------- |------------| +| --------------------- | ---------- | | `thermorawfileparser` | 1.3.4 | | `comet` | 2021010 | | `msgf+` | 2022.01.07 | From 0a37db13473df61f9802caa62bd7e4b3ca268fa0 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 10:32:58 +0000 Subject: [PATCH 09/24] black run --- bin/mzml_statistics.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bin/mzml_statistics.py b/bin/mzml_statistics.py index f00fd2ff..79d64b7e 100755 --- a/bin/mzml_statistics.py +++ b/bin/mzml_statistics.py @@ -7,7 +7,6 @@ def mzml_dataframe(mzml_path): - file_columns = [ "SpectrumID", "MSLevel", From 091f6d36e6d80c30ea6fab8a07d8b7da46c8eeb0 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 10:46:20 +0000 Subject: [PATCH 10/24] minor changes --- .github/ISSUE_TEMPLATE/bug_report.yml | 2 +- .github/workflows/awstest.yml | 1 - modules.json | 8 +++++++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 34a329f8..4ef83298 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -42,7 +42,7 @@ body: attributes: label: System information description: | - * Nextflow version _(eg. 21.10.3)_ + * Nextflow version _(eg. 22.10.1)_ * Hardware _(eg. HPC, Desktop, Cloud)_ * Executor _(eg. slurm, local, awsbatch)_ * Container engine: _(e.g. 
Docker, Singularity, Conda, Podman, Shifter or Charliecloud)_ diff --git a/.github/workflows/awstest.yml b/.github/workflows/awstest.yml index 43d1949b..42958b76 100644 --- a/.github/workflows/awstest.yml +++ b/.github/workflows/awstest.yml @@ -13,7 +13,6 @@ jobs: # Launch workflow using Tower CLI tool action - name: Launch workflow via tower uses: nf-core/tower-action@v3 - with: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} diff --git a/modules.json b/modules.json index 6949afbd..5b2e5e2d 100644 --- a/modules.json +++ b/modules.json @@ -7,7 +7,13 @@ "nf-core": { "custom/dumpsoftwareversions": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905" + "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "installed_by": ["modules"] + }, + "multiqc": { + "branch": "master", + "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "installed_by": ["modules"] } } } From 5736fcd7edd0cbedf24e9d404f69448b3b13c209 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 11:04:51 +0000 Subject: [PATCH 11/24] minor changes --- .gitattributes | 1 + .github/CONTRIBUTING.md | 16 +++++++++++ .github/workflows/linting.yml | 18 ++++++------ .github/workflows/linting_comment.yml | 2 +- .prettierignore | 2 ++ lib/NfcoreSchema.groovy | 1 - lib/NfcoreTemplate.groovy | 41 +++++++++++++++++++++------ 7 files changed, 62 insertions(+), 19 deletions(-) diff --git a/.gitattributes b/.gitattributes index 050bb120..7a2dabc2 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,4 @@ *.config linguist-language=nextflow +*.nf.test linguist-language=nextflow modules/nf-core/** linguist-generated subworkflows/nf-core/** linguist-generated diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index de10fd6d..14713dbf 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -101,3 +101,19 @@ If you are using a new feature from core Nextflow, you may bump the minimum requ ### Images and figures For overview images and other documents we follow the nf-core [style guidelines and examples](https://nf-co.re/developers/design_guidelines). + +## GitHub Codespaces + +This repo includes a devcontainer configuration which will create a GitHub Codespaces for Nextflow development! This is an online developer environment that runs in your browser, complete with VSCode and a terminal. + +To get started: + +- Open the repo in [Codespaces](https://github.com/nf-core/quantms/codespaces) +- Tools installed + - nf-core + - Nextflow + +Devcontainer specs: + +- [DevContainer config](.devcontainer/devcontainer.json) +- [Dockerfile](.devcontainer/Dockerfile) diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 8a5ce69b..858d622e 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -4,6 +4,8 @@ name: nf-core linting # that the code meets the nf-core guidelines. 
on: push: + branches: + - dev pull_request: release: types: [published] @@ -12,9 +14,9 @@ jobs: EditorConfig: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install editorconfig-checker run: npm install -g editorconfig-checker @@ -25,9 +27,9 @@ jobs: Prettier: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install Prettier run: npm install -g prettier @@ -38,7 +40,7 @@ jobs: PythonBlack: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Check code lints with Black uses: psf/black@stable @@ -69,12 +71,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out pipeline code - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: "3.7" architecture: "x64" @@ -97,7 +99,7 @@ jobs: - name: Upload linting log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: linting-logs path: | diff --git a/.github/workflows/linting_comment.yml b/.github/workflows/linting_comment.yml index 04758f61..0bbcd30f 100644 --- a/.github/workflows/linting_comment.yml +++ b/.github/workflows/linting_comment.yml @@ -18,7 +18,7 @@ jobs: - name: Get PR number id: pr_number - run: echo "::set-output name=pr_number::$(cat linting-logs/PR_number.txt)" + run: echo "pr_number=$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT - name: Post PR comment uses: marocchino/sticky-pull-request-comment@v2 diff --git a/.prettierignore b/.prettierignore index eb74a574..437d763d 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,5 +1,6 @@ email_template.html adaptivecard.json +slackreport.json .nextflow* work/ data/ @@ -8,3 +9,4 @@ results/ testing/ testing* *.pyc +bin/ diff --git a/lib/NfcoreSchema.groovy b/lib/NfcoreSchema.groovy index b3d092f8..33cd4f6e 100755 --- a/lib/NfcoreSchema.groovy +++ b/lib/NfcoreSchema.groovy @@ -46,7 +46,6 @@ class NfcoreSchema { 'quiet', 'syslog', 'v', - 'version', // Options for `nextflow run` command 'ansi', diff --git a/lib/NfcoreTemplate.groovy b/lib/NfcoreTemplate.groovy index 27feb009..25a0a74a 100755 --- a/lib/NfcoreTemplate.groovy +++ b/lib/NfcoreTemplate.groovy @@ -32,6 +32,25 @@ class NfcoreTemplate { } } + // + // Generate version string + // + public static String version(workflow) { + String version_string = "" + + if (workflow.manifest.version) { + def prefix_v = workflow.manifest.version[0] != 'v' ? 
'v' : '' + version_string += "${prefix_v}${workflow.manifest.version}" + } + + if (workflow.commitId) { + def git_shortsha = workflow.commitId.substring(0, 7) + version_string += "-g${git_shortsha}" + } + + return version_string + } + // // Construct and send completion email // @@ -61,7 +80,7 @@ class NfcoreTemplate { misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp def email_fields = [:] - email_fields['version'] = workflow.manifest.version + email_fields['version'] = NfcoreTemplate.version(workflow) email_fields['runName'] = workflow.runName email_fields['success'] = workflow.success email_fields['dateComplete'] = workflow.complete @@ -146,10 +165,10 @@ class NfcoreTemplate { } // - // Construct and send adaptive card - // https://adaptivecards.io + // Construct and send a notification to a web server as JSON + // e.g. Microsoft Teams and Slack // - public static void adaptivecard(workflow, params, summary_params, projectDir, log) { + public static void IM_notification(workflow, params, summary_params, projectDir, log) { def hook_url = params.hook_url def summary = [:] @@ -170,7 +189,7 @@ class NfcoreTemplate { misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp def msg_fields = [:] - msg_fields['version'] = workflow.manifest.version + msg_fields['version'] = NfcoreTemplate.version(workflow) msg_fields['runName'] = workflow.runName msg_fields['success'] = workflow.success msg_fields['dateComplete'] = workflow.complete @@ -178,13 +197,16 @@ class NfcoreTemplate { msg_fields['exitStatus'] = workflow.exitStatus msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None') msg_fields['errorReport'] = (workflow.errorReport ?: 'None') - msg_fields['commandLine'] = workflow.commandLine + msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "") msg_fields['projectDir'] = workflow.projectDir msg_fields['summary'] = summary << misc_fields // Render the JSON template def engine = new groovy.text.GStringTemplateEngine() - def hf = new File("$projectDir/assets/adaptivecard.json") + // Different JSON depending on the service provider + // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format + def json_path = hook_url.contains("hooks.slack.com") ? 
"slackreport.json" : "adaptivecard.json" + def hf = new File("$projectDir/assets/${json_path}") def json_template = engine.createTemplate(hf).make(msg_fields) def json_message = json_template.toString() @@ -209,7 +231,7 @@ class NfcoreTemplate { if (workflow.stats.ignoredCount == 0) { log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" + log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" } } else { log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" @@ -297,6 +319,7 @@ class NfcoreTemplate { // public static String logo(workflow, monochrome_logs) { Map colors = logColours(monochrome_logs) + String workflow_version = NfcoreTemplate.version(workflow) String.format( """\n ${dashedLine(monochrome_logs)} @@ -305,7 +328,7 @@ class NfcoreTemplate { ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} ${colors.green}`._,._,\'${colors.reset} - ${colors.purple} ${workflow.manifest.name} v${workflow.manifest.version}${colors.reset} + ${colors.purple} ${workflow.manifest.name} ${workflow_version}${colors.reset} ${dashedLine(monochrome_logs)} """.stripIndent() ) From 0a528701410e7bd6dcf181fdaae2ddba83034583 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 11:44:10 +0000 Subject: [PATCH 12/24] change params.enable_conda --- nextflow.config | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nextflow.config b/nextflow.config index 5b429fd3..57caec5a 100644 --- a/nextflow.config +++ b/nextflow.config @@ -240,7 +240,7 @@ try { profiles { debug { process.beforeScript = 'echo $HOSTNAME' } ebicluster{ - params.enable_conda = true + conda.enabled = true docker.enabled = false singularity.enabled = false conda.createTimeout = '1 h' @@ -248,7 +248,7 @@ profiles { process.executor = 'lsf' } conda { - params.enable_conda = true + conda.enabled = true docker.enabled = false singularity.enabled = false podman.enabled = false @@ -257,7 +257,7 @@ profiles { conda.useMamba = true } mamba { - params.enable_conda = true + conda.enabled = true conda.useMamba = true docker.enabled = false singularity.enabled = false From 73b8e1a1501c41df0ce96ccfcbb09b58c1d70511 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 14:00:23 +0000 Subject: [PATCH 13/24] change params.enable_conda --- modules/local/diannconvert/main.nf | 2 +- modules/local/generate_diann_cfg/main.nf | 2 +- modules/local/msstats/main.nf | 2 +- modules/local/msstatstmt/main.nf | 2 +- modules/local/mzmlstatistics/main.nf | 2 +- modules/local/openms/consensusid/main.nf | 2 +- modules/local/openms/decoydatabase/main.nf | 2 +- modules/local/openms/epifany/main.nf | 2 +- modules/local/openms/extractpsmfeatures/main.nf | 2 +- modules/local/openms/falsediscoveryrate/main.nf | 2 +- modules/local/openms/filemerge/main.nf | 2 +- modules/local/openms/idconflictresolver/main.nf | 2 +- modules/local/openms/idfilter/main.nf | 2 +- modules/local/openms/idmapper/main.nf | 2 +- modules/local/openms/idpep/main.nf | 2 +- modules/local/openms/idscoreswitcher/main.nf | 2 +- modules/local/openms/indexpeptides/main.nf | 2 +- 
modules/local/openms/isobaricanalyzer/main.nf | 2 +- modules/local/openms/msstatsconverter/main.nf | 2 +- modules/local/openms/mzmlindexing/main.nf | 2 +- modules/local/openms/openmspeakpicker/main.nf | 2 +- modules/local/openms/proteininference/main.nf | 2 +- modules/local/openms/proteinquantifier/main.nf | 4 ++-- modules/local/openms/proteomicslfq/main.nf | 4 ++-- modules/local/openms/thirdparty/luciphoradapter/main.nf | 2 +- modules/local/openms/thirdparty/percolator/main.nf | 2 +- modules/local/openms/thirdparty/searchenginecomet/main.nf | 2 +- modules/local/openms/thirdparty/searchenginemsgf/main.nf | 2 +- modules/local/pmultiqc/main.nf | 2 +- modules/local/preprocess_expdesign.nf | 3 +-- modules/local/samplesheet_check.nf | 3 +-- modules/local/sdrfparsing/main.nf | 2 +- modules/local/thermorawfileparser/main.nf | 2 +- modules/nf-core/custom/dumpsoftwareversions/main.nf | 2 +- modules/nf-core/multiqc/main.nf | 2 +- nextflow.config | 1 - nextflow_schema.json | 6 ------ 37 files changed, 37 insertions(+), 46 deletions(-) diff --git a/modules/local/diannconvert/main.nf b/modules/local/diannconvert/main.nf index b23534b9..fc13cf80 100644 --- a/modules/local/diannconvert/main.nf +++ b/modules/local/diannconvert/main.nf @@ -2,7 +2,7 @@ process DIANNCONVERT { tag "$meta.experiment_id" label 'process_medium' - conda (params.enable_conda ? "conda-forge::pandas_schema conda-forge::lzstring bioconda::pmultiqc=0.0.19" : null) + conda "conda-forge::pandas_schema conda-forge::lzstring bioconda::pmultiqc=0.0.19" if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/pmultiqc:0.0.19--pyhdfd78af_0" } else { diff --git a/modules/local/generate_diann_cfg/main.nf b/modules/local/generate_diann_cfg/main.nf index dd586f64..3625f5d9 100644 --- a/modules/local/generate_diann_cfg/main.nf +++ b/modules/local/generate_diann_cfg/main.nf @@ -2,7 +2,7 @@ process GENERATE_DIANN_CFG { tag "$meta.experiment_id" label 'process_low' - conda (params.enable_conda ? "conda-forge::pandas_schema bioconda::sdrf-pipelines=0.0.22" : null) + conda "conda-forge::pandas_schema bioconda::sdrf-pipelines=0.0.22" if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/sdrf-pipelines:0.0.22--pyhdfd78af_0" } else { diff --git a/modules/local/msstats/main.nf b/modules/local/msstats/main.nf index 994eaa90..5a7db1f3 100644 --- a/modules/local/msstats/main.nf +++ b/modules/local/msstats/main.nf @@ -2,7 +2,7 @@ process MSSTATS { tag "$msstats_csv_input.Name" label 'process_medium' - conda (params.enable_conda ? "bioconda::bioconductor-msstats=4.2.0" : null) + conda "bioconda::bioconductor-msstats=4.2.0" if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/bioconductor-msstats:4.2.0--r41h619a076_1" } else { diff --git a/modules/local/msstatstmt/main.nf b/modules/local/msstatstmt/main.nf index 10cf304e..a9fd33d0 100644 --- a/modules/local/msstatstmt/main.nf +++ b/modules/local/msstatstmt/main.nf @@ -2,7 +2,7 @@ process MSSTATSTMT { tag "$msstatstmt_csv_input.Name" label 'process_medium' - conda (params.enable_conda ? 
"bioconda::bioconductor-msstatstmt=2.2.0" : null) + conda "bioconda::bioconductor-msstatstmt=2.2.0" if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/bioconductor-msstatstmt:2.2.0--r41hdfd78af_0" } else { diff --git a/modules/local/mzmlstatistics/main.nf b/modules/local/mzmlstatistics/main.nf index 4d21af5c..07a887c3 100644 --- a/modules/local/mzmlstatistics/main.nf +++ b/modules/local/mzmlstatistics/main.nf @@ -3,7 +3,7 @@ process MZMLSTATISTICS { // TODO could be easily parallelized label 'process_single_thread' - conda (params.enable_conda ? "bioconda::pyopenms=2.8.0" : null) + conda "bioconda::pyopenms=2.8.0" if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/pyopenms:2.8.0--py38hd8d5640_1" } else { diff --git a/modules/local/openms/consensusid/main.nf b/modules/local/openms/consensusid/main.nf index 33b82c4b..79890551 100644 --- a/modules/local/openms/consensusid/main.nf +++ b/modules/local/openms/consensusid/main.nf @@ -5,7 +5,7 @@ process CONSENSUSID { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/decoydatabase/main.nf b/modules/local/openms/decoydatabase/main.nf index e4a4f63a..79379403 100644 --- a/modules/local/openms/decoydatabase/main.nf +++ b/modules/local/openms/decoydatabase/main.nf @@ -2,7 +2,7 @@ process DECOYDATABASE { label 'process_very_low' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/epifany/main.nf b/modules/local/openms/epifany/main.nf index 54b7ca49..da70b78b 100644 --- a/modules/local/openms/epifany/main.nf +++ b/modules/local/openms/epifany/main.nf @@ -4,7 +4,7 @@ process EPIFANY { publishDir "${params.outdir}" - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/extractpsmfeatures/main.nf b/modules/local/openms/extractpsmfeatures/main.nf index 9cebd9ea..2bc825af 100644 --- a/modules/local/openms/extractpsmfeatures/main.nf +++ b/modules/local/openms/extractpsmfeatures/main.nf @@ -4,7 +4,7 @@ process EXTRACTPSMFEATURES { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/falsediscoveryrate/main.nf b/modules/local/openms/falsediscoveryrate/main.nf index cc160e52..3272614f 100644 --- a/modules/local/openms/falsediscoveryrate/main.nf +++ b/modules/local/openms/falsediscoveryrate/main.nf @@ -4,7 +4,7 @@ process FALSEDISCOVERYRATE { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/filemerge/main.nf b/modules/local/openms/filemerge/main.nf index 6e8860b0..9bd76150 100644 --- a/modules/local/openms/filemerge/main.nf +++ b/modules/local/openms/filemerge/main.nf @@ -3,7 +3,7 @@ process FILEMERGE { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/idconflictresolver/main.nf b/modules/local/openms/idconflictresolver/main.nf index cd71fc88..07d60fe6 100644 --- a/modules/local/openms/idconflictresolver/main.nf +++ b/modules/local/openms/idconflictresolver/main.nf @@ -2,7 +2,7 @@ process IDCONFLICTRESOLVER { label 'process_low' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/idfilter/main.nf b/modules/local/openms/idfilter/main.nf index 221f10eb..9df9ecbc 100644 --- a/modules/local/openms/idfilter/main.nf +++ b/modules/local/openms/idfilter/main.nf @@ -4,7 +4,7 @@ process IDFILTER { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/idmapper/main.nf b/modules/local/openms/idmapper/main.nf index 4674e086..23d560c5 100644 --- a/modules/local/openms/idmapper/main.nf +++ b/modules/local/openms/idmapper/main.nf @@ -4,7 +4,7 @@ process IDMAPPER { label 'process_medium' label 'openms' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/idpep/main.nf b/modules/local/openms/idpep/main.nf index 9e410793..356708fb 100644 --- a/modules/local/openms/idpep/main.nf +++ b/modules/local/openms/idpep/main.nf @@ -2,7 +2,7 @@ process IDPEP { tag "$meta.mzml_id" label 'process_very_low' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/idscoreswitcher/main.nf b/modules/local/openms/idscoreswitcher/main.nf index f7493a63..1849a772 100644 --- a/modules/local/openms/idscoreswitcher/main.nf +++ b/modules/local/openms/idscoreswitcher/main.nf @@ -3,7 +3,7 @@ process IDSCORESWITCHER { label 'process_very_low' label 'process_single_thread' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/indexpeptides/main.nf b/modules/local/openms/indexpeptides/main.nf index 78a488af..fee38870 100644 --- a/modules/local/openms/indexpeptides/main.nf +++ b/modules/local/openms/indexpeptides/main.nf @@ -2,7 +2,7 @@ process INDEXPEPTIDES { tag "$meta.mzml_id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/isobaricanalyzer/main.nf b/modules/local/openms/isobaricanalyzer/main.nf index 391fa46b..ac766ceb 100644 --- a/modules/local/openms/isobaricanalyzer/main.nf +++ b/modules/local/openms/isobaricanalyzer/main.nf @@ -2,7 +2,7 @@ process ISOBARICANALYZER { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/msstatsconverter/main.nf b/modules/local/openms/msstatsconverter/main.nf index cf65dcec..331503f0 100644 --- a/modules/local/openms/msstatsconverter/main.nf +++ b/modules/local/openms/msstatsconverter/main.nf @@ -2,7 +2,7 @@ process MSSTATSCONVERTER { tag "$exp_file.Name" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/mzmlindexing/main.nf b/modules/local/openms/mzmlindexing/main.nf index c8ac3b40..efec9e7c 100644 --- a/modules/local/openms/mzmlindexing/main.nf +++ b/modules/local/openms/mzmlindexing/main.nf @@ -2,7 +2,7 @@ process MZMLINDEXING { tag "$meta.mzml_id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/openmspeakpicker/main.nf b/modules/local/openms/openmspeakpicker/main.nf index ae1d6436..d449039d 100644 --- a/modules/local/openms/openmspeakpicker/main.nf +++ b/modules/local/openms/openmspeakpicker/main.nf @@ -2,7 +2,7 @@ process OPENMSPEAKPICKER { tag "$meta.mzml_id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/proteininference/main.nf b/modules/local/openms/proteininference/main.nf index 7a345d80..9ec251f1 100644 --- a/modules/local/openms/proteininference/main.nf +++ b/modules/local/openms/proteininference/main.nf @@ -1,7 +1,7 @@ process PROTEININFERENCE { label 'process_medium' - conda (params.enable_conda ? "bioconda::openms=2.9.0" : null) + conda "bioconda::openms=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' : 'quay.io/biocontainers/openms:2.9.0--h135471a_0' }" diff --git a/modules/local/openms/proteinquantifier/main.nf b/modules/local/openms/proteinquantifier/main.nf index b10acf90..8a22b6ad 100644 --- a/modules/local/openms/proteinquantifier/main.nf +++ b/modules/local/openms/proteinquantifier/main.nf @@ -2,10 +2,10 @@ process PROTEINQUANTIFIER { tag "${pro_quant_exp.baseName}" label 'process_medium' - conda (params.enable_conda ? "openms::openms=3.0.0dev" : null) + conda "openms::openms=2.9.0" container "${ workflow.containerEngine == 'docker' && !task.ext.singularity_pull_docker_container ? 'quay.io/biocontainers/openms:2.9.0--h135471a_0' : - 'https://ftp.pride.ebi.ac.uk/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' }" input: diff --git a/modules/local/openms/proteomicslfq/main.nf b/modules/local/openms/proteomicslfq/main.nf index 637c6cf9..7511d911 100644 --- a/modules/local/openms/proteomicslfq/main.nf +++ b/modules/local/openms/proteomicslfq/main.nf @@ -2,10 +2,10 @@ process PROTEOMICSLFQ { tag "${expdes.baseName}" label 'process_high' - conda (params.enable_conda ? "openms::openms=3.0.0dev" : null) + conda "openms::openms=2.9.0" container "${ workflow.containerEngine == 'docker' && !task.ext.singularity_pull_docker_container ? 
'quay.io/biocontainers/openms:2.9.0--h135471a_0' : - 'https://ftp.pride.ebi.ac.uk/pride/resources/tools/ghcr.io-openms-openms-executables-latest.img' + 'https://depot.galaxyproject.org/singularity/openms:2.9.0--h135471a_0' }" input: diff --git a/modules/local/openms/thirdparty/luciphoradapter/main.nf b/modules/local/openms/thirdparty/luciphoradapter/main.nf index c3e6c0af..16dfe7fe 100644 --- a/modules/local/openms/thirdparty/luciphoradapter/main.nf +++ b/modules/local/openms/thirdparty/luciphoradapter/main.nf @@ -2,7 +2,7 @@ process LUCIPHORADAPTER { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms-thirdparty=2.9.0" : null) + conda "bioconda::openms-thirdparty=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.9.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" diff --git a/modules/local/openms/thirdparty/percolator/main.nf b/modules/local/openms/thirdparty/percolator/main.nf index 2be83ff1..cd124452 100644 --- a/modules/local/openms/thirdparty/percolator/main.nf +++ b/modules/local/openms/thirdparty/percolator/main.nf @@ -2,7 +2,7 @@ process PERCOLATOR { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms-thirdparty=2.9.0" : null) + conda "bioconda::openms-thirdparty=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.9.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" diff --git a/modules/local/openms/thirdparty/searchenginecomet/main.nf b/modules/local/openms/thirdparty/searchenginecomet/main.nf index 39c92022..dc25c561 100644 --- a/modules/local/openms/thirdparty/searchenginecomet/main.nf +++ b/modules/local/openms/thirdparty/searchenginecomet/main.nf @@ -2,7 +2,7 @@ process SEARCHENGINECOMET { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms-thirdparty=2.9.0" : null) + conda "bioconda::openms-thirdparty=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.9.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" diff --git a/modules/local/openms/thirdparty/searchenginemsgf/main.nf b/modules/local/openms/thirdparty/searchenginemsgf/main.nf index a7fbc4e8..4ce79e85 100644 --- a/modules/local/openms/thirdparty/searchenginemsgf/main.nf +++ b/modules/local/openms/thirdparty/searchenginemsgf/main.nf @@ -2,7 +2,7 @@ process SEARCHENGINEMSGF { tag "$meta.mzml_id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms-thirdparty=2.9.0" : null) + conda "bioconda::openms-thirdparty=2.9.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.9.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" diff --git a/modules/local/pmultiqc/main.nf b/modules/local/pmultiqc/main.nf index 7db9298c..0cbf679f 100644 --- a/modules/local/pmultiqc/main.nf +++ b/modules/local/pmultiqc/main.nf @@ -1,7 +1,7 @@ process PMULTIQC { label 'process_high' - conda (params.enable_conda ? 
"conda-forge::pandas_schema conda-forge::lzstring bioconda::pmultiqc=0.0.19" : null) + conda "conda-forge::pandas_schema conda-forge::lzstring bioconda::pmultiqc=0.0.19" if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/pmultiqc:0.0.19--pyhdfd78af_0" } else { diff --git a/modules/local/preprocess_expdesign.nf b/modules/local/preprocess_expdesign.nf index 2b35d94d..e1c9652a 100644 --- a/modules/local/preprocess_expdesign.nf +++ b/modules/local/preprocess_expdesign.nf @@ -4,8 +4,7 @@ // accept different file endings already? process PREPROCESS_EXPDESIGN { - conda (params.enable_conda ? "bioconda::sdrf-pipelines=0.0.22 conda-forge::pandas" : null) - + conda "bioconda::sdrf-pipelines=0.0.22 conda-forge::pandas" label 'process_very_low' label 'process_single_thread' tag "$design.Name" diff --git a/modules/local/samplesheet_check.nf b/modules/local/samplesheet_check.nf index b37fb965..5d798b31 100644 --- a/modules/local/samplesheet_check.nf +++ b/modules/local/samplesheet_check.nf @@ -1,7 +1,6 @@ process SAMPLESHEET_CHECK { - conda (params.enable_conda ? "bioconda::sdrf-pipelines=0.0.22" : null) - + conda "bioconda::sdrf-pipelines=0.0.22" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/sdrf-pipelines:0.0.22--pyhdfd78af_0' : 'quay.io/biocontainers/sdrf-pipelines:0.0.22--pyhdfd78af_0' }" diff --git a/modules/local/sdrfparsing/main.nf b/modules/local/sdrfparsing/main.nf index 1dacccdd..d93b919b 100644 --- a/modules/local/sdrfparsing/main.nf +++ b/modules/local/sdrfparsing/main.nf @@ -2,7 +2,7 @@ process SDRFPARSING { tag "$sdrf.Name" label 'process_low' - conda (params.enable_conda ? "conda-forge::pandas_schema bioconda::sdrf-pipelines=0.0.22" : null) + conda "conda-forge::pandas_schema bioconda::sdrf-pipelines=0.0.22" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/sdrf-pipelines:0.0.22--pyhdfd78af_0' : 'quay.io/biocontainers/sdrf-pipelines:0.0.22--pyhdfd78af_0' }" diff --git a/modules/local/thermorawfileparser/main.nf b/modules/local/thermorawfileparser/main.nf index 5ddfcfdd..f31d5fdf 100644 --- a/modules/local/thermorawfileparser/main.nf +++ b/modules/local/thermorawfileparser/main.nf @@ -4,7 +4,7 @@ process THERMORAWFILEPARSER { label 'process_single_thread' label 'error_retry' - conda (params.enable_conda ? "conda-forge::mono bioconda::thermorawfileparser=1.3.4" : null) + conda "conda-forge::mono bioconda::thermorawfileparser=1.3.4" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/thermorawfileparser:1.3.4--ha8f3691_0' : 'quay.io/biocontainers/thermorawfileparser:1.3.4--ha8f3691_0' }" diff --git a/modules/nf-core/custom/dumpsoftwareversions/main.nf b/modules/nf-core/custom/dumpsoftwareversions/main.nf index cebb6e05..3df21765 100644 --- a/modules/nf-core/custom/dumpsoftwareversions/main.nf +++ b/modules/nf-core/custom/dumpsoftwareversions/main.nf @@ -2,7 +2,7 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { label 'process_single' // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container - conda (params.enable_conda ? 
'bioconda::multiqc=1.13' : null) + conda "bioconda::multiqc=1.13" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' : 'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }" diff --git a/modules/nf-core/multiqc/main.nf b/modules/nf-core/multiqc/main.nf index a8159a57..68f66bea 100644 --- a/modules/nf-core/multiqc/main.nf +++ b/modules/nf-core/multiqc/main.nf @@ -1,7 +1,7 @@ process MULTIQC { label 'process_single' - conda (params.enable_conda ? 'bioconda::multiqc=1.13' : null) + conda "bioconda::multiqc=1.13" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' : 'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }" diff --git a/nextflow.config b/nextflow.config index 57caec5a..bb02df32 100644 --- a/nextflow.config +++ b/nextflow.config @@ -205,7 +205,6 @@ params { validate_params = true show_hidden_params = false schema_ignore_params = 'modules' - enable_conda = false singularity_pull_docker_container = false diff --git a/nextflow_schema.json b/nextflow_schema.json index 38fb6569..2fd2e267 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -1222,12 +1222,6 @@ "hidden": true, "help_text": "By default, parameters set as _hidden_ in the schema are not shown on the command line when a user runs with `--help`. Specifying this option will tell the pipeline to show all parameters." }, - "enable_conda": { - "type": "boolean", - "description": "Run this workflow with Conda. You can also use '-profile conda' instead of providing this parameter.", - "hidden": true, - "fa_icon": "fas fa-bacon" - }, "singularity_pull_docker_container": { "type": "boolean", "description": "This parameter force singularity to pull the contain from docker instead of using the singularity image", From cbddb4d735ea55d634cd6ceeea0cdaa126ef87d1 Mon Sep 17 00:00:00 2001 From: Julianus Pfeuffer Date: Tue, 7 Mar 2023 14:23:16 +0000 Subject: [PATCH 14/24] black (although my black does not complain!!) 
--- bin/mzml_statistics.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bin/mzml_statistics.py b/bin/mzml_statistics.py index f00fd2ff..79d64b7e 100755 --- a/bin/mzml_statistics.py +++ b/bin/mzml_statistics.py @@ -7,7 +7,6 @@ def mzml_dataframe(mzml_path): - file_columns = [ "SpectrumID", "MSLevel", From 0ec0a416dcf4f55eaf19c5f27c128e4a7a201993 Mon Sep 17 00:00:00 2001 From: Julianus Pfeuffer Date: Tue, 7 Mar 2023 14:24:36 +0000 Subject: [PATCH 15/24] nfcorelint --- nextflow_schema.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nextflow_schema.json b/nextflow_schema.json index 8fbc3e72..8470e3d0 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -56,7 +56,7 @@ "default": "dda", "enum": ["dda", "dia"], "fa_icon": "far fa-list-ol" - }, + } } }, "protein_database": { From 54c4bbbf7d859c0812b70b56ba96d6cceb5e3d9e Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 14:35:14 +0000 Subject: [PATCH 16/24] change params.enable_conda --- modules.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/modules.json b/modules.json index 5b2e5e2d..ffe37044 100644 --- a/modules.json +++ b/modules.json @@ -13,7 +13,8 @@ "multiqc": { "branch": "master", "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", - "installed_by": ["modules"] + "installed_by": ["modules"], + "patch": "modules/nf-core/multiqc/multiqc.diff" } } } From 3b7f9b1e512dabc0f7b68cac5792329a0188efb0 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 14:35:30 +0000 Subject: [PATCH 17/24] change params.enable_conda --- modules.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/modules.json b/modules.json index ffe37044..5b2e5e2d 100644 --- a/modules.json +++ b/modules.json @@ -13,8 +13,7 @@ "multiqc": { "branch": "master", "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", - "installed_by": ["modules"], - "patch": "modules/nf-core/multiqc/multiqc.diff" + "installed_by": ["modules"] } } } From 0bd5a43311c416185127b089aa2ff7cd703887e4 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 14:39:41 +0000 Subject: [PATCH 18/24] change params.enable_conda --- modules.json | 4 +- .../templates/dumpsoftwareversions.py | 102 ++++++++++-------- modules/nf-core/multiqc/main.nf | 6 +- 3 files changed, 61 insertions(+), 51 deletions(-) mode change 100644 => 100755 modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py diff --git a/modules.json b/modules.json index 5b2e5e2d..5f778412 100644 --- a/modules.json +++ b/modules.json @@ -7,12 +7,12 @@ "nf-core": { "custom/dumpsoftwareversions": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c", "installed_by": ["modules"] }, "multiqc": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "git_sha": "ee80d14721e76e2e079103b8dcd5d57129e584ba", "installed_by": ["modules"] } } diff --git a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py old mode 100644 new mode 100755 index 787bdb7b..da033408 --- a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py +++ b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py @@ -1,12 +1,16 @@ #!/usr/bin/env python -import platform -from textwrap import dedent + +"""Provide functions to merge multiple versions.yml files.""" + import yaml +import 
platform +from textwrap import dedent def _make_versions_html(versions): + """Generate a tabular HTML output of all versions for MultiQC.""" html = [ dedent( """\\ @@ -45,47 +49,53 @@ def _make_versions_html(versions): return "\\n".join(html) -versions_this_module = {} -versions_this_module["${task.process}"] = { - "python": platform.python_version(), - "yaml": yaml.__version__, -} - -with open("$versions") as f: - versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module - -# aggregate versions by the module name (derived from fully-qualified process name) -versions_by_module = {} -for process, process_versions in versions_by_process.items(): - module = process.split(":")[-1] - try: - if versions_by_module[module] != process_versions: - raise AssertionError( - "We assume that software versions are the same between all modules. " - "If you see this error-message it means you discovered an edge-case " - "and should open an issue in nf-core/tools. " - ) - except KeyError: - versions_by_module[module] = process_versions - -versions_by_module["Workflow"] = { - "Nextflow": "$workflow.nextflow.version", - "$workflow.manifest.name": "$workflow.manifest.version", -} - -versions_mqc = { - "id": "software_versions", - "section_name": "${workflow.manifest.name} Software Versions", - "section_href": "https://github.com/${workflow.manifest.name}", - "plot_type": "html", - "description": "are collected at run time from the software output.", - "data": _make_versions_html(versions_by_module), -} - -with open("software_versions.yml", "w") as f: - yaml.dump(versions_by_module, f, default_flow_style=False) -with open("software_versions_mqc.yml", "w") as f: - yaml.dump(versions_mqc, f, default_flow_style=False) - -with open("versions.yml", "w") as f: - yaml.dump(versions_this_module, f, default_flow_style=False) +def main(): + """Load all version files and generate merged output.""" + versions_this_module = {} + versions_this_module["${task.process}"] = { + "python": platform.python_version(), + "yaml": yaml.__version__, + } + + with open("$versions") as f: + versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module + + # aggregate versions by the module name (derived from fully-qualified process name) + versions_by_module = {} + for process, process_versions in versions_by_process.items(): + module = process.split(":")[-1] + try: + if versions_by_module[module] != process_versions: + raise AssertionError( + "We assume that software versions are the same between all modules. " + "If you see this error-message it means you discovered an edge-case " + "and should open an issue in nf-core/tools. 
" + ) + except KeyError: + versions_by_module[module] = process_versions + + versions_by_module["Workflow"] = { + "Nextflow": "$workflow.nextflow.version", + "$workflow.manifest.name": "$workflow.manifest.version", + } + + versions_mqc = { + "id": "software_versions", + "section_name": "${workflow.manifest.name} Software Versions", + "section_href": "https://github.com/${workflow.manifest.name}", + "plot_type": "html", + "description": "are collected at run time from the software output.", + "data": _make_versions_html(versions_by_module), + } + + with open("software_versions.yml", "w") as f: + yaml.dump(versions_by_module, f, default_flow_style=False) + with open("software_versions_mqc.yml", "w") as f: + yaml.dump(versions_mqc, f, default_flow_style=False) + + with open("versions.yml", "w") as f: + yaml.dump(versions_this_module, f, default_flow_style=False) + + +if __name__ == "__main__": + main() diff --git a/modules/nf-core/multiqc/main.nf b/modules/nf-core/multiqc/main.nf index 68f66bea..4b604749 100644 --- a/modules/nf-core/multiqc/main.nf +++ b/modules/nf-core/multiqc/main.nf @@ -1,10 +1,10 @@ process MULTIQC { label 'process_single' - conda "bioconda::multiqc=1.13" + conda "bioconda::multiqc=1.14" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' : - 'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.14--pyhdfd78af_0' : + 'quay.io/biocontainers/multiqc:1.14--pyhdfd78af_0' }" input: path multiqc_files, stageAs: "?/*" From 0c3d03302bc65cf6fd071c7161bd2da17e299db5 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 14:43:47 +0000 Subject: [PATCH 19/24] remove completely params.enable_conda --- conf/modules.config | 2 +- lib/WorkflowMain.groovy | 2 +- modules/local/openms/thirdparty/luciphoradapter/main.nf | 2 +- modules/local/openms/thirdparty/searchenginemsgf/main.nf | 2 +- subworkflows/local/create_input_channel.nf | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/conf/modules.config b/conf/modules.config index 330aa481..b00b1a16 100644 --- a/conf/modules.config +++ b/conf/modules.config @@ -253,7 +253,7 @@ process { // DIA-NN withName: '.*:DIA:.*' { - ext.when = { !params.enable_conda } + ext.when = { session.config.conda && session.config.conda.enabled } publishDir = [ path: { "${params.outdir}/${task.process.tokenize(':')[-1].toLowerCase()}" }, mode: params.publish_dir_mode, diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy index 07ca4cf4..658bdec2 100755 --- a/lib/WorkflowMain.groovy +++ b/lib/WorkflowMain.groovy @@ -66,7 +66,7 @@ class WorkflowMain { NfcoreTemplate.checkConfigProvided(workflow, log) // Check that conda channels are set-up correctly - if (params.enable_conda) { + if (session.config.conda && session.config.conda.enabled) { Utils.checkCondaChannels(log) } diff --git a/modules/local/openms/thirdparty/luciphoradapter/main.nf b/modules/local/openms/thirdparty/luciphoradapter/main.nf index 16dfe7fe..af79ea45 100644 --- a/modules/local/openms/thirdparty/luciphoradapter/main.nf +++ b/modules/local/openms/thirdparty/luciphoradapter/main.nf @@ -21,7 +21,7 @@ process LUCIPHORADAPTER { luciphor_jar = '' if (workflow.containerEngine || (task.executor == "awsbatch")) { luciphor_jar = "-executable \$(find /usr/local/share/luciphor2-*/luciphor2.jar -maxdepth 0)" - } else if (params.enable_conda) { + } else if (session.config.conda && 
session.config.conda.enabled) { luciphor_jar = "-executable \$(find \$CONDA_PREFIX/share/luciphor2-*/luciphor2.jar -maxdepth 0)" } diff --git a/modules/local/openms/thirdparty/searchenginemsgf/main.nf b/modules/local/openms/thirdparty/searchenginemsgf/main.nf index 4ce79e85..49a35429 100644 --- a/modules/local/openms/thirdparty/searchenginemsgf/main.nf +++ b/modules/local/openms/thirdparty/searchenginemsgf/main.nf @@ -20,7 +20,7 @@ process SEARCHENGINEMSGF { msgf_jar = '' if (workflow.containerEngine || (task.executor == "awsbatch")) { msgf_jar = "-executable \$(find /usr/local/share/msgf_plus-*/MSGFPlus.jar -maxdepth 0)" - } else if (params.enable_conda) { + } else if (session.config.conda && session.config.conda.enabled) { msgf_jar = "-executable \$(find \$CONDA_PREFIX/share/msgf_plus-*/MSGFPlus.jar -maxdepth 0)" } diff --git a/subworkflows/local/create_input_channel.nf b/subworkflows/local/create_input_channel.nf index 9140db8d..dd4ceb71 100644 --- a/subworkflows/local/create_input_channel.nf +++ b/subworkflows/local/create_input_channel.nf @@ -163,7 +163,7 @@ def create_meta_channel(LinkedHashMap row, is_sdrf, enzymes, files, wrapper) { exit 1 } } - }else if(params.enable_conda){ + }else if(session.config.conda && session.config.conda.enabled){ log.error "File in DIA mode found in input design and conda profile was chosen. DIA-NN currently doesn't support conda! Exiting. Please use the docker/singularity profile with a container." exit 1 } From 483701e4d892e303c529e37dd700338ceae73589 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 14:56:44 +0000 Subject: [PATCH 20/24] remove completely params.enable_conda --- lib/WorkflowMain.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy index 658bdec2..07ca4cf4 100755 --- a/lib/WorkflowMain.groovy +++ b/lib/WorkflowMain.groovy @@ -66,7 +66,7 @@ class WorkflowMain { NfcoreTemplate.checkConfigProvided(workflow, log) // Check that conda channels are set-up correctly - if (session.config.conda && session.config.conda.enabled) { + if (params.enable_conda) { Utils.checkCondaChannels(log) } From 5b8a4139213be86c14de989ee2eec55c6ecb9389 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 15:39:28 +0000 Subject: [PATCH 21/24] remove completely params.enable_conda --- lib/WorkflowMain.groovy | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy index 07ca4cf4..db89bb98 100755 --- a/lib/WorkflowMain.groovy +++ b/lib/WorkflowMain.groovy @@ -19,7 +19,7 @@ class WorkflowMain { } // - // Print help to screen if required + // Generate help string // public static String help(workflow, params, log) { def command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker" @@ -32,7 +32,7 @@ class WorkflowMain { } // - // Print parameter summary log to screen + // Generate parameter summary log string // public static String paramsSummaryLog(workflow, params, log) { def summary_log = '' @@ -53,20 +53,26 @@ class WorkflowMain { System.exit(0) } - // Validate workflow parameters via the JSON schema - if (params.validate_params) { - NfcoreSchema.validateParameters(workflow, params, log) + // Print workflow version and exit on --version + if (params.version) { + String workflow_version = NfcoreTemplate.version(workflow) + log.info "${workflow.manifest.name} ${workflow_version}" + System.exit(0) } // Print parameter summary log 
to screen - log.info paramsSummaryLog(workflow, params, log) + // Validate workflow parameters via the JSON schema + if (params.validate_params) { + NfcoreSchema.validateParameters(workflow, params, log) + } + // Check that a -profile or Nextflow config has been provided to run the pipeline NfcoreTemplate.checkConfigProvided(workflow, log) // Check that conda channels are set-up correctly - if (params.enable_conda) { + if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { Utils.checkCondaChannels(log) } @@ -96,6 +102,6 @@ class WorkflowMain { // check fasta database has been provided if (!params.database) { log.error "Please provide an fasta database to the pipeline e.g. '--database *.fasta'" - } + } } } From 8c7e024e781ceae63dd743bb34405cef29688ae0 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 15:40:12 +0000 Subject: [PATCH 22/24] remove completely params.enable_conda --- lib/Utils.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/Utils.groovy b/lib/Utils.groovy index 7378f26f..8d030f4e 100755 --- a/lib/Utils.groovy +++ b/lib/Utils.groovy @@ -38,7 +38,7 @@ class Utils { " You will need to set-up the conda-forge and bioconda channels correctly.\n" + " Please refer to https://bioconda.github.io/\n" + " The observed channel order is \n" + - " ${channels}" + + " ${channels}\n" + " but the following channel order is required:\n" + " ${required_channels_in_order}\n" + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" From 7a49eb9c9757c8382484db48a35980f726540061 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 15:45:19 +0000 Subject: [PATCH 23/24] remove completely params.enable_conda --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 859d6bb1..33eab815 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,7 +28,7 @@ jobs: matrix: # Nextflow versions NXF_VER: - - "21.10.3" + - "22.10.1" - "latest-everything" test_profile: ["test_lfq", "test_dia", "test_localize", "test_tmt"] exec_profile: ["docker", "conda"] From 8151104d46b4102f979858c5dc30fd5bf7fb93f1 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 7 Mar 2023 15:50:35 +0000 Subject: [PATCH 24/24] remove completely params.enable_conda --- nextflow.config | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nextflow.config b/nextflow.config index bb02df32..c9e993b8 100644 --- a/nextflow.config +++ b/nextflow.config @@ -356,11 +356,11 @@ dag { manifest { name = 'nf-core/quantms' - author = 'Yasset Perez-Riverol' + author = """Yasset Perez-Riverol""" homePage = 'https://github.com/nf-core/quantms' - description = 'Quantitative Mass Spectrometry nf-core workflow' + description = """Quantitative Mass Spectrometry nf-core workflow""" mainScript = 'main.nf' - nextflowVersion = '!>=21.10.3' + nextflowVersion = '!>=22.10.1' version = '1.1dev' doi = '' }
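The patches above converge on a single pattern for conda handling now that params.enable_conda is gone: the conda directive is declared unconditionally (Nextflow 22.10.1+, as required by the updated manifest, only resolves it when a conda/mamba profile is active), and any remaining per-task logic inspects the active profile rather than a pipeline parameter. The minimal Nextflow sketch below illustrates that pattern under stated assumptions; the process name, package pin, container tags, command line and jar path are hypothetical placeholders for illustration and are not modules of this pipeline.

// Illustrative sketch only -- EXAMPLE_TOOL, its package pin, containers and
// jar path are hypothetical; the conda/profile handling mirrors the patches above.
process EXAMPLE_TOOL {
    label 'process_single'

    // Declared unconditionally: Nextflow only honours this directive when the
    // pipeline is run with '-profile conda' (or mamba), so the old
    // 'params.enable_conda ? ... : null' ternary is no longer needed.
    conda "bioconda::example-tool=1.0.0"
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
        'https://depot.galaxyproject.org/singularity/example-tool:1.0.0--hdfd78af_0' :
        'quay.io/biocontainers/example-tool:1.0.0--hdfd78af_0' }"

    input:
    path infile

    output:
    path "out.txt",      emit: result
    path "versions.yml", emit: versions

    script:
    // Where per-task logic still needs to know whether conda is active
    // (e.g. locating a jar under $CONDA_PREFIX), the active profile is
    // inspected instead of a params flag, mirroring the check used above.
    def conda_enabled = workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1
    def tool_jar = conda_enabled
        ? "-executable \$(find \$CONDA_PREFIX/share/example-tool-*/example.jar -maxdepth 0)"
        : ""
    """
    example-tool ${tool_jar} --in ${infile} > out.txt

    cat <<-END_VERSIONS > versions.yml
    "${task.process}":
        example-tool: 1.0.0
    END_VERSIONS
    """
}

With '-profile conda' the bare conda directive is resolved and the jar is looked up under \$CONDA_PREFIX; with '-profile docker' or '-profile singularity' the container line takes precedence and the profile check evaluates to false.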