diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml new file mode 100644 index 0000000000..3b63f198cc --- /dev/null +++ b/.github/actions/create-lint-wf/action.yml @@ -0,0 +1,108 @@ +name: "Create and lint nf-core pipeline" +description: "Create and lint nf-core pipeline" +inputs: + NXF_VER: + description: "Nextflow version" + required: true + +runs: + using: "composite" + steps: + - name: go to subdirectory and change nextflow workdir + shell: bash + run: | + mkdir -p create-lint-wf + cd create-lint-wf + export NXF_WORK=$(pwd) + + # Set up Nextflow + - name: Install Nextflow + uses: nf-core/setup-nextflow@b9f764e8ba5c76b712ace14ecbfcef0e40ae2dd8 # v1 + with: + version: ${{ matrix.NXF_VER }} + + # Build a pipeline from the template + - name: nf-core create + shell: bash + run: | + mkdir -p create-lint-wf && cd create-lint-wf + export NXF_WORK=$(pwd) + nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain + + # Try syncing it before we change anything + - name: nf-core sync + shell: bash + run: nf-core --log-file log.txt sync --dir nf-core-testpipeline/ + working-directory: create-lint-wf + + # Run code style linting + - name: run pre-commit + shell: bash + run: pre-commit run --all-files + working-directory: create-lint-wf + + # Update modules to the latest version + - name: nf-core modules update + shell: bash + run: nf-core --log-file log.txt modules update --dir nf-core-testpipeline --all --no-preview + working-directory: create-lint-wf + + # Remove TODO statements + - name: remove TODO + shell: bash + run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; + working-directory: create-lint-wf + + # Replace zenodo.XXXXXX to pass readme linting + - name: replace zenodo.XXXXXX + shell: bash + run: find nf-core-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; + working-directory: create-lint-wf + + # Run nf-core linting + - name: nf-core lint + shell: bash + run: nf-core --verbose --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned + working-directory: create-lint-wf + + - name: nf-core bump-version to release + shell: bash + run: nf-core --log-file log.txt bump-version --dir nf-core-testpipeline/ 1.1 + working-directory: create-lint-wf + + - name: nf-core lint in release mode + shell: bash + run: nf-core --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned --release + working-directory: create-lint-wf + + - name: Upload log file artifact + if: ${{ always() }} + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4 + with: + name: nf-core-log-file-${{ matrix.NXF_VER }} + path: create-lint-wf/log.txt + + - name: nf-core modules install + shell: bash + run: nf-core --log-file log.txt modules install fastqc --dir nf-core-testpipeline/ --force + working-directory: create-lint-wf + + - name: nf-core modules install gitlab + shell: bash + run: nf-core --log-file log.txt modules --git-remote https://gitlab.com/nf-core/modules-test.git --branch branch-tester install fastp --dir nf-core-testpipeline/ + working-directory: create-lint-wf + + - name: nf-core modules list local + shell: bash + run: nf-core --log-file log.txt modules list local --dir nf-core-testpipeline/ + working-directory: create-lint-wf + + - name: nf-core modules list remote + shell: bash + run: nf-core --log-file log.txt modules list remote + working-directory: 
create-lint-wf + + - name: nf-core modules list remote gitlab + shell: bash + run: nf-core --log-file log.txt modules --git-remote https://gitlab.com/nf-core/modules-test.git list remote + working-directory: create-lint-wf diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml index 54dee6df16..bbac1cc6ff 100644 --- a/.github/workflows/branch.yml +++ b/.github/workflows/branch.yml @@ -18,7 +18,7 @@ jobs: # If the above check failed, post a comment on the PR explaining the failure - name: Post PR comment if: failure() - uses: mshick/add-pr-comment@v2 + uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 with: message: | ## This PR is against the `master` branch :x: diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml index 63dcf2e428..6316d62f33 100644 --- a/.github/workflows/changelog.yml +++ b/.github/workflows/changelog.yml @@ -17,7 +17,7 @@ jobs: ) steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 with: token: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} @@ -34,7 +34,7 @@ jobs: fi gh pr checkout $PR_NUMBER - - uses: actions/setup-python@v5 + - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: python-version: "3.11" @@ -63,7 +63,7 @@ jobs: echo "File changed: ${{ env.changed }}" - name: Set up Python 3.11 - uses: actions/setup-python@v5 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: python-version: 3.11 cache: "pip" diff --git a/.github/workflows/clean-up.yml b/.github/workflows/clean-up.yml index ff311f9df8..a9cd4e930c 100644 --- a/.github/workflows/clean-up.yml +++ b/.github/workflows/clean-up.yml @@ -10,7 +10,7 @@ jobs: issues: write pull-requests: write steps: - - uses: actions/stale@v9 + - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9 with: stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days." stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful." diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index c4d0358982..e69229c051 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -3,6 +3,9 @@ on: push: branches: - dev + paths-ignore: + - "docs/**" + - "CHANGELOG.md" pull_request: release: types: [published] @@ -26,7 +29,7 @@ env: jobs: MakeTestWorkflow: - runs-on: ${{ github.event.inputs.runners || 'self-hosted' }} + runs-on: ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} env: NXF_ANSI_LOG: false strategy: @@ -42,12 +45,12 @@ jobs: export NXF_WORK=$(pwd) # Get the repo code - - uses: actions/checkout@v4 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 name: Check out source-code repository # Set up nf-core/tools - name: Set up Python 3.11 - uses: actions/setup-python@v5 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: python-version: 3.11 cache: pip @@ -57,37 +60,14 @@ jobs: python -m pip install --upgrade pip pip install . 
- # Set up Nextflow - - name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + - name: run create-lint-wf + uses: ./.github/actions/create-lint-wf with: - version: ${{ matrix.NXF_VER }} - - # Build a pipeline from the template - - name: nf-core create - run: | - mkdir create-lint-wf && cd create-lint-wf - export NXF_WORK=$(pwd) - nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain - - # Try syncing it before we change anything - - name: nf-core sync - run: nf-core --log-file log.txt sync --dir nf-core-testpipeline/ - working-directory: create-lint-wf + NXF_VER: ${{ matrix.NXF_VER }} # Build a module from the template - name: nf-core modules create - run: nf-core --log-file log.txt modules create bpipe --dir nf-core-testpipeline --author @nf-core-bot --label process_low --meta - working-directory: create-lint-wf - - # Run code style linting - - name: run pre-commit - run: pre-commit run --all-files - working-directory: create-lint-wf - - # Update modules to the latest version - - name: nf-core modules update - run: nf-core --log-file log.txt modules update --dir nf-core-testpipeline --all --no-preview + run: nf-core --verbose --log-file log.txt modules create bpipe --dir nf-core-testpipeline --author @nf-core-bot --label process_low --meta working-directory: create-lint-wf # Remove TODO statements @@ -95,63 +75,15 @@ jobs: run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-lint-wf - # Replace zenodo.XXXXXX to pass readme linting - - name: replace zenodo.XXXXXX - run: find nf-core-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; - working-directory: create-lint-wf - - # Run nf-core linting - - name: nf-core lint - run: nf-core --verbose --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned - working-directory: create-lint-wf - # Run the other nf-core commands - name: nf-core list run: nf-core --log-file log.txt list working-directory: create-lint-wf - # - name: nf-core licences - # run: nf-core --log-file log.txt licences nf-core-testpipeline - - name: nf-core schema run: nf-core --log-file log.txt schema build --dir nf-core-testpipeline/ --no-prompts working-directory: create-lint-wf - - name: nf-core bump-version - run: nf-core --log-file log.txt bump-version --dir nf-core-testpipeline/ 1.1 - working-directory: create-lint-wf - - - name: nf-core lint in release mode - run: nf-core --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned --release - working-directory: create-lint-wf - - - name: nf-core modules install - run: nf-core --log-file log.txt modules install fastqc --dir nf-core-testpipeline/ --force - working-directory: create-lint-wf - - - name: nf-core modules install gitlab - run: nf-core --log-file log.txt modules --git-remote https://gitlab.com/nf-core/modules-test.git --branch branch-tester install fastp --dir nf-core-testpipeline/ - working-directory: create-lint-wf - - - name: nf-core modules list local - run: nf-core --log-file log.txt modules list local --dir nf-core-testpipeline/ - working-directory: create-lint-wf - - - name: nf-core modules list remote - run: nf-core --log-file log.txt modules list remote - working-directory: create-lint-wf - - - name: nf-core modules list remote gitlab - run: nf-core --log-file log.txt modules --git-remote https://gitlab.com/nf-core/modules-test.git list remote - working-directory: create-lint-wf - - - name: 
Upload log file artifact - if: ${{ always() }} - uses: actions/upload-artifact@v4 - with: - name: nf-core-log-file-${{ matrix.NXF_VER }} - path: create-lint-wf/log.txt - - name: Cleanup work directory run: sudo rm -rf create-lint-wf if: always() diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 7d586fc3d2..6f27236462 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -39,7 +39,8 @@ jobs: - "template_skip_igenomes.yml" - "template_skip_ci.yml" runner: - - ${{ github.event.inputs.runners || 'self-hosted' }} + # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default + - ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} profile: ["self_hosted_runner"] include: - TEMPLATE: "template_skip_all.yml" @@ -57,11 +58,11 @@ jobs: cd create-lint-wf-template export NXF_WORK=$(pwd) - - uses: actions/checkout@v4 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 name: Check out source-code repository - name: Set up Python 3.11 - uses: actions/setup-python@v5 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: python-version: 3.11 @@ -71,7 +72,7 @@ jobs: pip install . - name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + uses: nf-core/setup-nextflow@b9f764e8ba5c76b712ace14ecbfcef0e40ae2dd8 # v1 with: version: latest-everything @@ -154,7 +155,7 @@ jobs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4 with: name: nf-core-log-file-${{ matrix.TEMPLATE }} path: create-test-lint-wf/artifact_files.tar diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 9301d1224e..0166931be7 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -3,6 +3,9 @@ on: push: branches: - dev + paths-ignore: + - "docs/**" + - "CHANGELOG.md" pull_request: release: types: [published] @@ -26,7 +29,8 @@ env: jobs: RunTestWorkflow: - runs-on: ${{ github.event.inputs.runners || 'self-hosted' }} + # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default + runs-on: ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} env: NXF_ANSI_LOG: false strategy: @@ -41,11 +45,11 @@ jobs: cd create-test-wf export NXF_WORK=$(pwd) - - uses: actions/checkout@v4 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 name: Check out source-code repository - name: Set up Python 3.11 - uses: actions/setup-python@v5 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: python-version: 3.11 @@ -55,7 +59,7 @@ jobs: pip install . 
- name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + uses: nf-core/setup-nextflow@b9f764e8ba5c76b712ace14ecbfcef0e40ae2dd8 # v1 with: version: ${{ matrix.NXF_VER }} @@ -68,7 +72,7 @@ jobs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4 with: name: nf-core-log-file-${{ matrix.NXF_VER }} path: create-test-wf/log.txt diff --git a/.github/workflows/deploy-pypi.yml b/.github/workflows/deploy-pypi.yml index 8d3a154d80..e53d2f2f5a 100644 --- a/.github/workflows/deploy-pypi.yml +++ b/.github/workflows/deploy-pypi.yml @@ -13,11 +13,11 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 name: Check out source-code repository - name: Set up Python 3.11 - uses: actions/setup-python@v5 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: python-version: 3.11 diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml index d9847dd365..8ed52a0582 100644 --- a/.github/workflows/lint-code.yml +++ b/.github/workflows/lint-code.yml @@ -18,10 +18,10 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Set up Python 3.11 - uses: actions/setup-python@v5 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: python-version: 3.11 cache: "pip" diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml index 169a917d83..af11a79990 100644 --- a/.github/workflows/push_dockerhub_dev.yml +++ b/.github/workflows/push_dockerhub_dev.yml @@ -23,7 +23,7 @@ jobs: fail-fast: false steps: - name: Check out code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Build nfcore/tools:dev docker image run: docker build --no-cache . -t nfcore/tools:dev diff --git a/.github/workflows/push_dockerhub_release.yml b/.github/workflows/push_dockerhub_release.yml index 49ce17dd84..0b3f381d86 100644 --- a/.github/workflows/push_dockerhub_release.yml +++ b/.github/workflows/push_dockerhub_release.yml @@ -23,7 +23,7 @@ jobs: fail-fast: false steps: - name: Check out code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Build nfcore/tools:latest docker image run: docker build --no-cache . 
-t nfcore/tools:latest diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 12c8a34000..5351bb985c 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -12,6 +12,7 @@ on: paths-ignore: - "docs/**" - "CHANGELOG.md" + - ".github/**" release: types: [published] workflow_dispatch: @@ -58,7 +59,7 @@ jobs: name: Get test file matrix runs-on: "ubuntu-latest" steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 name: Check out source-code repository - name: List tests @@ -72,7 +73,8 @@ jobs: name: Run ${{matrix.test}} with Python ${{ needs.setup.outputs.python-version }} on ${{ needs.setup.outputs.runner }} needs: [setup, list_tests] if: ${{ needs.setup.outputs.run-tests }} - runs-on: ${{ github.event.inputs.runners || 'self-hosted' }} + # run on self-hosted runners for test_components.py (because of the gitlab branch), based on the input if it is dispatched manually, on github if it is a rerun or on self-hosted by default + runs-on: ${{ matrix.test == 'test_components.py' && 'self-hosted' || (github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted') }} strategy: matrix: ${{ fromJson(needs.list_tests.outputs.tests) }} fail-fast: false # run all tests even if one fails @@ -83,11 +85,11 @@ jobs: cd pytest export NXF_WORK=$(pwd) - - uses: actions/checkout@v4 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 name: Check out source-code repository - name: Set up Python ${{ needs.setup.outputs.python-version }} - uses: actions/setup-python@v5 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: python-version: ${{ needs.setup.outputs.python-version }} cache: "pip" @@ -111,7 +113,7 @@ jobs: run: echo "date=$(date +'%Y-%m')" >> $GITHUB_ENV - name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + uses: nf-core/setup-nextflow@b9f764e8ba5c76b712ace14ecbfcef0e40ae2dd8 # v1 - name: Look if nf-test is already installed and write to env variable id: check-nftest @@ -155,23 +157,24 @@ jobs: fi - name: Upload coverage - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4 with: name: coverage_${{ matrix.test }} path: .coverage coverage: needs: test - runs-on: ${{ github.event.inputs.runners || 'self-hosted' }} + # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default + runs-on: ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} steps: - name: go to subdirectory run: | mkdir -p pytest cd pytest - - uses: actions/checkout@v4 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Set up Python 3.11 - uses: actions/setup-python@v5 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 env: AGENT_TOOLSDIRECTORY: /opt/actions-runner/_work/tools/tools/ with: @@ -188,14 +191,14 @@ jobs: mv .github/.coveragerc . 
- name: Download all artifacts - uses: actions/download-artifact@v4 + uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4 - name: Run coverage run: | coverage combine --keep coverage*/.coverage* coverage report coverage xml - - uses: codecov/codecov-action@f30e4959ba63075080d4f7f90cacc18d9f3fafd7 # v4 + - uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044 # v4 with: files: coverage.xml env: diff --git a/.github/workflows/rich-codex.yml b/.github/workflows/rich-codex.yml index ecbae8ffec..0e8a1c5e9b 100644 --- a/.github/workflows/rich-codex.yml +++ b/.github/workflows/rich-codex.yml @@ -6,16 +6,16 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out the repo - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: python-version: 3.x cache: pip cache-dependency-path: setup.py - name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + uses: nf-core/setup-nextflow@b9f764e8ba5c76b712ace14ecbfcef0e40ae2dd8 # v1 - name: Cache nf-test installation id: cache-software @@ -36,7 +36,7 @@ jobs: run: pip install git+https://github.com/nf-core/tools.git@dev - name: Generate terminal images with rich-codex - uses: ewels/rich-codex@v1 + uses: ewels/rich-codex@8ce988cc253c240a3027ba58e33e47640935dd8b # v1 env: COLUMNS: 100 HIDE_PROGRESS: "true" diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index aeb24839f1..d89e255bfb 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -23,7 +23,7 @@ concurrency: jobs: get-pipelines: - runs-on: ${{ github.event.inputs.runners || 'self-hosted' }} + runs-on: "ubuntu-latest" outputs: matrix: ${{ steps.set-matrix.outputs.matrix }} steps: @@ -38,15 +38,16 @@ jobs: sync: needs: get-pipelines - runs-on: ${{ github.event.inputs.runners || 'self-hosted' }} + # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default + runs-on: ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} strategy: matrix: ${{fromJson(needs.get-pipelines.outputs.matrix)}} fail-fast: false steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 name: Check out nf-core/tools - - uses: actions/checkout@v4 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 name: Check out nf-core/${{ matrix.pipeline }} with: repository: nf-core/${{ matrix.pipeline }} @@ -56,7 +57,7 @@ jobs: fetch-depth: "0" - name: Set up Python 3.11 - uses: actions/setup-python@v5 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: python-version: 3.11 @@ -66,7 +67,7 @@ jobs: pip install . 
- name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + uses: nf-core/setup-nextflow@b9f764e8ba5c76b712ace14ecbfcef0e40ae2dd8 # v1 with: version: "latest-everything" @@ -85,7 +86,7 @@ jobs: - name: Upload sync log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4 with: name: sync_log_${{ matrix.pipeline }} path: sync_log_${{ matrix.pipeline }}.txt diff --git a/.github/workflows/tools-api-docs-dev.yml b/.github/workflows/tools-api-docs-dev.yml deleted file mode 100644 index 91396b2a25..0000000000 --- a/.github/workflows/tools-api-docs-dev.yml +++ /dev/null @@ -1,54 +0,0 @@ -name: nf-core/tools dev API docs -# Run on push and PR to test that docs build -on: - push: - branches: - - dev - paths-ignore: - - "CHANGELOG.md" - pull_request: - paths-ignore: - - "CHANGELOG.md" - release: - types: [published] - -# Cancel if a newer run is started -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true - -jobs: - api-docs: - name: Build & push Sphinx API docs - runs-on: ubuntu-latest - - steps: - - name: Check out source-code repository - uses: actions/checkout@v4 - - - name: Set up Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: 3.11 - - - name: Install python dependencies - run: | - pip install --upgrade pip - pip install -r ./docs/api/requirements.txt - pip install . - - - name: Build HTML docs - run: make --directory ./docs/api html - - - name: Sync dev docs - # Only sync with the website if it was a push from nf-core/tools dev branch - if: github.repository == 'nf-core/tools' && github.event_name == 'push' && github.event.ref == 'refs/heads/dev' - uses: SamKirkland/FTP-Deploy-Action@v4.3.4 - with: - server: ${{ secrets.ftp_server }} - username: ${{ secrets.ftp_username}} - password: ${{ secrets.ftp_password }} - local-dir: "./docs/api/_build/html/" - server-dir: ${{ secrets.ftp_server_old_site_dir }}/dev/ - protocol: ${{ secrets.ftp_protocol }} - port: ${{ secrets.ftp_port }} diff --git a/.github/workflows/tools-api-docs-release.yml b/.github/workflows/tools-api-docs-release.yml deleted file mode 100644 index 2183db3fcf..0000000000 --- a/.github/workflows/tools-api-docs-release.yml +++ /dev/null @@ -1,48 +0,0 @@ -name: nf-core/tools release API docs -on: - release: - types: [published] - -# Cancel if a newer run is started -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true - -jobs: - api-docs: - name: Build & push Sphinx API docs - runs-on: ubuntu-latest - strategy: - matrix: - dir: - - latest - - ${{ github.event.release.tag_name }} - steps: - - name: Check out source-code repository - uses: actions/checkout@v4 - - - name: Set up Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: 3.11 - - - name: Install python dependencies - run: | - pip install --upgrade pip - pip install -r ./docs/api/requirements.txt - pip install . 
- - - name: Build HTML docs - run: make --directory ./docs/api html - - - name: Sync release docs - if: github.repository == 'nf-core/tools' - uses: SamKirkland/FTP-Deploy-Action@v4.3.4 - with: - server: ${{ secrets.ftp_server }} - username: ${{ secrets.ftp_username}} - password: ${{ secrets.ftp_password }} - local-dir: "./docs/api/_build/html/" - server-dir: ${{ secrets.ftp_server_old_site_dir }}/${{ matrix.dir }}/ - protocol: ${{ secrets.ftp_protocol }} - port: ${{ secrets.ftp_port }} diff --git a/.github/workflows/tools-api-docs.yml b/.github/workflows/tools-api-docs.yml new file mode 100644 index 0000000000..4fd99e4a6a --- /dev/null +++ b/.github/workflows/tools-api-docs.yml @@ -0,0 +1,40 @@ +name: generate nf-core/tools API docs +on: + push: + branches: + - dev + paths: + - nf_core/**/*.py + release: + types: [published] + workflow_dispatch: + inputs: + ref_name: + description: "The branch or tag to build the API docs for" + required: true + default: "dev" + +# Cancel if a newer run is started +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + api-docs: + name: trigger API docs build on website repo + runs-on: ubuntu-latest + steps: + - name: trigger API docs build + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7 + with: + github-token: ${{ secrets.nf_core_bot_auth_token }} + script: | + await github.rest.actions.createWorkflowDispatch({ + owner: 'nf-core', + repo: 'website', + workflow_id: 'add-tools-api-docs.yml', + ref: 'main', + inputs: { + "ref_name": "${{ inputs.ref_name || github.ref_name }}" + }, + }) diff --git a/.github/workflows/update_components_template.yml b/.github/workflows/update_components_template.yml new file mode 100644 index 0000000000..f357bed03b --- /dev/null +++ b/.github/workflows/update_components_template.yml @@ -0,0 +1,46 @@ +name: Update Modules Template + +on: + schedule: + - cron: "0 0 * * *" + workflow_dispatch: + +jobs: + update_modules: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + + - name: Set up Python + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 + with: + python-version: "3.x" + + - name: Install nf-core + run: pip install nf-core + + - name: Update modules + run: nf-core modules update --all + working-directory: nf-core/pipeline-template + + - name: Update subworkflows + run: nf-core subworkflows update --all + working-directory: nf-core/pipeline-template + + # Commit the changes + - name: Commit changes + run: | + git config user.email "core@nf-co.re" + git config user.name "nf-core-bot" + git add . 
+          git status
+          git commit -m "[automated] Fix code linting"
+
+      # Open a new PR to dev with the changes
+      - name: Create PR
+        run: |
+          git checkout -b update-modules
+          git push origin update-modules
+          gh pr create --title "Update modules in template" --body "This PR updates the modules in the pipeline template" --base dev --head update-modules
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1fdda4978a..03fbb7bedf 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.1.15
+    rev: v0.2.2
     hooks:
       - id: ruff # linter
         args: [--fix, --exit-non-zero-on-fix] # sort imports and fix
diff --git a/.prettierignore b/.prettierignore
index a55074abfb..b923532bd7 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -6,6 +6,7 @@ testing
 nf_core/module-template/meta.yml
 nf_core/module-template/tests/tags.yml
 nf_core/subworkflow-template/tests/tags.yml
+nf_core/pipeline-template/nextflow_schema.json
 # don't run on things handled by ruff
 *.py
 *.pyc
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 70cf2de07a..a9d83051f3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,46 @@
 # nf-core/tools: Changelog

+## [v2.13 - Tin Puppy](https://github.com/nf-core/tools/releases/tag/2.13) - [2024-02-20]
+
+### Template
+
+- Add empty line in README.md to fix badges. ([#2729](https://github.com/nf-core/tools/pull/2729))
+- Replace automatic branch detection in `nf-core download` CI test with hardcoded `dev` and input. ([#2727](https://github.com/nf-core/tools/pull/2727))
+- Add GitHub Action to automatically clean up ubuntu-latest runners, fixing errors caused by runners running out of disk space ([#2755](https://github.com/nf-core/tools/issues/2755))
+- Fix GitHub Actions CI and Linting badge links ([#2757](https://github.com/nf-core/tools/pull/2757))
+- Add hashtags to release announcement on Mastodon ([#2761](https://github.com/nf-core/tools/pull/2761))
+- Update fastqc and multiqc in the template ([#2776](https://github.com/nf-core/tools/pull/2776))
+- Template refactoring: remove the `lib` directory and use nf-core subworkflows ([#2736](https://github.com/nf-core/tools/pull/2736))
+- Use nf-validation to create an input channel from a sample sheet ([#2736](https://github.com/nf-core/tools/pull/2736))
+
+### Linting
+
+- Make create-lint-wf composable ([#2733](https://github.com/nf-core/tools/pull/2733))
+- Add looser comparison when checking pipeline logos ([#2744](https://github.com/nf-core/tools/pull/2744))
+- Handle multiple aliases in module imports correctly during linting ([#2762](https://github.com/nf-core/tools/pull/2762))
+- Switch to markdown-based API and error docs ([#2758](https://github.com/nf-core/tools/pull/2758))
+
+### Modules
+
+- Handle dirty local module repos by force-checking out commits and branches if needed ([#2734](https://github.com/nf-core/tools/pull/2734))
+- Patch: handle file not found when it is an added file to a module ([#2771](https://github.com/nf-core/tools/pull/2771))
+- Handle symlinks when migrating pytest ([#2770](https://github.com/nf-core/tools/pull/2770))
+- Add `--profile` parameter to nf-test command ([#2767](https://github.com/nf-core/tools/pull/2767))
+
+### General
+
+- Fix ignoring changes in partially templated files (e.g.
`.gitignore`) ([#2722](https://github.com/nf-core/tools/pull/2722)) +- update ruff to 0.2.0 and add it to pre-commit step ([#2725](https://github.com/nf-core/tools/pull/2725)) +- Update codecov/codecov-action digest to e0b68c6 ([#2728](https://github.com/nf-core/tools/pull/2728)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.2.1 ([#2730](https://github.com/nf-core/tools/pull/2730)) +- Update python:3.11-slim Docker digest to 2a746e2 ([#2743](https://github.com/nf-core/tools/pull/2743)) +- Update actions/setup-python action to v5 ([#2739](https://github.com/nf-core/tools/pull/2739)) +- Update gitpod/workspace-base Docker digest to 45e7617 ([#2747](https://github.com/nf-core/tools/pull/2747)) +- chore(deps): pin jlumbroso/free-disk-space action to 54081f1 ([#2756](https://github.com/nf-core/tools/pull/2756)) +- chore(deps): update actions/github-script action to v7 ([#2766](https://github.com/nf-core/tools/pull/2766)) +- chore(deps): update pre-commit hook astral-sh/ruff-pre-commit to v0.2.2 ([#2769](https://github.com/nf-core/tools/pull/2769)) +- Update gitpod/workspace-base Docker digest to 728e1fa ([#2780](https://github.com/nf-core/tools/pull/2780)) + ## [v2.12.1 - Aluminium Wolf - Patch](https://github.com/nf-core/tools/releases/tag/2.12.1) - [2024-02-01] ### Linting @@ -20,7 +61,7 @@ ### Template -- Add a Github Action Workflow to the pipeline template that tests a successful download with 'nf-core download' ([#2618](https://github.com/nf-core/tools/pull/2618)) +- Add a Github Action Workflow to the pipeline template that tests a successful download with `nf-core download` ([#2618](https://github.com/nf-core/tools/pull/2618)) - Use `pre-commit` to lint files in GitHub CI ([#2635](https://github.com/nf-core/tools/pull/2635)) - Use pdiff also on gitpod for nf-test ([#2640](https://github.com/nf-core/tools/pull/2640)) - switch to new image syntax in readme ([#2645](https://github.com/nf-core/tools/pull/2645)) diff --git a/Dockerfile b/Dockerfile index 9c9770c25f..62431be140 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.11-slim +FROM python:3.11-slim@sha256:ce81dc539f0aedc9114cae640f8352fad83d37461c24a3615b01f081d0c0583a LABEL authors="phil.ewels@scilifelab.se,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for the nfcore tools" diff --git a/README.md b/README.md index 0adf04d4c6..cf0b01d210 100644 --- a/README.md +++ b/README.md @@ -1142,6 +1142,7 @@ You can update subworkflows installed from a remote repository in your pipeline working_dir: tmp/nf-core-nextbigthing before_command: > echo "repository_type: pipeline" >> .nf-core.yml +timeout: 30 --> ![`nf-core subworkflows update --all --no-preview`](docs/images/nf-core-subworkflows-update.svg) diff --git a/docs/api/Makefile b/docs/api/Makefile deleted file mode 100644 index ab30a5051e..0000000000 --- a/docs/api/Makefile +++ /dev/null @@ -1,19 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -SOURCEDIR = _src -BUILDDIR = _build - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
-%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/api/_src/api/index.md b/docs/api/_src/api/index.md index 9050280fd3..a1863f7e39 100644 --- a/docs/api/_src/api/index.md +++ b/docs/api/_src/api/index.md @@ -1,9 +1,8 @@ # API Reference ```{toctree} -:caption: 'Tests:' :glob: true -:maxdepth: 2 +:maxdepth: 1 * ``` diff --git a/docs/api/_src/index.md b/docs/api/_src/index.md index cb455986d7..17bf2407f5 100644 --- a/docs/api/_src/index.md +++ b/docs/api/_src/index.md @@ -20,5 +20,3 @@ This documentation is for the `nf-core/tools` package. - [Module code lint tests](module_lint_tests/index.md) (run by `nf-core modules lint`) - [Subworkflow code lint tests](subworkflow_lint_tests/index.md) (run by `nf-core subworkflows lint`) - [nf-core/tools Python package API reference](api/index.md) -- {ref}`genindex` -- {ref}`modindex` diff --git a/docs/api/_src/module_lint_tests/environment_yml.md b/docs/api/_src/module_lint_tests/environment_yml.md new file mode 100644 index 0000000000..e931f9df05 --- /dev/null +++ b/docs/api/_src/module_lint_tests/environment_yml.md @@ -0,0 +1,5 @@ +# environment_yml + +```{eval-rst} +.. automethod:: nf_core.modules.lint.ModuleLint.environment_yml +``` diff --git a/docs/api/_src/module_lint_tests/index.md b/docs/api/_src/module_lint_tests/index.md index f889abf73c..dee84d06d8 100644 --- a/docs/api/_src/module_lint_tests/index.md +++ b/docs/api/_src/module_lint_tests/index.md @@ -1,9 +1,8 @@ # Module lint tests ```{toctree} -:caption: 'Tests:' :glob: true -:maxdepth: 2 +:maxdepth: 1 * ``` diff --git a/docs/api/_src/pipeline_lint_tests/index.md b/docs/api/_src/pipeline_lint_tests/index.md index 0cf9bc1d21..c631610d64 100644 --- a/docs/api/_src/pipeline_lint_tests/index.md +++ b/docs/api/_src/pipeline_lint_tests/index.md @@ -1,9 +1,8 @@ # Pipeline lint tests ```{toctree} -:caption: 'Tests:' :glob: true -:maxdepth: 2 +:maxdepth: 1 * ``` diff --git a/docs/api/_src/subworkflow_lint_tests/index.md b/docs/api/_src/subworkflow_lint_tests/index.md index 6eed715c52..0ecf590c0d 100644 --- a/docs/api/_src/subworkflow_lint_tests/index.md +++ b/docs/api/_src/subworkflow_lint_tests/index.md @@ -1,9 +1,8 @@ # Subworkflow lint tests ```{toctree} -:caption: 'Tests:' :glob: true -:maxdepth: 2 +:maxdepth: 1 * ``` diff --git a/docs/api/generate-api-docs.sh b/docs/api/generate-api-docs.sh new file mode 100644 index 0000000000..6b3c3abfa3 --- /dev/null +++ b/docs/api/generate-api-docs.sh @@ -0,0 +1,94 @@ +#!/bin/bash + +# allow --force option and also a --release option (which takes a release name, or "all") +force=false +releases=() + +while [[ $# -gt 0 ]]; do + case $1 in + -f | --force ) + force=true + ;; + -r | --release ) + shift + releases+=("$1") + ;; + -o | --output ) + shift + output_dir="$1" + ;; + * ) + echo "Invalid argument: $1" + exit 1 + ;; + esac + shift +done + + +# Set the output directory if not set +if [[ -z "$output_dir" ]]; then + output_dir="../src/content/tools/docs" +fi + +# if no release is specified, use all releases +if [[ ${#releases[@]} -eq 0 ]]; then + releases=($(git tag)) + # add 'dev' to the list of releases + releases+=("dev") +fi + +# Loop through each release +for release in "${releases[@]}"; do + # Checkout the release + git checkout "$release" + echo "_________________________" + echo "Generating docs for release: $release" + echo "_________________________" + git checkout docs/api + pip install -r docs/api/requirements.txt --quiet + # add the napoleon extension to the sphinx conf.py + sed -i 
's/^extensions = \[/extensions = \[\n "sphinx_markdown_builder",/' docs/api/_src/conf.py + + # run docs/api/make_lint_md.py if it exists + # if [[ -f "docs/api/make_lint_md.py" ]]; then + # python docs/api/make_lint_md.py + # fi + + find nf_core -name "*.py" | while IFS= read -r file; do + # echo "Processing $file" + + # replace ..tip:: with note in the python docstrings due to missing directive in the markdown builder + sed -i 's/^\(\s*\)\.\. tip::/\1\.\. note::/g' "$file" + + done + + # fix syntax in lint/merge_markers.py + sed -i 's/>>>>>>> or <<<<<<``>>>>>>>`` or ``<<<<<<<``/g' nf_core/lint/merge_markers.py + # remove markdown files if --force is set + if [[ "$force" = true ]]; then + echo -e "\n\e[31mRemoving $output_dir/$release because of '--force'\e[0m" + rm -rf "$output_dir/$release" + fi + sphinx-build -b markdown docs/api/_src "$output_dir/$release" + + # undo all changes + git restore . + + git checkout - + # replace :::{seealso} with :::tip in the markdown files + find "$output_dir/$release" -name "*.md" -exec sed -i 's/:::{seealso}/:::tip/g' {} \; + i=1 + sp="/-\|" # spinner + find "$output_dir/$release" -name "*.md" | while IFS= read -r file; do + # echo "Processing $file" + printf "\b${sp:i++%${#sp}:1}" + node docs/api/remark.mjs "$file" + done + # remove empty files + find "$output_dir/$release" -name "*.md" -size 0 -delete + # remove `.doctrees` directory + rm -rf "$output_dir/$release/.doctrees" + # run pre-commit to fix any formatting issues on the generated markdown files + pre-commit run --files "$output_dir/$release" +done diff --git a/docs/api/remark.mjs b/docs/api/remark.mjs new file mode 100644 index 0000000000..9274321de6 --- /dev/null +++ b/docs/api/remark.mjs @@ -0,0 +1,93 @@ +import fs from "fs"; +import { remark } from "remark"; +import { visit } from "unist-util-visit"; + +function remarkDirectives() { + return transformer; + + function transformer(tree) { + visit(tree, "heading", visitor); + visit(tree, "link", visitor); + } + + function visitor(node, index, parent) { + if (node.depth === 4) { + if (["note", "warning"].includes(node.children[0].value?.toLowerCase())) { + const type = node.children[0].value.toLowerCase(); + parent.children.splice(index, 1); + parent.children[index].children[0].value = `:::${type}\n${parent.children[index].children[0].value}`; + // if second to list parent.children[index].children ends with ":", check if the next node is a code block, if so, add the code block as a child to the current node + if (parent.children[index].children.slice(-1)[0]?.value?.trim().endsWith(":")) { + if (parent.children[index + 1].type === "code") { + parent.children[index].children.slice(-1)[0].value += "\n"; + parent.children[index].children.push(parent.children[index + 1]); + parent.children.splice(index + 1, 1); + } + } + parent.children[index].children.push({ type: "text", value: "\n:::" }); + } else if (node.children[0].type === "emphasis") { + node.children[0].children.map((child) => { + if (child.type === "text") { + child.type = "inlineCode"; + child.value = child.value?.trim() + "{:python}"; + } + }); + // convert the rest of the heading to inline code + node.children.slice(1).map((child) => { + if (child.type === "text") { + child.type = "inlineCode"; + child.value = child.value?.trim() + "{:python}"; + } + if (child.type === "link") { + child.children.map((child) => { + if (child.type === "text") { + child.type = "inlineCode"; + child.value = child.value?.trim() + "{:python}"; + } + }); + } + }); + } else if (node.children[0].type !== 
"inlineCode") { + node.children[0] = { + type: "inlineCode", + value: node.children[0].value?.trim() + "{:python}", + }; + } + } else if (node.depth === 3) { + node.children.map((child) => { + if (child.type === "text") { + child.type = "inlineCode"; + child.value = child.value?.trim() + "{:python}"; + } + if (child.type === "link") { + child.children.map((child) => { + if (child.type === "text") { + child.type = "inlineCode"; + child.value = child.value?.trim() + "{:python}"; + } + }); + } + if (child.type === "emphasis") { + child.children.map((child) => { + if (child.type === "text") { + child.type = "inlineCode"; + child.value = child.value?.trim() + "{:python}"; + } + }); + } + }); + } + if (node.type === "link") { + node.url = node.url.replace(".md", ""); + } + } +} + +let markdown = fs.readFileSync(process.argv[2]); + +remark() + .use(remarkDirectives) + .process(markdown, function (err, file) { + if (err) throw err; + fs.writeFileSync(process.argv[2], String(file)); + }); diff --git a/docs/images/nf-core-bump-version.svg b/docs/images/nf-core-bump-version.svg index af75211d65..838d1cfc7f 100644 --- a/docs/images/nf-core-bump-version.svg +++ b/docs/images/nf-core-bump-version.svg @@ -19,122 +19,122 @@ font-weight: 700; } - .terminal-3212272693-matrix { + .terminal-978478039-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3212272693-title { + .terminal-978478039-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3212272693-r1 { fill: #c5c8c6 } -.terminal-3212272693-r2 { fill: #98a84b } -.terminal-3212272693-r3 { fill: #9a9b99 } -.terminal-3212272693-r4 { fill: #608ab1 } -.terminal-3212272693-r5 { fill: #d0b344 } -.terminal-3212272693-r6 { fill: #cc555a } + .terminal-978478039-r1 { fill: #c5c8c6 } +.terminal-978478039-r2 { fill: #98a84b } +.terminal-978478039-r3 { fill: #9a9b99 } +.terminal-978478039-r4 { fill: #608ab1 } +.terminal-978478039-r5 { fill: #d0b344 } +.terminal-978478039-r6 { fill: #cc555a } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -146,41 +146,41 @@ - + - - $ nf-core bump-version 1.1 - - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' - - nf-core/tools version 2.12.1 - https://nf-co.re - - -INFO Changing version number from '1.0dev' to '1.1' -INFO Updated version in 'nextflow.config' - - version = '1.0dev' - + version = '1.1' - - -INFO Updated version in 'assets/multiqc_config.yml' - - This report has been generated by the <a -href="https://github.com/nf-core/nextbigthing/tree/dev" target="_blank">nf-core/nextbigthing</a> - + This report has been generated by the <a -href="https://github.com/nf-core/nextbigthing/releases/tag/1.1" -target="_blank">nf-core/nextbigthing</a> - - -INFO Updated version in 'assets/multiqc_config.yml' - - <a href="https://nf-co.re/nextbigthing/dev/docs/output" -target="_blank">documentation</a>. - + <a href="https://nf-co.re/nextbigthing/1.1/docs/output" -target="_blank">documentation</a>. - - + + $ nf-core bump-version 1.1 + + ,--./,-. 
+ ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' + + nf-core/tools version 2.13 - https://nf-co.re + + +INFO Changing version number from '1.0dev' to '1.1' +INFO Updated version in 'nextflow.config' + - version = '1.0dev' + + version = '1.1' + + +INFO Updated version in 'assets/multiqc_config.yml' + - This report has been generated by the <a +href="https://github.com/nf-core/nextbigthing/tree/dev" target="_blank">nf-core/nextbigthing</a> + + This report has been generated by the <a +href="https://github.com/nf-core/nextbigthing/releases/tag/1.1" +target="_blank">nf-core/nextbigthing</a> + + +INFO Updated version in 'assets/multiqc_config.yml' + - <a href="https://nf-co.re/nextbigthing/dev/docs/output" +target="_blank">documentation</a>. + + <a href="https://nf-co.re/nextbigthing/1.1/docs/output" +target="_blank">documentation</a>. + + diff --git a/docs/images/nf-core-create-logo.svg b/docs/images/nf-core-create-logo.svg index 5f8bf56128..ac5d872b94 100644 --- a/docs/images/nf-core-create-logo.svg +++ b/docs/images/nf-core-create-logo.svg @@ -19,62 +19,62 @@ font-weight: 700; } - .terminal-2549025252-matrix { + .terminal-189794694-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2549025252-title { + .terminal-189794694-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2549025252-r1 { fill: #c5c8c6 } -.terminal-2549025252-r2 { fill: #98a84b } -.terminal-2549025252-r3 { fill: #9a9b99 } -.terminal-2549025252-r4 { fill: #608ab1 } -.terminal-2549025252-r5 { fill: #d0b344 } -.terminal-2549025252-r6 { fill: #98729f } + .terminal-189794694-r1 { fill: #c5c8c6 } +.terminal-189794694-r2 { fill: #98a84b } +.terminal-189794694-r3 { fill: #9a9b99 } +.terminal-189794694-r4 { fill: #608ab1 } +.terminal-189794694-r5 { fill: #d0b344 } +.terminal-189794694-r6 { fill: #98729f } - + - + - + - + - + - + - + - + - + - + - + - + @@ -86,21 +86,21 @@ - + - - $ nf-core create-logo nextbigthing - - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' - - nf-core/tools version 2.12.1 - https://nf-co.re - - -INFO Created logo: nf-core-nextbigthing_logo_light.png + + $ nf-core create-logo nextbigthing + + ,--./,-. 
+ ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' + + nf-core/tools version 2.13 - https://nf-co.re + + +INFO Created logo: nf-core-nextbigthing_logo_light.png diff --git a/docs/images/nf-core-create.svg b/docs/images/nf-core-create.svg index 02d942a7f9..68cae552b3 100644 --- a/docs/images/nf-core-create.svg +++ b/docs/images/nf-core-create.svg @@ -19,104 +19,104 @@ font-weight: 700; } - .terminal-4211528375-matrix { + .terminal-2074661465-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4211528375-title { + .terminal-2074661465-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4211528375-r1 { fill: #c5c8c6 } -.terminal-4211528375-r2 { fill: #98a84b } -.terminal-4211528375-r3 { fill: #9a9b99 } -.terminal-4211528375-r4 { fill: #608ab1 } -.terminal-4211528375-r5 { fill: #d0b344 } -.terminal-4211528375-r6 { fill: #98729f } -.terminal-4211528375-r7 { fill: #ff2c7a } -.terminal-4211528375-r8 { fill: #98a84b;font-weight: bold } -.terminal-4211528375-r9 { fill: #1984e9;text-decoration: underline; } + .terminal-2074661465-r1 { fill: #c5c8c6 } +.terminal-2074661465-r2 { fill: #98a84b } +.terminal-2074661465-r3 { fill: #9a9b99 } +.terminal-2074661465-r4 { fill: #608ab1 } +.terminal-2074661465-r5 { fill: #d0b344 } +.terminal-2074661465-r6 { fill: #98729f } +.terminal-2074661465-r7 { fill: #ff2c7a } +.terminal-2074661465-r8 { fill: #98a84b;font-weight: bold } +.terminal-2074661465-r9 { fill: #1984e9;text-decoration: underline; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -128,34 +128,34 @@ - + - - $ nf-core create -n nextbigthing -d "This pipeline analyses data from the next big omics technique" --a "Big Steve" --plain - - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' - - nf-core/tools version 2.12.1 - https://nf-co.re - - -INFO Creating new nf-core pipeline: 'nf-core/nextbigthing' -INFO Initialising pipeline git repository -INFO Done. Remember to add a remote and push to GitHub: - cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing - git remote add origin git@github.com:USERNAME/REPO_NAME.git - git push --all origin -INFO This will also push your newly created dev branch and the TEMPLATE branch for syncing. -INFO !!!!!! IMPORTANT !!!!!! - -If you are interested in adding your pipeline to the nf-core community, -PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE! - -Please read: https://nf-co.re/developers/adding_pipelines#join-the-community + + $ nf-core create -n nextbigthing -d "This pipeline analyses data from the next big omics technique" +-a "Big Steve" --plain + + ,--./,-. + ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' + + nf-core/tools version 2.13 - https://nf-co.re + + +INFO Creating new nf-core pipeline: 'nf-core/nextbigthing' +INFO Initialising pipeline git repository +INFO Done. Remember to add a remote and push to GitHub: + cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing + git remote add origin git@github.com:USERNAME/REPO_NAME.git + git push --all origin +INFO This will also push your newly created dev branch and the TEMPLATE branch for syncing. +INFO !!!!!! IMPORTANT !!!!!! 
+ +If you are interested in adding your pipeline to the nf-core community, +PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE! + +Please read: https://nf-co.re/developers/adding_pipelines#join-the-community diff --git a/docs/images/nf-core-download.svg b/docs/images/nf-core-download.svg index 3d306efb25..e2ed5b22b4 100644 --- a/docs/images/nf-core-download.svg +++ b/docs/images/nf-core-download.svg @@ -19,86 +19,86 @@ font-weight: 700; } - .terminal-2749558223-matrix { + .terminal-2088037745-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2749558223-title { + .terminal-2088037745-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2749558223-r1 { fill: #c5c8c6 } -.terminal-2749558223-r2 { fill: #98a84b } -.terminal-2749558223-r3 { fill: #9a9b99 } -.terminal-2749558223-r4 { fill: #608ab1 } -.terminal-2749558223-r5 { fill: #d0b344 } -.terminal-2749558223-r6 { fill: #cc555a } + .terminal-2088037745-r1 { fill: #c5c8c6 } +.terminal-2088037745-r2 { fill: #98a84b } +.terminal-2088037745-r3 { fill: #9a9b99 } +.terminal-2088037745-r4 { fill: #608ab1 } +.terminal-2088037745-r5 { fill: #d0b344 } +.terminal-2088037745-r6 { fill: #cc555a } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -110,29 +110,29 @@ - + - - $ nf-core download rnaseq -r 3.8 --outdir nf-core-rnaseq -x none -s none -d - - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' - - nf-core/tools version 2.12.1 - https://nf-co.re - - -WARNING Could not find GitHub authentication token. Some API requests may fail. -INFO Saving 'nf-core/rnaseq' - Pipeline revision: '3.8' - Use containers: 'none' - Container library: 'quay.io' - Output directory: 'nf-core-rnaseq' - Include default institutional configuration: 'True' -INFO Downloading centralised configs from GitHub -INFO Downloading workflow files from GitHub + + $ nf-core download rnaseq -r 3.8 --outdir nf-core-rnaseq -x none -s none -d + + ,--./,-. + ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' + + nf-core/tools version 2.13 - https://nf-co.re + + +WARNING Could not find GitHub authentication token. Some API requests may fail. 
+INFO     Saving 'nf-core/rnaseq'
+          Pipeline revision: '3.8'
+          Use containers: 'none'
+          Container library: 'quay.io'
+          Output directory: 'nf-core-rnaseq'
+          Include default institutional configuration: 'True'
+INFO     Downloading centralised configs from GitHub
+INFO     Downloading workflow files from GitHub
diff --git a/docs/images/nf-core-launch-rnaseq.svg b/docs/images/nf-core-launch-rnaseq.svg
index 42ac222ed3..6bbbbaf099 100644
[regenerated terminal screenshot for "nf-core launch rnaseq -r 3.8.1": banner version string updated from 2.12.1 to 2.13; output otherwise unchanged]
diff --git a/docs/images/nf-core-licences.svg b/docs/images/nf-core-licences.svg
index e7971f9883..35962d8e4f 100644
[regenerated terminal screenshot for "nf-core licences deepvariant": version string updated from 2.12.1 to 2.13; the new capture ends after "Fetching licence information for 8 tools", so the conda-only warning and the licence table (lbzip2 GPL v3; deepvariant, htslib, picard, pip, samtools MIT; python PSF; bzip2 bzip2) are no longer visible]
diff --git a/docs/images/nf-core-lint.svg b/docs/images/nf-core-lint.svg
index 8bab8f6e57..74e41870d3 100644
[regenerated terminal screenshot for "nf-core lint": version string updated from 2.12.1 to 2.13; the fastqc "New version available" module warning panel is gone and the summary now reads 188 tests passed / 1 ignored / 1 warning / 0 failed (previously 198 passed / 1 ignored / 2 warnings / 0 failed)]
diff --git a/docs/images/nf-core-list-rna.svg b/docs/images/nf-core-list-rna.svg
index d94144af44..ff51d523e4 100644
[regenerated terminal screenshot for "nf-core list rna rna-seq": version string updated from 2.12.1 to 2.13; pipeline table refreshed with current row order, star counts and release ages (e.g. rnaseq 724 -> 735 stars)]
diff --git a/docs/images/nf-core-list-stars.svg b/docs/images/nf-core-list-stars.svg
index 8f9f0d1072..4b020b0dc7 100644
[regenerated terminal screenshot for "nf-core list -s stars": version string updated from 2.12.1 to 2.13; star counts and latest releases refreshed (rnaseq 735, sarek 312, mag 170 at 2.5.4, chipseq 160)]
diff --git a/docs/images/nf-core-list.svg b/docs/images/nf-core-list.svg
index bbcd1d7782..aa93f830ee 100644
[regenerated terminal screenshot for "nf-core list": version string updated from 2.12.1 to 2.13; the first visible rows are now fetchngs, phageannotator, riboseq, bacass and rnasplice (previously raredisease, funcscan, circdna, mhcquant, fetchngs)]
diff --git a/docs/images/nf-core-modules-bump-version.svg b/docs/images/nf-core-modules-bump-version.svg
index 0b8f9dbeae..c886806891 100644
[regenerated terminal screenshot for "nf-core modules bump-versions fastqc": version string updated from 2.12.1 to 2.13; the "Module version up to date: fastqc" table is unchanged]
diff --git a/docs/images/nf-core-modules-create.svg b/docs/images/nf-core-modules-create.svg
index 19855f1cc4..295289aaca 100644
[regenerated terminal screenshot for "nf-core modules create fastqc --author @nf-core-bot --label process_low --meta --force": version string updated from 2.12.1 to 2.13; the new capture ends after "Using Bioconda package: 'bioconda::fastqc=0.12.1'", so the container-lookup warning and the list of created files are no longer visible]
diff --git a/docs/images/nf-core-modules-info.svg b/docs/images/nf-core-modules-info.svg
index 13794a61df..a35ecdec75 100644
[regenerated terminal screenshot for "nf-core modules info abacas": version string updated from 2.12.1 to 2.13; the abacas input/output tables and installation command are unchanged]
diff --git a/docs/images/nf-core-modules-install.svg b/docs/images/nf-core-modules-install.svg
index 0f7d460ad9..d0ac5b2946 100644
[regenerated terminal screenshot for "nf-core modules install abacas": version string updated from 2.12.1 to 2.13; the include-statement hint is unchanged]
diff --git a/docs/images/nf-core-modules-lint.svg b/docs/images/nf-core-modules-lint.svg
index e82458136e..c53b1dfdbc 100644
[regenerated terminal screenshot for "nf-core modules lint multiqc": version string updated from 2.12.1 to 2.13; the new capture ends after "Linting module: 'multiqc'", so the input/output counts and the 57-tests-passed summary are no longer visible]
diff --git a/docs/images/nf-core-modules-list-local.svg b/docs/images/nf-core-modules-list-local.svg
index 2d4759e444..4b06362a82 100644
[regenerated terminal screenshot for "nf-core modules list local": version string updated from 2.12.1 to 2.13; the table now lists fastqc ("Update FASTQC to use unique names for snapshots (#4825)", 2024-01-31) and multiqc ("Bump Multiqc to 1.20 (#4910)", 2024-02-14) instead of custom/dumpsoftwareversions and an older fastqc commit]
diff --git a/docs/images/nf-core-modules-list-remote.svg b/docs/images/nf-core-modules-list-remote.svg
index 6848284ec6..aca2dc7d9b 100644
[regenerated terminal screenshot for "nf-core modules list remote": version string updated from 2.12.1 to 2.13; the truncated module listing (abacas, abricate/run, abricate/summary, ...) is unchanged]
diff --git a/docs/images/nf-core-modules-patch.svg b/docs/images/nf-core-modules-patch.svg
index ca6445e5f4..84ad24867e 100644
[regenerated terminal screenshot for "nf-core modules patch fastqc": version string updated from 2.12.1 to 2.13; the fastqc patch diff (label 'process_medium' -> 'process_low') is unchanged, only the order of the "is unchanged" INFO lines differs]
diff --git a/docs/images/nf-core-modules-remove.svg b/docs/images/nf-core-modules-remove.svg
index c8ac631ad5..c78034e35c 100644
[regenerated terminal screenshot for "nf-core modules remove abacas": version string updated from 2.12.1 to 2.13]
diff --git a/docs/images/nf-core-modules-test.svg b/docs/images/nf-core-modules-test.svg
index 9520b174af..daff523354 100644
[regenerated terminal screenshot for "nf-core modules test fastqc --no-prompts": version string updated from 2.12.1 to 2.13]
diff --git a/docs/images/nf-core-modules-update.svg b/docs/images/nf-core-modules-update.svg
index fd13131174..ddc5facf65 100644
[regenerated terminal screenshot for "nf-core modules update --all --no-preview": version string updated from 2.12.1 to 2.13; abacas, fastqc and multiqc are all reported as already up to date, and custom/dumpsoftwareversions no longer appears in the output]
diff --git a/docs/images/nf-core-schema-build.svg b/docs/images/nf-core-schema-build.svg
index 6875c1282b..cf0ab90005 100644
[regenerated terminal screenshot for "nf-core schema build --no-prompts": version string updated from 2.12.1 to 2.13; the schema messages (30 params found, 31 params written) are unchanged]
diff --git a/docs/images/nf-core-schema-lint.svg b/docs/images/nf-core-schema-lint.svg
index 751f284e59..fc3b8f5f31 100644
[regenerated terminal screenshot for "nf-core schema lint": version string updated from 2.12.1 to 2.13; the validation messages (31 params) are unchanged]
diff --git a/docs/images/nf-core-schema-validate.svg b/docs/images/nf-core-schema-validate.svg
index 199c1cb0ea..c5bbd59a37 100644
[regenerated terminal screenshot for "nf-core schema validate nf-core-rnaseq/3_8 nf-params.json": version string updated from 2.12.1 to 2.13; the validation messages (93 params) are unchanged]
diff --git a/docs/images/nf-core-subworkflows-create.svg b/docs/images/nf-core-subworkflows-create.svg
index 8ce6f0cc8d..e39d489764 100644
[regenerated terminal screenshot for "nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --force": version string updated from 2.12.1 to 2.13; the list of created files is unchanged]
diff --git a/docs/images/nf-core-subworkflows-info.svg b/docs/images/nf-core-subworkflows-info.svg
index 85cfb8a2f1..5e792aaf27 100644
[regenerated terminal screenshot for "nf-core subworkflows info bam_rseqc": version string updated from 2.12.1 to 2.13; the bam_rseqc output channels are now shown without the ch_ prefix (readduplication_rscript, tin_txt)]
diff --git a/docs/images/nf-core-subworkflows-install.svg b/docs/images/nf-core-subworkflows-install.svg
index d282df2f7f..84c9fe6ac9 100644
[regenerated terminal screenshot for "nf-core subworkflows install bam_rseqc": version string updated from 2.12.1 to 2.13]
diff --git a/docs/images/nf-core-subworkflows-lint.svg b/docs/images/nf-core-subworkflows-lint.svg
index aa67f0ca26..3199189c16 100644
[regenerated terminal screenshot for "nf-core subworkflows lint bam_stats_samtools": version string updated from 2.12.1 to 2.13; the 14 TODO-string subworkflow test warnings and the summary (42 passed / 14 warnings / 0 failed) are unchanged]
diff --git a/docs/images/nf-core-subworkflows-list-local.svg b/docs/images/nf-core-subworkflows-list-local.svg
index 2b72e839d0..04a21b7708 100644
[regenerated terminal screenshot for "nf-core subworkflows list local": version string updated from 2.12.1 to 2.13; instead of "No nf-core subworkflows found in '.'" the output now shows a table of installed subworkflows (utils_nextflow_pipeline, utils_nfcore_pipeline) with their version SHAs, commit messages and dates]
diff --git a/docs/images/nf-core-subworkflows-list-remote.svg b/docs/images/nf-core-subworkflows-list-remote.svg
index 5158fb8c88..28754bea25 100644
[regenerated terminal screenshot for "nf-core subworkflows list remote": the previous capture showed version 2.12.1 and a truncated remote subworkflow listing (bam_cnv_wisecondorx, bam_create_som_pon_gatk, bam_dedup_stats_samtools_umitools, ...); the updated capture follows]
+
+  $ nf-core subworkflows list remote
+
+                                          ,--./,-.
+ ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' + + nf-core/tools version 2.13 - https://nf-co.re + + + +INFO Subworkflows available from https://github.com/nf-core/modules.git(master): + +┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +┃Subworkflow Name ┃ +┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +│ bam_cnv_wisecondorx │ +│ bam_create_som_pon_gatk │ +│ bam_dedup_stats_samtools_umitools │ +│ bam_docounts_contamination_angsd │ +│ bam_markduplicates_picard │ +│ bam_markduplicates_samtools │ +│ bam_ngscheckmate │ +│ bam_qc_picard │ +│ bam_rseqc │ +[..truncated..] diff --git a/docs/images/nf-core-subworkflows-remove.svg b/docs/images/nf-core-subworkflows-remove.svg index fd9841a27a..91bce3c2ef 100644 --- a/docs/images/nf-core-subworkflows-remove.svg +++ b/docs/images/nf-core-subworkflows-remove.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - + - + - - $ nf-core subworkflows remove bam_rseqc - - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' - - nf-core/tools version 2.12.1 - https://nf-co.re - - - -INFO Removed files for 'rseqc/bamstat' and its dependencies 'rseqc/bamstat'. -INFO Removed files for 'rseqc/inferexperiment' and its dependencies 'rseqc/inferexperiment'. -INFO Removed files for 'rseqc/innerdistance' and its dependencies 'rseqc/innerdistance'. -INFO Removed files for 'rseqc/junctionannotation' and its dependencies -'rseqc/junctionannotation'. -INFO Removed files for 'rseqc/junctionsaturation' and its dependencies -'rseqc/junctionsaturation'. -INFO Removed files for 'rseqc/readdistribution' and its dependencies 'rseqc/readdistribution'. -INFO Removed files for 'rseqc/readduplication' and its dependencies 'rseqc/readduplication'. -INFO Removed files for 'rseqc/tin' and its dependencies 'rseqc/tin'. -INFO Removed files for 'bam_rseqc' and its dependencies 'bam_rseqc, rseqc_bamstat, -rseqc_inferexperiment, rseqc_innerdistance, rseqc_junctionannotation, -rseqc_junctionsaturation, rseqc_readdistribution, rseqc_readduplication, rseqc_tin'. + + $ nf-core subworkflows remove bam_rseqc + + ,--./,-. + ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' + + nf-core/tools version 2.13 - https://nf-co.re + + + +INFO Removed files for 'rseqc/bamstat' and its dependencies 'rseqc/bamstat'. +INFO Removed files for 'rseqc/inferexperiment' and its dependencies 'rseqc/inferexperiment'. +INFO Removed files for 'rseqc/innerdistance' and its dependencies 'rseqc/innerdistance'. +INFO Removed files for 'rseqc/junctionannotation' and its dependencies +'rseqc/junctionannotation'. +INFO Removed files for 'rseqc/junctionsaturation' and its dependencies +'rseqc/junctionsaturation'. +INFO Removed files for 'rseqc/readdistribution' and its dependencies 'rseqc/readdistribution'. +INFO Removed files for 'bam_rseqc' and its dependencies 'bam_rseqc, rseqc_bamstat, +rseqc_inferexperiment, rseqc_innerdistance, rseqc_junctionannotation, +rseqc_junctionsaturation, rseqc_readdistribution'. 
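The updated capture above is produced by running the removal command inside a pipeline that already has the subworkflow installed. A minimal bash sketch of reproducing it locally — the pipeline directory name is hypothetical, and the `subworkflows install` step is an assumption, since it is not shown in this diff:

  # hypothetical pipeline directory; bam_rseqc assumed installable from nf-core/modules
  cd nf-core-testpipeline/
  nf-core subworkflows install bam_rseqc   # assumed counterpart of the remove command (not shown in this diff)
  nf-core subworkflows remove bam_rseqc    # removes the subworkflow and any module dependencies no longer used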
diff --git a/docs/images/nf-core-subworkflows-test.svg b/docs/images/nf-core-subworkflows-test.svg index 1c719d84c3..f4261d37f5 100644 --- a/docs/images/nf-core-subworkflows-test.svg +++ b/docs/images/nf-core-subworkflows-test.svg @@ -19,64 +19,64 @@ font-weight: 700; } - .terminal-1736045672-matrix { + .terminal-3894408202-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1736045672-title { + .terminal-3894408202-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1736045672-r1 { fill: #c5c8c6 } -.terminal-1736045672-r2 { fill: #98a84b } -.terminal-1736045672-r3 { fill: #9a9b99 } -.terminal-1736045672-r4 { fill: #608ab1 } -.terminal-1736045672-r5 { fill: #d0b344 } + .terminal-3894408202-r1 { fill: #c5c8c6 } +.terminal-3894408202-r2 { fill: #98a84b } +.terminal-3894408202-r3 { fill: #9a9b99 } +.terminal-3894408202-r4 { fill: #608ab1 } +.terminal-3894408202-r5 { fill: #d0b344 } - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -88,22 +88,22 @@ - + - - $ nf-core subworkflows test bam_rseqc --no-prompts - - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' - - nf-core/tools version 2.12.1 - https://nf-co.re - - - -INFO Generating nf-test snapshot + + $ nf-core subworkflows test bam_rseqc --no-prompts + + ,--./,-. + ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' + + nf-core/tools version 2.13 - https://nf-co.re + + + +INFO Generating nf-test snapshot diff --git a/docs/images/nf-core-subworkflows-update.svg b/docs/images/nf-core-subworkflows-update.svg index db8d96c904..64919a7e3d 100644 --- a/docs/images/nf-core-subworkflows-update.svg +++ b/docs/images/nf-core-subworkflows-update.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - - - - + - + - - $ nf-core subworkflows update --all --no-preview - - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' - - nf-core/tools version 2.12.1 - https://nf-co.re - - - -INFO 'subworkflows/nf-core/bam_rseqc' is already up to date -INFO Updates complete ✨ + + $ nf-core subworkflows update --all --no-preview + + ,--./,-. 
+ ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' + + nf-core/tools version 2.13 - https://nf-co.re + + + +ERROR 'rseqc/readduplication' diff --git a/docs/images/nf-core-sync.svg b/docs/images/nf-core-sync.svg index 344a9b04ba..1b2917e5db 100644 --- a/docs/images/nf-core-sync.svg +++ b/docs/images/nf-core-sync.svg @@ -19,91 +19,91 @@ font-weight: 700; } - .terminal-372548199-matrix { + .terminal-1194172937-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-372548199-title { + .terminal-1194172937-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-372548199-r1 { fill: #c5c8c6 } -.terminal-372548199-r2 { fill: #98a84b } -.terminal-372548199-r3 { fill: #9a9b99 } -.terminal-372548199-r4 { fill: #608ab1 } -.terminal-372548199-r5 { fill: #d0b344 } -.terminal-372548199-r6 { fill: #cc555a } -.terminal-372548199-r7 { fill: #98729f } -.terminal-372548199-r8 { fill: #ff2c7a } + .terminal-1194172937-r1 { fill: #c5c8c6 } +.terminal-1194172937-r2 { fill: #98a84b } +.terminal-1194172937-r3 { fill: #9a9b99 } +.terminal-1194172937-r4 { fill: #608ab1 } +.terminal-1194172937-r5 { fill: #d0b344 } +.terminal-1194172937-r6 { fill: #cc555a } +.terminal-1194172937-r7 { fill: #98729f } +.terminal-1194172937-r8 { fill: #ff2c7a } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -115,30 +115,30 @@ - + - - $ nf-core sync - - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' - - nf-core/tools version 2.12.1 - https://nf-co.re - - -WARNING Could not find GitHub authentication token. Some API requests may fail. -INFO Pipeline directory: /home/runner/work/tools/tools/tmp/nf-core-nextbigthing -INFO Original pipeline repository branch is 'master' -INFO Deleting all files in 'TEMPLATE' branch -INFO Making a new template pipeline using pipeline variables -INFO Committed changes to 'TEMPLATE' branch -INFO Checking out original branch: 'master' -INFO Now try to merge the updates in to your pipeline: - cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing - git merge TEMPLATE + + $ nf-core sync + + ,--./,-. + ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' + + nf-core/tools version 2.13 - https://nf-co.re + + +WARNING Could not find GitHub authentication token. Some API requests may fail. +INFO Pipeline directory: /home/runner/work/tools/tools/tmp/nf-core-nextbigthing +INFO Original pipeline repository branch is 'master' +INFO Deleting all files in 'TEMPLATE' branch +INFO Making a new template pipeline using pipeline variables +INFO Committed changes to 'TEMPLATE' branch +INFO Checking out original branch: 'master' +INFO Now try to merge the updates in to your pipeline: + cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing + git merge TEMPLATE diff --git a/nf_core/__main__.py b/nf_core/__main__.py index a39c3cf732..7d2d083fa9 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1134,7 +1134,13 @@ def create_module( default=False, help="Run tests only once. 
Don't check snapshot stability", ) -def test_module(ctx, tool, dir, no_prompts, update, once): +@click.option( + "--profile", + type=click.Choice(["docker", "singularity", "conda"]), + default=None, + help="Run tests with a specific profile", +) +def test_module(ctx, tool, dir, no_prompts, update, once, profile): """ Run nf-test for a module. @@ -1153,6 +1159,7 @@ def test_module(ctx, tool, dir, no_prompts, update, once): remote_url=ctx.obj["modules_repo_url"], branch=ctx.obj["modules_repo_branch"], verbose=ctx.obj["verbose"], + profile=profile, ) module_tester.run() except (UserWarning, LookupError) as e: @@ -1398,7 +1405,13 @@ def create_subworkflow(ctx, subworkflow, dir, author, force, migrate_pytest): default=False, help="Run tests only once. Don't check snapshot stability", ) -def test_subworkflow(ctx, subworkflow, dir, no_prompts, update, once): +@click.option( + "--profile", + type=click.Choice(["none", "singularity"]), + default=None, + help="Run tests with a specific profile", +) +def test_subworkflow(ctx, subworkflow, dir, no_prompts, update, once, profile): """ Run nf-test for a subworkflow. @@ -1417,6 +1430,7 @@ def test_subworkflow(ctx, subworkflow, dir, no_prompts, update, once): remote_url=ctx.obj["modules_repo_url"], branch=ctx.obj["modules_repo_branch"], verbose=ctx.obj["verbose"], + profile=profile, ) sw_tester.run() except (UserWarning, LookupError) as e: diff --git a/nf_core/components/components_test.py b/nf_core/components/components_test.py index 3294c2878b..f1a9e7c401 100644 --- a/nf_core/components/components_test.py +++ b/nf_core/components/components_test.py @@ -48,6 +48,8 @@ class ComponentsTest(ComponentCommand): # type: ignore[misc] flag indicating if the existing snapshot should be updated once : bool flag indicating if the test should be run only once + profile : str + container software to use (docker, singularity or conda) Methods ------- @@ -72,6 +74,7 @@ def __init__( verbose: bool = False, update: bool = False, once: bool = False, + profile: Optional[str] = None, ): super().__init__(component_type, directory, remote_url, branch, no_prompts=no_prompts) self.component_name = component_name @@ -82,6 +85,7 @@ def __init__( self.obsolete_snapshots: bool = False self.update = update self.once = once + self.profile = profile def run(self) -> None: """Run build steps""" @@ -129,7 +133,7 @@ def check_inputs(self) -> None: ) # Check container software to use - if os.environ.get("PROFILE") is None: + if os.environ.get("PROFILE") is None and self.profile is None: os.environ["PROFILE"] = "" if self.no_prompts: log.info( @@ -190,10 +194,11 @@ def generate_snapshot(self) -> bool: update = "--update-snapshot" if self.update else "" self.update = False # reset self.update to False to test if the new snapshot is stable tag = f"subworkflows/{self.component_name}" if self.component_type == "subworkflows" else self.component_name + profile = self.profile if self.profile else os.environ["PROFILE"] result = nf_core.utils.run_cmd( "nf-test", - f"test --tag {tag} --profile {os.environ['PROFILE']} {verbose} {update}", + f"test --tag {tag} --profile {profile} {verbose} {update}", ) if result is not None: nftest_out, nftest_err = result @@ -232,16 +237,18 @@ def check_snapshot_stability(self) -> bool: log.error("nf-test snapshot is not stable") self.errors.append("nf-test snapshot is not stable") return False + else: if self.obsolete_snapshots: # ask if the user wants to remove obsolete snapshots using nf-test --clean-snapshot if self.no_prompts or Confirm.ask( "nf-test found 
obsolete snapshots. Do you want to remove them?", default=True ): + profile = self.profile if self.profile else os.environ["PROFILE"] log.info("Removing obsolete snapshots") nf_core.utils.run_cmd( "nf-test", - f"test --tag {self.component_name} --profile {os.environ['PROFILE']} --clean-snapshot", + f"test --tag {self.component_name} --profile {profile} --clean-snapshot", ) else: log.debug("Obsolete snapshots not removed") diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 32f6d1a433..c4b477a0ab 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -462,7 +462,13 @@ def _print_and_delete_pytest_files(self): ): with open(pytest_dir / "main.nf") as fh: log.info(fh.read()) - shutil.rmtree(pytest_dir) + if pytest_dir.is_symlink(): + resolved_dir = pytest_dir.resolve() + log.debug(f"Removing symlink: {resolved_dir}") + shutil.rmtree(resolved_dir) + pytest_dir.unlink() + else: + shutil.rmtree(pytest_dir) log.info( "[yellow]Please convert the pytest tests to nf-test in 'main.nf.test'.[/]\n" "You can find more information about nf-test [link=https://nf-co.re/docs/contributing/modules#migrating-from-pytest-to-nf-test]at the nf-core web[/link]. " diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 077cb2b840..a54c47232e 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -76,7 +76,7 @@ def _parameter_checks(self): if not self.has_valid_directory(): raise UserWarning("The command was not run in a valid pipeline directory.") - def update(self, component=None, silent=False, updated=None, check_diff_exist=True): + def update(self, component=None, silent=False, updated=None, check_diff_exist=True) -> bool: """Updates a specified module/subworkflow or all modules/subworkflows in a pipeline. If updating a subworkflow: updates all modules used in that subworkflow. @@ -188,7 +188,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr continue # Download component files - if not self.install_component_files(component, version, modules_repo, install_tmp_dir): + if not self.install_component_files(component, version, modules_repo, str(install_tmp_dir)): exit_value = False continue @@ -282,7 +282,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr if not dry_run: # Clear the component directory and move the installed files there - self.move_files_from_tmp_dir(component, install_tmp_dir, modules_repo.repo_path, version) + self.move_files_from_tmp_dir(component, str(install_tmp_dir), modules_repo.repo_path, version) # Update modules.json with newly installed component self.modules_json.update(self.component_type, modules_repo, component, version, installed_by=None) updated.append(component) @@ -727,7 +727,7 @@ def setup_diff_file(self, check_diff_exist=True): # This guarantees that the file exists after calling the function self.save_diff_fn.touch() - def move_files_from_tmp_dir(self, component, install_folder, repo_path, new_version): + def move_files_from_tmp_dir(self, component: str, install_folder: str, repo_path: str, new_version: str) -> None: """Move the files from the temporary to the installation directory. Args: @@ -736,18 +736,34 @@ def move_files_from_tmp_dir(self, component, install_folder, repo_path, new_vers repo_path (str): The name of the directory where modules/subworkflows are installed new_version (str): The version of the module/subworkflow that was installed. 
""" - temp_component_dir = os.path.join(install_folder, component) - files = os.listdir(temp_component_dir) - pipeline_path = os.path.join(self.dir, self.component_type, repo_path, component) + temp_component_dir = Path(install_folder, component) + files = [file_path for file_path in temp_component_dir.rglob("*") if file_path.is_file()] + pipeline_path = Path(self.dir, self.component_type, repo_path, component) + + if pipeline_path.exists(): + pipeline_files = [f.name for f in pipeline_path.iterdir() if f.is_file()] + # check if any *.config file exists in the pipeline + config_files = [f for f in pipeline_files if str(f).endswith(".config")] + for config_file in config_files: + log.debug(f"Moving '{component}/{config_file}' to updated component") + shutil.move(pipeline_path / config_file, temp_component_dir / config_file) + files.append(temp_component_dir / config_file) + + else: + log.debug(f"Creating new {self.component_type[:-1]} '{component}' in '{self.component_type}/{repo_path}'") log.debug(f"Removing old version of {self.component_type[:-1]} '{component}'") - self.clear_component_dir(component, pipeline_path) + self.clear_component_dir(component, str(pipeline_path)) - os.makedirs(pipeline_path) + pipeline_path.mkdir(parents=True, exist_ok=True) for file in files: - path = os.path.join(temp_component_dir, file) - if os.path.exists(path): - shutil.move(path, os.path.join(pipeline_path, file)) + file = file.relative_to(temp_component_dir) + path = Path(temp_component_dir, file) + if path.exists(): + log.debug(f"Moving '{file}' to updated component") + dest = Path(pipeline_path, file) + dest.parent.mkdir(parents=True, exist_ok=True) + shutil.move(path, dest) log.info(f"Updating '{repo_path}/{component}'") log.debug(f"Updating {self.component_type[:-1]} '{component}' to {new_version} from {repo_path}") diff --git a/nf_core/create.py b/nf_core/create.py index 8038a995c5..c094d33a22 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -292,7 +292,7 @@ def render_template(self): short_name = self.template_params["short_name"] rename_files = { "workflows/pipeline.nf": f"workflows/{short_name}.nf", - "lib/WorkflowPipeline.groovy": f"lib/Workflow{short_name[0].upper()}{short_name[1:]}.groovy", + "subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf": f"subworkflows/local/utils_nfcore_{short_name}_pipeline/main.nf", } # Set the paths to skip according to customization diff --git a/nf_core/download.py b/nf_core/download.py index 4c0bc97f42..bb7b2ae473 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -759,6 +759,9 @@ def rectify_raw_container_matches(self, raw_findings): Example syntax: Early DSL2: + + .. code-block:: groovy + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0" } else { @@ -766,11 +769,17 @@ def rectify_raw_container_matches(self, raw_findings): } Later DSL2: + + .. code-block:: groovy + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' : 'biocontainers/fastqc:0.11.9--0' }" Later DSL2, variable is being used: + + .. code-block:: groovy + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
"https://depot.galaxyproject.org/singularity/${container_id}" : "quay.io/biocontainers/${container_id}" }" @@ -778,7 +787,11 @@ def rectify_raw_container_matches(self, raw_findings): container_id = 'mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:afaaa4c6f5b308b4b6aa2dd8e99e1466b2a6b0cd-0' DSL1 / Special case DSL2: + + .. code-block:: groovy + container "nfcore/cellranger:6.0.2" + """ cleaned_matches = [] diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index ad4bed5052..c441d40df1 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -1,7 +1,7 @@ # Test build locally before making a PR # docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile . -FROM gitpod/workspace-base +FROM gitpod/workspace-base@sha256:728e1fab64f6924128b987264603a6f277bd881de95feaf39129a1ffdde36e14 USER root diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py index d39ce5d7e9..be9ac183a6 100644 --- a/nf_core/lint/__init__.py +++ b/nf_core/lint/__init__.py @@ -404,9 +404,7 @@ def format_result(test_results): if "dev" in __version__: tools_version = "latest" for eid, msg in test_results: - yield Markdown( - f"[{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html): {msg}" - ) + yield Markdown(f"[{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}): {msg}") # Table of passed tests if len(self.passed) > 0 and show_passed: @@ -507,7 +505,7 @@ def _get_results_md(self): test_failures = "### :x: Test failures:\n\n{}\n\n".format( "\n".join( [ - f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html) - " + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}) - " f"{strip_ansi_codes(msg, '`')}" for eid, msg in self.failed ] @@ -521,7 +519,7 @@ def _get_results_md(self): test_ignored = "### :grey_question: Tests ignored:\n\n{}\n\n".format( "\n".join( [ - f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html) - " + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}) - " f"{strip_ansi_codes(msg, '`')}" for eid, msg in self.ignored ] @@ -535,7 +533,7 @@ def _get_results_md(self): test_fixed = "### :grey_question: Tests fixed:\n\n{}\n\n".format( "\n".join( [ - f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html) - " + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}) - " f"{strip_ansi_codes(msg, '`')}" for eid, msg in self.fixed ] @@ -549,7 +547,7 @@ def _get_results_md(self): test_warnings = "### :heavy_exclamation_mark: Test warnings:\n\n{}\n\n".format( "\n".join( [ - f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html) - " + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}) - " f"{strip_ansi_codes(msg, '`')}" for eid, msg in self.warned ] @@ -564,7 +562,7 @@ def _get_results_md(self): "\n".join( [ ( - f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html)" + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid})" f" - {strip_ansi_codes(msg, '`')}" ) for eid, msg in self.passed diff --git a/nf_core/lint/files_exist.py b/nf_core/lint/files_exist.py index 1bd6dba74c..5d62a23bf8 100644 --- a/nf_core/lint/files_exist.py +++ b/nf_core/lint/files_exist.py @@ -52,9 +52,6 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]: docs/output.md docs/README.md docs/usage.md - 
lib/NfcoreTemplate.groovy - lib/Utils.groovy - lib/WorkflowMain.groovy nextflow_schema.json nextflow.config README.md @@ -69,7 +66,6 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]: conf/igenomes.config .github/workflows/awstest.yml .github/workflows/awsfulltest.yml - lib/WorkflowPIPELINE.groovy pyproject.toml Files that *must not* be present, due to being renamed or removed in the template: @@ -91,6 +87,11 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]: lib/Checks.groovy lib/Completion.groovy lib/Workflow.groovy + lib/WorkflowPIPELINE.groovy + lib/NfcoreTemplate.groovy + lib/Utils.groovy + lib/WorkflowMain.groovy + Files that *should not* be present: @@ -165,9 +166,6 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]: [Path("docs", "README.md")], [Path("docs", "README.md")], [Path("docs", "usage.md")], - [Path("lib", "NfcoreTemplate.groovy")], - [Path("lib", "Utils.groovy")], - [Path("lib", "WorkflowMain.groovy")], ] files_warn = [ @@ -177,7 +175,6 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]: [Path("conf", "igenomes.config")], [Path(".github", "workflows", "awstest.yml")], [Path(".github", "workflows", "awsfulltest.yml")], - [Path("lib", f"Workflow{short_name[0].upper()}{short_name[1:]}.groovy")], [Path("modules.json")], [Path("pyproject.toml")], ] @@ -199,6 +196,10 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]: Path("lib", "Checks.groovy"), Path("lib", "Completion.groovy"), Path("lib", "Workflow.groovy"), + Path("lib", "Utils.groovy"), + Path("lib", "WorkflowMain.groovy"), + Path("lib", "NfcoreTemplate.groovy"), + Path("lib", f"Workflow{short_name[0].upper()}{short_name[1:]}.groovy"), ] files_warn_ifexists = [Path(".travis.yml")] files_fail_ifinconfig: List[Tuple[Path, Dict[str, str]]] = [ @@ -236,7 +237,7 @@ def pf(file_path: Union[str, Path]) -> Path: # Files that cause an error if they exist for file in files_fail_ifexists: - if file in ignore_files: + if str(file) in ignore_files: continue if pf(file).is_file(): failed.append(f"File must be removed: {self._wrap_quotes(file)}") diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py index 399830faae..3a3a0cb74a 100644 --- a/nf_core/lint/files_unchanged.py +++ b/nf_core/lint/files_unchanged.py @@ -1,5 +1,6 @@ import filecmp import logging +import os import shutil import tempfile from pathlib import Path @@ -40,7 +41,6 @@ def files_unchanged(self) -> Dict[str, Union[List[str], bool]]: docs/images/nf-core-PIPELINE_logo_light.png docs/images/nf-core-PIPELINE_logo_dark.png docs/README.md' - lib/NfcoreTemplate.groovy ['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling Files that can have additional content but must include the template contents:: @@ -104,7 +104,6 @@ def files_unchanged(self) -> Dict[str, Union[List[str], bool]]: [Path("docs", "images", f"nf-core-{short_name}_logo_light.png")], [Path("docs", "images", f"nf-core-{short_name}_logo_dark.png")], [Path("docs", "README.md")], - [Path("lib", "NfcoreTemplate.groovy")], ] files_partial = [ [Path(".gitignore"), Path(".prettierignore"), Path("pyproject.toml")], @@ -162,7 +161,15 @@ def _tf(file_path: Union[str, Path]) -> Path: if filecmp.cmp(_pf(f), _tf(f), shallow=True): passed.append(f"`{f}` matches the template") else: - if "files_unchanged" in self.fix: + if ( + f.name.endswith(".png") + and os.stat(_pf(f)).st_mode == os.stat(_tf(f)).st_mode + and int(os.stat(_pf(f)).st_size / 100) == int(os.stat(_tf(f)).st_size / 100) + ): + # almost the same file, 
good enough for the logo + log.debug(f"Files are almost the same. Will pass: {f}") + passed.append(f"`{f}` matches the template") + elif "files_unchanged" in self.fix: # Try to fix the problem by overwriting the pipeline file shutil.copy(_tf(f), _pf(f)) passed.append(f"`{f}` matches the template") @@ -176,7 +183,7 @@ def _tf(file_path: Union[str, Path]) -> Path: # Files that can be added to, but that must contain the template contents for files in files_partial: # Ignore if file specified in linting config - if any([f in ignore_files for f in files]): + if any([str(f) in ignore_files for f in files]): ignored.append(f"File ignored due to lint config: {self._wrap_quotes(files)}") # Ignore if we can't find the file diff --git a/nf_core/lint/merge_markers.py b/nf_core/lint/merge_markers.py index 80cd655066..d57b63fd19 100644 --- a/nf_core/lint/merge_markers.py +++ b/nf_core/lint/merge_markers.py @@ -11,19 +11,25 @@ def merge_markers(self): """Check for remaining merge markers. This test looks for remaining merge markers in the code, e.g.: - >>>>>>> or <<<<<<< + ``>>>>>>>`` or ``<<<<<<<`` - .. tip:: You can choose to ignore this lint tests by editing the file called + .. note:: You can choose to ignore this lint tests by editing the file called ``.nf-core.yml`` in the root of your pipeline and setting the test to false: + .. code-block:: yaml + lint: merge_markers: False + To disable this test only for specific files, you can specify a list of file paths to ignore. For example, to ignore a pdf you added to the docs: + .. code-block:: yaml + lint: merge_markers: - docs/my_pdf.pdf + """ passed = [] failed = [] diff --git a/nf_core/lint/modules_structure.py b/nf_core/lint/modules_structure.py index f0e13e0346..9d9b4c9fc0 100644 --- a/nf_core/lint/modules_structure.py +++ b/nf_core/lint/modules_structure.py @@ -8,9 +8,15 @@ def modules_structure(self): """ Check that the structure of the modules directory in a pipeline is the correct one: + + .. code-block:: bash + modules/nf-core/TOOL/SUBTOOL Prior to nf-core/tools release 2.6 the directory structure had an additional level of nesting: + + .. code-block:: bash + modules/nf-core/modules/TOOL/SUBTOOL """ wrong_location_modules = [] diff --git a/nf_core/lint/multiqc_config.py b/nf_core/lint/multiqc_config.py index b2f1a89a1b..9b9c80c44e 100644 --- a/nf_core/lint/multiqc_config.py +++ b/nf_core/lint/multiqc_config.py @@ -9,6 +9,7 @@ def multiqc_config(self) -> Dict[str, List[str]]: Basic template: .. code-block:: yaml + report_comment: > This report has been generated by the nf-core/quantms analysis pipeline. For information about how to interpret these results, please see the @@ -18,9 +19,10 @@ def multiqc_config(self) -> Dict[str, List[str]]: order: -1000 nf-core-quantms-summary: order: -1001 - export_plots: true + """ + passed: List[str] = [] failed: List[str] = [] diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index 68750cd859..d3e29d2363 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -129,6 +129,7 @@ def nextflow_config(self): - config_defaults: - params.input """ + passed = [] warned = [] failed = [] diff --git a/nf_core/lint/schema_description.py b/nf_core/lint/schema_description.py index ca22f266ab..90735f609a 100644 --- a/nf_core/lint/schema_description.py +++ b/nf_core/lint/schema_description.py @@ -2,10 +2,11 @@ def schema_description(self): - """Check that every parameter in the schema has a description + """Check that every parameter in the schema has a description. 
- The ``nextflow_schema.json`` pipeline schema should describe every flat parameter - Furthermore warns about parameters outside of groups + The ``nextflow_schema.json`` pipeline schema should describe every flat parameter. + + Furthermore warns about parameters outside of groups. * Warning: Parameters in ``nextflow_schema.json`` without a description * Warning: Parameters in ``nextflow_schema.json`` that are defined outside of a group diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 551a978f4d..481d50b3ef 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -25,7 +25,8 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None If the module has inputs or outputs, they are expected to be formatted as: - ..code-block:: + .. code-block:: groovy + tuple val(foo) path(bar) val foo path foo diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index a97229ff62..dc2b163dd4 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -447,8 +447,12 @@ def try_apply_patch(module, repo_path, patch_path, module_dir, reverse=False): log.debug(f"Applying patch to {file}") fn = Path(file).relative_to(module_relpath) file_path = module_dir / fn - with open(file_path) as fh: - file_lines = fh.readlines() + try: + with open(file_path) as fh: + file_lines = fh.readlines() + except FileNotFoundError: + # The file was added with the patch + file_lines = [""] patched_new_lines = ModulesDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse) new_files[str(fn)] = patched_new_lines return new_files diff --git a/nf_core/params_file.py b/nf_core/params_file.py index 51986821b5..267fe7086a 100644 --- a/nf_core/params_file.py +++ b/nf_core/params_file.py @@ -180,9 +180,7 @@ def format_param(self, name, properties, required_properties=(), show_hidden=Fal Returns: str: Section of a params-file.yml for given parameter - None: - If the parameter is skipped because it is hidden and - show_hidden is not set + None: If the parameter is skipped because it is hidden and show_hidden is not set """ out = "" hidden = properties.get("hidden", False) diff --git a/nf_core/pipeline-template/.editorconfig b/nf_core/pipeline-template/.editorconfig index 9b990088ab..dd9ffa5387 100644 --- a/nf_core/pipeline-template/.editorconfig +++ b/nf_core/pipeline-template/.editorconfig @@ -18,7 +18,12 @@ end_of_line = unset insert_final_newline = unset trim_trailing_whitespace = unset indent_style = unset -indent_size = unset +[/subworkflows/nf-core/**] +charset = unset +end_of_line = unset +insert_final_newline = unset +trim_trailing_whitespace = unset +indent_style = unset [/assets/email*] indent_size = unset @@ -28,5 +33,5 @@ indent_size = unset indent_style = unset # ignore python -[*.{py}] +[*.{py,md}] indent_style = unset diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml index 4c9fd69fcc..52aa8fa196 100644 --- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml +++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Launch workflow via tower - uses: seqeralabs/action-tower-launch@v2 + uses: seqeralabs/action-tower-launch@922e5c8d5ac4e918107ec311d2ebbd65e5982b3d # v2 # TODO nf-core: You can customise AWS full pipeline tests as required # Add full size test data (but still relatively small datasets for few samples) 
# on the `test_full.config` test runs with only one set of parameters {%- raw %} @@ -31,7 +31,7 @@ jobs: } profiles: test_full - - uses: actions/upload-artifact@v4 + - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4 with: name: Tower debug log file path: | diff --git a/nf_core/pipeline-template/.github/workflows/awstest.yml b/nf_core/pipeline-template/.github/workflows/awstest.yml index 25726aa1c9..5488095197 100644 --- a/nf_core/pipeline-template/.github/workflows/awstest.yml +++ b/nf_core/pipeline-template/.github/workflows/awstest.yml @@ -12,7 +12,7 @@ jobs: steps: # Launch workflow using Tower CLI tool action {%- raw %} - name: Launch workflow via tower - uses: seqeralabs/action-tower-launch@v2 + uses: seqeralabs/action-tower-launch@922e5c8d5ac4e918107ec311d2ebbd65e5982b3d # v2 with: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} @@ -25,7 +25,7 @@ jobs: } profiles: test - - uses: actions/upload-artifact@v4 + - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4 with: name: Tower debug log file path: | diff --git a/nf_core/pipeline-template/.github/workflows/branch.yml b/nf_core/pipeline-template/.github/workflows/branch.yml index 057016e4be..df1a627b15 100644 --- a/nf_core/pipeline-template/.github/workflows/branch.yml +++ b/nf_core/pipeline-template/.github/workflows/branch.yml @@ -19,7 +19,7 @@ jobs: # NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets - name: Post PR comment if: failure() - uses: mshick/add-pr-comment@v2 + uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 with: message: | ## This PR is against the `master` branch :x: diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 3edd49f09d..631862d9ec 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -28,13 +28,16 @@ jobs: - "latest-everything" steps: - name: Check out pipeline code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + uses: nf-core/setup-nextflow@b9f764e8ba5c76b712ace14ecbfcef0e40ae2dd8 # v1 with: version: "{% raw %}${{ matrix.NXF_VER }}{% endraw %}" + - name: Disk space cleanup + uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1 + - name: Run pipeline with test data # TODO nf-core: You can customise CI pipeline run tests as required # For example: adding multiple test runs with different parameters diff --git a/nf_core/pipeline-template/.github/workflows/clean-up.yml b/nf_core/pipeline-template/.github/workflows/clean-up.yml index 8feb3fb017..b3b5c05d60 100644 --- a/nf_core/pipeline-template/.github/workflows/clean-up.yml +++ b/nf_core/pipeline-template/.github/workflows/clean-up.yml @@ -10,7 +10,7 @@ jobs: issues: write pull-requests: write steps: - - uses: actions/stale@v9 + - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9 with: stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days." stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful." 
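The template workflows above all switch from floating action tags (e.g. `@v4`) to full commit SHAs, keeping the human-readable version as a trailing comment so the referenced action code cannot change underneath a release. A minimal bash sketch for looking up the SHA to pin — the tag name here is illustrative only:

  # resolve a tag to its commit before pinning it in a workflow file
  git ls-remote https://github.com/actions/checkout.git refs/tags/v4.1.1
  # if the output includes a peeled "refs/tags/v4.1.1^{}" line, that line holds the commit SHA of an annotated tag
  # then reference it as:  uses: actions/checkout@<sha>  # v4.1.1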
diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index 07ac82c1ed..20b811ab2b 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -6,6 +6,11 @@ name: Test successful pipeline download with 'nf-core download' # - the head branch of the pull request is updated, i.e. if fixes for a release are pushed last minute to dev. on: workflow_dispatch: + inputs: + testbranch: + description: "The specific branch you wish to utilize for the test execution of nf-core download." + required: true + default: "dev" pull_request: types: - opened @@ -23,13 +28,13 @@ jobs: runs-on: ubuntu-latest steps: - name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + uses: nf-core/setup-nextflow@b9f764e8ba5c76b712ace14ecbfcef0e40ae2dd8 # v1 - - uses: actions/setup-python@v5 + - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: python-version: "3.11" architecture: "x64" - - uses: eWaterCycle/setup-singularity@v7 + - uses: eWaterCycle/setup-singularity@931d4e31109e875b13309ae1d07c70ca8fbc8537 # v7 with: singularity-version: 3.8.3 @@ -42,13 +47,13 @@ jobs: run: | echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV} echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV} - echo "REPO_BRANCH=${GITHUB_REF#refs/heads/}" >> ${GITHUB_ENV} + echo "{% raw %}REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> ${GITHUB_ENV} - name: Download the pipeline env: NXF_SINGULARITY_CACHEDIR: ./ run: | - nf-core download {% raw %} ${{ env.REPO_LOWERCASE }} \ + nf-core download ${{ env.REPO_LOWERCASE }} \ --revision ${{ env.REPO_BRANCH }} \ --outdir ./${{ env.REPOTITLE_LOWERCASE }} \ --compress "none" \ @@ -64,4 +69,4 @@ jobs: env: NXF_SINGULARITY_CACHEDIR: ./ NXF_SINGULARITY_HOME_MOUNT: true - run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results {% endraw %} + run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results{% endraw %} diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index a267f1ec14..3ec259b5ed 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -14,10 +14,10 @@ jobs: pre-commit: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Set up Python 3.11 - uses: actions/setup-python@v5 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: python-version: 3.11 cache: "pip" @@ -32,12 +32,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out pipeline code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + uses: nf-core/setup-nextflow@b9f764e8ba5c76b712ace14ecbfcef0e40ae2dd8 # v1 - - uses: actions/setup-python@v5 + - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: python-version: "3.11" architecture: "x64" @@ -60,7 +60,7 @@ jobs: - name: Upload linting log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v4 + uses: 
actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4 with: name: linting-logs path: | diff --git a/nf_core/pipeline-template/.github/workflows/linting_comment.yml b/nf_core/pipeline-template/.github/workflows/linting_comment.yml index e5528b29cf..bb3eafcc2c 100644 --- a/nf_core/pipeline-template/.github/workflows/linting_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/linting_comment.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download lint results - uses: dawidd6/action-download-artifact@v3 + uses: dawidd6/action-download-artifact@f6b0bace624032e30a85a8fd9c1a7f8f611f5737 # v3 with: workflow: linting.yml workflow_conclusion: completed @@ -21,7 +21,7 @@ jobs: run: echo "pr_number=$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT - name: Post PR comment - uses: marocchino/sticky-pull-request-comment@v2 + uses: marocchino/sticky-pull-request-comment@331f8f5b4215f0445d3c07b4967662a32a2d3e31 # v2 with: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} number: ${{ steps.pr_number.outputs.pr_number }} diff --git a/nf_core/pipeline-template/.github/workflows/release-announcements.yml b/nf_core/pipeline-template/.github/workflows/release-announcements.yml index 1dd48b123f..6d11280ae8 100644 --- a/nf_core/pipeline-template/.github/workflows/release-announcements.yml +++ b/nf_core/pipeline-template/.github/workflows/release-announcements.yml @@ -9,6 +9,11 @@ jobs: toot: runs-on: ubuntu-latest steps: + - name: get topics and convert to hashtags + id: get_topics + run: | + curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ' > $GITHUB_OUTPUT + - uses: rzr/fediverse-action@master with: access-token: ${{ secrets.MASTODON_ACCESS_TOKEN }} @@ -20,11 +25,13 @@ jobs: Please see the changelog: ${{ github.event.release.html_url }} + ${{ steps.get_topics.outputs.GITHUB_OUTPUT }} #nfcore #openscience #nextflow #bioinformatics + send-tweet: runs-on: ubuntu-latest steps: - - uses: actions/setup-python@v5 + - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: python-version: "3.10" - name: Install dependencies @@ -56,7 +63,7 @@ jobs: bsky-post: runs-on: ubuntu-latest steps: - - uses: zentered/bluesky-post-action@v0.1.0 + - uses: zentered/bluesky-post-action@80dbe0a7697de18c15ad22f4619919ceb5ccf597 # v0.1.0 with: post: | Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}! 
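The new "get topics and convert to hashtags" step earlier in this workflow builds a hashtag string for the release post from the nf-core pipelines registry. A minimal bash sketch of running the same pipeline by hand — the repository name is a hypothetical stand-in for the templated ${{ github.repository }} value, and the exact output depends on the matching entry in pipelines.json:

  # fetch pipeline topics and turn them into space-separated hashtags
  curl -s https://nf-co.re/pipelines.json \
    | jq -r '.remote_workflows[] | select(.name == "nf-core/rnaseq") | .topics[]' \
    | awk '{print "#"$0}' | tr '\n' ' '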
diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index 5160ccb04d..037d3fd81a 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -6,10 +6,11 @@ + {% endif -%} {% if github_badges -%} -[![GitHub Actions CI Status](https://github.com/{{ name }}/workflows/nf-core%20CI/badge.svg)](https://github.com/{{ name }}/actions?query=workflow%3A%22nf-core+CI%22) -[![GitHub Actions Linting Status](https://github.com/{{ name }}/workflows/nf-core%20linting/badge.svg)](https://github.com/{{ name }}/actions?query=workflow%3A%22nf-core+linting%22){% endif -%} +[![GitHub Actions CI Status](https://github.com/{{ name }}/actions/workflows/ci.yml/badge.svg)](https://github.com/{{ name }}/actions/workflows/ci.yml) +[![GitHub Actions Linting Status](https://github.com/{{ name }}/actions/workflows/linting.yml/badge.svg)](https://github.com/{{ name }}/actions/workflows/linting.yml){% endif -%} {% if branded -%}[![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/{{ short_name }}/results){% endif -%} {%- if github_badges -%} [![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) diff --git a/nf_core/pipeline-template/assets/multiqc_config.yml b/nf_core/pipeline-template/assets/multiqc_config.yml index 39943ffe49..b13b7ae074 100644 --- a/nf_core/pipeline-template/assets/multiqc_config.yml +++ b/nf_core/pipeline-template/assets/multiqc_config.yml @@ -17,3 +17,5 @@ report_section_order: order: -1002 export_plots: true + +disable_version_detection: true diff --git a/nf_core/pipeline-template/assets/schema_input.json b/nf_core/pipeline-template/assets/schema_input.json index 509048bd8a..e76b95fa99 100644 --- a/nf_core/pipeline-template/assets/schema_input.json +++ b/nf_core/pipeline-template/assets/schema_input.json @@ -10,25 +10,22 @@ "sample": { "type": "string", "pattern": "^\\S+$", - "errorMessage": "Sample name must be provided and cannot contain spaces" + "errorMessage": "Sample name must be provided and cannot contain spaces", + "meta": ["id"] }, "fastq_1": { "type": "string", + "format": "file-path", + "exists": true, "pattern": "^\\S+\\.f(ast)?q\\.gz$", "errorMessage": "FastQ file for reads 1 must be provided, cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz'" }, "fastq_2": { - "errorMessage": "FastQ file for reads 2 cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz'", - "anyOf": [ - { - "type": "string", - "pattern": "^\\S+\\.f(ast)?q\\.gz$" - }, - { - "type": "string", - "maxLength": 0 - } - ] + "type": "string", + "format": "file-path", + "exists": true, + "pattern": "^\\S+\\.f(ast)?q\\.gz$", + "errorMessage": "FastQ file for reads 2 cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz'" } }, "required": ["sample", "fastq_1"] diff --git a/nf_core/pipeline-template/bin/check_samplesheet.py b/nf_core/pipeline-template/bin/check_samplesheet.py deleted file mode 100755 index 4a758fe003..0000000000 --- a/nf_core/pipeline-template/bin/check_samplesheet.py +++ /dev/null @@ -1,259 +0,0 @@ -#!/usr/bin/env python - - -"""Provide a command line tool to validate and transform tabular samplesheets.""" - - -import argparse -import csv -import logging -import sys -from collections import Counter -from pathlib import Path - -logger = logging.getLogger() - - -class RowChecker: - """ - Define a service that can validate and transform each 
given row. - - Attributes: - modified (list): A list of dicts, where each dict corresponds to a previously - validated and transformed row. The order of rows is maintained. - - """ - - VALID_FORMATS = ( - ".fq.gz", - ".fastq.gz", - ) - - def __init__( - self, - sample_col="sample", - first_col="fastq_1", - second_col="fastq_2", - single_col="single_end", - **kwargs, - ): - """ - Initialize the row checker with the expected column names. - - Args: - sample_col (str): The name of the column that contains the sample name - (default "sample"). - first_col (str): The name of the column that contains the first (or only) - FASTQ file path (default "fastq_1"). - second_col (str): The name of the column that contains the second (if any) - FASTQ file path (default "fastq_2"). - single_col (str): The name of the new column that will be inserted and - records whether the sample contains single- or paired-end sequencing - reads (default "single_end"). - - """ - super().__init__(**kwargs) - self._sample_col = sample_col - self._first_col = first_col - self._second_col = second_col - self._single_col = single_col - self._seen = set() - self.modified = [] - - def validate_and_transform(self, row): - """ - Perform all validations on the given row and insert the read pairing status. - - Args: - row (dict): A mapping from column headers (keys) to elements of that row - (values). - - """ - self._validate_sample(row) - self._validate_first(row) - self._validate_second(row) - self._validate_pair(row) - self._seen.add((row[self._sample_col], row[self._first_col])) - self.modified.append(row) - - def _validate_sample(self, row): - """Assert that the sample name exists and convert spaces to underscores.""" - if len(row[self._sample_col]) <= 0: - raise AssertionError("Sample input is required.") - # Sanitize samples slightly. - row[self._sample_col] = row[self._sample_col].replace(" ", "_") - - def _validate_first(self, row): - """Assert that the first FASTQ entry is non-empty and has the right format.""" - if len(row[self._first_col]) <= 0: - raise AssertionError("At least the first FASTQ file is required.") - self._validate_fastq_format(row[self._first_col]) - - def _validate_second(self, row): - """Assert that the second FASTQ entry has the right format if it exists.""" - if len(row[self._second_col]) > 0: - self._validate_fastq_format(row[self._second_col]) - - def _validate_pair(self, row): - """Assert that read pairs have the same file extension. Report pair status.""" - if row[self._first_col] and row[self._second_col]: - row[self._single_col] = False - first_col_suffix = Path(row[self._first_col]).suffixes[-2:] - second_col_suffix = Path(row[self._second_col]).suffixes[-2:] - if first_col_suffix != second_col_suffix: - raise AssertionError("FASTQ pairs must have the same file extensions.") - else: - row[self._single_col] = True - - def _validate_fastq_format(self, filename): - """Assert that a given filename has one of the expected FASTQ extensions.""" - if not any(filename.endswith(extension) for extension in self.VALID_FORMATS): - raise AssertionError( - f"The FASTQ file has an unrecognized extension: {filename}\n" - f"It should be one of: {', '.join(self.VALID_FORMATS)}" - ) - - def validate_unique_samples(self): - """ - Assert that the combination of sample name and FASTQ filename is unique. - - In addition to the validation, also rename all samples to have a suffix of _T{n}, where n is the - number of times the same sample exist, but with different FASTQ files, e.g., multiple runs per experiment. 
- - """ - if len(self._seen) != len(self.modified): - raise AssertionError("The pair of sample name and FASTQ must be unique.") - seen = Counter() - for row in self.modified: - sample = row[self._sample_col] - seen[sample] += 1 - row[self._sample_col] = f"{sample}_T{seen[sample]}" - - -def read_head(handle, num_lines=10): - """Read the specified number of lines from the current position in the file.""" - lines = [] - for idx, line in enumerate(handle): - if idx == num_lines: - break - lines.append(line) - return "".join(lines) - - -def sniff_format(handle): - """ - Detect the tabular format. - - Args: - handle (text file): A handle to a `text file`_ object. The read position is - expected to be at the beginning (index 0). - - Returns: - csv.Dialect: The detected tabular format. - - .. _text file: - https://docs.python.org/3/glossary.html#term-text-file - - """ - peek = read_head(handle) - handle.seek(0) - sniffer = csv.Sniffer() - dialect = sniffer.sniff(peek) - return dialect - - -def check_samplesheet(file_in, file_out): - """ - Check that the tabular samplesheet has the structure expected by nf-core pipelines. - - Validate the general shape of the table, expected columns, and each row. Also add - an additional column which records whether one or two FASTQ reads were found. - - Args: - file_in (pathlib.Path): The given tabular samplesheet. The format can be either - CSV, TSV, or any other format automatically recognized by ``csv.Sniffer``. - file_out (pathlib.Path): Where the validated and transformed samplesheet should - be created; always in CSV format. - - Example: - This function checks that the samplesheet follows the following structure, - see also the `viral recon samplesheet`_:: - - sample,fastq_1,fastq_2 - SAMPLE_PE,SAMPLE_PE_RUN1_1.fastq.gz,SAMPLE_PE_RUN1_2.fastq.gz - SAMPLE_PE,SAMPLE_PE_RUN2_1.fastq.gz,SAMPLE_PE_RUN2_2.fastq.gz - SAMPLE_SE,SAMPLE_SE_RUN1_1.fastq.gz, - - .. _viral recon samplesheet: - https://raw.githubusercontent.com/nf-core/test-datasets/viralrecon/samplesheet/samplesheet_test_illumina_amplicon.csv - - """ - required_columns = {"sample", "fastq_1", "fastq_2"} - # See https://docs.python.org/3.9/library/csv.html#id3 to read up on `newline=""`. - with file_in.open(newline="") as in_handle: - reader = csv.DictReader(in_handle, dialect=sniff_format(in_handle)) - # Validate the existence of the expected header columns. - if not required_columns.issubset(reader.fieldnames): - req_cols = ", ".join(required_columns) - logger.critical(f"The sample sheet **must** contain these column headers: {req_cols}.") - sys.exit(1) - # Validate each row. - checker = RowChecker() - for i, row in enumerate(reader): - try: - checker.validate_and_transform(row) - except AssertionError as error: - logger.critical(f"{str(error)} On line {i + 2}.") - sys.exit(1) - checker.validate_unique_samples() - header = list(reader.fieldnames) - header.insert(1, "single_end") - # See https://docs.python.org/3.9/library/csv.html#id3 to read up on `newline=""`. 
- with file_out.open(mode="w", newline="") as out_handle: - writer = csv.DictWriter(out_handle, header, delimiter=",") - writer.writeheader() - for row in checker.modified: - writer.writerow(row) - - -def parse_args(argv=None): - """Define and immediately parse command line arguments.""" - parser = argparse.ArgumentParser( - description="Validate and transform a tabular samplesheet.", - epilog="Example: python check_samplesheet.py samplesheet.csv samplesheet.valid.csv", - ) - parser.add_argument( - "file_in", - metavar="FILE_IN", - type=Path, - help="Tabular input samplesheet in CSV or TSV format.", - ) - parser.add_argument( - "file_out", - metavar="FILE_OUT", - type=Path, - help="Transformed output samplesheet in CSV format.", - ) - parser.add_argument( - "-l", - "--log-level", - help="The desired log level (default WARNING).", - choices=("CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"), - default="WARNING", - ) - return parser.parse_args(argv) - - -def main(argv=None): - """Coordinate argument parsing and program execution.""" - args = parse_args(argv) - logging.basicConfig(level=args.log_level, format="[%(levelname)s] %(message)s") - if not args.file_in.is_file(): - logger.error(f"The given input file {args.file_in} was not found!") - sys.exit(2) - args.file_out.parent.mkdir(parents=True, exist_ok=True) - check_samplesheet(args.file_in, args.file_out) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/nf_core/pipeline-template/conf/modules.config b/nf_core/pipeline-template/conf/modules.config index d91c6aba0b..e3ea8fa6c4 100644 --- a/nf_core/pipeline-template/conf/modules.config +++ b/nf_core/pipeline-template/conf/modules.config @@ -18,14 +18,6 @@ process { saveAs: { filename -> filename.equals('versions.yml') ? null : filename } ] - withName: SAMPLESHEET_CHECK { - publishDir = [ - path: { "${params.outdir}/pipeline_info" }, - mode: params.publish_dir_mode, - saveAs: { filename -> filename.equals('versions.yml') ? null : filename } - ] - } - withName: FASTQC { ext.args = '--quiet' } diff --git a/nf_core/pipeline-template/conf/test.config b/nf_core/pipeline-template/conf/test.config index 49bfe8a6db..32b9619ebb 100644 --- a/nf_core/pipeline-template/conf/test.config +++ b/nf_core/pipeline-template/conf/test.config @@ -27,8 +27,5 @@ params { {% if igenomes -%} // Genome references genome = 'R64-1-1' - {%- else -%} - // Fasta references - fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/viralrecon/genome/NC_045512.2/GCF_009858895.2_ASM985889v3_genomic.200409.fna.gz' {%- endif %} } diff --git a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy deleted file mode 100755 index 0f8d021a03..0000000000 --- a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy +++ /dev/null @@ -1,356 +0,0 @@ -// -// This file holds several functions used within the nf-core pipeline template. -// - -import org.yaml.snakeyaml.Yaml -import groovy.json.JsonOutput -import nextflow.extension.FilesEx - -class NfcoreTemplate { - - // - // Check AWS Batch related parameters have been specified correctly - // - public static void awsBatch(workflow, params) { - if (workflow.profile.contains('awsbatch')) { - // Check params.awsqueue and params.awsregion have been set if running on AWSBatch - assert (params.awsqueue && params.awsregion) : "Specify correct --awsqueue and --awsregion parameters on AWSBatch!" 
- // Check outdir paths to be S3 buckets if running on AWSBatch - assert params.outdir.startsWith('s3:') : "Outdir not on S3 - specify S3 Bucket to run on AWSBatch!" - } - } - - // - // Warn if a -profile or Nextflow config has not been provided to run the pipeline - // - public static void checkConfigProvided(workflow, log) { - if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) { - log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" + - "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + - " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + - " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + - " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" + - "Please refer to the quick start section and usage docs for the pipeline.\n " - } - } - - // - // Generate version string - // - public static String version(workflow) { - String version_string = "" - - if (workflow.manifest.version) { - def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : '' - version_string += "${prefix_v}${workflow.manifest.version}" - } - - if (workflow.commitId) { - def git_shortsha = workflow.commitId.substring(0, 7) - version_string += "-g${git_shortsha}" - } - - return version_string - } - - // - // Construct and send completion email - // - public static void email(workflow, params, summary_params, projectDir, log, multiqc_report=[]) { - - // Set up the e-mail variables - def subject = "[$workflow.manifest.name] Successful: $workflow.runName" - if (!workflow.success) { - subject = "[$workflow.manifest.name] FAILED: $workflow.runName" - } - - def summary = [:] - for (group in summary_params.keySet()) { - summary << summary_params[group] - } - - def misc_fields = [:] - misc_fields['Date Started'] = workflow.start - misc_fields['Date Completed'] = workflow.complete - misc_fields['Pipeline script file path'] = workflow.scriptFile - misc_fields['Pipeline script hash ID'] = workflow.scriptId - if (workflow.repository) misc_fields['Pipeline repository Git URL'] = workflow.repository - if (workflow.commitId) misc_fields['Pipeline repository Git Commit'] = workflow.commitId - if (workflow.revision) misc_fields['Pipeline Git branch/tag'] = workflow.revision - misc_fields['Nextflow Version'] = workflow.nextflow.version - misc_fields['Nextflow Build'] = workflow.nextflow.build - misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp - - def email_fields = [:] - email_fields['version'] = NfcoreTemplate.version(workflow) - email_fields['runName'] = workflow.runName - email_fields['success'] = workflow.success - email_fields['dateComplete'] = workflow.complete - email_fields['duration'] = workflow.duration - email_fields['exitStatus'] = workflow.exitStatus - email_fields['errorMessage'] = (workflow.errorMessage ?: 'None') - email_fields['errorReport'] = (workflow.errorReport ?: 'None') - email_fields['commandLine'] = workflow.commandLine - email_fields['projectDir'] = workflow.projectDir - email_fields['summary'] = summary << misc_fields - - // On success try attach the multiqc report - def mqc_report = null - try { - if (workflow.success) { - mqc_report = multiqc_report.getVal() - if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) { - if (mqc_report.size() > 1) { - log.warn "[$workflow.manifest.name] Found multiple 
reports from process 'MULTIQC', will use only one" - } - mqc_report = mqc_report[0] - } - } - } catch (all) { - if (multiqc_report) { - log.warn "[$workflow.manifest.name] Could not attach MultiQC report to summary email" - } - } - - // Check if we are only sending emails on failure - def email_address = params.email - if (!params.email && params.email_on_fail && !workflow.success) { - email_address = params.email_on_fail - } - - // Render the TXT template - def engine = new groovy.text.GStringTemplateEngine() - def tf = new File("$projectDir/assets/email_template.txt") - def txt_template = engine.createTemplate(tf).make(email_fields) - def email_txt = txt_template.toString() - - // Render the HTML template - def hf = new File("$projectDir/assets/email_template.html") - def html_template = engine.createTemplate(hf).make(email_fields) - def email_html = html_template.toString() - - // Render the sendmail template - def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit - def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "$projectDir", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes() ] - def sf = new File("$projectDir/assets/sendmail_template.txt") - def sendmail_template = engine.createTemplate(sf).make(smail_fields) - def sendmail_html = sendmail_template.toString() - - // Send the HTML e-mail - Map colors = logColours(params.monochrome_logs) - if (email_address) { - try { - if (params.plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') } - // Try to send HTML e-mail using sendmail - def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") - sendmail_tf.withWriter { w -> w << sendmail_html } - [ 'sendmail', '-t' ].execute() << sendmail_html - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-" - } catch (all) { - // Catch failures and try with plaintext - def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ] - if ( mqc_report != null && mqc_report.size() <= max_multiqc_email_size.toBytes() ) { - mail_cmd += [ '-A', mqc_report ] - } - mail_cmd.execute() << email_html - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (mail)-" - } - } - - // Write summary e-mail HTML to a file - def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html") - output_hf.withWriter { w -> w << email_html } - FilesEx.copyTo(output_hf.toPath(), "${params.outdir}/pipeline_info/pipeline_report.html"); - output_hf.delete() - - // Write summary e-mail TXT to a file - def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt") - output_tf.withWriter { w -> w << email_txt } - FilesEx.copyTo(output_tf.toPath(), "${params.outdir}/pipeline_info/pipeline_report.txt"); - output_tf.delete() - } - - // - // Construct and send a notification to a web server as JSON - // e.g. 
Microsoft Teams and Slack - // - public static void IM_notification(workflow, params, summary_params, projectDir, log) { - def hook_url = params.hook_url - - def summary = [:] - for (group in summary_params.keySet()) { - summary << summary_params[group] - } - - def misc_fields = [:] - misc_fields['start'] = workflow.start - misc_fields['complete'] = workflow.complete - misc_fields['scriptfile'] = workflow.scriptFile - misc_fields['scriptid'] = workflow.scriptId - if (workflow.repository) misc_fields['repository'] = workflow.repository - if (workflow.commitId) misc_fields['commitid'] = workflow.commitId - if (workflow.revision) misc_fields['revision'] = workflow.revision - misc_fields['nxf_version'] = workflow.nextflow.version - misc_fields['nxf_build'] = workflow.nextflow.build - misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp - - def msg_fields = [:] - msg_fields['version'] = NfcoreTemplate.version(workflow) - msg_fields['runName'] = workflow.runName - msg_fields['success'] = workflow.success - msg_fields['dateComplete'] = workflow.complete - msg_fields['duration'] = workflow.duration - msg_fields['exitStatus'] = workflow.exitStatus - msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None') - msg_fields['errorReport'] = (workflow.errorReport ?: 'None') - msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "") - msg_fields['projectDir'] = workflow.projectDir - msg_fields['summary'] = summary << misc_fields - - // Render the JSON template - def engine = new groovy.text.GStringTemplateEngine() - // Different JSON depending on the service provider - // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format - def json_path = hook_url.contains("hooks.slack.com") ? "slackreport.json" : "adaptivecard.json" - def hf = new File("$projectDir/assets/${json_path}") - def json_template = engine.createTemplate(hf).make(msg_fields) - def json_message = json_template.toString() - - // POST - def post = new URL(hook_url).openConnection(); - post.setRequestMethod("POST") - post.setDoOutput(true) - post.setRequestProperty("Content-Type", "application/json") - post.getOutputStream().write(json_message.getBytes("UTF-8")); - def postRC = post.getResponseCode(); - if (! 
postRC.equals(200)) { - log.warn(post.getErrorStream().getText()); - } - } - - // - // Dump pipeline parameters in a json file - // - public static void dump_parameters(workflow, params) { - def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') - def filename = "params_${timestamp}.json" - def temp_pf = new File(workflow.launchDir.toString(), ".${filename}") - def jsonStr = JsonOutput.toJson(params) - temp_pf.text = JsonOutput.prettyPrint(jsonStr) - - FilesEx.copyTo(temp_pf.toPath(), "${params.outdir}/pipeline_info/params_${timestamp}.json") - temp_pf.delete() - } - - // - // Print pipeline summary on completion - // - public static void summary(workflow, params, log) { - Map colors = logColours(params.monochrome_logs) - if (workflow.success) { - if (workflow.stats.ignoredCount == 0) { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" - } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" - } - } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" - } - } - - // - // ANSII Colours used for terminal logging - // - public static Map logColours(Boolean monochrome_logs) { - Map colorcodes = [:] - - // Reset / Meta - colorcodes['reset'] = monochrome_logs ? '' : "\033[0m" - colorcodes['bold'] = monochrome_logs ? '' : "\033[1m" - colorcodes['dim'] = monochrome_logs ? '' : "\033[2m" - colorcodes['underlined'] = monochrome_logs ? '' : "\033[4m" - colorcodes['blink'] = monochrome_logs ? '' : "\033[5m" - colorcodes['reverse'] = monochrome_logs ? '' : "\033[7m" - colorcodes['hidden'] = monochrome_logs ? '' : "\033[8m" - - // Regular Colors - colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m" - colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m" - colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m" - colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m" - colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m" - colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m" - colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m" - colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m" - - // Bold - colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m" - colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m" - colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m" - colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m" - colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m" - colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m" - colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m" - colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m" - - // Underline - colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m" - colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m" - colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m" - colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m" - colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m" - colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m" - colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m" - colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m" - - // High Intensity - colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m" - colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m" - colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m" - colorcodes['iyellow'] = monochrome_logs ? 
'' : "\033[0;93m" - colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m" - colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m" - colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m" - colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m" - - // Bold High Intensity - colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m" - colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m" - colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m" - colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m" - colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m" - colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m" - colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m" - colorcodes['biwhite'] = monochrome_logs ? '' : "\033[1;97m" - - return colorcodes - } - - // - // Does what is says on the tin - // - public static String dashedLine(monochrome_logs) { - Map colors = logColours(monochrome_logs) - return "-${colors.dim}----------------------------------------------------${colors.reset}-" - } - - // - // nf-core logo - // - public static String logo(workflow, monochrome_logs) { - Map colors = logColours(monochrome_logs) - String workflow_version = NfcoreTemplate.version(workflow) - String.format( - """\n - ${dashedLine(monochrome_logs)}{% if branded %} - ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} - ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} - ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} - ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} - ${colors.green}`._,._,\'${colors.reset}{% endif %} - ${colors.purple} ${workflow.manifest.name} ${workflow_version}${colors.reset} - ${dashedLine(monochrome_logs)} - """.stripIndent() - ) - } -} diff --git a/nf_core/pipeline-template/lib/Utils.groovy b/nf_core/pipeline-template/lib/Utils.groovy deleted file mode 100644 index 8d030f4e84..0000000000 --- a/nf_core/pipeline-template/lib/Utils.groovy +++ /dev/null @@ -1,47 +0,0 @@ -// -// This file holds several Groovy functions that could be useful for any Nextflow pipeline -// - -import org.yaml.snakeyaml.Yaml - -class Utils { - - // - // When running with -profile conda, warn if channels have not been set-up appropriately - // - public static void checkCondaChannels(log) { - Yaml parser = new Yaml() - def channels = [] - try { - def config = parser.load("conda config --show channels".execute().text) - channels = config.channels - } catch(NullPointerException | IOException e) { - log.warn "Could not verify conda channel configuration." - return - } - - // Check that all channels are present - // This channel list is ordered by required channel priority. 
- def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults'] - def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean - - // Check that they are in the right order - def channel_priority_violation = false - def n = required_channels_in_order.size() - for (int i = 0; i < n - 1; i++) { - channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1])) - } - - if (channels_missing | channel_priority_violation) { - log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + - " There is a problem with your Conda configuration!\n\n" + - " You will need to set-up the conda-forge and bioconda channels correctly.\n" + - " Please refer to https://bioconda.github.io/\n" + - " The observed channel order is \n" + - " ${channels}\n" + - " but the following channel order is required:\n" + - " ${required_channels_in_order}\n" + - "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" - } - } -} diff --git a/nf_core/pipeline-template/lib/WorkflowMain.groovy b/nf_core/pipeline-template/lib/WorkflowMain.groovy deleted file mode 100755 index a254b2b22a..0000000000 --- a/nf_core/pipeline-template/lib/WorkflowMain.groovy +++ /dev/null @@ -1,80 +0,0 @@ -// -// This file holds several functions specific to the main.nf workflow in the {{ name }} pipeline -// - -import nextflow.Nextflow - -class WorkflowMain { - - // - // Citation string for pipeline - // - public static String citation(workflow) { - return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + - // TODO nf-core: Add Zenodo DOI for pipeline after first release - //"* The pipeline\n" + - //" https://doi.org/10.5281/zenodo.XXXXXXX\n\n" + - "* The nf-core framework\n" + - " https://doi.org/10.1038/s41587-020-0439-x\n\n" + - "* Software dependencies\n" + - " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" - } - - - // - // Validate parameters and print summary to screen - // - public static void initialise(workflow, params, log, args) { - - // Print workflow version and exit on --version - if (params.version) { - String workflow_version = NfcoreTemplate.version(workflow) - log.info "${workflow.manifest.name} ${workflow_version}" - System.exit(0) - } - - // Check that a -profile or Nextflow config has been provided to run the pipeline - NfcoreTemplate.checkConfigProvided(workflow, log) - // Check that the profile doesn't contain spaces and doesn't end with a trailing comma - checkProfile(workflow.profile, args, log) - - // Check that conda channels are set-up correctly - if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { - Utils.checkCondaChannels(log) - } - - // Check AWS batch settings - NfcoreTemplate.awsBatch(workflow, params) - - // Check input has been provided - if (!params.input) { - Nextflow.error("Please provide an input samplesheet to the pipeline e.g. '--input samplesheet.csv'") - } - } - - {%- if igenomes %} - // - // Get attribute from genome config file e.g. 
fasta - // - public static Object getGenomeAttribute(params, attribute) { - if (params.genomes && params.genome && params.genomes.containsKey(params.genome)) { - if (params.genomes[ params.genome ].containsKey(attribute)) { - return params.genomes[ params.genome ][ attribute ] - } - } - return null - } - {%- endif %} - - // - // Exit pipeline if --profile contains spaces - // - private static void checkProfile(profile, args, log) { - if (profile.endsWith(',')) { - Nextflow.error "Profile cannot end with a trailing comma. Please remove the comma from the end of the profile string.\nHint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead,e.g., `-profile docker,test`." - } - if (args[0]) { - log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${args[0]}` has been detected.\n Hint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead,e.g., `-profile docker,test`." - } - } -} diff --git a/nf_core/pipeline-template/lib/WorkflowPipeline.groovy b/nf_core/pipeline-template/lib/WorkflowPipeline.groovy deleted file mode 100755 index f9a7859ef5..0000000000 --- a/nf_core/pipeline-template/lib/WorkflowPipeline.groovy +++ /dev/null @@ -1,123 +0,0 @@ -// -// This file holds several functions specific to the workflow/{{ short_name }}.nf in the {{ name }} pipeline -// - -import nextflow.Nextflow -import groovy.text.SimpleTemplateEngine - -class Workflow{{ short_name[0]|upper }}{{ short_name[1:] }} { - - // - // Check and validate parameters - // - public static void initialise(params, log) { -{% if igenomes %} - genomeExistsError(params, log) -{% endif %} - - if (!params.fasta) { - Nextflow.error "Genome fasta file not specified with e.g. '--fasta genome.fa' or via a detectable config file." - } - } - - // - // Get workflow summary for MultiQC - // - public static String paramsSummaryMultiqc(workflow, summary) { - String summary_section = '' - for (group in summary.keySet()) { - def group_params = summary.get(group) // This gets the parameters of that particular group - if (group_params) { - summary_section += " $group\n" - summary_section += " \n" - for (param in group_params.keySet()) { - summary_section += " $param${group_params.get(param) ?: 'N/A'}\n" - } - summary_section += " \n" - } - } - - String yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n" - yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n" - yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n" - yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n" - yaml_file_text += "plot_type: 'html'\n" - yaml_file_text += "data: |\n" - yaml_file_text += "${summary_section}" - return yaml_file_text - } - - // - // Generate methods description for MultiQC - // - - public static String toolCitationText(params) { - - // TODO nf-core: Optionally add in-text citation tools to this list. - // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "Tool (Foo et al. 2023)" : "", - // Uncomment function in methodsDescriptionText to render in MultiQC report - def citation_text = [ - "Tools used in the workflow included:", - "FastQC (Andrews 2010),", - "MultiQC (Ewels et al. 2016)", - "." 
- ].join(' ').trim() - - return citation_text - } - - public static String toolBibliographyText(params) { - - // TODO Optionally add bibliographic entries to this list. - // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "Author (2023) Pub name, Journal, DOI" : "", - // Uncomment function in methodsDescriptionText to render in MultiQC report - def reference_text = [ - "Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).", - "Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354" - ].join(' ').trim() - - return reference_text - } - - public static String methodsDescriptionText(run_workflow, mqc_methods_yaml, params) { - // Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file - def meta = [:] - meta.workflow = run_workflow.toMap() - meta["manifest_map"] = run_workflow.manifest.toMap() - - // Pipeline DOI - meta["doi_text"] = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : "" - meta["nodoi_text"] = meta.manifest_map.doi ? "": "If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used. " - - // Tool references - meta["tool_citations"] = "" - meta["tool_bibliography"] = "" - - // TODO Only uncomment below if logic in toolCitationText/toolBibliographyText has been filled! - //meta["tool_citations"] = toolCitationText(params).replaceAll(", \\.", ".").replaceAll("\\. \\.", ".").replaceAll(", \\.", ".") - //meta["tool_bibliography"] = toolBibliographyText(params) - - - def methods_text = mqc_methods_yaml.text - - def engine = new SimpleTemplateEngine() - def description_html = engine.createTemplate(methods_text).make(meta) - - return description_html - } - {%- if igenomes %} - - // - // Exit pipeline if incorrect --genome key provided - // - private static void genomeExistsError(params, log) { - if (params.genomes && params.genome && !params.genomes.containsKey(params.genome)) { - def error_string = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + - " Genome '${params.genome}' not found in any config files provided to the pipeline.\n" + - " Currently, the available genome keys are:\n" + - " ${params.genomes.keySet().join(", ")}\n" + - "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" - Nextflow.error(error_string) - } - } -{% endif -%}} diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index 78da158856..2590f7467b 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -12,7 +12,19 @@ */ nextflow.enable.dsl = 2 + +/* +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + IMPORT FUNCTIONS / MODULES / SUBWORKFLOWS / WORKFLOWS +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +*/ + +include { {{ short_name|upper }} } from './workflows/{{ short_name }}' +include { PIPELINE_INITIALISATION } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' +include { PIPELINE_COMPLETION } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' {% if igenomes %} +include { getGenomeAttribute } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' + /* 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ GENOME PARAMETER VALUES @@ -22,59 +34,77 @@ nextflow.enable.dsl = 2 // TODO nf-core: Remove this line if you don't need a FASTA file // This is an example of how to use getGenomeAttribute() to fetch parameters // from igenomes.config using `--genome` -params.fasta = WorkflowMain.getGenomeAttribute(params, 'fasta') +params.fasta = getGenomeAttribute('fasta') {% endif %} /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - VALIDATE & PRINT PARAMETER SUMMARY + NAMED WORKFLOWS FOR PIPELINE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -include { validateParameters; paramsHelp } from 'plugin/nf-validation' +// +// WORKFLOW: Run main analysis pipeline depending on type of input +// +workflow {{ prefix_nodash|upper }}_{{ short_name|upper }} { -// Print help message if needed -if (params.help) { - def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs) - def citation = '\n' + WorkflowMain.citation(workflow) + '\n' - def String command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker" - log.info logo + paramsHelp(command) + citation + NfcoreTemplate.dashedLine(params.monochrome_logs) - System.exit(0) -} + take: + samplesheet // channel: samplesheet read in from --input -// Validate input parameters -if (params.validate_params) { - validateParameters() -} + main: -WorkflowMain.initialise(workflow, params, log, args) + // + // WORKFLOW: Run pipeline + // + {{ short_name|upper }} ( + samplesheet + ) -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - NAMED WORKFLOW FOR PIPELINE -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ + emit: + multiqc_report = {{ short_name|upper }}.out.multiqc_report // channel: /path/to/multiqc_report.html -include { {{ short_name|upper }} } from './workflows/{{ short_name }}' - -// -// WORKFLOW: Run main {{ name }} analysis pipeline -// -workflow {{ prefix_nodash|upper }}_{{ short_name|upper }} { - {{ short_name|upper }} () } - /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - RUN ALL WORKFLOWS + RUN MAIN WORKFLOW ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -// -// WORKFLOW: Execute a single named workflow for the pipeline -// See: https://github.com/nf-core/rnaseq/issues/619 -// workflow { - {{ prefix_nodash|upper }}_{{ short_name|upper }} () + + main: + + // + // SUBWORKFLOW: Run initialisation tasks + // + PIPELINE_INITIALISATION ( + params.version, + params.help, + params.validate_params, + params.monochrome_logs, + args, + params.outdir, + params.input + ) + + // + // WORKFLOW: Run main workflow + // + {{ prefix_nodash|upper }}_{{ short_name|upper }} ( + PIPELINE_INITIALISATION.out.samplesheet + ) + + // + // SUBWORKFLOW: Run completion tasks + // + PIPELINE_COMPLETION ( + params.email, + params.email_on_fail, + params.plaintext_email, + params.outdir, + params.monochrome_logs, + params.hook_url, + {{ prefix_nodash|upper }}_{{ short_name|upper }}.out.multiqc_report + ) } /* diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 8660da2d42..97cbbe6df5 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -5,22 +5,36 @@ "https://github.com/nf-core/modules.git": { "modules": 
{ "nf-core": { - "custom/dumpsoftwareversions": { - "branch": "master", - "git_sha": "8ec825f465b9c17f9d83000022995b4f7de6fe93", - "installed_by": ["modules"] - }, "fastqc": { "branch": "master", - "git_sha": "c9488585ce7bd35ccd2a30faa2371454c8112fb9", + "git_sha": "f4ae1d942bd50c5c0b9bd2de1393ce38315ba57c", "installed_by": ["modules"] }, "multiqc": { "branch": "master", - "git_sha": "8ec825f465b9c17f9d83000022995b4f7de6fe93", + "git_sha": "ccacf6f5de6df3bc6d73b665c1fd2933d8bbc290", "installed_by": ["modules"] } } + }, + "subworkflows": { + "nf-core": { + "utils_nextflow_pipeline": { + "branch": "master", + "git_sha": "cd08c91373cd00a73255081340e4914485846ba1", + "installed_by": ["subworkflows"] + }, + "utils_nfcore_pipeline": { + "branch": "master", + "git_sha": "262b17ed2aad591039f914951659177e6c39a8d8", + "installed_by": ["subworkflows"] + }, + "utils_nfvalidation_plugin": { + "branch": "master", + "git_sha": "cd08c91373cd00a73255081340e4914485846ba1", + "installed_by": ["subworkflows"] + } + } } } } diff --git a/nf_core/pipeline-template/modules/local/samplesheet_check.nf b/nf_core/pipeline-template/modules/local/samplesheet_check.nf deleted file mode 100644 index 77be6dfff4..0000000000 --- a/nf_core/pipeline-template/modules/local/samplesheet_check.nf +++ /dev/null @@ -1,31 +0,0 @@ -process SAMPLESHEET_CHECK { - tag "$samplesheet" - label 'process_single' - - conda "conda-forge::python=3.8.3" - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/python:3.8.3' : - 'biocontainers/python:3.8.3' }" - - input: - path samplesheet - - output: - path '*.csv' , emit: csv - path "versions.yml", emit: versions - - when: - task.ext.when == null || task.ext.when - - script: // This script is bundled with the pipeline, in {{ name }}/bin/ - """ - check_samplesheet.py \\ - $samplesheet \\ - samplesheet.valid.csv - - cat <<-END_VERSIONS > versions.yml - "${task.process}": - python: \$(python --version | sed 's/Python //g') - END_VERSIONS - """ -} diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/environment.yml b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/environment.yml deleted file mode 100644 index 9b3272bc11..0000000000 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/environment.yml +++ /dev/null @@ -1,7 +0,0 @@ -name: custom_dumpsoftwareversions -channels: - - conda-forge - - bioconda - - defaults -dependencies: - - bioconda::multiqc=1.19 diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf deleted file mode 100644 index f2187611cc..0000000000 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf +++ /dev/null @@ -1,24 +0,0 @@ -process CUSTOM_DUMPSOFTWAREVERSIONS { - label 'process_single' - - // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container - conda "${moduleDir}/environment.yml" - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/multiqc:1.19--pyhdfd78af_0' : - 'biocontainers/multiqc:1.19--pyhdfd78af_0' }" - - input: - path versions - - output: - path "software_versions.yml" , emit: yml - path "software_versions_mqc.yml", emit: mqc_yml - path "versions.yml" , emit: versions - - when: - task.ext.when == null || task.ext.when - - script: - def args = task.ext.args ?: '' - template 'dumpsoftwareversions.py' -} diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml deleted file mode 100644 index 5f15a5fde0..0000000000 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml +++ /dev/null @@ -1,37 +0,0 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json -name: custom_dumpsoftwareversions -description: Custom module used to dump software versions within the nf-core pipeline template -keywords: - - custom - - dump - - version -tools: - - custom: - description: Custom module used to dump software versions within the nf-core pipeline template - homepage: https://github.com/nf-core/tools - documentation: https://github.com/nf-core/tools - licence: ["MIT"] -input: - - versions: - type: file - description: YML file containing software versions - pattern: "*.yml" -output: - - yml: - type: file - description: Standard YML file containing software versions - pattern: "software_versions.yml" - - mqc_yml: - type: file - description: MultiQC custom content YML file containing software versions - pattern: "software_versions_mqc.yml" - - versions: - type: file - description: File containing software versions - pattern: "versions.yml" -authors: - - "@drpatelh" - - "@grst" -maintainers: - - "@drpatelh" - - "@grst" diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py deleted file mode 100755 index e55b8d43a9..0000000000 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py +++ /dev/null @@ -1,102 +0,0 @@ -#!/usr/bin/env python - - -"""Provide functions to merge multiple versions.yml files.""" - - -import platform -from textwrap import dedent - -import yaml - - -def _make_versions_html(versions): - """Generate a tabular HTML output of all versions for MultiQC.""" - html = [ - dedent( - """\\ - - - - - Process Name - Software - Version - - - """ - ) - ] - for process, tmp_versions in sorted(versions.items()): - html.append("") - for i, (tool, version) in enumerate(sorted(tmp_versions.items())): - html.append( - dedent( - f"""\\ - - {process if (i == 0) else ''} - {tool} - {version} - - """ - ) - ) - html.append("") - html.append("") - return "\\n".join(html) - - -def main(): - """Load all version files and generate merged output.""" - versions_this_module = {} - versions_this_module["${task.process}"] = { - "python": platform.python_version(), - "yaml": yaml.__version__, - } - - with open("$versions") as f: - versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module - - # aggregate versions by the module name (derived from fully-qualified process name) - versions_by_module = {} - for process, process_versions in versions_by_process.items(): - module = process.split(":")[-1] - try: - if versions_by_module[module] != process_versions: - raise 
AssertionError( - "We assume that software versions are the same between all modules. " - "If you see this error-message it means you discovered an edge-case " - "and should open an issue in nf-core/tools. " - ) - except KeyError: - versions_by_module[module] = process_versions - - versions_by_module["Workflow"] = { - "Nextflow": "$workflow.nextflow.version", - "$workflow.manifest.name": "$workflow.manifest.version", - } - - versions_mqc = { - "id": "software_versions", - "section_name": "${workflow.manifest.name} Software Versions", - "section_href": "https://github.com/${workflow.manifest.name}", - "plot_type": "html", - "description": "are collected at run time from the software output.", - "data": _make_versions_html(versions_by_module), - } - - with open("software_versions.yml", "w") as f: - yaml.dump(versions_by_module, f, default_flow_style=False) - with open("software_versions_mqc.yml", "w") as f: - yaml.dump(versions_mqc, f, default_flow_style=False) - - with open("versions.yml", "w") as f: - yaml.dump(versions_this_module, f, default_flow_style=False) - - -if __name__ == "__main__": - main() diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test deleted file mode 100644 index b1e1630bb3..0000000000 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test +++ /dev/null @@ -1,43 +0,0 @@ -nextflow_process { - - name "Test Process CUSTOM_DUMPSOFTWAREVERSIONS" - script "../main.nf" - process "CUSTOM_DUMPSOFTWAREVERSIONS" - tag "modules" - tag "modules_nfcore" - tag "custom" - tag "dumpsoftwareversions" - tag "custom/dumpsoftwareversions" - - test("Should run without failures") { - when { - process { - """ - def tool1_version = ''' - TOOL1: - tool1: 0.11.9 - '''.stripIndent() - - def tool2_version = ''' - TOOL2: - tool2: 1.9 - '''.stripIndent() - - input[0] = Channel.of(tool1_version, tool2_version).collectFile() - """ - } - } - - then { - assertAll( - { assert process.success }, - { assert snapshot( - process.out.versions, - file(process.out.mqc_yml[0]).readLines()[0..10], - file(process.out.yml[0]).readLines()[0..7] - ).match() - } - ) - } - } -} diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap deleted file mode 100644 index 5f59a936d7..0000000000 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap +++ /dev/null @@ -1,33 +0,0 @@ -{ - "Should run without failures": { - "content": [ - [ - "versions.yml:md5,76d454d92244589d32455833f7c1ba6d" - ], - [ - "data: \"\\n\\n \\n \\n Process Name \\n \\", - " \\ Software \\n Version \\n \\n \\n\\", - " \\n\\n\\n CUSTOM_DUMPSOFTWAREVERSIONS\\n python\\n\\", - " \\ 3.11.7\\n\\n\\n\\n \\n \\", - " \\ yaml\\n 5.4.1\\n\\n\\n\\n\\", - " \\n\\n TOOL1\\n tool1\\n\\", - " \\ 0.11.9\\n\\n\\n\\n\\n\\n TOOL2\\n\\", - " \\ tool2\\n 1.9\\n\\n\\n\\n\\", - " \\n\\n Workflow\\n Nextflow\\n\\" - ], - [ - "CUSTOM_DUMPSOFTWAREVERSIONS:", - " python: 3.11.7", - " yaml: 5.4.1", - "TOOL1:", - " tool1: 0.11.9", - "TOOL2:", - " tool2: '1.9'", - "Workflow:" - ] - ], - "timestamp": "2024-01-09T23:01:18.710682" - } -} \ No newline at end of file diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml 
b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml deleted file mode 100644 index 405aa24ae3..0000000000 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml +++ /dev/null @@ -1,2 +0,0 @@ -custom/dumpsoftwareversions: - - modules/nf-core/custom/dumpsoftwareversions/** diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test index 1f21c66469..70edae4d99 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test @@ -33,7 +33,7 @@ nextflow_process { { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - { assert snapshot(process.out.versions).match("versions") } + { assert snapshot(process.out.versions).match("fastqc_versions_single") } ) } } @@ -63,7 +63,7 @@ nextflow_process { { assert path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, - { assert snapshot(process.out.versions).match("versions") } + { assert snapshot(process.out.versions).match("fastqc_versions_paired") } ) } } @@ -89,7 +89,7 @@ nextflow_process { { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - { assert snapshot(process.out.versions).match("versions") } + { assert snapshot(process.out.versions).match("fastqc_versions_interleaved") } ) } } @@ -115,7 +115,7 @@ nextflow_process { { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - { assert snapshot(process.out.versions).match("versions") } + { assert snapshot(process.out.versions).match("fastqc_versions_bam") } ) } } @@ -153,7 +153,7 @@ nextflow_process { { assert path(process.out.html[0][1][2]).text.contains("File typeConventional base calls") }, { assert path(process.out.html[0][1][3]).text.contains("File typeConventional base calls") }, - { assert snapshot(process.out.versions).match("versions") } + { assert snapshot(process.out.versions).match("fastqc_versions_multiple") } ) } } @@ -179,7 +179,7 @@ nextflow_process { { assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" }, { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - { assert snapshot(process.out.versions).match("versions") } + { assert snapshot(process.out.versions).match("fastqc_versions_custom_prefix") } ) } } @@ -204,7 +204,7 @@ nextflow_process { { assert process.success }, { assert snapshot(process.out.html.collect { file(it[1]).getName() } + process.out.zip.collect { file(it[1]).getName() } + - process.out.versions ).match() } + process.out.versions ).match("fastqc_stub") } ) } } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap index 5d624bb82e..86f7c31154 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap @@ -1,5 +1,17 @@ { - "sarscov2 single-end [fastq] - stub": { + "fastqc_versions_interleaved": { + "content": [ + [ + 
"versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ] + ], + "meta": { + "nf-test": "0.8.4", + "nextflow": "23.10.1" + }, + "timestamp": "2024-01-31T17:40:07.293713" + }, + "fastqc_stub": { "content": [ [ "test.html", @@ -7,14 +19,70 @@ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], - "timestamp": "2024-01-17T18:40:57.254299" + "meta": { + "nf-test": "0.8.4", + "nextflow": "23.10.1" + }, + "timestamp": "2024-01-31T17:31:01.425198" + }, + "fastqc_versions_multiple": { + "content": [ + [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ] + ], + "meta": { + "nf-test": "0.8.4", + "nextflow": "23.10.1" + }, + "timestamp": "2024-01-31T17:40:55.797907" + }, + "fastqc_versions_bam": { + "content": [ + [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ] + ], + "meta": { + "nf-test": "0.8.4", + "nextflow": "23.10.1" + }, + "timestamp": "2024-01-31T17:40:26.795862" + }, + "fastqc_versions_single": { + "content": [ + [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ] + ], + "meta": { + "nf-test": "0.8.4", + "nextflow": "23.10.1" + }, + "timestamp": "2024-01-31T17:39:27.043675" + }, + "fastqc_versions_paired": { + "content": [ + [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ] + ], + "meta": { + "nf-test": "0.8.4", + "nextflow": "23.10.1" + }, + "timestamp": "2024-01-31T17:39:47.584191" }, - "versions": { + "fastqc_versions_custom_prefix": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], - "timestamp": "2024-01-17T18:36:50.033627" + "meta": { + "nf-test": "0.8.4", + "nextflow": "23.10.1" + }, + "timestamp": "2024-01-31T17:41:14.576531" } } \ No newline at end of file diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml index 7625b75206..2212096af4 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml @@ -4,4 +4,4 @@ channels: - bioconda - defaults dependencies: - - bioconda::multiqc=1.19 + - bioconda::multiqc=1.20 diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf index 1b9f7c431d..354f4430f9 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf @@ -3,8 +3,8 @@ process MULTIQC { conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/multiqc:1.19--pyhdfd78af_0' : - 'biocontainers/multiqc:1.19--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.20--pyhdfd78af_0' : + 'biocontainers/multiqc:1.20--pyhdfd78af_0' }" input: path multiqc_files, stageAs: "?/*" diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test index d0438eda6b..f1c4242ef2 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test @@ -3,6 +3,7 @@ nextflow_process { name "Test Process MULTIQC" script "../main.nf" process "MULTIQC" + tag "modules" tag "modules_nfcore" tag "multiqc" @@ -12,7 +13,7 @@ nextflow_process { when { process { """ - input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)]) + input[0] = Channel.of(file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastqc/test_fastqc.zip', checkIfExists: true)) input[1] = [] input[2] = [] input[3] = [] @@ -25,7 +26,7 @@ nextflow_process { { assert process.success }, { assert process.out.report[0] ==~ ".*/multiqc_report.html" }, { assert process.out.data[0] ==~ ".*/multiqc_data" }, - { assert snapshot(process.out.versions).match("versions") } + { assert snapshot(process.out.versions).match("multiqc_versions_single") } ) } @@ -36,7 +37,7 @@ nextflow_process { when { process { """ - input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)]) + input[0] = Channel.of(file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastqc/test_fastqc.zip', checkIfExists: true)) input[1] = Channel.of(file("https://github.com/nf-core/tools/raw/dev/nf_core/pipeline-template/assets/multiqc_config.yml", checkIfExists: true)) input[2] = [] input[3] = [] @@ -49,7 +50,7 @@ nextflow_process { { assert process.success }, { assert process.out.report[0] ==~ ".*/multiqc_report.html" }, { assert process.out.data[0] ==~ ".*/multiqc_data" }, - { assert snapshot(process.out.versions).match("versions") } + { assert snapshot(process.out.versions).match("multiqc_versions_config") } ) } } @@ -61,7 +62,7 @@ nextflow_process { when { process { """ - input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)]) + input[0] = Channel.of(file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastqc/test_fastqc.zip', checkIfExists: true)) input[1] = [] input[2] = [] input[3] = [] @@ -75,7 +76,7 @@ nextflow_process { { assert snapshot(process.out.report.collect { file(it).getName() } + process.out.data.collect { file(it).getName() } + process.out.plots.collect { file(it).getName() } + - process.out.versions ).match() } + process.out.versions ).match("multiqc_stub") } ) } diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap index d37e73040d..c204b4881e 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap @@ -1,21 +1,41 @@ { - "versions": { + "multiqc_versions_single": { "content": [ [ - "versions.yml:md5,14e9a2661241abd828f4f06a7b5c222d" + "versions.yml:md5,d320d4c37e349c5588e07e7a31cd4186" ] ], - "timestamp": "2024-01-09T23:02:49.911994" 
+ "meta": { + "nf-test": "0.8.4", + "nextflow": "23.10.1" + }, + "timestamp": "2024-02-14T09:28:51.744211298" }, - "sarscov2 single-end [fastqc] - stub": { + "multiqc_stub": { "content": [ [ "multiqc_report.html", "multiqc_data", "multiqc_plots", - "versions.yml:md5,14e9a2661241abd828f4f06a7b5c222d" + "versions.yml:md5,d320d4c37e349c5588e07e7a31cd4186" ] ], - "timestamp": "2024-01-09T23:03:14.524346" + "meta": { + "nf-test": "0.8.4", + "nextflow": "23.10.1" + }, + "timestamp": "2024-02-14T09:29:28.847433492" + }, + "multiqc_versions_config": { + "content": [ + [ + "versions.yml:md5,d320d4c37e349c5588e07e7a31cd4186" + ] + ], + "meta": { + "nf-test": "0.8.4", + "nextflow": "23.10.1" + }, + "timestamp": "2024-02-14T09:29:13.223621555" } } \ No newline at end of file diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 3153ff70d6..17e75f18a4 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -18,9 +18,8 @@ params { genome = null igenomes_base = 's3://ngi-igenomes/igenomes/' igenomes_ignore = false - {% else %} fasta = null - {%- endif %} + {%- endif -%} // MultiQC options multiqc_config = null @@ -49,7 +48,7 @@ params { custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" config_profile_contact = null config_profile_url = null - {% endif %} + {%- endif %} // Max resource options // Defaults only, expecting to be overwritten diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 080797b4eb..77b1a7a070 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -16,6 +16,7 @@ "type": "string", "format": "file-path", "exists": true, + "schema": "assets/schema_input.json", "mimetype": "text/csv", "pattern": "^\\S+\\.csv$", "description": "Path to comma-separated file containing information about the samples in the experiment.", @@ -42,6 +43,7 @@ } } }, + {%- if igenomes %} "reference_genome_options": { "title": "Reference genome options", "type": "object", @@ -73,6 +75,7 @@ } } }, + {%- endif %} "institutional_config_options": { "title": "Institutional config options", "type": "object", @@ -272,9 +275,9 @@ { "$ref": "#/definitions/input_output_options" }, - { + {% if igenomes %}{ "$ref": "#/definitions/reference_genome_options" - }, + },{% endif %} { "$ref": "#/definitions/institutional_config_options" }, diff --git a/nf_core/pipeline-template/pyproject.toml b/nf_core/pipeline-template/pyproject.toml index 7d08e1c8ef..56110621e7 100644 --- a/nf_core/pipeline-template/pyproject.toml +++ b/nf_core/pipeline-template/pyproject.toml @@ -3,11 +3,13 @@ [tool.ruff] line-length = 120 target-version = "py38" -select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"] cache-dir = "~/.cache/ruff" -[tool.ruff.isort] +[tool.ruff.lint] +select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"] + +[tool.ruff.lint.isort] known-first-party = ["nf_core"] -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "__init__.py" = ["E402", "F401"] diff --git a/nf_core/pipeline-template/subworkflows/local/input_check.nf b/nf_core/pipeline-template/subworkflows/local/input_check.nf deleted file mode 100644 index 0aecf87fb7..0000000000 --- a/nf_core/pipeline-template/subworkflows/local/input_check.nf +++ /dev/null @@ -1,44 +0,0 @@ -// -// Check input samplesheet and get read channels -// - -include { SAMPLESHEET_CHECK } from 
'../../modules/local/samplesheet_check' - -workflow INPUT_CHECK { - take: - samplesheet // file: /path/to/samplesheet.csv - - main: - SAMPLESHEET_CHECK ( samplesheet ) - .csv - .splitCsv ( header:true, sep:',' ) - .map { create_fastq_channel(it) } - .set { reads } - - emit: - reads // channel: [ val(meta), [ reads ] ] - versions = SAMPLESHEET_CHECK.out.versions // channel: [ versions.yml ] -} - -// Function to get list of [ meta, [ fastq_1, fastq_2 ] ] -def create_fastq_channel(LinkedHashMap row) { - // create meta map - def meta = [:] - meta.id = row.sample - meta.single_end = row.single_end.toBoolean() - - // add path(s) of the fastq file(s) to the meta map - def fastq_meta = [] - if (!file(row.fastq_1).exists()) { - exit 1, "ERROR: Please check input samplesheet -> Read 1 FastQ file does not exist!\n${row.fastq_1}" - } - if (meta.single_end) { - fastq_meta = [ meta, [ file(row.fastq_1) ] ] - } else { - if (!file(row.fastq_2).exists()) { - exit 1, "ERROR: Please check input samplesheet -> Read 2 FastQ file does not exist!\n${row.fastq_2}" - } - fastq_meta = [ meta, [ file(row.fastq_1), file(row.fastq_2) ] ] - } - return fastq_meta -} diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf new file mode 100644 index 0000000000..24d4c2d7e1 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -0,0 +1,260 @@ +// +// Subworkflow with functionality specific to the nf-core/pipeline pipeline +// + +/* +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + IMPORT FUNCTIONS / MODULES / SUBWORKFLOWS +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +*/ + +include { UTILS_NFVALIDATION_PLUGIN } from '../../nf-core/utils_nfvalidation_plugin' +include { paramsSummaryMap } from 'plugin/nf-validation' +include { fromSamplesheet } from 'plugin/nf-validation' +include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' +include { completionEmail } from '../../nf-core/utils_nfcore_pipeline' +include { completionSummary } from '../../nf-core/utils_nfcore_pipeline' +include { dashedLine } from '../../nf-core/utils_nfcore_pipeline' +include { nfCoreLogo } from '../../nf-core/utils_nfcore_pipeline' +include { imNotification } from '../../nf-core/utils_nfcore_pipeline' +include { UTILS_NFCORE_PIPELINE } from '../../nf-core/utils_nfcore_pipeline' +include { workflowCitation } from '../../nf-core/utils_nfcore_pipeline' + +/* +======================================================================================== + SUBWORKFLOW TO INITIALISE PIPELINE +======================================================================================== +*/ + +workflow PIPELINE_INITIALISATION { + + take: + version // boolean: Display version and exit + help // boolean: Display help text + validate_params // boolean: Boolean whether to validate parameters against the schema at runtime + monochrome_logs // boolean: Do not use coloured log outputs + nextflow_cli_args // array: List of positional nextflow CLI args + outdir // string: The output directory where the results will be saved + input // string: Path to input samplesheet + + main: + + ch_versions = Channel.empty() + + // + // Print version and exit if required and dump pipeline parameters to JSON file + // + UTILS_NEXTFLOW_PIPELINE ( + version, + true, + outdir, + 
workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1 + ) + + // + // Validate parameters and generate parameter summary to stdout + // + pre_help_text = nfCoreLogo(monochrome_logs) + post_help_text = '\n' + workflowCitation() + '\n' + dashedLine(monochrome_logs) + def String workflow_command = "nextflow run ${workflow.manifest.name} -profile --input samplesheet.csv --outdir " + UTILS_NFVALIDATION_PLUGIN ( + help, + workflow_command, + pre_help_text, + post_help_text, + validate_params, + "nextflow_schema.json" + ) + + // + // Check config provided to the pipeline + // + UTILS_NFCORE_PIPELINE ( + nextflow_cli_args + ) + + {%- if igenomes %} + // + // Custom validation for pipeline parameters + // + validateInputParameters() + {%- endif %} + + // + // Create channel from input file provided through params.input + // + Channel + .fromSamplesheet("input") + .map { + meta, fastq_1, fastq_2 -> + if (!fastq_2) { + return [ meta.id, meta + [ single_end:true ], [ fastq_1 ] ] + } else { + return [ meta.id, meta + [ single_end:false ], [ fastq_1, fastq_2 ] ] + } + } + .groupTuple() + .map { + validateInputSamplesheet(it) + } + .map { + meta, fastqs -> + return [ meta, fastqs.flatten() ] + } + .set { ch_samplesheet } + + emit: + samplesheet = ch_samplesheet + versions = ch_versions +} + +/* +======================================================================================== + SUBWORKFLOW FOR PIPELINE COMPLETION +======================================================================================== +*/ + +workflow PIPELINE_COMPLETION { + + take: + email // string: email address + email_on_fail // string: email address sent on pipeline failure + plaintext_email // boolean: Send plain-text email instead of HTML + outdir // path: Path to output directory where results will be published + monochrome_logs // boolean: Disable ANSI colour codes in log output + hook_url // string: hook URL for notifications + multiqc_report // string: Path to MultiQC report + + main: + + summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json") + + // + // Completion email and summary + // + workflow.onComplete { + if (email || email_on_fail) { + completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, multiqc_report.toList()) + } + + completionSummary(monochrome_logs) + + if (hook_url) { + imNotification(summary_params, hook_url) + } + } +} + +/* +======================================================================================== + FUNCTIONS +======================================================================================== +*/ + +{%- if igenomes %} +// +// Check and validate pipeline parameters +// +def validateInputParameters() { + genomeExistsError() +} +{%- endif -%} + +// +// Validate channels from input samplesheet +// +def validateInputSamplesheet(input) { + def (metas, fastqs) = input[1..2] + + // Check that multiple runs of the same sample are of the same datatype i.e. single-end / paired-end + def endedness_ok = metas.collect{ it.single_end }.unique().size == 1 + if (!endedness_ok) { + error("Please check input samplesheet -> Multiple runs of a sample must be of the same datatype i.e. single-end or paired-end: ${metas[0].id}") + } + + return [ metas[0], fastqs ] +} + +{%- if igenomes %} +// +// Get attribute from genome config file e.g. 
fasta +// +def getGenomeAttribute(attribute) { + if (params.genomes && params.genome && params.genomes.containsKey(params.genome)) { + if (params.genomes[ params.genome ].containsKey(attribute)) { + return params.genomes[ params.genome ][ attribute ] + } + } + return null +} + +// +// Exit pipeline if incorrect --genome key provided +// +def genomeExistsError() { + if (params.genomes && params.genome && !params.genomes.containsKey(params.genome)) { + def error_string = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + + " Genome '${params.genome}' not found in any config files provided to the pipeline.\n" + + " Currently, the available genome keys are:\n" + + " ${params.genomes.keySet().join(", ")}\n" + + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" + error(error_string) + } +} +{%- endif -%} + +// +// Generate methods description for MultiQC +// +def toolCitationText() { + // TODO nf-core: Optionally add in-text citation tools to this list. + // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "Tool (Foo et al. 2023)" : "", + // Uncomment function in methodsDescriptionText to render in MultiQC report + def citation_text = [ + "Tools used in the workflow included:", + "FastQC (Andrews 2010),", + "MultiQC (Ewels et al. 2016)", + "." + ].join(' ').trim() + + return citation_text +} + +def toolBibliographyText() { + // TODO nf-core: Optionally add bibliographic entries to this list. + // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "Author (2023) Pub name, Journal, DOI" : "", + // Uncomment function in methodsDescriptionText to render in MultiQC report + def reference_text = [ + "Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).", + "Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354" + ].join(' ').trim() + + return reference_text +} + +def methodsDescriptionText(mqc_methods_yaml) { + // Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file + def meta = [:] + meta.workflow = workflow.toMap() + meta["manifest_map"] = workflow.manifest.toMap() + + // Pipeline DOI + meta["doi_text"] = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : "" + meta["nodoi_text"] = meta.manifest_map.doi ? "": "If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used. " + + // Tool references + meta["tool_citations"] = "" + meta["tool_bibliography"] = "" + + // TODO nf-core: Only uncomment below if logic in toolCitationText/toolBibliographyText has been filled! + // meta["tool_citations"] = toolCitationText().replaceAll(", \\.", ".").replaceAll("\\. 
\\.", ".").replaceAll(", \\.", ".") + // meta["tool_bibliography"] = toolBibliographyText() + + + def methods_text = mqc_methods_yaml.text + + def engine = new groovy.text.SimpleTemplateEngine() + def description_html = engine.createTemplate(methods_text).make(meta) + + return description_html.toString() +} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf new file mode 100644 index 0000000000..ac31f28f66 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf @@ -0,0 +1,126 @@ +// +// Subworkflow with functionality that may be useful for any Nextflow pipeline +// + +import org.yaml.snakeyaml.Yaml +import groovy.json.JsonOutput +import nextflow.extension.FilesEx + +/* +======================================================================================== + SUBWORKFLOW DEFINITION +======================================================================================== +*/ + +workflow UTILS_NEXTFLOW_PIPELINE { + + take: + print_version // boolean: print version + dump_parameters // boolean: dump parameters + outdir // path: base directory used to publish pipeline results + check_conda_channels // boolean: check conda channels + + main: + + // + // Print workflow version and exit on --version + // + if (print_version) { + log.info "${workflow.manifest.name} ${getWorkflowVersion()}" + System.exit(0) + } + + // + // Dump pipeline parameters to a JSON file + // + if (dump_parameters && outdir) { + dumpParametersToJSON(outdir) + } + + // + // When running with Conda, warn if channels have not been set-up appropriately + // + if (check_conda_channels) { + checkCondaChannels() + } + + emit: + dummy_emit = true +} + +/* +======================================================================================== + FUNCTIONS +======================================================================================== +*/ + +// +// Generate version string +// +def getWorkflowVersion() { + String version_string = "" + if (workflow.manifest.version) { + def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : '' + version_string += "${prefix_v}${workflow.manifest.version}" + } + + if (workflow.commitId) { + def git_shortsha = workflow.commitId.substring(0, 7) + version_string += "-g${git_shortsha}" + } + + return version_string +} + +// +// Dump pipeline parameters to a JSON file +// +def dumpParametersToJSON(outdir) { + def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') + def filename = "params_${timestamp}.json" + def temp_pf = new File(workflow.launchDir.toString(), ".${filename}") + def jsonStr = JsonOutput.toJson(params) + temp_pf.text = JsonOutput.prettyPrint(jsonStr) + + FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json") + temp_pf.delete() +} + +// +// When running with -profile conda, warn if channels have not been set-up appropriately +// +def checkCondaChannels() { + Yaml parser = new Yaml() + def channels = [] + try { + def config = parser.load("conda config --show channels".execute().text) + channels = config.channels + } catch(NullPointerException | IOException e) { + log.warn "Could not verify conda channel configuration." + return + } + + // Check that all channels are present + // This channel list is ordered by required channel priority. 
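+ // For example, a `conda config --show channels` result listing the channels in the
+ // order [conda-forge, bioconda, defaults] satisfies both checks below, whereas a list
+ // that omits one of the three, or puts bioconda ahead of conda-forge, triggers the warning.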
+ def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults'] + def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean + + // Check that they are in the right order + def channel_priority_violation = false + def n = required_channels_in_order.size() + for (int i = 0; i < n - 1; i++) { + channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1])) + } + + if (channels_missing | channel_priority_violation) { + log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + + " There is a problem with your Conda configuration!\n\n" + + " You will need to set-up the conda-forge and bioconda channels correctly.\n" + + " Please refer to https://bioconda.github.io/\n" + + " The observed channel order is \n" + + " ${channels}\n" + + " but the following channel order is required:\n" + + " ${required_channels_in_order}\n" + + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" + } +} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml new file mode 100644 index 0000000000..e5c3a0a828 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml @@ -0,0 +1,38 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json +name: "UTILS_NEXTFLOW_PIPELINE" +description: Subworkflow with functionality that may be useful for any Nextflow pipeline +keywords: + - utility + - pipeline + - initialise + - version +components: [] +input: + - print_version: + type: boolean + description: | + Print the version of the pipeline and exit + - dump_parameters: + type: boolean + description: | + Dump the parameters of the pipeline to a JSON file + - output_directory: + type: directory + description: Path to output dir to write JSON file to. + pattern: "results/" + - check_conda_channel: + type: boolean + description: | + Check if the conda channel priority is correct. +output: + - dummy_emit: + type: boolean + description: | + Dummy emit to make nf-core subworkflows lint happy +authors: + - "@adamrtalbot" + - "@drpatelh" +maintainers: + - "@adamrtalbot" + - "@drpatelh" + - "@maxulysse" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test new file mode 100644 index 0000000000..8ed4310cac --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test @@ -0,0 +1,54 @@ + +nextflow_function { + + name "Test Functions" + script "subworkflows/nf-core/utils_nextflow_pipeline/main.nf" + config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config" + tag 'subworkflows' + tag 'utils_nextflow_pipeline' + tag 'subworkflows/utils_nextflow_pipeline' + + test("Test Function getWorkflowVersion") { + + function "getWorkflowVersion" + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } + + test("Test Function dumpParametersToJSON") { + + function "dumpParametersToJSON" + + when { + function { + """ + // define inputs of the function here. 
Example: + input[0] = "$outputDir" + """.stripIndent() + } + } + + then { + assertAll( + { assert function.success } + ) + } + } + + test("Test Function checkCondaChannels") { + + function "checkCondaChannels" + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } +} \ No newline at end of file diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap new file mode 100644 index 0000000000..db2030f8b0 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap @@ -0,0 +1,12 @@ +{ + "Test Function getWorkflowVersion": { + "content": [ + "v9.9.9" + ], + "timestamp": "2024-01-19T11:32:36.031083" + }, + "Test Function checkCondaChannels": { + "content": null, + "timestamp": "2024-01-19T11:32:50.456" + } +} \ No newline at end of file diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test new file mode 100644 index 0000000000..f7c54bc68f --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test @@ -0,0 +1,123 @@ +nextflow_workflow { + + name "Test Workflow UTILS_NEXTFLOW_PIPELINE" + script "../main.nf" + config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config" + workflow "UTILS_NEXTFLOW_PIPELINE" + tag 'subworkflows' + tag 'utils_nextflow_pipeline' + tag 'subworkflows/utils_nextflow_pipeline' + + test("Should run no inputs") { + + when { + params { + outdir = "tests/results" + } + workflow { + """ + print_version = false + dump_parameters = false + outdir = null + check_conda_channels = false + + input[0] = print_version + input[1] = dump_parameters + input[2] = outdir + input[3] = check_conda_channels + """ + } + } + + then { + assertAll( + { assert workflow.success } + ) + } + } + + test("Should print version") { + + when { + params { + outdir = "tests/results" + } + workflow { + """ + print_version = true + dump_parameters = false + outdir = null + check_conda_channels = false + + input[0] = print_version + input[1] = dump_parameters + input[2] = outdir + input[3] = check_conda_channels + """ + } + } + + then { + assertAll( + { assert workflow.success }, + { assert workflow.stdout.contains("nextflow_workflow v9.9.9") } + ) + } + } + + test("Should dump params") { + + when { + params { + outdir = "$outputDir" + } + workflow { + """ + print_version = false + dump_parameters = true + outdir = params.outdir + check_conda_channels = false + + input[0] = false + input[1] = true + input[2] = params.outdir + input[3] = false + """ + } + } + + then { + assertAll( + { assert workflow.success } + ) + } + } + + test("Should not create params JSON if no output directory") { + + when { + params { + outdir = "$outputDir" + } + workflow { + """ + print_version = false + dump_parameters = true + outdir = params.outdir + check_conda_channels = false + + input[0] = false + input[1] = true + input[2] = null + input[3] = false + """ + } + } + + then { + assertAll( + { assert workflow.success } + ) + } + } +} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config 
b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config new file mode 100644 index 0000000000..53574ffec4 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config @@ -0,0 +1,9 @@ +manifest { + name = 'nextflow_workflow' + author = """nf-core""" + homePage = 'https://127.0.0.1' + description = """Dummy pipeline""" + nextflowVersion = '!>=23.04.0' + version = '9.9.9' + doi = 'https://doi.org/10.5281/zenodo.5070524' +} \ No newline at end of file diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml new file mode 100644 index 0000000000..f84761125a --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml @@ -0,0 +1,2 @@ +subworkflows/utils_nextflow_pipeline: + - subworkflows/nf-core/utils_nextflow_pipeline/** diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf new file mode 100644 index 0000000000..a8b55d6fe1 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -0,0 +1,440 @@ +// +// Subworkflow with utility functions specific to the nf-core pipeline template +// + +import org.yaml.snakeyaml.Yaml +import nextflow.extension.FilesEx + +/* +======================================================================================== + SUBWORKFLOW DEFINITION +======================================================================================== +*/ + +workflow UTILS_NFCORE_PIPELINE { + + take: + nextflow_cli_args + + main: + valid_config = checkConfigProvided() + checkProfileProvided(nextflow_cli_args) + + emit: + valid_config +} + +/* +======================================================================================== + FUNCTIONS +======================================================================================== +*/ + +// +// Warn if a -profile or Nextflow config has not been provided to run the pipeline +// +def checkConfigProvided() { + valid_config = true + if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) { + log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" + + "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + + " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + + " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + + " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" + + "Please refer to the quick start section and usage docs for the pipeline.\n " + valid_config = false + } + return valid_config +} + +// +// Exit pipeline if --profile contains spaces +// +def checkProfileProvided(nextflow_cli_args) { + if (workflow.profile.endsWith(',')) { + error "The `-profile` option cannot end with a trailing comma, please remove it and re-run the pipeline!\n" + + "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n" + } + if (nextflow_cli_args[0]) { + log.warn "nf-core pipelines do not accept positional arguments. 
The positional argument `${nextflow_cli_args[0]}` has been detected.\n" + + "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n" + } +} + +// +// Citation string for pipeline +// +def workflowCitation() { + return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + + "* The pipeline\n" + + " ${workflow.manifest.doi}\n\n" + + "* The nf-core framework\n" + + " https://doi.org/10.1038/s41587-020-0439-x\n\n" + + "* Software dependencies\n" + + " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" +} + +// +// Generate workflow version string +// +def getWorkflowVersion() { + String version_string = "" + if (workflow.manifest.version) { + def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : '' + version_string += "${prefix_v}${workflow.manifest.version}" + } + + if (workflow.commitId) { + def git_shortsha = workflow.commitId.substring(0, 7) + version_string += "-g${git_shortsha}" + } + + return version_string +} + +// +// Get software versions for pipeline +// +def processVersionsFromYAML(yaml_file) { + Yaml yaml = new Yaml() + versions = yaml.load(yaml_file).collectEntries { k, v -> [ k.tokenize(':')[-1], v ] } + return yaml.dumpAsMap(versions).trim() +} + +// +// Get workflow version for pipeline +// +def workflowVersionToYAML() { + return """ + Workflow: + $workflow.manifest.name: ${getWorkflowVersion()} + Nextflow: $workflow.nextflow.version + """.stripIndent().trim() +} + +// +// Get channel of software versions used in pipeline in YAML format +// +def softwareVersionsToYAML(ch_versions) { + return ch_versions + .unique() + .map { processVersionsFromYAML(it) } + .unique() + .mix(Channel.of(workflowVersionToYAML())) +} + +// +// Get workflow summary for MultiQC +// +def paramsSummaryMultiqc(summary_params) { + def summary_section = '' + for (group in summary_params.keySet()) { + def group_params = summary_params.get(group) // This gets the parameters of that particular group + if (group_params) { + summary_section += " $group\n" + summary_section += " \n" + for (param in group_params.keySet()) { + summary_section += " $param${group_params.get(param) ?: 'N/A'}\n" + } + summary_section += " \n" + } + } + + String yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n" + yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n" + yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n" + yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n" + yaml_file_text += "plot_type: 'html'\n" + yaml_file_text += "data: |\n" + yaml_file_text += "${summary_section}" + + return yaml_file_text +} + +// +// nf-core logo +// +def nfCoreLogo(monochrome_logs=true) { + Map colors = logColours(monochrome_logs) + String.format( + """\n + ${dashedLine(monochrome_logs)} + ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} + ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} + ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} + ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} + ${colors.green}`._,._,\'${colors.reset} + ${colors.purple} ${workflow.manifest.name} ${getWorkflowVersion()}${colors.reset} + ${dashedLine(monochrome_logs)} + """.stripIndent() + ) +} + +// +// Return dashed line +// +def dashedLine(monochrome_logs=true) { + Map colors = logColours(monochrome_logs) + return 
"-${colors.dim}----------------------------------------------------${colors.reset}-" +} + +// +// ANSII colours used for terminal logging +// +def logColours(monochrome_logs=true) { + Map colorcodes = [:] + + // Reset / Meta + colorcodes['reset'] = monochrome_logs ? '' : "\033[0m" + colorcodes['bold'] = monochrome_logs ? '' : "\033[1m" + colorcodes['dim'] = monochrome_logs ? '' : "\033[2m" + colorcodes['underlined'] = monochrome_logs ? '' : "\033[4m" + colorcodes['blink'] = monochrome_logs ? '' : "\033[5m" + colorcodes['reverse'] = monochrome_logs ? '' : "\033[7m" + colorcodes['hidden'] = monochrome_logs ? '' : "\033[8m" + + // Regular Colors + colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m" + colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m" + colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m" + colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m" + colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m" + colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m" + colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m" + colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m" + + // Bold + colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m" + colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m" + colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m" + colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m" + colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m" + colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m" + colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m" + colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m" + + // Underline + colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m" + colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m" + colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m" + colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m" + colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m" + colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m" + colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m" + colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m" + + // High Intensity + colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m" + colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m" + colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m" + colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m" + colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m" + colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m" + colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m" + colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m" + + // Bold High Intensity + colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m" + colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m" + colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m" + colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m" + colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m" + colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m" + colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m" + colorcodes['biwhite'] = monochrome_logs ? 
'' : "\033[1;97m" + + return colorcodes +} + +// +// Attach the multiqc report to email +// +def attachMultiqcReport(multiqc_report) { + def mqc_report = null + try { + if (workflow.success) { + mqc_report = multiqc_report.getVal() + if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) { + if (mqc_report.size() > 1) { + log.warn "[$workflow.manifest.name] Found multiple reports from process 'MULTIQC', will use only one" + } + mqc_report = mqc_report[0] + } + } + } catch (all) { + if (multiqc_report) { + log.warn "[$workflow.manifest.name] Could not attach MultiQC report to summary email" + } + } + return mqc_report +} + +// +// Construct and send completion email +// +def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs=true, multiqc_report=null) { + + // Set up the e-mail variables + def subject = "[$workflow.manifest.name] Successful: $workflow.runName" + if (!workflow.success) { + subject = "[$workflow.manifest.name] FAILED: $workflow.runName" + } + + def summary = [:] + for (group in summary_params.keySet()) { + summary << summary_params[group] + } + + def misc_fields = [:] + misc_fields['Date Started'] = workflow.start + misc_fields['Date Completed'] = workflow.complete + misc_fields['Pipeline script file path'] = workflow.scriptFile + misc_fields['Pipeline script hash ID'] = workflow.scriptId + if (workflow.repository) misc_fields['Pipeline repository Git URL'] = workflow.repository + if (workflow.commitId) misc_fields['Pipeline repository Git Commit'] = workflow.commitId + if (workflow.revision) misc_fields['Pipeline Git branch/tag'] = workflow.revision + misc_fields['Nextflow Version'] = workflow.nextflow.version + misc_fields['Nextflow Build'] = workflow.nextflow.build + misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp + + def email_fields = [:] + email_fields['version'] = getWorkflowVersion() + email_fields['runName'] = workflow.runName + email_fields['success'] = workflow.success + email_fields['dateComplete'] = workflow.complete + email_fields['duration'] = workflow.duration + email_fields['exitStatus'] = workflow.exitStatus + email_fields['errorMessage'] = (workflow.errorMessage ?: 'None') + email_fields['errorReport'] = (workflow.errorReport ?: 'None') + email_fields['commandLine'] = workflow.commandLine + email_fields['projectDir'] = workflow.projectDir + email_fields['summary'] = summary << misc_fields + + // On success try attach the multiqc report + def mqc_report = attachMultiqcReport(multiqc_report) + + // Check if we are only sending emails on failure + def email_address = email + if (!email && email_on_fail && !workflow.success) { + email_address = email_on_fail + } + + // Render the TXT template + def engine = new groovy.text.GStringTemplateEngine() + def tf = new File("${workflow.projectDir}/assets/email_template.txt") + def txt_template = engine.createTemplate(tf).make(email_fields) + def email_txt = txt_template.toString() + + // Render the HTML template + def hf = new File("${workflow.projectDir}/assets/email_template.html") + def html_template = engine.createTemplate(hf).make(email_fields) + def email_html = html_template.toString() + + // Render the sendmail template + def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? 
params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit + def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes() ] + def sf = new File("${workflow.projectDir}/assets/sendmail_template.txt") + def sendmail_template = engine.createTemplate(sf).make(smail_fields) + def sendmail_html = sendmail_template.toString() + + // Send the HTML e-mail + Map colors = logColours(monochrome_logs) + if (email_address) { + try { + if (plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') } + // Try to send HTML e-mail using sendmail + def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") + sendmail_tf.withWriter { w -> w << sendmail_html } + [ 'sendmail', '-t' ].execute() << sendmail_html + log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-" + } catch (all) { + // Catch failures and try with plaintext + def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ] + mail_cmd.execute() << email_html + log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (mail)-" + } + } + + // Write summary e-mail HTML to a file + def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html") + output_hf.withWriter { w -> w << email_html } + FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html"); + output_hf.delete() + + // Write summary e-mail TXT to a file + def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt") + output_tf.withWriter { w -> w << email_txt } + FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt"); + output_tf.delete() +} + +// +// Print pipeline summary on completion +// +def completionSummary(monochrome_logs=true) { + Map colors = logColours(monochrome_logs) + if (workflow.success) { + if (workflow.stats.ignoredCount == 0) { + log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" + } else { + log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" + } + } else { + log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" + } +} + +// +// Construct and send a notification to a web server as JSON e.g. 
Microsoft Teams and Slack +// +def imNotification(summary_params, hook_url) { + def summary = [:] + for (group in summary_params.keySet()) { + summary << summary_params[group] + } + + def misc_fields = [:] + misc_fields['start'] = workflow.start + misc_fields['complete'] = workflow.complete + misc_fields['scriptfile'] = workflow.scriptFile + misc_fields['scriptid'] = workflow.scriptId + if (workflow.repository) misc_fields['repository'] = workflow.repository + if (workflow.commitId) misc_fields['commitid'] = workflow.commitId + if (workflow.revision) misc_fields['revision'] = workflow.revision + misc_fields['nxf_version'] = workflow.nextflow.version + misc_fields['nxf_build'] = workflow.nextflow.build + misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp + + def msg_fields = [:] + msg_fields['version'] = getWorkflowVersion() + msg_fields['runName'] = workflow.runName + msg_fields['success'] = workflow.success + msg_fields['dateComplete'] = workflow.complete + msg_fields['duration'] = workflow.duration + msg_fields['exitStatus'] = workflow.exitStatus + msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None') + msg_fields['errorReport'] = (workflow.errorReport ?: 'None') + msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "") + msg_fields['projectDir'] = workflow.projectDir + msg_fields['summary'] = summary << misc_fields + + // Render the JSON template + def engine = new groovy.text.GStringTemplateEngine() + // Different JSON depending on the service provider + // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format + def json_path = hook_url.contains("hooks.slack.com") ? "slackreport.json" : "adaptivecard.json" + def hf = new File("${workflow.projectDir}/assets/${json_path}") + def json_template = engine.createTemplate(hf).make(msg_fields) + def json_message = json_template.toString() + + // POST + def post = new URL(hook_url).openConnection(); + post.setRequestMethod("POST") + post.setDoOutput(true) + post.setRequestProperty("Content-Type", "application/json") + post.getOutputStream().write(json_message.getBytes("UTF-8")); + def postRC = post.getResponseCode(); + if (! 
postRC.equals(200)) { + log.warn(post.getErrorStream().getText()); + } +} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml new file mode 100644 index 0000000000..d08d24342d --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml @@ -0,0 +1,24 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json +name: "UTILS_NFCORE_PIPELINE" +description: Subworkflow with utility functions specific to the nf-core pipeline template +keywords: + - utility + - pipeline + - initialise + - version +components: [] +input: + - nextflow_cli_args: + type: list + description: | + Nextflow CLI positional arguments +output: + - success: + type: boolean + description: | + Dummy output to indicate success +authors: + - "@adamrtalbot" +maintainers: + - "@adamrtalbot" + - "@maxulysse" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test new file mode 100644 index 0000000000..1dc317f8f7 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test @@ -0,0 +1,134 @@ + +nextflow_function { + + name "Test Functions" + script "../main.nf" + config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config" + tag "subworkflows" + tag "subworkflows_nfcore" + tag "utils_nfcore_pipeline" + tag "subworkflows/utils_nfcore_pipeline" + + test("Test Function checkConfigProvided") { + + function "checkConfigProvided" + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } + + test("Test Function checkProfileProvided") { + + function "checkProfileProvided" + + when { + function { + """ + input[0] = [] + """ + } + } + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } + + test("Test Function workflowCitation") { + + function "workflowCitation" + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } + + test("Test Function nfCoreLogo") { + + function "nfCoreLogo" + + when { + function { + """ + input[0] = false + """ + } + } + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } + + test("Test Function dashedLine") { + + function "dashedLine" + + when { + function { + """ + input[0] = false + """ + } + } + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } + + test("Test Function without logColours") { + + function "logColours" + + when { + function { + """ + input[0] = true + """ + } + } + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } + + test("Test Function with logColours") { + function "logColours" + + when { + function { + """ + input[0] = false + """ + } + } + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } +} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap new file mode 100644 index 
0000000000..10f948e629 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap @@ -0,0 +1,138 @@ +{ + "Test Function checkProfileProvided": { + "content": null, + "timestamp": "2024-02-09T15:43:55.145717" + }, + "Test Function checkConfigProvided": { + "content": [ + true + ], + "timestamp": "2024-01-19T11:34:13.548431224" + }, + "Test Function nfCoreLogo": { + "content": [ + "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m ___ __ __ __ ___ \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m |\\ | |__ __ / ` / \\ |__) |__ \u001b[0;33m} {\u001b[0m\n\u001b[0;34m | \\| | \\__, \\__/ | \\ |___ \u001b[0;32m\\`-._,-`-,\u001b[0m\n \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m nextflow_workflow v9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n" + ], + "timestamp": "2024-01-19T11:34:38.840454873" + }, + "Test Function workflowCitation": { + "content": [ + "If you use nextflow_workflow for your analysis please cite:\n\n* The pipeline\n https://doi.org/10.5281/zenodo.5070524\n\n* The nf-core framework\n https://doi.org/10.1038/s41587-020-0439-x\n\n* Software dependencies\n https://github.com/nextflow_workflow/blob/master/CITATIONS.md" + ], + "timestamp": "2024-01-19T11:34:22.24352016" + }, + "Test Function without logColours": { + "content": [ + { + "reset": "", + "bold": "", + "dim": "", + "underlined": "", + "blink": "", + "reverse": "", + "hidden": "", + "black": "", + "red": "", + "green": "", + "yellow": "", + "blue": "", + "purple": "", + "cyan": "", + "white": "", + "bblack": "", + "bred": "", + "bgreen": "", + "byellow": "", + "bblue": "", + "bpurple": "", + "bcyan": "", + "bwhite": "", + "ublack": "", + "ured": "", + "ugreen": "", + "uyellow": "", + "ublue": "", + "upurple": "", + "ucyan": "", + "uwhite": "", + "iblack": "", + "ired": "", + "igreen": "", + "iyellow": "", + "iblue": "", + "ipurple": "", + "icyan": "", + "iwhite": "", + "biblack": "", + "bired": "", + "bigreen": "", + "biyellow": "", + "biblue": "", + "bipurple": "", + "bicyan": "", + "biwhite": "" + } + ], + "timestamp": "2024-01-19T11:35:04.418416984" + }, + "Test Function dashedLine": { + "content": [ + "-\u001b[2m----------------------------------------------------\u001b[0m-" + ], + "timestamp": "2024-01-19T11:34:55.420000755" + }, + "Test Function with logColours": { + "content": [ + { + "reset": "\u001b[0m", + "bold": "\u001b[1m", + "dim": "\u001b[2m", + "underlined": "\u001b[4m", + "blink": "\u001b[5m", + "reverse": "\u001b[7m", + "hidden": "\u001b[8m", + "black": "\u001b[0;30m", + "red": "\u001b[0;31m", + "green": "\u001b[0;32m", + "yellow": "\u001b[0;33m", + "blue": "\u001b[0;34m", + "purple": "\u001b[0;35m", + "cyan": "\u001b[0;36m", + "white": "\u001b[0;37m", + "bblack": "\u001b[1;30m", + "bred": "\u001b[1;31m", + "bgreen": "\u001b[1;32m", + "byellow": "\u001b[1;33m", + "bblue": "\u001b[1;34m", + "bpurple": "\u001b[1;35m", + "bcyan": "\u001b[1;36m", + "bwhite": "\u001b[1;37m", + "ublack": "\u001b[4;30m", + "ured": "\u001b[4;31m", + "ugreen": "\u001b[4;32m", + "uyellow": "\u001b[4;33m", + "ublue": "\u001b[4;34m", + "upurple": "\u001b[4;35m", + "ucyan": "\u001b[4;36m", + "uwhite": "\u001b[4;37m", + "iblack": "\u001b[0;90m", + "ired": "\u001b[0;91m", + "igreen": "\u001b[0;92m", + "iyellow": "\u001b[0;93m", + "iblue": "\u001b[0;94m", + "ipurple": "\u001b[0;95m", + "icyan": "\u001b[0;96m", + "iwhite": "\u001b[0;97m", + 
"biblack": "\u001b[1;90m", + "bired": "\u001b[1;91m", + "bigreen": "\u001b[1;92m", + "biyellow": "\u001b[1;93m", + "biblue": "\u001b[1;94m", + "bipurple": "\u001b[1;95m", + "bicyan": "\u001b[1;96m", + "biwhite": "\u001b[1;97m" + } + ], + "timestamp": "2024-01-19T11:35:13.436366565" + } +} \ No newline at end of file diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test new file mode 100644 index 0000000000..8940d32d1e --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test @@ -0,0 +1,29 @@ +nextflow_workflow { + + name "Test Workflow UTILS_NFCORE_PIPELINE" + script "../main.nf" + config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config" + workflow "UTILS_NFCORE_PIPELINE" + tag "subworkflows" + tag "subworkflows_nfcore" + tag "utils_nfcore_pipeline" + tag "subworkflows/utils_nfcore_pipeline" + + test("Should run without failures") { + + when { + workflow { + """ + input[0] = [] + """ + } + } + + then { + assertAll( + { assert workflow.success }, + { assert snapshot(workflow.out).match() } + ) + } + } +} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap new file mode 100644 index 0000000000..d07ce54c51 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap @@ -0,0 +1,15 @@ +{ + "Should run without failures": { + "content": [ + { + "0": [ + true + ], + "valid_config": [ + true + ] + } + ], + "timestamp": "2024-01-19T11:35:22.538940073" + } +} \ No newline at end of file diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config new file mode 100644 index 0000000000..d0a926bf6d --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config @@ -0,0 +1,9 @@ +manifest { + name = 'nextflow_workflow' + author = """nf-core""" + homePage = 'https://127.0.0.1' + description = """Dummy pipeline""" + nextflowVersion = '!>=23.04.0' + version = '9.9.9' + doi = 'https://doi.org/10.5281/zenodo.5070524' +} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml new file mode 100644 index 0000000000..ac8523c9a2 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml @@ -0,0 +1,2 @@ +subworkflows/utils_nfcore_pipeline: + - subworkflows/nf-core/utils_nfcore_pipeline/** diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf new file mode 100644 index 0000000000..2585b65d1b --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf @@ -0,0 +1,62 @@ +// +// Subworkflow that uses the nf-validation plugin to render help text and parameter summary +// + +/* +======================================================================================== + IMPORT NF-VALIDATION PLUGIN 
+======================================================================================== +*/ + +include { paramsHelp } from 'plugin/nf-validation' +include { paramsSummaryLog } from 'plugin/nf-validation' +include { validateParameters } from 'plugin/nf-validation' + +/* +======================================================================================== + SUBWORKFLOW DEFINITION +======================================================================================== +*/ + +workflow UTILS_NFVALIDATION_PLUGIN { + + take: + print_help // boolean: print help + workflow_command // string: default commmand used to run pipeline + pre_help_text // string: string to be printed before help text and summary log + post_help_text // string: string to be printed after help text and summary log + validate_params // boolean: validate parameters + schema_filename // path: JSON schema file, null to use default value + + main: + + log.debug "Using schema file: ${schema_filename}" + + // Default values for strings + pre_help_text = pre_help_text ?: '' + post_help_text = post_help_text ?: '' + workflow_command = workflow_command ?: '' + + // + // Print help message if needed + // + if (print_help) { + log.info pre_help_text + paramsHelp(workflow_command, parameters_schema: schema_filename) + post_help_text + System.exit(0) + } + + // + // Print parameter summary to stdout + // + log.info pre_help_text + paramsSummaryLog(workflow, parameters_schema: schema_filename) + post_help_text + + // + // Validate parameters relative to the parameter JSON schema + // + if (validate_params){ + validateParameters(parameters_schema: schema_filename) + } + + emit: + dummy_emit = true +} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml new file mode 100644 index 0000000000..3d4a6b04f5 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml @@ -0,0 +1,44 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json +name: "UTILS_NFVALIDATION_PLUGIN" +description: Use nf-validation to initiate and validate a pipeline +keywords: + - utility + - pipeline + - initialise + - validation +components: [] +input: + - print_help: + type: boolean + description: | + Print help message and exit + - workflow_command: + type: string + description: | + The command to run the workflow e.g. "nextflow run main.nf" + - pre_help_text: + type: string + description: | + Text to print before the help message + - post_help_text: + type: string + description: | + Text to print after the help message + - validate_params: + type: boolean + description: | + Validate the parameters and error if invalid. + - schema_filename: + type: string + description: | + The filename of the schema to validate against. 
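+ For example, "nextflow_schema.json" in the pipeline root directory.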
+output: + - dummy_emit: + type: boolean + description: | + Dummy emit to make nf-core subworkflows lint happy +authors: + - "@adamrtalbot" +maintainers: + - "@adamrtalbot" + - "@maxulysse" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test new file mode 100644 index 0000000000..517ee54e48 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test @@ -0,0 +1,200 @@ +nextflow_workflow { + + name "Test Workflow UTILS_NFVALIDATION_PLUGIN" + script "../main.nf" + workflow "UTILS_NFVALIDATION_PLUGIN" + tag "subworkflows" + tag "subworkflows_nfcore" + tag "plugin/nf-validation" + tag "'plugin/nf-validation'" + tag "utils_nfvalidation_plugin" + tag "subworkflows/utils_nfvalidation_plugin" + + test("Should run nothing") { + + when { + + params { + monochrome_logs = true + test_data = '' + } + + workflow { + """ + help = false + workflow_command = null + pre_help_text = null + post_help_text = null + validate_params = false + schema_filename = "$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assertAll( + { assert workflow.success } + ) + } + } + + test("Should run help") { + + + when { + + params { + monochrome_logs = true + test_data = '' + } + workflow { + """ + help = true + workflow_command = null + pre_help_text = null + post_help_text = null + validate_params = false + schema_filename = "$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assertAll( + { assert workflow.success }, + { assert workflow.exitStatus == 0 }, + { assert workflow.stdout.any { it.contains('Input/output options') } }, + { assert workflow.stdout.any { it.contains('--outdir') } } + ) + } + } + + test("Should run help with command") { + + when { + + params { + monochrome_logs = true + test_data = '' + } + workflow { + """ + help = true + workflow_command = "nextflow run noorg/doesntexist" + pre_help_text = null + post_help_text = null + validate_params = false + schema_filename = "$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assertAll( + { assert workflow.success }, + { assert workflow.exitStatus == 0 }, + { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } }, + { assert workflow.stdout.any { it.contains('Input/output options') } }, + { assert workflow.stdout.any { it.contains('--outdir') } } + ) + } + } + + test("Should run help with extra text") { + + + when { + + params { + monochrome_logs = true + test_data = '' + } + workflow { + """ + help = true + workflow_command = "nextflow run noorg/doesntexist" + pre_help_text = "pre-help-text" + post_help_text = "post-help-text" + validate_params = false + schema_filename = "$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assertAll( + { assert 
workflow.success }, + { assert workflow.exitStatus == 0 }, + { assert workflow.stdout.any { it.contains('pre-help-text') } }, + { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } }, + { assert workflow.stdout.any { it.contains('Input/output options') } }, + { assert workflow.stdout.any { it.contains('--outdir') } }, + { assert workflow.stdout.any { it.contains('post-help-text') } } + ) + } + } + + test("Should validate params") { + + when { + + params { + monochrome_logs = true + test_data = '' + outdir = 1 + } + workflow { + """ + help = false + workflow_command = null + pre_help_text = null + post_help_text = null + validate_params = true + schema_filename = "$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assertAll( + { assert workflow.failed }, + { assert workflow.stdout.any { it.contains('ERROR ~ ERROR: Validation of pipeline parameters failed!') } } + ) + } + } +} \ No newline at end of file diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json new file mode 100644 index 0000000000..7626c1c93e --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json @@ -0,0 +1,96 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://raw.githubusercontent.com/./master/nextflow_schema.json", + "title": ". pipeline parameters", + "description": "", + "type": "object", + "definitions": { + "input_output_options": { + "title": "Input/output options", + "type": "object", + "fa_icon": "fas fa-terminal", + "description": "Define where the pipeline should find input data and save output data.", + "required": ["outdir"], + "properties": { + "validate_params": { + "type": "boolean", + "description": "Validate parameters?", + "default": true, + "hidden": true + }, + "outdir": { + "type": "string", + "format": "directory-path", + "description": "The output directory where the results will be saved. 
You have to use absolute paths to storage on Cloud infrastructure.", + "fa_icon": "fas fa-folder-open" + }, + "test_data_base": { + "type": "string", + "default": "https://raw.githubusercontent.com/nf-core/test-datasets/modules", + "description": "Base for test data directory", + "hidden": true + }, + "test_data": { + "type": "string", + "description": "Fake test data param", + "hidden": true + } + } + }, + "generic_options": { + "title": "Generic options", + "type": "object", + "fa_icon": "fas fa-file-import", + "description": "Less common options for the pipeline, typically set in a config file.", + "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.", + "properties": { + "help": { + "type": "boolean", + "description": "Display help text.", + "fa_icon": "fas fa-question-circle", + "hidden": true + }, + "version": { + "type": "boolean", + "description": "Display version and exit.", + "fa_icon": "fas fa-question-circle", + "hidden": true + }, + "logo": { + "type": "boolean", + "default": true, + "description": "Display nf-core logo in console output.", + "fa_icon": "fas fa-image", + "hidden": true + }, + "singularity_pull_docker_container": { + "type": "boolean", + "description": "Pull Singularity container from Docker?", + "hidden": true + }, + "publish_dir_mode": { + "type": "string", + "default": "copy", + "description": "Method used to save pipeline results to output directory.", + "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. 
See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.", + "fa_icon": "fas fa-copy", + "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"], + "hidden": true + }, + "monochrome_logs": { + "type": "boolean", + "description": "Use monochrome_logs", + "hidden": true + } + } + } + }, + "allOf": [ + { + "$ref": "#/definitions/input_output_options" + }, + { + "$ref": "#/definitions/generic_options" + } + ] +} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml new file mode 100644 index 0000000000..60b1cfff49 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml @@ -0,0 +1,2 @@ +subworkflows/utils_nfvalidation_plugin: + - subworkflows/nf-core/utils_nfvalidation_plugin/** diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index 4583f2a9d6..68adbaa328 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -1,54 +1,15 @@ /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - PRINT PARAMS SUMMARY + IMPORT MODULES / SUBWORKFLOWS / FUNCTIONS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -include { paramsSummaryLog; paramsSummaryMap } from 'plugin/nf-validation' - -def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs) -def citation = '\n' + WorkflowMain.citation(workflow) + '\n' -def summary_params = paramsSummaryMap(workflow) - -// Print parameter summary log to screen -log.info logo + paramsSummaryLog(workflow) + citation - -Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.initialise(params, log) - -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - CONFIG FILES -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) -ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config, checkIfExists: true ) : Channel.empty() -ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath( params.multiqc_logo, checkIfExists: true ) : Channel.empty() -ch_multiqc_custom_methods_description = params.multiqc_methods_description ? 
file(params.multiqc_methods_description, checkIfExists: true) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) - -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - IMPORT LOCAL MODULES/SUBWORKFLOWS -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -// -// SUBWORKFLOW: Consisting of a mix of local and nf-core/modules -// -include { INPUT_CHECK } from '../subworkflows/local/input_check' - -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - IMPORT NF-CORE MODULES/SUBWORKFLOWS -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -// -// MODULE: Installed directly from nf-core/modules -// -include { FASTQC } from '../modules/nf-core/fastqc/main' -include { MULTIQC } from '../modules/nf-core/multiqc/main' -include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoftwareversions/main' +include { FASTQC } from '../modules/nf-core/fastqc/main' +include { MULTIQC } from '../modules/nf-core/multiqc/main' +include { paramsSummaryMap } from 'plugin/nf-validation' +include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline' +include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' +include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -56,50 +17,45 @@ include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoft ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -// Info required for completion email and summary -def multiqc_report = [] - workflow {{ short_name|upper }} { - ch_versions = Channel.empty() + take: + ch_samplesheet // channel: samplesheet read in from --input - // - // SUBWORKFLOW: Read in samplesheet, validate and stage input files - // - INPUT_CHECK ( - file(params.input) - ) - ch_versions = ch_versions.mix(INPUT_CHECK.out.versions) - // TODO: OPTIONAL, you can use nf-validation plugin to create an input channel from the samplesheet with Channel.fromSamplesheet("input") - // See the documentation https://nextflow-io.github.io/nf-validation/samplesheets/fromSamplesheet/ - // ! 
There is currently no tooling to help you write a sample sheet schema + main: + + ch_versions = Channel.empty() + ch_multiqc_files = Channel.empty() // // MODULE: Run FastQC // FASTQC ( - INPUT_CHECK.out.reads + ch_samplesheet ) + ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}) ch_versions = ch_versions.mix(FASTQC.out.versions.first()) - CUSTOM_DUMPSOFTWAREVERSIONS ( - ch_versions.unique().collectFile(name: 'collated_versions.yml') - ) + // + // Collate and save software versions + // + softwareVersionsToYAML(ch_versions) + .collectFile(storeDir: "${params.outdir}/pipeline_info", name: 'nf_core_pipeline_software_mqc_versions.yml', sort: true, newLine: true) + .set { ch_collated_versions } // // MODULE: MultiQC // - workflow_summary = Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.paramsSummaryMultiqc(workflow, summary_params) - ch_workflow_summary = Channel.value(workflow_summary) - - methods_description = Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.methodsDescriptionText(workflow, ch_multiqc_custom_methods_description, params) - ch_methods_description = Channel.value(methods_description) - - ch_multiqc_files = Channel.empty() - ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) - ch_multiqc_files = ch_multiqc_files.mix(ch_methods_description.collectFile(name: 'methods_description_mqc.yaml')) - ch_multiqc_files = ch_multiqc_files.mix(CUSTOM_DUMPSOFTWAREVERSIONS.out.mqc_yml.collect()) - ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}.ifEmpty([])) + ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) + ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath(params.multiqc_config, checkIfExists: true) : Channel.empty() + ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath(params.multiqc_logo, checkIfExists: true) : Channel.empty() + summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json") + ch_workflow_summary = Channel.value(paramsSummaryMultiqc(summary_params)) + ch_multiqc_custom_methods_description = params.multiqc_methods_description ? 
file(params.multiqc_methods_description, checkIfExists: true) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) + ch_methods_description = Channel.value(methodsDescriptionText(ch_multiqc_custom_methods_description)) + ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) + ch_multiqc_files = ch_multiqc_files.mix(ch_collated_versions) + ch_multiqc_files = ch_multiqc_files.mix(ch_methods_description.collectFile(name: 'methods_description_mqc.yaml', sort: false)) MULTIQC ( ch_multiqc_files.collect(), @@ -107,31 +63,10 @@ workflow {{ short_name|upper }} { ch_multiqc_custom_config.toList(), ch_multiqc_logo.toList() ) - multiqc_report = MULTIQC.out.report.toList() -} - -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - COMPLETION EMAIL AND SUMMARY -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -workflow.onComplete { - if (params.email || params.email_on_fail) { - NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) - } - NfcoreTemplate.dump_parameters(workflow, params) - NfcoreTemplate.summary(workflow, params, log) - if (params.hook_url) { - NfcoreTemplate.IM_notification(workflow, params, summary_params, projectDir, log) - } -} -workflow.onError { - if (workflow.errorReport.contains("Process requirement exceeds available memory")) { - println("🛑 Default resources exceed availability 🛑 ") - println("💡 See here on how to configure pipeline: https://nf-co.re/docs/usage/configuration#tuning-workflow-resources 💡") - } + emit: + multiqc_report = MULTIQC.out.report.toList() // channel: /path/to/multiqc_report.html + versions = ch_versions // channel: [ path(versions.yml) ] } /* diff --git a/nf_core/subworkflows/lint/main_nf.py b/nf_core/subworkflows/lint/main_nf.py index f59e1e4279..c73559502c 100644 --- a/nf_core/subworkflows/lint/main_nf.py +++ b/nf_core/subworkflows/lint/main_nf.py @@ -4,6 +4,7 @@ import logging import re +from typing import List log = logging.getLogger(__name__) @@ -120,7 +121,7 @@ def check_main_section(self, lines, included_components): # Check that all included components are used # Check that all included component versions are used - if included_components is not None: + if included_components: for component in included_components: if component in script: self.passed.append( @@ -152,7 +153,7 @@ def check_main_section(self, lines, included_components): ) -def check_subworkflow_section(self, lines): +def check_subworkflow_section(self, lines: List[str]) -> List[str]: """Lint the section of a subworkflow before the workflow definition Specifically checks if the subworkflow includes at least two modules or subworkflows @@ -160,7 +161,7 @@ def check_subworkflow_section(self, lines): lines (List[str]): Content of subworkflow. Returns: - List: List of included component names. If subworkflow doesn't contain any lines, return None. + List[str]: List of included components. 
""" # Check that we have subworkflow content if len(lines) == 0: @@ -171,7 +172,7 @@ def check_subworkflow_section(self, lines): self.main_nf, ) ) - return + return [] self.passed.append( ("subworkflow_include", "Subworkflow does include modules before the workflow definition", self.main_nf) ) @@ -179,10 +180,17 @@ def check_subworkflow_section(self, lines): includes = [] for line in lines: if line.strip().startswith("include"): - component_name = line.split("{")[1].split("}")[0].strip() - if " as " in component_name: - component_name = component_name.split(" as ")[1].strip() - includes.append(component_name) + component_name = [line.split("{")[1].split("}")[0].strip()] + # check if multiple components are included + if ";" in component_name[0]: + component_name = component_name[0].split(";") + for comp in component_name: + if " as " in comp: + comp = comp.split(" as ")[1].strip() + includes.append(comp) + continue + # remove duplicated components + includes = list(set(includes)) if len(includes) >= 2: self.passed.append(("main_nf_include", "Subworkflow includes two or more modules", self.main_nf)) else: diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index ac0f467e66..5c31e96911 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -208,7 +208,18 @@ def checkout_branch(self): """ Checks out the specified branch of the repository """ - self.repo.git.checkout(self.branch) + try: + self.repo.git.checkout(self.branch) + except GitCommandError as e: + if ( + self.fullname + and "modules" in self.fullname + and "Your local changes to the following files would be overwritten by checkout" in str(e) + ): + log.debug(f"Overwriting local changes in '{self.local_repo_dir}'") + self.repo.git.checkout(self.branch, force=True) + else: + raise e def checkout(self, commit): """ @@ -217,7 +228,18 @@ def checkout(self, commit): Args: commit (str): Git SHA of the commit """ - self.repo.git.checkout(commit) + try: + self.repo.git.checkout(commit) + except GitCommandError as e: + if ( + self.fullname + and "modules" in self.fullname + and "Your local changes to the following files would be overwritten by checkout" in str(e) + ): + log.debug(f"Overwriting local changes in '{self.local_repo_dir}'") + self.repo.git.checkout(self.branch, force=True) + else: + raise e def component_exists(self, component_name, component_type, checkout=True, commit=None): """ diff --git a/pyproject.toml b/pyproject.toml index d75ae89df6..8168bd7c11 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,29 +1,35 @@ [build-system] build-backend = "setuptools.build_meta" -requires = [ - "setuptools>=40.6.0", - "wheel" -] +requires = ["setuptools>=40.6.0", "wheel"] [tool.pytest.ini_options] -markers = [ - "datafiles: load datafiles" -] +markers = ["datafiles: load datafiles"] testpaths = ["tests"] -norecursedirs = [ ".*", "build", "dist", "*.egg", "data", "__pycache__", ".github", "nf_core", "docs"] +norecursedirs = [ + ".*", + "build", + "dist", + "*.egg", + "data", + "__pycache__", + ".github", + "nf_core", + "docs", +] [tool.ruff] line-length = 120 target-version = "py38" -select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"] cache-dir = "~/.cache/ruff" -[tool.ruff.isort] +[tool.ruff.lint] +select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"] + +[tool.ruff.lint.isort] known-first-party = ["nf_core"] -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "__init__.py" = ["E402", "F401"] [tool.ruff.lint.pep8-naming] extend-ignore-names = ["mocked_*", "*allOf", "*URI*"] - diff --git a/setup.py 
b/setup.py index 87677b45c1..3bfb99d4a8 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "2.12.1" +version = "2.13" with open("README.md") as f: readme = f.read() diff --git a/tests/lint/files_exist.py b/tests/lint/files_exist.py index d3a6a25e82..5081522899 100644 --- a/tests/lint/files_exist.py +++ b/tests/lint/files_exist.py @@ -61,7 +61,9 @@ def test_files_exist_pass_conditional(self): lint_obj = nf_core.lint.PipelineLint(new_pipeline) lint_obj._load() lint_obj.nf_config["plugins"] = [] - Path(new_pipeline, "lib/nfcore_external_java_deps.jar").touch() + lib_dir = Path(new_pipeline, "lib") + lib_dir.mkdir() + (lib_dir / "nfcore_external_java_deps.jar").touch() results = lint_obj.files_exist() assert results["failed"] == [] assert results["ignored"] == [] @@ -71,7 +73,9 @@ def test_files_exist_fail_conditional(self): new_pipeline = self._make_pipeline_copy() lint_obj = nf_core.lint.PipelineLint(new_pipeline) lint_obj._load() - Path(new_pipeline, "lib/nfcore_external_java_deps.jar").touch() + lib_dir = Path(new_pipeline, "lib") + lib_dir.mkdir() + (lib_dir / "nfcore_external_java_deps.jar").touch() results = lint_obj.files_exist() assert results["failed"] == ["File must be removed: `lib/nfcore_external_java_deps.jar`"] assert results["ignored"] == [] diff --git a/tests/modules/create.py b/tests/modules/create.py index 460a1439cb..cf39621f07 100644 --- a/tests/modules/create.py +++ b/tests/modules/create.py @@ -138,3 +138,28 @@ def test_modules_migrate_no_delete(self, mock_rich_ask): with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: modules_yml = yaml.safe_load(fh) assert "samtools/sort" not in modules_yml.keys() + + +@mock.patch("rich.prompt.Confirm.ask") +def test_modules_migrate_symlink(self, mock_rich_ask): + """Create a module with the --migrate-pytest option to convert pytest with symlinks to nf-test. 
+ Test that the symlink is deleted and the file is copied.""" + + pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") + module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + + # Create a symlinked file in the pytest directory + symlink_file = pytest_dir / "symlink_file.txt" + symlink_file.symlink_to(module_dir / "main.nf") + + # Create a module with --migrate-pytest + mock_rich_ask.return_value = True + module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create.create() + + # Check that symlink is deleted + assert not symlink_file.is_symlink() diff --git a/tests/modules/lint.py b/tests/modules/lint.py index a5d8567b76..9bd280ddd8 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -40,7 +40,6 @@ def test_modules_lint_empty(self): """Test linting a pipeline with no modules installed""" self.mods_remove.remove("fastqc", force=True) self.mods_remove.remove("multiqc", force=True) - self.mods_remove.remove("custom/dumpsoftwareversions", force=True) with pytest.raises(LookupError): nf_core.modules.ModuleLint(dir=self.pipeline_dir) @@ -58,7 +57,6 @@ def test_modules_lint_no_gitlab(self): """Test linting a pipeline with no modules installed""" self.mods_remove.remove("fastqc", force=True) self.mods_remove.remove("multiqc", force=True) - self.mods_remove.remove("custom/dumpsoftwareversions", force=True) with pytest.raises(LookupError): nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) diff --git a/tests/modules/modules_json.py b/tests/modules/modules_json.py index a054b6b131..e0100adfb7 100644 --- a/tests/modules/modules_json.py +++ b/tests/modules/modules_json.py @@ -1,6 +1,5 @@ import copy import json -import os import shutil from pathlib import Path @@ -16,7 +15,7 @@ def test_get_modules_json(self): """Checks that the get_modules_json function returns the correct result""" - mod_json_path = os.path.join(self.pipeline_dir, "modules.json") + mod_json_path = Path(self.pipeline_dir, "modules.json") with open(mod_json_path) as fh: try: mod_json_sb = json.load(fh) @@ -49,16 +48,16 @@ def test_mod_json_update(self): def test_mod_json_create(self): """Test creating a modules.json file from scratch""" - mod_json_path = os.path.join(self.pipeline_dir, "modules.json") + mod_json_path = Path(self.pipeline_dir, "modules.json") # Remove the existing modules.json file - os.remove(mod_json_path) + mod_json_path.unlink() # Create the new modules.json file # (There are no prompts as long as there are only nf-core modules) ModulesJson(self.pipeline_dir).create() # Check that the file exists - assert os.path.exists(mod_json_path) + assert (mod_json_path).exists() # Get the contents of the file mod_json_obj = ModulesJson(self.pipeline_dir) @@ -94,7 +93,7 @@ def test_mod_json_create_with_patch(self): patch_obj.patch("fastqc") # Remove the existing modules.json file - os.remove(mod_json_path) + mod_json_path.unlink() # Create the new modules.json file ModulesJson(self.pipeline_dir).create() @@ -137,7 +136,7 @@ def test_mod_json_up_to_date_module_removed(self): but is missing in the pipeline """ # Remove the fastqc module - fastqc_path = os.path.join(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") + fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, 
"fastqc") shutil.rmtree(fastqc_path) # Check that the modules.json file is up to date, and reinstall the module @@ -146,9 +145,9 @@ def test_mod_json_up_to_date_module_removed(self): # Check that the module has been reinstalled files = ["main.nf", "meta.yml"] - assert os.path.exists(fastqc_path) + assert fastqc_path.exists() for f in files: - assert os.path.exists(os.path.join(fastqc_path, f)) + assert Path(fastqc_path, f).exists() def test_mod_json_up_to_date_reinstall_fails(self): @@ -161,7 +160,7 @@ def test_mod_json_up_to_date_reinstall_fails(self): mod_json_obj.update("modules", ModulesRepo(), "fastqc", "INVALID_GIT_SHA", "modules", write_file=True) # Remove the fastqc module - fastqc_path = os.path.join(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") + fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") shutil.rmtree(fastqc_path) # Check that the modules.json file is up to date, and remove the fastqc module entry @@ -206,12 +205,12 @@ def test_mod_json_dump(self): mod_json_obj = ModulesJson(self.pipeline_dir) mod_json = mod_json_obj.get_modules_json() # Remove the modules.json file - mod_json_path = os.path.join(self.pipeline_dir, "modules.json") - os.remove(mod_json_path) + mod_json_path = Path(self.pipeline_dir, "modules.json") + mod_json_path.unlink() # Check that the dump function creates the file mod_json_obj.dump() - assert os.path.exists(mod_json_path) + assert mod_json_path.exists() # Check that the dump function writes the correct content with open(mod_json_path) as f: diff --git a/tests/modules/update.py b/tests/modules/update.py index 5208070fa5..81eb85716e 100644 --- a/tests/modules/update.py +++ b/tests/modules/update.py @@ -317,13 +317,13 @@ def test_update_only_show_differences(self, mock_prompt): mod_json = modules_json.get_modules_json() # Loop through all modules and check that they are NOT updated (according to the modules.json file) - # Modules that can be updated but shouldn't are custom/dumpsoftwareversions and fastqc + # A module that can be updated but shouldn't is fastqc # Module multiqc is already up to date so don't check - for mod in ["custom/dumpsoftwareversions", "fastqc"]: - correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] - current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - assert correct_git_sha != current_git_sha - assert cmp_module(Path(tmpdir, mod), Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, mod)) is True + mod = "fastqc" + correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + assert correct_git_sha != current_git_sha + assert cmp_module(Path(tmpdir, mod), Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, mod)) is True # Mock questionary answer: do not update module, only show diffs @@ -357,22 +357,40 @@ def test_update_only_show_differences_when_patch(self, mock_prompt): patch_obj = ModulePatch(self.pipeline_dir) patch_obj.patch("fastqc") # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "fastqc.diff"} + assert "fastqc.diff" in set(os.listdir(module_path)) # Update all modules assert update_obj.update() is True mod_json = modules_json.get_modules_json() # Loop through all modules and check that they are NOT updated 
(according to the modules.json file) - # Modules that can be updated but shouldn't are custom/dumpsoftwareversions and fastqc + # A module that can be updated but shouldn't is fastqc # Module multiqc is already up to date so don't check - for mod in ["custom/dumpsoftwareversions", "fastqc"]: - correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] - current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - assert correct_git_sha != current_git_sha + mod = "fastqc" + correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + assert correct_git_sha != current_git_sha def cmp_module(dir1, dir2): """Compare two versions of the same module""" files = ["main.nf", "meta.yml"] return all(filecmp.cmp(os.path.join(dir1, f), os.path.join(dir2, f), shallow=False) for f in files) + + +def test_update_module_with_extra_config_file(self): + """Try updating a module with a config file""" + # Install the module + assert self.mods_install.install("trimgalore") + # Add a nextflow_test.config file to the module + trimgalore_path = Path(self.pipeline_dir, "modules", "nf-core", "trimgalore") + Path(trimgalore_path, "nextflow_test.config").touch() + with open(Path(trimgalore_path, "nextflow_test.config"), "w") as fh: + fh.write("params.my_param = 'my_value'\n") + # Update the module + update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False) + assert update_obj.update("trimgalore") + # Check that the nextflow_test.config file is still there + assert Path(trimgalore_path, "nextflow_test.config").exists() + with open(Path(trimgalore_path, "nextflow_test.config")) as fh: + assert "params.my_param = 'my_value'" in fh.read() diff --git a/tests/subworkflows/lint.py b/tests/subworkflows/lint.py index b53fef7f0e..b89b7b78ce 100644 --- a/tests/subworkflows/lint.py +++ b/tests/subworkflows/lint.py @@ -19,6 +19,9 @@ def test_subworkflows_lint(self): def test_subworkflows_lint_empty(self): """Test linting a pipeline with no subworkflows installed""" + self.subworkflow_remove.remove("utils_nextflow_pipeline", force=True) + self.subworkflow_remove.remove("utils_nfcore_pipeline", force=True) + self.subworkflow_remove.remove("utils_nfvalidation_plugin", force=True) with pytest.raises(LookupError): nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) @@ -101,3 +104,80 @@ def test_subworkflows_lint_snapshot_file_not_needed(self): assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 assert len(subworkflow_lint.warned) >= 0 + + +def test_subworkflows_lint_less_than_two_modules_warning(self): + """Test linting a subworkflow with less than two modules""" + self.subworkflow_install.install("bam_stats_samtools") + # Remove two modules + with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf")) as fh: + content = fh.read() + new_content = content.replace( + "include { SAMTOOLS_IDXSTATS } from '../../../modules/nf-core/samtools/idxstats/main'", "" + ) + new_content = new_content.replace( + "include { SAMTOOLS_FLAGSTAT } from '../../../modules/nf-core/samtools/flagstat/main'", "" + ) + with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf"), "w") as fh: + fh.write(new_content) + 
subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") + assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) > 0 + assert subworkflow_lint.warned[0].lint_test == "main_nf_include" + # cleanup + self.subworkflow_remove.remove("bam_stats_samtools", force=True) + + +def test_subworkflows_lint_include_multiple_alias(self): + """Test linting a subworkflow with multiple include methods""" + self.subworkflow_install.install("bam_stats_samtools") + with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf")) as fh: + content = fh.read() + new_content = content.replace("SAMTOOLS_STATS", "SAMTOOLS_STATS_1") + new_content = new_content.replace( + "include { SAMTOOLS_STATS_1 ", + "include { SAMTOOLS_STATS as SAMTOOLS_STATS_1; SAMTOOLS_STATS as SAMTOOLS_STATS_2 ", + ) + with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf"), "w") as fh: + fh.write(new_content) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") + assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) == 2 + assert any( + [ + x.message == "Included component 'SAMTOOLS_STATS_1' versions are added in main.nf" + for x in subworkflow_lint.passed + ] + ) + assert any([x.message == "Included component 'SAMTOOLS_STATS_1' used in main.nf" for x in subworkflow_lint.passed]) + assert any( + [x.message == "Included component 'SAMTOOLS_STATS_2' not used in main.nf" for x in subworkflow_lint.warned] + ) + + # cleanup + self.subworkflow_remove.remove("bam_stats_samtools", force=True) + + +def test_subworkflows_lint_capitalization_fail(self): + """Test linting a subworkflow with a capitalization fail""" + self.subworkflow_install.install("bam_stats_samtools") + # change workflow name to lowercase + with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf")) as fh: + content = fh.read() + new_content = content.replace("workflow BAM_STATS_SAMTOOLS {", "workflow bam_stats_samtools {") + with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf"), "w") as fh: + fh.write(new_content) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") + assert len(subworkflow_lint.failed) >= 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + assert any([x.lint_test == "workflow_capitals" for x in subworkflow_lint.failed]) + + # cleanup + self.subworkflow_remove.remove("bam_stats_samtools", force=True) diff --git a/tests/subworkflows/remove.py b/tests/subworkflows/remove.py index dec67875bd..c6a3b98454 100644 --- a/tests/subworkflows/remove.py +++ b/tests/subworkflows/remove.py @@ -20,13 +20,15 @@ def test_subworkflows_remove_subworkflow(self): mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() assert self.subworkflow_remove.remove("bam_sort_stats_samtools") mod_json_after = 
ModulesJson(self.pipeline_dir).get_modules_json() - assert Path.exists(subworkflow_path) is False assert Path.exists(bam_sort_stats_samtools_path) is False assert Path.exists(bam_stats_samtools_path) is False assert Path.exists(samtools_index_path) is False assert mod_json_before != mod_json_after # assert subworkflows key is removed from modules.json - assert "subworkflows" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"].keys() + assert ( + "bam_sort_stats_samtools" + not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys() + ) assert "samtools/index" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"].keys() @@ -44,13 +46,15 @@ def test_subworkflows_remove_subworkflow_keep_installed_module(self): assert self.subworkflow_remove.remove("bam_sort_stats_samtools") mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() - assert Path.exists(subworkflow_path) is False assert Path.exists(bam_sort_stats_samtools_path) is False assert Path.exists(bam_stats_samtools_path) is False assert Path.exists(samtools_index_path) is True assert mod_json_before != mod_json_after # assert subworkflows key is removed from modules.json - assert "subworkflows" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"].keys() + assert ( + "bam_sort_stats_samtools" + not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys() + ) assert ( "samtools/index" in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"].keys() diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py index 32a69ba180..9ddc9bec0c 100644 --- a/tests/subworkflows/update.py +++ b/tests/subworkflows/update.py @@ -171,9 +171,9 @@ def test_update_with_config_fix_all(self): with open(Path(self.pipeline_dir, config_fn), "w") as f: yaml.dump(tools_config, f) - # Update all subworkflows in the pipeline - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True + # Update fastq_align_bowtie2 + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=False, update_deps=True, show_diff=False) + assert update_obj.update("fastq_align_bowtie2") is True # Check that the git sha for fastq_align_bowtie2 is correctly downgraded mod_json = ModulesJson(self.pipeline_dir).get_modules_json() diff --git a/tests/test_modules.py b/tests/test_modules.py index f9c3b6f2a7..539d3dcc57 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -159,6 +159,7 @@ def test_modulesrepo_class(self): test_modules_create_succeed, test_modules_migrate, test_modules_migrate_no_delete, + test_modules_migrate_symlink, ) from .modules.info import ( # type: ignore[misc] test_modules_info_in_modules_repo, @@ -259,6 +260,7 @@ def test_modulesrepo_class(self): test_update_different_branch_mix_modules_branch_test, test_update_different_branch_mixed_modules_main, test_update_different_branch_single_module, + test_update_module_with_extra_config_file, test_update_only_show_differences, test_update_only_show_differences_when_patch, test_update_with_config_dont_update, diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 19872ee168..cd0bd21146 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -121,8 +121,11 @@ def tearDown(self): ) from .subworkflows.lint import ( # type: ignore[misc] test_subworkflows_lint, + test_subworkflows_lint_capitalization_fail, 
test_subworkflows_lint_empty, test_subworkflows_lint_gitlab_subworkflows, + test_subworkflows_lint_include_multiple_alias, + test_subworkflows_lint_less_than_two_modules_warning, test_subworkflows_lint_multiple_remotes, test_subworkflows_lint_new_subworkflow, test_subworkflows_lint_no_gitlab,
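As a quick illustration of the include-parsing behaviour that the updated check_subworkflow_section lint code and the new test_subworkflows_lint_include_multiple_alias test exercise, here is a minimal standalone sketch. It is not part of the patch: the helper name parse_includes and the demo include lines are invented for illustration, and the dedup step is simplified to run once at the end rather than inside the loop as in the patched code.

# Sketch of the subworkflow include parsing: 'include' statements may list
# several components separated by ';' and may rename them with 'as'; only the
# alias is recorded, and duplicates are dropped.
def parse_includes(lines):
    """Return component names included before the workflow definition."""
    includes = []
    for line in lines:
        if line.strip().startswith("include"):
            # Take everything between the braces, e.g.
            # "SAMTOOLS_STATS as SAMTOOLS_STATS_1; SAMTOOLS_STATS as SAMTOOLS_STATS_2"
            component_name = [line.split("{")[1].split("}")[0].strip()]
            # Check whether multiple components are included on one line
            if ";" in component_name[0]:
                component_name = component_name[0].split(";")
            for comp in component_name:
                if " as " in comp:
                    comp = comp.split(" as ")[1].strip()
                includes.append(comp.strip())
    # Remove duplicated components
    return list(set(includes))


if __name__ == "__main__":
    demo = [
        "include { SAMTOOLS_STATS as SAMTOOLS_STATS_1; SAMTOOLS_STATS as SAMTOOLS_STATS_2 } from '../../../modules/nf-core/samtools/stats/main'",
        "include { SAMTOOLS_IDXSTATS } from '../../../modules/nf-core/samtools/idxstats/main'",
    ]
    # Prints: ['SAMTOOLS_IDXSTATS', 'SAMTOOLS_STATS_1', 'SAMTOOLS_STATS_2']
    print(sorted(parse_includes(demo)))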