diff --git a/.editorconfig b/.editorconfig index 014c2383bd..449f446a3b 100644 --- a/.editorconfig +++ b/.editorconfig @@ -10,3 +10,13 @@ indent_style = space [*.{md,yml,yaml,html,css,scss,js,cff}] indent_size = 2 + +# ignore python and markdown files +[*.py] +indent_style = unset + +[**/{CONTRIBUTING,README}.md] +indent_style = unset + +[**/Makefile] +indent_style = unset diff --git a/.github/.coveragerc b/.github/.coveragerc index 522a29eb62..24a419ae07 100644 --- a/.github/.coveragerc +++ b/.github/.coveragerc @@ -1,2 +1,5 @@ [run] -omit = nf_core/pipeline-template/* +omit = nf_core/*-template/* +source = nf_core +relative_files = True + diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 75da414db6..04d327bd8c 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -35,45 +35,28 @@ pip install -e . ## Code formatting -### Black +### Ruff -All Python code in nf-core/tools must be passed through the [Black Python code formatter](https://black.readthedocs.io/en/stable/). +All Python code in nf-core/tools must be passed through the [Ruff code linter and formatter](https://github.com/astral-sh/ruff). This ensures a harmonised code formatting style throughout the package, from all contributors. -You can run Black on the command line (it's included in `requirements-dev.txt`) - eg. to run recursively on the whole repository: +You can run Ruff on the command line (it's included in `requirements-dev.txt`) - eg. to run recursively on the whole repository: ```bash -black . +ruff format . ``` -Alternatively, Black has [integrations for most common editors](https://black.readthedocs.io/en/stable/editor_integration.html) +Alternatively, Ruff has [integrations for most common editors](https://github.com/astral-sh/ruff-lsp) and VSCode(https://github.com/astral-sh/ruff-vscode) to automatically format code when you hit save. -You can also set it up to run when you [make a commit](https://black.readthedocs.io/en/stable/version_control_integration.html). There is an automated CI check that runs when you open a pull-request to nf-core/tools that will fail if -any code does not adhere to Black formatting. +any code does not adhere to Ruff formatting. -### isort - -All Python code must also be passed through [isort](https://pycqa.github.io/isort/index.html). -This ensures a harmonised imports throughout the package, from all contributors. - -To run isort on the command line recursively on the whole repository you can use: - -```bash -isort . -``` - -isort also has [plugins for most common editors](https://github.com/pycqa/isort/wiki/isort-Plugins) -to automatically format code when you hit save. -Or [version control integration](https://pycqa.github.io/isort/docs/configuration/pre-commit.html) to set it up to run when you make a commit. - -There is an automated CI check that runs when you open a pull-request to nf-core/tools that will fail if -any code does not adhere to isort formatting. +Ruff has been adopted for linting and formatting in replacement of Black, isort (for imports) and pyupgrade. It also includes Flake8. ### pre-commit hooks -This repository comes with [pre-commit](https://pre-commit.com/) hooks for black, isort and Prettier. pre-commit automatically runs checks before a commit is committed into the git history. If all checks pass, the commit is made, if files are changed by the pre-commit hooks, the user is informed and has to stage the changes and attempt the commit again. 
+This repository comes with [pre-commit](https://pre-commit.com/) hooks for ruff and Prettier. pre-commit automatically runs checks before a commit is committed into the git history. If all checks pass, the commit is made, if files are changed by the pre-commit hooks, the user is informed and has to stage the changes and attempt the commit again. You can use the pre-commit hooks if you like, but you don't have to. The CI on Github will run the same checks as the tools installed with pre-commit. If the pre-commit checks pass, then the same checks in the CI will pass, too. diff --git a/.github/renovate.json5 b/.github/renovate.json5 index f9b377c615..8d123ab17a 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -1,5 +1,17 @@ { $schema: "https://docs.renovatebot.com/renovate-schema.json", extends: ["github>nf-core/ops//.github/renovate/default.json5"], + ignorePaths: ["**/nf_core/pipeline-template/modules/nf-core/**"], baseBranches: ["dev"], + packageRules: [ + { + matchDatasources: ["docker"], + matchPackageNames: ["python"], + versioning: "pep440", + }, + { + matchDatasources: ["docker"], + registryUrls: ["docker.io"], + }, + ], } diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml index dd64ffa3e5..54dee6df16 100644 --- a/.github/workflows/branch.yml +++ b/.github/workflows/branch.yml @@ -18,7 +18,7 @@ jobs: # If the above check failed, post a comment on the PR explaining the failure - name: Post PR comment if: failure() - uses: mshick/add-pr-comment@v1 + uses: mshick/add-pr-comment@v2 with: message: | ## This PR is against the `master` branch :x: diff --git a/.github/workflows/changelog.py b/.github/workflows/changelog.py new file mode 100644 index 0000000000..eb56499c93 --- /dev/null +++ b/.github/workflows/changelog.py @@ -0,0 +1,228 @@ +""" +Taken from https://github.com/MultiQC/MultiQC/blob/main/.github/workflows/changelog.py and updated for nf-core + +To be called by a CI action. Assumes the following environment variables are set: +PR_TITLE, PR_NUMBER, GITHUB_WORKSPACE. + +Adds a line into the CHANGELOG.md: +* Looks for the section to add the line to, based on the PR title, e.g. `Template:`, `Modules:`. +* All other change will go under the "### General" section. +* If an entry for the PR is already added, it will not run. + +Other assumptions: +- CHANGELOG.md has a running section for an ongoing "dev" version +(i.e. titled "## nf-core vX.Ydev"). +""" + +import os +import re +import sys +from pathlib import Path +from typing import List + +REPO_URL = "https://github.com/nf-core/tools" + +# Assumes the environment is set by the GitHub action. +pr_title = os.environ["PR_TITLE"] +pr_number = os.environ["PR_NUMBER"] +comment = os.environ.get("COMMENT", "") +workspace_path = Path(os.environ.get("GITHUB_WORKSPACE", "")) + +assert pr_title, pr_title +assert pr_number, pr_number + +# Trim the PR number added when GitHub squashes commits, e.g. "Template: Updated (#2026)" +pr_title = pr_title.removesuffix(f" (#{pr_number})") + +changelog_path = workspace_path / "CHANGELOG.md" + +if any( + line in pr_title.lower() + for line in [ + "skip changelog", + "skip change log", + "no changelog", + "no change log", + "bump version", + ] +): + print("Skipping changelog update") + sys.exit(0) + + +def _determine_change_type(pr_title) -> tuple[str, str]: + """ + Determine the type of the PR: Template, Download, Linting, Modules, Subworkflows, or General + Returns a tuple of the section name and the module info. 
+ """ + sections = { + "Template": "### Template", + "Download": "### Download", + "Linting": "### Linting", + "Modules": "### Modules", + "Subworkflows": "### Subworkflows", + } + current_section_header = "### General" + current_section = "General" + + # Check if the PR in any of the sections. + for section, section_header in sections.items(): + # check if the PR title contains any of the section headers, with some loose matching, e.g. removing plural and suffixes + if re.sub(r"s$", "", section.lower().replace("ing", "")) in pr_title.lower(): + current_section_header = section_header + current_section = section + print(f"Detected section: {current_section}") + return current_section, current_section_header + + +# Determine the type of the PR +section, section_header = _determine_change_type(pr_title) + +# Remove section indicator from the PR title. +pr_title = re.sub(rf"{section}[:\s]*", "", pr_title, flags=re.IGNORECASE) + +# Prepare the change log entry. +pr_link = f"([#{pr_number}]({REPO_URL}/pull/{pr_number}))" + +# Handle manual changelog entries through comments. +if comment := comment.removeprefix("@nf-core-bot changelog").strip(): + print(f"Adding manual changelog entry: {comment}") + pr_title = comment +new_lines = [ + f"- {pr_title} {pr_link}\n", +] + +print(f"Adding new lines into section '{section}':\n" + "".join(new_lines)) + +# Finally, updating the changelog. +# Read the current changelog lines. We will print them back as is, except for one new +# entry, corresponding to this new PR. +with changelog_path.open("r") as f: + orig_lines = f.readlines() +updated_lines: List[str] = [] + + +def _skip_existing_entry_for_this_pr(line: str, same_section: bool = True) -> str: + if line.strip().endswith(pr_link): + print(f"Found existing entry for this pull request #{pr_number}:") + existing_lines = [line] + if new_lines and new_lines == existing_lines and same_section: + print(f"Found existing identical entry for this pull request #{pr_number} in the same section:") + print("".join(existing_lines)) + sys.exit(0) # Just leaving the CHANGELOG intact + else: + print( + f"Found existing entry for this pull request #{pr_number}. It will be replaced and/or moved to proper section" + ) + print("".join(existing_lines)) + for _ in range(len(existing_lines)): + try: + line = orig_lines.pop(0) + except IndexError: + break + return line + + +# Find the next line in the change log that matches the pattern "# nf-core/tools v.*dev" +# If it doesn't exist, exist with code 1 (let's assume that a new section is added +# manually or by CI when a release is pushed). +# Else, find the next line that matches the `section` variable, and insert a new line +# under it (we also assume that section headers are added already). +inside_version_dev = False +already_added_entry = False +while orig_lines: + line = orig_lines.pop(0) + + # If the line already contains a link to the PR, don't add it again. + line = _skip_existing_entry_for_this_pr(line, same_section=False) + + if line.startswith("# ") and not line.strip() == "# nf-core/tools: Changelog": # Version header, e.g. "# v2.12dev" + print(f"Found version header: {line.strip()}") + updated_lines.append(line) + + # Parse version from the line `# v2.12dev` or + # `# [v2.11.1 - Magnesium Dragon Patch](https://github.com/nf-core/tools/releases/tag/2.11) - [2023-12-20]` ... 
+ if not (m := re.match(r".*(v\d+\.\d+(dev)?).*", line)): + print(f"Cannot parse version from line {line.strip()}.", file=sys.stderr) + sys.exit(1) + version = m.group(1) + + if not inside_version_dev: + if not version.endswith("dev"): + print( + "Can't find a 'dev' version section in the changelog. Make sure " + "it's created, and all the required sections, e.g. `### Template` are created under it .", + file=sys.stderr, + ) + sys.exit(1) + inside_version_dev = True + else: + if version.endswith("dev"): + print( + f"Found another 'dev' version section in the changelog, make" + f"sure to change it to a 'release' stable version tag. " + f"Line: {line.strip()}", + file=sys.stderr, + ) + sys.exit(1) + # We are past the dev version, so just add back the rest of the lines and break. + while orig_lines: + line = orig_lines.pop(0) + line = _skip_existing_entry_for_this_pr(line, same_section=False) + if line: + updated_lines.append(line) + break + continue + print(f"Found line: {line.strip()}") + print(f"inside_version_dev: {inside_version_dev}") + print(f"section_header: {section_header}") + if inside_version_dev and line.lower().startswith(section_header.lower()): # Section of interest header + print(f"Found section header: {line.strip()}") + if already_added_entry: + print( + f"Already added new lines into section {section}, is the section duplicated?", + file=sys.stderr, + ) + sys.exit(1) + updated_lines.append(line) + # Collecting lines until the next section. + section_lines: List[str] = [] + while True: + line = orig_lines.pop(0) + if line.startswith("#"): + print(f"Found the next section header: {line.strip()}") + # Found the next section header, so need to put all the lines we collected. + updated_lines.append("\n") + _updated_lines = [_l for _l in section_lines + new_lines if _l.strip()] + updated_lines.extend(_updated_lines) + updated_lines.append("\n") + if new_lines: + print(f"Updated {changelog_path} section '{section}' with lines:\n" + "".join(new_lines)) + else: + print(f"Removed existing entry from {changelog_path} section '{section}'") + already_added_entry = True + # Pushing back the next section header line + orig_lines.insert(0, line) + break + # If the line already contains a link to the PR, don't add it again. + line = _skip_existing_entry_for_this_pr(line, same_section=True) + section_lines.append(line) + else: + updated_lines.append(line) + + +def collapse_newlines(lines: List[str]) -> List[str]: + updated = [] + for idx in range(len(lines)): + if idx != 0 and not lines[idx].strip() and not lines[idx - 1].strip(): + continue + updated.append(lines[idx]) + return updated + + +updated_lines = collapse_newlines(updated_lines) + + +# Finally, writing the updated lines back. +with changelog_path.open("w") as f: + f.writelines(updated_lines) diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml new file mode 100644 index 0000000000..63dcf2e428 --- /dev/null +++ b/.github/workflows/changelog.yml @@ -0,0 +1,88 @@ +name: Update CHANGELOG.md +on: + issue_comment: + types: [created] + pull_request_target: + types: [opened] + +jobs: + update_changelog: + runs-on: ubuntu-latest + # Run if comment is on a PR with the main repo, and if it contains the magic keywords. + # Or run on PR creation, unless asked otherwise in the title. 
+ if: | + github.repository_owner == 'nf-core' && ( + github.event_name == 'pull_request_target' || + github.event.issue.pull_request && startsWith(github.event.comment.body, '@nf-core-bot changelog') + ) + + steps: + - uses: actions/checkout@v4 + with: + token: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} + + # Action runs on the issue comment, so we don't get the PR by default. + # Use the GitHub CLI to check out the PR: + - name: Checkout Pull Request + env: + GH_TOKEN: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} + run: | + if [[ "${{ github.event_name }}" == "issue_comment" ]]; then + PR_NUMBER="${{ github.event.issue.number }}" + elif [[ "${{ github.event_name }}" == "pull_request_target" ]]; then + PR_NUMBER="${{ github.event.pull_request.number }}" + fi + gh pr checkout $PR_NUMBER + + - uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install packages + run: | + python -m pip install --upgrade pip + pip install pyyaml + + - name: Update CHANGELOG.md from the PR title + env: + COMMENT: ${{ github.event.comment.body }} + GH_TOKEN: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} + run: | + if [[ "${{ github.event_name }}" == "issue_comment" ]]; then + export PR_NUMBER='${{ github.event.issue.number }}' + export PR_TITLE='${{ github.event.issue.title }}' + elif [[ "${{ github.event_name }}" == "pull_request_target" ]]; then + export PR_NUMBER='${{ github.event.pull_request.number }}' + export PR_TITLE='${{ github.event.pull_request.title }}' + fi + python ${GITHUB_WORKSPACE}/.github/workflows/changelog.py + + - name: Check if CHANGELOG.md actually changed + run: | + git diff --exit-code ${GITHUB_WORKSPACE}/CHANGELOG.md || echo "changed=YES" >> $GITHUB_ENV + echo "File changed: ${{ env.changed }}" + + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: 3.11 + cache: "pip" + + - name: Install pre-commit + run: pip install pre-commit + + - name: Run pre-commit checks + if: env.changed == 'YES' + run: | + pre-commit run --all-files + + - name: Commit and push changes + if: env.changed == 'YES' + run: | + git config user.email "core@nf-co.re" + git config user.name "nf-core-bot" + git config push.default upstream + git add ${GITHUB_WORKSPACE}/CHANGELOG.md + git status + git commit -m "[automated] Update CHANGELOG.md" + git push diff --git a/.github/workflows/clean-up.yml b/.github/workflows/clean-up.yml index 4b55c5e4aa..ff311f9df8 100644 --- a/.github/workflows/clean-up.yml +++ b/.github/workflows/clean-up.yml @@ -10,7 +10,7 @@ jobs: issues: write pull-requests: write steps: - - uses: actions/stale@v7 + - uses: actions/stale@v9 with: stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days." stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful." 
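The CONTRIBUTING.md and workflow changes above replace the separate Black, isort, Prettier and EditorConfig CI checks with a single pre-commit run. As a minimal sketch of the local workflow this implies (assuming the standard pre-commit CLI and the hooks declared in the updated `.pre-commit-config.yaml` later in this diff), a contributor could run:

```bash
pip install pre-commit        # tool versions for each hook are managed by pre-commit itself
pre-commit install            # register the git hook so the checks run before every commit
pre-commit run --all-files    # one-off run of ruff, ruff-format, Prettier, editorconfig-checker and mypy
```

A clean `pre-commit run --all-files` locally should match what the updated `lint-code.yml` and `fix-linting.yml` workflows execute in CI.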
diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 57dbe86d65..c4d0358982 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -6,6 +6,15 @@ on: pull_request: release: types: [published] + workflow_dispatch: + inputs: + runners: + description: "Runners to test on" + type: choice + options: + - "ubuntu-latest" + - "self-hosted" + default: "self-hosted" # Cancel if a newer run is started concurrency: @@ -17,7 +26,7 @@ env: jobs: MakeTestWorkflow: - runs-on: self-hosted + runs-on: ${{ github.event.inputs.runners || 'self-hosted' }} env: NXF_ANSI_LOG: false strategy: @@ -26,15 +35,22 @@ jobs: - "23.04.0" - "latest-everything" steps: + - name: go to subdirectory and change nextflow workdir + run: | + mkdir -p create-lint-wf + cd create-lint-wf + export NXF_WORK=$(pwd) + # Get the repo code - uses: actions/checkout@v4 name: Check out source-code repository # Set up nf-core/tools - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 + cache: pip - name: Install python dependencies run: | @@ -47,14 +63,6 @@ jobs: with: version: ${{ matrix.NXF_VER }} - # Install the Prettier linting tools - - uses: actions/setup-node@v4 - with: - node-version: "20" - - - name: Install Prettier and editorconfig-checker - run: npm install -g prettier editorconfig-checker - # Build a pipeline from the template - name: nf-core create run: | @@ -73,11 +81,8 @@ jobs: working-directory: create-lint-wf # Run code style linting - - name: Run Prettier --check - run: prettier --check create-lint-wf/nf-core-testpipeline - - - name: Run ECLint check - run: editorconfig-checker -exclude README.md $(find nf-core-testpipeline/.* -type f | grep -v '.git\|.py\|md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile') + - name: run pre-commit + run: pre-commit run --all-files working-directory: create-lint-wf # Update modules to the latest version @@ -97,7 +102,7 @@ jobs: # Run nf-core linting - name: nf-core lint - run: nf-core --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned + run: nf-core --verbose --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned working-directory: create-lint-wf # Run the other nf-core commands @@ -142,7 +147,11 @@ jobs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: nf-core-log-file + name: nf-core-log-file-${{ matrix.NXF_VER }} path: create-lint-wf/log.txt + + - name: Cleanup work directory + run: sudo rm -rf create-lint-wf + if: always() diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 37cbf65c7d..7d586fc3d2 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -8,6 +8,15 @@ on: pull_request: release: types: [published] + workflow_dispatch: + inputs: + runners: + description: "Runners to test on" + type: choice + options: + - "ubuntu-latest" + - "self-hosted" + default: "self-hosted" # Cancel if a newer run is started concurrency: @@ -20,24 +29,39 @@ env: jobs: RunTestWorkflow: - runs-on: ubuntu-latest + runs-on: ${{ matrix.runner }} env: NXF_ANSI_LOG: false strategy: matrix: TEMPLATE: - - "template_skip_all.yml" - "template_skip_github_badges.yml" - "template_skip_igenomes.yml" - "template_skip_ci.yml" - 
- "template_skip_nf_core_configs.yml" + runner: + - ${{ github.event.inputs.runners || 'self-hosted' }} + profile: ["self_hosted_runner"] + include: + - TEMPLATE: "template_skip_all.yml" + runner: ubuntu-latest + profile: "docker" + - TEMPLATE: "template_skip_nf_core_configs.yml" + runner: ubuntu-latest + profile: "docker" + fail-fast: false steps: + - name: go to working directory + run: | + mkdir -p create-lint-wf-template + cd create-lint-wf-template + export NXF_WORK=$(pwd) + - uses: actions/checkout@v4 name: Check out source-code repository - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 @@ -51,18 +75,6 @@ jobs: with: version: latest-everything - # Install the Prettier linting tools - - uses: actions/setup-node@v4 - with: - node-version: "20" - - - name: Install Prettier - run: npm install -g prettier - - # Install the editorconfig linting tools - - name: Install editorconfig-checker - run: npm install -g editorconfig-checker - # Create template files - name: Create template skip all (except github) run: | @@ -95,7 +107,7 @@ jobs: - name: run the pipeline run: | cd create-test-lint-wf - nextflow run my-prefix-testpipeline -profile test,docker --outdir ./results + nextflow run my-prefix-testpipeline -profile test,${{matrix.profile}} --outdir ./results # Remove results folder before linting - name: remove results folder @@ -107,11 +119,8 @@ jobs: run: nf-core --log-file log.txt sync --dir create-test-lint-wf/my-prefix-testpipeline/ # Run code style linting - - name: Run Prettier --check - run: prettier --check create-test-lint-wf/my-prefix-testpipeline - - - name: Run ECLint check - run: editorconfig-checker -exclude README.md $(find my-prefix-testpipeline/.* -type f | grep -v '.git\|.py\|md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile') + - name: Run pre-commit + run: pre-commit run --all-files working-directory: create-test-lint-wf # Remove TODO statements @@ -145,7 +154,11 @@ jobs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: nf-core-log-file + name: nf-core-log-file-${{ matrix.TEMPLATE }} path: create-test-lint-wf/artifact_files.tar + + - name: Cleanup work directory + run: sudo rm -rf create-test-lint-wf + if: always() diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 026b0a889b..9301d1224e 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -6,6 +6,15 @@ on: pull_request: release: types: [published] + workflow_dispatch: + inputs: + runners: + description: "Runners to test on" + type: choice + options: + - "ubuntu-latest" + - "self-hosted" + default: "self-hosted" # Cancel if a newer run is started concurrency: @@ -17,7 +26,7 @@ env: jobs: RunTestWorkflow: - runs-on: ubuntu-latest + runs-on: ${{ github.event.inputs.runners || 'self-hosted' }} env: NXF_ANSI_LOG: false strategy: @@ -26,11 +35,17 @@ jobs: - "23.04.0" - "latest-everything" steps: + - name: go to working directory + run: | + mkdir -p create-test-wf + cd create-test-wf + export NXF_WORK=$(pwd) + - uses: actions/checkout@v4 name: Check out source-code repository - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 @@ -49,11 +64,16 @@ jobs: mkdir create-test-wf && cd create-test-wf export NXF_WORK=$(pwd) nf-core --log-file log.txt create -n testpipeline -d "This 
pipeline is for testing" -a "Testing McTestface" --plain - nextflow run nf-core-testpipeline -profile test,docker --outdir ./results + nextflow run nf-core-testpipeline -profile test,self_hosted_runner --outdir ./results - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: nf-core-log-file + name: nf-core-log-file-${{ matrix.NXF_VER }} path: create-test-wf/log.txt + + - name: Cleanup work directory + # cleanup work directory + run: sudo rm -rf create-test-wf + if: always() diff --git a/.github/workflows/deploy-pypi.yml b/.github/workflows/deploy-pypi.yml index 62c53508d8..8d3a154d80 100644 --- a/.github/workflows/deploy-pypi.yml +++ b/.github/workflows/deploy-pypi.yml @@ -17,7 +17,7 @@ jobs: name: Check out source-code repository - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml index 4184bc5e59..595fff66ae 100644 --- a/.github/workflows/fix-linting.yml +++ b/.github/workflows/fix-linting.yml @@ -4,19 +4,26 @@ on: types: [created] jobs: - deploy: + fix-linting: # Only run if comment is on a PR with the main repo, and if it contains the magic keywords if: > contains(github.event.comment.html_url, '/pull/') && contains(github.event.comment.body, '@nf-core-bot fix linting') && github.repository == 'nf-core/tools' - runs-on: ubuntu-latest + runs-on: self-hosted steps: # Use the @nf-core-bot token to check out so we can push later - - uses: actions/checkout@v4 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 with: token: ${{ secrets.nf_core_bot_auth_token }} + # indication that the linting is being fixed + - name: React on comment + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: eyes + # Action runs on the issue comment, so we don't get the PR by default # Use the gh cli to check out the PR - name: Checkout Pull Request @@ -24,33 +31,30 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} - - uses: actions/setup-node@v4 + # Install and run pre-commit + - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: - node-version: "20" + python-version: 3.11 - - name: Install Prettier - run: npm install -g prettier @prettier/plugin-php + - name: Install pre-commit + run: pip install pre-commit - - name: Run 'prettier --write' - run: prettier --write ${GITHUB_WORKSPACE} + - name: Run pre-commit + id: pre-commit + run: pre-commit run --all-files + continue-on-error: true - - name: Run Black - uses: psf/black@stable - with: - # Override to remove the default --check flag so that we make changes - options: "--color" - - - name: Set up Python 3.11 - uses: actions/setup-python@v4 - with: - python-version: 3.11 - - name: python-isort - uses: isort/isort-action@v1.0.0 + # indication that the linting has finished + - name: react if linting finished succesfully + if: steps.pre-commit.outcome == 'success' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 with: - isortVersion: "latest" - requirementsFiles: "requirements.txt requirements-dev.txt" + comment-id: ${{ github.event.comment.id }} + reactions: "+1" - name: Commit & push changes + id: commit-and-push + if: steps.pre-commit.outcome == 'failure' run: | git config user.email "core@nf-co.re" git config user.name 
"nf-core-bot" @@ -59,3 +63,27 @@ jobs: git status git commit -m "[automated] Fix code linting" git push + + - name: react if linting errors were fixed + id: react-if-fixed + if: steps.commit-and-push.outcome == 'success' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: hooray + + - name: react if linting errors were not fixed + if: steps.commit-and-push.outcome == 'failure' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: confused + + - name: react if linting errors were not fixed + if: steps.commit-and-push.outcome == 'failure' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + issue-number: ${{ github.event.issue.number }} + body: | + @${{ github.actor }} I tried to fix the linting errors, but it didn't work. Please fix them manually. + See [CI log](https://github.com/nf-core/tools/actions/runs/${{ github.run_id }}) for more details. diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml index 23972c56f6..d9847dd365 100644 --- a/.github/workflows/lint-code.yml +++ b/.github/workflows/lint-code.yml @@ -13,119 +13,21 @@ concurrency: cancel-in-progress: true jobs: - EditorConfig: - runs-on: ["self-hosted"] + Pre-commit: + runs-on: ubuntu-latest + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 - with: - node-version: "20" - - - name: Install editorconfig-checker - run: npm install -g editorconfig-checker - - # Run editor config check only on files not covered by a linter - - name: Run ECLint check - run: editorconfig-checker -exclude README.md $(git ls-files | grep -v 'test\|.py\|md\|json\|yml\|yaml\|html\|css\|Makefile') - - Prettier: - runs-on: ["self-hosted"] - steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-node@v4 - with: - node-version: "20" - - - name: Install Prettier - run: npm install -g prettier - - - name: Run Prettier --check - run: prettier --check ${GITHUB_WORKSPACE} - - PythonBlack: - runs-on: ["self-hosted"] - steps: - - uses: actions/checkout@v4 - - - name: Check code lints with Black - uses: psf/black@stable - - # If the above check failed, post a comment on the PR explaining the failure - - name: Post PR comment - if: failure() - uses: mshick/add-pr-comment@v1 - with: - message: | - ## Python linting (`black`) is failing - - To keep the code consistent with lots of contributors, we run automated code consistency checks. - To fix this CI test, please run: - - * Install [`black`](https://black.readthedocs.io/en/stable/): `pip install black` - * Fix formatting errors in your pipeline: `black .` - - Once you push these changes the test should pass, and you can hide this comment :+1: - - We highly recommend setting up Black in your code editor so that this formatting is done automatically on save. Ask about it on Slack for help! - - Thanks again for your contribution! 
- repo-token: ${{ secrets.GITHUB_TOKEN }} - allow-repeats: false - - isort: - runs-on: ["self-hosted"] - steps: - - name: Check out source-code repository - uses: actions/checkout@v4 - - name: Set up Python 3.11 - uses: actions/setup-python@v4 - with: - python-version: 3.11 - - name: python-isort - uses: isort/isort-action@v1.1.0 - with: - isortVersion: "latest" - requirementsFiles: "requirements.txt requirements-dev.txt" - - static-type-check: - runs-on: ["self-hosted"] - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 cache: "pip" - - name: Install dependencies - run: | - python -m pip install --upgrade pip -r requirements-dev.txt - pip install -e . + - name: Install pre-commit + run: pip install pre-commit - - name: Cache nf-test installation - id: cache-software - uses: actions/cache@v3 - with: - path: | - /usr/local/bin/nf-test - /home/runner/.nf-test/nf-test.jar - key: ${{ runner.os }}-${{ env.NFTEST_VER }}-nftest - - - name: Install nf-test - if: steps.cache-software.outputs.cache-hit != 'true' - run: | - wget -qO- https://code.askimed.com/install/nf-test | bash - sudo mv nf-test /usr/local/bin/ - - - name: Get Python changed files - id: changed-py-files - uses: tj-actions/changed-files@v23 - with: - files: | - *.py - **/*.py - - name: Run if any of the listed files above is changed - if: steps.changed-py-files.outputs.any_changed == 'true' - run: mypy ${{ steps.changed-py-files.outputs.all_changed_files }} + - name: Run pre-commit + run: pre-commit run --all-files diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml index 1230bfc9d3..169a917d83 100644 --- a/.github/workflows/push_dockerhub_dev.yml +++ b/.github/workflows/push_dockerhub_dev.yml @@ -13,7 +13,7 @@ concurrency: jobs: push_dockerhub: name: Push new Docker image to Docker Hub (dev) - runs-on: self-hosted + runs-on: ubuntu-latest # Only run for the nf-core repo, for releases and merged PRs if: ${{ github.repository == 'nf-core/tools' }} env: diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index b6f3592165..8a4afe9161 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -14,8 +14,17 @@ on: - "CHANGELOG.md" release: types: [published] + workflow_dispatch: + inputs: + runners: + description: "Runners to test on" + type: choice + options: + - "ubuntu-latest" + - "self-hosted" + default: "self-hosted" -# Cancel if a newer run is started +# Cancel if a newer run with the same workflow name is queued concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true @@ -25,7 +34,7 @@ env: jobs: setup: - runs-on: ["ubuntu-latest"] + runs-on: "ubuntu-latest" strategy: matrix: python-version: ["3.8", "3.11"] @@ -44,20 +53,45 @@ jobs: runner: ${{ matrix.runner }} run-tests: ${{ steps.conditions.outputs.run-tests }} + # create a test matrix based on all python files in /tests + list_tests: + name: Get test file matrix + runs-on: "ubuntu-latest" + steps: + - uses: actions/checkout@v4 + name: Check out source-code repository + + - name: List tests + id: list_tests + run: | + echo "tests=$(find tests/test_* | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT + outputs: + tests: ${{ steps.list_tests.outputs.tests }} + test: - name: Test with Python ${{ needs.setup.outputs.python-version }} on ${{ needs.setup.outputs.runner }} - needs: setup - if: ${{ 
needs.setup.outputs.run-tests}} - runs-on: ${{ needs.setup.outputs.runner }} + name: Run ${{matrix.test}} with Python ${{ needs.setup.outputs.python-version }} on ${{ needs.setup.outputs.runner }} + needs: [setup, list_tests] + if: ${{ needs.setup.outputs.run-tests }} + runs-on: ${{ github.event.inputs.runners || 'self-hosted' }} + strategy: + matrix: ${{ fromJson(needs.list_tests.outputs.tests) }} + fail-fast: false # run all tests even if one fails steps: - - uses: actions/checkout@v2 + - name: go to subdirectory and change nextflow workdir + run: | + mkdir -p pytest + cd pytest + export NXF_WORK=$(pwd) + + - uses: actions/checkout@v4 name: Check out source-code repository - name: Set up Python ${{ needs.setup.outputs.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: ${{ needs.setup.outputs.python-version }} cache: "pip" + token: ${{ secrets.GITHUB_TOKEN }} - name: Install dependencies run: | @@ -71,18 +105,27 @@ jobs: sudo apt remove -y git git-man sudo add-apt-repository --remove ppa:git-core/ppa sudo apt install -y git + - name: Get current date id: date run: echo "date=$(date +'%Y-%m')" >> $GITHUB_ENV - name: Install Nextflow uses: nf-core/setup-nextflow@v1 - with: - version: "latest-everything" + + - name: Look if nf-test is already installed and write to env variable + id: check-nftest + run: | + if [ -f /usr/local/bin/nf-test ]; then + echo "nftest_installed=true" >> $GITHUB_ENV + else + echo "nftest_installed=false" >> $GITHUB_ENV + fi - name: Cache nf-test installation + if: env.nftest_installed != 'true' id: cache-software - uses: actions/cache@v3 + uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4 with: path: | /usr/local/bin/nf-test @@ -90,16 +133,70 @@ jobs: key: ${{ runner.os }}-nftest-${{ env.date }} - name: Install nf-test - if: steps.cache-software.outputs.cache-hit != 'true' + if: steps.cache-software.outputs.cache-hit != 'true' && env.nftest_installed != 'true' run: | wget -qO- https://code.askimed.com/install/nf-test | bash sudo mv nf-test /usr/local/bin/ + - name: move coveragerc file up + run: | + mv .github/.coveragerc . + - name: Test with pytest - run: python3 -m pytest tests/ --color=yes --cov-report=xml --cov-config=.github/.coveragerc --cov=nf_core + run: | + python3 -m pytest tests/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0|| exit_code=$? + # don't fail if no tests were collected, e.g. for test_licence.py + if [ "${exit_code}" -eq 5 ]; then + echo "No tests were collected" + exit 0 + elif [ "${exit_code}" -ne 0 ]; then + echo "Tests failed with exit code ${exit_code}" + exit 1 + fi + + - name: Upload coverage + uses: actions/upload-artifact@v4 + with: + name: coverage_${{ matrix.test }} + path: .coverage + + coverage: + needs: test + runs-on: ${{ github.event.inputs.runners || 'self-hosted' }} + steps: + - name: go to subdirectory + run: | + mkdir -p pytest + cd pytest + + - uses: actions/checkout@v4 + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + env: + AGENT_TOOLSDIRECTORY: /opt/actions-runner/_work/tools/tools/ + with: + python-version: 3.11 + cache: "pip" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip -r requirements-dev.txt + pip install -e . + + - name: move coveragerc file up + run: | + mv .github/.coveragerc . 
+ + - name: Download all artifacts + uses: actions/download-artifact@v4 + - name: Run coverage + run: | + coverage combine --keep coverage*/.coverage* + coverage report + coverage xml - - uses: codecov/codecov-action@v1 - name: Upload code coverage report + - uses: codecov/codecov-action@v3 with: - if: success() - token: ${{ secrets.CODECOV_TOKEN }} + files: coverage.xml + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/rich-codex.yml b/.github/workflows/rich-codex.yml index f5f289b73e..ecbae8ffec 100644 --- a/.github/workflows/rich-codex.yml +++ b/.github/workflows/rich-codex.yml @@ -8,7 +8,7 @@ jobs: - name: Check out the repo uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.x cache: pip @@ -19,7 +19,7 @@ jobs: - name: Cache nf-test installation id: cache-software - uses: actions/cache@v3 + uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4 with: path: | /usr/local/bin/nf-test diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index 94f8ee54e2..aeb24839f1 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -8,6 +8,13 @@ on: type: boolean description: Only run on nf-core/testpipeline? required: true + runners: + description: "Runners to test on" + type: choice + options: + - "ubuntu-latest" + - "self-hosted" + default: "self-hosted" # Cancel if a newer run is started concurrency: @@ -16,7 +23,7 @@ concurrency: jobs: get-pipelines: - runs-on: ubuntu-latest + runs-on: ${{ github.event.inputs.runners || 'self-hosted' }} outputs: matrix: ${{ steps.set-matrix.outputs.matrix }} steps: @@ -31,7 +38,7 @@ jobs: sync: needs: get-pipelines - runs-on: self-hosted + runs-on: ${{ github.event.inputs.runners || 'self-hosted' }} strategy: matrix: ${{fromJson(needs.get-pipelines.outputs.matrix)}} fail-fast: false @@ -49,7 +56,7 @@ jobs: fetch-depth: "0" - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 @@ -78,7 +85,7 @@ jobs: - name: Upload sync log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: sync_log_${{ matrix.pipeline }} path: sync_log_${{ matrix.pipeline }}.txt diff --git a/.github/workflows/tools-api-docs-dev.yml b/.github/workflows/tools-api-docs-dev.yml index 51c25fa250..91396b2a25 100644 --- a/.github/workflows/tools-api-docs-dev.yml +++ b/.github/workflows/tools-api-docs-dev.yml @@ -20,14 +20,14 @@ concurrency: jobs: api-docs: name: Build & push Sphinx API docs - runs-on: self-hosted + runs-on: ubuntu-latest steps: - name: Check out source-code repository uses: actions/checkout@v4 - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 @@ -43,7 +43,7 @@ jobs: - name: Sync dev docs # Only sync with the website if it was a push from nf-core/tools dev branch if: github.repository == 'nf-core/tools' && github.event_name == 'push' && github.event.ref == 'refs/heads/dev' - uses: SamKirkland/FTP-Deploy-Action@4.0.0 + uses: SamKirkland/FTP-Deploy-Action@v4.3.4 with: server: ${{ secrets.ftp_server }} username: ${{ secrets.ftp_username}} diff --git a/.github/workflows/tools-api-docs-release.yml b/.github/workflows/tools-api-docs-release.yml index b0869190d9..2183db3fcf 100644 --- a/.github/workflows/tools-api-docs-release.yml +++ b/.github/workflows/tools-api-docs-release.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v4 - name: 
Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 @@ -37,7 +37,7 @@ jobs: - name: Sync release docs if: github.repository == 'nf-core/tools' - uses: SamKirkland/FTP-Deploy-Action@4.0.0 + uses: SamKirkland/FTP-Deploy-Action@v4.3.4 with: server: ${{ secrets.ftp_server }} username: ${{ secrets.ftp_username}} diff --git a/.gitpod.yml b/.gitpod.yml index 899f58e556..a4f1c17169 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -9,6 +9,7 @@ tasks: - name: unset JAVA_TOOL_OPTIONS command: | unset JAVA_TOOL_OPTIONS + vscode: extensions: # based on nf-core.nf-core-extensionpack - codezombiech.gitignore # Language support for .gitignore files @@ -21,3 +22,4 @@ vscode: # - nextflow.nextflow # Nextflow syntax highlighting - oderwat.indent-rainbow # Highlight indentation level - streetsidesoftware.code-spell-checker # Spelling checker for source code + - charliermarsh.ruff # Code linter Ruff diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ad23a3c895..d4eb6a721d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,23 +1,23 @@ repos: - - repo: https://github.com/psf/black - rev: 23.1.0 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.1.14 hooks: - - id: black - - repo: https://github.com/pycqa/isort - rev: 5.12.0 - hooks: - - id: isort + - id: ruff # linter + args: [--fix, --exit-non-zero-on-fix] # sort imports and fix + - id: ruff-format # formatter - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v2.7.1" + rev: "v3.1.0" hooks: - id: prettier - - repo: https://github.com/asottile/pyupgrade - rev: v3.15.0 + + - repo: https://github.com/editorconfig-checker/editorconfig-checker.python + rev: "2.7.3" hooks: - - id: pyupgrade - args: [--py38-plus] + - id: editorconfig-checker + alias: ec + - repo: https://github.com/pre-commit/mirrors-mypy - rev: "v1.7.1" # Use the sha / tag you want to point at + rev: "v1.8.0" hooks: - id: mypy additional_dependencies: diff --git a/.prettierignore b/.prettierignore index 344cafca6e..a55074abfb 100644 --- a/.prettierignore +++ b/.prettierignore @@ -6,3 +6,7 @@ testing nf_core/module-template/meta.yml nf_core/module-template/tests/tags.yml nf_core/subworkflow-template/tests/tags.yml +# don't run on things handled by ruff +*.py +*.pyc + diff --git a/CHANGELOG.md b/CHANGELOG.md index 28c02ea066..4ec71a8282 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,51 @@ # nf-core/tools: Changelog +# [v2.12 - Aluminium Wolf](https://github.com/nf-core/tools/releases/tag/2.11) - [2024-01-29] + +### Template + +- Add a Github Action Workflow to the pipeline template that tests a successful download with 'nf-core download' ([#2618](https://github.com/nf-core/tools/pull/2618)) +- Use `pre-commit` to lint files in GitHub CI ([#2635](https://github.com/nf-core/tools/pull/2635)) +- Use pdiff also on gitpod for nf-test ([#2640](https://github.com/nf-core/tools/pull/2640)) +- switch to new image syntax in readme ([#2645](https://github.com/nf-core/tools/pull/2645)) +- Add conda channel order to nextflow.config ([#2094](https://github.com/nf-core/tools/pull/2094)) +- Fix tyop in pipeline nextflow.config ([#2664](https://github.com/nf-core/tools/pull/2664)) +- Remove `nfcore_external_java_deps.jar` from lib directory in pipeline template ([#2675](https://github.com/nf-core/tools/pull/2675)) +- Add function to check `-profile` is well formatted ([#2678](https://github.com/nf-core/tools/pull/2678)) +- Add new pipeline error message pointing to docs when 'requirement 
exceeds available memory' error message ([#2680](https://github.com/nf-core/tools/pull/2680)) +- add 👀👍🏻🎉😕 reactions to fix-linting-bot action ([#2692](https://github.com/nf-core/tools/pull/2692)) + +### Linting + +- Fix linting of a pipeline with patched custom module ([#2669](https://github.com/nf-core/tools/pull/2669)) +- linting a pipeline also lints the installed subworkflows ([#2677](https://github.com/nf-core/tools/pull/2677)) +- environment.yml name must be lowercase ([#2676](https://github.com/nf-core/tools/pull/2676)) +- allow ignoring specific files when template_strings ([#2686](https://github.com/nf-core/tools/pull/2686)) +- lint `nextflow.config` default values match the ones specified in `nextflow_schema.json` ([#2684](https://github.com/nf-core/tools/pull/2684)) + +### Modules + +- Fix empty json output for `nf-core list local` ([#2668](https://github.com/nf-core/tools/pull/2668)) + +### General + +- Run CI-pytests for nf-core tools on self-hosted runners ([#2550](https://github.com/nf-core/tools/pull/2550)) +- Add Ruff linter and formatter replacing Black, isort and pyupgrade ([#2620](https://github.com/nf-core/tools/pull/2620)) +- Set pdiff as nf-test differ in Docker image for Gitpod ([#2642](https://github.com/nf-core/tools/pull/2642)) +- Fix Renovate Dockerfile updating issues ([#2648](https://github.com/nf-core/tools/pull/2648) and [#2651](https://github.com/nf-core/tools/pull/2651)) +- Add new subcommand `nf-core tui`, which launches a TUI (terminal user interface) to intuitively explore the command line flags, built using [Trogon](https://github.com/Textualize/trogon) ([#2655](https://github.com/nf-core/tools/pull/2655)) +- Add new subcommand: `nf-core logo-create` to output an nf-core logo for a pipeline (instead of going through the website) ([#2662](https://github.com/nf-core/tools/pull/2662)) +- Handle api redirects from the old site ([#2672](https://github.com/nf-core/tools/pull/2672)) +- Remove redundanct v in pipeline version for emails ([#2667](https://github.com/nf-core/tools/pull/2667)) +- add function to check `-profile` is well formatted ([#2678](https://github.com/nf-core/tools/pull/2678)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.1.14 ([#2674](https://github.com/nf-core/tools/pull/2674)) +- Update pre-commit hook pre-commit/mirrors-mypy to v1.8.0 ([#2630](https://github.com/nf-core/tools/pull/2630)) +- Update mshick/add-pr-comment action to v2 ([#2632](https://github.com/nf-core/tools/pull/2632)) +- update python image version in docker file ([#2636](https://github.com/nf-core/tools/pull/2636)) +- Update actions/cache action to v4 ([#2666](https://github.com/nf-core/tools/pull/2666)) +- Update peter-evans/create-or-update-comment action to v4 ([#2683](https://github.com/nf-core/tools/pull/2683)) +- Update peter-evans/create-or-update-comment action to v4 ([#2695](https://github.com/nf-core/tools/pull/2695)) + # [v2.11.1 - Magnesium Dragon Patch](https://github.com/nf-core/tools/releases/tag/2.11) - [2023-12-20] ### Template diff --git a/Dockerfile b/Dockerfile index 95d544b26f..9c9770c25f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.11.5-slim +FROM python:3.11-slim LABEL authors="phil.ewels@scilifelab.se,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for the nfcore tools" diff --git a/MANIFEST.in b/MANIFEST.in index 7db3ca4353..5ec177b783 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -5,3 +5,7 @@ graft nf_core/pipeline-template graft nf_core/subworkflow-template include 
requirements.txt include nf_core/.pre-commit-prettier-config.yaml +include nf_core/assets/logo/nf-core-repo-logo-base-lightbg.png +include nf_core/assets/logo/nf-core-repo-logo-base-darkbg.png +include nf_core/assets/logo/placeholder_logo.svg +include nf_core/assets/logo/MavenPro-Bold.ttf diff --git a/README.md b/README.md index 653f5295ea..0adf04d4c6 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,14 @@ -# ![nf-core/tools](docs/images/nfcore-tools_logo_light.png#gh-light-mode-only) ![nf-core/tools](docs/images/nfcore-tools_logo_dark.png#gh-dark-mode-only)
+<h1>
+  <picture>
+    <source media="(prefers-color-scheme: dark)" srcset="docs/images/nfcore-tools_logo_dark.png">
+    <img alt="nf-core/tools" src="docs/images/nfcore-tools_logo_light.png">
+  </picture>
+</h1>

[![Python tests](https://github.com/nf-core/tools/workflows/Python%20tests/badge.svg?branch=master&event=push)](https://github.com/nf-core/tools/actions?query=workflow%3A%22Python+tests%22+branch%3Amaster) [![codecov](https://codecov.io/gh/nf-core/tools/branch/master/graph/badge.svg)](https://codecov.io/gh/nf-core/tools) -[![code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![code style: prettier](https://img.shields.io/badge/code%20style-prettier-ff69b4.svg)](https://github.com/prettier/prettier) -[![Imports: isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://pycqa.github.io/isort/) +[![code style: Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v1.json)](https://github.com/charliermarsh/ruff) [![install with Bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/recipes/nf-core/README.html) [![install with PyPI](https://img.shields.io/badge/install%20with-PyPI-blue.svg)](https://pypi.org/project/nf-core/) @@ -28,6 +32,8 @@ A python package with helper tools for the nf-core community. - [`nf-core schema` - Work with pipeline schema files](#pipeline-schema) - [`nf-core bump-version` - Update nf-core pipeline version number](#bumping-a-pipeline-version-number) - [`nf-core sync` - Synchronise pipeline TEMPLATE branches](#sync-a-pipeline-with-the-template) +- [`nf-core create-logo` - Create an nf-core pipeline logo](#create-an-nf-core-pipeline-logo) +- [`nf-core tui` - Explore the nf-core command line graphically](#tools-cli-tui) - [`nf-core modules` - commands for dealing with DSL2 modules](#modules) - [`modules list` - List available modules](#list-modules) @@ -727,6 +733,29 @@ To create the pull request, a personal access token is required for API authenti These can be created at [https://github.com/settings/tokens](https://github.com/settings/tokens). Supply this using the `--auth-token` flag. +## Create an nf-core pipeline logo + +The `nf-core create-logo` command creates a logo for your pipeline based on the nf-core template and the pipeline name. You can specify the width of the logo in pixels with the `--width` flag. Additionally, you can specify the output format to be either `png` or `svg` with the `--format` flag. The default format is `png`. + +Usage is `nf-core create-logo `, eg: + + + +![`nf-core create-logo nextbigthing`](docs/images/nf-core-create-logo.svg) + +## Tools CLI TUI + +_CLI:_ Command line interface +_TUI:_ Terminal user interface + +The `nf-core` command line interface is fairly large, with a lot of commands and options. +To make it easier to explore and use, run `nf-core tui` to launch a graphical terminal interface. + +This functionality works using [Textualize/trogon](https://github.com/Textualize/trogon) +and is based on the underlying CLI implementation that uses [Click](https://click.palletsprojects.com/). + ## Modules With the advent of [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html), we are creating a centralised repository of modules. 
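The README additions above introduce two new subcommands, `nf-core create-logo` and `nf-core tui`. As an illustrative sketch based only on the flags described in that text (exact option names and ordering should be confirmed with `--help`):

```bash
# Create a logo for a pipeline called "nextbigthing" (PNG is the default format)
nf-core create-logo nextbigthing

# Hypothetical example: a 1200 px wide SVG logo, using the --width and --format flags described above
nf-core create-logo --width 1200 --format svg nextbigthing

# Launch the Trogon-based terminal UI to browse all nf-core commands and options
nf-core tui
```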
diff --git a/codecov.yml b/codecov.yml index 1ecf8960c0..11a63f8bbf 100644 --- a/codecov.yml +++ b/codecov.yml @@ -4,3 +4,6 @@ coverage: default: threshold: 5% patch: off +comment: + layout: "condensed_header, condensed_files, condensed_footer" + require_changes: true diff --git a/docs/api/Makefile b/docs/api/Makefile index f961e4ded1..ab30a5051e 100644 --- a/docs/api/Makefile +++ b/docs/api/Makefile @@ -16,4 +16,4 @@ help: # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py index 27eaf9bcb3..bfdbd7888d 100644 --- a/docs/api/_src/conf.py +++ b/docs/api/_src/conf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # # Configuration file for the Sphinx documentation builder. # @@ -16,9 +15,10 @@ import sys from typing import Dict -sys.path.insert(0, os.path.abspath("../../../nf_core")) import nf_core +sys.path.insert(0, os.path.abspath("../../../nf_core")) + # -- Project information ----------------------------------------------------- project = "nf-core/tools" diff --git a/docs/images/nf-core-bump-version.svg b/docs/images/nf-core-bump-version.svg index c57af953e3..602b18304d 100644 --- a/docs/images/nf-core-bump-version.svg +++ b/docs/images/nf-core-bump-version.svg @@ -19,122 +19,122 @@ font-weight: 700; } - .terminal-2993841204-matrix { + .terminal-760177622-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2993841204-title { + .terminal-760177622-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2993841204-r1 { fill: #c5c8c6 } -.terminal-2993841204-r2 { fill: #98a84b } -.terminal-2993841204-r3 { fill: #9a9b99 } -.terminal-2993841204-r4 { fill: #608ab1 } -.terminal-2993841204-r5 { fill: #d0b344 } -.terminal-2993841204-r6 { fill: #cc555a } + .terminal-760177622-r1 { fill: #c5c8c6 } +.terminal-760177622-r2 { fill: #98a84b } +.terminal-760177622-r3 { fill: #9a9b99 } +.terminal-760177622-r4 { fill: #608ab1 } +.terminal-760177622-r5 { fill: #d0b344 } +.terminal-760177622-r6 { fill: #cc555a } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -146,41 +146,41 @@ - + - - $ nf-core bump-version 1.1 - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - -INFO     Changing version number from '1.0dev' to '1.1' -INFO     Updated version in 'nextflow.config' - - version         = '1.0dev' - + version         = '1.1' - - -INFO     Updated version in 'assets/multiqc_config.yml' - - This report has been generated by the <a  -href="https://github.com/nf-core/nextbigthing/tree/dev" target="_blank">nf-core/nextbigthing</a> - + This report has been generated by the <a  -href="https://github.com/nf-core/nextbigthing/releases/tag/1.1"  -target="_blank">nf-core/nextbigthing</a> - - -INFO     Updated version in 'assets/multiqc_config.yml' - - <a href="https://nf-co.re/nextbigthing/dev/docs/output"  -target="_blank">documentation</a>. 
- + <a href="https://nf-co.re/nextbigthing/1.1/docs/output"  -target="_blank">documentation</a>. - - + + $ nf-core bump-version 1.1 + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + +INFO     Changing version number from '1.0dev' to '1.1' +INFO     Updated version in 'nextflow.config' + - version         = '1.0dev' + + version         = '1.1' + + +INFO     Updated version in 'assets/multiqc_config.yml' + - This report has been generated by the <a  +href="https://github.com/nf-core/nextbigthing/tree/dev" target="_blank">nf-core/nextbigthing</a> + + This report has been generated by the <a  +href="https://github.com/nf-core/nextbigthing/releases/tag/1.1"  +target="_blank">nf-core/nextbigthing</a> + + +INFO     Updated version in 'assets/multiqc_config.yml' + - <a href="https://nf-co.re/nextbigthing/dev/docs/output"  +target="_blank">documentation</a>. + + <a href="https://nf-co.re/nextbigthing/1.1/docs/output"  +target="_blank">documentation</a>. + + diff --git a/docs/images/nf-core-create-logo.svg b/docs/images/nf-core-create-logo.svg new file mode 100644 index 0000000000..b4d88480d9 --- /dev/null +++ b/docs/images/nf-core-create-logo.svg @@ -0,0 +1,108 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + $ nf-core create-logo nextbigthing + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + +INFO     Created logo: /home/runner/work/tools/tools/tmp/nf-core-nextbigthing_logo_light.png + + + + diff --git a/docs/images/nf-core-create.svg b/docs/images/nf-core-create.svg index 99a7112fb4..79e349f4fc 100644 --- a/docs/images/nf-core-create.svg +++ b/docs/images/nf-core-create.svg @@ -19,104 +19,104 @@ font-weight: 700; } - .terminal-3963146934-matrix { + .terminal-1826411096-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3963146934-title { + .terminal-1826411096-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3963146934-r1 { fill: #c5c8c6 } -.terminal-3963146934-r2 { fill: #98a84b } -.terminal-3963146934-r3 { fill: #9a9b99 } -.terminal-3963146934-r4 { fill: #608ab1 } -.terminal-3963146934-r5 { fill: #d0b344 } -.terminal-3963146934-r6 { fill: #98729f } -.terminal-3963146934-r7 { fill: #ff2c7a } -.terminal-3963146934-r8 { fill: #98a84b;font-weight: bold } -.terminal-3963146934-r9 { fill: #1984e9;text-decoration: underline; } + .terminal-1826411096-r1 { fill: #c5c8c6 } +.terminal-1826411096-r2 { fill: #98a84b } +.terminal-1826411096-r3 { fill: #9a9b99 } +.terminal-1826411096-r4 { fill: #608ab1 } +.terminal-1826411096-r5 { fill: #d0b344 } +.terminal-1826411096-r6 { fill: #98729f } +.terminal-1826411096-r7 { fill: #ff2c7a } +.terminal-1826411096-r8 { fill: #98a84b;font-weight: bold } +.terminal-1826411096-r9 { fill: #1984e9;text-decoration: underline; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -128,34 +128,34 @@ - + - - $ nf-core create -n 
nextbigthing -d "This pipeline analyses data from the next big omics technique"  --a "Big Steve" --plain - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - -INFO     Creating new nf-core pipeline: 'nf-core/nextbigthing' -INFO     Initialising pipeline git repository                                                        -INFO     Done. Remember to add a remote and push to GitHub:                                          - cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing - git remote add origin git@github.com:USERNAME/REPO_NAME.git  - git push --all origin                                        -INFO     This will also push your newly created dev branch and the TEMPLATE branch for syncing.      -INFO    !!!!!! IMPORTANT !!!!!! - -If you are interested in adding your pipeline to the nf-core community, -PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE! - -Please read: https://nf-co.re/developers/adding_pipelines#join-the-community + + $ nf-core create -n nextbigthing -d "This pipeline analyses data from the next big omics technique"  +-a "Big Steve" --plain + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + +INFO     Creating new nf-core pipeline: 'nf-core/nextbigthing' +INFO     Initialising pipeline git repository                                                        +INFO     Done. Remember to add a remote and push to GitHub:                                          + cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing + git remote add origin git@github.com:USERNAME/REPO_NAME.git  + git push --all origin                                        +INFO     This will also push your newly created dev branch and the TEMPLATE branch for syncing.      +INFO    !!!!!! IMPORTANT !!!!!! + +If you are interested in adding your pipeline to the nf-core community, +PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE! 
+ +Please read: https://nf-co.re/developers/adding_pipelines#join-the-community diff --git a/docs/images/nf-core-download.svg b/docs/images/nf-core-download.svg index cc6b17ebfd..05b7456fbd 100644 --- a/docs/images/nf-core-download.svg +++ b/docs/images/nf-core-download.svg @@ -19,86 +19,86 @@ font-weight: 700; } - .terminal-2599349710-matrix { + .terminal-1937960304-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2599349710-title { + .terminal-1937960304-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2599349710-r1 { fill: #c5c8c6 } -.terminal-2599349710-r2 { fill: #98a84b } -.terminal-2599349710-r3 { fill: #9a9b99 } -.terminal-2599349710-r4 { fill: #608ab1 } -.terminal-2599349710-r5 { fill: #d0b344 } -.terminal-2599349710-r6 { fill: #cc555a } + .terminal-1937960304-r1 { fill: #c5c8c6 } +.terminal-1937960304-r2 { fill: #98a84b } +.terminal-1937960304-r3 { fill: #9a9b99 } +.terminal-1937960304-r4 { fill: #608ab1 } +.terminal-1937960304-r5 { fill: #d0b344 } +.terminal-1937960304-r6 { fill: #cc555a } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -110,29 +110,29 @@ - + - - $ nf-core download rnaseq -r 3.8 --outdir nf-core-rnaseq -x none -s none -d - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - -WARNING  Could not find GitHub authentication token. Some API requests may fail.                     -INFO     Saving 'nf-core/rnaseq' -          Pipeline revision: '3.8' -          Use containers: 'none' -          Container library: 'quay.io' -          Output directory: 'nf-core-rnaseq' -          Include default institutional configuration: 'True' -INFO     Downloading centralised configs from GitHub                                                 -INFO     Downloading workflow files from GitHub                                                      + + $ nf-core download rnaseq -r 3.8 --outdir nf-core-rnaseq -x none -s none -d + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + +WARNING  Could not find GitHub authentication token. Some API requests may fail.                     
+INFO     Saving 'nf-core/rnaseq' +          Pipeline revision: '3.8' +          Use containers: 'none' +          Container library: 'quay.io' +          Output directory: 'nf-core-rnaseq' +          Include default institutional configuration: 'True' +INFO     Downloading centralised configs from GitHub                                                 +INFO     Downloading workflow files from GitHub                                                      diff --git a/docs/images/nf-core-launch-rnaseq.svg b/docs/images/nf-core-launch-rnaseq.svg index b76e4893c5..8956e15703 100644 --- a/docs/images/nf-core-launch-rnaseq.svg +++ b/docs/images/nf-core-launch-rnaseq.svg @@ -19,72 +19,72 @@ font-weight: 700; } - .terminal-3706564145-matrix { + .terminal-1862905299-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3706564145-title { + .terminal-1862905299-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3706564145-r1 { fill: #c5c8c6 } -.terminal-3706564145-r2 { fill: #98a84b } -.terminal-3706564145-r3 { fill: #9a9b99 } -.terminal-3706564145-r4 { fill: #608ab1 } -.terminal-3706564145-r5 { fill: #d0b344 } -.terminal-3706564145-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-3706564145-r7 { fill: #68a0b3;font-weight: bold } + .terminal-1862905299-r1 { fill: #c5c8c6 } +.terminal-1862905299-r2 { fill: #98a84b } +.terminal-1862905299-r3 { fill: #9a9b99 } +.terminal-1862905299-r4 { fill: #608ab1 } +.terminal-1862905299-r5 { fill: #d0b344 } +.terminal-1862905299-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-1862905299-r7 { fill: #68a0b3;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -96,24 +96,24 @@ - + - - $ nf-core launch rnaseq -r 3.8.1 - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - -INFO     NOTE: This tool ignores any pipeline parameter defaults overwritten by Nextflow config      -         files or profiles                                                                           - -INFO     Downloading workflow: nf-core/rnaseq (3.8.1) + + $ nf-core launch rnaseq -r 3.8.1 + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + +INFO     NOTE: This tool ignores any pipeline parameter defaults overwritten by Nextflow config      +         files or profiles                                                                           + +INFO     Downloading workflow: nf-core/rnaseq (3.8.1) diff --git a/docs/images/nf-core-licences.svg b/docs/images/nf-core-licences.svg index 934458cf62..0512d5c954 100644 --- a/docs/images/nf-core-licences.svg +++ b/docs/images/nf-core-licences.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - + - - $ nf-core licences deepvariant - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - -INFO     Fetching licence information for 8 tools                                                    -INFO     Warning: This tool only prints licence information for the software tools packaged using    -         conda.                                                                                      -INFO     The pipeline may use other software and dependencies not described here.                    -┏━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┓ -Package NameVersionLicence -┡━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━┩ -│ lbzip2       │ 2.5     │ GPL v3  │ -│ deepvariant  │ 0.7.0   │ MIT     │ -│ htslib       │ 1.9     │ MIT     │ -│ picard       │ 2.18.7  │ MIT     │ -│ pip          │ 10.0.1  │ MIT     │ -│ samtools     │ 1.9     │ MIT     │ -│ python       │ 2.7.15  │ PSF     │ -│ bzip2        │ 1.0.6   │ bzip2   │ -└──────────────┴─────────┴─────────┘ + + $ nf-core licences deepvariant + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + +INFO     Fetching licence information for 8 tools                                                    diff --git a/docs/images/nf-core-lint.svg b/docs/images/nf-core-lint.svg index 1c6f1dd1b0..140f02d3f2 100644 --- a/docs/images/nf-core-lint.svg +++ b/docs/images/nf-core-lint.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + - + - - - - $ nf-core lint - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - - -INFO     Testing pipeline: . - - -╭─[?] 1 Pipeline Test Ignored────────────────────────────────────────────────────────────────────╮ - -pipeline_todos: pipeline_todos - -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─[!] 1 Pipeline Test Warning────────────────────────────────────────────────────────────────────╮ - -readme: README contains the placeholder zenodo.XXXXXXX. This should be replaced with the zenodo  -doi (after the first release). - -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ - -╭──────────────────────╮ -LINT RESULTS SUMMARY -├──────────────────────┤ -[✔] 192 Tests Passed -[?]   1 Test Ignored -[!]   1 Test Warning -[✗]   0 Tests Failed -╰──────────────────────╯ + + + + $ nf-core lint + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + + +INFO     Testing pipeline: . +INFO     Files to ignore: [] + + +╭─[?] 
1 Pipeline Test Ignored────────────────────────────────────────────────────────────────────╮ + +pipeline_todos: pipeline_todos                                                                   + +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─[!] 1 Pipeline Test Warning────────────────────────────────────────────────────────────────────╮ + +readme: README contains the placeholder zenodo.XXXXXXX. This should be replaced with the zenodo  +doi (after the first release).                                                                   + +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ + +╭──────────────────────╮ +LINT RESULTS SUMMARY +├──────────────────────┤ +[✔] 199 Tests Passed +[?]   1 Test Ignored +[!]   1 Test Warning +[✗]   0 Tests Failed +╰──────────────────────╯ diff --git a/docs/images/nf-core-list-rna.svg b/docs/images/nf-core-list-rna.svg index ed22bfbc6f..deb9d390e2 100644 --- a/docs/images/nf-core-list-rna.svg +++ b/docs/images/nf-core-list-rna.svg @@ -19,111 +19,111 @@ font-weight: 700; } - .terminal-1199297796-matrix { + .terminal-1343542234-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1199297796-title { + .terminal-1343542234-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1199297796-r1 { fill: #c5c8c6 } -.terminal-1199297796-r2 { fill: #98a84b } -.terminal-1199297796-r3 { fill: #9a9b99 } -.terminal-1199297796-r4 { fill: #608ab1 } -.terminal-1199297796-r5 { fill: #d0b344 } -.terminal-1199297796-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-1199297796-r7 { fill: #868887 } + .terminal-1343542234-r1 { fill: #c5c8c6 } +.terminal-1343542234-r2 { fill: #98a84b } +.terminal-1343542234-r3 { fill: #9a9b99 } +.terminal-1343542234-r4 { fill: #608ab1 } +.terminal-1343542234-r5 { fill: #d0b344 } +.terminal-1343542234-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-1343542234-r7 { fill: #868887 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -135,37 +135,37 @@ - + - - $ nf-core list rna rna-seq - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ -Have latest         -Pipeline Name       StarsLatest Release    ReleasedLast Pulledrelease?            
-┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ -│ rnafusion            │   120 │          3.0.1 │  3 weeks ago │           - │ -                   │ -│ rnaseq               │   704 │         3.13.2 │  4 weeks ago │           - │ -                   │ -│ differentialabundan… │    32 │          1.4.0 │  3 weeks ago │           - │ -                   │ -│ spatialtranscriptom… │    31 │            dev │  3 weeks ago │           - │ -                   │ -│ circrna              │    33 │            dev │  3 weeks ago │           - │ -                   │ -│ rnasplice            │    14 │          1.0.1 │ 1 months ago │           - │ -                   │ -│ smrnaseq             │    60 │          2.2.4 │ 2 months ago │           - │ -                   │ -│ scrnaseq             │   110 │          2.4.1 │ 3 months ago │           - │ -                   │ -│ dualrnaseq           │    13 │          1.0.0 │  3 years ago │           - │ -                   │ -│ marsseq              │     4 │          1.0.3 │ 5 months ago │           - │ -                   │ -│ lncpipe              │    26 │            dev │  1 years ago │           - │ -                   │ -│ scflow               │    22 │            dev │  2 years ago │           - │ -                   │ -└──────────────────────┴───────┴────────────────┴──────────────┴─────────────┴─────────────────────┘ + + $ nf-core list rna rna-seq + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + +┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ +Have latest         +Pipeline Name       StarsLatest Release    ReleasedLast Pulledrelease?            
+┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ +│ rnafusion            │   125 │          3.0.1 │ 2 months ago │           - │ -                   │ +│ differentialabundan… │    35 │          1.4.0 │ 2 months ago │           - │ -                   │ +│ circrna              │    33 │            dev │   2 days ago │           - │ -                   │ +│ smrnaseq             │    63 │          2.2.4 │ 3 months ago │           - │ -                   │ +│ spatialtranscriptom… │    35 │            dev │   3 days ago │           - │ -                   │ +│ scrnaseq             │   119 │          2.5.1 │   6 days ago │           - │ -                   │ +│ rnaseq               │   722 │         3.14.0 │  3 weeks ago │           - │ -                   │ +│ rnasplice            │    20 │          1.0.2 │  3 weeks ago │           - │ -                   │ +│ dualrnaseq           │    15 │          1.0.0 │  3 years ago │           - │ -                   │ +│ marsseq              │     5 │          1.0.3 │ 6 months ago │           - │ -                   │ +│ lncpipe              │    27 │            dev │  1 years ago │           - │ -                   │ +│ scflow               │    23 │            dev │  2 years ago │           - │ -                   │ +└──────────────────────┴───────┴────────────────┴──────────────┴─────────────┴─────────────────────┘ diff --git a/docs/images/nf-core-list-stars.svg b/docs/images/nf-core-list-stars.svg index 01431bf102..f9a9d3d042 100644 --- a/docs/images/nf-core-list-stars.svg +++ b/docs/images/nf-core-list-stars.svg @@ -19,88 +19,88 @@ font-weight: 700; } - .terminal-11073929-matrix { + .terminal-926284067-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-11073929-title { + .terminal-926284067-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-11073929-r1 { fill: #c5c8c6 } -.terminal-11073929-r2 { fill: #98a84b } -.terminal-11073929-r3 { fill: #9a9b99 } -.terminal-11073929-r4 { fill: #608ab1 } -.terminal-11073929-r5 { fill: #d0b344 } -.terminal-11073929-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-11073929-r7 { fill: #868887 } -.terminal-11073929-r8 { fill: #868887;font-style: italic; } + .terminal-926284067-r1 { fill: #c5c8c6 } +.terminal-926284067-r2 { fill: #98a84b } +.terminal-926284067-r3 { fill: #9a9b99 } +.terminal-926284067-r4 { fill: #608ab1 } +.terminal-926284067-r5 { fill: #d0b344 } +.terminal-926284067-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-926284067-r7 { fill: #868887 } +.terminal-926284067-r8 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -112,29 +112,29 @@ - + - - $ nf-core list -s stars - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ -Have latest         -Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            
-┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ -│ rnaseq              │   704 │         3.13.2 │   4 weeks ago │           - │ -                   │ -│ sarek               │   288 │          3.4.0 │  1 months ago │           - │ -                   │ -│ chipseq             │   157 │          2.0.0 │   1 years ago │           - │ -                   │ -│ mag                 │   156 │          2.5.1 │  1 months ago │           - │ -                   │ -[..truncated..] + + $ nf-core list -s stars + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + +┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ +Have latest         +Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            +┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ +│ rnaseq              │   722 │         3.14.0 │   3 weeks ago │           - │ -                   │ +│ sarek               │   303 │          3.4.0 │  2 months ago │           - │ -                   │ +│ mag                 │   167 │          2.5.1 │  2 months ago │           - │ -                   │ +│ chipseq             │   159 │          2.0.0 │   1 years ago │           - │ -                   │ +[..truncated..] diff --git a/docs/images/nf-core-list.svg b/docs/images/nf-core-list.svg index d0b37306a3..e42799f98a 100644 --- a/docs/images/nf-core-list.svg +++ b/docs/images/nf-core-list.svg @@ -19,91 +19,91 @@ font-weight: 700; } - .terminal-691768826-matrix { + .terminal-1472038847-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-691768826-title { + .terminal-1472038847-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-691768826-r1 { fill: #c5c8c6 } -.terminal-691768826-r2 { fill: #98a84b } -.terminal-691768826-r3 { fill: #9a9b99 } -.terminal-691768826-r4 { fill: #608ab1 } -.terminal-691768826-r5 { fill: #d0b344 } -.terminal-691768826-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-691768826-r7 { fill: #868887 } -.terminal-691768826-r8 { fill: #868887;font-style: italic; } + .terminal-1472038847-r1 { fill: #c5c8c6 } +.terminal-1472038847-r2 { fill: #98a84b } +.terminal-1472038847-r3 { fill: #9a9b99 } +.terminal-1472038847-r4 { fill: #608ab1 } +.terminal-1472038847-r5 { fill: #d0b344 } +.terminal-1472038847-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-1472038847-r7 { fill: #868887 } +.terminal-1472038847-r8 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -115,30 +115,30 @@ - + - - $ nf-core list - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ -Have latest         -Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            
-┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ -│ callingcards        │     2 │            dev │  18 hours ago │           - │ -                   │ -│ ampliseq            │   140 │          2.7.1 │  1 months ago │           - │ -                   │ -│ raredisease         │    63 │          1.1.1 │  5 months ago │           - │ -                   │ -│ epitopeprediction   │    27 │          2.2.1 │  9 months ago │           - │ -                   │ -│ metatdenovo         │     3 │            dev │     yesterday │           - │ -                   │ -[..truncated..] + + $ nf-core list + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + +┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ +Have latest         +Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            +┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ +│ rnafusion           │   125 │          3.0.1 │  2 months ago │           - │ -                   │ +│ differentialabunda… │    35 │          1.4.0 │  2 months ago │           - │ -                   │ +│ raredisease         │    64 │          1.1.1 │  6 months ago │           - │ -                   │ +│ circrna             │    33 │            dev │    2 days ago │           - │ -                   │ +│ mag                 │   167 │          2.5.1 │  2 months ago │           - │ -                   │ +[..truncated..] diff --git a/docs/images/nf-core-modules-bump-version.svg b/docs/images/nf-core-modules-bump-version.svg index 13be58189e..657b1ffbc5 100644 --- a/docs/images/nf-core-modules-bump-version.svg +++ b/docs/images/nf-core-modules-bump-version.svg @@ -19,90 +19,90 @@ font-weight: 700; } - .terminal-1176762434-matrix { + .terminal-446953444-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1176762434-title { + .terminal-446953444-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1176762434-r1 { fill: #c5c8c6 } -.terminal-1176762434-r2 { fill: #98a84b } -.terminal-1176762434-r3 { fill: #9a9b99 } -.terminal-1176762434-r4 { fill: #608ab1 } -.terminal-1176762434-r5 { fill: #d0b344 } -.terminal-1176762434-r6 { fill: #98a84b;font-weight: bold } -.terminal-1176762434-r7 { fill: #c5c8c6;font-weight: bold } + .terminal-446953444-r1 { fill: #c5c8c6 } +.terminal-446953444-r2 { fill: #98a84b } +.terminal-446953444-r3 { fill: #9a9b99 } +.terminal-446953444-r4 { fill: #608ab1 } +.terminal-446953444-r5 { fill: #d0b344 } +.terminal-446953444-r6 { fill: #98a84b;font-weight: bold } +.terminal-446953444-r7 { fill: #c5c8c6;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -114,30 +114,30 @@ - + - - $ nf-core modules bump-versions fastqc - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - - - -╭──────────────────────────────────────────────────────────────────────────────────────────────────╮ -[!] 1 Module version up to date. -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭──────────────────────────────────────────┬───────────────────────────────────────────────────────╮ -Module name                             Update Message                                        -├──────────────────────────────────────────┼───────────────────────────────────────────────────────┤ - fastqc                                    Module version up to date: fastqc                      -╰──────────────────────────────────────────┴───────────────────────────────────────────────────────╯ + + $ nf-core modules bump-versions fastqc + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + + + +╭──────────────────────────────────────────────────────────────────────────────────────────────────╮ +[!] 1 Module version up to date. +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭──────────────────────────────────────────┬───────────────────────────────────────────────────────╮ +Module name                             Update Message                                        +├──────────────────────────────────────────┼───────────────────────────────────────────────────────┤ + fastqc                                    Module version up to date: fastqc                      +╰──────────────────────────────────────────┴───────────────────────────────────────────────────────╯ diff --git a/docs/images/nf-core-modules-create.svg b/docs/images/nf-core-modules-create.svg index 7cf8b9ca99..17fb2e365e 100644 --- a/docs/images/nf-core-modules-create.svg +++ b/docs/images/nf-core-modules-create.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - + - + - - $ nf-core modules create fastqc --author @nf-core-bot  --label process_low --meta --force - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - - -INFO     Repository type: modules -INFO    Press enter to use default values (shown in brackets)or type your own responses.  -ctrl+click underlined text to open links. 
-INFO     Using Bioconda package: 'bioconda::fastqc=0.12.1' -INFO     Using Docker container: 'biocontainers/fastqc:0.12.1--hdfd78af_0' -INFO     Using Singularity container:                                                                -'https://depot.galaxyproject.org/singularity/fastqc:0.12.1--hdfd78af_0' -INFO     Created component template: 'fastqc' -INFO     Created following files:                                                                    -           modules/nf-core/fastqc/main.nf                                                            -           modules/nf-core/fastqc/meta.yml                                                           -           modules/nf-core/fastqc/environment.yml                                                    -           modules/nf-core/fastqc/tests/tags.yml                                                     -           modules/nf-core/fastqc/tests/main.nf.test                                                 + + $ nf-core modules create fastqc --author @nf-core-bot  --label process_low --meta --force + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + + +INFO     Repository type: modules +INFO    Press enter to use default values (shown in brackets)or type your own responses.  +ctrl+click underlined text to open links. +INFO     Using Bioconda package: 'bioconda::fastqc=0.12.1' +INFO     Could not find a Docker/Singularity container (Unexpected response code `503` for           +https://api.biocontainers.pro/ga4gh/trs/v2/tools/fastqc/versions/fastqc-0.12.1) +INFO     Created component template: 'fastqc' +INFO     Created following files:                                                                    +           modules/nf-core/fastqc/main.nf                                                            +           modules/nf-core/fastqc/meta.yml                                                           +           modules/nf-core/fastqc/environment.yml                                                    +           modules/nf-core/fastqc/tests/tags.yml                                                     +           modules/nf-core/fastqc/tests/main.nf.test                                                 diff --git a/docs/images/nf-core-modules-info.svg b/docs/images/nf-core-modules-info.svg index b604cb3f7b..65b3256542 100644 --- a/docs/images/nf-core-modules-info.svg +++ b/docs/images/nf-core-modules-info.svg @@ -19,163 +19,163 @@ font-weight: 700; } - .terminal-149484079-matrix { + .terminal-2512974289-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-149484079-title { + .terminal-2512974289-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-149484079-r1 { fill: #c5c8c6 } -.terminal-149484079-r2 { fill: #98a84b } -.terminal-149484079-r3 { fill: #9a9b99 } -.terminal-149484079-r4 { fill: #608ab1 } -.terminal-149484079-r5 { fill: #d0b344 } -.terminal-149484079-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-149484079-r7 { fill: #98a84b;font-weight: bold } -.terminal-149484079-r8 { fill: #868887 } -.terminal-149484079-r9 { fill: #d08442 } -.terminal-149484079-r10 { fill: #868887;font-style: italic; } -.terminal-149484079-r11 { fill: #98729f } + .terminal-2512974289-r1 { 
fill: #c5c8c6 } +.terminal-2512974289-r2 { fill: #98a84b } +.terminal-2512974289-r3 { fill: #9a9b99 } +.terminal-2512974289-r4 { fill: #608ab1 } +.terminal-2512974289-r5 { fill: #d0b344 } +.terminal-2512974289-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-2512974289-r7 { fill: #98a84b;font-weight: bold } +.terminal-2512974289-r8 { fill: #868887 } +.terminal-2512974289-r9 { fill: #d08442 } +.terminal-2512974289-r10 { fill: #868887;font-style: italic; } +.terminal-2512974289-r11 { fill: #98729f } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -187,53 +187,53 @@ - + - - $ nf-core modules info abacas - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - - -╭─ Module: abacas  ────────────────────────────────────────────────────────────────────────────────╮ -│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ -│ 🔧 Tools: abacas                                                                                 │ -│ 📖 Description: contiguate draft genome assembly                                                 │ -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -                  ╷                                                                   ╷              -📥 Inputs        Description                                                             Pattern -╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ - meta  (map)     │Groovy Map containing sample information e.g. [ id:'test',         │ -                  │single_end:false ]                                                 │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - scaffold  (file)│Fasta file containing scaffold                                     │*.{fasta,fa} -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - fasta  (file)   │FASTA reference file                                               │*.{fasta,fa} -                  ╵                                                                   ╵              -                  ╷                                                                   ╷              -📤 Outputs       Description                                                             Pattern -╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ - meta  (map)     │Groovy Map containing sample information e.g. 
[ id:'test',         │ -                  │single_end:false ]                                                 │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - results  (file) │List containing abacas output files [ 'test.abacas.bin',           │ *.{abacas}* -                  │'test.abacas.fasta', 'test.abacas.gaps', 'test.abacas.gaps.tab',   │ -                  │'test.abacas.nucmer.delta', 'test.abacas.nucmer.filtered.delta',   │ -                  │'test.abacas.nucmer.tiling', 'test.abacas.tab',                    │ -                  │'test.abacas.unused.contigs.out', 'test.abacas.MULTIFASTA.fa' ]    │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - versions  (file)│File containing software versions                                  │versions.yml -                  ╵                                                                   ╵              - - 💻  Installation command: nf-core modules install abacas - + + $ nf-core modules info abacas + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + + +╭─ Module: abacas  ────────────────────────────────────────────────────────────────────────────────╮ +│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ +│ 🔧 Tools: abacas                                                                                 │ +│ 📖 Description: contiguate draft genome assembly                                                 │ +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ +                  ╷                                                                   ╷              +📥 Inputs        Description                                                             Pattern +╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ + meta  (map)     │Groovy Map containing sample information e.g. [ id:'test',         │ +                  │single_end:false ]                                                 │ +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + scaffold  (file)│Fasta file containing scaffold                                     │*.{fasta,fa} +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + fasta  (file)   │FASTA reference file                                               │*.{fasta,fa} +                  ╵                                                                   ╵              +                  ╷                                                                   ╷              +📤 Outputs       Description                                                             Pattern +╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ + meta  (map)     │Groovy Map containing sample information e.g. 
[ id:'test',         │ +                  │single_end:false ]                                                 │ +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + results  (file) │List containing abacas output files [ 'test.abacas.bin',           │ *.{abacas}* +                  │'test.abacas.fasta', 'test.abacas.gaps', 'test.abacas.gaps.tab',   │ +                  │'test.abacas.nucmer.delta', 'test.abacas.nucmer.filtered.delta',   │ +                  │'test.abacas.nucmer.tiling', 'test.abacas.tab',                    │ +                  │'test.abacas.unused.contigs.out', 'test.abacas.MULTIFASTA.fa' ]    │ +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + versions  (file)│File containing software versions                                  │versions.yml +                  ╵                                                                   ╵              + + 💻  Installation command: nf-core modules install abacas + diff --git a/docs/images/nf-core-modules-install.svg b/docs/images/nf-core-modules-install.svg index f444190f32..8e5ba2ee29 100644 --- a/docs/images/nf-core-modules-install.svg +++ b/docs/images/nf-core-modules-install.svg @@ -19,76 +19,76 @@ font-weight: 700; } - .terminal-160730253-matrix { + .terminal-2712767535-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-160730253-title { + .terminal-2712767535-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-160730253-r1 { fill: #c5c8c6 } -.terminal-160730253-r2 { fill: #98a84b } -.terminal-160730253-r3 { fill: #9a9b99 } -.terminal-160730253-r4 { fill: #608ab1 } -.terminal-160730253-r5 { fill: #d0b344 } + .terminal-2712767535-r1 { fill: #c5c8c6 } +.terminal-2712767535-r2 { fill: #98a84b } +.terminal-2712767535-r3 { fill: #9a9b99 } +.terminal-2712767535-r4 { fill: #608ab1 } +.terminal-2712767535-r5 { fill: #d0b344 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -100,26 +100,26 @@ - + - - $ nf-core modules install abacas - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - - -INFO     Installing 'abacas' -INFO     Use the following statement to include this module:                                         - - include { ABACAS } from '../modules/nf-core/abacas/main'                                            - + + $ nf-core modules install abacas + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + + +INFO     Installing 'abacas' +INFO     Use the following statement to include this module:                                         + + include { ABACAS } from '../modules/nf-core/abacas/main'                                            + diff --git a/docs/images/nf-core-modules-lint.svg b/docs/images/nf-core-modules-lint.svg index 03edcaf551..1ef2c54aed 100644 --- a/docs/images/nf-core-modules-lint.svg +++ b/docs/images/nf-core-modules-lint.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - - - - $ nf-core modules lint multiqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - - -INFO     Linting modules repo: '.' -INFO     Linting module: 'multiqc' -INFO     Found 4 inputs in modules/nf-core/multiqc/main.nf                                           -INFO     Found 4 outputs in modules/nf-core/multiqc/main.nf                                          - -╭─[!] 1 Module Test Warning──────────────────────────────────────────────────────────────────────╮ -              ╷                                 ╷                                                  -Module name File path                      Test message                                    -╶─────────────┼─────────────────────────────────┼────────────────────────────────────────────────╴ -multiqcmodules/nf-core/multiqc/main.nfConda update: bioconda::multiqc 1.18 -> 1.19 -              ╵                                 ╵                                                  -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭──────────────────────╮ -LINT RESULTS SUMMARY -├──────────────────────┤ -[✔]  50 Tests Passed -[!]   1 Test Warning -[✗]   0 Tests Failed -╰──────────────────────╯ + + + + $ nf-core modules lint multiqc + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + + +INFO     Linting modules repo: '.' 
+INFO     Linting module: 'multiqc' diff --git a/docs/images/nf-core-modules-list-local.svg b/docs/images/nf-core-modules-list-local.svg index a1b2806467..a94545663b 100644 --- a/docs/images/nf-core-modules-list-local.svg +++ b/docs/images/nf-core-modules-list-local.svg @@ -19,108 +19,108 @@ font-weight: 700; } - .terminal-1750863134-matrix { + .terminal-114288408-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1750863134-title { + .terminal-114288408-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1750863134-r1 { fill: #c5c8c6 } -.terminal-1750863134-r2 { fill: #98a84b } -.terminal-1750863134-r3 { fill: #9a9b99 } -.terminal-1750863134-r4 { fill: #608ab1 } -.terminal-1750863134-r5 { fill: #d0b344 } -.terminal-1750863134-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-1750863134-r7 { fill: #868887;font-style: italic; } + .terminal-114288408-r1 { fill: #c5c8c6 } +.terminal-114288408-r2 { fill: #98a84b } +.terminal-114288408-r3 { fill: #9a9b99 } +.terminal-114288408-r4 { fill: #608ab1 } +.terminal-114288408-r5 { fill: #d0b344 } +.terminal-114288408-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-114288408-r7 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -132,36 +132,36 @@ - + - - $ nf-core modules list local - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - - -INFO     Modules installed in '.':                                                                   - -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓ -Module Name        Repository        Version SHA        Message           Date       -┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩ -│ custom/dumpsoftwar… │ https://github.co… │ bba7e362e4afead706… │ add                │ 2023-11-14 │ -│                     │                    │                     │ environment-schem… │            │ -│                     │                    │                     │ rename yaml-schema │            │ -│                     │                    │                     │ to meta-schema     │            │ -│                     │                    │                     │ (#4323)            │            │ -│ fastqc              │ https://github.co… │ 65ad3e0b9a4099592e… │ Update version     │ 2023-12-08 │ -│                     │                    │                     │ strings (#4556)    │            │ -│ multiqc             │ https://github.co… │ 4ab13872435962dadc… │ Pull multiqc       │ 2023-12-05 │ -│                     │                    │                     │ static multiqc     │            │ -[..truncated..] + + $ nf-core modules list local + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + + +INFO     Modules installed in '.':                                                                   + +self.repo_type='pipeline' +┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓ +Module Name        Repository        Version SHA        Message           Date       +┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩ +│ custom/dumpsoftwar… │ https://github.co… │ 8ec825f465b9c17f9d… │ Bump MultiQC       │ 2024-01-10 │ +│                     │                    │                     │ version to 1.19    │            │ +│                     │                    │                     │ (#4705)            │            │ +│ fastqc              │ https://github.co… │ c9488585ce7bd35ccd… │ CHORES: update     │ 2024-01-18 │ +│                     │                    │                     │ fasqc tests with   │            │ +│                     │                    │                     │ new data           │            │ +│                     │                    │                     │ organisation       │            │ +│                     │                    │                     │ (#4760)            │            │ +[..truncated..] diff --git a/docs/images/nf-core-modules-list-remote.svg b/docs/images/nf-core-modules-list-remote.svg index 28be0b24ee..f8a16874f3 100644 --- a/docs/images/nf-core-modules-list-remote.svg +++ b/docs/images/nf-core-modules-list-remote.svg @@ -19,109 +19,109 @@ font-weight: 700; } - .terminal-2091866550-matrix { + .terminal-2276415832-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2091866550-title { + .terminal-2276415832-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2091866550-r1 { fill: #c5c8c6 } -.terminal-2091866550-r2 { fill: #98a84b } -.terminal-2091866550-r3 { fill: #9a9b99 } -.terminal-2091866550-r4 { fill: #608ab1 } -.terminal-2091866550-r5 { fill: #d0b344 } -.terminal-2091866550-r6 { fill: #1984e9;text-decoration: underline; } -.terminal-2091866550-r7 { fill: #c5c8c6;font-weight: bold } -.terminal-2091866550-r8 { fill: #868887;font-style: italic; } + .terminal-2276415832-r1 { fill: #c5c8c6 } +.terminal-2276415832-r2 { fill: #98a84b } +.terminal-2276415832-r3 { fill: #9a9b99 } +.terminal-2276415832-r4 { fill: #608ab1 } +.terminal-2276415832-r5 { fill: #d0b344 } +.terminal-2276415832-r6 { fill: #1984e9;text-decoration: underline; } +.terminal-2276415832-r7 { fill: #c5c8c6;font-weight: bold } +.terminal-2276415832-r8 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -133,36 +133,36 @@ - + - - $ nf-core modules list remote - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.11.1 - https://nf-co.re - - - -INFO     Modules available from https://github.com/nf-core/modules.git(master):                     - -┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ -Module Name                                           -┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ -│ abacas                                                │ -│ abricate/run                                          │ -│ abricate/summary                                      │ -│ abritamr/run                                          │ -│ adapterremoval                                        │ -│ adapterremovalfixprefix                               │ -│ admixture                                             │ -│ affy/justrma                                          │ -│ agat/convertspgff2gtf                                 │ -[..truncated..] + + $ nf-core modules list remote + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + + +INFO     Modules available from https://github.com/nf-core/modules.git(master):                     + +┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +Module Name                                           +┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +│ abacas                                                │ +│ abricate/run                                          │ +│ abricate/summary                                      │ +│ abritamr/run                                          │ +│ adapterremoval                                        │ +│ adapterremovalfixprefix                               │ +│ admixture                                             │ +│ affy/justrma                                          │ +│ agat/convertspgff2gtf                                 │ +[..truncated..] 
[The following docs/images/*.svg diffs are re-rendered terminal screenshots. In each file the auto-generated `terminal-…` CSS class IDs change and the banner is bumped from "nf-core/tools version 2.11.1" to "nf-core/tools version 2.12"; anything beyond that is noted per file.]
diff --git a/docs/images/nf-core-modules-patch.svg b/docs/images/nf-core-modules-patch.svg
[`nf-core modules patch fastqc`: the INFO lines reporting unchanged files (environment.yml, meta.yml, tests/*) are reordered around the embedded fastqc/main.nf diff; the patch is still written to 'modules/nf-core/fastqc/fastqc.diff']
diff --git a/docs/images/nf-core-modules-remove.svg b/docs/images/nf-core-modules-remove.svg
[`nf-core modules remove abacas`: visible output otherwise unchanged]
diff --git a/docs/images/nf-core-modules-test.svg b/docs/images/nf-core-modules-test.svg
[`nf-core modules test fastqc --no-prompts`: the SVG is resized; the old capture of a failing nf-test 0.8.2 run (assertion failures ending in "CRITICAL Ran, but found errors: - nf-test failed") is replaced by a short capture ending at "INFO Generating nf-test snapshot"]
diff --git a/docs/images/nf-core-modules-update.svg b/docs/images/nf-core-modules-update.svg
[`nf-core modules update --all --no-preview`: the "already up to date" INFO lines are unchanged]
diff --git a/docs/images/nf-core-schema-build.svg b/docs/images/nf-core-schema-build.svg
[`nf-core schema build --no-prompts`: visible output otherwise unchanged]
diff --git a/docs/images/nf-core-schema-lint.svg b/docs/images/nf-core-schema-lint.svg
[`nf-core schema lint`: visible output otherwise unchanged]
diff --git a/docs/images/nf-core-schema-validate.svg b/docs/images/nf-core-schema-validate.svg
[`nf-core schema validate nf-core-rnaseq/3_8 nf-params.json`: visible output otherwise unchanged]
diff --git a/docs/images/nf-core-subworkflows-create.svg b/docs/images/nf-core-subworkflows-create.svg
[`nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --force`: the created-files listing is unchanged]
diff --git a/docs/images/nf-core-subworkflows-info.svg b/docs/images/nf-core-subworkflows-info.svg
[`nf-core subworkflows info bam_rseqc`: visible output otherwise unchanged]
diff --git a/docs/images/nf-core-subworkflows-install.svg b/docs/images/nf-core-subworkflows-install.svg
[`nf-core subworkflows install bam_rseqc`: visible output otherwise unchanged]
diff --git a/docs/images/nf-core-subworkflows-lint.svg b/docs/images/nf-core-subworkflows-lint.svg
[`nf-core subworkflows lint bam_stats_samtools`: one colour style rule changes in addition to the class IDs; the lint result itself is the same (14 subworkflow TODO warnings, 42 tests passed, 0 failed)]
diff --git a/docs/images/nf-core-subworkflows-list-local.svg b/docs/images/nf-core-subworkflows-list-local.svg
[`nf-core subworkflows list local`: the SVG is resized and a stray debug line `self.repo_type='pipeline'` now appears after "INFO No nf-core subworkflows found in '.'"]
diff --git a/docs/images/nf-core-subworkflows-list-remote.svg b/docs/images/nf-core-subworkflows-list-remote.svg
[`nf-core subworkflows list remote`: the truncated subworkflow-name table is unchanged]
diff --git a/docs/images/nf-core-subworkflows-remove.svg b/docs/images/nf-core-subworkflows-remove.svg
[`nf-core subworkflows remove bam_rseqc`: the list of removed rseqc/* modules and of bam_rseqc itself is unchanged]
diff --git a/docs/images/nf-core-subworkflows-test.svg b/docs/images/nf-core-subworkflows-test.svg
[`nf-core subworkflows test bam_rseqc --no-prompts`: visible output otherwise unchanged]
diff --git a/docs/images/nf-core-subworkflows-update.svg b/docs/images/nf-core-subworkflows-update.svg
[`nf-core subworkflows update --all --no-preview`: visible output otherwise unchanged]
diff --git a/docs/images/nf-core-sync.svg b/docs/images/nf-core-sync.svg
[`nf-core sync`: the SVG is resized and the capture now continues past "Making a new template pipeline using pipeline variables" with "Committed changes to 'TEMPLATE' branch", "Checking out original branch: 'master'" and the suggested merge commands (`cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing`, `git merge TEMPLATE`)]
-INFO     Pipeline directory: /home/runner/work/tools/tools/tmp/nf-core-nextbigthing -INFO     Original pipeline repository branch is 'master' -INFO     Deleting all files in 'TEMPLATE' branch                                                     -INFO     Making a new template pipeline using pipeline variables                                     + + $ nf-core sync + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.12 - https://nf-co.re + + +WARNING  Could not find GitHub authentication token. Some API requests may fail.                     +INFO     Pipeline directory: /home/runner/work/tools/tools/tmp/nf-core-nextbigthing +INFO     Original pipeline repository branch is 'master' +INFO     Deleting all files in 'TEMPLATE' branch                                                     +INFO     Making a new template pipeline using pipeline variables                                     +INFO     Committed changes to 'TEMPLATE' branch                                                      +INFO     Checking out original branch: 'master' +INFO     Now try to merge the updates in to your pipeline:                                           +           cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing +           git merge TEMPLATE                                                                        diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 8cfacf7399..7970c34081 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -3,12 +3,14 @@ import logging import os import sys +from pathlib import Path import rich import rich.console import rich.logging import rich.traceback import rich_click as click +from trogon import tui from nf_core import __version__ from nf_core.download import DownloadError @@ -30,11 +32,27 @@ "nf-core": [ { "name": "Commands for users", - "commands": ["list", "launch", "create-params-file", "download", "licences"], + "commands": [ + "list", + "launch", + "create-params-file", + "download", + "licences", + "tui", + ], }, { "name": "Commands for developers", - "commands": ["create", "lint", "modules", "subworkflows", "schema", "bump-version", "sync"], + "commands": [ + "create", + "lint", + "modules", + "subworkflows", + "schema", + "create-logo", + "bump-version", + "sync", + ], }, ], "nf-core modules": [ @@ -89,10 +107,22 @@ def run_nf_core(): if os.environ.get("_NF_CORE_COMPLETE") is None: # Print nf-core header stderr.print(f"\n[green]{' ' * 42},--.[grey39]/[green],-.", highlight=False) - stderr.print("[blue] ___ __ __ __ ___ [green]/,-._.--~\\", highlight=False) - stderr.print(r"[blue] |\ | |__ __ / ` / \ |__) |__ [yellow] } {", highlight=False) - stderr.print(r"[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-,", highlight=False) - stderr.print("[green] `._,._,'\n", highlight=False) + stderr.print( + "[blue] ___ __ __ __ ___ [green]/,-._.--~\\", + highlight=False, + ) + stderr.print( + r"[blue] |\ | |__ __ / ` / \ |__) |__ [yellow] } {", + highlight=False, + ) + stderr.print( + r"[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-,", + highlight=False, + ) + stderr.print( + "[green] `._,._,'\n", + highlight=False, + ) stderr.print( f"[grey39] nf-core/tools version {__version__} - [link=https://nf-co.re]https://nf-co.re[/]", highlight=False, @@ -111,9 +141,16 @@ def run_nf_core(): 
nf_core_cli(auto_envvar_prefix="NFCORE") +@tui() @click.group(context_settings=dict(help_option_names=["-h", "--help"])) @click.version_option(__version__) -@click.option("-v", "--verbose", is_flag=True, default=False, help="Print verbose output to the console.") +@click.option( + "-v", + "--verbose", + is_flag=True, + default=False, + help="Print verbose output to the console.", +) @click.option("--hide-progress", is_flag=True, default=False, help="Don't show progress bars.") @click.option("-l", "--log-file", help="Save a verbose log to a file.", metavar="") @click.pass_context @@ -180,7 +217,11 @@ def list_pipelines(keywords, sort, json, show_archived): @click.option("-r", "--revision", help="Release/branch/SHA of the project to run (if remote)") @click.option("-i", "--id", help="ID for web-gui launch parameter set") @click.option( - "-c", "--command-only", is_flag=True, default=False, help="Create Nextflow command with params (no params file)" + "-c", + "--command-only", + is_flag=True, + default=False, + help="Create Nextflow command with params (no params file)", ) @click.option( "-o", @@ -190,18 +231,43 @@ def list_pipelines(keywords, sort, json, show_archived): help="Path to save run parameters file", ) @click.option( - "-p", "--params-in", type=click.Path(exists=True), help="Set of input run params to use from a previous run" + "-p", + "--params-in", + type=click.Path(exists=True), + help="Set of input run params to use from a previous run", ) @click.option( - "-a", "--save-all", is_flag=True, default=False, help="Save all parameters, even if unchanged from default" + "-a", + "--save-all", + is_flag=True, + default=False, + help="Save all parameters, even if unchanged from default", ) @click.option( - "-x", "--show-hidden", is_flag=True, default=False, help="Show hidden params which don't normally need changing" + "-x", + "--show-hidden", + is_flag=True, + default=False, + help="Show hidden params which don't normally need changing", ) @click.option( - "-u", "--url", type=str, default="https://nf-co.re/launch", help="Customise the builder URL (for development work)" + "-u", + "--url", + type=str, + default="https://nf-co.re/launch", + help="Customise the builder URL (for development work)", ) -def launch(pipeline, id, revision, command_only, params_in, params_out, save_all, show_hidden, url): +def launch( + pipeline, + id, + revision, + command_only, + params_in, + params_out, + save_all, + show_hidden, + url, +): """ Launch a pipeline using a web GUI or command line prompts. 
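[Editor's note: the `from trogon import tui` import and the `@tui()` decorator added above bolt a Textual-based terminal UI onto the existing Click group, exposed as the new `tui` command listed under "Commands for users". A minimal standalone sketch of that pattern, using a toy group rather than the real nf-core CLI (which applies the same decorator to its rich-click group):]

```python
# Toy sketch only (not the nf-core CLI): trogon's @tui() decorator sits on top
# of a Click group and registers an extra `tui` command that opens a terminal
# UI for browsing the group's commands and options.
import click
from trogon import tui


@tui()
@click.group(context_settings=dict(help_option_names=["-h", "--help"]))
@click.option("-v", "--verbose", is_flag=True, default=False, help="Print verbose output.")
def cli(verbose):
    """Example command group."""
    if verbose:
        click.echo("Verbose mode on")


@cli.command()
@click.argument("pipeline", required=False)
def launch(pipeline):
    """Placeholder subcommand."""
    click.echo(f"Would launch: {pipeline}")


if __name__ == "__main__":
    cli()  # `python example_cli.py tui` opens the TUI; other commands run as usual
```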
@@ -217,7 +283,17 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all """ from nf_core.launch import Launch - launcher = Launch(pipeline, revision, command_only, params_in, params_out, save_all, show_hidden, url, id) + launcher = Launch( + pipeline, + revision, + command_only, + params_in, + params_out, + save_all, + show_hidden, + url, + id, + ) if not launcher.launch_pipeline(): sys.exit(1) @@ -236,7 +312,11 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all ) @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") @click.option( - "-x", "--show-hidden", is_flag=True, default=False, help="Show hidden params which don't normally need changing" + "-x", + "--show-hidden", + is_flag=True, + default=False, + help="Show hidden params which don't normally need changing", ) def create_params_file(pipeline, revision, output, force, show_hidden): """ @@ -267,10 +347,19 @@ def create_params_file(pipeline, revision, output, force, show_hidden): ) @click.option("-o", "--outdir", type=str, help="Output directory") @click.option( - "-x", "--compress", type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]), help="Archive compression type" + "-x", + "--compress", + type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]), + help="Archive compression type", ) @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") -@click.option("-t", "--tower", is_flag=True, default=False, help="Download for seqeralabs® Nextflow Tower") +@click.option( + "-t", + "--tower", + is_flag=True, + default=False, + help="Download for Seqera Platform (formerly Nextflow Tower)", +) @click.option( "-d", "--download-configuration", @@ -304,7 +393,13 @@ def create_params_file(pipeline, revision, output, force, show_hidden): type=str, help="List of images already available in a remote `singularity.cacheDir`.", ) -@click.option("-p", "--parallel-downloads", type=int, default=4, help="Number of parallel image downloads") +@click.option( + "-p", + "--parallel-downloads", + type=int, + default=4, + help="Number of parallel image downloads", +) def download( pipeline, revision, @@ -378,7 +473,13 @@ def licences(pipeline, json): @click.option("-d", "--description", type=str, help="A short description of your pipeline") @click.option("-a", "--author", type=str, help="Name of the main author(s)") @click.option("--version", type=str, default="1.0dev", help="The initial version number to use") -@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") +@click.option( + "-f", + "--force", + is_flag=True, + default=False, + help="Overwrite output directory if it already exists", +) @click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") @click.option("--plain", is_flag=True, help="Use the standard nf-core template") @@ -426,14 +527,36 @@ def create(name, description, author, version, force, outdir, template_yaml, pla help="Execute additional checks for release-ready workflows.", ) @click.option( - "-f", "--fix", type=str, metavar="", multiple=True, help="Attempt to automatically fix specified lint test" + "-f", + "--fix", + type=str, + metavar="", + multiple=True, + help="Attempt to automatically fix specified lint test", +) +@click.option( + "-k", + "--key", + type=str, + metavar="", + multiple=True, + help="Run only 
these lint tests", ) -@click.option("-k", "--key", type=str, metavar="", multiple=True, help="Run only these lint tests") @click.option("-p", "--show-passed", is_flag=True, help="Show passing tests on the command line") @click.option("-i", "--fail-ignored", is_flag=True, help="Convert ignored tests to failures") @click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") -@click.option("--markdown", type=str, metavar="", help="File to write linting results to (Markdown)") -@click.option("--json", type=str, metavar="", help="File to write linting results to (JSON)") +@click.option( + "--markdown", + type=str, + metavar="", + help="File to write linting results to (Markdown)", +) +@click.option( + "--json", + type=str, + metavar="", + help="File to write linting results to (JSON)", +) @click.option( "--sort-by", type=click.Choice(["module", "test"]), @@ -442,7 +565,19 @@ def create(name, description, author, version, force, outdir, template_yaml, pla show_default=True, ) @click.pass_context -def lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by): +def lint( + ctx, + dir, + release, + fix, + key, + show_passed, + fail_ignored, + fail_warned, + markdown, + json, + sort_by, +): """ Check pipeline code against nf-core guidelines. @@ -465,7 +600,7 @@ def lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, ma # Run the lint tests! try: - lint_obj, module_lint_obj = run_linting( + lint_obj, module_lint_obj, subworkflow_lint_obj = run_linting( dir, release, fix, @@ -478,7 +613,10 @@ def lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, ma json, ctx.obj["hide_progress"], ) - if len(lint_obj.failed) + len(module_lint_obj.failed) > 0: + swf_failed = 0 + if subworkflow_lint_obj is not None: + swf_failed = len(subworkflow_lint_obj.failed) + if len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed > 0: sys.exit(1) except AssertionError as e: log.critical(e) @@ -497,7 +635,13 @@ def lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, ma default=NF_CORE_MODULES_REMOTE, help="Remote git repo to fetch files from", ) -@click.option("-b", "--branch", type=str, default=None, help="Branch of git repository hosting modules.") +@click.option( + "-b", + "--branch", + type=str, + default=None, + help="Branch of git repository hosting modules.", +) @click.option( "-N", "--no-pull", @@ -529,7 +673,13 @@ def modules(ctx, git_remote, branch, no_pull): default=NF_CORE_MODULES_REMOTE, help="Remote git repo to fetch files from", ) -@click.option("-b", "--branch", type=str, default=None, help="Branch of git repository hosting modules.") +@click.option( + "-b", + "--branch", + type=str, + default=None, + help="Branch of git repository hosting modules.", +) @click.option( "-N", "--no-pull", @@ -630,8 +780,20 @@ def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-b default=".", help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", ) -@click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the module") -@click.option("-f", "--force", is_flag=True, default=False, help="Force reinstallation of module if it already exists") +@click.option( + "-p", + "--prompt", + is_flag=True, + default=False, + help="Prompt for the version of the module", +) +@click.option( + "-f", + "--force", + is_flag=True, + default=False, + help="Force reinstallation of module if it already exists", +) @click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA") def modules_install(ctx, tool, dir, prompt, force, sha): """ @@ -672,10 +834,21 @@ def modules_install(ctx, tool, dir, prompt, force, sha): help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) @click.option("-f", "--force", is_flag=True, default=False, help="Force update of module") -@click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the module") +@click.option( + "-p", + "--prompt", + is_flag=True, + default=False, + help="Prompt for the version of the module", +) @click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA") @click.option( - "-a", "--all", "install_all", is_flag=True, default=False, help="Update all modules installed in pipeline" + "-a", + "--all", + "install_all", + is_flag=True, + default=False, + help="Update all modules installed in pipeline", ) @click.option( "-x/-y", @@ -699,7 +872,18 @@ def modules_install(ctx, tool, dir, prompt, force, sha): default=False, help="Automatically update all linked modules and subworkflows without asking for confirmation", ) -def modules_update(ctx, tool, directory, force, prompt, sha, install_all, preview, save_diff, update_deps): +def modules_update( + ctx, + tool, + directory, + force, + prompt, + sha, + install_all, + preview, + save_diff, + update_deps, +): """ Update DSL2 modules within a pipeline. 
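[Editor's note: in the `lint` command hunk a little earlier, `run_linting` now returns a third, possibly-`None` subworkflow lint object, and the exit status is driven by the combined failure count. A toy sketch of that aggregation, with stand-in result objects rather than the real nf_core lint classes:]

```python
# Stand-in sketch of the new exit-status logic in `nf-core lint`: failures from
# the pipeline, module and (optional) subworkflow lint objects are summed.
# FakeResults is a placeholder for the objects returned by run_linting().
from dataclasses import dataclass, field


@dataclass
class FakeResults:
    failed: list = field(default_factory=list)


def lint_exit_code(lint_obj, module_lint_obj, subworkflow_lint_obj=None) -> int:
    swf_failed = 0
    if subworkflow_lint_obj is not None:  # subworkflow linting may be skipped entirely
        swf_failed = len(subworkflow_lint_obj.failed)
    return 1 if len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed > 0 else 0


print(lint_exit_code(FakeResults(failed=["files_exist"]), FakeResults()))  # 1
print(lint_exit_code(FakeResults(), FakeResults(), FakeResults()))         # 0
```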
@@ -801,13 +985,55 @@ def modules_remove(ctx, dir, tool): @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") @click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") -@click.option("-a", "--author", type=str, metavar="", help="Module author's GitHub username prefixed with '@'") -@click.option("-l", "--label", type=str, metavar="", help="Standard resource label for process") -@click.option("-m", "--meta", is_flag=True, default=False, help="Use Groovy meta map for sample information") -@click.option("-n", "--no-meta", is_flag=True, default=False, help="Don't use meta map for sample information") -@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite any files if they already exist") -@click.option("-c", "--conda-name", type=str, default=None, help="Name of the conda package to use") -@click.option("-p", "--conda-package-version", type=str, default=None, help="Version of conda package to use") +@click.option( + "-a", + "--author", + type=str, + metavar="", + help="Module author's GitHub username prefixed with '@'", +) +@click.option( + "-l", + "--label", + type=str, + metavar="", + help="Standard resource label for process", +) +@click.option( + "-m", + "--meta", + is_flag=True, + default=False, + help="Use Groovy meta map for sample information", +) +@click.option( + "-n", + "--no-meta", + is_flag=True, + default=False, + help="Don't use meta map for sample information", +) +@click.option( + "-f", + "--force", + is_flag=True, + default=False, + help="Overwrite any files if they already exist", +) +@click.option( + "-c", + "--conda-name", + type=str, + default=None, + help="Name of the conda package to use", +) +@click.option( + "-p", + "--conda-package-version", + type=str, + default=None, + help="Version of conda package to use", +) @click.option( "-i", "--empty-template", @@ -815,7 +1041,12 @@ def modules_remove(ctx, dir, tool): default=False, help="Create a module from the template without TODOs or examples", ) -@click.option("--migrate-pytest", is_flag=True, default=False, help="Migrate a module with pytest tests to nf-test") +@click.option( + "--migrate-pytest", + is_flag=True, + default=False, + help="Migrate a module with pytest tests to nf-test", +) def create_module( ctx, tool, @@ -853,7 +1084,16 @@ def create_module( # Run function try: module_create = ModuleCreate( - dir, tool, author, label, has_meta, force, conda_name, conda_package_version, empty_template, migrate_pytest + dir, + tool, + author, + label, + has_meta, + force, + conda_name, + conda_package_version, + empty_template, + migrate_pytest, ) module_create.create() except UserWarning as e: @@ -868,10 +1108,28 @@ def create_module( @modules.command("test") @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") -@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") -@click.option("-p", "--no-prompts", is_flag=True, default=False, help="Use defaults without prompting") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + metavar="", +) +@click.option( + "-p", + "--no-prompts", + is_flag=True, + default=False, + help="Use defaults without prompting", +) @click.option("-u", "--update", is_flag=True, default=False, help="Update existing snapshots") -@click.option("-o", "--once", is_flag=True, default=False, help="Run tests only once. 
Don't check snapshot stability") +@click.option( + "-o", + "--once", + is_flag=True, + default=False, + help="Run tests only once. Don't check snapshot stability", +) def test_module(ctx, tool, dir, no_prompts, update, once): """ Run nf-test for a module. @@ -902,7 +1160,13 @@ def test_module(ctx, tool, dir, no_prompts, update, once): @modules.command("lint") @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") -@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + metavar="", +) @click.option( "-r", "--registry", @@ -911,7 +1175,14 @@ def test_module(ctx, tool, dir, no_prompts, update, once): default=None, help="Registry to use for containers. If not specified it will use docker.registry value in the nextflow.config file", ) -@click.option("-k", "--key", type=str, metavar="", multiple=True, help="Run only these lint tests") +@click.option( + "-k", + "--key", + type=str, + metavar="", + multiple=True, + help="Run only these lint tests", +) @click.option("-a", "--all", is_flag=True, help="Run on all modules") @click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") @click.option("--local", is_flag=True, help="Run additional lint tests for local modules") @@ -923,7 +1194,11 @@ def test_module(ctx, tool, dir, no_prompts, update, once): help="Sort lint output by module or test name.", show_default=True, ) -@click.option("--fix-version", is_flag=True, help="Fix the module version if a newer version is available") +@click.option( + "--fix-version", + is_flag=True, + help="Fix the module version if a newer version is available", +) def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version): """ Lint one or more modules in a directory. @@ -934,7 +1209,7 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, Test modules within a pipeline or a clone of the nf-core/modules repository. """ - from nf_core.components.lint import LintException + from nf_core.components.lint import LintExceptionError from nf_core.modules import ModuleLint try: @@ -960,7 +1235,7 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, ) if len(module_lint.failed) > 0: sys.exit(1) - except LintException as e: + except LintExceptionError as e: log.error(e) sys.exit(1) except (UserWarning, LookupError) as e: @@ -1011,7 +1286,13 @@ def modules_info(ctx, tool, dir): @modules.command() @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") -@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + metavar="", +) @click.option("-a", "--all", is_flag=True, help="Run on all modules") @click.option("-s", "--show-all", is_flag=True, help="Show up-to-date modules in results too") def bump_versions(ctx, tool, dir, all, show_all): @@ -1020,7 +1301,7 @@ def bump_versions(ctx, tool, dir, all, show_all): the nf-core/modules repo. 
""" from nf_core.modules.bump_versions import ModuleVersionBumper - from nf_core.modules.modules_utils import ModuleException + from nf_core.modules.modules_utils import ModuleExceptionError try: version_bumper = ModuleVersionBumper( @@ -1030,7 +1311,7 @@ def bump_versions(ctx, tool, dir, all, show_all): ctx.obj["modules_repo_no_pull"], ) version_bumper.bump_versions(module=tool, all_modules=all, show_uptodate=show_all) - except ModuleException as e: + except ModuleExceptionError as e: log.error(e) sys.exit(1) except (UserWarning, LookupError) as e: @@ -1043,9 +1324,26 @@ def bump_versions(ctx, tool, dir, all, show_all): @click.pass_context @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name") @click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") -@click.option("-a", "--author", type=str, metavar="", help="Module author's GitHub username prefixed with '@'") -@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite any files if they already exist") -@click.option("--migrate-pytest", is_flag=True, default=False, help="Migrate a module with pytest tests to nf-test") +@click.option( + "-a", + "--author", + type=str, + metavar="", + help="Module author's GitHub username prefixed with '@'", +) +@click.option( + "-f", + "--force", + is_flag=True, + default=False, + help="Overwrite any files if they already exist", +) +@click.option( + "--migrate-pytest", + is_flag=True, + default=False, + help="Migrate a module with pytest tests to nf-test", +) def create_subworkflow(ctx, subworkflow, dir, author, force, migrate_pytest): """ Create a new subworkflow from the nf-core template. @@ -1074,10 +1372,28 @@ def create_subworkflow(ctx, subworkflow, dir, author, force, migrate_pytest): @subworkflows.command("test") @click.pass_context @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name") -@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") -@click.option("-p", "--no-prompts", is_flag=True, default=False, help="Use defaults without prompting") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + metavar="", +) +@click.option( + "-p", + "--no-prompts", + is_flag=True, + default=False, + help="Use defaults without prompting", +) @click.option("-u", "--update", is_flag=True, default=False, help="Update existing snapshots") -@click.option("-o", "--once", is_flag=True, default=False, help="Run tests only once. Don't check snapshot stability") +@click.option( + "-o", + "--once", + is_flag=True, + default=False, + help="Run tests only once. Don't check snapshot stability", +) def test_subworkflow(ctx, subworkflow, dir, no_prompts, update, once): """ Run nf-test for a subworkflow. @@ -1176,7 +1492,13 @@ def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefi @subworkflows.command("lint") @click.pass_context @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name") -@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + metavar="", +) @click.option( "-r", "--registry", @@ -1185,7 +1507,14 @@ def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefi default=None, help="Registry to use for containers. 
If not specified it will use docker.registry value in the nextflow.config file", ) -@click.option("-k", "--key", type=str, metavar="", multiple=True, help="Run only these lint tests") +@click.option( + "-k", + "--key", + type=str, + metavar="", + multiple=True, + help="Run only these lint tests", +) @click.option("-a", "--all", is_flag=True, help="Run on all subworkflows") @click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") @click.option("--local", is_flag=True, help="Run additional lint tests for local subworkflows") @@ -1207,7 +1536,7 @@ def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, lo Test subworkflows within a pipeline or a clone of the nf-core/modules repository. """ - from nf_core.components.lint import LintException + from nf_core.components.lint import LintExceptionError from nf_core.subworkflows import SubworkflowLint try: @@ -1232,7 +1561,7 @@ def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, lo ) if len(subworkflow_lint.failed) > 0: sys.exit(1) - except LintException as e: + except LintExceptionError as e: log.error(e) sys.exit(1) except (UserWarning, LookupError) as e: @@ -1290,11 +1619,27 @@ def subworkflows_info(ctx, tool, dir): default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -@click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the subworkflow") @click.option( - "-f", "--force", is_flag=True, default=False, help="Force reinstallation of subworkflow if it already exists" + "-p", + "--prompt", + is_flag=True, + default=False, + help="Prompt for the version of the subworkflow", +) +@click.option( + "-f", + "--force", + is_flag=True, + default=False, + help="Force reinstallation of subworkflow if it already exists", +) +@click.option( + "-s", + "--sha", + type=str, + metavar="", + help="Install subworkflow at commit SHA", ) -@click.option("-s", "--sha", type=str, metavar="", help="Install subworkflow at commit SHA") def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): """ Install DSL2 subworkflow within a pipeline. @@ -1363,10 +1708,27 @@ def subworkflows_remove(ctx, dir, subworkflow): help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", ) @click.option("-f", "--force", is_flag=True, default=False, help="Force update of subworkflow") -@click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the subworkflow") -@click.option("-s", "--sha", type=str, metavar="", help="Install subworkflow at commit SHA") @click.option( - "-a", "--all", "install_all", is_flag=True, default=False, help="Update all subworkflow installed in pipeline" + "-p", + "--prompt", + is_flag=True, + default=False, + help="Prompt for the version of the subworkflow", +) +@click.option( + "-s", + "--sha", + type=str, + metavar="", + help="Install subworkflow at commit SHA", +) +@click.option( + "-a", + "--all", + "install_all", + is_flag=True, + default=False, + help="Update all subworkflow installed in pipeline", ) @click.option( "-x/-y", @@ -1390,7 +1752,18 @@ def subworkflows_remove(ctx, dir, subworkflow): default=False, help="Automatically update all linked modules and subworkflows without asking for confirmation", ) -def subworkflows_update(ctx, subworkflow, dir, force, prompt, sha, install_all, preview, save_diff, update_deps): +def subworkflows_update( + ctx, + subworkflow, + dir, + force, + prompt, + sha, + install_all, + preview, + save_diff, + update_deps, +): """ Update DSL2 subworkflow within a pipeline. @@ -1472,8 +1845,16 @@ def validate(pipeline, params): default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -@click.option("--no-prompts", is_flag=True, help="Do not confirm changes, just update parameters and exit") -@click.option("--web-only", is_flag=True, help="Skip building using Nextflow config, just launch the web tool") +@click.option( + "--no-prompts", + is_flag=True, + help="Do not confirm changes, just update parameters and exit", +) +@click.option( + "--web-only", + is_flag=True, + help="Skip building using Nextflow config, just launch the web tool", +) @click.option( "--url", type=str, @@ -1506,7 +1887,10 @@ def build(dir, no_prompts, web_only, url): # nf-core schema lint @schema.command("lint") @click.argument( - "schema_path", type=click.Path(exists=True), default="nextflow_schema.json", metavar="" + "schema_path", + type=click.Path(exists=True), + default="nextflow_schema.json", + metavar="", ) def schema_lint(schema_path): """ @@ -1543,9 +1927,19 @@ def schema_lint(schema_path): required=False, metavar="", ) -@click.option("-o", "--output", type=str, metavar="", help="Output filename. Defaults to standard out.") @click.option( - "-x", "--format", type=click.Choice(["markdown", "html"]), default="markdown", help="Format to output docs in." + "-o", + "--output", + type=str, + metavar="", + help="Output filename. Defaults to standard out.", +) +@click.option( + "-x", + "--format", + type=click.Choice(["markdown", "html"]), + default="markdown", + help="Format to output docs in.", ) @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") @click.option( @@ -1584,7 +1978,11 @@ def docs(schema_path, output, format, force, columns): help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", ) @click.option( - "-n", "--nextflow", is_flag=True, default=False, help="Bump required nextflow version instead of pipeline version" + "-n", + "--nextflow", + is_flag=True, + default=False, + help="Bump required nextflow version instead of pipeline version", ) def bump_version(new_version, dir, nextflow): """ @@ -1620,6 +2018,62 @@ def bump_version(new_version, dir, nextflow): sys.exit(1) +# nf-core create-logo +@nf_core_cli.command("create-logo") +@click.argument("logo-text", metavar="") +@click.option("-d", "--dir", type=click.Path(), default=".", help="Directory to save the logo in.") +@click.option( + "-n", + "--name", + type=str, + help="Name of the output file (with or without '.png' suffix).", +) +@click.option( + "--theme", + type=click.Choice(["light", "dark"]), + default="light", + help="Theme for the logo.", + show_default=True, +) +@click.option( + "--width", + type=int, + default=2300, + help="Width of the logo in pixels.", + show_default=True, +) +@click.option( + "--format", + type=click.Choice(["png", "svg"]), + default="png", + help="Image format of the logo, either PNG or SVG.", + show_default=True, +) +@click.option( + "-f", + "--force", + is_flag=True, + default=False, + help="Overwrite any files if they already exist", +) +def logo(logo_text, dir, name, theme, width, format, force): + """ + Generate a logo with the nf-core logo template. + + This command generates an nf-core pipeline logo, using the supplied + """ + from nf_core.create_logo import create_logo + + try: + if dir == ".": + dir = Path.cwd() + logo_path = create_logo(logo_text, dir, name, theme, width, format, force) + log.info(f"Created logo: [magenta]{logo_path}[/]") + except UserWarning as e: + log.error(e) + sys.exit(1) + + # nf-core sync @nf_core_cli.command("sync") @click.option( @@ -1629,8 +2083,19 @@ def bump_version(new_version, dir, nextflow): default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -@click.option("-b", "--from-branch", type=str, help="The git branch to use to fetch workflow variables.") -@click.option("-p", "--pull-request", is_flag=True, default=False, help="Make a GitHub pull-request with the changes.") +@click.option( + "-b", + "--from-branch", + type=str, + help="The git branch to use to fetch workflow variables.", +) +@click.option( + "-p", + "--pull-request", + is_flag=True, + default=False, + help="Make a GitHub pull-request with the changes.", +) @click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") @@ -1647,7 +2112,7 @@ def sync(dir, from_branch, pull_request, github_repository, username, template_y the pipeline. It is run automatically for all pipelines when ever a new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. 
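[Editor's note: the new `create-logo` command above is a thin wrapper around `nf_core.create_logo.create_logo`, which is added later in this diff. A hedged usage sketch of calling the helper directly; the pipeline name and output directory are invented for illustration, and the keyword names follow the signature added in nf_core/create_logo.py:]

```python
# Sketch of using the new create_logo helper that backs `nf-core create-logo`.
from pathlib import Path

from nf_core.create_logo import create_logo

# PNG logo, dark theme, 600 px wide (the same kind of call the template `create`
# code makes for the README images further down in this diff):
logo_path = create_logo(
    text="nextbigthing",      # pipeline short name rendered onto the base template
    dir=Path("docs/images"),  # created if it does not exist yet
    theme="dark",             # "light" or "dark" background template
    width=600,                # PNGs are resized; SVG output ignores the width
    format="png",             # or "svg" to text-patch the placeholder SVG instead
    force=False,              # an existing logo file is kept unless force=True
)
print(logo_path)              # e.g. docs/images/nf-core-nextbigthing_logo_dark.png
```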
""" - from nf_core.sync import PipelineSync, PullRequestException, SyncException + from nf_core.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError from nf_core.utils import is_pipeline_directory # Check if pipeline directory contains necessary files @@ -1657,7 +2122,7 @@ def sync(dir, from_branch, pull_request, github_repository, username, template_y sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml) try: sync_obj.sync() - except (SyncException, PullRequestException) as e: + except (SyncExceptionError, PullRequestExceptionError) as e: log.error(e) sys.exit(1) diff --git a/nf_core/assets/logo/MavenPro-Bold.ttf b/nf_core/assets/logo/MavenPro-Bold.ttf new file mode 100644 index 0000000000..2ab88ea532 Binary files /dev/null and b/nf_core/assets/logo/MavenPro-Bold.ttf differ diff --git a/nf_core/assets/logo/nf-core-repo-logo-base-darkbg.png b/nf_core/assets/logo/nf-core-repo-logo-base-darkbg.png new file mode 100644 index 0000000000..d977d01626 Binary files /dev/null and b/nf_core/assets/logo/nf-core-repo-logo-base-darkbg.png differ diff --git a/nf_core/assets/logo/nf-core-repo-logo-base-lightbg.png b/nf_core/assets/logo/nf-core-repo-logo-base-lightbg.png new file mode 100644 index 0000000000..812f46e266 Binary files /dev/null and b/nf_core/assets/logo/nf-core-repo-logo-base-lightbg.png differ diff --git a/nf_core/assets/logo/placeholder_logo.svg b/nf_core/assets/logo/placeholder_logo.svg new file mode 100644 index 0000000000..c4c419d125 --- /dev/null +++ b/nf_core/assets/logo/placeholder_logo.svg @@ -0,0 +1 @@ +nf-core/PLACEHOLDER \ No newline at end of file diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 40c8f8984f..c5e8931fbc 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -68,7 +68,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: [ ( f"/releases/tag/{current_version}", - f"/tree/dev", + "/tree/dev", ) ], ) @@ -78,7 +78,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: pipeline_obj, [ ( - f"/tree/dev", + "/tree/dev", f"/releases/tag/{multiqc_new_version}", ) ], @@ -187,7 +187,7 @@ def update_file_version(filename: Union[str, Path], pipeline_obj: Pipeline, patt fn = pipeline_obj._fp(filename) content = "" try: - with open(fn, "r") as fh: + with open(fn) as fh: content = fh.read() except FileNotFoundError: log.warning(f"File not found: '{fn}'") diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 44924a2704..8332429835 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -227,7 +227,7 @@ def check_patch_paths(self, patch_path: Path, module_name: str) -> None: if patch_path.exists(): log.info(f"Modules {module_name} contains a patch file.") rewrite = False - with open(patch_path, "r") as fh: + with open(patch_path) as fh: lines = fh.readlines() for index, line in enumerate(lines): # Check if there are old paths in the patch file and replace @@ -264,7 +264,7 @@ def check_if_in_include_stmts(self, component_path: str) -> Dict[str, List[Dict[ if self.repo_type == "pipeline": workflow_files = Path(self.dir, "workflows").glob("*.nf") for workflow_file in workflow_files: - with open(workflow_file, "r") as fh: + with open(workflow_file) as fh: # Check if component path is in the file using mmap with mmap.mmap(fh.fileno(), 0, access=mmap.ACCESS_READ) as s: if s.find(component_path.encode()) != -1: diff --git 
a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index eec533ce60..01650a643d 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -53,7 +53,7 @@ def get_repo_info(directory: str, use_prompt: Optional[bool] = True) -> Tuple[st raise UserWarning("Repository type could not be established") # Check if it's a valid answer - if not repo_type in ["pipeline", "modules"]: + if repo_type not in ["pipeline", "modules"]: raise UserWarning(f"Invalid repository type: '{repo_type}'") # Check for org if modules repo @@ -138,7 +138,7 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str """ modules = [] subworkflows = [] - with open(Path(subworkflow_dir, "main.nf"), "r") as fh: + with open(Path(subworkflow_dir, "main.nf")) as fh: for line in fh: regex = re.compile( r"include(?: *{ *)([a-zA-Z\_0-9]*)(?: *as *)?(?:[a-zA-Z\_0-9]*)?(?: *})(?: *from *)(?:'|\")(.*)(?:'|\")" diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 568ca22af5..32f6d1a433 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -2,7 +2,6 @@ The ComponentCreate class handles generating of module and subworkflow templates """ -from __future__ import print_function import glob import json @@ -440,12 +439,13 @@ def _copy_old_files(self, component_old_path): pytest_dir = Path(self.directory, "tests", self.component_type, self.org, self.component_dir) nextflow_config = pytest_dir / "nextflow.config" if nextflow_config.is_file(): - with open(nextflow_config, "r") as fh: + with open(nextflow_config) as fh: config_lines = "" for line in fh: - if "publishDir" not in line: + if "publishDir" not in line and line.strip() != "": config_lines += line - if len(config_lines) > 0: + # if the nextflow.config file only contained publishDir, non_publish_dir_lines will be 11 characters long (`process {\n}`) + if len(config_lines) > 11: log.debug("Copying nextflow.config file from pytest tests") with open( Path(self.directory, self.component_type, self.org, self.component_dir, "tests", "nextflow.config"), @@ -460,7 +460,7 @@ def _print_and_delete_pytest_files(self): "[violet]Do you want to delete the pytest files?[/]\nPytest file 'main.nf' will be printed to standard output to allow migrating the tests manually to 'main.nf.test'.", default=False, ): - with open(pytest_dir / "main.nf", "r") as fh: + with open(pytest_dir / "main.nf") as fh: log.info(fh.read()) shutil.rmtree(pytest_dir) log.info( @@ -475,7 +475,7 @@ def _print_and_delete_pytest_files(self): ) # Delete tags from pytest_modules.yml modules_yml = Path(self.directory, "tests", "config", "pytest_modules.yml") - with open(modules_yml, "r") as fh: + with open(modules_yml) as fh: yml_file = yaml.safe_load(fh) yml_key = str(self.component_dir) if self.component_type == "modules" else f"subworkflows/{self.component_dir}" if yml_key in yml_file: diff --git a/nf_core/components/info.py b/nf_core/components/info.py index e4d8038b87..54fc0004dc 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -184,7 +184,7 @@ def get_local_yaml(self): meta_fn = Path(comp_dir, "meta.yml") if meta_fn.exists(): log.debug(f"Found local file: {meta_fn}") - with open(meta_fn, "r") as fh: + with open(meta_fn) as fh: self.local_path = comp_dir return yaml.safe_load(fh) @@ -196,7 +196,7 @@ def get_local_yaml(self): meta_fn = Path(comp_dir, "meta.yml") if meta_fn.exists(): log.debug(f"Found local file: {meta_fn}") - with open(meta_fn, "r") as fh: + 
with open(meta_fn) as fh: self.local_path = comp_dir return yaml.safe_load(fh) log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally") diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index efffc28e85..3c2fb9dde3 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -3,7 +3,6 @@ in nf-core pipelines """ -from __future__ import print_function import logging import operator @@ -27,7 +26,7 @@ log = logging.getLogger(__name__) -class LintException(Exception): +class LintExceptionError(Exception): """Exception raised when there was an error with module or subworkflow linting""" pass @@ -216,7 +215,7 @@ def _print_results(self, show_passed=False, sort_by="test"): try: for lint_result in tests: max_name_len = max(len(lint_result.component_name), max_name_len) - except: + except Exception: pass # Helper function to format test links nicely diff --git a/nf_core/components/list.py b/nf_core/components/list.py index 47c0eaad62..b0c5af219f 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -70,7 +70,7 @@ def pattern_msg(keywords: List[str]) -> str: # We have a pipeline - list what's installed else: # Check that we are in a pipeline directory - + print(f"{self.repo_type=}") try: if self.repo_type != "pipeline": raise UserWarning( @@ -141,6 +141,7 @@ def pattern_msg(keywords: List[str]) -> str: date = "[red]Not Available" message = "[red]Not Available" table.add_row(component, repo_url, version_sha, message, date) + components.append(component) if print_json: return json.dumps(components, sort_keys=True, indent=4) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 874fa570bc..2f73afe9d3 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -81,7 +81,7 @@ def __init__( def _get_main_nf_tags(self, test_main_nf: Union[Path, str]): """Collect all tags from the main.nf.test file.""" tags = [] - with open(test_main_nf, "r") as fh: + with open(test_main_nf) as fh: for line in fh: if line.strip().startswith("tag"): tags.append(line.strip().split()[1].strip('"')) @@ -90,7 +90,7 @@ def _get_main_nf_tags(self, test_main_nf: Union[Path, str]): def _get_included_components(self, main_nf: Union[Path, str]): """Collect all included components from the main.nf file.""" included_components = [] - with open(main_nf, "r") as fh: + with open(main_nf) as fh: for line in fh: if line.strip().startswith("include"): # get tool/subtool or subworkflow name from include statement, can be in the form @@ -107,7 +107,7 @@ def _get_included_components(self, main_nf: Union[Path, str]): def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, str]): """Collect all included components from the main.nf file.""" included_components = [] - with open(main_nf_test, "r") as fh: + with open(main_nf_test) as fh: for line in fh: if line.strip().startswith("script"): # get tool/subtool or subworkflow name from script statement, can be: @@ -151,7 +151,7 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st def get_inputs_from_main_nf(self): """Collect all inputs from the main.nf file.""" inputs = [] - with open(self.main_nf, "r") as f: + with open(self.main_nf) as f: data = f.read() # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo # regex matches: @@ -168,17 +168,19 @@ def 
get_inputs_from_main_nf(self): input_data = data.split("input:")[1].split("output:")[0] regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" matches = re.finditer(regex, input_data, re.MULTILINE) - for matchNum, match in enumerate(matches, start=1): + for _, match in enumerate(matches, start=1): if match.group(3): - inputs.append(match.group(3)) + input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + inputs.append(input_val) elif match.group(4): - inputs.append(match.group(4)) + input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + inputs.append(input_val) log.info(f"Found {len(inputs)} inputs in {self.main_nf}") self.inputs = inputs def get_outputs_from_main_nf(self): outputs = [] - with open(self.main_nf, "r") as f: + with open(self.main_nf) as f: data = f.read() # get output values from main.nf after "output:". the names are always after "emit:" if "output:" not in data: @@ -187,7 +189,7 @@ def get_outputs_from_main_nf(self): output_data = data.split("output:")[1].split("when:")[0] regex = r"emit:\s*([^)\s,]+)" matches = re.finditer(regex, output_data, re.MULTILINE) - for matchNum, match in enumerate(matches, start=1): + for _, match in enumerate(matches, start=1): outputs.append(match.group(1)) log.info(f"Found {len(outputs)} outputs in {self.main_nf}") self.outputs = outputs diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py index 28f2f886b1..55d5747451 100644 --- a/nf_core/components/patch.py +++ b/nf_core/components/patch.py @@ -35,7 +35,7 @@ def _parameter_checks(self, component): if component is not None and component not in component_names: component_dir = [dir for dir, m in components if m == component][0] raise UserWarning( - f"{self.component_type[:-1].title()} '{Path(self.component_type, component_dir, module)}' does not exist in the pipeline" + f"{self.component_type[:-1].title()} '{Path(self.component_type, component_dir, component)}' does not exist in the pipeline" ) def patch(self, component=None): @@ -220,5 +220,5 @@ def remove(self, component): ): log.error( f"Module files do not appear to match the remote for the commit sha in the 'module.json': {component_version}\n" - f"Recommend reinstalling with 'nf-core modules install --force --sha {component_version} {module}' " + f"Recommend reinstalling with 'nf-core modules install --force --sha {component_version} {component}' " ) diff --git a/nf_core/create.py b/nf_core/create.py index 56d0912a07..8038a995c5 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -4,23 +4,20 @@ import configparser import logging import os -import random import re import shutil import sys -import time from pathlib import Path -import filetype # type: ignore import git import jinja2 import questionary -import requests import yaml import nf_core import nf_core.schema import nf_core.utils +from nf_core.create_logo import create_logo from nf_core.lint_utils import run_prettier_on_file log = logging.getLogger(__name__) @@ -108,7 +105,7 @@ def create_param_dict(self, name, description, author, version, template_yaml_pa # Obtain template customization info from template yaml file or `.nf-core.yml` config file try: if template_yaml_path is not None: - with open(template_yaml_path, "r") as f: + with open(template_yaml_path) as f: template_yaml = yaml.safe_load(f) elif "template" in config_yml: template_yaml = config_yml["template"] @@ -177,8 +174,8 @@ def create_param_dict(self, name, description, author, version, template_yaml_pa param_dict["name_noslash"] = 
param_dict["name"].replace("/", "-") param_dict["prefix_nodash"] = param_dict["prefix"].replace("-", "") param_dict["name_docker"] = param_dict["name"].replace(param_dict["prefix"], param_dict["prefix_nodash"]) - param_dict["logo_light"] = f"{param_dict['name_noslash']}_logo_light.png" - param_dict["logo_dark"] = f"{param_dict['name_noslash']}_logo_dark.png" + param_dict["logo_light"] = f"nf-core-{param_dict['short_name']}_logo_light.png" + param_dict["logo_dark"] = f"nf-core-{param_dict['short_name']}_logo_dark.png" param_dict["version"] = version if ( @@ -395,7 +392,7 @@ def remove_nf_core_in_bug_report_template(self): """ bug_report_path = self.outdir / ".github" / "ISSUE_TEMPLATE" / "bug_report.yml" - with open(bug_report_path, "r") as fh: + with open(bug_report_path) as fh: contents = yaml.load(fh, Loader=yaml.FullLoader) # Remove the first item in the body, which is the information about the docs @@ -507,59 +504,13 @@ def fix_linting(self): def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" - - logo_url = f"https://nf-co.re/logo/{self.template_params['short_name']}?theme=light" - log.debug(f"Fetching logo from {logo_url}") - - email_logo_path = self.outdir / "assets" / f"{self.template_params['name_noslash']}_logo_light.png" - self.download_pipeline_logo(f"{logo_url}?w=600&theme=light", email_logo_path) + email_logo_path = Path(self.outdir) / "assets" + create_logo(text=self.template_params["short_name"], dir=email_logo_path, theme="light", force=self.force) for theme in ["dark", "light"]: - readme_logo_url = f"{logo_url}?w=600&theme={theme}" - readme_logo_path = ( - self.outdir / "docs" / "images" / f"{self.template_params['name_noslash']}_logo_{theme}.png" + readme_logo_path = Path(self.outdir) / "docs" / "images" + create_logo( + text=self.template_params["short_name"], dir=readme_logo_path, width=600, theme=theme, force=self.force ) - self.download_pipeline_logo(readme_logo_url, readme_logo_path) - - def download_pipeline_logo(self, url, img_fn): - """Attempt to download a logo from the website. Retry if it fails.""" - os.makedirs(os.path.dirname(img_fn), exist_ok=True) - attempt = 0 - max_attempts = 10 - retry_delay = 0 # x up to 10 each time, so first delay will be 1-100 seconds - while attempt < max_attempts: - # If retrying, wait a while - if retry_delay > 0: - log.info(f"Waiting {retry_delay} seconds before next image fetch attempt") - time.sleep(retry_delay) - - attempt += 1 - # Use a random number to avoid the template sync hitting the website simultaneously for all pipelines - retry_delay = random.randint(1, 100) * attempt - log.debug(f"Fetching logo '{img_fn}' (attempt {attempt})") - try: - # Try to fetch the logo from the website - r = requests.get(url, timeout=180) - if r.status_code != 200: - raise UserWarning(f"Got status code {r.status_code}") - # Check that the returned image looks right - - except (ConnectionError, UserWarning) as e: - # Something went wrong - try again - log.warning(e) - log.error("Connection error - retrying") - continue - - # Write the new logo to the file - with open(img_fn, "wb") as fh: - fh.write(r.content) - # Check that the file looks valid - image_type = filetype.guess(img_fn).extension - if image_type != "png": - log.error(f"Logo from the website didn't look like an image: '{image_type}'") - continue - - # Got this far, presumably it's good - break the retry loop - break def git_init_pipeline(self): """Initialises the new pipeline as a Git repository and submits first commit. 
@@ -588,8 +539,24 @@ def git_init_pipeline(self): repo.index.commit(f"initial template build from nf-core/tools, version {nf_core.__version__}") if default_branch: repo.active_branch.rename(default_branch) - repo.git.branch("TEMPLATE") - repo.git.branch("dev") + try: + repo.git.branch("TEMPLATE") + repo.git.branch("dev") + + except git.GitCommandError as e: + if "already exists" in e.stderr: + log.debug("Branches 'TEMPLATE' and 'dev' already exist") + if self.force: + log.debug("Force option set - deleting branches") + repo.git.branch("-D", "TEMPLATE") + repo.git.branch("-D", "dev") + repo.git.branch("TEMPLATE") + repo.git.branch("dev") + else: + log.error( + "Branches 'TEMPLATE' and 'dev' already exist. Use --force to overwrite existing branches." + ) + sys.exit(1) log.info( "Done. Remember to add a remote and push to GitHub:\n" f"[white on grey23] cd {self.outdir} \n" diff --git a/nf_core/create_logo.py b/nf_core/create_logo.py new file mode 100644 index 0000000000..4dfebd3712 --- /dev/null +++ b/nf_core/create_logo.py @@ -0,0 +1,110 @@ +import logging +from pathlib import Path +from typing import Union + +from PIL import Image, ImageDraw, ImageFont + +import nf_core +from nf_core.utils import NFCORE_CACHE_DIR + +log = logging.getLogger(__name__) + + +def create_logo( + text: str, + dir: Union[Path, str], + filename: str = "", + theme: str = "light", + width: int = 2300, + format: str = "png", + force: bool = False, +) -> Path: + """Create a logo for a pipeline.""" + + if not text: + raise UserWarning("Please provide the name of the text to put on the logo.") + dir = Path(dir) + if not dir.is_dir(): + log.debug(f"Creating directory {dir}") + dir.mkdir(parents=True, exist_ok=True) + assets = Path(nf_core.__file__).parent / "assets/logo" + + if format == "svg": + template_fn = "placeholder_logo.svg" + + if width != 2300: + log.warning("SVG format does not support resizing. Setting width to 2300px.") + + # replace the placeholder text with the pipeline name + with open(assets / template_fn) as fh: + svg = fh.read().replace("PLACEHOLDER", text) + if theme == "dark": + svg = svg.replace("#050505", "#fafafa") + + # save the svg + logo_filename = f"nf-core-{text}_logo_{theme}.svg" if not filename else filename + logo_filename = f"{logo_filename}.svg" if not logo_filename.lower().endswith(".svg") else logo_filename + logo_path = Path(dir, logo_filename) + with open(logo_path, "w") as fh: + fh.write(svg) + + else: + logo_filename = f"nf-core-{text}_logo_{theme}.png" if not filename else filename + logo_filename = f"{logo_filename}.png" if not logo_filename.lower().endswith(".png") else logo_filename + cache_name = f"nf-core-{text}_logo_{theme}_{width}.png" + logo_path = Path(dir, logo_filename) + + # Check if we haven't already created this logo + if logo_path.is_file() and not force: + log.info(f"Logo already exists at: {logo_path}. Use `--force` to overwrite.") + return logo_path + # cache file + cache_path = Path(NFCORE_CACHE_DIR, "logo", cache_name) + img = None + if cache_path.is_file(): + log.debug(f"Logo already exists in cache at: {cache_path}. 
Reusing this file.") + img = Image.open(str(cache_path)) + if not img: + log.debug(f"Creating logo for {text}") + + # make sure the figure fits the text + font_path = assets / "MavenPro-Bold.ttf" + log.debug(f"Using font: {str(font_path)}") + font = ImageFont.truetype(str(font_path), 400) + text_length = font.getmask(text).getbbox()[2] # get the width of the text based on the font + + max_width = max( + 2300, text_length + len(text) * 20 + ) # need to add some more space to the text length to make sure it fits + + template_fn = "nf-core-repo-logo-base-lightbg.png" + if theme == "dark": + template_fn = "nf-core-repo-logo-base-darkbg.png" + + template_path = assets / template_fn + img = Image.open(str(template_path)) + # get the height of the template image + height = img.size[1] + + # Draw text + draw = ImageDraw.Draw(img) + color = theme == "dark" and (250, 250, 250) or (5, 5, 5) + draw.text((110, 465), text, color, font=font) + + # Crop to max width + img = img.crop((0, 0, max_width, height)) + + # Resize + img = img.resize((width, int((width / max_width) * height))) + + # Save to cache + Path(cache_path.parent).mkdir(parents=True, exist_ok=True) + log.debug(f"Saving logo to cache: {cache_path}") + img.save(cache_path, "PNG") + # Save + img.save(logo_path, "PNG") + + log.debug(f"Saved logo to: {logo_path}") + + # Return the logo + return logo_path diff --git a/nf_core/download.py b/nf_core/download.py index 08bef935ba..4c0bc97f42 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -1,6 +1,5 @@ """Downloads a nf-core pipeline to the local file system.""" -from __future__ import print_function import concurrent.futures import io @@ -21,7 +20,7 @@ import rich import rich.progress from git.exc import GitCommandError, InvalidGitRepositoryError -from pkg_resources import parse_version as VersionParser +from pkg_resources import parse_version as version_parser import nf_core import nf_core.list @@ -551,7 +550,7 @@ def read_remote_containers(self): self.containers_remote = sorted(list(set(self.containers_remote))) except (FileNotFoundError, LookupError) as e: log.error(f"[red]Issue with reading the specified remote $NXF_SINGULARITY_CACHE index:[/]\n{e}\n") - if stderr.is_interactive and rich.prompt.Confirm.ask(f"[blue]Specify a new index file and try again?"): + if stderr.is_interactive and rich.prompt.Confirm.ask("[blue]Specify a new index file and try again?"): self.container_cache_index = None # reset chosen path to index file. self.prompt_singularity_cachedir_remote() else: @@ -640,7 +639,7 @@ def wf_use_local_configs(self, revision_dirname): log.debug(f"Editing 'params.custom_config_base' in '{nfconfig_fn}'") # Load the nextflow.config file into memory - with open(nfconfig_fn, "r") as nfconfig_fh: + with open(nfconfig_fn) as nfconfig_fh: nfconfig = nfconfig_fh.read() # Replace the target string @@ -700,7 +699,7 @@ def find_container_images(self, workflow_directory): if bool(config_findings_dsl2): # finding fill always be a tuple of length 2, first the quote used and second the enquoted value. 
for finding in config_findings_dsl2: - config_findings.append((finding + (self.nf_config, "Nextflow configs"))) + config_findings.append(finding + (self.nf_config, "Nextflow configs")) else: # no regex match, likely just plain string """ Append string also as finding-like tuple for consistency @@ -719,7 +718,7 @@ def find_container_images(self, workflow_directory): for file in files: if file.endswith(".nf"): file_path = os.path.join(subdir, file) - with open(file_path, "r") as fh: + with open(file_path) as fh: # Look for any lines with container "xxx" or container 'xxx' search_space = fh.read() """ @@ -744,7 +743,7 @@ def find_container_images(self, workflow_directory): for finding in local_module_findings: # append finding since we want to collect them from all modules # also append search_space because we need to start over later if nothing was found. - module_findings.append((finding + (search_space, file_path))) + module_findings.append(finding + (search_space, file_path)) # Not sure if there will ever be multiple container definitions per module, but beware DSL3. # Like above run on shallow copy, because length may change at runtime. @@ -853,7 +852,7 @@ def rectify_raw_container_matches(self, raw_findings): ['https://depot.galaxyproject.org/singularity/scanpy:1.7.2--pyhdfd78af_0', 'biocontainers/scanpy:1.7.2--pyhdfd78af_0'] """ container_value_defs = [ - capture for _, capture in container_value_defs[:] if not capture in ["singularity", "apptainer"] + capture for _, capture in container_value_defs[:] if capture not in ["singularity", "apptainer"] ] """ @@ -1066,10 +1065,10 @@ def get_singularity_images(self, current_revision=""): self.singularity_pull_image(*container, library, progress) # Pulling the image was successful, no ContainerError was raised, break the library loop break - except ContainerError.ImageExists as e: + except ContainerError.ImageExistsError: # Pulling not required break - except ContainerError.RegistryNotFound as e: + except ContainerError.RegistryNotFoundError as e: self.container_library.remove(library) # The only library was removed if not self.container_library: @@ -1079,13 +1078,13 @@ def get_singularity_images(self, current_revision=""): else: # Other libraries can be used continue - except ContainerError.ImageNotFound as e: + except ContainerError.ImageNotFoundError as e: # Try other registries if e.error_log.absolute_URI: break # there no point in trying other registries if absolute URI was specified. else: continue - except ContainerError.InvalidTag as e: + except ContainerError.InvalidTagError: # Try other registries continue except ContainerError.OtherError as e: @@ -1524,7 +1523,7 @@ def tidy_tags_and_branches(self): else: # desired revisions may contain arbitrary branch names that do not correspond to valid sematic versioning patterns. 
valid_versions = [ - VersionParser(v) + version_parser(v) for v in desired_revisions if re.match(r"\d+\.\d+(?:\.\d+)*(?:[\w\-_])*", v) ] @@ -1583,7 +1582,7 @@ def __init__(self, container, registry, address, absolute_URI, out_path, singula for line in error_msg: if re.search(r"dial\stcp.*no\ssuch\shost", line): - self.error_type = self.RegistryNotFound(self) + self.error_type = self.RegistryNotFoundError(self) break elif ( re.search(r"requested\saccess\sto\sthe\sresource\sis\sdenied", line) @@ -1595,13 +1594,13 @@ def __init__(self, container, registry, address, absolute_URI, out_path, singula # unauthorized: authentication required # Quay.io: StatusCode: 404, \n'] # ghcr.io: Requesting bearer token: invalid status code from registry 400 (Bad Request) - self.error_type = self.ImageNotFound(self) + self.error_type = self.ImageNotFoundError(self) break elif re.search(r"manifest\sunknown", line): - self.error_type = self.InvalidTag(self) + self.error_type = self.InvalidTagError(self) break elif re.search(r"Image\sfile\salready\sexists", line): - self.error_type = self.ImageExists(self) + self.error_type = self.ImageExistsError(self) break else: continue @@ -1615,7 +1614,7 @@ def __init__(self, container, registry, address, absolute_URI, out_path, singula raise self.error_type - class RegistryNotFound(ConnectionRefusedError): + class RegistryNotFoundError(ConnectionRefusedError): """The specified registry does not resolve to a valid IP address""" def __init__(self, error_log): @@ -1628,7 +1627,7 @@ def __init__(self, error_log): ) super().__init__(self.message, self.helpmessage, self.error_log) - class ImageNotFound(FileNotFoundError): + class ImageNotFoundError(FileNotFoundError): """The image can not be found in the registry""" def __init__(self, error_log): @@ -1644,7 +1643,7 @@ def __init__(self, error_log): super().__init__(self.message) - class InvalidTag(AttributeError): + class InvalidTagError(AttributeError): """Image and registry are valid, but the (version) tag is not""" def __init__(self, error_log): @@ -1653,7 +1652,7 @@ def __init__(self, error_log): self.helpmessage = f'Please chose a different library than {self.error_log.registry}\nor try to locate the "{self.error_log.address.split(":")[-1]}" version of "{self.error_log.container}" manually.\nPlease troubleshoot the command \n"{" ".join(self.error_log.singularity_command)}" manually.\n' super().__init__(self.message) - class ImageExists(FileExistsError): + class ImageExistsError(FileExistsError): """Image already exists in cache/output directory.""" def __init__(self, error_log): diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index e721f210d0..ad4bed5052 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -47,19 +47,23 @@ RUN conda config --add channels defaults && \ conda config --add channels conda-forge && \ conda config --set channel_priority strict && \ conda install --quiet --yes --name base \ - mamba \ - nextflow \ - nf-core \ - nf-test \ - black \ - prettier \ - pre-commit \ - openjdk \ - pytest-workflow && \ + mamba \ + nextflow \ + nf-core \ + nf-test \ + prettier \ + pre-commit \ + ruff \ + openjdk \ + pytest-workflow && \ conda clean --all --force-pkgs-dirs --yes # Update Nextflow RUN nextflow self-update # Install nf-core -RUN python -m pip install . +RUN python -m pip install . 
--no-cache-dir + +# Setup pdiff for nf-test diffs +RUN export NFT_DIFF="pdiff" && \ + export NFT_DIFF_ARGS="--line-numbers --expand-tabs=2" diff --git a/nf_core/launch.py b/nf_core/launch.py index 363506c448..25bb4c150c 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -1,6 +1,5 @@ """ Launch a pipeline, interactively collecting params """ -from __future__ import print_function import copy import json @@ -428,7 +427,7 @@ def prompt_param(self, param_id, param_obj, is_required, answers): answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style) # If required and got an empty reponse, ask again - while type(answer[param_id]) is str and answer[param_id].strip() == "" and is_required: + while isinstance(answer[param_id], str) and answer[param_id].strip() == "" and is_required: log.error(f"'--{param_id}' is required") answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style) @@ -546,14 +545,14 @@ def single_param_to_questionary(self, param_id, param_obj, answers=None, print_h # Start with the default from the param object if "default" in param_obj: # Boolean default is cast back to a string later - this just normalises all inputs - if param_obj["type"] == "boolean" and type(param_obj["default"]) is str: + if param_obj["type"] == "boolean" and isinstance(param_obj["default"], str): question["default"] = param_obj["default"].lower() == "true" else: question["default"] = param_obj["default"] # Overwrite default with parsed schema, includes --params-in etc if self.schema_obj is not None and param_id in self.schema_obj.input_params: - if param_obj["type"] == "boolean" and type(self.schema_obj.input_params[param_id]) is str: + if param_obj["type"] == "boolean" and isinstance(self.schema_obj.input_params[param_id], str): question["default"] = "true" == self.schema_obj.input_params[param_id].lower() else: question["default"] = self.schema_obj.input_params[param_id] diff --git a/nf_core/licences.py b/nf_core/licences.py index d686a56178..a8a35334dd 100644 --- a/nf_core/licences.py +++ b/nf_core/licences.py @@ -1,6 +1,5 @@ """Lists software licences for a given workflow.""" -from __future__ import print_function import json import logging diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py index 797ebbcc91..4f7657aec5 100644 --- a/nf_core/lint/__init__.py +++ b/nf_core/lint/__init__.py @@ -20,6 +20,7 @@ import nf_core.lint_utils import nf_core.modules.lint +import nf_core.subworkflows.lint import nf_core.utils from nf_core import __version__ from nf_core.lint_utils import console @@ -53,6 +54,8 @@ def run_linting( Returns: An object of type :class:`PipelineLint` that contains all the linting results. + An object of type :class:`ComponentLint` that contains all the linting results for the modules. + An object of type :class:`ComponentLint` that contains all the linting results for the subworkflows. 
""" # Verify that the requested tests exist @@ -87,6 +90,11 @@ def run_linting( # Create the modules lint object module_lint_obj = nf_core.modules.lint.ModuleLint(pipeline_dir, hide_progress=hide_progress) + # Create the subworkflows lint object + try: + subworkflow_lint_obj = nf_core.subworkflows.lint.SubworkflowLint(pipeline_dir, hide_progress=hide_progress) + except LookupError: + subworkflow_lint_obj = None # Verify that the pipeline is correctly configured and has a modules.json file module_lint_obj.has_valid_directory() @@ -98,13 +106,24 @@ def run_linting( module_lint_tests = list( set(key).intersection(set(nf_core.modules.lint.ModuleLint.get_all_module_lint_tests(is_pipeline=True))) ) + # Select only the subworkflow lint tests + subworkflow_lint_tests = list( + set(key).intersection( + set(nf_core.subworkflows.lint.SubworkflowLint.get_all_subworkflow_lint_tests(is_pipeline=True)) + ) + ) else: # If no key is supplied, run the default modules tests module_lint_tests = ("module_changes", "module_version") + subworkflow_lint_tests = ("subworkflow_changes", "subworkflow_version") module_lint_obj.filter_tests_by_key(module_lint_tests) + if subworkflow_lint_obj is not None: + subworkflow_lint_obj.filter_tests_by_key(subworkflow_lint_tests) - # Set up files for modules linting test + # Set up files for component linting test module_lint_obj.set_up_pipeline_files() + if subworkflow_lint_obj is not None: + subworkflow_lint_obj.set_up_pipeline_files() # Run the pipeline linting tests try: @@ -119,11 +138,19 @@ def run_linting( module_lint_obj.lint_modules(module_lint_obj.all_local_components, local=True) if len(module_lint_obj.all_remote_components) > 0: module_lint_obj.lint_modules(module_lint_obj.all_remote_components, local=False) + # Run the subworkflows lint tests + if subworkflow_lint_obj is not None: + if len(subworkflow_lint_obj.all_local_components) > 0: + subworkflow_lint_obj.lint_subworkflows(subworkflow_lint_obj.all_local_components, local=True) + if len(subworkflow_lint_obj.all_remote_components) > 0: + subworkflow_lint_obj.lint_subworkflows(subworkflow_lint_obj.all_remote_components, local=False) # Print the results lint_obj._print_results(show_passed) module_lint_obj._print_results(show_passed, sort_by=sort_by) - nf_core.lint_utils.print_joint_summary(lint_obj, module_lint_obj) + if subworkflow_lint_obj is not None: + subworkflow_lint_obj._print_results(show_passed, sort_by=sort_by) + nf_core.lint_utils.print_joint_summary(lint_obj, module_lint_obj, subworkflow_lint_obj) nf_core.lint_utils.print_fixes(lint_obj) # Save results to Markdown file @@ -142,7 +169,7 @@ def run_linting( if release_mode: log.info("Reminder: Lint tests were run in --release mode.") - return lint_obj, module_lint_obj + return lint_obj, module_lint_obj, subworkflow_lint_obj class PipelineLint(nf_core.utils.Pipeline): diff --git a/nf_core/lint/actions_awsfulltest.py b/nf_core/lint/actions_awsfulltest.py index e8e1c951b1..66aa3f99bf 100644 --- a/nf_core/lint/actions_awsfulltest.py +++ b/nf_core/lint/actions_awsfulltest.py @@ -32,7 +32,7 @@ def actions_awsfulltest(self): fn = os.path.join(self.wf_path, ".github", "workflows", "awsfulltest.yml") if os.path.isfile(fn): try: - with open(fn, "r") as fh: + with open(fn) as fh: wf = yaml.safe_load(fh) except Exception as e: return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} diff --git a/nf_core/lint/actions_awstest.py b/nf_core/lint/actions_awstest.py index ccdf0abf6a..7c55998944 100644 --- a/nf_core/lint/actions_awstest.py +++ 
b/nf_core/lint/actions_awstest.py @@ -27,7 +27,7 @@ def actions_awstest(self): return {"ignored": [f"'awstest.yml' workflow not found: `{fn}`"]} try: - with open(fn, "r") as fh: + with open(fn) as fh: wf = yaml.safe_load(fh) except Exception as e: return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} diff --git a/nf_core/lint/actions_ci.py b/nf_core/lint/actions_ci.py index e669eceb8c..a3e7d54b66 100644 --- a/nf_core/lint/actions_ci.py +++ b/nf_core/lint/actions_ci.py @@ -1,5 +1,4 @@ import os -import re import yaml @@ -48,7 +47,7 @@ def actions_ci(self): return {"ignored": ["'.github/workflows/ci.yml' not found"]} try: - with open(fn, "r") as fh: + with open(fn) as fh: ciwf = yaml.safe_load(fh) except Exception as e: return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} @@ -62,7 +61,7 @@ def actions_ci(self): if not ( pr_subtree is None or ("branches" in pr_subtree and "dev" in pr_subtree["branches"]) - or ("ignore_branches" in pr_subtree and not "dev" in pr_subtree["ignore_branches"]) + or ("ignore_branches" in pr_subtree and "dev" not in pr_subtree["ignore_branches"]) ): raise AssertionError() if "published" not in ciwf[True]["release"]["types"]: diff --git a/nf_core/lint/actions_schema_validation.py b/nf_core/lint/actions_schema_validation.py index 9d49b84c6b..fa4471d98c 100644 --- a/nf_core/lint/actions_schema_validation.py +++ b/nf_core/lint/actions_schema_validation.py @@ -36,7 +36,7 @@ def actions_schema_validation(self): # load workflow try: - with open(wf_path, "r") as fh: + with open(wf_path) as fh: wf_json = yaml.safe_load(fh) except Exception as e: failed.append(f"Could not parse yaml file: {wf}, {e}") diff --git a/nf_core/lint/files_exist.py b/nf_core/lint/files_exist.py index 117704d1f1..78a744a4d7 100644 --- a/nf_core/lint/files_exist.py +++ b/nf_core/lint/files_exist.py @@ -1,5 +1,6 @@ import logging -import os +from pathlib import Path +from typing import Union log = logging.getLogger(__name__) @@ -51,7 +52,6 @@ def files_exist(self): docs/output.md docs/README.md docs/usage.md - lib/nfcore_external_java_deps.jar lib/NfcoreTemplate.groovy lib/Utils.groovy lib/WorkflowMain.groovy @@ -98,6 +98,12 @@ def files_exist(self): .travis.yml + Files that *must not* be present if a certain entry is present in ``nextflow.config``: + + .. code-block:: bash + + lib/nfcore_external_java_deps.jar # if "nf-validation" is in nextflow.config + .. tip:: You can configure the ``nf-core lint`` tests to ignore any of these checks by setting the ``files_exist`` key as follows in your ``.nf-core.yml`` config file. 
For example: @@ -132,48 +138,46 @@ def files_exist(self): ["CHANGELOG.md"], ["CITATIONS.md"], ["CODE_OF_CONDUCT.md"], - ["CODE_OF_CONDUCT.md"], ["LICENSE", "LICENSE.md", "LICENCE", "LICENCE.md"], # NB: British / American spelling ["nextflow_schema.json"], ["nextflow.config"], ["README.md"], - [os.path.join(".github", ".dockstore.yml")], - [os.path.join(".github", "CONTRIBUTING.md")], - [os.path.join(".github", "ISSUE_TEMPLATE", "bug_report.yml")], - [os.path.join(".github", "ISSUE_TEMPLATE", "config.yml")], - [os.path.join(".github", "ISSUE_TEMPLATE", "feature_request.yml")], - [os.path.join(".github", "PULL_REQUEST_TEMPLATE.md")], - [os.path.join(".github", "workflows", "branch.yml")], - [os.path.join(".github", "workflows", "ci.yml")], - [os.path.join(".github", "workflows", "linting_comment.yml")], - [os.path.join(".github", "workflows", "linting.yml")], - [os.path.join("assets", "email_template.html")], - [os.path.join("assets", "email_template.txt")], - [os.path.join("assets", "sendmail_template.txt")], - [os.path.join("assets", f"nf-core-{short_name}_logo_light.png")], - [os.path.join("conf", "modules.config")], - [os.path.join("conf", "test.config")], - [os.path.join("conf", "test_full.config")], - [os.path.join("docs", "images", f"nf-core-{short_name}_logo_light.png")], - [os.path.join("docs", "images", f"nf-core-{short_name}_logo_dark.png")], - [os.path.join("docs", "output.md")], - [os.path.join("docs", "README.md")], - [os.path.join("docs", "README.md")], - [os.path.join("docs", "usage.md")], - [os.path.join("lib", "nfcore_external_java_deps.jar")], - [os.path.join("lib", "NfcoreTemplate.groovy")], - [os.path.join("lib", "Utils.groovy")], - [os.path.join("lib", "WorkflowMain.groovy")], + [Path(".github", ".dockstore.yml")], + [Path(".github", "CONTRIBUTING.md")], + [Path(".github", "ISSUE_TEMPLATE", "bug_report.yml")], + [Path(".github", "ISSUE_TEMPLATE", "config.yml")], + [Path(".github", "ISSUE_TEMPLATE", "feature_request.yml")], + [Path(".github", "PULL_REQUEST_TEMPLATE.md")], + [Path(".github", "workflows", "branch.yml")], + [Path(".github", "workflows", "ci.yml")], + [Path(".github", "workflows", "linting_comment.yml")], + [Path(".github", "workflows", "linting.yml")], + [Path("assets", "email_template.html")], + [Path("assets", "email_template.txt")], + [Path("assets", "sendmail_template.txt")], + [Path("assets", f"nf-core-{short_name}_logo_light.png")], + [Path("conf", "modules.config")], + [Path("conf", "test.config")], + [Path("conf", "test_full.config")], + [Path("docs", "images", f"nf-core-{short_name}_logo_light.png")], + [Path("docs", "images", f"nf-core-{short_name}_logo_dark.png")], + [Path("docs", "output.md")], + [Path("docs", "README.md")], + [Path("docs", "README.md")], + [Path("docs", "usage.md")], + [Path("lib", "NfcoreTemplate.groovy")], + [Path("lib", "Utils.groovy")], + [Path("lib", "WorkflowMain.groovy")], ] files_warn = [ ["main.nf"], - [os.path.join("assets", "multiqc_config.yml")], - [os.path.join("conf", "base.config")], - [os.path.join("conf", "igenomes.config")], - [os.path.join(".github", "workflows", "awstest.yml")], - [os.path.join(".github", "workflows", "awsfulltest.yml")], - [os.path.join("lib", f"Workflow{short_name[0].upper()}{short_name[1:]}.groovy")], + [Path("assets", "multiqc_config.yml")], + [Path("conf", "base.config")], + [Path("conf", "igenomes.config")], + [Path(".github", "workflows", "awstest.yml")], + [Path(".github", "workflows", "awsfulltest.yml")], + [Path("lib", f"Workflow{short_name[0].upper()}{short_name[1:]}.groovy")], 
["modules.json"], ["pyproject.toml"], ] @@ -184,45 +188,47 @@ def files_exist(self): "parameters.settings.json", "pipeline_template.yml", # saving information in .nf-core.yml ".nf-core.yaml", # yml not yaml - os.path.join("bin", "markdown_to_html.r"), - os.path.join("conf", "aws.config"), - os.path.join(".github", "workflows", "push_dockerhub.yml"), - os.path.join(".github", "ISSUE_TEMPLATE", "bug_report.md"), - os.path.join(".github", "ISSUE_TEMPLATE", "feature_request.md"), - os.path.join("docs", "images", f"nf-core-{short_name}_logo.png"), + Path("bin", "markdown_to_html.r"), + Path("conf", "aws.config"), + Path(".github", "workflows", "push_dockerhub.yml"), + Path(".github", "ISSUE_TEMPLATE", "bug_report.md"), + Path(".github", "ISSUE_TEMPLATE", "feature_request.md"), + Path("docs", "images", f"nf-core-{short_name}_logo.png"), ".markdownlint.yml", ".yamllint.yml", - os.path.join("lib", "Checks.groovy"), - os.path.join("lib", "Completion.groovy"), - os.path.join("lib", "Workflow.groovy"), + Path("lib", "Checks.groovy"), + Path("lib", "Completion.groovy"), + Path("lib", "Workflow.groovy"), ] files_warn_ifexists = [".travis.yml"] + files_fail_ifinconfig = [[Path("lib", "nfcore_external_java_deps.jar"), "nf-validation"]] # Remove files that should be ignored according to the linting config ignore_files = self.lint_config.get("files_exist", []) + log.info(f"Files to ignore: {ignore_files}") - def pf(file_path): - return os.path.join(self.wf_path, file_path) + def pf(file_path: Union[str, Path]) -> Path: + return Path(self.wf_path, file_path) # First - critical files. Check that this is actually a Nextflow pipeline - if not os.path.isfile(pf("nextflow.config")) and not os.path.isfile(pf("main.nf")): + if not pf("nextflow.config").is_file() and not pf("main.nf").is_file(): failed.append("File not found: nextflow.config or main.nf") raise AssertionError("Neither nextflow.config or main.nf found! 
Is this a Nextflow pipeline?") # Files that cause an error if they don't exist for files in files_fail: - if any([f in ignore_files for f in files]): + if any([str(f) in ignore_files for f in files]): continue - if any([os.path.isfile(pf(f)) for f in files]): + if any([pf(f).is_file() for f in files]): passed.append(f"File found: {self._wrap_quotes(files)}") else: failed.append(f"File not found: {self._wrap_quotes(files)}") # Files that cause a warning if they don't exist for files in files_warn: - if any([f in ignore_files for f in files]): + if any([str(f) in ignore_files for f in files]): continue - if any([os.path.isfile(pf(f)) for f in files]): + if any([pf(f).is_file() for f in files]): passed.append(f"File found: {self._wrap_quotes(files)}") else: warned.append(f"File not found: {self._wrap_quotes(files)}") @@ -231,16 +237,32 @@ def pf(file_path): for file in files_fail_ifexists: if file in ignore_files: continue - if os.path.isfile(pf(file)): + if pf(file).is_file(): failed.append(f"File must be removed: {self._wrap_quotes(file)}") else: passed.append(f"File not found check: {self._wrap_quotes(file)}") - + # Files that cause an error if they exists together with a certain entry in nextflow.config + for file in files_fail_ifinconfig: + if str(file[0]) in ignore_files: + continue + nextflow_config = pf("nextflow.config") + in_config = False + with open(nextflow_config) as f: + if file[1] in f.read(): + in_config = True + if pf(file[0]).is_file() and in_config: + failed.append(f"File must be removed: {self._wrap_quotes(file[0])}") + elif pf(file[0]).is_file() and not in_config: + passed.append(f"File found check: {self._wrap_quotes(file[0])}") + elif not pf(file[0]).is_file() and not in_config: + failed.append(f"File not found check: {self._wrap_quotes(file[0])}") + elif not pf(file[0]).is_file() and in_config: + passed.append(f"File not found check: {self._wrap_quotes(file[0])}") # Files that cause a warning if they exist for file in files_warn_ifexists: if file in ignore_files: continue - if os.path.isfile(pf(file)): + if pf(file).is_file(): warned.append(f"File should be removed: {self._wrap_quotes(file)}") else: passed.append(f"File not found check: {self._wrap_quotes(file)}") diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py index 2b64d62638..176b0e9e65 100644 --- a/nf_core/lint/files_unchanged.py +++ b/nf_core/lint/files_unchanged.py @@ -1,8 +1,9 @@ import filecmp import logging -import os import shutil import tempfile +from pathlib import Path +from typing import Union import yaml @@ -39,7 +40,6 @@ def files_unchanged(self): docs/images/nf-core-PIPELINE_logo_light.png docs/images/nf-core-PIPELINE_logo_dark.png docs/README.md' - lib/nfcore_external_java_deps.jar lib/NfcoreTemplate.groovy ['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling @@ -49,6 +49,10 @@ def files_unchanged(self): .prettierignore pyproject.toml + Files that need to be there or not based on a entry in nextflow config:: + + lib/nfcore_external_java_deps.jar # if config doesn't mention nf-validation + .. tip:: You can configure the ``nf-core lint`` tests to ignore any of these checks by setting the ``files_unchanged`` key as follows in your ``.nf-core.yml`` config file. 
For example: @@ -87,28 +91,30 @@ def files_unchanged(self): [".prettierrc.yml"], ["CODE_OF_CONDUCT.md"], ["LICENSE", "LICENSE.md", "LICENCE", "LICENCE.md"], # NB: British / American spelling - [os.path.join(".github", ".dockstore.yml")], - [os.path.join(".github", "CONTRIBUTING.md")], - [os.path.join(".github", "ISSUE_TEMPLATE", "bug_report.yml")], - [os.path.join(".github", "ISSUE_TEMPLATE", "config.yml")], - [os.path.join(".github", "ISSUE_TEMPLATE", "feature_request.yml")], - [os.path.join(".github", "PULL_REQUEST_TEMPLATE.md")], - [os.path.join(".github", "workflows", "branch.yml")], - [os.path.join(".github", "workflows", "linting_comment.yml")], - [os.path.join(".github", "workflows", "linting.yml")], - [os.path.join("assets", "email_template.html")], - [os.path.join("assets", "email_template.txt")], - [os.path.join("assets", "sendmail_template.txt")], - [os.path.join("assets", f"nf-core-{short_name}_logo_light.png")], - [os.path.join("docs", "images", f"nf-core-{short_name}_logo_light.png")], - [os.path.join("docs", "images", f"nf-core-{short_name}_logo_dark.png")], - [os.path.join("docs", "README.md")], - [os.path.join("lib", "nfcore_external_java_deps.jar")], - [os.path.join("lib", "NfcoreTemplate.groovy")], + [Path(".github", ".dockstore.yml")], + [Path(".github", "CONTRIBUTING.md")], + [Path(".github", "ISSUE_TEMPLATE", "bug_report.yml")], + [Path(".github", "ISSUE_TEMPLATE", "config.yml")], + [Path(".github", "ISSUE_TEMPLATE", "feature_request.yml")], + [Path(".github", "PULL_REQUEST_TEMPLATE.md")], + [Path(".github", "workflows", "branch.yml")], + [Path(".github", "workflows", "linting_comment.yml")], + [Path(".github", "workflows", "linting.yml")], + [Path("assets", "email_template.html")], + [Path("assets", "email_template.txt")], + [Path("assets", "sendmail_template.txt")], + [Path("assets", f"nf-core-{short_name}_logo_light.png")], + [Path("docs", "images", f"nf-core-{short_name}_logo_light.png")], + [Path("docs", "images", f"nf-core-{short_name}_logo_dark.png")], + [Path("docs", "README.md")], + [Path("lib", "NfcoreTemplate.groovy")], ] files_partial = [ [".gitignore", ".prettierignore", "pyproject.toml"], ] + files_conditional = [ + [Path("lib", "nfcore_external_java_deps.jar"), {"plugins": "nf_validation"}], + ] # Only show error messages from pipeline creation logging.getLogger("nf_core.create").setLevel(logging.ERROR) @@ -124,24 +130,24 @@ def files_unchanged(self): "prefix": prefix, } - template_yaml_path = os.path.join(tmp_dir, "template.yaml") + template_yaml_path = Path(tmp_dir, "template.yaml") with open(template_yaml_path, "w") as fh: yaml.dump(template_yaml, fh, default_flow_style=False) - test_pipeline_dir = os.path.join(tmp_dir, f"{prefix}-{short_name}") + test_pipeline_dir = Path(tmp_dir, f"{prefix}-{short_name}") create_obj = nf_core.create.PipelineCreate( None, None, None, no_git=True, outdir=test_pipeline_dir, template_yaml_path=template_yaml_path ) create_obj.init_pipeline() # Helper functions for file paths - def _pf(file_path): + def _pf(file_path: Union[str, Path]) -> Path: """Helper function - get file path for pipeline file""" - return os.path.join(self.wf_path, file_path) + return Path(self.wf_path, file_path) - def _tf(file_path): + def _tf(file_path: Union[str, Path]) -> Path: """Helper function - get file path for template file""" - return os.path.join(test_pipeline_dir, file_path) + return Path(test_pipeline_dir, file_path) # Files that must be completely unchanged from template for files in files_exact: @@ -151,7 +157,7 @@ def _tf(file_path): 
ignored.append(f"File ignored due to lint config: {self._wrap_quotes(files)}") # Ignore if we can't find the file - elif not any([os.path.isfile(_pf(f)) for f in files]): + elif not any([_pf(f).is_file() for f in files]): ignored.append(f"File does not exist: {self._wrap_quotes(files)}") # Check that the file has an identical match @@ -180,23 +186,23 @@ def _tf(file_path): ignored.append(f"File ignored due to lint config: {self._wrap_quotes(files)}") # Ignore if we can't find the file - elif not any([os.path.isfile(_pf(f)) for f in files]): + elif not any([_pf(f).is_file() for f in files]): ignored.append(f"File does not exist: {self._wrap_quotes(files)}") # Check that the file contains the template file contents else: for f in files: try: - with open(_pf(f), "r") as fh: + with open(_pf(f)) as fh: pipeline_file = fh.read() - with open(_tf(f), "r") as fh: + with open(_tf(f)) as fh: template_file = fh.read() if template_file in pipeline_file: passed.append(f"`{f}` matches the template") else: if "files_unchanged" in self.fix: # Try to fix the problem by overwriting the pipeline file - with open(_tf(f), "r") as fh: + with open(_tf(f)) as fh: template_file = fh.read() with open(_pf(f), "w") as fh: fh.write(template_file) @@ -208,6 +214,39 @@ def _tf(file_path): except FileNotFoundError: pass + # Files that should be there only if an entry in nextflow config is not set + for files in files_conditional: + # Ignore if file specified in linting config + ignore_files = self.lint_config.get("files_unchanged", []) + if files[0] in ignore_files: + ignored.append(f"File ignored due to lint config: {self._wrap_quotes(files)}") + + # Ignore if we can't find the file + elif _pf(files[0]).is_file(): + ignored.append(f"File does not exist: {self._wrap_quotes(files[0])}") + + # Check that the file has an identical match + else: + config_key, config_value = list(files[1].items())[0] + if config_key in self.nf_config and self.nf_config[config_key] == config_value: + # Ignore if the config key is set to the expected value + ignored.append(f"File ignored due to config: {self._wrap_quotes(files)}") + else: + try: + if filecmp.cmp(_pf(files[0]), _tf(files[0]), shallow=True): + passed.append(f"`{files[0]}` matches the template") + else: + if "files_unchanged" in self.fix: + # Try to fix the problem by overwriting the pipeline file + shutil.copy(_tf(files[0]), _pf(files[0])) + passed.append(f"`{files[0]}` matches the template") + fixed.append(f"`{files[0]}` overwritten with template file") + else: + failed.append(f"`{files[0]}` does not match the template") + could_fix = True + except FileNotFoundError: + pass + # cleaning up temporary dir shutil.rmtree(tmp_dir) diff --git a/nf_core/lint/merge_markers.py b/nf_core/lint/merge_markers.py index f33a5095d8..80cd655066 100644 --- a/nf_core/lint/merge_markers.py +++ b/nf_core/lint/merge_markers.py @@ -1,5 +1,4 @@ import fnmatch -import io import logging import os @@ -14,6 +13,17 @@ def merge_markers(self): This test looks for remaining merge markers in the code, e.g.: >>>>>>> or <<<<<<< + .. tip:: You can choose to ignore this lint tests by editing the file called + ``.nf-core.yml`` in the root of your pipeline and setting the test to false: + .. code-block:: yaml + lint: + merge_markers: False + To disable this test only for specific files, you can specify a list of file paths to ignore. + For example, to ignore a pdf you added to the docs: + .. 
code-block:: yaml + lint: + merge_markers: + - docs/my_pdf.pdf """ passed = [] failed = [] @@ -23,9 +33,9 @@ def merge_markers(self): ignore = [".git"] if os.path.isfile(os.path.join(self.wf_path, ".gitignore")): - with io.open(os.path.join(self.wf_path, ".gitignore"), "rt", encoding="latin1") as fh: - for l in fh: - ignore.append(os.path.basename(l.strip().rstrip("/"))) + with open(os.path.join(self.wf_path, ".gitignore"), encoding="latin1") as fh: + for line in fh: + ignore.append(os.path.basename(line.strip().rstrip("/"))) for root, dirs, files in os.walk(self.wf_path, topdown=True): # Ignore files for i_base in ignore: @@ -41,12 +51,12 @@ def merge_markers(self): if nf_core.utils.is_file_binary(os.path.join(root, fname)): continue try: - with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh: - for l in fh: - if ">>>>>>>" in l: - failed.append(f"Merge marker '>>>>>>>' in `{os.path.join(root, fname)}`: {l[:30]}") - if "<<<<<<<" in l: - failed.append(f"Merge marker '<<<<<<<' in `{os.path.join(root, fname)}`: {l[:30]}") + with open(os.path.join(root, fname), encoding="latin1") as fh: + for line in fh: + if ">>>>>>>" in line: + failed.append(f"Merge marker '>>>>>>>' in `{os.path.join(root, fname)}`: {line[:30]}") + if "<<<<<<<" in line: + failed.append(f"Merge marker '<<<<<<<' in `{os.path.join(root, fname)}`: {line[:30]}") except FileNotFoundError: log.debug(f"Could not open file {os.path.join(root, fname)} in merge_markers lint test") if len(failed) == 0: diff --git a/nf_core/lint/multiqc_config.py b/nf_core/lint/multiqc_config.py index cbbeae07a8..b2f1a89a1b 100644 --- a/nf_core/lint/multiqc_config.py +++ b/nf_core/lint/multiqc_config.py @@ -34,7 +34,7 @@ def multiqc_config(self) -> Dict[str, List[str]]: return {"ignored": ["'assets/multiqc_config.yml' not found"]} try: - with open(fn, "r") as fh: + with open(fn) as fh: mqc_yml = yaml.safe_load(fh) except Exception as e: return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index 24f1e5c12f..1e0a6c4995 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -1,6 +1,9 @@ import logging import os import re +from pathlib import Path + +from nf_core.schema import PipelineSchema log = logging.getLogger(__name__) @@ -113,6 +116,18 @@ def nextflow_config(self): * A ``test`` configuration profile should exist. + **The default values in ``nextflow.config`` should match the default values defined in the ``nextflow_schema.json``.** + + .. tip:: You can choose to ignore tests for the default value of an specific parameter + by creating a file called ``.nf-core.yml`` in the root of your pipeline and creating + a list the config parameters that should be ignored. For example to ignore the default value for the input parameter: + + .. 
code-block:: yaml + + lint: + nextflow_config: + - config_defaults: + - params.input """ passed = [] warned = [] @@ -300,7 +315,7 @@ def nextflow_config(self): ] path = os.path.join(self.wf_path, "nextflow.config") i = 0 - with open(path, "r") as f: + with open(path) as f: for line in f: if lines[i] in line: i += 1 @@ -320,7 +335,7 @@ def nextflow_config(self): ) # Check for the availability of the "test" configuration profile by parsing nextflow.config - with open(os.path.join(self.wf_path, "nextflow.config"), "r") as f: + with open(os.path.join(self.wf_path, "nextflow.config")) as f: content = f.read() # Remove comments @@ -347,4 +362,40 @@ def nextflow_config(self): else: failed.append("nextflow.config does not contain configuration profile `test`") + # Check that the default values in nextflow.config match the default values defined in the nextflow_schema.json + ignore_defaults = [] + for item in ignore_configs: + if isinstance(item, dict) and "config_defaults" in item: + ignore_defaults = item.get("config_defaults", []) + schema_path = Path(self.wf_path) / "nextflow_schema.json" + schema = PipelineSchema() + schema.schema_filename = schema_path + schema.no_prompts = True + schema.load_schema() + schema.get_schema_defaults() # Get default values from schema + self.nf_config.keys() # Params in nextflow.config + for param_name in schema.schema_defaults.keys(): + param = "params." + param_name + # Convert booleans to strings if needed + schema_default = ( + "true" + if str(schema.schema_defaults[param_name]) == "True" + else "false" + if str(schema.schema_defaults[param_name]) == "False" + else str(schema.schema_defaults[param_name]) + ) + if param in ignore_defaults: + ignored.append(f"Config default ignored: {param}") + elif param in self.nf_config.keys(): + if str(self.nf_config[param]) == schema_default: + passed.append(f"Config default value correct: {param}") + else: + failed.append( + f"Config default value incorrect: `{param}` is set as {self._wrap_quotes(schema_default)} in `nextflow_schema.json` but is {self._wrap_quotes(self.nf_config[param])} in `nextflow.config`." + ) + else: + failed.append( + f"Default value from the Nextflow schema '{param} = {self._wrap_quotes(schema_default)}' not found in `nextflow.config`." 
+ ) + return {"passed": passed, "warned": warned, "failed": failed, "ignored": ignored} diff --git a/nf_core/lint/pipeline_todos.py b/nf_core/lint/pipeline_todos.py index 890e227fa1..ba6ec79150 100644 --- a/nf_core/lint/pipeline_todos.py +++ b/nf_core/lint/pipeline_todos.py @@ -1,5 +1,4 @@ import fnmatch -import io import logging import os @@ -41,9 +40,9 @@ def pipeline_todos(self, root_dir=None): ignore = [".git"] if os.path.isfile(os.path.join(root_dir, ".gitignore")): - with io.open(os.path.join(root_dir, ".gitignore"), "rt", encoding="latin1") as fh: - for l in fh: - ignore.append(os.path.basename(l.strip().rstrip("/"))) + with open(os.path.join(root_dir, ".gitignore"), encoding="latin1") as fh: + for line in fh: + ignore.append(os.path.basename(line.strip().rstrip("/"))) for root, dirs, files in os.walk(root_dir, topdown=True): # Ignore files for i_base in ignore: @@ -52,18 +51,18 @@ def pipeline_todos(self, root_dir=None): files[:] = [f for f in files if not fnmatch.fnmatch(os.path.join(root, f), i)] for fname in files: try: - with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh: - for l in fh: - if "TODO nf-core" in l: - l = ( - l.replace("", "") .replace("# TODO nf-core: ", "") .replace("// TODO nf-core: ", "") .replace("TODO nf-core: ", "") .strip() ) - warned.append(f"TODO string in `{fname}`: _{l}_") + warned.append(f"TODO string in `{fname}`: _{line}_") file_paths.append(os.path.join(root, fname)) except FileNotFoundError: log.debug(f"Could not open file {fname} in pipeline_todos lint test") diff --git a/nf_core/lint/readme.py b/nf_core/lint/readme.py index 55060442b1..cade9ca3ea 100644 --- a/nf_core/lint/readme.py +++ b/nf_core/lint/readme.py @@ -31,7 +31,7 @@ def readme(self): # Remove field that should be ignored according to the linting config ignore_configs = self.lint_config.get("readme", []) - with open(os.path.join(self.wf_path, "README.md"), "r") as fh: + with open(os.path.join(self.wf_path, "README.md")) as fh: content = fh.read() if "nextflow_badge" not in ignore_configs: diff --git a/nf_core/lint/system_exit.py b/nf_core/lint/system_exit.py index 56a526d97b..435a2452d0 100644 --- a/nf_core/lint/system_exit.py +++ b/nf_core/lint/system_exit.py @@ -25,9 +25,9 @@ def system_exit(self): for file in to_check: try: with file.open() as fh: - for i, l in enumerate(fh.readlines(), start=1): - if "System.exit" in l and not "System.exit(0)" in l: - warned.append(f"`System.exit` in {file.name}: _{l.strip()}_ [line {i}]") + for i, line in enumerate(fh.readlines(), start=1): + if "System.exit" in line and "System.exit(0)" not in line: + warned.append(f"`System.exit` in {file.name}: _{line.strip()}_ [line {i}]") except FileNotFoundError: log.debug(f"Could not open file {file.name} in system_exit lint test") diff --git a/nf_core/lint/template_strings.py b/nf_core/lint/template_strings.py index fb1f0f32e5..9b015bc209 100644 --- a/nf_core/lint/template_strings.py +++ b/nf_core/lint/template_strings.py @@ -1,4 +1,3 @@ -import io import mimetypes import re @@ -17,24 +16,47 @@ def template_strings(self): This test ignores any double-brackets prefixed with a dollar sign, such as ``${{ secrets.AWS_ACCESS_KEY_ID }}`` as these placeholders are used in GitHub Actions workflows. + + .. tip:: You can choose to ignore lint test tests by editing the file called + ``.nf-core.yml`` in the root of your pipeline and setting the test to false: + + .. 
code-block:: yaml + + lint: + template_strings: False + + To disable this test only for specific files, you can specify a list of file paths to ignore. + For example, to ignore a pdf you added to the docs: + + .. code-block:: yaml + + lint: + template_strings: + - docs/my_pdf.pdf """ passed = [] failed = [] + ignored = [] + # Files that should be ignored according to the linting config + ignore_files = self.lint_config.get("template_strings", []) # Loop through files, searching for string num_matches = 0 for fn in self.files: + if str(fn.relative_to(self.wf_path)) in ignore_files: + ignored.append(f"Ignoring Jinja template strings in file `{fn}`") + continue # Skip binary files binary_ftypes = ["image", "application/java-archive"] (ftype, encoding) = mimetypes.guess_type(fn) if encoding is not None or (ftype is not None and any([ftype.startswith(ft) for ft in binary_ftypes])): continue - with io.open(fn, "r", encoding="latin1") as fh: + with open(fn, encoding="latin1") as fh: lnum = 0 - for l in fh: + for line in fh: lnum += 1 - cc_matches = re.findall(r"[^$]{{[^:}]*}}", l) + cc_matches = re.findall(r"[^$]{{[^:}]*}}", line) if len(cc_matches) > 0: for cc_match in cc_matches: failed.append(f"Found a Jinja template string in `{fn}` L{lnum}: {cc_match}") @@ -42,4 +64,4 @@ def template_strings(self): if num_matches == 0: passed.append(f"Did not find any Jinja template strings ({len(self.files)} files)") - return {"passed": passed, "failed": failed} + return {"passed": passed, "failed": failed, "ignored": ignored} diff --git a/nf_core/lint/version_consistency.py b/nf_core/lint/version_consistency.py index fa5b50de01..e396ca9e7a 100644 --- a/nf_core/lint/version_consistency.py +++ b/nf_core/lint/version_consistency.py @@ -31,7 +31,7 @@ def version_consistency(self): versions["manifest.version"] = self.nf_config.get("manifest.version", "").strip(" '\"") # Get version from the docker tag - if self.nf_config.get("process.container", "") and not ":" in self.nf_config.get("process.container", ""): + if self.nf_config.get("process.container", "") and ":" not in self.nf_config.get("process.container", ""): failed.append(f"Docker slug seems not to have a version tag: {self.nf_config.get('process.container', '')}") # Get config container tag (if set; one container per workflow) @@ -53,8 +53,9 @@ def version_consistency(self): # Check if they are consistent if len(set(versions.values())) != 1: failed.append( - "The versioning is not consistent between container, release tag " - "and config. Found {}".format(", ".join(["{} = {}".format(k, v) for k, v in versions.items()])) + "The versioning is not consistent between container, release tag " "and config. 
Found {}".format( + ", ".join([f"{k} = {v}" for k, v in versions.items()]) + ) ) passed.append("Version tags are numeric and consistent between container, release tag and config.") diff --git a/nf_core/lint_utils.py b/nf_core/lint_utils.py index c2fd75d375..6eca6522d4 100644 --- a/nf_core/lint_utils.py +++ b/nf_core/lint_utils.py @@ -16,13 +16,20 @@ console = Console(force_terminal=nf_core.utils.rich_force_colors()) -def print_joint_summary(lint_obj, module_lint_obj): - """Print a joint summary of the general pipe lint tests and the module lint tests""" - nbr_passed = len(lint_obj.passed) + len(module_lint_obj.passed) +def print_joint_summary(lint_obj, module_lint_obj, subworkflow_lint_obj): + """Print a joint summary of the general pipe lint tests and the module and subworkflow lint tests""" + swf_passed = 0 + swf_warned = 0 + swf_failed = 0 + if subworkflow_lint_obj is not None: + swf_passed = len(subworkflow_lint_obj.passed) + swf_warned = len(subworkflow_lint_obj.warned) + swf_failed = len(subworkflow_lint_obj.failed) + nbr_passed = len(lint_obj.passed) + len(module_lint_obj.passed) + swf_passed nbr_ignored = len(lint_obj.ignored) nbr_fixed = len(lint_obj.fixed) - nbr_warned = len(lint_obj.warned) + len(module_lint_obj.warned) - nbr_failed = len(lint_obj.failed) + len(module_lint_obj.failed) + nbr_warned = len(lint_obj.warned) + len(module_lint_obj.warned) + swf_warned + nbr_failed = len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed summary_colour = "red" if nbr_failed > 0 else "green" table = Table(box=rich.box.ROUNDED, style=summary_colour) diff --git a/nf_core/list.py b/nf_core/list.py index 94d9d8e043..d0b59319a3 100644 --- a/nf_core/list.py +++ b/nf_core/list.py @@ -1,6 +1,5 @@ """Lists available nf-core pipelines and versions.""" -from __future__ import print_function import json import logging @@ -205,7 +204,7 @@ def print_summary(self): def sort_pulled_date(wf): try: return wf.local_wf.last_pull * -1 - except: + except Exception: return 0 filtered_workflows.sort(key=sort_pulled_date) diff --git a/nf_core/module-template/tests/main.nf.test b/nf_core/module-template/tests/main.nf.test index 5a2e6cdc63..e1b1dadf12 100644 --- a/nf_core/module-template/tests/main.nf.test +++ b/nf_core/module-template/tests/main.nf.test @@ -26,12 +26,12 @@ nextflow_process { """ // TODO nf-core: define inputs of the process here. Example: {% if has_meta %} - input = [ + input[0] = [ [ id:'test', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] {%- else %} - input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + input[0] = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) {%- endif %} """ } @@ -58,12 +58,12 @@ nextflow_process { """ // TODO nf-core: define inputs of the process here. 
Example: {% if has_meta %} - input = [ + input[0] = [ [ id:'test', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] {%- else %} - input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + input[0] = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) {%- endif %} """ } diff --git a/nf_core/modules/__init__.py b/nf_core/modules/__init__.py index 4b36f302bd..6be871ece8 100644 --- a/nf_core/modules/__init__.py +++ b/nf_core/modules/__init__.py @@ -6,7 +6,7 @@ from .list import ModuleList from .modules_json import ModulesJson from .modules_repo import ModulesRepo -from .modules_utils import ModuleException +from .modules_utils import ModuleExceptionError from .patch import ModulePatch from .remove import ModuleRemove from .update import ModuleUpdate diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 25259f1a16..b9003be974 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -4,8 +4,6 @@ """ -from __future__ import print_function - import logging import os import re @@ -24,9 +22,8 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.utils import custom_yaml_dumper +from nf_core.utils import custom_yaml_dumper, rich_force_colors from nf_core.utils import plural_s as _s -from nf_core.utils import rich_force_colors log = logging.getLogger(__name__) @@ -74,7 +71,7 @@ def bump_versions( # Verify that this is not a pipeline if not self.repo_type == "modules": - raise nf_core.modules.modules_utils.ModuleException( + raise nf_core.modules.modules_utils.ModuleExceptionError( "This command only works on the nf-core/modules repository, not on pipelines!" ) @@ -105,12 +102,14 @@ def bump_versions( if module: self.show_up_to_date = True if all_modules: - raise nf_core.modules.modules_utils.ModuleException( + raise nf_core.modules.modules_utils.ModuleExceptionError( "You cannot specify a tool and request all tools to be bumped." ) nfcore_modules = [m for m in nfcore_modules if m.component_name == module] if len(nfcore_modules) == 0: - raise nf_core.modules.modules_utils.ModuleException(f"Could not find the specified module: '{module}'") + raise nf_core.modules.modules_utils.ModuleExceptionError( + f"Could not find the specified module: '{module}'" + ) progress_bar = Progress( "[bold blue]{task.description}", @@ -146,10 +145,10 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: except FileNotFoundError: # try it in the main.nf instead try: - with open(module.main_nf, "r") as fh: - for l in fh: - if "bioconda::" in l: - bioconda_packages = [b for b in l.split() if "bioconda::" in b] + with open(module.main_nf) as fh: + for line in fh: + if "bioconda::" in line: + bioconda_packages = [b for b in line.split() if "bioconda::" in b] except FileNotFoundError: log.error( f"Neither `environment.yml` nor `main.nf` of {module.component_name} module could be read to get bioconada version of used tools." 
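When a module has no `environment.yml`, the bump-versions fallback above scans its `main.nf` line by line for `bioconda::` pins. A self-contained sketch of that parsing; the helper name and example path are hypothetical, not part of this PR:

```python
from pathlib import Path
from typing import List


def find_bioconda_pins(main_nf: Path) -> List[str]:
    """Collect 'bioconda::tool=version' tokens from a module's main.nf.

    Mirrors the fallback in bump_module_version(): if environment.yml is missing,
    the conda directive inside main.nf is scanned instead.
    """
    pins: List[str] = []
    with open(main_nf) as fh:
        for line in fh:
            if "bioconda::" in line:
                # e.g. conda "bioconda::samtools=1.17" -> "bioconda::samtools=1.17"
                pins.extend(tok.strip("'\"") for tok in line.split() if "bioconda::" in tok)
    return pins


# Usage (illustrative path):
# find_bioconda_pins(Path("modules/nf-core/samtools/view/main.nf"))
# -> ['bioconda::samtools=1.17']
```

The pins keep the `bioconda::tool=version` form, which the bump logic then splits into the tool name and its currently pinned version before looking up the latest release.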
@@ -208,7 +207,7 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: ), ] - with open(module.main_nf, "r") as fh: + with open(module.main_nf) as fh: content = fh.read() # Go over file content of main.nf and find replacements @@ -241,7 +240,7 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: fh.write(content) # change version in environment.yml - with open(module.environment_yml, "r") as fh: + with open(module.environment_yml) as fh: env_yml = yaml.safe_load(fh) re.sub(bioconda_packages[0], f"'bioconda::{bioconda_tool_name}={last_ver}'", env_yml["dependencies"]) with open(module.environment_yml, "w") as fh: @@ -266,7 +265,7 @@ def get_bioconda_version(self, module: NFCoreComponent) -> List[str]: # Check whether file exists and load it bioconda_packages = [] try: - with open(module.environment_yml, "r") as fh: + with open(module.environment_yml) as fh: env_yml = yaml.safe_load(fh) bioconda_packages = env_yml.get("dependencies", []) except FileNotFoundError: @@ -289,7 +288,7 @@ def _print_results(self) -> None: for m in [self.up_to_date, self.updated, self.failed]: try: max_mod_name_len = max(len(m[2]), max_mod_name_len) - except: + except Exception: pass def format_result(module_updates: List[Tuple[str, str]], table: Table) -> Table: diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 68a38cc0cd..866e6312aa 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -6,7 +6,6 @@ nf-core modules lint """ -from __future__ import print_function import logging import os @@ -16,7 +15,7 @@ import nf_core.modules.modules_utils import nf_core.utils -from nf_core.components.lint import ComponentLint, LintException, LintResult +from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult from nf_core.lint_utils import console log = logging.getLogger(__name__) @@ -119,11 +118,11 @@ def lint( # Only lint the given module if module: if all_modules: - raise LintException("You cannot specify a tool and request all tools to be linted.") + raise LintExceptionError("You cannot specify a tool and request all tools to be linted.") local_modules = [] remote_modules = [m for m in self.all_remote_components if m.component_name == module] if len(remote_modules) == 0: - raise LintException(f"Could not find the specified module: '{module}'") + raise LintExceptionError(f"Could not find the specified module: '{module}'") else: local_modules = self.all_local_components remote_modules = self.all_remote_components diff --git a/nf_core/modules/lint/environment_yml.py b/nf_core/modules/lint/environment_yml.py index a052425539..92281d99c0 100644 --- a/nf_core/modules/lint/environment_yml.py +++ b/nf_core/modules/lint/environment_yml.py @@ -23,14 +23,14 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent) env_yml = None # load the environment.yml file try: - with open(Path(module.component_dir, "environment.yml"), "r") as fh: + with open(Path(module.component_dir, "environment.yml")) as fh: env_yml = yaml.safe_load(fh) module.passed.append(("environment_yml_exists", "Module's `environment.yml` exists", module.environment_yml)) except FileNotFoundError: # check if the module's main.nf requires a conda environment - with open(Path(module.component_dir, "main.nf"), "r") as fh: + with open(Path(module.component_dir, "main.nf")) as fh: main_nf = fh.read() if 'conda "${moduleDir}/environment.yml"' in main_nf: module.failed.append( @@ -49,9 +49,7 @@ def environment_yml(module_lint_object: 
ComponentLint, module: NFCoreComponent) if env_yml: valid_env_yml = False try: - with open( - Path(module_lint_object.modules_repo.local_repo_dir, "modules/environment-schema.json"), "r" - ) as fh: + with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/environment-schema.json")) as fh: schema = json.load(fh) validators.validate(instance=env_yml, schema=schema) module.passed.append( @@ -92,7 +90,7 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent) yaml.dump(env_yml, fh, Dumper=custom_yaml_dumper()) # Check that the name in the environment.yml file matches the name in the meta.yml file - with open(Path(module.component_dir, "meta.yml"), "r") as fh: + with open(Path(module.component_dir, "meta.yml")) as fh: meta_yml = yaml.safe_load(fh) if env_yml["name"] == meta_yml["name"]: @@ -111,3 +109,21 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent) module.environment_yml, ) ) + + # Check that the name is lowercase + if env_yml["name"] == env_yml["name"].lower(): + module.passed.append( + ( + "environment_yml_name_lowercase", + "The module's `environment.yml` name is lowercase", + module.environment_yml, + ) + ) + else: + module.failed.append( + ( + "environment_yml_name_lowercase", + "The module's `environment.yml` name is not lowercase", + module.environment_yml, + ) + ) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 56a9e99925..fd4d81f7f2 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -55,7 +55,7 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): if lines is None: try: # Check whether file exists and load it - with open(module.main_nf, "r") as fh: + with open(module.main_nf) as fh: lines = fh.readlines() module.passed.append(("main_nf_exists", "Module file exists", module.main_nf)) except FileNotFoundError: @@ -81,39 +81,39 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): script_lines = [] shell_lines = [] when_lines = [] - for l in lines: - if re.search(r"^\s*process\s*\w*\s*{", l) and state == "module": + for line in lines: + if re.search(r"^\s*process\s*\w*\s*{", line) and state == "module": state = "process" - if re.search(r"input\s*:", l) and state in ["process"]: + if re.search(r"input\s*:", line) and state in ["process"]: state = "input" continue - if re.search(r"output\s*:", l) and state in ["input", "process"]: + if re.search(r"output\s*:", line) and state in ["input", "process"]: state = "output" continue - if re.search(r"when\s*:", l) and state in ["input", "output", "process"]: + if re.search(r"when\s*:", line) and state in ["input", "output", "process"]: state = "when" continue - if re.search(r"script\s*:", l) and state in ["input", "output", "when", "process"]: + if re.search(r"script\s*:", line) and state in ["input", "output", "when", "process"]: state = "script" continue - if re.search(r"shell\s*:", l) and state in ["input", "output", "when", "process"]: + if re.search(r"shell\s*:", line) and state in ["input", "output", "when", "process"]: state = "shell" continue # Perform state-specific linting checks - if state == "process" and not _is_empty(l): - process_lines.append(l) - if state == "input" and not _is_empty(l): - inputs.extend(_parse_input(module, l)) - if state == "output" and not _is_empty(l): - outputs += _parse_output(module, l) + if state == "process" and not _is_empty(line): + process_lines.append(line) + if state == "input" and not 
_is_empty(line): + inputs.extend(_parse_input(module, line)) + if state == "output" and not _is_empty(line): + outputs += _parse_output(module, line) outputs = list(set(outputs)) # remove duplicate 'meta's - if state == "when" and not _is_empty(l): - when_lines.append(l) - if state == "script" and not _is_empty(l): - script_lines.append(l) - if state == "shell" and not _is_empty(l): - shell_lines.append(l) + if state == "when" and not _is_empty(line): + when_lines.append(line) + if state == "script" and not _is_empty(line): + script_lines.append(line) + if state == "shell" and not _is_empty(line): + shell_lines.append(line) # Check that we have required sections if not len(outputs): @@ -140,7 +140,7 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): # Check that shell uses a template if len(shell_lines): - if any("template" in l for l in shell_lines): + if any("template" in line for line in shell_lines): module.passed.append(("main_nf_shell_template", "`template` found in `shell` block", module.main_nf)) else: module.failed.append(("main_nf_shell_template", "No `template` found in `shell` block", module.main_nf)) @@ -249,21 +249,21 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): # Deprecated enable_conda for i, raw_line in enumerate(lines): url = None - l = raw_line.strip(" \n'\"}:") + line = raw_line.strip(" \n'\"}:") # Catch preceeding "container " - if l.startswith("container"): - l = l.replace("container", "").strip(" \n'\"}:") + if line.startswith("container"): + line = line.replace("container", "").strip(" \n'\"}:") - if _container_type(l) == "conda": - if "bioconda::" in l: - bioconda_packages = [b for b in l.split() if "bioconda::" in b] - match = re.search(r"params\.enable_conda", l) + if _container_type(line) == "conda": + if "bioconda::" in line: + bioconda_packages = [b for b in line.split() if "bioconda::" in b] + match = re.search(r"params\.enable_conda", line) if match is None: self.passed.append( ( "deprecated_enable_conda", - f"Deprecated parameter 'params.enable_conda' correctly not found in the conda definition", + "Deprecated parameter 'params.enable_conda' correctly not found in the conda definition", self.main_nf, ) ) @@ -271,35 +271,35 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): self.failed.append( ( "deprecated_enable_conda", - f"Found deprecated parameter 'params.enable_conda' in the conda definition", + "Found deprecated parameter 'params.enable_conda' in the conda definition", self.main_nf, ) ) - if _container_type(l) == "singularity": + if _container_type(line) == "singularity": # e.g. "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img -> v1.2.0_cv1 # e.g. "https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0 -> 0.11.9--0 # Please god let's find a better way to do this than regex - match = re.search(r"(?:[:.])?([A-Za-z\d\-_.]+?)(?:\.img)?(?:\.sif)?$", l) + match = re.search(r"(?:[:.])?([A-Za-z\d\-_.]+?)(?:\.img)?(?:\.sif)?$", line) if match is not None: singularity_tag = match.group(1) self.passed.append(("singularity_tag", f"Found singularity tag: {singularity_tag}", self.main_nf)) else: self.failed.append(("singularity_tag", "Unable to parse singularity tag", self.main_nf)) singularity_tag = None - url = urlparse(l.split("'")[0]) + url = urlparse(line.split("'")[0]) - if _container_type(l) == "docker": + if _container_type(line) == "docker": # e.g. 
"quay.io/biocontainers/krona:2.7.1--pl526_5 -> 2.7.1--pl526_5 # e.g. "biocontainers/biocontainers:v1.2.0_cv1 -> v1.2.0_cv1 - match = re.search(r":([A-Za-z\d\-_.]+)$", l) + match = re.search(r":([A-Za-z\d\-_.]+)$", line) if match is not None: docker_tag = match.group(1) self.passed.append(("docker_tag", f"Found docker tag: {docker_tag}", self.main_nf)) else: self.failed.append(("docker_tag", "Unable to parse docker tag", self.main_nf)) docker_tag = None - if l.startswith(registry): - l_stripped = re.sub(r"\W+$", "", l) + if line.startswith(registry): + l_stripped = re.sub(r"\W+$", "", line) self.failed.append( ( "container_links", @@ -308,15 +308,15 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): ) ) else: - self.passed.append(("container_links", f"Container prefix is correct", self.main_nf)) + self.passed.append(("container_links", "Container prefix is correct", self.main_nf)) # Guess if container name is simple one (e.g. nfcore/ubuntu:20.04) # If so, add quay.io as default container prefix - if l.count("/") == 1 and l.count(":") == 1: - l = "/".join([registry, l]).replace("//", "/") - url = urlparse(l.split("'")[0]) + if line.count("/") == 1 and line.count(":") == 1: + line = "/".join([registry, line]).replace("//", "/") + url = urlparse(line.split("'")[0]) - if l.startswith("container") or _container_type(l) == "docker" or _container_type(l) == "singularity": + if line.startswith("container") or _container_type(line) == "docker" or _container_type(line) == "singularity": check_container_link_line(self, raw_line, registry) # Try to connect to container URLs @@ -348,7 +348,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): # Get bioconda packages from environment.yml try: - with open(Path(self.component_dir, "environment.yml"), "r") as fh: + with open(Path(self.component_dir, "environment.yml")) as fh: env_yml = yaml.safe_load(fh) if "dependencies" in env_yml: bioconda_packages = [x for x in env_yml["dependencies"] if isinstance(x, str) and "bioconda::" in x] @@ -424,7 +424,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): def check_process_labels(self, lines): correct_process_labels = ["process_single", "process_low", "process_medium", "process_high", "process_long"] - all_labels = [l.strip() for l in lines if l.lstrip().startswith("label ")] + all_labels = [line.strip() for line in lines if line.lstrip().startswith("label ")] bad_labels = [] good_labels = [] if len(all_labels) > 0: @@ -475,14 +475,14 @@ def check_process_labels(self, lines): def check_container_link_line(self, raw_line, registry): """Look for common problems in the container name / URL, for docker and singularity.""" - l = raw_line.strip(" \n'\"}:") + line = raw_line.strip(" \n'\"}:") # lint double quotes - if l.count('"') > 2: + if line.count('"') > 2: self.failed.append( ( "container_links", - f"Too many double quotes found when specifying container: {l.lstrip('container ')}", + f"Too many double quotes found when specifying container: {line.lstrip('container ')}", self.main_nf, ) ) @@ -490,7 +490,7 @@ def check_container_link_line(self, raw_line, registry): self.passed.append( ( "container_links", - f"Correct number of double quotes found when specifying container: {l.lstrip('container ')}", + f"Correct number of double quotes found when specifying container: {line.lstrip('container ')}", self.main_nf, ) ) @@ -524,7 +524,9 @@ def check_container_link_line(self, raw_line, registry): ) # lint more than one container in the 
same line - if ("https://containers" in l or "https://depot" in l) and ("biocontainers/" in l or l.startswith(registry)): + if ("https://containers" in line or "https://depot" in line) and ( + "biocontainers/" in line or line.startswith(registry) + ): self.warned.append( ( "container_links", @@ -576,7 +578,7 @@ def _parse_output(self, line): output = [] if "meta" in line: output.append("meta") - if not "emit:" in line: + if "emit:" not in line: self.failed.append(("missing_emit", f"Missing emit statement: {line.strip()}", self.main_nf)) else: output.append(line.split("emit:")[1].strip()) @@ -605,14 +607,14 @@ def _fix_module_version(self, current_version, latest_version, singularity_tag, # Get latest build build = _get_build(response) - with open(self.main_nf, "r") as source: + with open(self.main_nf) as source: lines = source.readlines() # Check if the new version + build exist and replace new_lines = [] for line in lines: - l = line.strip(" '\"") - build_type = _container_type(l) + line_stripped = line.strip(" '\"") + build_type = _container_type(line_stripped) if build_type == "conda": new_lines.append(re.sub(rf"{current_version}", f"{latest_version}", line)) elif build_type in ("singularity", "docker"): diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 7552c1ceae..551a978f4d 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -54,7 +54,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None meta_yaml = yaml.safe_load("".join(lines)) if meta_yaml is None: try: - with open(module.meta_yml, "r") as fh: + with open(module.meta_yml) as fh: meta_yaml = yaml.safe_load(fh) module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml)) except FileNotFoundError: @@ -64,7 +64,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None # Confirm that the meta.yml file is valid according to the JSON schema valid_meta_yml = False try: - with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/meta-schema.json"), "r") as fh: + with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/meta-schema.json")) as fh: schema = json.load(fh) validators.validate(instance=meta_yaml, schema=schema) module.passed.append(("meta_yml_valid", "Module `meta.yml` is valid", module.meta_yml)) diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py index 0f4c2aad25..ee8cabebe1 100644 --- a/nf_core/modules/lint/module_changes.py +++ b/nf_core/modules/lint/module_changes.py @@ -31,7 +31,7 @@ def module_changes(module_lint_object, module): try: new_lines = ModulesDiffer.try_apply_patch( module.component_name, - module_lint_object.modules_repo.repo_path, + module.org, module.patch_path, tempdir, reverse=True, diff --git a/nf_core/modules/lint/module_patch.py b/nf_core/modules/lint/module_patch.py index d52962eabb..29bf78a66b 100644 --- a/nf_core/modules/lint/module_patch.py +++ b/nf_core/modules/lint/module_patch.py @@ -40,7 +40,7 @@ def check_patch_valid(module, patch_path): Returns: (bool): False if any test failed, True otherwise """ - with open(patch_path, "r") as fh: + with open(patch_path) as fh: patch_lines = fh.readlines() # Check that the file contains a patch for at least one file @@ -170,8 +170,8 @@ def patch_reversible(module_lint_object, module, patch_path): ) except LookupError: # Patch failed. 
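The wholesale `l` → `line` renames running through `main_nf.py` above (and `module_patch.py` just below) line up with the pycodestyle rules Ruff now enforces: the `E7` group selected in the template `pyproject.toml` later in this diff includes E741, which flags ambiguous single-letter names such as `l`. A minimal before/after sketch, not taken from the diff itself:

```python
lines = ["    label 'process_single'", "    cpus 1"]

# Before: Ruff E741 flags `l` because it is easily misread as `1` or `I`.
# all_labels = [l.strip() for l in lines if l.lstrip().startswith("label ")]

# After, as in the updated check_process_labels():
all_labels = [line.strip() for line in lines if line.lstrip().startswith("label ")]
print(all_labels)  # ["label 'process_single'"]
```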
Save the patch file by moving to the install dir - module.failed.append((("patch_reversible", "Patch file is outdated or edited", patch_path))) + module.failed.append(("patch_reversible", "Patch file is outdated or edited", patch_path)) return False - module.passed.append((("patch_reversible", "Patch agrees with module files", patch_path))) + module.passed.append(("patch_reversible", "Patch agrees with module files", patch_path)) return True diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py index 87033e3f49..520f8cf0a2 100644 --- a/nf_core/modules/lint/module_tests.py +++ b/nf_core/modules/lint/module_tests.py @@ -44,7 +44,7 @@ def module_tests(_, module: NFCoreComponent): if module.nftest_main_nf.is_file(): # Check if main.nf.test.snap file exists, if 'snap(' is inside main.nf.test - with open(module.nftest_main_nf, "r") as fh: + with open(module.nftest_main_nf) as fh: if "snapshot(" in fh.read(): snap_file = module.nftest_testdir / "main.nf.test.snap" if snap_file.is_file(): @@ -52,7 +52,7 @@ def module_tests(_, module: NFCoreComponent): ("test_snapshot_exists", "snapshot file `main.nf.test.snap` exists", snap_file) ) # Validate no empty files - with open(snap_file, "r") as snap_fh: + with open(snap_file) as snap_fh: try: snap_content = json.load(snap_fh) for test_name in snap_content.keys(): @@ -145,7 +145,7 @@ def module_tests(_, module: NFCoreComponent): pytest_yml_path = module.base_dir / "tests" / "config" / "pytest_modules.yml" if pytest_yml_path.is_file() and not is_pytest: try: - with open(pytest_yml_path, "r") as fh: + with open(pytest_yml_path) as fh: pytest_yml = yaml.safe_load(fh) if module.component_name in pytest_yml.keys(): module.failed.append( @@ -165,7 +165,7 @@ def module_tests(_, module: NFCoreComponent): if module.tags_yml.is_file(): # Check that tags.yml exists and it has the correct entry module.passed.append(("test_tags_yml_exists", "file `tags.yml` exists", module.tags_yml)) - with open(module.tags_yml, "r") as fh: + with open(module.tags_yml) as fh: tags_yml = yaml.safe_load(fh) if module.component_name in tags_yml.keys(): module.passed.append(("test_tags_yml", "correct entry in tags.yml", module.tags_yml)) diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index efce3868e5..a97229ff62 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -74,9 +74,9 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d temp_path = Path(to_dir, file) curr_path = Path(from_dir, file) if temp_path.exists() and curr_path.exists() and temp_path.is_file(): - with open(temp_path, "r") as fh: + with open(temp_path) as fh: new_lines = fh.readlines() - with open(curr_path, "r") as fh: + with open(curr_path) as fh: old_lines = fh.readlines() if new_lines == old_lines: @@ -93,7 +93,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d diffs[file] = (ModulesDiffer.DiffEnum.CHANGED, diff) elif temp_path.exists(): - with open(temp_path, "r") as fh: + with open(temp_path) as fh: new_lines = fh.readlines() # The file was created # Show file against /dev/null @@ -108,7 +108,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d elif curr_path.exists(): # The file was removed # Show file against /dev/null - with open(curr_path, "r") as fh: + with open(curr_path) as fh: old_lines = fh.readlines() diff = difflib.unified_diff( old_lines, @@ -279,7 +279,7 @@ def per_file_patch(patch_fn): dict[str, str]: 
A dictionary indexed by the filenames with the file patches as values """ - with open(patch_fn, "r") as fh: + with open(patch_fn) as fh: lines = fh.readlines() patches = {} @@ -447,7 +447,7 @@ def try_apply_patch(module, repo_path, patch_path, module_dir, reverse=False): log.debug(f"Applying patch to {file}") fn = Path(file).relative_to(module_relpath) file_path = module_dir / fn - with open(file_path, "r") as fh: + with open(file_path) as fh: file_lines = fh.readlines() patched_new_lines = ModulesDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse) new_files[str(fn)] = patched_new_lines diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 32eb8736d6..f68c27b2d8 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -637,7 +637,7 @@ def load(self): UserWarning: If the modules.json file is not found """ try: - with open(self.modules_json_path, "r") as fh: + with open(self.modules_json_path) as fh: try: self.modules_json = json.load(fh) except json.JSONDecodeError as e: diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 152ed7b0c0..204c20fd71 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -1,8 +1,6 @@ -import filecmp import logging import os import shutil -from pathlib import Path import git import rich diff --git a/nf_core/modules/modules_utils.py b/nf_core/modules/modules_utils.py index 3ae01e9eef..ca8993483b 100644 --- a/nf_core/modules/modules_utils.py +++ b/nf_core/modules/modules_utils.py @@ -9,7 +9,7 @@ log = logging.getLogger(__name__) -class ModuleException(Exception): +class ModuleExceptionError(Exception): """Exception raised when there was an error with module commands""" pass @@ -69,12 +69,12 @@ def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], Lis if os.path.exists(nfcore_modules_dir): for m in sorted([m for m in os.listdir(nfcore_modules_dir) if not m == "lib"]): if not os.path.isdir(os.path.join(nfcore_modules_dir, m)): - raise ModuleException( + raise ModuleExceptionError( f"File found in '{nfcore_modules_dir}': '{m}'! This directory should only contain module directories." 
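The `ModuleException` → `ModuleExceptionError` rename in this hunk (like `LintException` → `LintExceptionError` earlier) follows pep8-naming rule N818, part of the `N` group selected in the template `pyproject.toml` later in this diff, which expects custom exception classes to end in `Error`. A self-contained sketch of the renamed class in use; the helper and its toy input are invented for illustration:

```python
class ModuleExceptionError(Exception):
    """Exception raised when there was an error with module commands"""


def assert_only_directories(entries):
    # Hypothetical helper mirroring the check in get_installed_modules():
    # everything under modules/nf-core should be a module directory, not a loose file.
    for name, is_dir in entries:
        if not is_dir:
            raise ModuleExceptionError(f"File found in 'modules/nf-core': '{name}'!")


assert_only_directories([("fastqc", True), ("multiqc", True)])  # passes silently
```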
) m_content = os.listdir(os.path.join(nfcore_modules_dir, m)) # Not a module, but contains sub-modules - if not "main.nf" in m_content: + if "main.nf" not in m_content: for tool in m_content: nfcore_modules_names.append(os.path.join(m, tool)) else: diff --git a/nf_core/params_file.py b/nf_core/params_file.py index 5c50c53fb9..51986821b5 100644 --- a/nf_core/params_file.py +++ b/nf_core/params_file.py @@ -1,6 +1,5 @@ """ Create a YAML parameter file """ -from __future__ import print_function import json import logging @@ -9,8 +8,6 @@ from typing import Literal, Optional import questionary -import rich -import rich.columns import nf_core.list import nf_core.utils @@ -196,7 +193,7 @@ def format_param(self, name, properties, required_properties=(), show_hidden=Fal description = properties.get("description", "") self.schema_obj.get_schema_defaults() default = properties.get("default") - typ = properties.get("type") + type = properties.get("type") required = name in required_properties out += _print_wrapped(name, "-", mode="both") @@ -204,8 +201,11 @@ def format_param(self, name, properties, required_properties=(), show_hidden=Fal if description: out += _print_wrapped(description + "\n", mode="none", indent=4) - if typ: - out += _print_wrapped(f"Type: {typ}", mode="none", indent=4) + if type: + out += _print_wrapped(f"Type: {type}", mode="none", indent=4) + + if required: + out += _print_wrapped("Required", mode="none", indent=4) out += _print_wrapped("\n", mode="end") out += f"# {name} = {json.dumps(default)}\n" diff --git a/nf_core/pipeline-template/.editorconfig b/nf_core/pipeline-template/.editorconfig index b6b3190776..9b990088ab 100644 --- a/nf_core/pipeline-template/.editorconfig +++ b/nf_core/pipeline-template/.editorconfig @@ -22,3 +22,11 @@ indent_size = unset [/assets/email*] indent_size = unset + +# ignore Readme +[README.md] +indent_style = unset + +# ignore python +[*.{py}] +indent_style = unset diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml index 2f83a0962c..4c9fd69fcc 100644 --- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml +++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml @@ -31,7 +31,7 @@ jobs: } profiles: test_full - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: Tower debug log file path: | diff --git a/nf_core/pipeline-template/.github/workflows/awstest.yml b/nf_core/pipeline-template/.github/workflows/awstest.yml index 9a0bf4afbc..25726aa1c9 100644 --- a/nf_core/pipeline-template/.github/workflows/awstest.yml +++ b/nf_core/pipeline-template/.github/workflows/awstest.yml @@ -25,7 +25,7 @@ jobs: } profiles: test - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: Tower debug log file path: | diff --git a/nf_core/pipeline-template/.github/workflows/branch.yml b/nf_core/pipeline-template/.github/workflows/branch.yml index 8edfa540c9..057016e4be 100644 --- a/nf_core/pipeline-template/.github/workflows/branch.yml +++ b/nf_core/pipeline-template/.github/workflows/branch.yml @@ -19,7 +19,7 @@ jobs: # NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets - name: Post PR comment if: failure() - uses: mshick/add-pr-comment@v1 + uses: mshick/add-pr-comment@v2 with: message: | ## This PR is against the `master` branch :x: diff --git a/nf_core/pipeline-template/.github/workflows/clean-up.yml 
b/nf_core/pipeline-template/.github/workflows/clean-up.yml index 427aad5087..8feb3fb017 100644 --- a/nf_core/pipeline-template/.github/workflows/clean-up.yml +++ b/nf_core/pipeline-template/.github/workflows/clean-up.yml @@ -10,7 +10,7 @@ jobs: issues: write pull-requests: write steps: - - uses: actions/stale@v7 + - uses: actions/stale@v9 with: stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days." stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful." diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml new file mode 100644 index 0000000000..07ac82c1ed --- /dev/null +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -0,0 +1,67 @@ +name: Test successful pipeline download with 'nf-core download' + +# Run the workflow when: +# - dispatched manually +# - when a PR is opened or reopened to master branch +# - the head branch of the pull request is updated, i.e. if fixes for a release are pushed last minute to dev. +on: + workflow_dispatch: + pull_request: + types: + - opened + branches: + - master + pull_request_target: + branches: + - master + +env: + NXF_ANSI_LOG: false + +jobs: + download: + runs-on: ubuntu-latest + steps: + - name: Install Nextflow + uses: nf-core/setup-nextflow@v1 + + - uses: actions/setup-python@v5 + with: + python-version: "3.11" + architecture: "x64" + - uses: eWaterCycle/setup-singularity@v7 + with: + singularity-version: 3.8.3 + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install git+https://github.com/nf-core/tools.git@dev + + - name: Get the repository name and current branch set as environment variable + run: | + echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV} + echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV} + echo "REPO_BRANCH=${GITHUB_REF#refs/heads/}" >> ${GITHUB_ENV} + + - name: Download the pipeline + env: + NXF_SINGULARITY_CACHEDIR: ./ + run: | + nf-core download {% raw %} ${{ env.REPO_LOWERCASE }} \ + --revision ${{ env.REPO_BRANCH }} \ + --outdir ./${{ env.REPOTITLE_LOWERCASE }} \ + --compress "none" \ + --container-system 'singularity' \ + --container-library "quay.io" -l "docker.io" -l "ghcr.io" \ + --container-cache-utilisation 'amend' \ + --download-configuration + + - name: Inspect download + run: tree ./${{ env.REPOTITLE_LOWERCASE }} + + - name: Run the downloaded pipeline + env: + NXF_SINGULARITY_CACHEDIR: ./ + NXF_SINGULARITY_HOME_MOUNT: true + run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results {% endraw %} diff --git a/nf_core/pipeline-template/.github/workflows/fix-linting.yml b/nf_core/pipeline-template/.github/workflows/fix-linting.yml index 31e8cd2b36..28e6605b96 100644 --- a/nf_core/pipeline-template/.github/workflows/fix-linting.yml +++ b/nf_core/pipeline-template/.github/workflows/fix-linting.yml @@ -4,7 +4,7 @@ on: types: [created] jobs: - deploy: + fix-linting: # Only run if comment is on a PR with the main repo, and if it contains the magic keywords if: > contains(github.event.comment.html_url, '/pull/') && @@ -13,10 +13,17 @@ jobs: runs-on: ubuntu-latest steps: # Use the @nf-core-bot 
token to check out so we can push later - - uses: actions/checkout@v4 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 with: token: ${{ secrets.nf_core_bot_auth_token }} + # indication that the linting is being fixed + - name: React on comment + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: eyes + # Action runs on the issue comment, so we don't get the PR by default # Use the gh cli to check out the PR - name: Checkout Pull Request @@ -24,32 +31,59 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} - - uses: actions/setup-node@v4 + # Install and run pre-commit + - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 + with: + python-version: 3.11 - - name: Install Prettier - run: npm install -g prettier @prettier/plugin-php + - name: Install pre-commit + run: pip install pre-commit - # Check that we actually need to fix something - - name: Run 'prettier --check' - id: prettier_status - run: | - if prettier --check ${GITHUB_WORKSPACE}; then - echo "result=pass" >> $GITHUB_OUTPUT - else - echo "result=fail" >> $GITHUB_OUTPUT - fi + - name: Run pre-commit + id: pre-commit + run: pre-commit run --all-files + continue-on-error: true - - name: Run 'prettier --write' - if: steps.prettier_status.outputs.result == 'fail' - run: prettier --write ${GITHUB_WORKSPACE} + # indication that the linting has finished + - name: react if linting finished succesfully + if: steps.pre-commit.outcome == 'success' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: "+1" - name: Commit & push changes - if: steps.prettier_status.outputs.result == 'fail' + id: commit-and-push + if: steps.pre-commit.outcome == 'failure' run: | git config user.email "core@nf-co.re" git config user.name "nf-core-bot" git config push.default upstream git add . git status - git commit -m "[automated] Fix linting with Prettier" - git push {%- endraw %} + git commit -m "[automated] Fix code linting" + git push + + - name: react if linting errors were fixed + id: react-if-fixed + if: steps.commit-and-push.outcome == 'success' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: hooray + + - name: react if linting errors were not fixed + if: steps.commit-and-push.outcome == 'failure' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: confused + + - name: react if linting errors were not fixed + if: steps.commit-and-push.outcome == 'failure' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + issue-number: ${{ github.event.issue.number }} + body: | + @${{ github.actor }} I tried to fix the linting errors, but it didn't work. Please fix them manually. 
+ See [CI log](https://github.com/{% endraw %}{{name}}{% raw %}/actions/runs/${{ github.run_id }}) for more details.{% endraw %} diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index 94aa5278be..a267f1ec14 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -11,61 +11,22 @@ on: types: [published] jobs: - EditorConfig: + pre-commit: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 - - - name: Install editorconfig-checker - run: npm install -g editorconfig-checker - - - name: Run ECLint check - run: editorconfig-checker -exclude README.md $(find .* -type f | grep -v '.git\|.py\|.md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile') - - Prettier: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-node@v4 - - - name: Install Prettier - run: npm install -g prettier - - - name: Run Prettier --check - run: prettier --check ${GITHUB_WORKSPACE} - - PythonBlack: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Check code lints with Black - uses: psf/black@stable - - # If the above check failed, post a comment on the PR explaining the failure - - name: Post PR comment - if: failure() - uses: mshick/add-pr-comment@v1 + - name: Set up Python 3.11 + uses: actions/setup-python@v5 with: - message: | - ## Python linting (`black`) is failing - - To keep the code consistent with lots of contributors, we run automated code consistency checks. - To fix this CI test, please run: - - * Install [`black`](https://black.readthedocs.io/en/stable/): `pip install black` - * Fix formatting errors in your pipeline: `black .` - - Once you push these changes the test should pass, and you can hide this comment :+1: + python-version: 3.11 + cache: "pip" - We highly recommend setting up Black in your code editor so that this formatting is done automatically on save. Ask about it on Slack for help! + - name: Install pre-commit + run: pip install pre-commit - Thanks again for your contribution! 
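The `fix-linting.yml` rewrite above and the `linting.yml` change around it converge on the same pattern: the separate EditorConfig, Prettier and Black jobs are replaced by a single `pre-commit run --all-files` step, and the fix-linting bot commits whatever the hooks changed. A rough Python sketch of that control flow (an illustration only; the real workflows are YAML/shell, as shown):

```python
import subprocess

# pre-commit exits non-zero when a hook fails or modifies files.
result = subprocess.run(["pre-commit", "run", "--all-files"])

if result.returncode != 0:
    # Mirror the "Commit & push changes" step: stage and push the auto-fixes.
    subprocess.run(["git", "add", "."], check=True)
    subprocess.run(["git", "commit", "-m", "[automated] Fix code linting"], check=True)
    subprocess.run(["git", "push"], check=True)
```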
- repo-token: ${{ secrets.GITHUB_TOKEN }} - allow-repeats: false + - name: Run pre-commit + run: pre-commit run --all-files nf-core: runs-on: ubuntu-latest @@ -76,7 +37,7 @@ jobs: - name: Install Nextflow uses: nf-core/setup-nextflow@v1 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: "3.11" architecture: "x64" @@ -99,7 +60,7 @@ jobs: - name: Upload linting log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: linting-logs path: | diff --git a/nf_core/pipeline-template/.github/workflows/linting_comment.yml b/nf_core/pipeline-template/.github/workflows/linting_comment.yml index 09f8c423e5..e5528b29cf 100644 --- a/nf_core/pipeline-template/.github/workflows/linting_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/linting_comment.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download lint results - uses: dawidd6/action-download-artifact@v2 + uses: dawidd6/action-download-artifact@v3 with: workflow: linting.yml workflow_conclusion: completed diff --git a/nf_core/pipeline-template/.github/workflows/release-announcements.yml b/nf_core/pipeline-template/.github/workflows/release-announcements.yml index ad497db4e1..1dd48b123f 100644 --- a/nf_core/pipeline-template/.github/workflows/release-announcements.yml +++ b/nf_core/pipeline-template/.github/workflows/release-announcements.yml @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install dependencies @@ -56,7 +56,7 @@ jobs: bsky-post: runs-on: ubuntu-latest steps: - - uses: zentered/bluesky-post-action@v0.0.2 + - uses: zentered/bluesky-post-action@v0.1.0 with: post: | Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}! diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index acf7269536..363d5b1d42 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -7,6 +7,7 @@ tasks: - name: unset JAVA_TOOL_OPTIONS command: | unset JAVA_TOOL_OPTIONS + vscode: extensions: # based on nf-core.nf-core-extensionpack - codezombiech.gitignore # Language support for .gitignore files diff --git a/nf_core/pipeline-template/.pre-commit-config.yaml b/nf_core/pipeline-template/.pre-commit-config.yaml index 0c31cdb99f..af57081f60 100644 --- a/nf_core/pipeline-template/.pre-commit-config.yaml +++ b/nf_core/pipeline-template/.pre-commit-config.yaml @@ -1,5 +1,10 @@ repos: - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v2.7.1" + rev: "v3.1.0" hooks: - id: prettier + - repo: https://github.com/editorconfig-checker/editorconfig-checker.python + rev: "2.7.3" + hooks: + - id: editorconfig-checker + alias: ec diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index c874090b51..5160ccb04d 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -1,7 +1,11 @@ {% if branded -%} -# ![{{ name }}](docs/images/{{ logo_light }}#gh-light-mode-only) ![{{ name }}](docs/images/{{ logo_dark }}#gh-dark-mode-only) - +

<h1>
+  <picture>
+    <source media="(prefers-color-scheme: dark)" srcset="docs/images/{{ logo_dark }}">
+    <img alt="{{ name }}" src="docs/images/{{ logo_light }}">
+  </picture>
+</h1>
{% endif -%} {% if github_badges -%} [![GitHub Actions CI Status](https://github.com/{{ name }}/workflows/nf-core%20CI/badge.svg)](https://github.com/{{ name }}/actions?query=workflow%3A%22nf-core+CI%22) diff --git a/nf_core/pipeline-template/assets/email_template.html b/nf_core/pipeline-template/assets/email_template.html index ecff600d44..56759fff65 100644 --- a/nf_core/pipeline-template/assets/email_template.html +++ b/nf_core/pipeline-template/assets/email_template.html @@ -12,7 +12,7 @@ -

<h1>{{ name }} v${version}</h1>
+<h1>{{ name }} ${version}</h1>
<h2>Run Name: $runName</h2>
<% if (!success){ diff --git a/nf_core/pipeline-template/assets/email_template.txt b/nf_core/pipeline-template/assets/email_template.txt index edc8f71016..25b12e8ce0 100644 --- a/nf_core/pipeline-template/assets/email_template.txt +++ b/nf_core/pipeline-template/assets/email_template.txt @@ -5,7 +5,7 @@ |\\ | |__ __ / ` / \\ |__) |__ } { | \\| | \\__, \\__/ | \\ |___ \\`-._,-`-, `._,._,' - {{ name }} v${version} + {{ name }} ${version} ---------------------------------------------------- {% endif -%} diff --git a/nf_core/pipeline-template/lib/WorkflowMain.groovy b/nf_core/pipeline-template/lib/WorkflowMain.groovy index 5824dae2fb..a254b2b22a 100755 --- a/nf_core/pipeline-template/lib/WorkflowMain.groovy +++ b/nf_core/pipeline-template/lib/WorkflowMain.groovy @@ -24,7 +24,7 @@ class WorkflowMain { // // Validate parameters and print summary to screen // - public static void initialise(workflow, params, log) { + public static void initialise(workflow, params, log, args) { // Print workflow version and exit on --version if (params.version) { @@ -35,6 +35,8 @@ class WorkflowMain { // Check that a -profile or Nextflow config has been provided to run the pipeline NfcoreTemplate.checkConfigProvided(workflow, log) + // Check that the profile doesn't contain spaces and doesn't end with a trailing comma + checkProfile(workflow.profile, args, log) // Check that conda channels are set-up correctly if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { @@ -63,4 +65,16 @@ class WorkflowMain { return null } {%- endif %} + + // + // Exit pipeline if --profile contains spaces + // + private static void checkProfile(profile, args, log) { + if (profile.endsWith(',')) { + Nextflow.error "Profile cannot end with a trailing comma. Please remove the comma from the end of the profile string.\nHint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead,e.g., `-profile docker,test`." + } + if (args[0]) { + log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${args[0]}` has been detected.\n Hint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead,e.g., `-profile docker,test`." 
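The new `checkProfile` guard added to `WorkflowMain.groovy` above catches the two most common `-profile` mistakes. Restated in Python purely as a sketch (assumed names, not part of the template): reject a profile string with a trailing comma, and warn when positional arguments slip through, since both usually mean someone typed `-profile docker, test` with a space.

```python
def check_profile(profile, args):
    # Trailing comma: almost always `-profile docker, test` typed with a space.
    if profile.endswith(","):
        raise SystemExit(
            "Profile cannot end with a trailing comma. "
            "Separate profiles with commas only, e.g. `-profile docker,test`."
        )
    if args:
        print(
            f"WARNING: positional argument '{args[0]}' detected; "
            "nf-core pipelines do not accept positional arguments."
        )


check_profile("docker,test", [])    # fine
check_profile("docker", ["test"])   # warns about the stray positional argument
```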
+ } + } } diff --git a/nf_core/pipeline-template/lib/nfcore_external_java_deps.jar b/nf_core/pipeline-template/lib/nfcore_external_java_deps.jar deleted file mode 100644 index 805c8bb5e4..0000000000 Binary files a/nf_core/pipeline-template/lib/nfcore_external_java_deps.jar and /dev/null differ diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index 3d632eb8c5..78da158856 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -46,7 +46,7 @@ if (params.validate_params) { validateParameters() } -WorkflowMain.initialise(workflow, params, log) +WorkflowMain.initialise(workflow, params, log, args) /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 223bcacc9c..8660da2d42 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -7,17 +7,17 @@ "nf-core": { "custom/dumpsoftwareversions": { "branch": "master", - "git_sha": "bba7e362e4afead70653f84d8700588ea28d0f9e", + "git_sha": "8ec825f465b9c17f9d83000022995b4f7de6fe93", "installed_by": ["modules"] }, "fastqc": { "branch": "master", - "git_sha": "65ad3e0b9a4099592e1102e92e10455dc661cf53", + "git_sha": "c9488585ce7bd35ccd2a30faa2371454c8112fb9", "installed_by": ["modules"] }, "multiqc": { "branch": "master", - "git_sha": "4ab13872435962dadc239979554d13709e20bf29", + "git_sha": "8ec825f465b9c17f9d83000022995b4f7de6fe93", "installed_by": ["modules"] } } diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/environment.yml b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/environment.yml index f0c63f6984..9b3272bc11 100644 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/environment.yml +++ b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/environment.yml @@ -4,4 +4,4 @@ channels: - bioconda - defaults dependencies: - - bioconda::multiqc=1.17 + - bioconda::multiqc=1.19 diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf index 7685b33cde..f2187611cc 100644 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf @@ -4,8 +4,8 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/multiqc:1.17--pyhdfd78af_0' : - 'biocontainers/multiqc:1.17--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.19--pyhdfd78af_0' : + 'biocontainers/multiqc:1.19--pyhdfd78af_0' }" input: path versions diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test index eec1db10a2..b1e1630bb3 100644 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test +++ b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test @@ -31,7 +31,12 @@ nextflow_process { then { assertAll( { assert process.success }, - { assert snapshot(process.out).match() } + { assert snapshot( + process.out.versions, + file(process.out.mqc_yml[0]).readLines()[0..10], + file(process.out.yml[0]).readLines()[0..7] + ).match() + } ) } } diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap index 4274ed57aa..5f59a936d7 100644 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap +++ b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap @@ -1,27 +1,33 @@ { "Should run without failures": { "content": [ - { - "0": [ - "software_versions.yml:md5,1c851188476409cda5752ce971b20b58" - ], - "1": [ - "software_versions_mqc.yml:md5,2570f4ba271ad08357b0d3d32a9cf84d" - ], - "2": [ - "versions.yml:md5,3843ac526e762117eedf8825b40683df" - ], - "mqc_yml": [ - "software_versions_mqc.yml:md5,2570f4ba271ad08357b0d3d32a9cf84d" - ], - "versions": [ - "versions.yml:md5,3843ac526e762117eedf8825b40683df" - ], - "yml": [ - "software_versions.yml:md5,1c851188476409cda5752ce971b20b58" - ] - } + [ + "versions.yml:md5,76d454d92244589d32455833f7c1ba6d" + ], + [ + "data: \"\\n\\n \\n \\n \\n \\n \\n \\n \\n\\", + " \\n\\n\\n \\n \\n\\", + " \\ \\n\\n\\n\\n \\n \\", + " \\ \\n \\n\\n\\n\\n\\", + " \\n\\n \\n \\n\\", + " \\ \\n\\n\\n\\n\\n\\n \\n\\", + " \\ \\n \\n\\n\\n\\n\\", + " \\n\\n \\n \\n\\" + ], + [ + "CUSTOM_DUMPSOFTWAREVERSIONS:", + " python: 3.11.7", + " yaml: 5.4.1", + "TOOL1:", + " tool1: 0.11.9", + "TOOL2:", + " tool2: '1.9'", + "Workflow:" + ] ], - "timestamp": "2023-11-03T14:43:22.157011" + "timestamp": "2024-01-09T23:01:18.710682" } -} +} \ No newline at end of file diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test index b9e8f926eb..1f21c66469 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test @@ -3,24 +3,20 @@ nextflow_process { name "Test Process FASTQC" script "../main.nf" process "FASTQC" + tag "modules" tag "modules_nfcore" tag "fastqc" - test("Single-Read") { + test("sarscov2 single-end [fastq]") { when { - params { - outdir = "$outputDir" - } process { """ - input[0] = [ + input[0] = Channel.of([ [ id: 'test', single_end:true ], - [ - file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) - ] - ] + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) ] + ]) """ } } @@ -28,82 +24,189 @@ nextflow_process { then { assertAll ( { assert 
process.success }, + // NOTE The report contains the date inside it, which means that the md5sum is stable per day, but not longer than that. So you can't md5sum it. // looks like this:
// Mon 2 Oct 2023
// test.gz
// https://github.com/nf-core/modules/pull/3903#issuecomment-1743620039 - { assert process.out.html.get(0).get(1) ==~ ".*/test_fastqc.html" }, - { assert path(process.out.html.get(0).get(1)).getText().contains("") }, - { assert snapshot(process.out.versions).match("versions") }, - { assert process.out.zip.get(0).get(1) ==~ ".*/test_fastqc.zip" } + + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("") }, + + { assert snapshot(process.out.versions).match("versions") } + ) + } + } + + test("sarscov2 paired-end [fastq]") { + + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true) ] + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + + { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, + { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, + { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, + { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, + { assert path(process.out.html[0][1][0]).text.contains("") }, + { assert path(process.out.html[0][1][1]).text.contains("") }, + + { assert snapshot(process.out.versions).match("versions") } + ) + } + } + + test("sarscov2 interleaved [fastq]") { + + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_interleaved.fastq.gz', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("") }, + + { assert snapshot(process.out.versions).match("versions") } ) } } -// TODO -// // -// // Test with paired-end data -// // -// workflow test_fastqc_paired_end { -// input = [ -// [id: 'test', single_end: false], // meta map -// [ -// file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), -// file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) -// ] -// ] - -// FASTQC ( input ) -// } - -// // -// // Test with interleaved data -// // -// workflow test_fastqc_interleaved { -// input = [ -// [id: 'test', single_end: false], // meta map -// file(params.test_data['sarscov2']['illumina']['test_interleaved_fastq_gz'], checkIfExists: true) -// ] - -// FASTQC ( input ) -// } - -// // -// // Test with bam data -// // -// workflow test_fastqc_bam { -// input = [ -// [id: 'test', single_end: false], // meta map -// file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) -// ] - -// FASTQC ( input ) -// } - -// // -// // Test with multiple samples -// // -// workflow test_fastqc_multiple { -// input = [ -// [id: 'test', single_end: false], // meta map -// [ -// file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), -// file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true), -// file(params.test_data['sarscov2']['illumina']['test2_1_fastq_gz'], checkIfExists: true), -// 
file(params.test_data['sarscov2']['illumina']['test2_2_fastq_gz'], checkIfExists: true) -// ] -// ] - -// FASTQC ( input ) -// } - -// // -// // Test with custom prefix -// // -// workflow test_fastqc_custom_prefix { -// input = [ -// [ id:'mysample', single_end:true ], // meta map -// file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) -// ] - -// FASTQC ( input ) -// } + + test("sarscov2 paired-end [bam]") { + + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("") }, + + { assert snapshot(process.out.versions).match("versions") } + ) + } + } + + test("sarscov2 multiple [fastq]") { + + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_2.fastq.gz', checkIfExists: true) ] + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + + { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, + { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, + { assert process.out.html[0][1][2] ==~ ".*/test_3_fastqc.html" }, + { assert process.out.html[0][1][3] ==~ ".*/test_4_fastqc.html" }, + { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, + { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, + { assert process.out.zip[0][1][2] ==~ ".*/test_3_fastqc.zip" }, + { assert process.out.zip[0][1][3] ==~ ".*/test_4_fastqc.zip" }, + { assert path(process.out.html[0][1][0]).text.contains("") }, + { assert path(process.out.html[0][1][1]).text.contains("") }, + { assert path(process.out.html[0][1][2]).text.contains("") }, + { assert path(process.out.html[0][1][3]).text.contains("") }, + + { assert snapshot(process.out.versions).match("versions") } + ) + } + } + + test("sarscov2 custom_prefix") { + + when { + process { + """ + input[0] = Channel.of([ + [ id:'mysample', single_end:true ], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + + { assert process.out.html[0][1] ==~ ".*/mysample_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("") }, + + { assert snapshot(process.out.versions).match("versions") } + ) + } + } + + test("sarscov2 single-end [fastq] - stub") { + + options "-stub" + + when { + process { + """ + input[0] = Channel.of([ + [ id: 'test', single_end:true ], + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) ] + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert 
snapshot(process.out.html.collect { file(it[1]).getName() } + + process.out.zip.collect { file(it[1]).getName() } + + process.out.versions ).match() } + ) + } + } + } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap index 636a32cead..5d624bb82e 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap @@ -1,10 +1,20 @@ { + "sarscov2 single-end [fastq] - stub": { + "content": [ + [ + "test.html", + "test.zip", + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ] + ], + "timestamp": "2024-01-17T18:40:57.254299" + }, "versions": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], - "timestamp": "2023-10-09T23:40:54+0000" + "timestamp": "2024-01-17T18:36:50.033627" } } \ No newline at end of file diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml index bc0bdb5b68..7625b75206 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml @@ -4,4 +4,4 @@ channels: - bioconda - defaults dependencies: - - bioconda::multiqc=1.18 + - bioconda::multiqc=1.19 diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf index 00cc48d275..1b9f7c431d 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf @@ -3,8 +3,8 @@ process MULTIQC { conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/multiqc:1.18--pyhdfd78af_0' : - 'biocontainers/multiqc:1.18--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.19--pyhdfd78af_0' : + 'biocontainers/multiqc:1.19--pyhdfd78af_0' }" input: path multiqc_files, stageAs: "?/*" @@ -43,7 +43,7 @@ process MULTIQC { stub: """ - touch multiqc_data + mkdir multiqc_data touch multiqc_plots touch multiqc_report.html diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml index f1aa660eb7..45a9bc35e1 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml @@ -1,4 +1,3 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json name: multiqc description: Aggregate results from bioinformatics analyses across many samples into a single report keywords: diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test index c2dad217c4..d0438eda6b 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test @@ -7,12 +7,9 @@ nextflow_process { tag "modules_nfcore" tag "multiqc" - test("MULTIQC: FASTQC") { + test("sarscov2 single-end [fastqc]") { when { - params { - outdir = "$outputDir" - } process { """ input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)]) @@ -26,20 +23,17 @@ nextflow_process { then { assertAll( { assert process.success }, - { assert path(process.out.report.get(0)).exists() }, - { assert path(process.out.data.get(0)).exists() }, - { assert path(process.out.versions.get(0)).getText().contains("multiqc") } + { assert process.out.report[0] ==~ ".*/multiqc_report.html" }, + { assert process.out.data[0] ==~ ".*/multiqc_data" }, + { assert snapshot(process.out.versions).match("versions") } ) } } - test("MULTIQC: FASTQC and a config file") { + test("sarscov2 single-end [fastqc] [config]") { when { - params { - outdir = "$outputDir" - } process { """ input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)]) @@ -53,9 +47,35 @@ nextflow_process { then { assertAll( { assert process.success }, - { assert path(process.out.report.get(0)).exists() }, - { assert path(process.out.data.get(0)).exists() }, - { assert path(process.out.versions.get(0)).getText().contains("multiqc") } + { assert process.out.report[0] ==~ ".*/multiqc_report.html" }, + { assert process.out.data[0] ==~ ".*/multiqc_data" }, + { assert snapshot(process.out.versions).match("versions") } + ) + } + } + + test("sarscov2 single-end [fastqc] - stub") { + + options "-stub" + + when { + process { + """ + input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)]) + input[1] = [] + input[2] = [] + input[3] = [] + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out.report.collect { file(it).getName() } + + process.out.data.collect { file(it).getName() } + + process.out.plots.collect { file(it).getName() } + + process.out.versions ).match() } ) } diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap 
b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap new file mode 100644 index 0000000000..d37e73040d --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap @@ -0,0 +1,21 @@ +{ + "versions": { + "content": [ + [ + "versions.yml:md5,14e9a2661241abd828f4f06a7b5c222d" + ] + ], + "timestamp": "2024-01-09T23:02:49.911994" + }, + "sarscov2 single-end [fastqc] - stub": { + "content": [ + [ + "multiqc_report.html", + "multiqc_data", + "multiqc_plots", + "versions.yml:md5,14e9a2661241abd828f4f06a7b5c222d" + ] + ], + "timestamp": "2024-01-09T23:03:14.524346" + } +} \ No newline at end of file diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index dec8051a17..3153ff70d6 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -78,7 +78,7 @@ try { } // Load {{ name }} custom profiles from different institutions. -// Warning: Uncomment only if a pipeline-specific instititutional config already exists on nf-core/configs! +// Warning: Uncomment only if a pipeline-specific institutional config already exists on nf-core/configs! // try { // includeConfig "${params.custom_config_base}/pipeline/{{ short_name }}.config" // } catch (Exception e) { @@ -100,6 +100,7 @@ profiles { podman.enabled = false shifter.enabled = false charliecloud.enabled = false + channels = ['conda-forge', 'bioconda', 'defaults'] apptainer.enabled = false } mamba { diff --git a/nf_core/pipeline-template/pyproject.toml b/nf_core/pipeline-template/pyproject.toml index 0d62beb6f9..7d08e1c8ef 100644 --- a/nf_core/pipeline-template/pyproject.toml +++ b/nf_core/pipeline-template/pyproject.toml @@ -1,10 +1,13 @@ -# Config file for Python. Mostly used to configure linting of bin/check_samplesheet.py with Black. +# Config file for Python. Mostly used to configure linting of bin/*.py with Ruff. # Should be kept the same as nf-core/tools to avoid fighting with template synchronisation. 
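The template `pyproject.toml` hunk here switches the configuration from Black and isort to Ruff, so it helps to see what the selected rule groups actually do to pipeline `bin/` scripts. A tiny before/after sketch (the sample file is invented so the snippet runs): `UP` (pyupgrade) drops the redundant `"r"` open mode, `E7` rewrites `not x in y`, and `I` keeps imports sorted.

```python
from pathlib import Path

Path("samplesheet.csv").write_text("sample,fastq_1\n")  # toy input for the example

# Before: open("samplesheet.csv", "r") / if not "sample" in header
# After `ruff check --fix` and `ruff format`:
with open("samplesheet.csv") as fh:  # UP015: redundant "r" mode removed
    header = fh.readline()
if "sample" not in header:  # E713: membership test rewritten as `not in`
    raise SystemExit("missing 'sample' column")
```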
-[tool.black] +[tool.ruff] line-length = 120 -target_version = ["py37", "py38", "py39", "py310"] +target-version = "py38" +select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"] +cache-dir = "~/.cache/ruff" -[tool.isort] -profile = "black" -known_first_party = ["nf_core"] -multi_line_output = 3 +[tool.ruff.isort] +known-first-party = ["nf_core"] + +[tool.ruff.per-file-ignores] +"__init__.py" = ["E402", "F401"] diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index 558e9a1f9f..4583f2a9d6 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -127,6 +127,13 @@ workflow.onComplete { } } +workflow.onError { + if (workflow.errorReport.contains("Process requirement exceeds available memory")) { + println("🛑 Default resources exceed availability 🛑 ") + println("💡 See here on how to configure pipeline: https://nf-co.re/docs/usage/configuration#tuning-workflow-resources 💡") + } +} + /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ THE END diff --git a/nf_core/refgenie.py b/nf_core/refgenie.py index 6f09b75532..de9201bcd6 100644 --- a/nf_core/refgenie.py +++ b/nf_core/refgenie.py @@ -2,7 +2,6 @@ Update a nextflow.config file with refgenie genomes """ -import json import logging import os import re @@ -86,7 +85,7 @@ def _update_nextflow_home_config(refgenie_genomes_config_file, nxf_home): if os.path.exists(nxf_home_config): # look for include statement in config has_include_statement = False - with open(nxf_home_config, "r") as fh: + with open(nxf_home_config) as fh: lines = fh.readlines() for line in lines: if re.match(rf"\s*includeConfig\s*'{os.path.abspath(refgenie_genomes_config_file)}'", line): diff --git a/nf_core/schema.py b/nf_core/schema.py index 7e4726f189..b0c5dc04b6 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -1,6 +1,5 @@ """ Code to deal with pipeline JSON Schema """ -from __future__ import print_function import copy import json @@ -107,7 +106,7 @@ def load_lint_schema(self): def load_schema(self): """Load a pipeline schema from a file""" - with open(self.schema_filename, "r") as fh: + with open(self.schema_filename) as fh: self.schema = json.load(fh) self.schema_defaults = {} self.schema_params = {} @@ -189,7 +188,7 @@ def load_input_params(self, params_path): """ # First, try to load as JSON try: - with open(params_path, "r") as fh: + with open(params_path) as fh: try: params = json.load(fh) except json.JSONDecodeError as e: @@ -200,7 +199,7 @@ def load_input_params(self, params_path): log.debug(f"Could not load input params as JSON: {json_e}") # This failed, try to load as YAML try: - with open(params_path, "r") as fh: + with open(params_path) as fh: params = yaml.safe_load(fh) self.input_params.update(params) log.debug(f"Loaded YAML input params: {params_path}") @@ -317,7 +316,7 @@ def validate_config_default_parameter(self, param, schema_param, config_default) param ] = f"String should not be set to `{config_default}`" if schema_param["type"] == "boolean": - if not str(config_default) in ["false", "true"]: + if str(config_default) not in ["false", "true"]: self.invalid_nextflow_config_default_parameters[ param ] = f"Booleans should only be true or false, not `{config_default}`" @@ -757,9 +756,7 @@ def prompt_remove_schema_notfound_config(self, p_key): if self.no_prompts or self.schema_from_scratch: return True if Confirm.ask( - ":question: Unrecognised [bold]'params.{}'[/] found in the schema but not in 
the pipeline config! [yellow]Remove it?".format( - p_key - ) + f":question: Unrecognised [bold]'params.{p_key}'[/] found in the schema but not in the pipeline config! [yellow]Remove it?" ): return True return False diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index ffba41f9da..3a87190422 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -6,7 +6,6 @@ nf-core subworkflows lint """ -from __future__ import print_function import logging import os @@ -16,7 +15,7 @@ import nf_core.modules.modules_utils import nf_core.utils -from nf_core.components.lint import ComponentLint, LintException, LintResult +from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult from nf_core.lint_utils import console log = logging.getLogger(__name__) @@ -114,11 +113,11 @@ def lint( # Only lint the given module if subworkflow: if all_subworkflows: - raise LintException("You cannot specify a tool and request all tools to be linted.") + raise LintExceptionError("You cannot specify a tool and request all tools to be linted.") local_subworkflows = [] remote_subworkflows = [s for s in self.all_remote_components if s.component_name == subworkflow] if len(remote_subworkflows) == 0: - raise LintException(f"Could not find the specified subworkflow: '{subworkflow}'") + raise LintExceptionError(f"Could not find the specified subworkflow: '{subworkflow}'") else: local_subworkflows = self.all_local_components remote_subworkflows = self.all_remote_components diff --git a/nf_core/subworkflows/lint/main_nf.py b/nf_core/subworkflows/lint/main_nf.py index c7ce77490d..f59e1e4279 100644 --- a/nf_core/subworkflows/lint/main_nf.py +++ b/nf_core/subworkflows/lint/main_nf.py @@ -32,7 +32,7 @@ def main_nf(_, subworkflow): if lines is None: try: # Check whether file exists and load it - with open(subworkflow.main_nf, "r") as fh: + with open(subworkflow.main_nf) as fh: lines = fh.readlines() subworkflow.passed.append(("main_nf_exists", "Subworkflow file exists", subworkflow.main_nf)) except FileNotFoundError: @@ -45,30 +45,30 @@ def main_nf(_, subworkflow): subworkflow_lines = [] workflow_lines = [] main_lines = [] - for l in lines: - if re.search(r"^\s*workflow\s*\w*\s*{", l) and state == "subworkflow": + for line in lines: + if re.search(r"^\s*workflow\s*\w*\s*{", line) and state == "subworkflow": state = "workflow" - if re.search(r"take\s*:", l) and state in ["workflow"]: + if re.search(r"take\s*:", line) and state in ["workflow"]: state = "take" continue - if re.search(r"main\s*:", l) and state in ["take", "workflow"]: + if re.search(r"main\s*:", line) and state in ["take", "workflow"]: state = "main" continue - if re.search(r"emit\s*:", l) and state in ["take", "main", "workflow"]: + if re.search(r"emit\s*:", line) and state in ["take", "main", "workflow"]: state = "emit" continue # Perform state-specific linting checks - if state == "subworkflow" and not _is_empty(l): - subworkflow_lines.append(l) - if state == "workflow" and not _is_empty(l): - workflow_lines.append(l) - if state == "take" and not _is_empty(l): - inputs.extend(_parse_input(subworkflow, l)) - if state == "emit" and not _is_empty(l): - outputs.extend(_parse_output(subworkflow, l)) - if state == "main" and not _is_empty(l): - main_lines.append(l) + if state == "subworkflow" and not _is_empty(line): + subworkflow_lines.append(line) + if state == "workflow" and not _is_empty(line): + workflow_lines.append(line) + if state == "take" and not _is_empty(line): + 
inputs.extend(_parse_input(subworkflow, line)) + if state == "emit" and not _is_empty(line): + outputs.extend(_parse_output(subworkflow, line)) + if state == "main" and not _is_empty(line): + main_lines.append(line) # Check that we have required sections if not len(outputs): @@ -177,9 +177,9 @@ def check_subworkflow_section(self, lines): ) includes = [] - for l in lines: - if l.strip().startswith("include"): - component_name = l.split("{")[1].split("}")[0].strip() + for line in lines: + if line.strip().startswith("include"): + component_name = line.split("{")[1].split("}")[0].strip() if " as " in component_name: component_name = component_name.split(" as ")[1].strip() includes.append(component_name) diff --git a/nf_core/subworkflows/lint/meta_yml.py b/nf_core/subworkflows/lint/meta_yml.py index 4944b26188..24e75eddbf 100644 --- a/nf_core/subworkflows/lint/meta_yml.py +++ b/nf_core/subworkflows/lint/meta_yml.py @@ -26,7 +26,7 @@ def meta_yml(subworkflow_lint_object, subworkflow): """ # Read the meta.yml file try: - with open(subworkflow.meta_yml, "r") as fh: + with open(subworkflow.meta_yml) as fh: meta_yaml = yaml.safe_load(fh) subworkflow.passed.append(("meta_yml_exists", "Subworkflow `meta.yml` exists", subworkflow.meta_yml)) except FileNotFoundError: @@ -36,9 +36,7 @@ def meta_yml(subworkflow_lint_object, subworkflow): # Confirm that the meta.yml file is valid according to the JSON schema valid_meta_yml = True try: - with open( - Path(subworkflow_lint_object.modules_repo.local_repo_dir, "subworkflows/yaml-schema.json"), "r" - ) as fh: + with open(Path(subworkflow_lint_object.modules_repo.local_repo_dir, "subworkflows/yaml-schema.json")) as fh: schema = json.load(fh) jsonschema.validators.validate(instance=meta_yaml, schema=schema) subworkflow.passed.append(("meta_yml_valid", "Subworkflow `meta.yml` is valid", subworkflow.meta_yml)) @@ -118,7 +116,7 @@ def meta_yml(subworkflow_lint_object, subworkflow): subworkflow.failed.append( ( "meta_modules_deprecated", - f"Deprecated section 'modules' found in `meta.yml`, use 'components' instead", + "Deprecated section 'modules' found in `meta.yml`, use 'components' instead", subworkflow.meta_yml, ) ) @@ -126,7 +124,7 @@ def meta_yml(subworkflow_lint_object, subworkflow): subworkflow.passed.append( ( "meta_modules_deprecated", - f"Deprecated section 'modules' not found in `meta.yml`", + "Deprecated section 'modules' not found in `meta.yml`", subworkflow.meta_yml, ) ) diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index 1ebced6d42..f7284320ea 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -52,14 +52,14 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): ) if subworkflow.nftest_main_nf.is_file(): - with open(subworkflow.nftest_main_nf, "r") as fh: + with open(subworkflow.nftest_main_nf) as fh: # Check if main.nf.test.snap file exists, if 'snap(' is inside main.nf.test if "snapshot(" in fh.read(): snap_file = subworkflow.nftest_testdir / "main.nf.test.snap" if snap_file.is_file(): subworkflow.passed.append(("test_snapshot_exists", "test `main.nf.test.snap` exists", snap_file)) # Validate no empty files - with open(snap_file, "r") as snap_fh: + with open(snap_file) as snap_fh: try: snap_content = json.load(snap_fh) for test_name in snap_content.keys(): @@ -158,7 +158,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): pytest_yml_path = subworkflow.base_dir / "tests" / "config" / "pytest_modules.yml" if 
pytest_yml_path.is_file() and not is_pytest: try: - with open(pytest_yml_path, "r") as fh: + with open(pytest_yml_path) as fh: pytest_yml = yaml.safe_load(fh) if "subworkflows/" + subworkflow.component_name in pytest_yml.keys(): subworkflow.failed.append( @@ -178,7 +178,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): if subworkflow.tags_yml.is_file(): # Check tags.yml exists and it has the correct entry subworkflow.passed.append(("test_tags_yml_exists", "file `tags.yml` exists", subworkflow.tags_yml)) - with open(subworkflow.tags_yml, "r") as fh: + with open(subworkflow.tags_yml) as fh: tags_yml = yaml.safe_load(fh) if "subworkflows/" + subworkflow.component_name in tags_yml.keys(): subworkflow.passed.append(("test_tags_yml", "correct entry in tags.yml", subworkflow.tags_yml)) diff --git a/nf_core/sync.py b/nf_core/sync.py index 5402a6121d..995baeacd2 100644 --- a/nf_core/sync.py +++ b/nf_core/sync.py @@ -23,13 +23,13 @@ log = logging.getLogger(__name__) -class SyncException(Exception): +class SyncExceptionError(Exception): """Exception raised when there was an error with TEMPLATE branch synchronisation""" pass -class PullRequestException(Exception): +class PullRequestExceptionError(Exception): """Exception raised when there was an error creating a Pull-Request on GitHub.com""" pass @@ -96,7 +96,7 @@ def __init__( default=False, ).unsafe_ask() if overwrite_template or "template" not in self.config_yml: - with open(template_yaml_path, "r") as f: + with open(template_yaml_path) as f: self.config_yml["template"] = yaml.safe_load(f) with open(self.config_yml_path, "w") as fh: yaml.safe_dump(self.config_yml, fh) @@ -138,20 +138,20 @@ def sync(self): try: # Check that we have an API auth token if os.environ.get("GITHUB_AUTH_TOKEN", "") == "": - raise PullRequestException("GITHUB_AUTH_TOKEN not set!") + raise PullRequestExceptionError("GITHUB_AUTH_TOKEN not set!") # Check that we know the github username and repo name if self.gh_username is None and self.gh_repo is None: - raise PullRequestException("Could not find GitHub username and repo name") + raise PullRequestExceptionError("Could not find GitHub username and repo name") self.push_template_branch() self.create_merge_base_branch() self.push_merge_branch() self.make_pull_request() self.close_open_template_merge_prs() - except PullRequestException as e: + except PullRequestExceptionError as e: self.reset_target_dir() - raise PullRequestException(e) + raise PullRequestExceptionError(e) self.reset_target_dir() @@ -170,7 +170,7 @@ def inspect_sync_dir(self): try: self.repo = git.Repo(self.pipeline_dir) except InvalidGitRepositoryError: - raise SyncException(f"'{self.pipeline_dir}' does not appear to be a git repository") + raise SyncExceptionError(f"'{self.pipeline_dir}' does not appear to be a git repository") # get current branch so we can switch back later self.original_branch = self.repo.active_branch.name @@ -178,7 +178,7 @@ def inspect_sync_dir(self): # Check to see if there are uncommitted changes on current branch if self.repo.is_dirty(untracked_files=True): - raise SyncException( + raise SyncExceptionError( "Uncommitted changes found in pipeline directory!\nPlease commit these before running nf-core sync" ) @@ -192,7 +192,7 @@ def get_wf_config(self): log.info(f"Checking out workflow branch '{self.from_branch}'") self.repo.git.checkout(self.from_branch) except GitCommandError: - raise SyncException(f"Branch `{self.from_branch}` not found!") + raise SyncExceptionError(f"Branch `{self.from_branch}` not found!") # If not 
specified, get the name of the active branch if not self.from_branch: @@ -208,7 +208,7 @@ def get_wf_config(self): # Check that we have the required variables for rvar in self.required_config_vars: if rvar not in self.wf_config: - raise SyncException(f"Workflow config variable `{rvar}` not found!") + raise SyncExceptionError(f"Workflow config variable `{rvar}` not found!") def checkout_template_branch(self): """ @@ -223,7 +223,7 @@ def checkout_template_branch(self): try: self.repo.git.checkout("TEMPLATE") except GitCommandError: - raise SyncException("Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'") + raise SyncExceptionError("Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'") def delete_template_branch_files(self): """ @@ -242,7 +242,7 @@ def delete_template_branch_files(self): elif os.path.isdir(file_path): shutil.rmtree(file_path) except Exception as e: - raise SyncException(e) + raise SyncExceptionError(e) def make_template_pipeline(self): """ @@ -272,7 +272,7 @@ def make_template_pipeline(self): except Exception as err: # Reset to where you were to prevent git getting messed up. self.repo.git.reset("--hard") - raise SyncException(f"Failed to rebuild pipeline from template with error:\n{err}") + raise SyncExceptionError(f"Failed to rebuild pipeline from template with error:\n{err}") def commit_template_changes(self): """If we have any changes with the new template files, make a git commit""" @@ -287,7 +287,7 @@ def commit_template_changes(self): self.made_changes = True log.info("Committed changes to 'TEMPLATE' branch") except Exception as e: - raise SyncException(f"Could not commit changes to TEMPLATE:\n{e}") + raise SyncExceptionError(f"Could not commit changes to TEMPLATE:\n{e}") return True def push_template_branch(self): @@ -299,7 +299,7 @@ def push_template_branch(self): try: self.repo.git.push() except GitCommandError as e: - raise PullRequestException(f"Could not push TEMPLATE branch:\n {e}") + raise PullRequestExceptionError(f"Could not push TEMPLATE branch:\n {e}") def create_merge_base_branch(self): """Create a new branch from the updated TEMPLATE branch @@ -326,7 +326,7 @@ def create_merge_base_branch(self): try: self.repo.create_head(self.merge_branch) except GitCommandError as e: - raise SyncException(f"Could not create new branch '{self.merge_branch}'\n{e}") + raise SyncExceptionError(f"Could not create new branch '{self.merge_branch}'\n{e}") def push_merge_branch(self): """Push the newly created merge branch to the remote repository""" @@ -335,7 +335,7 @@ def push_merge_branch(self): origin = self.repo.remote() origin.push(self.merge_branch) except GitCommandError as e: - raise PullRequestException(f"Could not push branch '{self.merge_branch}':\n {e}") + raise PullRequestExceptionError(f"Could not push branch '{self.merge_branch}':\n {e}") def make_pull_request(self): """Create a pull request to a base branch (default: dev), @@ -374,7 +374,7 @@ def make_pull_request(self): ) except Exception as e: stderr.print_exception() - raise PullRequestException(f"Something went badly wrong - {e}") + raise PullRequestExceptionError(f"Something went badly wrong - {e}") else: self.gh_pr_returned_data = r.json() self.pr_url = self.gh_pr_returned_data["html_url"] @@ -395,7 +395,7 @@ def close_open_template_merge_prs(self): try: list_prs_json = json.loads(list_prs_request.content) list_prs_pp = json.dumps(list_prs_json, indent=4) - except: + except Exception: list_prs_json = list_prs_request.content list_prs_pp = list_prs_request.content @@ -438,7 +438,7 @@ def 
close_open_pr(self, pr): try: pr_request_json = json.loads(pr_request.content) pr_request_pp = json.dumps(pr_request_json, indent=4) - except: + except Exception: pr_request_json = pr_request.content pr_request_pp = pr_request.content @@ -462,4 +462,4 @@ def reset_target_dir(self): try: self.repo.git.checkout(self.original_branch) except GitCommandError as e: - raise SyncException(f"Could not reset to original branch `{self.original_branch}`:\n{e}") + raise SyncExceptionError(f"Could not reset to original branch `{self.original_branch}`:\n{e}") diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index a2107f633c..ac0f467e66 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -6,7 +6,6 @@ from typing import Dict import git -import rich.progress from git.exc import GitCommandError from nf_core.utils import load_tools_config @@ -117,8 +116,6 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa self.remote_url = remote_url - self.fullname = nf_core.modules.modules_utils.repo_full_name_from_remote(self.remote_url) - self.setup_local_repo(remote_url, branch, hide_progress) config_fn, repo_config = load_tools_config(self.local_repo_dir) diff --git a/nf_core/utils.py b/nf_core/utils.py index bcc8faa3fd..e1778b55b3 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -139,7 +139,7 @@ def __init__(self, wf_path): try: repo = git.Repo(self.wf_path) self.git_sha = repo.head.object.hexsha - except: + except Exception: log.debug(f"Could not find git hash for pipeline: {self.wf_path}") # Overwrite if we have the last commit from the PR - otherwise we get a merge commit hash @@ -159,8 +159,8 @@ def _list_files(self): git_ls_files = subprocess.check_output(["git", "ls-files"], cwd=self.wf_path).splitlines() self.files = [] for fn in git_ls_files: - full_fn = os.path.join(self.wf_path, fn.decode("utf-8")) - if os.path.isfile(full_fn): + full_fn = Path(self.wf_path) / fn.decode("utf-8") + if full_fn.is_file(): self.files.append(full_fn) else: log.debug(f"`git ls-files` returned '{full_fn}' but could not open it!") @@ -170,7 +170,7 @@ def _list_files(self): self.files = [] for subdir, _, files in os.walk(self.wf_path): for fn in files: - self.files.append(os.path.join(subdir, fn)) + self.files.append(Path(subdir) / fn) def _load_pipeline_config(self): """Get the nextflow config for this pipeline @@ -181,14 +181,14 @@ def _load_pipeline_config(self): self.pipeline_prefix, self.pipeline_name = self.nf_config.get("manifest.name", "").strip("'").split("/") - nextflowVersionMatch = re.search(r"[0-9\.]+(-edge)?", self.nf_config.get("manifest.nextflowVersion", "")) - if nextflowVersionMatch: - self.minNextflowVersion = nextflowVersionMatch.group(0) + nextflow_version_match = re.search(r"[0-9\.]+(-edge)?", self.nf_config.get("manifest.nextflowVersion", "")) + if nextflow_version_match: + self.minNextflowVersion = nextflow_version_match.group(0) def _load_conda_environment(self): """Try to load the pipeline environment.yml file, if it exists""" try: - with open(os.path.join(self.wf_path, "environment.yml"), "r") as fh: + with open(os.path.join(self.wf_path, "environment.yml")) as fh: self.conda_config = yaml.safe_load(fh) except FileNotFoundError: log.debug("No conda `environment.yml` file found.") @@ -262,7 +262,7 @@ def fetch_wf_config(wf_path, cache_config=True): cache_path = os.path.join(cache_basedir, cache_fn) if os.path.isfile(cache_path) and cache_config is True: log.debug(f"Found a config cache, loading: {cache_path}") - with open(cache_path, "r") as 
fh: + with open(cache_path) as fh: try: config = json.load(fh) except json.JSONDecodeError as e: @@ -274,8 +274,8 @@ def fetch_wf_config(wf_path, cache_config=True): result = run_cmd("nextflow", f"config -flat {wf_path}") if result is not None: nfconfig_raw, _ = result - for l in nfconfig_raw.splitlines(): - ul = l.decode("utf-8") + for line in nfconfig_raw.splitlines(): + ul = line.decode("utf-8") try: k, v = ul.split(" = ", 1) config[k] = v.strip("'\"") @@ -286,9 +286,9 @@ def fetch_wf_config(wf_path, cache_config=True): # Values in this file are likely to be complex, so don't both trying to capture them. Just get the param name. try: main_nf = os.path.join(wf_path, "main.nf") - with open(main_nf, "r") as fh: - for l in fh: - match = re.match(r"^\s*(params\.[a-zA-Z0-9_]+)\s*=", l) + with open(main_nf) as fh: + for line in fh: + match = re.match(r"^\s*(params\.[a-zA-Z0-9_]+)\s*=", line) if match: config[match.group(1)] = "null" except FileNotFoundError as e: @@ -312,7 +312,7 @@ def run_cmd(executable: str, cmd: str) -> Union[Tuple[bytes, bytes], None]: full_cmd = f"{executable} {cmd}" log.debug(f"Running command: {full_cmd}") try: - proc = subprocess.run(shlex.split(full_cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) + proc = subprocess.run(shlex.split(full_cmd), capture_output=True, check=True) return (proc.stdout, proc.stderr) except OSError as e: if e.errno == errno.ENOENT: @@ -341,7 +341,7 @@ def setup_nfcore_dir(): return True -def setup_requests_cachedir(): +def setup_requests_cachedir() -> dict: """Sets up local caching for faster remote HTTP requests. Caching directory will be set up in the user's home directory under @@ -351,8 +351,7 @@ def setup_requests_cachedir(): Also returns the config dict so that we can use the same setup with a Session. 
""" pyversion = ".".join(str(v) for v in sys.version_info[0:3]) - cachedir = os.path.join(NFCORE_CACHE_DIR, f"cache_{pyversion}") - + cachedir = setup_nfcore_cachedir(f"cache_{pyversion}") config = { "cache_name": os.path.join(cachedir, "github_info"), "expire_after": datetime.timedelta(hours=1), @@ -360,14 +359,21 @@ def setup_requests_cachedir(): } logging.getLogger("requests_cache").setLevel(logging.WARNING) + return config + + +def setup_nfcore_cachedir(cache_fn: Union[str, Path]) -> Path: + """Sets up local caching for caching files between sessions.""" + + cachedir = Path(NFCORE_CACHE_DIR, cache_fn) + try: - if not os.path.exists(cachedir): - os.makedirs(cachedir) - requests_cache.install_cache(**config) + if not Path(cachedir).exists(): + Path(cachedir).mkdir(parents=True) except PermissionError: - pass + log.warn(f"Could not create cache directory: {cachedir}") - return config + return cachedir def wait_cli_function(poll_func, refresh_per_second=20): @@ -414,11 +420,14 @@ def poll_nfcore_web_api(api_url, post_data=None): except requests.exceptions.ConnectionError: raise AssertionError(f"Could not connect to URL: {api_url}") else: - if response.status_code != 200: + if response.status_code != 200 and response.status_code != 301: log.debug(f"Response content:\n{response.content}") raise AssertionError( f"Could not access remote API results: {api_url} (HTML {response.status_code} Error)" ) + # follow redirects + if response.status_code == 301: + return poll_nfcore_web_api(response.headers["Location"], post_data) try: web_response = json.loads(response.content) if "status" not in web_response: @@ -433,7 +442,7 @@ def poll_nfcore_web_api(api_url, post_data=None): return web_response -class GitHub_API_Session(requests_cache.CachedSession): +class GitHubAPISession(requests_cache.CachedSession): """ Class to provide a single session for interacting with the GitHub API for a run. Inherits the requests_cache.CachedSession and adds additional functionality, @@ -480,7 +489,7 @@ def __call__(self, r): gh_cli_config_fn = os.path.expanduser("~/.config/gh/hosts.yml") if self.auth is None and os.path.exists(gh_cli_config_fn): try: - with open(gh_cli_config_fn, "r") as fh: + with open(gh_cli_config_fn) as fh: gh_cli_config = yaml.safe_load(fh) self.auth = requests.auth.HTTPBasicAuth( gh_cli_config["github.com"]["user"], gh_cli_config["github.com"]["oauth_token"] @@ -590,7 +599,7 @@ def request_retry(self, url, post_data=None): # Single session object to use for entire codebase. Not sure if there's a better way to do this? 
-gh_api = GitHub_API_Session() +gh_api = GitHubAPISession() def anaconda_package(dep, dep_channels=None): @@ -666,18 +675,18 @@ def parse_anaconda_licence(anaconda_response, version=None): # Clean up / standardise licence names clean_licences = [] - for l in licences: - l = re.sub(r"GNU General Public License v\d \(([^\)]+)\)", r"\1", l) - l = re.sub(r"GNU GENERAL PUBLIC LICENSE", "GPL", l, flags=re.IGNORECASE) - l = l.replace("GPL-", "GPLv") - l = re.sub(r"GPL\s*([\d\.]+)", r"GPL v\1", l) # Add v prefix to GPL version if none found - l = re.sub(r"GPL\s*v(\d).0", r"GPL v\1", l) # Remove superflous .0 from GPL version - l = re.sub(r"GPL \(([^\)]+)\)", r"GPL \1", l) - l = re.sub(r"GPL\s*v", "GPL v", l) # Normalise whitespace to one space between GPL and v - l = re.sub(r"\s*(>=?)\s*(\d)", r" \1\2", l) # Normalise whitespace around >= GPL versions - l = l.replace("Clause", "clause") # BSD capitilisation - l = re.sub(r"-only$", "", l) # Remove superflous GPL "only" version suffixes - clean_licences.append(l) + for license in licences: + license = re.sub(r"GNU General Public License v\d \(([^\)]+)\)", r"\1", license) + license = re.sub(r"GNU GENERAL PUBLIC LICENSE", "GPL", license, flags=re.IGNORECASE) + license = license.replace("GPL-", "GPLv") + license = re.sub(r"GPL\s*([\d\.]+)", r"GPL v\1", license) # Add v prefix to GPL version if none found + license = re.sub(r"GPL\s*v(\d).0", r"GPL v\1", license) # Remove superflous .0 from GPL version + license = re.sub(r"GPL \(([^\)]+)\)", r"GPL \1", license) + license = re.sub(r"GPL\s*v", "GPL v", license) # Normalise whitespace to one space between GPL and v + license = re.sub(r"\s*(>=?)\s*(\d)", r" \1\2", license) # Normalise whitespace around >= GPL versions + license = license.replace("Clause", "clause") # BSD capitilisation + license = re.sub(r"-only$", "", license) # Remove superflous GPL "only" version suffixes + clean_licences.append(license) return clean_licences @@ -792,7 +801,7 @@ def increase_indent(self, flow=False, indentless=False): See https://github.com/yaml/pyyaml/issues/234#issuecomment-765894586 """ - return super(CustomDumper, self).increase_indent(flow=flow, indentless=False) + return super().increase_indent(flow=flow, indentless=False) # HACK: insert blank lines between top-level objects # inspired by https://stackoverflow.com/a/44284819/3786245 @@ -1025,7 +1034,7 @@ def load_tools_config(directory: Union[str, Path] = "."): log.debug(f"No tools config file found: {CONFIG_PATHS[0]}") return Path(directory, CONFIG_PATHS[0]), {} - with open(config_fn, "r") as fh: + with open(config_fn) as fh: tools_config = yaml.safe_load(fh) # If the file is empty @@ -1145,7 +1154,7 @@ def validate_file_md5(file_name, expected_md5hex): if file_md5hex.upper() == expected_md5hex.upper(): log.debug(f"md5 sum of image matches expected: {expected_md5hex}") else: - raise IOError(f"{file_name} md5 does not match remote: {expected_md5hex} - {file_md5hex}") + raise OSError(f"{file_name} md5 does not match remote: {expected_md5hex} - {file_md5hex}") return True diff --git a/pyproject.toml b/pyproject.toml index 2380073107..d75ae89df6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,10 +5,6 @@ requires = [ "wheel" ] -[tool.black] -line-length = 120 -target_version = ["py37", "py38", "py39", "py310"] - [tool.pytest.ini_options] markers = [ "datafiles: load datafiles" @@ -16,7 +12,18 @@ markers = [ testpaths = ["tests"] norecursedirs = [ ".*", "build", "dist", "*.egg", "data", "__pycache__", ".github", "nf_core", "docs"] -[tool.isort] -profile = "black" 
-known_first_party = ["nf_core"] -multi_line_output = 3 +[tool.ruff] +line-length = 120 +target-version = "py38" +select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"] +cache-dir = "~/.cache/ruff" + +[tool.ruff.isort] +known-first-party = ["nf_core"] + +[tool.ruff.per-file-ignores] +"__init__.py" = ["E402", "F401"] + +[tool.ruff.lint.pep8-naming] +extend-ignore-names = ["mocked_*", "*allOf", "*URI*"] + diff --git a/requirements-dev.txt b/requirements-dev.txt index 13dba6f30d..9fbb49c10c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,15 +1,13 @@ -black -isort +mypy myst_parser pytest-cov pytest-datafiles responses +ruff Sphinx sphinx-rtd-theme -mypy -types-PyYAML -pyupgrade -types-requests types-jsonschema types-Markdown +types-PyYAML +types-requests types-setuptools diff --git a/requirements.txt b/requirements.txt index add52f4bc6..e4319d2352 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,10 +5,12 @@ jinja2 jsonschema>=3.0 markdown>=3.3 packaging +pillow +pdiff pre-commit prompt_toolkit>=3.0.3 -pytest>=7.0.0 pytest-workflow>=1.6.0 +pytest>=7.0.0 pyyaml questionary>=1.8.0 refgenie @@ -17,4 +19,4 @@ requests_cache rich-click>=1.6.1 rich>=13.3.1 tabulate -pdiff +trogon diff --git a/setup.py b/setup.py index 84c6529dce..5fd855c798 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "2.11.1" +version = "2.12" with open("README.md") as f: readme = f.read() diff --git a/tests/components/generate_snapshot.py b/tests/components/generate_snapshot.py index 46fd63fe3f..c5067d7210 100644 --- a/tests/components/generate_snapshot.py +++ b/tests/components/generate_snapshot.py @@ -26,7 +26,7 @@ def test_generate_snapshot_module(self): snap_path = Path("modules", "nf-core-test", "fastqc", "tests", "main.nf.test.snap") assert snap_path.exists() - with open(snap_path, "r") as fh: + with open(snap_path) as fh: snap_content = json.load(fh) assert "versions" in snap_content assert "content" in snap_content["versions"] @@ -48,7 +48,7 @@ def test_generate_snapshot_subworkflow(self): snap_path = Path("subworkflows", "nf-core-test", "bam_sort_stats_samtools", "tests", "main.nf.test.snap") assert snap_path.exists() - with open(snap_path, "r") as fh: + with open(snap_path) as fh: snap_content = json.load(fh) assert "test_bam_sort_stats_samtools_paired_end_flagstats" in snap_content assert ( @@ -86,7 +86,7 @@ def test_update_snapshot_module(self): with set_wd(self.nfcore_modules): snap_path = Path("modules", "nf-core-test", "bwa", "mem", "tests", "main.nf.test.snap") - with open(snap_path, "r") as fh: + with open(snap_path) as fh: snap_content = json.load(fh) original_timestamp = snap_content["Single-End"]["timestamp"] # delete the timestamp in json @@ -103,7 +103,7 @@ def test_update_snapshot_module(self): ) snap_generator.run() - with open(snap_path, "r") as fh: + with open(snap_path) as fh: snap_content = json.load(fh) assert "Single-End" in snap_content assert snap_content["Single-End"]["timestamp"] != original_timestamp diff --git a/tests/fixtures/create_logo.png b/tests/fixtures/create_logo.png new file mode 100644 index 0000000000..cb897a810f Binary files /dev/null and b/tests/fixtures/create_logo.png differ diff --git a/tests/fixtures/create_logo_dark.png b/tests/fixtures/create_logo_dark.png new file mode 100644 index 0000000000..ed00f1208e Binary files /dev/null and b/tests/fixtures/create_logo_dark.png differ diff --git a/tests/fixtures/create_logo_width100.png b/tests/fixtures/create_logo_width100.png new file mode 
100644 index 0000000000..552396082e Binary files /dev/null and b/tests/fixtures/create_logo_width100.png differ diff --git a/tests/lint/actions_awsfulltest.py b/tests/lint/actions_awsfulltest.py index 30293e31a4..bbda92a4d1 100644 --- a/tests/lint/actions_awsfulltest.py +++ b/tests/lint/actions_awsfulltest.py @@ -19,7 +19,7 @@ def test_actions_awsfulltest_pass(self): # Edit .github/workflows/awsfulltest.yml to use -profile test_full new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: awsfulltest_yml = fh.read() awsfulltest_yml = awsfulltest_yml.replace("-profile test ", "-profile test_full ") with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: @@ -44,7 +44,7 @@ def test_actions_awsfulltest_fail(self): # Edit .github/workflows/awsfulltest.yml to use -profile test_full new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: awsfulltest_yml = yaml.safe_load(fh) del awsfulltest_yml[True]["release"] with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: diff --git a/tests/lint/actions_awstest.py b/tests/lint/actions_awstest.py index 0e19f781aa..7bfa6052f8 100644 --- a/tests/lint/actions_awstest.py +++ b/tests/lint/actions_awstest.py @@ -20,7 +20,7 @@ def test_actions_awstest_fail(self): # Edit .github/workflows/awsfulltest.yml to use -profile test_full new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: awstest_yml = yaml.safe_load(fh) awstest_yml[True]["push"] = ["master"] with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: diff --git a/tests/lint/actions_ci.py b/tests/lint/actions_ci.py index d44dbb73b5..8734b2f78b 100644 --- a/tests/lint/actions_ci.py +++ b/tests/lint/actions_ci.py @@ -31,7 +31,7 @@ def test_actions_ci_fail_wrong_trigger(self): # Edit .github/workflows/actions_ci.yml to mess stuff up! 
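The `awstest_yml[True]` lookups in these lint tests, and the `ci_yml[True]` assignment that follows, lean on a PyYAML detail worth keeping in mind: with the YAML 1.1 resolvers used by `yaml.safe_load`, the bare GitHub Actions trigger key `on:` is parsed as the boolean `True`, not the string `"on"`. A short sketch (not part of the diff):

```python
# Why the parsed workflow's trigger block is addressed as workflow[True].
import yaml

workflow = yaml.safe_load(
    """
on:
  push:
    branches: [dev]
jobs: {}
"""
)

assert "on" not in workflow                           # the key is not the string "on"...
assert workflow[True]["push"]["branches"] == ["dev"]  # ...it is the boolean True
```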
new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml")) as fh: ci_yml = yaml.safe_load(fh) ci_yml[True]["push"] = ["dev", "patch"] ci_yml["jobs"]["test"]["strategy"]["matrix"] = {"nxf_versionnn": ["foo", ""]} diff --git a/tests/lint/actions_schema_validation.py b/tests/lint/actions_schema_validation.py index 48bb07e4dd..ad65d90018 100644 --- a/tests/lint/actions_schema_validation.py +++ b/tests/lint/actions_schema_validation.py @@ -9,7 +9,7 @@ def test_actions_schema_validation_missing_jobs(self): """Missing 'jobs' field should result in failure""" new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: awstest_yml = yaml.safe_load(fh) awstest_yml.pop("jobs") with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: @@ -27,7 +27,7 @@ def test_actions_schema_validation_missing_on(self): """Missing 'on' field should result in failure""" new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: awstest_yml = yaml.safe_load(fh) awstest_yml.pop(True) with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: @@ -46,7 +46,7 @@ def test_actions_schema_validation_fails_for_additional_property(self): """Missing 'jobs' field should result in failure""" new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: awstest_yml = yaml.safe_load(fh) awstest_yml["not_jobs"] = awstest_yml["jobs"] with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: diff --git a/tests/lint/files_exist.py b/tests/lint/files_exist.py index 4e5e4d3c2b..5ba26d77a0 100644 --- a/tests/lint/files_exist.py +++ b/tests/lint/files_exist.py @@ -1,4 +1,5 @@ import os +from pathlib import Path import nf_core.lint @@ -7,7 +8,7 @@ def test_files_exist_missing_config(self): """Lint test: critical files missing FAIL""" new_pipeline = self._make_pipeline_copy() - os.remove(os.path.join(new_pipeline, "CHANGELOG.md")) + Path(new_pipeline, "CHANGELOG.md").unlink() lint_obj = nf_core.lint.PipelineLint(new_pipeline) lint_obj._load() @@ -21,7 +22,7 @@ def test_files_exist_missing_main(self): """Check if missing main issues warning""" new_pipeline = self._make_pipeline_copy() - os.remove(os.path.join(new_pipeline, "main.nf")) + Path(new_pipeline, "main.nf").unlink() lint_obj = nf_core.lint.PipelineLint(new_pipeline) lint_obj._load() @@ -34,7 +35,7 @@ def test_files_exist_depreciated_file(self): """Check whether depreciated file issues warning""" new_pipeline = self._make_pipeline_copy() - nf = os.path.join(new_pipeline, "parameters.settings.json") + nf = Path(new_pipeline, "parameters.settings.json") os.system(f"touch {nf}") lint_obj = nf_core.lint.PipelineLint(new_pipeline) diff --git a/tests/lint/merge_markers.py b/tests/lint/merge_markers.py index be0d076757..64a62e25c3 100644 --- a/tests/lint/merge_markers.py +++ b/tests/lint/merge_markers.py @@ -7,7 +7,7 @@ def test_merge_markers_found(self): """Missing 'jobs' field should result in 
failure""" new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, "main.nf"), "r") as fh: + with open(os.path.join(new_pipeline, "main.nf")) as fh: main_nf_content = fh.read() main_nf_content = ">>>>>>>\n" + main_nf_content with open(os.path.join(new_pipeline, "main.nf"), "w") as fh: diff --git a/tests/lint/multiqc_config.py b/tests/lint/multiqc_config.py index 446b4378b0..721560ce81 100644 --- a/tests/lint/multiqc_config.py +++ b/tests/lint/multiqc_config.py @@ -18,7 +18,7 @@ def test_multiqc_config_exists_ignore(self): def test_multiqc_config_missing_report_section_order(self): """Test that linting fails if the multiqc_config.yml file is missing the report_section_order""" new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: mqc_yml = yaml.safe_load(fh) mqc_yml_tmp = mqc_yml mqc_yml.pop("report_section_order") @@ -36,7 +36,7 @@ def test_multiqc_config_missing_report_section_order(self): def test_multiqc_incorrect_export_plots(self): """Test that linting fails if the multiqc_config.yml file has an incorrect value for export_plots""" new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: mqc_yml = yaml.safe_load(fh) mqc_yml_tmp = mqc_yml mqc_yml["export_plots"] = False @@ -54,7 +54,7 @@ def test_multiqc_incorrect_export_plots(self): def test_multiqc_config_report_comment_fail(self): """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment""" new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: mqc_yml = yaml.safe_load(fh) mqc_yml_tmp = mqc_yml mqc_yml["report_comment"] = "This is a test" @@ -73,7 +73,7 @@ def test_multiqc_config_report_comment_fail(self): def test_multiqc_config_report_comment_release_fail(self): """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment for a release version""" new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: mqc_yml = yaml.safe_load(fh) mqc_yml_tmp = mqc_yml with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: diff --git a/tests/lint/nextflow_config.py b/tests/lint/nextflow_config.py index 1542b8cf65..60aaee5243 100644 --- a/tests/lint/nextflow_config.py +++ b/tests/lint/nextflow_config.py @@ -1,5 +1,6 @@ import os import re +from pathlib import Path import nf_core.create import nf_core.lint @@ -43,7 +44,7 @@ def test_nextflow_config_missing_test_profile_failed(self): new_pipeline = self._make_pipeline_copy() # Change the name of the test profile so there is no such profile nf_conf_file = os.path.join(new_pipeline, "nextflow.config") - with open(nf_conf_file, "r") as f: + with open(nf_conf_file) as f: content = f.read() fail_content = re.sub(r"\btest\b", "testfail", content) with open(nf_conf_file, "w") as f: @@ -53,3 +54,66 @@ def test_nextflow_config_missing_test_profile_failed(self): result = lint_obj.nextflow_config() assert len(result["failed"]) > 0 assert len(result["warned"]) == 0 + + +def test_default_values_match(self): + """Test that the default values in nextflow.config match the default values 
defined in the nextflow_schema.json.""" + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load_pipeline_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) == 0 + assert len(result["warned"]) == 0 + assert "Config default value correct: params.max_cpus" in result["passed"] + assert "Config default value correct: params.validate_params" in result["passed"] + + +def test_default_values_fail(self): + """Test linting fails if the default values in nextflow.config do not match the ones defined in the nextflow_schema.json.""" + new_pipeline = self._make_pipeline_copy() + # Change the default value of max_cpus in nextflow.config + nf_conf_file = Path(new_pipeline) / "nextflow.config" + with open(nf_conf_file) as f: + content = f.read() + fail_content = re.sub(r"\bmax_cpus = 16\b", "max_cpus = 0", content) + with open(nf_conf_file, "w") as f: + f.write(fail_content) + # Change the default value of max_memory in nextflow_schema.json + nf_schema_file = Path(new_pipeline) / "nextflow_schema.json" + with open(nf_schema_file) as f: + content = f.read() + fail_content = re.sub(r'"default": "128.GB"', '"default": "18.GB"', content) + print(fail_content) + with open(nf_schema_file, "w") as f: + f.write(fail_content) + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load_pipeline_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) == 2 + assert ( + "Config default value incorrect: `params.max_cpus` is set as `16` in `nextflow_schema.json` but is `0` in `nextflow.config`." + in result["failed"] + ) + assert ( + "Config default value incorrect: `params.max_memory` is set as `18.GB` in `nextflow_schema.json` but is `128.GB` in `nextflow.config`." 
+ in result["failed"] + ) + + +def test_default_values_ignored(self): + """Test ignoring linting of default values.""" + new_pipeline = self._make_pipeline_copy() + # Add max_cpus to the ignore list + nf_core_yml = Path(new_pipeline) / ".nf-core.yml" + with open(nf_core_yml, "w") as f: + f.write( + "repository_type: pipeline\nlint:\n nextflow_config:\n - config_defaults:\n - params.max_cpus\n" + ) + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load_pipeline_config() + lint_obj._load_lint_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) == 0 + assert len(result["ignored"]) == 1 + assert "Config default value correct: params.max_cpus" not in result["passed"] + assert "Config default ignored: params.max_cpus" in result["ignored"] diff --git a/tests/lint/template_strings.py b/tests/lint/template_strings.py new file mode 100644 index 0000000000..ac0ae01681 --- /dev/null +++ b/tests/lint/template_strings.py @@ -0,0 +1,54 @@ +import subprocess +from pathlib import Path + +import nf_core.create +import nf_core.lint + + +def test_template_strings(self): + """Tests finding a template string in a file fails linting.""" + new_pipeline = self._make_pipeline_copy() + # Add template string to a file + txt_file = Path(new_pipeline) / "docs" / "test.txt" + with open(txt_file, "w") as f: + f.write("my {{ template_string }}") + subprocess.check_output(["git", "add", "docs"], cwd=new_pipeline) + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + result = lint_obj.template_strings() + print(result["failed"]) + assert len(result["failed"]) == 1 + assert len(result["ignored"]) == 0 + + +def test_template_strings_ignored(self): + """Tests ignoring template_strings""" + new_pipeline = self._make_pipeline_copy() + # Ignore template_strings test + nf_core_yml = Path(new_pipeline) / ".nf-core.yml" + with open(nf_core_yml, "w") as f: + f.write("repository_type: pipeline\nlint:\n template_strings: False") + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + lint_obj._lint_pipeline() + assert len(lint_obj.failed) == 0 + assert len(lint_obj.ignored) == 1 + + +def test_template_strings_ignore_file(self): + """Tests ignoring template_strings file""" + new_pipeline = self._make_pipeline_copy() + # Add template string to a file + txt_file = Path(new_pipeline) / "docs" / "test.txt" + with open(txt_file, "w") as f: + f.write("my {{ template_string }}") + subprocess.check_output(["git", "add", "docs"], cwd=new_pipeline) + # Ignore template_strings test + nf_core_yml = Path(new_pipeline) / ".nf-core.yml" + with open(nf_core_yml, "w") as f: + f.write("repository_type: pipeline\nlint:\n template_strings:\n - docs/test.txt") + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + result = lint_obj.template_strings() + assert len(result["failed"]) == 0 + assert len(result["ignored"]) == 1 diff --git a/tests/modules/bump_versions.py b/tests/modules/bump_versions.py index 3c19041f63..ce8c6dbe11 100644 --- a/tests/modules/bump_versions.py +++ b/tests/modules/bump_versions.py @@ -2,17 +2,16 @@ import re import pytest -import yaml import nf_core.modules -from nf_core.modules.modules_utils import ModuleException +from nf_core.modules.modules_utils import ModuleExceptionError def test_modules_bump_versions_single_module(self): """Test updating a single module""" # Change the bpipe/test version to an older version env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") - with 
open(env_yml_path, "r") as fh: + with open(env_yml_path) as fh: content = fh.read() new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content) with open(env_yml_path, "w") as fh: @@ -32,7 +31,7 @@ def test_modules_bump_versions_all_modules(self): def test_modules_bump_versions_fail(self): """Fail updating a module with wrong name""" version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) - with pytest.raises(ModuleException) as excinfo: + with pytest.raises(ModuleExceptionError) as excinfo: version_bumper.bump_versions(module="no/module") assert "Could not find the specified module:" in str(excinfo.value) @@ -41,7 +40,7 @@ def test_modules_bump_versions_fail_unknown_version(self): """Fail because of an unknown version""" # Change the bpipe/test version to an older version env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") - with open(env_yml_path, "r") as fh: + with open(env_yml_path) as fh: content = fh.read() new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content) with open(env_yml_path, "w") as fh: diff --git a/tests/modules/create.py b/tests/modules/create.py index 74e5ec3896..460a1439cb 100644 --- a/tests/modules/create.py +++ b/tests/modules/create.py @@ -1,4 +1,3 @@ -import filecmp import os import shutil from pathlib import Path @@ -87,9 +86,9 @@ def test_modules_migrate(self, mock_rich_ask): # Clone modules repo with pytests shutil.rmtree(self.nfcore_modules) Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - with open(module_dir / "main.nf", "r") as fh: + with open(module_dir / "main.nf") as fh: old_main_nf = fh.read() - with open(module_dir / "meta.yml", "r") as fh: + with open(module_dir / "meta.yml") as fh: old_meta_yml = fh.read() # Create a module with --migrate-pytest @@ -97,9 +96,9 @@ def test_modules_migrate(self, mock_rich_ask): module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) module_create.create() - with open(module_dir / "main.nf", "r") as fh: + with open(module_dir / "main.nf") as fh: new_main_nf = fh.read() - with open(module_dir / "meta.yml", "r") as fh: + with open(module_dir / "meta.yml") as fh: new_meta_yml = fh.read() nextflow_config = module_dir / "tests" / "nextflow.config" diff --git a/tests/modules/lint.py b/tests/modules/lint.py index a8a775e6f6..a5d8567b76 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -333,7 +333,7 @@ def test_modules_lint_snapshot_file_missing_fail(self): def test_modules_lint_snapshot_file_not_needed(self): """Test linting a module which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "r") as fh: + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test")) as fh: content = fh.read() new_content = content.replace("snapshot(", "snap (") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh: @@ -372,7 +372,7 @@ def test_modules_environment_yml_file_sorted_correctly(self): def test_modules_environment_yml_file_sorted_incorrectly(self): """Test linting a module with an incorrectly sorted environment.yml file""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "r") as fh: + with 
open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh: yaml_content = yaml.safe_load(fh) # Add a new dependency to the environment.yml file and reverse the order yaml_content["dependencies"].append("z") @@ -548,7 +548,7 @@ def test_modules_missing_test_main_nf(self): def test_modules_missing_required_tag(self): """Test linting a module with a missing required tag""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "r") as fh: + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test")) as fh: content = fh.read() new_content = content.replace("modules_nfcore", "foo") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh: @@ -581,7 +581,7 @@ def test_modules_missing_tags_yml(self): def test_modules_incorrect_tags_yml_key(self): """Test linting a module with an incorrect key in tags.yml file""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "r") as fh: + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml")) as fh: content = fh.read() new_content = content.replace("bpipe/test:", "bpipe_test:") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh: @@ -598,7 +598,7 @@ def test_modules_incorrect_tags_yml_key(self): def test_modules_incorrect_tags_yml_values(self): """Test linting a module with an incorrect path in tags.yml file""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "r") as fh: + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml")) as fh: content = fh.read() new_content = content.replace("modules/nf-core/bpipe/test/**", "foo") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh: diff --git a/tests/modules/list.py b/tests/modules/list.py index d92cd58dd5..3cb00a84d6 100644 --- a/tests/modules/list.py +++ b/tests/modules/list.py @@ -1,3 +1,7 @@ +import json +from pathlib import Path + +import yaml from rich.console import Console import nf_core.modules @@ -56,3 +60,75 @@ def test_modules_install_gitlab_and_list_pipeline(self): console.print(listed_mods) output = console.export_text() assert "fastqc" in output + + +def test_modules_list_local_json(self): + """Test listing locally installed modules as JSON""" + mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + listed_mods = mods_list.list_components(print_json=True) + listed_mods = json.loads(listed_mods) + assert "fastqc" in listed_mods + assert "multiqc" in listed_mods + + +def test_modules_list_remote_json(self): + """Test listing available modules as JSON""" + mods_list = nf_core.modules.ModuleList(None, remote=True) + listed_mods = mods_list.list_components(print_json=True) + listed_mods = json.loads(listed_mods) + assert "fastqc" in listed_mods + assert "multiqc" in listed_mods + + +def test_modules_list_with_one_keyword(self): + """Test listing available modules with one keyword""" + mods_list = nf_core.modules.ModuleList(None, remote=True) + listed_mods = mods_list.list_components(keywords=["qc"]) + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "multiqc" in output + + +def test_modules_list_with_keywords(self): + """Test listing available modules with multiple 
keywords""" + mods_list = nf_core.modules.ModuleList(None, remote=True) + listed_mods = mods_list.list_components(keywords=["fastq", "qc"]) + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "fastqc" in output + + +def test_modules_list_with_unused_keyword(self): + """Test listing available modules with an unused keyword""" + mods_list = nf_core.modules.ModuleList(None, remote=True) + with self.assertLogs(level="INFO") as log: + listed_mods = mods_list.list_components(keywords=["you_will_never_find_me"]) + self.assertIn("No available", log.output[0]) + # expect empty list + assert listed_mods == "" + + +def test_modules_list_in_wrong_repo_fail(self): + """Test listing available modules in a non-pipeline repo""" + # modify repotype in .nf-core.yml + with open(Path(self.pipeline_dir, ".nf-core.yml")) as fh: + nf_core_yml = yaml.safe_load(fh) + nf_core_yml_orig = nf_core_yml.copy() + nf_core_yml["repository_type"] = "modules" + nf_core_yml["org_path"] = "nf-core" + + print(nf_core_yml) + with open(Path(self.pipeline_dir, ".nf-core.yml"), "w") as fh: + yaml.safe_dump(nf_core_yml, fh) + # expect error logged + with self.assertLogs(level="ERROR") as log: + mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + listed_mods = mods_list.list_components() + self.assertIn("must be run from a pipeline directory", log.output[0]) + # expect empty list + assert listed_mods == "" + # restore .nf-core.yml + with open(Path(self.pipeline_dir, ".nf-core.yml"), "w") as fh: + yaml.safe_dump(nf_core_yml_orig, fh) diff --git a/tests/modules/modules_json.py b/tests/modules/modules_json.py index 63ee4e743d..a054b6b131 100644 --- a/tests/modules/modules_json.py +++ b/tests/modules/modules_json.py @@ -17,7 +17,7 @@ def test_get_modules_json(self): """Checks that the get_modules_json function returns the correct result""" mod_json_path = os.path.join(self.pipeline_dir, "modules.json") - with open(mod_json_path, "r") as fh: + with open(mod_json_path) as fh: try: mod_json_sb = json.load(fh) except json.JSONDecodeError as e: @@ -73,7 +73,7 @@ def test_mod_json_create(self): def modify_main_nf(path): """Modify a file to test patch creation""" - with open(path, "r") as fh: + with open(path) as fh: lines = fh.readlines() # Modify $meta.id to $meta.single_end lines[1] = ' tag "$meta.single_end"\n' @@ -112,7 +112,7 @@ def test_mod_json_create_with_patch(self): assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] # Check that fastqc/main.nf maintains the changes - with open(module_path / "main.nf", "r") as fh: + with open(module_path / "main.nf") as fh: lines = fh.readlines() assert lines[1] == ' tag "$meta.single_end"\n' @@ -214,7 +214,7 @@ def test_mod_json_dump(self): assert os.path.exists(mod_json_path) # Check that the dump function writes the correct content - with open(mod_json_path, "r") as f: + with open(mod_json_path) as f: try: mod_json_new = json.load(f) except json.JSONDecodeError as e: diff --git a/tests/modules/patch.py b/tests/modules/patch.py index 338d890f2f..dc939c7ea7 100644 --- a/tests/modules/patch.py +++ b/tests/modules/patch.py @@ -43,7 +43,7 @@ def setup_patch(pipeline_dir, modify_module): def modify_main_nf(path): """Modify a file to test patch creation""" - with open(path, "r") as fh: + with open(path) as fh: lines = fh.readlines() # We want a patch file that looks something like: # - tuple val(meta), path(reads) @@ -99,7 +99,7 @@ def test_create_patch_change(self): ) # Check that the 
correct lines are in the patch file - with open(module_path / patch_fn, "r") as fh: + with open(module_path / patch_fn) as fh: patch_lines = fh.readlines() module_relpath = module_path.relative_to(self.pipeline_dir) assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines, module_relpath / "main.nf" @@ -157,7 +157,7 @@ def test_create_patch_try_apply_successful(self): ) # Check that the correct lines are in the patch file - with open(module_path / patch_fn, "r") as fh: + with open(module_path / patch_fn) as fh: patch_lines = fh.readlines() module_relpath = module_path.relative_to(self.pipeline_dir) assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines @@ -167,7 +167,7 @@ def test_create_patch_try_apply_successful(self): assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines # Check that 'main.nf' is updated correctly - with open(module_path / "main.nf", "r") as fh: + with open(module_path / "main.nf") as fh: main_nf_lines = fh.readlines() # These lines should have been removed by the patch assert " tuple val(meta), path(reads)\n" not in main_nf_lines @@ -258,7 +258,7 @@ def test_create_patch_update_success(self): ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) # Check that the correct lines are in the patch file - with open(module_path / patch_fn, "r") as fh: + with open(module_path / patch_fn) as fh: patch_lines = fh.readlines() module_relpath = module_path.relative_to(self.pipeline_dir) assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines @@ -268,7 +268,7 @@ def test_create_patch_update_success(self): assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines # Check that 'main.nf' is updated correctly - with open(module_path / "main.nf", "r") as fh: + with open(module_path / "main.nf") as fh: main_nf_lines = fh.readlines() # These lines should have been removed by the patch assert " tuple val(meta), path(reads)\n" not in main_nf_lines @@ -300,7 +300,7 @@ def test_create_patch_update_fail(self): ) # Save the file contents for downstream comparison - with open(module_path / patch_fn, "r") as fh: + with open(module_path / patch_fn) as fh: patch_contents = fh.read() update_obj = nf_core.modules.ModuleUpdate( @@ -317,14 +317,14 @@ def test_create_patch_update_fail(self): temp_module_dir = temp_dir / BISMARK_ALIGN for file in os.listdir(temp_module_dir): assert file in os.listdir(module_path) - with open(module_path / file, "r") as fh: + with open(module_path / file) as fh: installed = fh.read() - with open(temp_module_dir / file, "r") as fh: + with open(temp_module_dir / file) as fh: shouldbe = fh.read() assert installed == shouldbe # Check that the patch file is unaffected - with open(module_path / patch_fn, "r") as fh: + with open(module_path / patch_fn) as fh: new_patch_contents = fh.read() assert patch_contents == new_patch_contents diff --git a/tests/modules/update.py b/tests/modules/update.py index 399e9cc12c..5208070fa5 100644 --- a/tests/modules/update.py +++ b/tests/modules/update.py @@ -345,7 +345,7 @@ def test_update_only_show_differences_when_patch(self, mock_prompt): # We modify fastqc because it's one of the modules that can be updated and there's another one before it (custom/dumpsoftwareversions) module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc") main_path = Path(module_path, "main.nf") - with open(main_path, "r") as fh: + with open(main_path) as fh: lines = fh.readlines() for line_index in range(len(lines)): if lines[line_index] == " label 'process_medium'\n": diff --git 
a/tests/subworkflows/create.py b/tests/subworkflows/create.py index fc628df34f..002b889671 100644 --- a/tests/subworkflows/create.py +++ b/tests/subworkflows/create.py @@ -1,4 +1,3 @@ -import filecmp import os import shutil from pathlib import Path @@ -53,9 +52,9 @@ def test_subworkflows_migrate(self, mock_rich_ask): # Clone modules repo with pytests shutil.rmtree(self.nfcore_modules) Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - with open(subworkflow_dir / "main.nf", "r") as fh: + with open(subworkflow_dir / "main.nf") as fh: old_main_nf = fh.read() - with open(subworkflow_dir / "meta.yml", "r") as fh: + with open(subworkflow_dir / "meta.yml") as fh: old_meta_yml = fh.read() # Create a subworkflow with --migrate-pytest @@ -65,9 +64,9 @@ def test_subworkflows_migrate(self, mock_rich_ask): ) subworkflow_create.create() - with open(subworkflow_dir / "main.nf", "r") as fh: + with open(subworkflow_dir / "main.nf") as fh: new_main_nf = fh.read() - with open(subworkflow_dir / "meta.yml", "r") as fh: + with open(subworkflow_dir / "meta.yml") as fh: new_meta_yml = fh.read() nextflow_config = subworkflow_dir / "tests" / "nextflow.config" diff --git a/tests/subworkflows/lint.py b/tests/subworkflows/lint.py index 1380db2260..b53fef7f0e 100644 --- a/tests/subworkflows/lint.py +++ b/tests/subworkflows/lint.py @@ -1,4 +1,3 @@ -import os from pathlib import Path import pytest @@ -87,9 +86,7 @@ def test_subworkflows_lint_snapshot_file_missing_fail(self): def test_subworkflows_lint_snapshot_file_not_needed(self): """Test linting a subworkflow which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" - with open( - Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test"), "r" - ) as fh: + with open(Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test")) as fh: content = fh.read() new_content = content.replace("snapshot(", "snap (") with open( diff --git a/tests/subworkflows/remove.py b/tests/subworkflows/remove.py index 53a948778b..dec67875bd 100644 --- a/tests/subworkflows/remove.py +++ b/tests/subworkflows/remove.py @@ -1,7 +1,5 @@ from pathlib import Path -from rich.console import Console - from nf_core.modules.modules_json import ModulesJson @@ -18,7 +16,7 @@ def test_subworkflows_remove_subworkflow(self): bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - mod_json_obj = ModulesJson(self.pipeline_dir) + ModulesJson(self.pipeline_dir) mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() assert self.subworkflow_remove.remove("bam_sort_stats_samtools") mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py index 698086e186..32a69ba180 100644 --- a/tests/subworkflows/update.py +++ b/tests/subworkflows/update.py @@ -8,7 +8,6 @@ import nf_core.utils from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE -from nf_core.modules.remove import ModuleRemove from nf_core.modules.update import ModuleUpdate from nf_core.subworkflows.update import SubworkflowUpdate @@ -73,7 +72,7 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self): assert 
update_obj.update("fastq_align_bowtie2") is True assert cmp_component(tmpdir, sw_path) is True - with open(patch_path, "r") as fh: + with open(patch_path) as fh: line = fh.readline() assert line.startswith( "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" diff --git a/tests/test_cli.py b/tests/test_cli.py index fc172deba9..1261e3a9e9 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -6,6 +6,7 @@ import tempfile import unittest +from pathlib import Path from unittest import mock from click.testing import CliRunner @@ -262,9 +263,10 @@ def test_create(self, mock_create): @mock.patch("nf_core.lint.run_linting") def test_lint(self, mock_lint, mock_is_pipeline): """Test nf-core lint""" - mock_lint_results = (mock.MagicMock, mock.MagicMock) + mock_lint_results = (mock.MagicMock, mock.MagicMock, mock.MagicMock) mock_lint_results[0].failed = [] mock_lint_results[1].failed = [] + mock_lint_results[2].failed = [] mock_lint.return_value = mock_lint_results temp_pipeline_dir = tempfile.NamedTemporaryFile() @@ -373,3 +375,13 @@ def test_schema_lint_filename(self, mock_get_schema_path): assert mock_get_schema_path.called_with("some_other_filename") assert "some_other_filename" in result.output assert "nextflow_schema.json" not in result.output + + @mock.patch("nf_core.create_logo.create_logo") + def test_create_logo(self, mock_create_logo): + # Set up the mock to return a specific value + + cmd = ["create-logo", "test"] + result = self.invoke_cli(cmd) + + mock_create_logo.assert_called_with("test", Path.cwd(), None, "light", 2300, "png", False) + assert result.exit_code == 0 diff --git a/tests/test_create_logo.py b/tests/test_create_logo.py new file mode 100644 index 0000000000..b3c01638e1 --- /dev/null +++ b/tests/test_create_logo.py @@ -0,0 +1,126 @@ +"""Test covering the create-logo command.""" + +import tempfile +import unittest +from pathlib import Path + +import nf_core.create_logo + + +class TestCreateLogo(unittest.TestCase): + """Class for create-logo tests""" + + # create tempdir in setup step + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.tempdir_path = Path(self.tempdir.name) + + # delete tempdir in teardown step + def tearDown(self): + self.tempdir.cleanup() + + def test_create_logo_png(self): + """Test that the create-logo command works for PNGs""" + + # Create a logo + logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path) + # Check that the file exists + self.assertTrue(logo_fn.is_file()) + # Check that the file is a PNG + self.assertTrue(logo_fn.suffix == ".png") + # Check that the file is the right size + fixture_fn = Path(__file__).parent / "fixtures" / "create_logo.png" + # allow some flexibility in the file size + self.assertTrue(int(logo_fn.stat().st_size / 1000) == int(fixture_fn.stat().st_size / 1000)) + + def test_create_logo_png_dark(self): + """Test that the create-logo command works for dark PNGs""" + + # Create a logo + logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path, theme="dark") + # Check that the file exists + self.assertTrue(logo_fn.is_file()) + # Check that the file is a PNG + self.assertTrue(logo_fn.suffix == ".png") + # Check that the file is the right size + fixture_fn = Path(__file__).parent / "fixtures" / "create_logo_dark.png" + # allow some flexibility in the file size + self.assertTrue(int(logo_fn.stat().st_size / 1000) == int(fixture_fn.stat().st_size / 1000)) + + def test_create_log_png_width(self): + """Test that the create-logo 
command works for PNGs with a custom width""" + + # Create a logo + logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path, width=100) + # Check that the file exists + self.assertTrue(logo_fn.is_file()) + # Check that the file is a PNG + self.assertTrue(logo_fn.suffix == ".png") + # Check that the file is the right size + fixture_fn = Path(__file__).parent / "fixtures" / "create_logo_width100.png" + # allow some flexibility in the file size + self.assertTrue(int(logo_fn.stat().st_size / 100) == int(fixture_fn.stat().st_size / 100)) + + def test_create_logo_twice(self): + """Test that the create-logo command returns an info message when run twice""" + + # Create a logo + logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path) + # Check that the file exists + self.assertTrue(logo_fn.is_file()) + # Create the logo again and capture the log output + with self.assertLogs(level="INFO") as log: + nf_core.create_logo.create_logo("pipes", self.tempdir_path) + # Check that the log message is correct + self.assertIn("Logo already exists", log.output[0]) + + def test_create_logo_without_text_fail(self): + """Test that the create-logo command fails without text""" + + # Create a logo + with self.assertRaises(UserWarning): + nf_core.create_logo.create_logo("", self.tempdir_path) + + def test_create_logo_with_filename(self): + """Test that the create-logo command works with a custom filename""" + + # Create a logo + logo_fn = nf_core.create_logo.create_logo("pipes", Path(self.tempdir_path / "custom_dir"), filename="custom") + # Check that the file exists + self.assertTrue(logo_fn.is_file()) + # Check that the parent directory name + self.assertTrue(logo_fn.parent.name == "custom_dir") + # Check that the file has correct name + self.assertTrue(logo_fn.name == "custom.png") + + def test_create_logo_svg(self): + """Test that the create-logo command works for SVGs""" + + # Create a logo + logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path, format="svg") + # Check that the file exists + self.assertTrue(logo_fn.is_file()) + # Check that the file is a SVG + self.assertTrue(logo_fn.suffix == ".svg") + # Check that the svg contains the correct text + with open(logo_fn) as fh: + svg = fh.read() + self.assertIn("pipes", svg) + # check that it is the light theme + self.assertIn("#050505", svg) + + def test_create_logo_svg_dark(self): + """Test that the create-logo command works for svgs and dark theme""" + + # Create a logo + logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path, format="svg", theme="dark") + # Check that the file exists + self.assertTrue(logo_fn.is_file()) + # Check that the file is a SVG + self.assertTrue(logo_fn.suffix == ".svg") + # Check that the svg contains the correct text + with open(logo_fn) as fh: + svg = fh.read() + self.assertIn("pipes", svg) + # check that it is the dark theme + self.assertIn("#fafafa", svg) diff --git a/tests/test_download.py b/tests/test_download.py index 7c9532e977..7f34f7fbc6 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -1,7 +1,6 @@ """Tests for the download subcommand of nf-core tools """ -import hashlib import os import re import shutil @@ -16,9 +15,9 @@ import nf_core.utils from nf_core.download import ContainerError, DownloadWorkflow, WorkflowRepo from nf_core.synced_repo import SyncedRepo -from nf_core.utils import NFCORE_CACHE_DIR, NFCORE_DIR, run_cmd +from nf_core.utils import run_cmd -from .utils import with_temporary_file, with_temporary_folder +from .utils import 
with_temporary_folder class DownloadTest(unittest.TestCase): @@ -160,8 +159,8 @@ def test__find_container_images_config_nextflow(self, tmp_path, mock_fetch_wf_co if result is not None: nfconfig_raw, _ = result config = {} - for l in nfconfig_raw.splitlines(): - ul = l.decode("utf-8") + for line in nfconfig_raw.splitlines(): + ul = line.decode("utf-8") try: k, v = ul.split(" = ", 1) config[k] = v.strip("'\"") @@ -259,7 +258,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p ) # Pull again, but now the image already exists - with pytest.raises(ContainerError.ImageExists): + with pytest.raises(ContainerError.ImageExistsError): download_obj.singularity_pull_image( "hello-world", f"{tmp_dir}/hello-world.sif", None, "docker.io", mock_rich_progress ) @@ -269,8 +268,8 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p "docker.io/bschiffthaler/sed", f"{tmp_dir}/sed.sif", None, "docker.io", mock_rich_progress ) - # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExists is raised before attempting to pull.) - with pytest.raises(ContainerError.RegistryNotFound): + # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExistsError is raised before attempting to pull.) + with pytest.raises(ContainerError.RegistryNotFoundError): download_obj.singularity_pull_image( "hello-world", f"{tmp_dir}/hello-world_new.sif", @@ -280,23 +279,23 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p ) # test Image not found for several registries - with pytest.raises(ContainerError.ImageNotFound): + with pytest.raises(ContainerError.ImageNotFoundError): download_obj.singularity_pull_image( "a-container", f"{tmp_dir}/acontainer.sif", None, "quay.io", mock_rich_progress ) - with pytest.raises(ContainerError.ImageNotFound): + with pytest.raises(ContainerError.ImageNotFoundError): download_obj.singularity_pull_image( "a-container", f"{tmp_dir}/acontainer.sif", None, "docker.io", mock_rich_progress ) - with pytest.raises(ContainerError.ImageNotFound): + with pytest.raises(ContainerError.ImageNotFoundError): download_obj.singularity_pull_image( "a-container", f"{tmp_dir}/acontainer.sif", None, "ghcr.io", mock_rich_progress ) # test Image not found for absolute URI. - with pytest.raises(ContainerError.ImageNotFound): + with pytest.raises(ContainerError.ImageNotFoundError): download_obj.singularity_pull_image( "docker.io/bschiffthaler/nothingtopullhere", f"{tmp_dir}/nothingtopullhere.sif", @@ -306,7 +305,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p ) # Traffic from Github Actions to GitHub's Container Registry is unlimited, so no harm should be done here. 
- with pytest.raises(ContainerError.InvalidTag): + with pytest.raises(ContainerError.InvalidTagError): download_obj.singularity_pull_image( "ewels/multiqc:go-rewrite", f"{tmp_dir}/umi-transfer.sif", @@ -343,9 +342,8 @@ def test_get_singularity_images(self, tmp_path, mock_fetch_wf_config): container_library=("mirage-the-imaginative-registry.io", "quay.io", "ghcr.io", "docker.io"), ) mock_fetch_wf_config.return_value = { - "process.mapping.container": "helloworld", - "process.mapping.container": "helloworld", - "process.mapping.container": "helloooooooworld", + "process.helloworld.container": "helloworld", + "process.hellooworld.container": "helloooooooworld", "process.mapping.container": "ewels/multiqc:gorewrite", } download_obj.find_container_images("workflow") diff --git a/tests/test_launch.py b/tests/test_launch.py index 03c6a8b692..dc8d6b147c 100644 --- a/tests/test_launch.py +++ b/tests/test_launch.py @@ -86,7 +86,7 @@ def test_get_pipeline_defaults(self): self.launcher.get_pipeline_schema() self.launcher.set_schema_inputs() assert len(self.launcher.schema_obj.input_params) > 0 - assert self.launcher.schema_obj.input_params["validate_params"] == True + assert self.launcher.schema_obj.input_params["validate_params"] is True @with_temporary_file def test_get_pipeline_defaults_input_params(self, tmp_file): @@ -119,12 +119,12 @@ def test_ob_to_questionary_string(self): @mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Web based"}]) def test_prompt_web_gui_true(self, mock_prompt): """Check the prompt to launch the web schema or use the cli""" - assert self.launcher.prompt_web_gui() == True + assert self.launcher.prompt_web_gui() is True @mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Command line"}]) def test_prompt_web_gui_false(self, mock_prompt): """Check the prompt to launch the web schema or use the cli""" - assert self.launcher.prompt_web_gui() == False + assert self.launcher.prompt_web_gui() is False @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{}]) def test_launch_web_gui_missing_keys(self, mock_poll_nfcore_web_api): @@ -144,7 +144,7 @@ def test_launch_web_gui(self, mock_poll_nfcore_web_api, mock_webbrowser, mock_wa """Check the code that opens the web browser""" self.launcher.get_pipeline_schema() self.launcher.merge_nxf_flag_schema() - assert self.launcher.launch_web_gui() == None + assert self.launcher.launch_web_gui() is None @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "error", "message": "foo"}]) def test_get_web_launch_response_error(self, mock_poll_nfcore_web_api): @@ -163,7 +163,7 @@ def test_get_web_launch_response_unexpected(self, mock_poll_nfcore_web_api): @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "waiting_for_user"}]) def test_get_web_launch_response_waiting(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - status waiting_for_user""" - assert self.launcher.get_web_launch_response() == False + assert self.launcher.get_web_launch_response() is False @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "launch_params_complete"}]) def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): @@ -191,7 +191,7 @@ def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): def test_get_web_launch_response_valid(self, mock_poll_nfcore_web_api, mock_sanitise): """Test polling the website for a launch response - complete, valid response""" 
self.launcher.get_pipeline_schema() - assert self.launcher.get_web_launch_response() == True + assert self.launcher.get_web_launch_response() is True def test_sanitise_web_response(self): """Check that we can properly sanitise results from the web""" @@ -201,7 +201,7 @@ def test_sanitise_web_response(self): self.launcher.schema_obj.input_params["max_cpus"] = "12" self.launcher.sanitise_web_response() assert "-name" not in self.launcher.nxf_flags - assert self.launcher.schema_obj.input_params["igenomes_ignore"] == True + assert self.launcher.schema_obj.input_params["igenomes_ignore"] is True assert self.launcher.schema_obj.input_params["max_cpus"] == 12 def test_ob_to_questionary_bool(self): @@ -216,12 +216,12 @@ def test_ob_to_questionary_bool(self): assert result["message"] == "" assert result["choices"] == ["True", "False"] assert result["default"] == "True" - assert result["filter"]("True") == True - assert result["filter"]("true") == True - assert result["filter"](True) == True - assert result["filter"]("False") == False - assert result["filter"]("false") == False - assert result["filter"](False) == False + assert result["filter"]("True") is True + assert result["filter"]("true") is True + assert result["filter"](True) is True + assert result["filter"]("False") is False + assert result["filter"]("false") is False + assert result["filter"](False) is False def test_ob_to_questionary_number(self): """Check converting a python dict to a pyenquirer format - with enum""" @@ -234,7 +234,7 @@ def test_ob_to_questionary_number(self): assert result["validate"]("") is True assert result["validate"]("123.56.78") == "Must be a number" assert result["validate"]("123.56sdkfjb") == "Must be a number" - assert result["filter"]("123.456") == float(123.456) + assert result["filter"]("123.456") == 123.456 assert result["filter"]("") == "" def test_ob_to_questionary_integer(self): @@ -248,7 +248,7 @@ def test_ob_to_questionary_integer(self): assert result["validate"]("") is True assert result["validate"]("123.45") == "Must be an integer" assert result["validate"]("123.56sdkfjb") == "Must be an integer" - assert result["filter"]("123") == int(123) + assert result["filter"]("123") == 123 assert result["filter"]("") == "" def test_ob_to_questionary_range(self): @@ -321,7 +321,7 @@ def test_build_command_params(self): == f'nextflow run {self.pipeline_dir} -params-file "{os.path.relpath(self.nf_params_fn)}"' ) # Check saved parameters file - with open(self.nf_params_fn, "r") as fh: + with open(self.nf_params_fn) as fh: try: saved_json = json.load(fh) except json.JSONDecodeError as e: diff --git a/tests/test_lint.py b/tests/test_lint.py index b2e7f3b574..c8d7135654 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -56,9 +56,9 @@ def test_run_linting_function(self): We don't really check any of this code as it's just a series of function calls and we're testing each of those individually. This is mostly to check for syntax errors.""" - lint_obj = nf_core.lint.run_linting(self.test_pipeline_dir, False) + nf_core.lint.run_linting(self.test_pipeline_dir, False) - def test_init_PipelineLint(self): + def test_init_pipeline_lint(self): """Simply create a PipelineLint object. 
This checks that all of the lint test imports are working properly, @@ -134,7 +134,7 @@ def test_json_output(self, tmp_dir): self.lint_obj._save_json_results(json_fn) # Load created JSON file and check its contents - with open(json_fn, "r") as fh: + with open(json_fn) as fh: try: saved_json = json.load(fh) except json.JSONDecodeError as e: @@ -219,11 +219,19 @@ def test_sphinx_md_files(self): test_multiqc_incorrect_export_plots, ) from .lint.nextflow_config import ( # type: ignore[misc] + test_default_values_fail, + test_default_values_ignored, + test_default_values_match, test_nextflow_config_bad_name_fail, test_nextflow_config_dev_in_release_mode_failed, test_nextflow_config_example_pass, test_nextflow_config_missing_test_profile_failed, ) + from .lint.template_strings import ( # type: ignore[misc] + test_template_strings, + test_template_strings_ignore_file, + test_template_strings_ignored, + ) from .lint.version_consistency import test_version_consistency # type: ignore[misc] diff --git a/tests/test_modules.py b/tests/test_modules.py index 92c8dfda3f..f9c3b6f2a7 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -47,7 +47,7 @@ def create_modules_repo_dummy(tmp_dir): # Remove doi from meta.yml which makes lint fail meta_yml_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml") - with open(meta_yml_path, "r") as fh: + with open(meta_yml_path) as fh: meta_yml = yaml.safe_load(fh) del meta_yml["tools"][0]["bpipe"]["doi"] with open(meta_yml_path, "w") as fh: @@ -60,7 +60,7 @@ def create_modules_repo_dummy(tmp_dir): # remove "TODO" statements from main.nf main_nf_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "main.nf") - with open(main_nf_path, "r") as fh: + with open(main_nf_path) as fh: main_nf = fh.read() main_nf = main_nf.replace("TODO", "") with open(main_nf_path, "w") as fh: @@ -68,7 +68,7 @@ def create_modules_repo_dummy(tmp_dir): # remove "TODO" statements from main.nf.test main_nf_test_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test") - with open(main_nf_test_path, "r") as fh: + with open(main_nf_test_path) as fh: main_nf_test = fh.read() main_nf_test = main_nf_test.replace("TODO", "") with open(main_nf_test_path, "w") as fh: @@ -212,9 +212,15 @@ def test_modulesrepo_class(self): from .modules.list import ( # type: ignore[misc] test_modules_install_and_list_pipeline, test_modules_install_gitlab_and_list_pipeline, + test_modules_list_in_wrong_repo_fail, + test_modules_list_local_json, test_modules_list_pipeline, test_modules_list_remote, test_modules_list_remote_gitlab, + test_modules_list_remote_json, + test_modules_list_with_keywords, + test_modules_list_with_one_keyword, + test_modules_list_with_unused_keyword, ) from .modules.modules_json import ( # type: ignore[misc] test_get_modules_json, diff --git a/tests/test_params_file.py b/tests/test_params_file.py index 824e8fe345..13c82f5188 100644 --- a/tests/test_params_file.py +++ b/tests/test_params_file.py @@ -31,7 +31,7 @@ def setup_class(cls): cls.invalid_template_schema = os.path.join(cls.template_dir, "nextflow_schema_invalid.json") # Remove the allOf section to make the schema invalid - with open(cls.template_schema, "r") as fh: + with open(cls.template_schema) as fh: o = json.load(fh) del o["allOf"] @@ -49,7 +49,7 @@ def test_build_template(self): assert os.path.exists(outfile) - with open(outfile, "r") as fh: + with open(outfile) as fh: out = fh.read() assert "nf-core/testpipeline" in out @@ -68,7 +68,7 @@ def 
test_build_template_file_exists(self, caplog): # Creates a new empty file outfile = Path(self.tmp_dir) / "params-file.yml" - with open(outfile, "w") as fp: + with open(outfile, "w"): pass res = self.params_template_builder.write_params_file(outfile) diff --git a/tests/test_refgenie.py b/tests/test_refgenie.py index 73fbcb863f..5440c1c477 100644 --- a/tests/test_refgenie.py +++ b/tests/test_refgenie.py @@ -7,8 +7,6 @@ import tempfile import unittest -import yaml - class TestRefgenie(unittest.TestCase): """Class for refgenie tests""" @@ -26,7 +24,7 @@ def setUp(self): # avoids adding includeConfig statement to config file outside the current tmpdir try: self.NXF_HOME_ORIGINAL = os.environ["NXF_HOME"] - except: + except Exception: self.NXF_HOME_ORIGINAL = None os.environ["NXF_HOME"] = self.NXF_HOME diff --git a/tests/test_schema.py b/tests/test_schema.py index 105cd9473e..89fcc98b66 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -305,7 +305,7 @@ def test_build_schema(self): Build a new schema param from a pipeline Run code to ensure it doesn't crash. Individual functions tested separately. """ - param = self.schema_obj.build_schema(self.template_dir, True, False, None) + self.schema_obj.build_schema(self.template_dir, True, False, None) @with_temporary_folder def test_build_schema_from_scratch(self, tmp_dir): @@ -319,7 +319,7 @@ def test_build_schema_from_scratch(self, tmp_dir): shutil.copytree(self.template_dir, test_pipeline_dir) os.remove(os.path.join(test_pipeline_dir, "nextflow_schema.json")) - param = self.schema_obj.build_schema(test_pipeline_dir, True, False, None) + self.schema_obj.build_schema(test_pipeline_dir, True, False, None) @mock.patch("requests.post") def test_launch_web_builder_timeout(self, mock_post): diff --git a/tests/test_sync.py b/tests/test_sync.py index 597e4375d3..51a27653ab 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -44,7 +44,7 @@ def tearDown(self): def test_inspect_sync_dir_notgit(self, tmp_dir): """Try syncing an empty directory""" psync = nf_core.sync.PipelineSync(tmp_dir) - with pytest.raises(nf_core.sync.SyncException) as exc_info: + with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() assert "does not appear to be a git repository" in exc_info.value.args[0] @@ -56,7 +56,7 @@ def test_inspect_sync_dir_dirty(self): # Try to sync, check we halt with the right error psync = nf_core.sync.PipelineSync(self.pipeline_dir) try: - with pytest.raises(nf_core.sync.SyncException) as exc_info: + with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() assert exc_info.value.args[0].startswith("Uncommitted changes found in pipeline directory!") finally: @@ -66,7 +66,7 @@ def test_get_wf_config_no_branch(self): """Try getting a workflow config when the branch doesn't exist""" # Try to sync, check we halt with the right error psync = nf_core.sync.PipelineSync(self.pipeline_dir, from_branch="foo") - with pytest.raises(nf_core.sync.SyncException) as exc_info: + with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() psync.get_wf_config() assert exc_info.value.args[0] == "Branch `foo` not found!" 
@@ -76,7 +76,7 @@ def test_get_wf_config_missing_required_config(self): # Try to sync, check we halt with the right error psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.required_config_vars = ["fakethisdoesnotexist"] - with pytest.raises(nf_core.sync.SyncException) as exc_info: + with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() psync.get_wf_config() # Check that we did actually get some config back @@ -99,7 +99,7 @@ def test_checkout_template_branch_no_template(self): psync.repo.delete_head("TEMPLATE") - with pytest.raises(nf_core.sync.SyncException) as exc_info: + with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: psync.checkout_template_branch() assert exc_info.value.args[0] == "Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'" @@ -165,7 +165,7 @@ def test_push_template_branch_error(self): test_fn.touch() psync.commit_template_changes() # Try to push changes - with pytest.raises(nf_core.sync.PullRequestException) as exc_info: + with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info: psync.push_template_branch() assert exc_info.value.args[0].startswith("Could not push TEMPLATE branch") @@ -220,7 +220,7 @@ def test_push_merge_branch_without_create_branch(self): psync.get_wf_config() psync.repo.create_remote("origin", self.remote_path) - with pytest.raises(nf_core.sync.PullRequestException) as exc_info: + with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info: psync.push_merge_branch() assert exc_info.value.args[0].startswith(f"Could not push branch '{psync.merge_branch}'") @@ -329,7 +329,7 @@ def test_make_pull_request_bad_response(self, mock_post, mock_get): psync.gh_username = "bad_url" psync.gh_repo = "bad_url/response" os.environ["GITHUB_AUTH_TOKEN"] = "test" - with pytest.raises(nf_core.sync.PullRequestException) as exc_info: + with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info: psync.make_pull_request() assert exc_info.value.args[0].startswith( "Something went badly wrong - GitHub API PR failed - got return code 404" @@ -420,6 +420,6 @@ def test_reset_target_dir_fake_branch(self): psync.original_branch = "fake_branch" - with pytest.raises(nf_core.sync.SyncException) as exc_info: + with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: psync.reset_target_dir() assert exc_info.value.args[0].startswith("Could not reset to original branch `fake_branch`") diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index 154a31fca6..c7088b9282 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -1,8 +1,5 @@ -import tempfile from pathlib import Path -import pytest - from .utils import with_temporary_file, with_temporary_folder diff --git a/tests/test_utils.py b/tests/test_utils.py index 90d1886dbd..4b5ab19fce 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -112,14 +112,14 @@ def test_load_pipeline_config(self): def test_list_files_git(self): """Test listing pipeline files using `git ls`""" self.pipeline_obj._list_files() - assert os.path.join(self.test_pipeline_dir, "main.nf") in self.pipeline_obj.files + assert Path(self.test_pipeline_dir, "main.nf") in self.pipeline_obj.files @with_temporary_folder def test_list_files_no_git(self, tmpdir): """Test listing pipeline files without `git-ls`""" # Create a test file in a temporary directory - tmp_fn = os.path.join(tmpdir, "testfile") - Path(tmp_fn).touch() + tmp_fn = Path(tmpdir, "testfile") + tmp_fn.touch() pipeline_obj = nf_core.utils.Pipeline(tmpdir) 
pipeline_obj._list_files() assert tmp_fn in pipeline_obj.files @@ -134,7 +134,7 @@ def test_request_cant_create_cache(self, mock_mkd, mock_exists): def test_pip_package_pass(self): result = nf_core.utils.pip_package("multiqc=1.10") - assert type(result) == dict + assert isinstance(result, dict) @mock.patch("requests.get") def test_pip_package_timeout(self, mock_get): diff --git a/tests/utils.py b/tests/utils.py index 198ac3d583..89c1328818 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -5,9 +5,7 @@ import functools import os import tempfile -from contextlib import contextmanager -from pathlib import Path -from typing import Any, Callable, Generator, Tuple +from typing import Any, Callable, Tuple import responses @@ -27,7 +25,7 @@ GITLAB_BRANCH_ORG_PATH_BRANCH = "org-path" GITLAB_BRANCH_TEST_OLD_SHA = "e772abc22c1ff26afdf377845c323172fb3c19ca" GITLAB_BRANCH_TEST_NEW_SHA = "7d73e21f30041297ea44367f2b4fd4e045c0b991" -GITLAB_NFTEST_BRANCH = "nf-test-tests" +GITLAB_NFTEST_BRANCH = "nf-test-tests-self-hosted-runners" def with_temporary_folder(func: Callable[..., Any]) -> Callable[..., Any]:
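
Most of the test-suite churn above is mechanical clean-up driven by the new Ruff-based linting: the redundant `"r"` mode is dropped from `open()` calls, singletons are compared with `is True` / `is False` / `is None` instead of `==`, single-letter loop variables such as `l` get descriptive names, bare `except:` clauses become explicit exception types, and `type(x) == dict` checks become `isinstance()`. The snippet below is a minimal, hypothetical sketch of those patterns side by side; the file contents and function names are invented for illustration and are not taken from the nf-core codebase.

```python
"""Hypothetical sketch of the recurring autofix patterns in the diff above (not repository code)."""

from pathlib import Path


def read_config(path: Path) -> dict:
    """Parse simple `key = value` lines, loosely modelled on the config parsing in the download test."""
    config = {}
    with open(path) as fh:  # "r" is the default mode, so `open(path, "r")` becomes `open(path)`
        for line in fh:  # a descriptive loop name replaces the ambiguous single-letter `l`
            try:
                key, value = line.split(" = ", 1)
            except ValueError:  # an explicit exception type replaces a bare `except:`
                continue
            config[key.strip()] = value.strip().strip("'\"")
    return config


def test_read_config(tmp_path: Path) -> None:
    cfg = tmp_path / "nextflow.config"
    cfg.write_text("validate_params = true\nprocess.cpus = 2\n")
    config = read_config(cfg)
    assert isinstance(config, dict)  # `isinstance()` replaces `type(config) == dict`
    # identity comparisons for singletons: `is True` / `is None` rather than `== True` / `== None`
    assert ("validate_params" in config) is True
    assert config.get("does_not_exist") is None
```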
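
The download and sync test updates also track a rename of the custom exception classes: names such as `ImageExists`, `RegistryNotFound`, `SyncException` and `PullRequestException` gain an `Error` suffix (the pep8-naming convention for exceptions), so every `pytest.raises(...)` reference changes with them. A minimal, hypothetical sketch of that pattern follows; the classes and functions are invented for illustration and are not the real download code.

```python
import pytest


class ContainerError(Exception):
    """Parent error that groups more specific failures as nested classes."""

    class ImageExistsError(Exception):
        """Raised when the target image file already exists (previously named `ImageExists`)."""


def pull_image(already_present: bool) -> None:
    # Stand-in for a pull routine that refuses to overwrite an existing image file
    if already_present:
        raise ContainerError.ImageExistsError("image has already been pulled")


def test_pull_image_already_present() -> None:
    # pytest.raises() must reference the renamed nested class, mirroring the diff above
    with pytest.raises(ContainerError.ImageExistsError):
        pull_image(already_present=True)
```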
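
One change that is more than cosmetic is the `mock_fetch_wf_config` return value in `test_get_singularity_images`: the old dict literal repeated the key `"process.mapping.container"`, and Python silently keeps only the last duplicate, so the mock only ever exposed a single container. Giving each process a unique key restores all three entries. A small demonstration of the pitfall, reusing the values from the diff:

```python
# Duplicate keys in a dict literal are legal, but only the last value survives,
# so the old mock collapsed to a single container.
old_mock = {
    "process.mapping.container": "helloworld",
    "process.mapping.container": "helloooooooworld",
    "process.mapping.container": "ewels/multiqc:gorewrite",
}
assert old_mock == {"process.mapping.container": "ewels/multiqc:gorewrite"}

# Unique keys, as in the updated test, keep every container.
new_mock = {
    "process.helloworld.container": "helloworld",
    "process.hellooworld.container": "helloooooooworld",
    "process.mapping.container": "ewels/multiqc:gorewrite",
}
assert len(new_mock) == 3
```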
[Truncated fragment: the remainder of the diff is a garbled excerpt of pipeline test data — a software versions table listing CUSTOM_DUMPSOFTWAREVERSIONS (python 3.11.7, yaml 5.4.1), TOOL1 (tool1 0.11.9), TOOL2 (tool2 1.9) and the Workflow (Nextflow), followed by repeated FastQC "File type: Conventional base calls" rows.]