Skip to content

Commit

Permalink
Merge branch 'dev' into add_custom_registry_to_nfcore_modules_lint
Browse files Browse the repository at this point in the history
  • Loading branch information
adamrtalbot authored Jun 15, 2023
2 parents 4fe8a04 + 59d2309 commit f63afd2
Show file tree
Hide file tree
Showing 17 changed files with 168 additions and 635 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
- Remove `aws_tower` profile ([#2287](https://github.com/nf-core/tools/pull/2287))
- Fixed the Slack report to include the pipeline name ([#2291](https://github.com/nf-core/tools/pull/2291))
- Fix link in the MultiQC report to point to exact version of output docs ([#2298](https://github.com/nf-core/tools/pull/2298))
- Updates seqeralabs/action-tower-launch to v2.0.0 ([#2301](https://github.com/nf-core/tools/pull/2301))
- Remove schema validation from `lib` folder and use Nextflow [nf-validation plugin](https://nextflow-io.github.io/nf-validation/) instead ([#1771](https://github.com/nf-core/tools/pull/1771/))
- Fix parsing of container directive when it is not typical nf-core format ([#2306](https://github.com/nf-core/tools/pull/2306))
- Add ability to specify custom registry for linting modules, defaults to quay.io ([#2313](https://github.com/nf-core/tools/pull/2313))

Expand All @@ -34,6 +36,7 @@

- GitPod base image: Always self-update to the latest version of Nextflow. Add [pre-commit](https://pre-commit.com/) dependency.
- GitPod configs: Update Nextflow as an init task, init pre-commit in pipeline config.
- Refgenie: Create `nxf_home/nf-core/refgenie_genomes.config` path if it doesn't exist ([#2312](https://github.com/nf-core/tools/pull/2312))

# [v2.8 - Ruthenium Monkey](https://github.com/nf-core/tools/releases/tag/2.8) - [2023-04-27]

Expand Down
63 changes: 49 additions & 14 deletions nf_core/download.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,17 @@
import rich
import rich.progress
from git.exc import GitCommandError, InvalidGitRepositoryError
from pkg_resources import parse_version as VersionParser

import nf_core
import nf_core.list
import nf_core.utils
from nf_core.synced_repo import RemoteProgressbar, SyncedRepo
from nf_core.utils import NFCORE_CACHE_DIR, NFCORE_DIR
from nf_core.utils import (
NFCORE_CACHE_DIR,
NFCORE_DIR,
SingularityCacheFilePathValidator,
)

log = logging.getLogger(__name__)
stderr = rich.console.Console(
Expand Down Expand Up @@ -157,7 +162,7 @@ def download_workflow(self):
sys.exit(1)

summary_log = [
f"Pipeline revision: '{', '.join(self.revision) if len(self.revision) < 5 else self.revision[0]+',...,['+str(len(self.revision)-2)+' more revisions],...,'+self.revision[-1]}'",
f"Pipeline revision: '{', '.join(self.revision) if len(self.revision) < 5 else self.revision[0]+',['+str(len(self.revision)-2)+' more revisions],'+self.revision[-1]}'",
f"Pull containers: '{self.container}'",
]
if self.container == "singularity" and os.environ.get("NXF_SINGULARITY_CACHEDIR") is not None:
Expand Down Expand Up @@ -493,8 +498,8 @@ def prompt_singularity_cachedir_remote(self):
cachedir_index = None
while cachedir_index is None:
prompt_cachedir_index = questionary.path(
"Specify a list of the remote images already present in the remote system :",
file_filter="*.txt",
"Specify a list of the container images that are already present on the remote system:",
validate=SingularityCacheFilePathValidator,
style=nf_core.utils.nfcore_question_style,
).unsafe_ask()
cachedir_index = os.path.abspath(os.path.expanduser(prompt_cachedir_index))
Expand Down Expand Up @@ -531,7 +536,8 @@ def read_remote_containers(self):
except (FileNotFoundError, LookupError) as e:
log.error(f"[red]Issue with reading the specified remote $NXF_SINGULARITY_CACHE index:[/]\n{e}\n")
if stderr.is_interactive and rich.prompt.Confirm.ask(f"[blue]Specify a new index file and try again?"):
self.prompt_singularity_cachedir_remote(retry=True)
self.singularity_cache_index = None # reset chosen path to index file.
self.prompt_singularity_cachedir_remote()
else:
log.info("Proceeding without consideration of the remote $NXF_SINGULARITY_CACHE index.")
self.singularity_cache_index = None
Expand Down Expand Up @@ -731,7 +737,7 @@ def find_container_images(self, workflow_directory):
r"(?<=container)[^\${}]+\${([^{}]+)}(?![^{]*})", contents
)

if bool(container_definition) & bool(container_definition.group(1)):
if bool(container_definition) and bool(container_definition.group(1)):
pattern = re.escape(container_definition.group(1))
# extract the quoted string(s) following the variable assignment
container_names = re.findall(r"%s\s*=\s*[\"\']([^\"\']+)[\"\']" % pattern, contents)
Expand Down Expand Up @@ -880,7 +886,8 @@ def get_singularity_images(self, current_revision=""):
# Raise exception if this is not possible
log.error("Not able to pull image. Service might be down or internet connection is dead.")
raise r
progress.update(task, advance=1)
finally:
progress.update(task, advance=1)

def singularity_image_filenames(self, container):
"""Check Singularity cache for image, copy to destination folder if found.
Expand Down Expand Up @@ -1048,9 +1055,12 @@ def singularity_pull_image(self, container, out_path, cache_path, progress):
if lines:
# something went wrong with the container retrieval
if any("FATAL: " in line for line in lines):
log.info("Singularity container retrieval failed with the following error:")
log.info("".join(lines))
raise FileNotFoundError(f'The container "{container}" is unavailable.\n{"".join(lines)}')
log.error(f'[bold red]The singularity image "{container}" could not be pulled:[/]\n\n{"".join(lines)}')
log.error(
f'Skipping failed pull of "{container}". Please troubleshoot the command \n"{" ".join(singularity_command)}"\n\n\n'
)
progress.remove_task(task)
return

# Copy cached download if we are using the cache
if cache_path:
Expand Down Expand Up @@ -1252,8 +1262,8 @@ def tidy_tags_and_branches(self):
desired_revisions = set(self.revision)

# determine what needs pruning
tags_to_remove = {tag for tag in self.repo.tags if tag.name not in desired_revisions}
heads_to_remove = {head for head in self.repo.heads if head.name not in desired_revisions}
tags_to_remove = {tag for tag in self.repo.tags if tag.name not in desired_revisions.union({"latest"})}
heads_to_remove = {head for head in self.repo.heads if head.name not in desired_revisions.union({"latest"})}

try:
# delete unwanted tags from repository
Expand All @@ -1268,9 +1278,34 @@ def tidy_tags_and_branches(self):
for head in heads_to_remove:
self.repo.delete_head(head)

# ensure all desired branches are available
# ensure all desired revisions/branches are available
for revision in desired_revisions:
self.checkout(revision)
if not self.repo.is_valid_object(revision):
self.checkout(revision)
self.repo.create_head(revision, revision)
if self.repo.head.is_detached:
self.repo.head.reset(index=True, working_tree=True)

# no branch exists, but one is required for Tower's UI to display revisions correctly. Thus, "latest" will be created.
if not bool(self.repo.heads):
if self.repo.is_valid_object("latest"):
# "latest" exists as tag but not as branch
self.repo.create_head("latest", "latest") # create a new head for latest
self.checkout("latest")
else:
# desired revisions may contain arbitrary branch names that do not correspond to valid semantic versioning patterns.
valid_versions = [
VersionParser(v)
for v in desired_revisions
if re.match(r"\d+\.\d+(?:\.\d+)*(?:[\w\-_])*", v)
]
# valid versions sorted in ascending order, last will be aliased as "latest".
latest = sorted(valid_versions)[-1]
self.repo.create_head("latest", latest)
self.checkout(latest)
if self.repo.head.is_detached:
self.repo.head.reset(index=True, working_tree=True)

self.heads = self.repo.heads

# get all tags and available remote_branches
Expand Down
2 changes: 0 additions & 2 deletions nf_core/lint/files_exist.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,6 @@ def files_exist(self):
docs/README.md
docs/usage.md
lib/nfcore_external_java_deps.jar
lib/NfcoreSchema.groovy
lib/NfcoreTemplate.groovy
lib/Utils.groovy
lib/WorkflowMain.groovy
Expand Down Expand Up @@ -161,7 +160,6 @@ def files_exist(self):
[os.path.join("docs", "README.md")],
[os.path.join("docs", "usage.md")],
[os.path.join("lib", "nfcore_external_java_deps.jar")],
[os.path.join("lib", "NfcoreSchema.groovy")],
[os.path.join("lib", "NfcoreTemplate.groovy")],
[os.path.join("lib", "Utils.groovy")],
[os.path.join("lib", "WorkflowMain.groovy")],
Expand Down
2 changes: 0 additions & 2 deletions nf_core/lint/files_unchanged.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@ def files_unchanged(self):
docs/images/nf-core-PIPELINE_logo_dark.png
docs/README.md'
lib/nfcore_external_java_deps.jar
lib/NfcoreSchema.groovy
lib/NfcoreTemplate.groovy
['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
Expand Down Expand Up @@ -105,7 +104,6 @@ def files_unchanged(self):
[os.path.join("docs", "images", f"nf-core-{short_name}_logo_dark.png")],
[os.path.join("docs", "README.md")],
[os.path.join("lib", "nfcore_external_java_deps.jar")],
[os.path.join("lib", "NfcoreSchema.groovy")],
[os.path.join("lib", "NfcoreTemplate.groovy")],
]
files_partial = [
Expand Down
8 changes: 4 additions & 4 deletions nf_core/lint/nextflow_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,11 +62,11 @@ def nextflow_config(self):
* Should always be set to default value:
``https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}``
* ``params.show_hidden_params``
* ``params.validationShowHiddenParams``
* Determines whether boilerplate params are showed by schema. Set to ``false`` by default
* ``params.schema_ignore_params``
* ``params.validationSchemaIgnoreParams``
* A comma separated string of inputs the schema validation should ignore.
Expand Down Expand Up @@ -130,8 +130,8 @@ def nextflow_config(self):
["process.time"],
["params.outdir"],
["params.input"],
["params.show_hidden_params"],
["params.schema_ignore_params"],
["params.validationShowHiddenParams"],
["params.validationSchemaIgnoreParams"],
]
# Throw a warning if these are missing
config_warn = [
Expand Down
9 changes: 7 additions & 2 deletions nf_core/pipeline-template/.github/workflows/awsfulltest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,21 +14,26 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Launch workflow via tower
uses: seqeralabs/action-tower-launch@v1
uses: seqeralabs/action-tower-launch@v2
# TODO nf-core: You can customise AWS full pipeline tests as required
# Add full size test data (but still relatively small datasets for few samples)
# on the `test_full.config` test runs with only one set of parameters {%- raw %}
with:
workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }}
access_token: ${{ secrets.TOWER_ACCESS_TOKEN }}
compute_env: ${{ secrets.TOWER_COMPUTE_ENV }}
revision: ${{ github.sha }}
workdir: s3://${{ secrets.AWS_S3_BUCKET }}{% endraw %}/work/{{ short_name }}/{% raw %}work-${{ github.sha }}{% endraw %}
parameters: |
{
"hook_url": "{% raw %}${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}{% endraw %}",
"outdir": "s3://{% raw %}${{ secrets.AWS_S3_BUCKET }}{% endraw %}/{{ short_name }}/{% raw %}results-${{ github.sha }}{% endraw %}"
}
profiles: test_full

- uses: actions/upload-artifact@v3
with:
name: Tower debug log file
path: tower_action_*.log
path: |
tower_action_*.log
tower_action_*.json
8 changes: 6 additions & 2 deletions nf_core/pipeline-template/.github/workflows/awstest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,18 +12,22 @@ jobs:
steps:
# Launch workflow using Tower CLI tool action {%- raw %}
- name: Launch workflow via tower
uses: seqeralabs/action-tower-launch@v1
uses: seqeralabs/action-tower-launch@v2
with:
workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }}
access_token: ${{ secrets.TOWER_ACCESS_TOKEN }}
compute_env: ${{ secrets.TOWER_COMPUTE_ENV }}
revision: ${{ github.sha }}
workdir: s3://${{ secrets.AWS_S3_BUCKET }}{% endraw %}/work/{{ short_name }}/{% raw %}work-${{ github.sha }}{% endraw %}
parameters: |
{
"outdir": "s3://{% raw %}${{ secrets.AWS_S3_BUCKET }}{% endraw %}/{{ short_name }}/{% raw %}results-test-${{ github.sha }}{% endraw %}"
}
profiles: test

- uses: actions/upload-artifact@v3
with:
name: Tower debug log file
path: tower_action_*.log
path: |
tower_action_*.log
tower_action_*.json
Loading

0 comments on commit f63afd2

Please sign in to comment.