diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index b5138b1d0d..1a13a981a8 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,7 +1,7 @@ { "name": "nfcore", "image": "nfcore/gitpod:latest", - "postCreateCommand": "python -m pip install --upgrade -r ../requirements-dev.txt -e ../", + "postCreateCommand": "python -m pip install --upgrade -r ../requirements-dev.txt -e ../ && pre-commit install --install-hooks", "remoteUser": "gitpod", // Configure tool-specific properties. diff --git a/.github/workflows/pytest-frozen-ubuntu-20.04.yml b/.github/workflows/pytest-frozen-ubuntu-20.04.yml new file mode 100644 index 0000000000..6d49145ed7 --- /dev/null +++ b/.github/workflows/pytest-frozen-ubuntu-20.04.yml @@ -0,0 +1,52 @@ +name: Python tests Ubuntu-20.04 (frozen) +# This workflow is triggered on pushes and PRs to the repository. +# It runs the test suite against the stock git version shipped with Ubuntu 20.04 +on: + push: + branches: + - dev + pull_request: + release: + types: [published] + +# Cancel if a newer run is started +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + pytest: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + name: Check out source-code repository + + - name: Set up Python 3.8 + uses: actions/setup-python@v3 + with: + python-version: "3.8" + + - name: Install python dependencies + run: | + python -m pip install --upgrade pip -r requirements-dev.txt + pip install -e . + + - name: Downgrade git to the Ubuntu official repository's version + run: | + sudo apt remove git git-man + sudo add-apt-repository --remove ppa:git-core/ppa + sudo apt install git + + - name: Install Nextflow + uses: nf-core/setup-nextflow@v1 + with: + version: "latest-everything" + + - name: Test with pytest + run: python3 -m pytest tests/ --color=yes --cov-report=xml --cov-config=.github/.coveragerc --cov=nf_core + + - uses: codecov/codecov-action@v1 + name: Upload code coverage report + if: success() + with: + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.gitpod.yml b/.gitpod.yml index 64994f3c08..263fcc41db 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -1,7 +1,10 @@ image: nfcore/gitpod:latest tasks: - - name: install current state of nf-core/tools - command: python -m pip install --upgrade -r requirements-dev.txt -e . + - name: install current state of nf-core/tools and setup pre-commit + command: | + python -m pip install -e . + python -m pip install -r requirements-dev.txt + pre-commit install --install-hooks vscode: extensions: # based on nf-core.nf-core-extensionpack - codezombiech.gitignore # Language support for .gitignore files diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 74ec28d1e0..eddb0f1048 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ repos: rev: 22.3.0 hooks: - id: black - language_version: python3.10 + language_version: python3.9 - repo: https://github.com/pycqa/isort rev: 5.10.1 hooks: diff --git a/CHANGELOG.md b/CHANGELOG.md index c6778222fa..f40c1c27f9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ ### Template +- Ignore files in `bin/` directory when running prettier. 
- Fix lint warnings for `samplesheet_check.nf` module - Add codespaces template ([#1957](https://github.com/nf-core/tools/pull/1957)) - Check that the workflow name provided with a template doesn't contain dashes ([#1822](https://github.com/nf-core/tools/pull/1822)) @@ -20,6 +21,7 @@ ### General +- Use pre-commit to run Prettier if Prettier is not available ([#1983](https://github.com/nf-core/tools/pull/1983)) and initialize pre-commit in Gitpod and Codespaces. - Refactor CLI flag `--hide-progress` to be at the top-level group, like `--verbose` ([#2016](https://github.com/nf-core/tools/pull/2016)) - Fix error in tagging GitPod docker images during releases - `nf-core sync` now supports the template YAML file using `-t/--template-yaml`. @@ -32,15 +34,18 @@ - Add file `versions.yml` when generating `test.yml` with `nf-core modules create-test-yml` but don't check for md5sum [#1963](https://github.com/nf-core/tools/pull/1963) - Mock biocontainers and anaconda api calls in modules and subworkflows tests [#1967](https://github.com/nf-core/tools/pull/1967) - Run tests with Python 3.11 ([#1970](https://github.com/nf-core/tools/pull/1970)) +- Run tests with a realistic version of git ([#2043](https://github.com/nf-core/tools/pull/2043)) - Bump promoted Python version from 3.7 to 3.8 ([#1971](https://github.com/nf-core/tools/pull/1971)) - Fix incorrect file deletion in `nf-core launch` when `--params_in` has the same name as `--params_out` - Updated GitHub actions ([#1998](https://github.com/nf-core/tools/pull/1998), [#2001](https://github.com/nf-core/tools/pull/2001)) -- Code maintenance ([#1818](https://github.com/nf-core/tools/pull/1818), [#2032](https://github.com/nf-core/tools/pull/2032)) +- Code maintenance ([#1818](https://github.com/nf-core/tools/pull/1818), [#2032](https://github.com/nf-core/tools/pull/2032), [#2073](https://github.com/nf-core/tools/pull/2073)) - Track from where modules and subworkflows are installed ([#1999](https://github.com/nf-core/tools/pull/1999)) - Replace ModulesCommand and SubworkflowsCommand with ComponentsCommand ([#2000](https://github.com/nf-core/tools/pull/2000)) - Don't print source file + line number on logging messages (except when verbose) ([#2015](https://github.com/nf-core/tools/pull/2015)) - Extended the chat notifications to Slack ([#1829](https://github.com/nf-core/tools/pull/1829)) - Allow other remote URLs not starting with `http` ([#2061](https://github.com/nf-core/tools/pull/2061)) +- Prevent installation with unsupported Python versions ([#2075](https://github.com/nf-core/tools/pull/2075)) +- Automatically format `test.yml` content with Prettier ([#2078](https://github.com/nf-core/tools/pull/2078)) ### Modules diff --git a/README.md b/README.md index 0f21412215..aa985cd471 100644 --- a/README.md +++ b/README.md @@ -214,6 +214,22 @@ pip install --upgrade nf-core Please refer to the respective documentation for further details on managing packages, for example [conda](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-pkgs.html#updating-packages) or [pip](https://packaging.python.org/en/latest/tutorials/installing-packages/#upgrading-packages). +### Activate shell completions for nf-core/tools + +Auto-completion for the `nf-core` command is available for bash, zsh and fish. To activate it, add the corresponding line below to your shell's config file. 
+shell | shell config file | command +--- | --- | --- +bash | ~/.bashrc | `eval "$(_NF_CORE_COMPLETE=bash_source nf-core)"` +zsh | ~/.zshrc | `eval "$(_NF_CORE_COMPLETE=zsh_source nf-core)"` +fish | ~/.config/fish/completions/nf-core.fish | `eval (env _NF_CORE_COMPLETE=fish_source nf-core)` + +After restarting your shell session you should have auto-completion for the `nf-core` command and all its sub-commands and options. + +> **NB:** The added line runs the `nf-core` command on every shell startup (which will also slow down your shell's startup time), so it works best with nf-core/tools installed globally. +> Alternatively, wrap it inside `if type nf-core > /dev/null; then` … `fi` for bash and zsh, or `if command -v nf-core &> /dev/null; eval (env _NF_CORE_COMPLETE=fish_source nf-core); end` for fish, so that it only runs when `nf-core` is available. You then need to source the config file in your environment for the completions to be activated. + +> **NB:** If you see the error `command not found: compdef`, make sure that your config file contains the line `autoload -Uz compinit && compinit` before the eval line. + ## Listing pipelines The command `nf-core list` shows all available nf-core pipelines along with their latest version, when that was published and how recently the pipeline code was pulled to your local system (if at all). diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 7ba0c945bf..2966f758eb 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -79,27 +79,28 @@ def run_nf_core(): - # Print nf-core header - stderr.print(f"\n[green]{' ' * 42},--.[grey39]/[green],-.", highlight=False) - stderr.print("[blue] ___ __ __ __ ___ [green]/,-._.--~\\", highlight=False) - stderr.print(r"[blue] |\ | |__ __ / ` / \ |__) |__ [yellow] } {", highlight=False) - stderr.print(r"[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-,", highlight=False) - stderr.print("[green] `._,._,'\n", highlight=False) - stderr.print( - f"[grey39] nf-core/tools version {nf_core.__version__} - [link=https://nf-co.re]https://nf-co.re[/]", - highlight=False, - ) - try: - is_outdated, _, remote_vers = nf_core.utils.check_if_outdated() - if is_outdated: - stderr.print( - f"[bold bright_yellow] There is a new version of nf-core/tools available! ({remote_vers})", - highlight=False, - ) - except Exception as e: - log.debug(f"Could not check latest version: {e}") - stderr.print("\n") - + # print nf-core header if environment variable is not set + if os.environ.get("_NF_CORE_COMPLETE") is None: + # Print nf-core header + stderr.print(f"\n[green]{' ' * 42},--.[grey39]/[green],-.", highlight=False) + stderr.print("[blue] ___ __ __ __ ___ [green]/,-._.--~\\", highlight=False) + stderr.print(r"[blue] |\ | |__ __ / ` / \ |__) |__ [yellow] } {", highlight=False) + stderr.print(r"[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-,", highlight=False) + stderr.print("[green] `._,._,'\n", highlight=False) + stderr.print( + f"[grey39] nf-core/tools version {nf_core.__version__} - [link=https://nf-co.re]https://nf-co.re[/]", + highlight=False, + ) + try: + is_outdated, _, remote_vers = nf_core.utils.check_if_outdated() + if is_outdated: + stderr.print( + f"[bold bright_yellow] There is a new version of nf-core/tools available! 
({remote_vers})", + highlight=False, + ) + except Exception as e: + log.debug(f"Could not check latest version: {e}") + stderr.print("\n") # Launch the click cli nf_core_cli(auto_envvar_prefix="NFCORE") diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 7c2fd48d97..9a0565296e 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -69,7 +69,7 @@ def get_repo_info(directory, use_prompt=True): ).unsafe_ask() log.info("To avoid this prompt in the future, add the 'org_path' key to a root '%s' file.", config_fn.name) if rich.prompt.Confirm.ask("[bold][blue]?[/] Would you like me to add this config now?", default=True): - with open(os.path.join(dir, ".nf-core.yml"), "a+") as fh: + with open(config_fn, "a+") as fh: fh.write(f"org_path: {org}\n") log.info(f"Config added to '{config_fn.name}'") diff --git a/nf_core/components/info.py b/nf_core/components/info.py index b371290006..b89baaf99e 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -103,13 +103,13 @@ def init_mod_name(self, component): components = self.get_components_clone_modules() else: components = self.modules_json.get_all_components(self.component_type).get( - self.modules_repo.remote_url + self.modules_repo.remote_url, {} ) components = [ component if directory == self.modules_repo.repo_path else f"{directory}/{component}" for directory, component in components ] - if components is None: + if not components: raise UserWarning( f"No {self.component_type[:-1]} installed from '{self.modules_repo.remote_url}'" ) diff --git a/nf_core/components/remove.py b/nf_core/components/remove.py index 0916d56e85..528f032124 100644 --- a/nf_core/components/remove.py +++ b/nf_core/components/remove.py @@ -161,10 +161,8 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals else: log.info(f"Removed files for '{component}'.") else: - installed_by = modules_json.modules_json["repos"][self.modules_repo.remote_url][self.component_type][ - repo_path - ][component]["installed_by"] - if installed_by == self.component_type: + installed_by = modules_json.get_installed_by_entries(self.component_type, component) + if installed_by == [self.component_type]: log.error( f"Did not remove '{component}', because it was also manually installed. Only updated 'installed_by' entry in modules.json." 
) diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 7e1d67ce87..90a6e0e6b9 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -9,7 +9,12 @@ import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command import ComponentCommand -from nf_core.components.components_utils import prompt_component_version_sha +from nf_core.components.components_utils import ( + get_components_to_install, + prompt_component_version_sha, +) +from nf_core.components.install import ComponentInstall +from nf_core.components.remove import ComponentRemove from nf_core.modules.modules_differ import ModulesDiffer from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import ModulesRepo @@ -234,7 +239,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr else: updated.append(component) recursive_update = True - modules_to_update, subworkflows_to_update = self.get_components_to_update(component, modules_repo) + modules_to_update, subworkflows_to_update = self.get_components_to_update(component) if not silent and len(modules_to_update + subworkflows_to_update) > 0: log.warning( f"All modules and subworkflows linked to the updated {self.component_type[:-1]} will be added to the same diff file.\n" @@ -254,6 +259,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr self.update_linked_components( modules_to_update, subworkflows_to_update, updated, check_diff_exist=False ) + self.manage_changes_in_linked_components(component, modules_to_update, subworkflows_to_update) elif self.show_diff: ModulesDiffer.print_diff( @@ -278,10 +284,10 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr # Clear the component directory and move the installed files there self.move_files_from_tmp_dir(component, install_tmp_dir, modules_repo.repo_path, version) # Update modules.json with newly installed component - self.modules_json.update(self.component_type, modules_repo, component, version, self.component_type) + self.modules_json.update(self.component_type, modules_repo, component, version, installed_by=None) updated.append(component) recursive_update = True - modules_to_update, subworkflows_to_update = self.get_components_to_update(component, modules_repo) + modules_to_update, subworkflows_to_update = self.get_components_to_update(component) if not silent and not self.update_all and len(modules_to_update + subworkflows_to_update) > 0: log.warning( f"All modules and subworkflows linked to the updated {self.component_type[:-1]} will be {'asked for update' if self.show_diff else 'automatically updated'}.\n" @@ -299,10 +305,16 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr if recursive_update and len(modules_to_update + subworkflows_to_update) > 0: # Update linked components self.update_linked_components(modules_to_update, subworkflows_to_update, updated) + self.manage_changes_in_linked_components(component, modules_to_update, subworkflows_to_update) else: # Don't save to a file, just iteratively update the variable self.modules_json.update( - self.component_type, modules_repo, component, version, self.component_type, write_file=False + self.component_type, + modules_repo, + component, + version, + installed_by=None, + write_file=False, ) if self.save_diff_fn: @@ -819,7 +831,7 @@ def try_apply_patch( return True - def get_components_to_update(self, component, modules_repo): + def 
get_components_to_update(self, component): """ Get all modules and subworkflows linked to the updated component. @@ -829,9 +841,7 @@ mods_json = self.modules_json.get_modules_json() modules_to_update = [] subworkflows_to_update = [] - installed_by = mods_json["repos"][modules_repo.remote_url][self.component_type][modules_repo.repo_path][ - component - ]["installed_by"] + installed_by = self.modules_json.get_installed_by_entries(self.component_type, component) if self.component_type == "modules": # All subworkflow names in the installed_by section of a module are subworkflows using this module @@ -871,6 +881,34 @@ def update_linked_components(self, modules_to_update, subworkflows_to_update, up self.update(m_update, silent=True, updated=updated, check_diff_exist=check_diff_exist) self._reset_component_type(original_component_type, original_update_all) + def manage_changes_in_linked_components(self, component, modules_to_update, subworkflows_to_update): + """Check for linked components added or removed in the new subworkflow version""" + if self.component_type == "subworkflows": + subworkflow_directory = Path(self.dir, self.component_type, self.modules_repo.repo_path, component) + included_modules, included_subworkflows = get_components_to_install(subworkflow_directory) + # If a new module/subworkflow is included in the subworkflow and wasn't included before + for module in included_modules: + if module not in modules_to_update: + log.info(f"Installing newly included module '{module}' for '{component}'") + install_module_object = ComponentInstall(self.dir, "modules", installed_by=component) + install_module_object.install(module, silent=True) + for subworkflow in included_subworkflows: + if subworkflow not in subworkflows_to_update: + log.info(f"Installing newly included subworkflow '{subworkflow}' for '{component}'") + install_subworkflow_object = ComponentInstall(self.dir, "subworkflows", installed_by=component) + install_subworkflow_object.install(subworkflow, silent=True) + # If a module/subworkflow has been removed from the subworkflow + for module in modules_to_update: + if module not in included_modules: + log.info(f"Removing module '{module}', which is no longer included in '{component}'.") + remove_module_object = ComponentRemove("modules", self.dir) + remove_module_object.remove(module, removed_by=component) + for subworkflow in subworkflows_to_update: + if subworkflow not in included_subworkflows: + log.info(f"Removing subworkflow '{subworkflow}', which is no longer included in '{component}'.") + remove_subworkflow_object = ComponentRemove("subworkflows", self.dir) + remove_subworkflow_object.remove(subworkflow, removed_by=component) + def _change_component_type(self, new_component_type): original_component_type = self.component_type self.component_type = new_component_type diff --git a/nf_core/create.py b/nf_core/create.py index 299ff9397f..3a5f1a502b 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -38,7 +38,7 @@ class PipelineCreate: force (bool): Overwrites a given workflow directory with the same name. Defaults to False. May the force be with you. outdir (str): Path to the local output directory. - template_yaml (str): Path to template.yml file for pipeline creation settings. + template_yaml_path (str): Path to template.yml file for pipeline creation settings. plain (bool): If true the Git repository will be initialized plain. default_branch (str): Specifies the --initial-branch name. 
""" @@ -528,10 +528,10 @@ def git_init_pipeline(self): log.info("Initialising pipeline git repository") repo = git.Repo.init(self.outdir) - if default_branch: - repo.active_branch.rename(default_branch) repo.git.add(A=True) repo.index.commit(f"initial template build from nf-core/tools, version {nf_core.__version__}") + if default_branch: + repo.active_branch.rename(default_branch) repo.git.branch("TEMPLATE") repo.git.branch("dev") log.info( diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 60385ea0d3..0f4ece6c49 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -89,23 +89,29 @@ def __init__( if self.repo_type == "pipeline": modules_json = ModulesJson(self.dir) modules_json.check_up_to_date() - all_pipeline_modules = modules_json.get_all_components(self.component_type) - if all_pipeline_modules is not None and self.modules_repo.remote_url in all_pipeline_modules: - module_dir = Path(self.dir, "modules", self.modules_repo.repo_path) - self.all_remote_modules = [ - NFCoreModule(m[1], self.modules_repo.remote_url, module_dir / m[1], self.repo_type, Path(self.dir)) - for m in all_pipeline_modules[self.modules_repo.remote_url] - ] # m = (module_dir, module_name) - if not self.all_remote_modules: - raise LookupError(f"No modules from {self.modules_repo.remote_url} installed in pipeline.") - local_module_dir = Path(self.dir, "modules", "local") + self.all_remote_modules = [] + for repo_url, components in modules_json.get_all_components(self.component_type).items(): + for org, comp in components: + self.all_remote_modules.append( + NFCoreModule( + comp, + repo_url, + Path(self.dir, self.component_type, org, comp), + self.repo_type, + Path(self.dir), + ) + ) + if not self.all_remote_modules: + raise LookupError(f"No modules from {self.modules_repo.remote_url} installed in pipeline.") + local_module_dir = Path(self.dir, "modules", "local") + self.all_local_modules = [] + if local_module_dir.exists(): self.all_local_modules = [ - NFCoreModule(m, None, local_module_dir / m, self.repo_type, Path(self.dir), nf_core_module=False) + NFCoreModule( + m, None, Path(local_module_dir, m), self.repo_type, Path(self.dir), remote_module=False + ) for m in self.get_local_components() ] - - else: - raise LookupError(f"No modules from {self.modules_repo.remote_url} installed in pipeline.") else: module_dir = Path(self.dir, self.default_modules_path) self.all_remote_modules = [ diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 7eaa46a874..c4f4bf9bc8 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -6,6 +6,7 @@ import re import sqlite3 from pathlib import Path +from urllib.parse import urlparse, urlunparse import requests @@ -254,30 +255,59 @@ def check_process_section(self, lines, fix_version, progress_bar): self.passed.append(("process_standard_label", "Correct process label", self.main_nf)) else: self.warned.append(("process_standard_label", "Process label unspecified", self.main_nf)) - for l in lines: + for i, l in enumerate(lines): + url = None if _container_type(l) == "bioconda": bioconda_packages = [b for b in l.split() if "bioconda::" in b] l = l.strip(" '\"") if _container_type(l) == "singularity": # e.g. "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' :" -> v1.2.0_cv1 # e.g. 
"https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' :" -> 0.11.9--0 - match = re.search(r"(?:/)?(?:biocontainers_)?(?::)?([A-Za-z\d\-_.]+?)(?:\.img)?['\"]", l) + match = re.search(r"(?:/)?(?:biocontainers_)?(?::)?([A-Za-z\d\-_.]+?)(?:\.img)?'", l) if match is not None: singularity_tag = match.group(1) self.passed.append(("singularity_tag", f"Found singularity tag: {singularity_tag}", self.main_nf)) else: self.failed.append(("singularity_tag", "Unable to parse singularity tag", self.main_nf)) singularity_tag = None + url = urlparse(l.split("'")[0]) if _container_type(l) == "docker": # e.g. "quay.io/biocontainers/krona:2.7.1--pl526_5' }" -> 2.7.1--pl526_5 # e.g. "biocontainers/biocontainers:v1.2.0_cv1' }" -> v1.2.0_cv1 - match = re.search(r"(?:[/])?(?::)?([A-Za-z\d\-_.]+)['\"]", l) + match = re.search(r"(?:[/])?(?::)?([A-Za-z\d\-_.]+)'", l) if match is not None: docker_tag = match.group(1) self.passed.append(("docker_tag", f"Found docker tag: {docker_tag}", self.main_nf)) else: self.failed.append(("docker_tag", "Unable to parse docker tag", self.main_nf)) docker_tag = None + url = urlparse(l.split("'")[0]) + # lint double quotes + if l.startswith("container"): + container_section = l + lines[i + 1] + lines[i + 2] + if container_section.count('"') > 2: + self.failed.append( + ("container_links", "Too many double quotes found when specifying containers", self.main_nf) + ) + # Try to connect to container URLs + if url is None: + continue + try: + response = requests.head( + "https://" + urlunparse(url) if not url.scheme == "https" else urlunparse(url), + stream=True, + allow_redirects=True, + ) + log.debug( + f"Connected to URL: {'https://' + urlunparse(url) if not url.scheme == 'https' else urlunparse(url)}, " + f"status_code: {response.status_code}" + ) + except (requests.exceptions.RequestException, sqlite3.InterfaceError) as e: + log.debug(f"Unable to connect to url '{urlunparse(url)}' due to error: {e}") + self.failed.append(("container_links", "Unable to connect to container URL", self.main_nf)) + continue + if response.status_code != 200: + self.failed.append(("container_links", "Unable to connect to container URL", self.main_nf)) # Check that all bioconda packages have build numbers # Also check for newer versions diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py index c2f1c2e1dd..61b416e5f7 100644 --- a/nf_core/modules/lint/module_changes.py +++ b/nf_core/modules/lint/module_changes.py @@ -5,6 +5,7 @@ import tempfile from pathlib import Path +import nf_core.modules.modules_repo from nf_core.modules.modules_differ import ModulesDiffer @@ -39,10 +40,12 @@ def module_changes(module_lint_object, module): return else: tempdir = module.module_dir + module.branch = module_lint_object.modules_json.get_component_branch( + "modules", module.module_name, module.repo_url, module.org + ) + modules_repo = nf_core.modules.modules_repo.ModulesRepo(remote_url=module.repo_url, branch=module.branch) - for f, same in module_lint_object.modules_repo.module_files_identical( - module.module_name, tempdir, module.git_sha - ).items(): + for f, same in modules_repo.module_files_identical(module.module_name, tempdir, module.git_sha).items(): if same: module.passed.append( ( diff --git a/nf_core/modules/lint/module_version.py b/nf_core/modules/lint/module_version.py index 3f68395031..1cf142e8eb 100644 --- a/nf_core/modules/lint/module_version.py +++ b/nf_core/modules/lint/module_version.py @@ -22,11 +22,8 @@ def module_version(module_lint_object, module): """ 
modules_json_path = Path(module_lint_object.dir, "modules.json") - # Verify that a git_sha exists in the `modules.json` file for this module - version = module_lint_object.modules_json.get_module_version( - module.module_name, module_lint_object.modules_repo.remote_url, module_lint_object.modules_repo.repo_path - ) + version = module_lint_object.modules_json.get_module_version(module.module_name, module.repo_url, module.org) if version is None: module.failed.append(("git_sha", "No git_sha entry in `modules.json`", modules_json_path)) return @@ -36,7 +33,11 @@ def module_version(module_lint_object, module): # Check whether a new version is available try: - modules_repo = nf_core.modules.modules_repo.ModulesRepo() + module.branch = module_lint_object.modules_json.get_component_branch( + "modules", module.module_name, module.repo_url, module.org + ) + modules_repo = nf_core.modules.modules_repo.ModulesRepo(remote_url=module.repo_url, branch=module.branch) + module_git_log = modules_repo.get_component_git_log(module.module_name, "modules") if version == next(module_git_log)["git_sha"]: module.passed.append(("module_version", "Module is the latest version", module.module_dir)) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 855a1adf66..ea35276633 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -546,6 +546,16 @@ def check_up_to_date(self): self.load() if not self.has_git_url_and_modules(): raise UserWarning + # check that all "installed_by" entries are lists and not strings + # [these strings come from an older dev version, so this check can probably be removed in a future release] + for _, repo_entry in self.modules_json.get("repos", {}).items(): + for component_type in ["modules", "subworkflows"]: + if component_type in repo_entry: + for install_dir, install_dir_entry in repo_entry[component_type].items(): + for _, component in install_dir_entry.items(): + if "installed_by" in component and isinstance(component["installed_by"], str): + log.debug(f"Updating {component} in modules.json") + component["installed_by"] = [component["installed_by"]] except UserWarning: log.info("The 'modules.json' file is not up to date. 
Recreating the 'modules.json' file.") self.create() @@ -656,7 +666,7 @@ def update( repo_component_entry[component_name]["git_sha"] = component_version repo_component_entry[component_name]["branch"] = branch try: - if installed_by not in repo_component_entry[component_name]["installed_by"]: + if installed_by is not None and installed_by not in repo_component_entry[component_name]["installed_by"]: repo_component_entry[component_name]["installed_by"].append(installed_by) except KeyError: repo_component_entry[component_name]["installed_by"] = [installed_by] @@ -917,8 +927,6 @@ def get_all_components(self, component_type): if component_type in repo_entry: for dir, components in repo_entry[component_type].items(): self.pipeline_components[repo] = [(dir, m) for m in components] - if self.pipeline_components == {}: - self.pipeline_components = None return self.pipeline_components @@ -961,6 +969,30 @@ def get_dependent_components( return dependent_components + def get_installed_by_entries(self, component_type, name): + """ + Retrieves all installed_by entries for a given component + + Args: + component_type (str): Type of component [modules, subworkflows] + name (str): Name of the component to find installed_by entries for + + Returns: + (list): The list of installed_by entries + + """ + if self.modules_json is None: + self.load() + installed_by_entries = [] + for repo_url, repo_entry in self.modules_json.get("repos", {}).items(): + if component_type in repo_entry: + for install_dir, components in repo_entry[component_type].items(): + if name in components: + installed_by_entries = components[name]["installed_by"] + break + + return installed_by_entries + def get_component_branch(self, component_type, component, repo_url, install_dir): """ Gets the branch from which the module/subworkflow was installed Returns: (str): The branch name Raises: - LookupError: If their is no branch entry in the `modules.json` + LookupError: If there is no branch entry in the `modules.json` """ if self.modules_json is None: self.load() diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 77a266b4e9..606514e55e 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -376,8 +376,8 @@ def module_files_identical(self, module_name, base_path, commit): else: self.checkout(commit) module_files = ["main.nf", "meta.yml"] - module_dir = self.get_component_dir(module_name, "modules") files_identical = {file: True for file in module_files} + module_dir = self.get_component_dir(module_name, "modules") for file in module_files: try: files_identical[file] = filecmp.cmp(os.path.join(module_dir, file), os.path.join(base_path, file)) diff --git a/nf_core/modules/nfcore_module.py b/nf_core/modules/nfcore_module.py index e8bff1f686..431ef23381 100644 --- a/nf_core/modules/nfcore_module.py +++ b/nf_core/modules/nfcore_module.py @@ -10,7 +10,7 @@ class NFCoreModule: Includes functionality for linting - def __init__(self, module_name, repo_url, module_dir, repo_type, base_dir, nf_core_module=True): + def __init__(self, module_name, repo_url, module_dir, repo_type, base_dir, remote_module=True): """ Initialize the object @@ -20,7 +20,7 @@ whether the directory is a pipeline or clone of nf-core/modules. 
base_dir (Path): The absolute path to the pipeline base dir - nf_core_module (bool): Whether the module is to be treated as a + remote_module (bool): Whether the module is to be treated as a nf-core or local module """ self.module_name = module_name @@ -36,14 +36,14 @@ def __init__(self, module_name, repo_url, module_dir, repo_type, base_dir, nf_co self.has_meta = False self.git_sha = None self.is_patched = False - self.is_patched = None - if nf_core_module: + if remote_module: # Initialize the important files self.main_nf = self.module_dir / "main.nf" self.meta_yml = self.module_dir / "meta.yml" repo_dir = self.module_dir.parts[: self.module_dir.parts.index(self.module_name.split("/")[0])][-1] + self.org = repo_dir self.test_dir = Path(self.base_dir, "tests", "modules", repo_dir, self.module_name) self.test_yml = self.test_dir / "test.yml" self.test_main_nf = self.test_dir / "main.nf" diff --git a/nf_core/modules/test_yml_builder.py b/nf_core/modules/test_yml_builder.py index 2d20785b7c..cef23fb6b3 100644 --- a/nf_core/modules/test_yml_builder.py +++ b/nf_core/modules/test_yml_builder.py @@ -25,6 +25,7 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand +from ..lint_utils import run_prettier_on_file from .modules_repo import ModulesRepo log = logging.getLogger(__name__) @@ -358,16 +359,19 @@ def print_test_yml(self): """ Generate the test yml file. """ + with tempfile.NamedTemporaryFile(mode="w+") as fh: + yaml.dump(self.tests, fh, Dumper=nf_core.utils.custom_yaml_dumper(), width=10000000) + run_prettier_on_file(fh.name) + fh.seek(0) + prettified_yml = fh.read() if self.test_yml_output_path == "-": console = rich.console.Console() - yaml_str = yaml.dump(self.tests, Dumper=nf_core.utils.custom_yaml_dumper(), width=10000000) - console.print("\n", Syntax(yaml_str, "yaml"), "\n") - return - - try: - log.info(f"Writing to '{self.test_yml_output_path}'") - with open(self.test_yml_output_path, "w") as fh: - yaml.dump(self.tests, fh, Dumper=nf_core.utils.custom_yaml_dumper(), width=10000000) - except FileNotFoundError as e: - raise UserWarning(f"Could not create test.yml file: '{e}'") + console.print("\n", Syntax(prettified_yml, "yaml"), "\n") + else: + try: + log.info(f"Writing to '{self.test_yml_output_path}'") + with open(self.test_yml_output_path, "w") as fh: + fh.write(prettified_yml) + except FileNotFoundError as e: + raise UserWarning(f"Could not create test.yml file: '{e}'") diff --git a/nf_core/pipeline-template/.prettierignore b/nf_core/pipeline-template/.prettierignore index 0a949c30b2..437d763d0c 100644 --- a/nf_core/pipeline-template/.prettierignore +++ b/nf_core/pipeline-template/.prettierignore @@ -9,3 +9,4 @@ results/ testing/ testing* *.pyc +bin/ diff --git a/nf_core/subworkflows/test_yml_builder.py b/nf_core/subworkflows/test_yml_builder.py index 4d3162bd7c..b2f6cd6ceb 100644 --- a/nf_core/subworkflows/test_yml_builder.py +++ b/nf_core/subworkflows/test_yml_builder.py @@ -28,6 +28,8 @@ from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import ModulesRepo +from ..lint_utils import run_prettier_on_file + log = logging.getLogger(__name__) @@ -374,16 +376,19 @@ def print_test_yml(self): """ Generate the test yml file. 
""" + with tempfile.NamedTemporaryFile(mode="w+") as fh: + yaml.dump(self.tests, fh, Dumper=nf_core.utils.custom_yaml_dumper(), width=10000000) + run_prettier_on_file(fh.name) + fh.seek(0) + prettified_yml = fh.read() if self.test_yml_output_path == "-": console = rich.console.Console() - yaml_str = yaml.dump(self.tests, Dumper=nf_core.utils.custom_yaml_dumper(), width=10000000) - console.print("\n", Syntax(yaml_str, "yaml"), "\n") - return - - try: - log.info(f"Writing to '{self.test_yml_output_path}'") - with open(self.test_yml_output_path, "w") as fh: - yaml.dump(self.tests, fh, Dumper=nf_core.utils.custom_yaml_dumper(), width=10000000) - except FileNotFoundError as e: - raise UserWarning(f"Could not create test.yml file: '{e}'") + console.print("\n", Syntax(prettified_yml, "yaml"), "\n") + else: + try: + log.info(f"Writing to '{self.test_yml_output_path}'") + with open(self.test_yml_output_path, "w") as fh: + fh.write(prettified_yml) + except FileNotFoundError as e: + raise UserWarning(f"Could not create test.yml file: '{e}'") diff --git a/setup.py b/setup.py index 81956e8eeb..ac78577950 100644 --- a/setup.py +++ b/setup.py @@ -35,6 +35,7 @@ "console_scripts": ["nf-core=nf_core.__main__:run_nf_core"], "refgenie.hooks.post_update": ["nf-core-refgenie=nf_core.refgenie:update_config"], }, + python_requires=">=3.7, <4", install_requires=required, packages=find_packages(exclude=("docs")), include_package_data=True, diff --git a/tests/modules/install.py b/tests/modules/install.py index 1c3f1aefc7..d01459f142 100644 --- a/tests/modules/install.py +++ b/tests/modules/install.py @@ -71,3 +71,15 @@ def test_modules_install_different_branch_succeed(self): modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_BRANCH ) + + +def test_modules_install_tracking(self): + """Test installing a module and finding 'modules' in the installed_by section of modules.json""" + self.mods_install.install("trimgalore") + + # Verify that the installed_by entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + mod_json = modules_json.get_modules_json() + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["trimgalore"][ + "installed_by" + ] == ["modules"] diff --git a/tests/modules/lint.py b/tests/modules/lint.py index 183276672b..476481a109 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -53,6 +53,9 @@ def test_modules_lint_new_modules(self): def test_modules_lint_no_gitlab(self): """Test linting a pipeline with no modules installed""" + self.mods_remove.remove("fastqc", force=True) + self.mods_remove.remove("multiqc", force=True) + self.mods_remove.remove("custom/dumpsoftwareversions", force=True) with pytest.raises(LookupError): nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) @@ -68,6 +71,17 @@ def test_modules_lint_gitlab_modules(self): assert len(module_lint.warned) >= 0 +def test_modules_lint_multiple_remotes(self): + """Lint modules from a different remote""" + self.mods_install.install("fastqc") + self.mods_install_gitlab.install("multiqc") + module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint.lint(print_results=False, all_modules=True) + assert len(module_lint.failed) == 0 + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_patched_modules(self): """ Test creating a patch file and applying it to a new version of the the files @@ -81,8 +95,13 @@ def 
test_modules_lint_patched_modules(self): # temporarily change the working directory to the pipeline directory # to avoid an error from try_apply_patch() during linting with set_wd(self.pipeline_dir): - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=PATCH_BRANCH) - module_lint.lint(print_results=False, all_modules=True) + module_lint = nf_core.modules.ModuleLint( + dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=PATCH_BRANCH, hide_progress=True + ) + module_lint.lint( + print_results=False, + all_modules=True, + ) assert len(module_lint.failed) == 0 assert len(module_lint.passed) > 0 diff --git a/tests/subworkflows/install.py b/tests/subworkflows/install.py index 94b2d79ba0..6c04c9ad22 100644 --- a/tests/subworkflows/install.py +++ b/tests/subworkflows/install.py @@ -76,3 +76,67 @@ def test_subworkflows_install_different_branch_fail(self): install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) # The bam_stats_samtools subworkflow does not exist in the branch-test branch assert install_obj.install("bam_stats_samtools") is False + + +def test_subworkflows_install_tracking(self): + """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" + self.subworkflow_install.install("bam_sort_stats_samtools") + + # Verify that the installed_by entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + mod_json = modules_json.get_modules_json() + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ + "bam_sort_stats_samtools" + ]["installed_by"] == ["subworkflows"] + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ + "installed_by" + ] == ["bam_sort_stats_samtools"] + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/stats"][ + "installed_by" + ] == ["bam_stats_samtools"] + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/sort"][ + "installed_by" + ] == ["bam_sort_stats_samtools"] + + # Clean directory + self.subworkflow_remove.remove("bam_sort_stats_samtools") + + +def test_subworkflows_install_tracking_added_already_installed(self): + """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" + self.subworkflow_install.install("bam_sort_stats_samtools") + self.subworkflow_install.install("bam_stats_samtools") + + # Verify that the installed_by entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + mod_json = modules_json.get_modules_json() + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ + "bam_sort_stats_samtools" + ]["installed_by"] == ["subworkflows"] + assert sorted( + mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ + "installed_by" + ] + ) == sorted(["bam_sort_stats_samtools", "subworkflows"]) + + # Clean directory + self.subworkflow_remove.remove("bam_sort_stats_samtools") + self.subworkflow_remove.remove("bam_stats_samtools") + + +def test_subworkflows_install_tracking_added_super_subworkflow(self): + """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" + self.subworkflow_install.install("bam_stats_samtools") + self.subworkflow_install.install("bam_sort_stats_samtools") + + # Verify that the 
installed_by entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + mod_json = modules_json.get_modules_json() + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ + "bam_sort_stats_samtools" + ]["installed_by"] == ["subworkflows"] + assert sorted( + mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ + "installed_by" + ] + ) == sorted(["subworkflows", "bam_sort_stats_samtools"]) diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py index defc9c687e..29e6cb1179 100644 --- a/tests/subworkflows/update.py +++ b/tests/subworkflows/update.py @@ -296,6 +296,33 @@ def test_update_all_subworkflows_from_module(self): ) +def test_update_change_of_included_modules(self): + """Update a subworkflow which has a module change in the new version.""" + # Install an old version of vcf_annotate_ensemblvep with tabix/bgziptabix and without tabix/tabix + self.subworkflow_install_module_change.install("vcf_annotate_ensemblvep") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Check that tabix/bgziptabix is there + assert "tabix/bgziptabix" in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() + # Check that tabix/tabix is not there + assert "tabix/tabix" not in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() + + # Update vcf_annotate_ensemblvep without tabix/bgziptabix and with tabix/tabix + update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) + assert update_obj.update("vcf_annotate_ensemblvep") is True + + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Check that tabix/bgziptabix is not there + assert "tabix/bgziptabix" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() + # Check that tabix/tabix is there + assert "tabix/tabix" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() + + def cmp_component(dir1, dir2): """Compare two versions of the same component""" files = ["main.nf", "meta.yml"] diff --git a/tests/test_modules.py b/tests/test_modules.py index 56a32af77b..c50c1f2ba8 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -36,7 +36,7 @@ def create_modules_repo_dummy(tmp_dir): # mock biocontainers and anaconda response with requests_mock.Mocker() as mock: - mock_api_calls(mock, "bpipe", "0.9.11") + mock_api_calls(mock, "bpipe", "0.9.11--hdfd78af_0") # bpipe is a valid package on bioconda that is very unlikely to ever be added to nf-core/modules module_create = nf_core.modules.ModuleCreate(root_dir, "bpipe/test", "@author", "process_single", False, False) module_create.create() @@ -152,12 +152,14 @@ def test_modulesrepo_class(self): test_modules_install_from_gitlab, test_modules_install_nomodule, test_modules_install_nopipeline, + test_modules_install_tracking, test_modules_install_trimgalore, test_modules_install_trimgalore_twice, ) from .modules.lint import ( test_modules_lint_empty, test_modules_lint_gitlab_modules, + test_modules_lint_multiple_remotes, test_modules_lint_new_modules, 
test_modules_lint_no_gitlab, test_modules_lint_patched_modules, diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 7ff0824dc2..552a2ab176 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -67,6 +67,12 @@ def setUp(self): force=False, sha=OLD_SUBWORKFLOWS_SHA, ) + self.subworkflow_install_module_change = nf_core.subworkflows.SubworkflowInstall( + self.pipeline_dir, + prompt=False, + force=False, + sha="8c343b3c8a0925949783dc547666007c245c235b", + ) self.mods_install = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=False, force=True) # Set up remove objects @@ -101,6 +107,9 @@ def tearDown(self): test_subworkflows_install_emptypipeline, test_subworkflows_install_from_gitlab, test_subworkflows_install_nosubworkflow, + test_subworkflows_install_tracking, + test_subworkflows_install_tracking_added_already_installed, + test_subworkflows_install_tracking_added_super_subworkflow, ) from .subworkflows.list import ( test_subworkflows_install_and_list_subworkflows, @@ -126,6 +135,7 @@ def tearDown(self): test_update_all, test_update_all_linked_components_from_subworkflow, test_update_all_subworkflows_from_module, + test_update_change_of_included_modules, test_update_with_config_dont_update, test_update_with_config_fix_all, test_update_with_config_fixed_version, diff --git a/tests/test_sync.py b/tests/test_sync.py index f0f6c7edca..597e4375d3 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -32,7 +32,9 @@ def setUp(self): self.create_obj.init_pipeline() self.remote_path = os.path.join(self.tmp_dir, "remote_repo") self.remote_repo = git.Repo.init(self.remote_path, bare=True) - self.remote_repo.active_branch.rename(default_branch) + + if self.remote_repo.active_branch.name != "master": + self.remote_repo.active_branch.rename(default_branch) def tearDown(self): if os.path.exists(self.tmp_dir): diff --git a/tests/utils.py b/tests/utils.py index 6296595cd3..77e94be464 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -70,18 +70,33 @@ def set_wd(path: Path): def mock_api_calls(mock, module, version): """Mock biocontainers and anaconda api calls for module""" - biocontainers_api_url = f"https://api.biocontainers.pro/ga4gh/trs/v2/tools/{module}/versions/{module}-{version}" + biocontainers_api_url = ( + f"https://api.biocontainers.pro/ga4gh/trs/v2/tools/{module}/versions/{module}-{version.split('--')[0]}" + ) anaconda_api_url = f"https://api.anaconda.org/package/bioconda/{module}" - mock.register_uri("GET", biocontainers_api_url, text="to modify when the api works and I can know what to add") anaconda_mock = { "status_code": 200, - "latest_version": version, + "latest_version": version.split("--")[0], "summary": "", "doc_url": "", "dev_url": "", - "files": [{"version": version}], + "files": [{"version": version.split("--")[0]}], "license": "", } - biocontainers_mock = {"status_code": 200, "images": [{"image_type": "Docker", "image_name": f"{module}-{version}"}]} + biocontainers_mock = { + "status_code": 200, + "images": [ + { + "image_type": "Singularity", + "image_name": f"https://depot.galaxyproject.org/singularity/{module}:{version}", + "updated": "2021-09-04T00:00:00Z", + }, + { + "image_type": "Docker", + "image_name": f"quay.io/biocontainers/{module}:{version}", + "updated": "2021-09-04T00:00:00Z", + }, + ], + } mock.register_uri("GET", anaconda_api_url, json=anaconda_mock) mock.register_uri("GET", biocontainers_api_url, json=biocontainers_mock)
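As a companion to the container-URL reachability check added to `nf_core/modules/lint/main_nf.py` above, here is a minimal standalone sketch of the same idea: normalise a container address to an `https://` URL and probe it with a `HEAD` request, treating connection errors or non-200 responses as failures. The helper name `check_container_url` and the example address are illustrative assumptions, not part of this PR:

```python
import requests  # same HTTP library the linter uses
from urllib.parse import urlparse, urlunparse


def check_container_url(raw_address):
    """Return True if the container address answers an HTTPS HEAD request with status 200."""
    url = urlparse(raw_address)
    # Bare addresses such as "depot.galaxyproject.org/..." parse with an empty
    # scheme, so prefix them with https:// before probing, as the linter does.
    target = urlunparse(url) if url.scheme == "https" else "https://" + urlunparse(url)
    try:
        response = requests.head(target, stream=True, allow_redirects=True)
    except requests.exceptions.RequestException as e:
        print(f"Unable to connect to '{target}' due to error: {e}")
        return False
    return response.status_code == 200


if __name__ == "__main__":
    # Example Singularity image address taken from the lint comments above (illustrative)
    print(check_container_url("depot.galaxyproject.org/singularity/fastqc:0.11.9--0"))
```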