diff --git a/CHANGELOG.md b/CHANGELOG.md index 603318ecd9..c84d203a77 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,7 @@ - Remove call to `getGenomeAttribute` in `main.nf` when running `nf-core create` without iGenomes ([#1670](https://github.com/nf-core/tools/issues/1670)) - Make `nf-core create` fail if Git default branch name is dev or TEMPLATE ([#1705](https://github.com/nf-core/tools/pull/1705)) - Convert `console` snippets to `bash` snippets in the template where applicable ([#1729](https://github.com/nf-core/tools/pull/1729)) +- Add `branch` field to module entries in `modules.json` to record what branch a module was installed from ([#1728](https://github.com/nf-core/tools/issues/1728)) ### Linting @@ -68,6 +69,7 @@ - Make `nf-core modules` commands work with arbitrary git remotes ([#1721](https://github.com/nf-core/tools/issues/1721)) - Add links in `README.md` for `info` and `patch` commands ([#1722](https://github.com/nf-core/tools/issues/1722)]) - Fix misc. issues with `--branch` and `--base-path` ([#1726](https://github.com/nf-core/tools/issues/1726)) +- Add `branch` field to module entries in `modules.json` to record what branch a module was installed from ([#1728](https://github.com/nf-core/tools/issues/1728)) ## [v2.4.1 - Cobolt Koala Patch](https://github.com/nf-core/tools/releases/tag/2.4) - [2022-05-16] diff --git a/nf_core/__main__.py b/nf_core/__main__.py index e1db10ecc5..49fa5941ff 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -437,7 +437,7 @@ def local(ctx, keywords, json, dir): ) print(module_list.list_modules(keywords, json)) except (UserWarning, LookupError) as e: - log.critical(e) + log.error(e) sys.exit(1) diff --git a/nf_core/lint/modules_json.py b/nf_core/lint/modules_json.py index 4cb5e2cfa7..5c6160778f 100644 --- a/nf_core/lint/modules_json.py +++ b/nf_core/lint/modules_json.py @@ -36,13 +36,16 @@ def modules_json(self): ) continue - for module in modules_json_dict["repos"][repo]["modules"]: + for module, module_entry in modules_json_dict["repos"][repo]["modules"].items(): if not Path(modules_dir, repo, module).exists(): failed.append( f"Entry for `{Path(repo, module)}` found in `modules.json` but module is not installed in pipeline." 
) all_modules_passed = False - + if module_entry.get("branch") is None: + failed.append(f"Entry for `{Path(repo, module)}` is missing branch information.") + if module_entry.get("git_sha") is None: + failed.append(f"Entry for `{Path(repo, module)}` is missing version information.") if all_modules_passed: passed.append("Only installed modules found in `modules.json`") else: diff --git a/nf_core/modules/list.py b/nf_core/modules/list.py index 7ae3e08962..cc99c406d5 100644 --- a/nf_core/modules/list.py +++ b/nf_core/modules/list.py @@ -102,7 +102,9 @@ def pattern_msg(keywords): try: # pass repo_name to get info on modules even outside nf-core/modules message, date = ModulesRepo( - remote_url=repo_entry["git_url"], base_path=repo_entry["base_path"] + remote_url=repo_entry["git_url"], + base_path=repo_entry["base_path"], + branch=module_entry["branch"], ).get_commit_info(version_sha) except LookupError as e: log.warning(e) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 99f1d38194..f50365441b 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -8,7 +8,6 @@ import git import questionary -import rich.progress import nf_core.modules.module_utils import nf_core.modules.modules_repo @@ -30,7 +29,7 @@ def __init__(self, pipeline_dir): pipeline_dir (str): The pipeline directory """ self.dir = pipeline_dir - self.modules_dir = os.path.join(self.dir, "modules") + self.modules_dir = Path(self.dir, "modules") self.modules_json = None self.pipeline_modules = None @@ -45,20 +44,20 @@ def create(self): pipeline_name = pipeline_config.get("manifest.name", "") pipeline_url = pipeline_config.get("manifest.homePage", "") modules_json = {"name": pipeline_name.strip("'"), "homePage": pipeline_url.strip("'"), "repos": dict()} - modules_dir = f"{self.dir}/modules" + modules_dir = Path(self.dir, "modules") - if not os.path.exists(modules_dir): + if not modules_dir.exists(): raise UserWarning("Can't find a ./modules directory. 
Is this a DSL2 pipeline?") - repos = self.get_pipeline_module_repositories(Path(modules_dir)) + repos, _ = self.get_pipeline_module_repositories(modules_dir) # Get all module names in the repos repo_module_names = [ ( repo_name, [ - os.path.relpath(dir_name, os.path.join(modules_dir, repo_name)) - for dir_name, _, file_names in os.walk(os.path.join(modules_dir, repo_name)) + str(Path(dir_name).relative_to(modules_dir / repo_name)) + for dir_name, _, file_names in os.walk(modules_dir / repo_name) if "main.nf" in file_names ], repo_remote, @@ -66,50 +65,33 @@ def create(self): ) for repo_name, (repo_remote, base_path) in repos.items() ] - progress_bar = rich.progress.Progress( - "[bold blue]{task.description}", - rich.progress.BarColumn(bar_width=None), - "[magenta]{task.completed} of {task.total}[reset] ยป [bold yellow]{task.fields[test_name]}", - transient=True, - ) - with progress_bar: - n_total_modules = sum(len(modules) for _, modules, _, _ in repo_module_names) - file_progress = progress_bar.add_task( - "Creating 'modules.json' file", total=n_total_modules, test_name="module.json" - ) - for repo_name, module_names, remote, base_path in sorted(repo_module_names): - try: - # Create a ModulesRepo object without progress bar to not conflict with the other one - modules_repo = nf_core.modules.modules_repo.ModulesRepo( - remote_url=remote, base_path=base_path, no_progress=True - ) - except LookupError as e: - raise UserWarning(e) - - repo_path = os.path.join(modules_dir, repo_name) - modules_json["repos"][repo_name] = dict() - modules_json["repos"][repo_name]["git_url"] = remote - modules_json["repos"][repo_name]["modules"] = dict() - modules_json["repos"][repo_name]["base_path"] = base_path - for module_name in sorted(module_names): - module_path = os.path.join(repo_path, module_name) - progress_bar.update(file_progress, advance=1, test_name=f"{repo_name}/{module_name}") - correct_commit_sha = self.find_correct_commit_sha(module_name, module_path, modules_repo) - modules_json["repos"][repo_name]["modules"][module_name] = {"git_sha": correct_commit_sha} + for repo_name, module_names, remote_url, base_path in sorted(repo_module_names): + modules_json["repos"][repo_name] = dict() + modules_json["repos"][repo_name]["git_url"] = remote_url + modules_json["repos"][repo_name]["modules"] = dict() + modules_json["repos"][repo_name]["base_path"] = base_path + modules_json["repos"][repo_name]["modules"] = self.determine_module_branches_and_shas( + repo_name, remote_url, base_path, module_names + ) - modules_json_path = os.path.join(self.dir, "modules.json") + modules_json_path = Path(self.dir, "modules.json") with open(modules_json_path, "w") as fh: json.dump(modules_json, fh, indent=4) fh.write("\n") def get_pipeline_module_repositories(self, modules_dir, repos=None): """ - Finds all module repositories in the modules directory. Ignores the local modules. + Finds all module repositories in the modules directory. + Ignores the local modules. 
+ Args: modules_dir (Path): base directory for the module files Returns - repos [ (str, str, str) ]: List of tuples of repo name, repo remote URL and path to modules in repo + repos ([ (str, str, str) ]), + renamed_dirs (dict[Path, Path]): List of tuples of repo name, repo + remote URL and path to modules in + repo """ if repos is None: repos = {} @@ -120,7 +102,8 @@ def get_pipeline_module_repositories(self, modules_dir, repos=None): nf_core.modules.modules_repo.NF_CORE_MODULES_REMOTE, nf_core.modules.modules_repo.NF_CORE_MODULES_BASE_PATH, ) - + # The function might rename some directories, keep track of them + renamed_dirs = {} # Check if there are any untracked repositories dirs_not_covered = self.dir_tree_uncovered(modules_dir, [Path(name) for name in repos]) if len(dirs_not_covered) > 0: @@ -134,7 +117,8 @@ def get_pipeline_module_repositories(self, modules_dir, repos=None): ) ) nrepo_remote = questionary.text( - "Please provide a URL for for one of the repos contained in the untracked directories." + "Please provide a URL for for one of the repos contained in the untracked directories.", + style=nf_core.utils.nfcore_question_style, ).unsafe_ask() # Verify that the remote exists while True: @@ -154,44 +138,29 @@ def get_pipeline_module_repositories(self, modules_dir, repos=None): "The directory structure should be the same as in the remote." ) dir_name = questionary.text( - "Please provide the correct directory, it will be renamed. If left empty, the remote will be ignored." + "Please provide the correct directory, it will be renamed. If left empty, the remote will be ignored.", + style=nf_core.utils.nfcore_question_style, ).unsafe_ask() if dir_name: - (modules_dir, dir_name).rename(modules_dir / nrepo_name) + old_path = modules_dir / dir_name + new_path = modules_dir / nrepo_name + old_path.rename(new_path) + renamed_dirs[old_path] = new_path else: continue # Prompt the user for the modules base path in the remote nrepo_base_path = questionary.text( f"Please provide the path of the modules directory in the remote. " - f"Will default to '{nf_core.modules.modules_repo.NF_CORE_MODULES_BASE_PATH}' if left empty." + f"Will default to '{nf_core.modules.modules_repo.NF_CORE_MODULES_BASE_PATH}' if left empty.", + style=nf_core.utils.nfcore_question_style, ).unsafe_ask() if not nrepo_base_path: nrepo_base_path = nf_core.modules.modules_repo.NF_CORE_MODULES_BASE_PATH repos[nrepo_name] = (nrepo_remote, nrepo_base_path) dirs_not_covered = self.dir_tree_uncovered(modules_dir, [Path(name) for name in repos]) - return repos - - def find_correct_commit_sha(self, module_name, module_path, modules_repo): - """ - Returns the SHA for the latest commit where the local files are identical to the remote files - Args: - module_name (str): Name of module - module_path (str): Path to module in local repo - module_repo (str): Remote repo for module - Returns: - commit_sha (str): The latest commit SHA where local files are identical to remote files, - or None if no commit is found - """ - # Find the correct commit SHA for the local module files. 
- # We iterate over the commit history for the module until we find - # a revision that matches the file contents - commit_shas = (commit["git_sha"] for commit in modules_repo.get_module_git_log(module_name, depth=1000)) - for commit_sha in commit_shas: - if all(modules_repo.module_files_identical(module_name, module_path, commit_sha).values()): - return commit_sha - return None + return repos, renamed_dirs def dir_tree_uncovered(self, modules_dir, repos): """ @@ -207,7 +176,6 @@ def dir_tree_uncovered(self, modules_dir, repos): Returns: dirs_not_covered ([ Path ]): A list of directories that are currently not covered by any remote. """ - # Initialise the FIFO queue. Note that we assume the directory to be correctly # configured, i.e. no files etc. fifo = [subdir for subdir in modules_dir.iterdir() if subdir.stem != "local"] @@ -229,69 +197,254 @@ def dir_tree_uncovered(self, modules_dir, repos): depth += 1 return dirs_not_covered - def check_up_to_date(self): + def determine_module_branches_and_shas(self, repo_name, remote_url, base_path, modules): """ - Checks whether the modules installed in the directory - are consistent with the entries in the 'modules.json' file and vice versa. + Determines what branch and commit sha each module in the pipeline belong to - If a module has an entry in the 'modules.json' file but is missing in the directory, - we first try to reinstall the module from the remote and if that fails we remove the entry - in 'modules.json'. + Assumes all modules are installed from the default branch. If it fails to find the + module in the default branch, it prompts the user with the available branches - If a module is installed but the entry in 'modules.json' is missing we iterate through - the commit log in the remote to try to determine the SHA. + Args: + repo_name (str): The name of the module repository + remote_url (str): The url to the remote repository + base_path (Path): The base path in the remote + modules_base_path (Path): The path to the modules directory in the pipeline + modules ([str]): List of names of installed modules from the repository + + Returns: + (dict[str, dict[str, str]]): The module.json entries for the modules + from the repository + """ + default_modules_repo = nf_core.modules.modules_repo.ModulesRepo(remote_url=remote_url, base_path=base_path) + repo_path = self.modules_dir / repo_name + # Get the branches present in the repository, as well as the default branch + available_branches = nf_core.modules.modules_repo.ModulesRepo.get_remote_branches(remote_url) + sb_local = [] + dead_modules = [] + repo_entry = {} + for module in sorted(modules): + modules_repo = default_modules_repo + module_path = repo_path / module + correct_commit_sha = None + tried_branches = {default_modules_repo.branch} + found_sha = False + while True: + correct_commit_sha = self.find_correct_commit_sha(module, module_path, modules_repo) + if correct_commit_sha is None: + log.info(f"Was unable to find matching module files in the {modules_repo.branch} branch.") + choices = [{"name": "No", "value": None}] + [ + {"name": branch, "value": branch} for branch in (available_branches - tried_branches) + ] + branch = questionary.select( + "Was the modules installed from a different branch in the remote?", + choices=choices, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + if branch is None: + action = questionary.select( + f"Module is untracked '{module}'. 
Please select what action to take", + choices=[ + {"name": "Move the directory to 'local'", "value": 0}, + {"name": "Remove the files", "value": 1}, + ], + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + if action == 0: + sb_local.append(module) + else: + dead_modules.append(module) + break + # Create a new modules repo with the selected branch, and retry finding the sha + modules_repo = nf_core.modules.modules_repo.ModulesRepo( + remote_url=remote_url, base_path=base_path, branch=branch, no_pull=True, no_progress=True + ) + else: + found_sha = True + break + if found_sha: + repo_entry[module] = {"branch": modules_repo.branch, "git_sha": correct_commit_sha} + + # Clean up the modules we were unable to find the sha for + for module in sb_local: + log.debug(f"Moving module '{Path(repo_name, module)}' to 'local' directory") + self.move_module_to_local(module, repo_name) + + for module in dead_modules: + log.debug(f"Removing module '{Path(repo_name, module)}'") + shutil.rmtree(repo_path / module) + + return repo_entry + + def find_correct_commit_sha(self, module_name, module_path, modules_repo): """ - self.load() - old_modules_json = copy.deepcopy(self.modules_json) + Returns the SHA for the latest commit where the local files are identical to the remote files + Args: + module_name (str): Name of module + module_path (str): Path to module in local repo + module_repo (str): Remote repo for module + Returns: + commit_sha (str): The latest commit SHA where local files are identical to remote files, + or None if no commit is found + """ + # Find the correct commit SHA for the local module files. + # We iterate over the commit history for the module until we find + # a revision that matches the file contents + commit_shas = (commit["git_sha"] for commit in modules_repo.get_module_git_log(module_name, depth=1000)) + for commit_sha in commit_shas: + if all(modules_repo.module_files_identical(module_name, module_path, commit_sha).values()): + return commit_sha + return None + + def move_module_to_local(self, module, repo_name): + """ + Move a module to the 'local' directory - # Compute the difference between the modules in the directory - # and the modules in the 'modules.json' file - # This is done by looking at all directories containing - # a 'main.nf' file + Args: + module (str): The name of the module + repo_name (str): The name of the repository the module resides in + """ + current_path = (self.modules_dir / repo_name) / module + local_modules_dir = self.modules_dir / "local" + if not local_modules_dir.exists(): + local_modules_dir.mkdir() + + to_name = module + # Check if there is already a subdirectory with the name + while (local_modules_dir / to_name).exists(): + # Add a time suffix to the path to make it unique + # (do it again and again if it didn't work out...) + to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}" + shutil.move(current_path, local_modules_dir / to_name) + + def unsynced_modules(self): + """ + Compute the difference between the modules in the directory and the + modules in the 'modules.json' file.
This is done by looking at all + directories containing a 'main.nf' file + + Returns: + (untrack_dirs ([ Path ]), + missing_installation (dict)): Directories that are not tracked + by the modules.json file, and modules in the modules.json that + where the installation directory is missing + """ + missing_installation = copy.deepcopy(self.modules_json["repos"]) dirs = [ - os.path.relpath(dir_name, start=self.modules_dir) + Path(dir_name).relative_to(self.modules_dir) for dir_name, _, file_names in os.walk(self.modules_dir) - if "main.nf" in file_names and not os.path.relpath(dir_name, start=self.modules_dir).startswith("local") + if "main.nf" in file_names and not str(Path(dir_name).relative_to(self.modules_dir)).startswith("local") ] - - missing_from_modules_json = [] - repo_names = list(old_modules_json["repos"].keys()) + untracked_dirs = [] for dir in dirs: # Check if the modules directory exists module_repo_name = None - for repo in repo_names: - if dir.startswith(repo + os.sep): + for repo in missing_installation: + if str(dir).startswith(repo + os.sep): module_repo_name = repo break if module_repo_name is not None: # If it does, check if the module is in the 'modules.json' file - modules_path = os.path.relpath(dir, start=module_repo_name) + module = str(dir.relative_to(module_repo_name)) - if module_repo_name not in old_modules_json["repos"]: - missing_from_modules_json.append(dir) - elif modules_path not in old_modules_json["repos"][module_repo_name].get("modules", {}): - missing_from_modules_json.append(dir) + if module not in missing_installation[module_repo_name].get("modules", {}): + untracked_dirs.append(dir) else: - old_modules_json["repos"][module_repo_name]["modules"].pop(modules_path) - if len(old_modules_json["repos"][module_repo_name]["modules"]) == 0: - old_modules_json["repos"].pop(module_repo_name) + # Check if the entry has a git sha and branch before removing + modules = missing_installation[module_repo_name]["modules"] + if "git_sha" not in modules[module] or "branch" not in modules[module]: + raise UserWarning( + "The 'modules.json' file is not up to date. " + "Please reinstall it by removing it and rerunning the command." + ) + missing_installation[module_repo_name]["modules"].pop(module) + if len(missing_installation[module_repo_name]["modules"]) == 0: + missing_installation.pop(module_repo_name) else: # If it is not, add it to the list of missing modules - missing_from_modules_json.append(dir) - - # Check which repos are missing the remote url or base path - for repo, values in old_modules_json.get("repos", {}).items(): - if "git_url" not in values or "base_path" not in values: - raise UserWarning( - "The 'modules.json' file is not up to date. " - "Please reinstall it by removing it and rerunning the command." 
+ untracked_dirs.append(dir) + + return untracked_dirs, missing_installation + + def has_git_url_and_base_path(self): + """ + Check that that all repo entries in the modules.json + has a git url and a base_path + + Returns: + (bool): True if they are found for all repos, False otherwise + """ + for repo_entry in self.modules_json.get("repos", {}).values(): + if "git_url" not in repo_entry or "base_path" not in repo_entry: + return False + return True + + def reinstall_repo(self, repo_name, remote_url, base_path, module_entries): + """ + Reinstall modules from a repository + + Args: + repo_name (str): The name of the repository + remote_url (str): The git url of the remote repository + base_path (Path): The base path in the repository + modules ([ dict[str, dict[str, str]] ]): Module entries with + branch and git sha info + + Returns: + ([ str ]): List of modules that we failed to install + """ + branches_and_mods = {} + failed_to_install = [] + for module, module_entry in module_entries.items(): + if "git_sha" not in module_entry or "branch" not in module_entry: + failed_to_install.append(module) + else: + branch = module_entry["branch"] + sha = module_entry["git_sha"] + if branch not in branches_and_mods: + branches_and_mods[branch] = [] + branches_and_mods[branch].append((module, sha)) + + for branch, modules in branches_and_mods.items(): + try: + modules_repo = nf_core.modules.modules_repo.ModulesRepo( + remote_url=remote_url, branch=branch, base_path=base_path ) + except LookupError as e: + log.error(e) + failed_to_install.extend(modules) + for module, sha in modules: + if not modules_repo.install_module(module, (self.modules_dir / repo_name), sha): + log.warning(f"Could not install module '{Path(repo_name, module)}' - removing from modules.json") + failed_to_install.append(module) + return failed_to_install + + def check_up_to_date(self): + """ + Checks whether the modules installed in the directory + are consistent with the entries in the 'modules.json' file and vice versa. + + If a module has an entry in the 'modules.json' file but is missing in the directory, + we first try to reinstall the module from the remote and if that fails we remove the entry + in 'modules.json'. + + If a module is installed but the entry in 'modules.json' is missing we iterate through + the commit log in the remote to try to determine the SHA. + """ + self.load() + if not self.has_git_url_and_base_path(): + raise UserWarning( + "The 'modules.json' file is not up to date. " + "Please reinstall it by removing it and rerunning the command." 
+ ) + + missing_from_modules_json, missing_installation = self.unsynced_modules() + # If there are any modules left in 'modules.json' after all installed are removed, # we try to reinstall them - if len(old_modules_json["repos"]) > 0: + if len(missing_installation) > 0: missing_but_in_mod_json = [ f"'{repo}/{module}'" - for repo, contents in old_modules_json["repos"].items() + for repo, contents in missing_installation.items() for module in contents["modules"] ] log.info( @@ -299,30 +452,11 @@ def check_up_to_date(self): ) remove_from_mod_json = {} - for repo, contents in old_modules_json["repos"].items(): - modules = contents["modules"] - remote = contents["git_url"] + for repo, contents in missing_installation.items(): + module_entries = contents["modules"] + remote_url = contents["git_url"] base_path = contents["base_path"] - - modules_repo = nf_core.modules.modules_repo.ModulesRepo(remote_url=remote, base_path=base_path) - install_dir = os.path.join(self.dir, "modules", modules_repo.fullname) - - for module, entry in modules.items(): - sha = entry.get("git_sha") - if sha is None: - if repo not in remove_from_mod_json: - remove_from_mod_json[repo] = [] - log.warning( - f"Could not find git SHA for module '{module}' in '{repo}' - removing from modules.json" - ) - remove_from_mod_json[repo].append(module) - continue - if not modules_repo.install_module(module, install_dir, sha): - if repo not in remove_from_mod_json: - remove_from_mod_json[repo] = [] - log.warning(f"Could not install module '{module}' in '{repo}' - removing from modules.json") - remove_from_mod_json[repo].append(module) - continue + remove_from_mod_json[repo] = self.reinstall_repo(repo, remote_url, base_path, module_entries) # If the reinstall fails, we remove those entries in 'modules.json' if sum(map(len, remove_from_mod_json.values())) > 0: @@ -336,18 +470,15 @@ def check_up_to_date(self): f"Was unable to reinstall some modules. Removing 'modules.json' entries: {', '.join(uninstallable_mods)}" ) - for repo, modules in remove_from_mod_json.items(): - for module in modules: + for repo, module_entries in remove_from_mod_json.items(): + for module in module_entries: self.modules_json["repos"][repo]["modules"].pop(module) if len(self.modules_json["repos"][repo]["modules"]) == 0: self.modules_json["repos"].pop(repo) # If some modules didn't have an entry in the 'modules.json' file # we try to determine the SHA from the commit log of the remote - dead_repos = [] - sb_local_repos = [] if len(missing_from_modules_json) > 0: - format_missing = [f"'{dir}'" for dir in missing_from_modules_json] if len(format_missing) == 1: log.info(f"Recomputing commit SHA for module {format_missing[0]} which was missing from 'modules.json'") @@ -356,114 +487,37 @@ def check_up_to_date(self): f"Recomputing commit SHAs for modules which were missing from 'modules.json': {', '.join(format_missing)}" ) - for dir in missing_from_modules_json: - choice = questionary.select( - f"Found untracked file '{dir}'. 
Please select a choice", - choices=[ - {"name": "Provide the remote", "value": 0}, - {"name": "Move the directory to 'local'", "value": 1}, - {"name": "Remove the files", "value": 2}, - ], - ).unsafe_ask() - if choice == 0: - # Ask the user if the module belongs to an installed remote - choices = [{"name": "No", "value": (None, None)}] + [ - { - "name": f"'{repo}' ({self.modules_json['repos'][repo]['git_url']})", - "value": ( - self.modules_json["repos"][repo]["git_url"], - self.modules_json["repos"][repo]["base_path"], - ), - } - for repo in self.modules_json["repos"] - ] - remote, base_path = questionary.select( - "Does the module belong to an installed remote?", - choices=choices, - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - if remote is None: - while True: - remote = questionary.text( - "Please provide the URL of the remote", style=nf_core.utils.nfcore_question_style - ).unsafe_ask() - # Verify that the name is consistent with the untracked file - repo = nf_core.modules.module_utils.path_from_remote(remote) - if not dir.startswith(repo): - log.info("The module name does not match the remote name") - continue - # Verify that the remote exists - try: - git.Git().ls_remote(remote) - except git.exc.GitCommandError: - log.info("The remote does not exist") - continue - # Ask the user for the modules base path in the remote - base_path = questionary.text( - f"Please provide the path of the modules directory in the remote. " - f"Will default to '{nf_core.modules.modules_repo.NF_CORE_MODULES_BASE_PATH}' if left empty." - ).unsafe_ask() - if not base_path: - base_path = nf_core.modules.modules_repo.NF_CORE_MODULES_BASE_PATH - break - else: - repo = nf_core.modules.module_utils.path_from_remote(remote) - elif choice == 1: - sb_local_repos.append(repo) - continue - else: - dead_repos.append(repo) - continue - - modules_repo = nf_core.modules.modules_repo.ModulesRepo(remote_url=remote, base_path=base_path) - repo_path = os.path.join(self.dir, "modules", repo) - module = os.path.relpath(dir, repo) - module_path = os.path.join(repo_path, module) - correct_commit_sha = self.find_correct_commit_sha(module, module_path, modules_repo) - if correct_commit_sha is not None: - if repo not in self.modules_json["repos"]: - self.modules_json["repos"][repo] = {"git_url": remote, "base_path": base_path, "modules": {}} + # Get the remotes we are missing + tracked_repos = { + repo_name: (repo_entry["git_url"], repo_entry["base_path"]) + for repo_name, repo_entry in self.modules_json["repos"].items() + } + repos, renamed_dirs = self.get_pipeline_module_repositories(self.modules_dir, tracked_repos) + + modules_with_repos = ( + (repo_name, str(dir.relative_to(repo_name))) + for dir in missing_from_modules_json + for repo_name in repos + if nf_core.utils.is_relative_to(dir, repo_name) + ) - self.modules_json["repos"][repo]["modules"][module] = {"git_sha": correct_commit_sha} + repos_with_modules = {} + for repo_name, module in modules_with_repos: + if repo_name not in repos_with_modules: + repos_with_modules[repo_name] = [] + repos_with_modules[repo_name].append(module) + + for repo_name, modules in repos_with_modules.items(): + remote_url, base_path = repos[repo_name] + repo_entry = self.determine_module_branches_and_shas(repo_name, remote_url, base_path, modules) + if repo_name in self.modules_json["repos"]: + self.modules_json["repos"][repo_name]["modules"].update(repo_entry) else: - choices = [ - {"name": "Move the directory to local", "value": 0}, - {"name": "Remove the files", "value": 1}, - ] 
- choice = questionary.select(f"Could not find commit SHA for {dir}", choices=choices).unsafe_ask() - if choice == 0: - sb_local_repos.append(repo) - continue - else: - dead_repos.append(repo) - continue - - # Remove the requested repos - for repo in dead_repos: - path = os.path.join(self.dir, "modules", repo) - shutil.rmtree(path) - - # Copy the untracked repos to local - for repo in sb_local_repos: - modules_path = os.path.join(self.dir, "modules") - path = os.path.join(modules_path, repo) - local_path = os.path.join(modules_path, "local") - - # Create the local module directory if it doesn't already exist - if not os.path.exists(local_path): - os.makedirs(local_path) - - # Check if there is already a subdirectory with the name - if os.path.exists(os.path.join(local_path, to_path)): - to_path = path - while os.path.exists(os.path.join(local_path, to_path)): - # Add a time suffix to the path to make it unique - # (do it again and again if it didn't work out...) - to_path += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}" - shutil.move(path, to_path) - path = to_path - - shutil.move(path, local_path) + self.modules_json["repos"][repo_name] = { + "git_url": remote_url, + "base_path": base_path, + "modules": repo_entry, + } self.dump() @@ -498,12 +552,14 @@ def update(self, modules_repo, module_name, module_version, write_file=True): repo_name = modules_repo.fullname remote_url = modules_repo.remote_url base_path = modules_repo.base_path + branch = modules_repo.branch if repo_name not in self.modules_json["repos"]: self.modules_json["repos"][repo_name] = {"modules": {}, "git_url": remote_url, "base_path": base_path} repo_modules_entry = self.modules_json["repos"][repo_name]["modules"] if module_name not in repo_modules_entry: repo_modules_entry[module_name] = {} repo_modules_entry[module_name]["git_sha"] = module_version + repo_modules_entry[module_name]["branch"] = branch # Sort the 'modules.json' repo entries self.modules_json["repos"] = nf_core.utils.sort_dictionary(self.modules_json["repos"]) @@ -670,6 +726,25 @@ def get_all_modules(self): return self.pipeline_modules + def get_module_branch(self, module, repo_name): + """ + Gets the branch from which the module was installed + + Returns: + (str): The branch name + Raises: + LookupError: If their is no branch entry in the `modules.json` + """ + if self.modules_json is None: + self.load() + branch = self.modules_json["repos"].get(repo_name, {}).get("modules", {}).get(module, {}).get("branch") + if branch is None: + raise LookupError( + f"Could not find branch information for module '{Path(repo_name, module)}'." 
+ f"Please remove the 'modules.json' and rerun the command to recreate it" + ) + return branch + def dump(self): """ Sort the modules.json, and write it to file @@ -680,3 +755,11 @@ def dump(self): with open(modules_json_path, "w") as fh: json.dump(self.modules_json, fh, indent=4) fh.write("\n") + + def __str__(self): + if self.modules_json is None: + self.load() + return json.dumps(self.modules_json, indent=4) + + def __repr__(self): + return self.__str__() diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 0aae5383e4..1551ec5366 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -2,6 +2,7 @@ import logging import os import shutil +from pathlib import Path import git import rich.progress @@ -16,6 +17,7 @@ NF_CORE_MODULES_NAME = "nf-core/modules" NF_CORE_MODULES_REMOTE = "https://github.com/nf-core/modules.git" NF_CORE_MODULES_BASE_PATH = "modules" +NF_CORE_MODULES_DEFAULT_BRANCH = "master" class RemoteProgressbar(git.RemoteProgress): @@ -84,6 +86,31 @@ def update_local_repo_status(repo_name, up_to_date): """ ModulesRepo.local_repo_statuses[repo_name] = up_to_date + @staticmethod + def get_remote_branches(remote_url): + """ + Get all branches from a remote repository + + Args: + remote_url (str): The git url to the remote repository + + Returns: + (set[str]): All branches found in the remote + """ + try: + unparsed_branches = git.Git().ls_remote(remote_url) + except git.GitCommandError: + raise LookupError(f"Was unable to fetch branches from '{remote_url}'") + else: + branches = {} + for branch_info in unparsed_branches.split("\n"): + sha, name = branch_info.split("\t") + if name != "HEAD": + # The remote branches are shown as 'ref/head/branch' + branch_name = Path(name).stem + branches[sha] = branch_name + return set(branches.values()) + def __init__(self, remote_url=None, branch=None, no_pull=False, base_path=None, no_progress=False): """ Initializes the object and clones the git repository if it is not already present diff --git a/nf_core/modules/patch.py b/nf_core/modules/patch.py index 8104a939ea..8827ed711a 100644 --- a/nf_core/modules/patch.py +++ b/nf_core/modules/patch.py @@ -51,6 +51,11 @@ def patch(self, module=None): raise UserWarning( f"The '{module_fullname}' module does not have a valid version in the 'modules.json' file. Cannot compute patch" ) + # Get the module branch and reset it in the ModulesRepo object + module_branch = self.modules_json.get_module_branch(module, self.modules_repo.fullname) + if module_branch != self.modules_repo.branch: + self.modules_repo.setup_branch(module_branch) + # Set the diff filename based on the module name patch_filename = f"{module.replace('/', '-')}.diff" module_relpath = Path("modules", self.modules_repo.fullname, module) diff --git a/nf_core/modules/update.py b/nf_core/modules/update.py index 2b25011873..5ca96a1136 100644 --- a/nf_core/modules/update.py +++ b/nf_core/modules/update.py @@ -44,6 +44,7 @@ def __init__( self.module = None self.update_config = None self.modules_json = ModulesJson(self.dir) + self.branch = branch class DiffEnum(enum.Enum): """Enumeration to keeping track of file diffs. @@ -141,8 +142,9 @@ def update(self, module=None): # and do the requested action on them exit_value = True all_patches_successful = True + print(modules_info) for modules_repo, module, sha, patch_relpath in modules_info: - + print(sha) module_fullname = str(Path(modules_repo.fullname, module)) # Are we updating the files in place or not? 
dry_run = self.show_diff or self.save_diff_fn @@ -315,12 +315,25 @@ def get_single_module_info(self, module): log.info(f"Found entry in '.nf-core.yml' for module '{module}'") log.info(f"Updating module to ({sha})") + # Check if the update branch is the same as the installation branch + current_branch = self.modules_json.get_module_branch(module, self.modules_repo.fullname) + new_branch = self.modules_repo.branch + if current_branch != new_branch: + log.warning( + f"You are trying to update the '{Path(self.modules_repo.fullname, module)}' module from " + f"the '{new_branch}' branch. This module was installed from the '{current_branch}' branch." + ) + switch = questionary.confirm(f"Do you want to update using the '{current_branch}' branch instead?").unsafe_ask() + if switch: + # Change the branch + self.modules_repo.setup_branch(current_branch) + # If there is a patch file, get its filename patch_fn = self.modules_json.get_patch_fn(module, self.modules_repo.fullname) return (self.modules_repo, module, sha, patch_fn) - def get_all_modules_info(self): + def get_all_modules_info(self, branch=None): """Collects the module repository, version and sha for all modules. Information about the module version in the '.nf-core.yml' overrides the '--sha' option. @@ -329,6 +342,12 @@ [(ModulesRepo, str, str)]: A list of tuples containing a ModulesRepo object, the module name, and the module version. """ + if branch is not None: + use_branch = questionary.confirm( + "'--branch' was specified. Should this branch be used to update all modules?", default=False + ).unsafe_ask() + if not use_branch: + branch = None skipped_repos = [] skipped_modules = [] overridden_repos = [] @@ -338,18 +357,24 @@ # and check if they have an entry in the '.nf-core.yml' file for repo_name, modules in self.modules_json.get_all_modules().items(): if repo_name not in self.update_config or self.update_config[repo_name] is True: - modules_info[repo_name] = [(module, self.sha) for module in modules] + modules_info[repo_name] = [ + (module, self.sha, self.modules_json.get_module_branch(module, repo_name)) for module in modules + ] elif isinstance(self.update_config[repo_name], dict): # If it is a dict, then there are entries for individual modules repo_config = self.update_config[repo_name] modules_info[repo_name] = [] for module in modules: if module not in repo_config or repo_config[module] is True: - modules_info[repo_name].append((module, self.sha)) + modules_info[repo_name].append( + (module, self.sha, self.modules_json.get_module_branch(module, repo_name)) + ) elif isinstance(repo_config[module], str): # If a string is given it is the commit SHA to which we should update to custom_sha = repo_config[module] - modules_info[repo_name].append((module, custom_sha)) + modules_info[repo_name].append( + (module, custom_sha, self.modules_json.get_module_branch(module, repo_name)) + ) if self.sha is not None: overridden_modules.append(module) elif repo_config[module] is False: @@ -360,7 +385,10 @@ elif isinstance(self.update_config[repo_name], str): # If a string is given it is the commit SHA to which we should update to custom_sha = self.update_config[repo_name] - modules_info[repo_name] = [(module_name, custom_sha) for module_name in modules] + modules_info[repo_name] = [ + (module_name, custom_sha, self.modules_json.get_module_branch(module_name, repo_name)) + for module_name in modules + ] if self.sha is not None: overridden_repos.append(repo_name) elif
self.update_config[repo_name] is False: @@ -389,32 +417,51 @@ f"Overriding '--sha' flag for module{plural_s(overridden_modules)} with " f"'.nf-core.yml' entry: '{overridden_str}'" ) + # Loop through modules_info and create one ModulesRepo object per remote and branch + repos_and_branches = {} + for repo_name, mods in modules_info.items(): + for mod, sha, mod_branch in mods: + if branch is not None: + mod_branch = branch + if (repo_name, mod_branch) not in repos_and_branches: + repos_and_branches[(repo_name, mod_branch)] = [] + repos_and_branches[(repo_name, mod_branch)].append((mod, sha)) # Get the git urls from the modules.json - modules_info = [ - (self.modules_json.get_git_url(repo_name), self.modules_json.get_base_path(repo_name), mods_shas) - for repo_name, mods_shas in modules_info.items() - ] + modules_info = ( + (self.modules_json.get_git_url(repo_name), branch, self.modules_json.get_base_path(repo_name), mods_shas) + for (repo_name, branch), mods_shas in repos_and_branches.items() + ) # Create ModulesRepo objects - modules_info = [ - (ModulesRepo(remote_url=repo_url, base_path=base_path), mods_shas) - for repo_url, base_path, mods_shas in modules_info - ] + repo_objs_mods = [] + for repo_url, branch, base_path, mods_shas in modules_info: + try: + modules_repo = ModulesRepo(remote_url=repo_url, branch=branch, base_path=base_path) + except LookupError as e: + log.warning(e) + log.info(f"Skipping modules in '{repo_url}'") + else: + repo_objs_mods.append((modules_repo, mods_shas)) - # Flatten and return the list - modules_info = [(repo, mod, sha) for repo, mods_shas in modules_info for mod, sha in mods_shas] + # Flatten the list + modules_info = [(repo, mod, sha) for repo, mods_shas in repo_objs_mods for mod, sha in mods_shas] - # Verify that that all modules exist in their respective ModulesRepo, + # Verify that all modules and shas exist in their respective ModulesRepo, # don't try to update those that don't i = 0 while i < len(modules_info): - repo, module, _ = modules_info[i] + repo, module, sha = modules_info[i] - if repo.module_exists(module): - i += 1 - else: + if not repo.module_exists(module): log.warning(f"Module '{module}' does not exist in '{repo.fullname}'. Skipping...") modules_info.pop(i) + elif sha is not None and not repo.sha_exists_on_branch(sha): + log.warning( + f"Git sha '{sha}' does not exist on branch '{repo.branch}' of '{repo.fullname}'. Skipping module '{module}'" + ) + modules_info.pop(i) + else: + i += 1 # Add patch filenames to the modules that have them modules_info = [ @@ -426,7 +473,7 @@ def setup_diff_file(self): """Sets up the diff file. - If the save diff option was choosen interactively, the user is asked to supply a name for the diff file. + If the save diff option was chosen interactively, the user is asked to supply a name for the diff file. Then creates the file for saving the diff.
""" diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 7d534dad65..a14671c6ed 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -7,13 +7,16 @@ "git_url": "https://github.com/nf-core/modules.git", "modules": { "custom/dumpsoftwareversions": { - "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d" + "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d", + "branch": "master" }, "fastqc": { - "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d" + "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d", + "branch": "master" }, "multiqc": { - "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d" + "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d", + "branch": "master" } } } diff --git a/nf_core/utils.py b/nf_core/utils.py index e8a284a1d3..916f2a4dcd 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1006,3 +1006,15 @@ def strip_ansi_codes(string, replace_with=""): From Stack Overflow: https://stackoverflow.com/a/14693789/713980 """ return ANSI_ESCAPE_RE.sub(replace_with, string) + + +def is_relative_to(path1, path2): + """ + Checks if a path is relative to another. + + Should mimic Path.is_relative_to which not available in Python < 3.9 + + path1 (Path | str): The path that could be a subpath + path2 (Path | str): The path the could be the superpath + """ + return str(path1).startswith(str(path2) + os.sep) diff --git a/tests/modules/install.py b/tests/modules/install.py index 88bce485ff..d2b13c2aee 100644 --- a/tests/modules/install.py +++ b/tests/modules/install.py @@ -2,7 +2,15 @@ import pytest -from ..utils import with_temporary_folder +from nf_core.modules.install import ModuleInstall +from nf_core.modules.modules_json import ModulesJson + +from ..utils import ( + GITLAB_BRANCH_TEST_BRANCH, + GITLAB_REPO, + GITLAB_URL, + with_temporary_folder, +) def test_modules_install_nopipeline(self): @@ -43,9 +51,19 @@ def test_modules_install_from_gitlab(self): assert self.mods_install_gitlab.install("fastqc") is True -# TODO Remove comments once external repository to have same structure as nf-core/modules -# def test_modules_install_trimgalore_alternative_source(self): -# """Test installing a module from a different source repository - TrimGalore!""" -# assert self.mods_install_alt.install("trimgalore") is not False -# module_path = os.path.join(self.mods_install.dir, "modules", "ewels", "nf-core-modules", "trimgalore") -# assert os.path.exists(module_path) +def test_modules_install_different_branch_fail(self): + """Test installing a module from a different branch""" + install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) + # The FastQC module does not exists in the branch-test branch + assert install_obj.install("fastqc") is False + + +def test_modules_install_different_branch_succeed(self): + """Test installing a module from a different branch""" + install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) + # The fastp module does exists in the branch-test branch + assert install_obj.install("fastp") is True + + # Verify that the branch entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + assert modules_json.get_module_branch("fastp", GITLAB_REPO) == GITLAB_BRANCH_TEST_BRANCH diff --git a/tests/modules/modules_json.py b/tests/modules/modules_json.py index 7b9a635d95..35808c529b 100644 --- a/tests/modules/modules_json.py +++ b/tests/modules/modules_json.py @@ -5,6 +5,7 @@ 
from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import ( NF_CORE_MODULES_BASE_PATH, + NF_CORE_MODULES_DEFAULT_BRANCH, NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, ModulesRepo, @@ -34,6 +35,7 @@ def test_mod_json_update(self): assert "MODULE_NAME" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"] assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["MODULE_NAME"] assert "GIT_SHA" == mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["MODULE_NAME"]["git_sha"] + assert NF_CORE_MODULES_DEFAULT_BRANCH == mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["MODULE_NAME"]["branch"] def test_mod_json_create(self): @@ -57,6 +59,7 @@ def test_mod_json_create(self): for mod in mods: assert mod in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"] assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"][mod] + assert "branch" in mod_json["repos"][NF_CORE_MODULES_NAME]["modules"][mod] def test_mod_json_up_to_date(self): diff --git a/tests/modules/update.py b/tests/modules/update.py index fd20b4a7b7..f49d2be257 100644 --- a/tests/modules/update.py +++ b/tests/modules/update.py @@ -6,11 +6,20 @@ import yaml import nf_core.utils +from nf_core.modules.install import ModuleInstall from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME from nf_core.modules.update import ModuleUpdate -from ..utils import OLD_TRIMGALORE_SHA +from ..utils import ( + GITLAB_BRANCH_TEST_BRANCH, + GITLAB_BRANCH_TEST_NEW_SHA, + GITLAB_BRANCH_TEST_OLD_SHA, + GITLAB_DEFAULT_BRANCH, + GITLAB_REPO, + GITLAB_URL, + OLD_TRIMGALORE_SHA, +) def test_install_and_update(self): @@ -46,7 +55,7 @@ def test_install_at_hash_and_update(self): mod_json_obj = ModulesJson(self.pipeline_dir) mod_json = mod_json_obj.get_modules_json() # Get the up-to-date git_sha for the module from the ModulesRepo object - correct_git_sha = list(update_obj.modules_repo.get_module_git_log("trimgalore", depth=1))[0]["git_sha"] + correct_git_sha = update_obj.modules_repo.get_latest_module_version("trimgalore") current_git_sha = mod_json["repos"][NF_CORE_MODULES_NAME]["modules"]["trimgalore"]["git_sha"] assert correct_git_sha == current_git_sha @@ -179,6 +188,62 @@ def test_update_with_config_no_updates(self): ) +def test_update_different_branch_single_module(self): + """Try updating a module in a specific branch""" + install_obj = ModuleInstall( + self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, sha=GITLAB_BRANCH_TEST_OLD_SHA + ) + install_obj.install("fastp") + update_obj = ModuleUpdate( + self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, show_diff=False + ) + update_obj.update("fastp") + + # Verify that the branch entry was updated correctly + modules_json = ModulesJson(self.pipeline_dir) + assert modules_json.get_module_branch("fastp", GITLAB_REPO) == GITLAB_BRANCH_TEST_BRANCH + assert modules_json.get_module_version("fastp", GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA + + +def test_update_different_branch_mixed_modules_main(self): + """Try updating all modules where MultiQC is installed from main branch""" + # Install fastp + install_obj = ModuleInstall( + self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, sha=GITLAB_BRANCH_TEST_OLD_SHA + ) + install_obj.install("fastp") + + # Install MultiQC from gitlab default branch + install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH) + install_obj.install("multiqc") + + # 
Try updating + update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + modules_json = ModulesJson(self.pipeline_dir) + # Verify that the branch entry was updated correctly + assert modules_json.get_module_branch("fastp", GITLAB_REPO) == GITLAB_BRANCH_TEST_BRANCH + assert modules_json.get_module_version("fastp", GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA + # MultiQC is present in both branches but should've been updated using the 'main' branch + assert modules_json.get_module_branch("multiqc", GITLAB_REPO) == GITLAB_DEFAULT_BRANCH + + +def test_update_different_branch_mix_modules_branch_test(self): + """Try updating all modules where MultiQC is installed from branch-test branch""" + # Install multiqc from the branch-test branch + install_obj = ModuleInstall( + self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, sha=GITLAB_BRANCH_TEST_OLD_SHA + ) + install_obj.install("multiqc") + update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) + update_obj.update() + + modules_json = ModulesJson(self.pipeline_dir) + assert modules_json.get_module_branch("multiqc", GITLAB_REPO) == GITLAB_BRANCH_TEST_BRANCH + assert modules_json.get_module_version("multiqc", GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA + + def cmp_module(dir1, dir2): """Compare two versions of the same module""" files = ["main.nf", "meta.yml"] diff --git a/tests/test_modules.py b/tests/test_modules.py index c3a9abfbf8..2ceb2ab08f 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -101,6 +101,8 @@ def test_modulesrepo_class(self): test_modules_test_file_dict, ) from .modules.install import ( + test_modules_install_different_branch_fail, + test_modules_install_different_branch_succeed, test_modules_install_emptypipeline, test_modules_install_from_gitlab, test_modules_install_nomodule, @@ -158,6 +160,9 @@ def test_modulesrepo_class(self): test_install_at_hash_and_update, test_install_at_hash_and_update_and_save_diff_to_file, test_update_all, + test_update_different_branch_mix_modules_branch_test, + test_update_different_branch_mixed_modules_main, + test_update_different_branch_single_module, test_update_with_config_dont_update, test_update_with_config_fix_all, test_update_with_config_fixed_version, diff --git a/tests/utils.py b/tests/utils.py index a844d774f8..03bfe272a0 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -10,6 +10,11 @@ OLD_TRIMGALORE_SHA = "20d8250d9f39ddb05dfb437603aaf99b5c0b2b41" GITLAB_URL = "https://gitlab.com/nf-core/modules-test.git" GITLAB_REPO = "nf-core/modules-test" +GITLAB_DEFAULT_BRANCH = "main" +# Branch test stuff +GITLAB_BRANCH_TEST_BRANCH = "branch-tester" +GITLAB_BRANCH_TEST_OLD_SHA = "eb4bc244de7eaef8e8ff0d451e4ca2e4b2c29821" +GITLAB_BRANCH_TEST_NEW_SHA = "e43448a2cc17d59e085c4d3f77489af5a4dcc26d" def with_temporary_folder(func):
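For reference, after these changes every module entry in `modules.json` records both the commit and the branch it was installed from. A minimal sketch of the resulting structure, shown here as the Python dictionary that `ModulesJson.create()` serialises with `json.dump()`; the pipeline name and home page are illustrative, the `fastqc` entry matches the pipeline template above:

```python
modules_json = {
    "name": "nf-core/testpipeline",  # from manifest.name (illustrative)
    "homePage": "https://nf-co.re/testpipeline",  # from manifest.homePage (illustrative)
    "repos": {
        "nf-core/modules": {
            "git_url": "https://github.com/nf-core/modules.git",
            "base_path": "modules",
            "modules": {
                "fastqc": {
                    "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d",
                    "branch": "master",
                },
            },
        },
    },
}
```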
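To illustrate how the branch field is written and read back, here is a hedged sketch of the call pattern, based only on the signatures shown above (`ModulesJson.update(modules_repo, module_name, module_version)` and `get_module_branch(module, repo_name)`) and the GitLab test remote from `tests/utils.py`. It assumes it is run from a pipeline directory that already has a `modules.json`:

```python
from nf_core.modules.modules_json import ModulesJson
from nf_core.modules.modules_repo import ModulesRepo

# Check out the remote on the branch the module should come from
# (URL, branch and SHA are the GitLab test values from tests/utils.py)
modules_repo = ModulesRepo(
    remote_url="https://gitlab.com/nf-core/modules-test.git",
    branch="branch-tester",
)

modules_json = ModulesJson(".")  # pipeline directory containing modules.json
modules_json.load()

# Record the module at a given commit; the branch is copied from modules_repo.branch
modules_json.update(modules_repo, "fastp", "e43448a2cc17d59e085c4d3f77489af5a4dcc26d")

# `nf-core modules update` and `patch` later recover the branch like this
assert modules_json.get_module_branch("fastp", modules_repo.fullname) == "branch-tester"
```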
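The branch prompts in `determine_module_branches_and_shas()` rely on the new `ModulesRepo.get_remote_branches()`, which is a thin wrapper around `git ls-remote`. A quick usage sketch against the default nf-core remote (the exact set of branch names depends on the remote at the time of the call):

```python
from nf_core.modules.modules_repo import NF_CORE_MODULES_REMOTE, ModulesRepo

# Returns a set of branch names with HEAD filtered out, e.g. {"master", ...}
branches = ModulesRepo.get_remote_branches(NF_CORE_MODULES_REMOTE)
print(sorted(branches))
```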
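Finally, the new `nf_core.utils.is_relative_to()` helper used when matching untracked module directories to repositories is a plain string-prefix check rather than a `pathlib` operation. A small usage sketch with illustrative paths, including the one behavioural difference from `Path.is_relative_to()` on Python 3.9+ (a path is not considered relative to itself):

```python
from pathlib import Path

from nf_core.utils import is_relative_to

# Directory of an installed module, relative to <pipeline>/modules
module_dir = Path("nf-core/modules-test/fastp")

# True: the module directory sits below the repository directory
print(is_relative_to(module_dir, Path("nf-core/modules-test")))

# False: a shared prefix is not enough, because os.sep is appended before matching
print(is_relative_to(module_dir, Path("nf-core/modules-tes")))

# False: unlike Path.is_relative_to(), a path is not reported as relative to itself
print(is_relative_to(module_dir, module_dir))
```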