diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py index 27eaf9bcb3..8304775d5f 100644 --- a/docs/api/_src/conf.py +++ b/docs/api/_src/conf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # # Configuration file for the Sphinx documentation builder. # diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 40c8f8984f..8472f78494 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -187,7 +187,7 @@ def update_file_version(filename: Union[str, Path], pipeline_obj: Pipeline, patt fn = pipeline_obj._fp(filename) content = "" try: - with open(fn, "r") as fh: + with open(fn) as fh: content = fh.read() except FileNotFoundError: log.warning(f"File not found: '{fn}'") diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 44924a2704..8332429835 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -227,7 +227,7 @@ def check_patch_paths(self, patch_path: Path, module_name: str) -> None: if patch_path.exists(): log.info(f"Modules {module_name} contains a patch file.") rewrite = False - with open(patch_path, "r") as fh: + with open(patch_path) as fh: lines = fh.readlines() for index, line in enumerate(lines): # Check if there are old paths in the patch file and replace @@ -264,7 +264,7 @@ def check_if_in_include_stmts(self, component_path: str) -> Dict[str, List[Dict[ if self.repo_type == "pipeline": workflow_files = Path(self.dir, "workflows").glob("*.nf") for workflow_file in workflow_files: - with open(workflow_file, "r") as fh: + with open(workflow_file) as fh: # Check if component path is in the file using mmap with mmap.mmap(fh.fileno(), 0, access=mmap.ACCESS_READ) as s: if s.find(component_path.encode()) != -1: diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index eec533ce60..2dc5bf4fc1 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -138,7 +138,7 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str """ modules = [] subworkflows = [] - with open(Path(subworkflow_dir, "main.nf"), "r") as fh: + with open(Path(subworkflow_dir, "main.nf")) as fh: for line in fh: regex = re.compile( r"include(?: *{ *)([a-zA-Z\_0-9]*)(?: *as *)?(?:[a-zA-Z\_0-9]*)?(?: *})(?: *from *)(?:'|\")(.*)(?:'|\")" diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 568ca22af5..8ccd83b594 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -2,7 +2,6 @@ The ComponentCreate class handles generating of module and subworkflow templates """ -from __future__ import print_function import glob import json @@ -440,7 +439,7 @@ def _copy_old_files(self, component_old_path): pytest_dir = Path(self.directory, "tests", self.component_type, self.org, self.component_dir) nextflow_config = pytest_dir / "nextflow.config" if nextflow_config.is_file(): - with open(nextflow_config, "r") as fh: + with open(nextflow_config) as fh: config_lines = "" for line in fh: if "publishDir" not in line: @@ -460,7 +459,7 @@ def _print_and_delete_pytest_files(self): "[violet]Do you want to delete the pytest files?[/]\nPytest file 'main.nf' will be printed to standard output to allow migrating the tests manually to 'main.nf.test'.", default=False, ): - with open(pytest_dir / "main.nf", "r") as fh: + with open(pytest_dir / "main.nf") as fh: log.info(fh.read()) shutil.rmtree(pytest_dir) log.info( @@ -475,7 +474,7 @@ def _print_and_delete_pytest_files(self): ) 
# Delete tags from pytest_modules.yml modules_yml = Path(self.directory, "tests", "config", "pytest_modules.yml") - with open(modules_yml, "r") as fh: + with open(modules_yml) as fh: yml_file = yaml.safe_load(fh) yml_key = str(self.component_dir) if self.component_type == "modules" else f"subworkflows/{self.component_dir}" if yml_key in yml_file: diff --git a/nf_core/components/info.py b/nf_core/components/info.py index e4d8038b87..54fc0004dc 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -184,7 +184,7 @@ def get_local_yaml(self): meta_fn = Path(comp_dir, "meta.yml") if meta_fn.exists(): log.debug(f"Found local file: {meta_fn}") - with open(meta_fn, "r") as fh: + with open(meta_fn) as fh: self.local_path = comp_dir return yaml.safe_load(fh) @@ -196,7 +196,7 @@ def get_local_yaml(self): meta_fn = Path(comp_dir, "meta.yml") if meta_fn.exists(): log.debug(f"Found local file: {meta_fn}") - with open(meta_fn, "r") as fh: + with open(meta_fn) as fh: self.local_path = comp_dir return yaml.safe_load(fh) log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally") diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index efffc28e85..ea8299cf7c 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -3,7 +3,6 @@ in nf-core pipelines """ -from __future__ import print_function import logging import operator diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 874fa570bc..e7e12c7837 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -81,7 +81,7 @@ def __init__( def _get_main_nf_tags(self, test_main_nf: Union[Path, str]): """Collect all tags from the main.nf.test file.""" tags = [] - with open(test_main_nf, "r") as fh: + with open(test_main_nf) as fh: for line in fh: if line.strip().startswith("tag"): tags.append(line.strip().split()[1].strip('"')) @@ -90,7 +90,7 @@ def _get_main_nf_tags(self, test_main_nf: Union[Path, str]): def _get_included_components(self, main_nf: Union[Path, str]): """Collect all included components from the main.nf file.""" included_components = [] - with open(main_nf, "r") as fh: + with open(main_nf) as fh: for line in fh: if line.strip().startswith("include"): # get tool/subtool or subworkflow name from include statement, can be in the form @@ -107,7 +107,7 @@ def _get_included_components(self, main_nf: Union[Path, str]): def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, str]): """Collect all included components from the main.nf file.""" included_components = [] - with open(main_nf_test, "r") as fh: + with open(main_nf_test) as fh: for line in fh: if line.strip().startswith("script"): # get tool/subtool or subworkflow name from script statement, can be: @@ -151,7 +151,7 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st def get_inputs_from_main_nf(self): """Collect all inputs from the main.nf file.""" inputs = [] - with open(self.main_nf, "r") as f: + with open(self.main_nf) as f: data = f.read() # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo # regex matches: @@ -178,7 +178,7 @@ def get_inputs_from_main_nf(self): def get_outputs_from_main_nf(self): outputs = [] - with open(self.main_nf, "r") as f: + with open(self.main_nf) as f: data = f.read() # get output values from main.nf after "output:". 
the names are always after "emit:" if "output:" not in data: diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 077cb2b840..1664ec1b4c 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -499,7 +499,7 @@ def get_all_components_info(self, branch=None): ] elif isinstance(self.update_config[repo_name], dict): # If it is a dict, then there are entries for individual components or component directories - for component_dir in set([dir for dir, _ in components]): + for component_dir in {dir for dir, _ in components}: if isinstance(self.update_config[repo_name][component_dir], str): # If a string is given it is the commit SHA to which we should update to custom_sha = self.update_config[repo_name][component_dir] diff --git a/nf_core/create.py b/nf_core/create.py index 56d0912a07..7971eb919b 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -75,7 +75,7 @@ def __init__( ], } # Get list of files we're skipping with the supplied skip keys - self.skip_paths = set(sp for k in skip_paths_keys for sp in skippable_paths[k]) + self.skip_paths = {sp for k in skip_paths_keys for sp in skippable_paths[k]} # Set convenience variables self.name = self.template_params["name"] @@ -108,7 +108,7 @@ def create_param_dict(self, name, description, author, version, template_yaml_pa # Obtain template customization info from template yaml file or `.nf-core.yml` config file try: if template_yaml_path is not None: - with open(template_yaml_path, "r") as f: + with open(template_yaml_path) as f: template_yaml = yaml.safe_load(f) elif "template" in config_yml: template_yaml = config_yml["template"] @@ -395,7 +395,7 @@ def remove_nf_core_in_bug_report_template(self): """ bug_report_path = self.outdir / ".github" / "ISSUE_TEMPLATE" / "bug_report.yml" - with open(bug_report_path, "r") as fh: + with open(bug_report_path) as fh: contents = yaml.load(fh, Loader=yaml.FullLoader) # Remove the first item in the body, which is the information about the docs diff --git a/nf_core/download.py b/nf_core/download.py index 08bef935ba..f8cad8152f 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -1,6 +1,5 @@ """Downloads a nf-core pipeline to the local file system.""" -from __future__ import print_function import concurrent.futures import io @@ -640,7 +639,7 @@ def wf_use_local_configs(self, revision_dirname): log.debug(f"Editing 'params.custom_config_base' in '{nfconfig_fn}'") # Load the nextflow.config file into memory - with open(nfconfig_fn, "r") as nfconfig_fh: + with open(nfconfig_fn) as nfconfig_fh: nfconfig = nfconfig_fh.read() # Replace the target string @@ -700,7 +699,7 @@ def find_container_images(self, workflow_directory): if bool(config_findings_dsl2): # finding fill always be a tuple of length 2, first the quote used and second the enquoted value. 
for finding in config_findings_dsl2: - config_findings.append((finding + (self.nf_config, "Nextflow configs"))) + config_findings.append(finding + (self.nf_config, "Nextflow configs")) else: # no regex match, likely just plain string """ Append string also as finding-like tuple for consistency @@ -719,7 +718,7 @@ def find_container_images(self, workflow_directory): for file in files: if file.endswith(".nf"): file_path = os.path.join(subdir, file) - with open(file_path, "r") as fh: + with open(file_path) as fh: # Look for any lines with container "xxx" or container 'xxx' search_space = fh.read() """ @@ -744,7 +743,7 @@ def find_container_images(self, workflow_directory): for finding in local_module_findings: # append finding since we want to collect them from all modules # also append search_space because we need to start over later if nothing was found. - module_findings.append((finding + (search_space, file_path))) + module_findings.append(finding + (search_space, file_path)) # Not sure if there will ever be multiple container definitions per module, but beware DSL3. # Like above run on shallow copy, because length may change at runtime. diff --git a/nf_core/launch.py b/nf_core/launch.py index 363506c448..4174812d6e 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -1,6 +1,5 @@ """ Launch a pipeline, interactively collecting params """ -from __future__ import print_function import copy import json diff --git a/nf_core/licences.py b/nf_core/licences.py index d686a56178..a8a35334dd 100644 --- a/nf_core/licences.py +++ b/nf_core/licences.py @@ -1,6 +1,5 @@ """Lists software licences for a given workflow.""" -from __future__ import print_function import json import logging diff --git a/nf_core/lint/actions_awsfulltest.py b/nf_core/lint/actions_awsfulltest.py index e8e1c951b1..66aa3f99bf 100644 --- a/nf_core/lint/actions_awsfulltest.py +++ b/nf_core/lint/actions_awsfulltest.py @@ -32,7 +32,7 @@ def actions_awsfulltest(self): fn = os.path.join(self.wf_path, ".github", "workflows", "awsfulltest.yml") if os.path.isfile(fn): try: - with open(fn, "r") as fh: + with open(fn) as fh: wf = yaml.safe_load(fh) except Exception as e: return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} diff --git a/nf_core/lint/actions_awstest.py b/nf_core/lint/actions_awstest.py index ccdf0abf6a..7c55998944 100644 --- a/nf_core/lint/actions_awstest.py +++ b/nf_core/lint/actions_awstest.py @@ -27,7 +27,7 @@ def actions_awstest(self): return {"ignored": [f"'awstest.yml' workflow not found: `{fn}`"]} try: - with open(fn, "r") as fh: + with open(fn) as fh: wf = yaml.safe_load(fh) except Exception as e: return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} diff --git a/nf_core/lint/actions_ci.py b/nf_core/lint/actions_ci.py index e669eceb8c..9eee03a0d3 100644 --- a/nf_core/lint/actions_ci.py +++ b/nf_core/lint/actions_ci.py @@ -48,7 +48,7 @@ def actions_ci(self): return {"ignored": ["'.github/workflows/ci.yml' not found"]} try: - with open(fn, "r") as fh: + with open(fn) as fh: ciwf = yaml.safe_load(fh) except Exception as e: return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} diff --git a/nf_core/lint/actions_schema_validation.py b/nf_core/lint/actions_schema_validation.py index 9d49b84c6b..fa4471d98c 100644 --- a/nf_core/lint/actions_schema_validation.py +++ b/nf_core/lint/actions_schema_validation.py @@ -36,7 +36,7 @@ def actions_schema_validation(self): # load workflow try: - with open(wf_path, "r") as fh: + with open(wf_path) as fh: wf_json = yaml.safe_load(fh) except Exception as e: 
failed.append(f"Could not parse yaml file: {wf}, {e}") diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py index 2b64d62638..82b286fb44 100644 --- a/nf_core/lint/files_unchanged.py +++ b/nf_core/lint/files_unchanged.py @@ -187,16 +187,16 @@ def _tf(file_path): else: for f in files: try: - with open(_pf(f), "r") as fh: + with open(_pf(f)) as fh: pipeline_file = fh.read() - with open(_tf(f), "r") as fh: + with open(_tf(f)) as fh: template_file = fh.read() if template_file in pipeline_file: passed.append(f"`{f}` matches the template") else: if "files_unchanged" in self.fix: # Try to fix the problem by overwriting the pipeline file - with open(_tf(f), "r") as fh: + with open(_tf(f)) as fh: template_file = fh.read() with open(_pf(f), "w") as fh: fh.write(template_file) diff --git a/nf_core/lint/merge_markers.py b/nf_core/lint/merge_markers.py index f33a5095d8..144d4990f1 100644 --- a/nf_core/lint/merge_markers.py +++ b/nf_core/lint/merge_markers.py @@ -23,7 +23,7 @@ def merge_markers(self): ignore = [".git"] if os.path.isfile(os.path.join(self.wf_path, ".gitignore")): - with io.open(os.path.join(self.wf_path, ".gitignore"), "rt", encoding="latin1") as fh: + with open(os.path.join(self.wf_path, ".gitignore"), encoding="latin1") as fh: for l in fh: ignore.append(os.path.basename(l.strip().rstrip("/"))) for root, dirs, files in os.walk(self.wf_path, topdown=True): @@ -41,7 +41,7 @@ def merge_markers(self): if nf_core.utils.is_file_binary(os.path.join(root, fname)): continue try: - with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh: + with open(os.path.join(root, fname), encoding="latin1") as fh: for l in fh: if ">>>>>>>" in l: failed.append(f"Merge marker '>>>>>>>' in `{os.path.join(root, fname)}`: {l[:30]}") diff --git a/nf_core/lint/multiqc_config.py b/nf_core/lint/multiqc_config.py index cbbeae07a8..b2f1a89a1b 100644 --- a/nf_core/lint/multiqc_config.py +++ b/nf_core/lint/multiqc_config.py @@ -34,7 +34,7 @@ def multiqc_config(self) -> Dict[str, List[str]]: return {"ignored": ["'assets/multiqc_config.yml' not found"]} try: - with open(fn, "r") as fh: + with open(fn) as fh: mqc_yml = yaml.safe_load(fh) except Exception as e: return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index 24f1e5c12f..7648c107b2 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -190,13 +190,11 @@ def nextflow_config(self): # Check and warn if the process configuration is done with deprecated syntax process_with_deprecated_syntax = list( - set( - [ - re.search(r"^(process\.\$.*?)\.+.*$", ck).group(1) - for ck in self.nf_config.keys() - if re.match(r"^(process\.\$.*?)\.+.*$", ck) - ] - ) + { + re.search(r"^(process\.\$.*?)\.+.*$", ck).group(1) + for ck in self.nf_config.keys() + if re.match(r"^(process\.\$.*?)\.+.*$", ck) + } ) for pd in process_with_deprecated_syntax: warned.append(f"Process configuration is done with deprecated_syntax: {pd}") @@ -300,7 +298,7 @@ def nextflow_config(self): ] path = os.path.join(self.wf_path, "nextflow.config") i = 0 - with open(path, "r") as f: + with open(path) as f: for line in f: if lines[i] in line: i += 1 @@ -320,7 +318,7 @@ def nextflow_config(self): ) # Check for the availability of the "test" configuration profile by parsing nextflow.config - with open(os.path.join(self.wf_path, "nextflow.config"), "r") as f: + with open(os.path.join(self.wf_path, "nextflow.config")) as f: content = f.read() # Remove comments 
diff --git a/nf_core/lint/pipeline_todos.py b/nf_core/lint/pipeline_todos.py index 890e227fa1..ed2f1fb94e 100644 --- a/nf_core/lint/pipeline_todos.py +++ b/nf_core/lint/pipeline_todos.py @@ -41,7 +41,7 @@ def pipeline_todos(self, root_dir=None): ignore = [".git"] if os.path.isfile(os.path.join(root_dir, ".gitignore")): - with io.open(os.path.join(root_dir, ".gitignore"), "rt", encoding="latin1") as fh: + with open(os.path.join(root_dir, ".gitignore"), encoding="latin1") as fh: for l in fh: ignore.append(os.path.basename(l.strip().rstrip("/"))) for root, dirs, files in os.walk(root_dir, topdown=True): @@ -52,7 +52,7 @@ def pipeline_todos(self, root_dir=None): files[:] = [f for f in files if not fnmatch.fnmatch(os.path.join(root, f), i)] for fname in files: try: - with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh: + with open(os.path.join(root, fname), encoding="latin1") as fh: for l in fh: if "TODO nf-core" in l: l = ( diff --git a/nf_core/lint/readme.py b/nf_core/lint/readme.py index 55060442b1..cade9ca3ea 100644 --- a/nf_core/lint/readme.py +++ b/nf_core/lint/readme.py @@ -31,7 +31,7 @@ def readme(self): # Remove field that should be ignored according to the linting config ignore_configs = self.lint_config.get("readme", []) - with open(os.path.join(self.wf_path, "README.md"), "r") as fh: + with open(os.path.join(self.wf_path, "README.md")) as fh: content = fh.read() if "nextflow_badge" not in ignore_configs: diff --git a/nf_core/lint/template_strings.py b/nf_core/lint/template_strings.py index fb1f0f32e5..0665675c97 100644 --- a/nf_core/lint/template_strings.py +++ b/nf_core/lint/template_strings.py @@ -30,7 +30,7 @@ def template_strings(self): if encoding is not None or (ftype is not None and any([ftype.startswith(ft) for ft in binary_ftypes])): continue - with io.open(fn, "r", encoding="latin1") as fh: + with open(fn, encoding="latin1") as fh: lnum = 0 for l in fh: lnum += 1 diff --git a/nf_core/lint/version_consistency.py b/nf_core/lint/version_consistency.py index fa5b50de01..2336db4a42 100644 --- a/nf_core/lint/version_consistency.py +++ b/nf_core/lint/version_consistency.py @@ -54,7 +54,7 @@ def version_consistency(self): if len(set(versions.values())) != 1: failed.append( "The versioning is not consistent between container, release tag " - "and config. Found {}".format(", ".join(["{} = {}".format(k, v) for k, v in versions.items()])) + "and config. 
Found {}".format(", ".join([f"{k} = {v}" for k, v in versions.items()])) ) passed.append("Version tags are numeric and consistent between container, release tag and config.") diff --git a/nf_core/list.py b/nf_core/list.py index 94d9d8e043..38bfb3f938 100644 --- a/nf_core/list.py +++ b/nf_core/list.py @@ -1,6 +1,5 @@ """Lists available nf-core pipelines and versions.""" -from __future__ import print_function import json import logging diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 25259f1a16..17774ac410 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -4,8 +4,6 @@ """ -from __future__ import print_function - import logging import os import re @@ -146,7 +144,7 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: except FileNotFoundError: # try it in the main.nf instead try: - with open(module.main_nf, "r") as fh: + with open(module.main_nf) as fh: for l in fh: if "bioconda::" in l: bioconda_packages = [b for b in l.split() if "bioconda::" in b] @@ -208,7 +206,7 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: ), ] - with open(module.main_nf, "r") as fh: + with open(module.main_nf) as fh: content = fh.read() # Go over file content of main.nf and find replacements @@ -241,7 +239,7 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: fh.write(content) # change version in environment.yml - with open(module.environment_yml, "r") as fh: + with open(module.environment_yml) as fh: env_yml = yaml.safe_load(fh) re.sub(bioconda_packages[0], f"'bioconda::{bioconda_tool_name}={last_ver}'", env_yml["dependencies"]) with open(module.environment_yml, "w") as fh: @@ -266,7 +264,7 @@ def get_bioconda_version(self, module: NFCoreComponent) -> List[str]: # Check whether file exists and load it bioconda_packages = [] try: - with open(module.environment_yml, "r") as fh: + with open(module.environment_yml) as fh: env_yml = yaml.safe_load(fh) bioconda_packages = env_yml.get("dependencies", []) except FileNotFoundError: diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 68a38cc0cd..75d2d830b8 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -6,7 +6,6 @@ nf-core modules lint """ -from __future__ import print_function import logging import os diff --git a/nf_core/modules/lint/environment_yml.py b/nf_core/modules/lint/environment_yml.py index a052425539..c2fc9384e5 100644 --- a/nf_core/modules/lint/environment_yml.py +++ b/nf_core/modules/lint/environment_yml.py @@ -23,14 +23,14 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent) env_yml = None # load the environment.yml file try: - with open(Path(module.component_dir, "environment.yml"), "r") as fh: + with open(Path(module.component_dir, "environment.yml")) as fh: env_yml = yaml.safe_load(fh) module.passed.append(("environment_yml_exists", "Module's `environment.yml` exists", module.environment_yml)) except FileNotFoundError: # check if the module's main.nf requires a conda environment - with open(Path(module.component_dir, "main.nf"), "r") as fh: + with open(Path(module.component_dir, "main.nf")) as fh: main_nf = fh.read() if 'conda "${moduleDir}/environment.yml"' in main_nf: module.failed.append( @@ -49,9 +49,7 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent) if env_yml: valid_env_yml = False try: - with open( - Path(module_lint_object.modules_repo.local_repo_dir, "modules/environment-schema.json"), "r" - 
) as fh: + with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/environment-schema.json")) as fh: schema = json.load(fh) validators.validate(instance=env_yml, schema=schema) module.passed.append( @@ -92,7 +90,7 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent) yaml.dump(env_yml, fh, Dumper=custom_yaml_dumper()) # Check that the name in the environment.yml file matches the name in the meta.yml file - with open(Path(module.component_dir, "meta.yml"), "r") as fh: + with open(Path(module.component_dir, "meta.yml")) as fh: meta_yml = yaml.safe_load(fh) if env_yml["name"] == meta_yml["name"]: diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 56a9e99925..52ee0151ed 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -55,7 +55,7 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): if lines is None: try: # Check whether file exists and load it - with open(module.main_nf, "r") as fh: + with open(module.main_nf) as fh: lines = fh.readlines() module.passed.append(("main_nf_exists", "Module file exists", module.main_nf)) except FileNotFoundError: @@ -348,7 +348,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): # Get bioconda packages from environment.yml try: - with open(Path(self.component_dir, "environment.yml"), "r") as fh: + with open(Path(self.component_dir, "environment.yml")) as fh: env_yml = yaml.safe_load(fh) if "dependencies" in env_yml: bioconda_packages = [x for x in env_yml["dependencies"] if isinstance(x, str) and "bioconda::" in x] @@ -605,7 +605,7 @@ def _fix_module_version(self, current_version, latest_version, singularity_tag, # Get latest build build = _get_build(response) - with open(self.main_nf, "r") as source: + with open(self.main_nf) as source: lines = source.readlines() # Check if the new version + build exist and replace diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 7552c1ceae..551a978f4d 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -54,7 +54,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None meta_yaml = yaml.safe_load("".join(lines)) if meta_yaml is None: try: - with open(module.meta_yml, "r") as fh: + with open(module.meta_yml) as fh: meta_yaml = yaml.safe_load(fh) module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml)) except FileNotFoundError: @@ -64,7 +64,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None # Confirm that the meta.yml file is valid according to the JSON schema valid_meta_yml = False try: - with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/meta-schema.json"), "r") as fh: + with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/meta-schema.json")) as fh: schema = json.load(fh) validators.validate(instance=meta_yaml, schema=schema) module.passed.append(("meta_yml_valid", "Module `meta.yml` is valid", module.meta_yml)) diff --git a/nf_core/modules/lint/module_patch.py b/nf_core/modules/lint/module_patch.py index d52962eabb..29bf78a66b 100644 --- a/nf_core/modules/lint/module_patch.py +++ b/nf_core/modules/lint/module_patch.py @@ -40,7 +40,7 @@ def check_patch_valid(module, patch_path): Returns: (bool): False if any test failed, True otherwise """ - with open(patch_path, "r") as fh: + with open(patch_path) as fh: patch_lines = fh.readlines() # Check that the file 
contains a patch for at least one file @@ -170,8 +170,8 @@ def patch_reversible(module_lint_object, module, patch_path): ) except LookupError: # Patch failed. Save the patch file by moving to the install dir - module.failed.append((("patch_reversible", "Patch file is outdated or edited", patch_path))) + module.failed.append(("patch_reversible", "Patch file is outdated or edited", patch_path)) return False - module.passed.append((("patch_reversible", "Patch agrees with module files", patch_path))) + module.passed.append(("patch_reversible", "Patch agrees with module files", patch_path)) return True diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py index 87033e3f49..520f8cf0a2 100644 --- a/nf_core/modules/lint/module_tests.py +++ b/nf_core/modules/lint/module_tests.py @@ -44,7 +44,7 @@ def module_tests(_, module: NFCoreComponent): if module.nftest_main_nf.is_file(): # Check if main.nf.test.snap file exists, if 'snap(' is inside main.nf.test - with open(module.nftest_main_nf, "r") as fh: + with open(module.nftest_main_nf) as fh: if "snapshot(" in fh.read(): snap_file = module.nftest_testdir / "main.nf.test.snap" if snap_file.is_file(): @@ -52,7 +52,7 @@ def module_tests(_, module: NFCoreComponent): ("test_snapshot_exists", "snapshot file `main.nf.test.snap` exists", snap_file) ) # Validate no empty files - with open(snap_file, "r") as snap_fh: + with open(snap_file) as snap_fh: try: snap_content = json.load(snap_fh) for test_name in snap_content.keys(): @@ -145,7 +145,7 @@ def module_tests(_, module: NFCoreComponent): pytest_yml_path = module.base_dir / "tests" / "config" / "pytest_modules.yml" if pytest_yml_path.is_file() and not is_pytest: try: - with open(pytest_yml_path, "r") as fh: + with open(pytest_yml_path) as fh: pytest_yml = yaml.safe_load(fh) if module.component_name in pytest_yml.keys(): module.failed.append( @@ -165,7 +165,7 @@ def module_tests(_, module: NFCoreComponent): if module.tags_yml.is_file(): # Check that tags.yml exists and it has the correct entry module.passed.append(("test_tags_yml_exists", "file `tags.yml` exists", module.tags_yml)) - with open(module.tags_yml, "r") as fh: + with open(module.tags_yml) as fh: tags_yml = yaml.safe_load(fh) if module.component_name in tags_yml.keys(): module.passed.append(("test_tags_yml", "correct entry in tags.yml", module.tags_yml)) diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index efce3868e5..a97229ff62 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -74,9 +74,9 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d temp_path = Path(to_dir, file) curr_path = Path(from_dir, file) if temp_path.exists() and curr_path.exists() and temp_path.is_file(): - with open(temp_path, "r") as fh: + with open(temp_path) as fh: new_lines = fh.readlines() - with open(curr_path, "r") as fh: + with open(curr_path) as fh: old_lines = fh.readlines() if new_lines == old_lines: @@ -93,7 +93,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d diffs[file] = (ModulesDiffer.DiffEnum.CHANGED, diff) elif temp_path.exists(): - with open(temp_path, "r") as fh: + with open(temp_path) as fh: new_lines = fh.readlines() # The file was created # Show file against /dev/null @@ -108,7 +108,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d elif curr_path.exists(): # The file was removed # Show file against /dev/null - with open(curr_path, "r") as fh: + 
with open(curr_path, "r") as fh: + with open(curr_path) as fh: old_lines = fh.readlines() diff = difflib.unified_diff( old_lines, @@ -279,7 +279,7 @@ def per_file_patch(patch_fn): dict[str, str]: A dictionary indexed by the filenames with the file patches as values """ - with open(patch_fn, "r") as fh: + with open(patch_fn) as fh: lines = fh.readlines() patches = {} @@ -447,7 +447,7 @@ def try_apply_patch(module, repo_path, patch_path, module_dir, reverse=False): log.debug(f"Applying patch to {file}") fn = Path(file).relative_to(module_relpath) file_path = module_dir / fn - with open(file_path, "r") as fh: + with open(file_path) as fh: file_lines = fh.readlines() patched_new_lines = ModulesDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse) new_files[str(fn)] = patched_new_lines diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 32eb8736d6..f68c27b2d8 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -637,7 +637,7 @@ def load(self): UserWarning: If the modules.json file is not found """ try: - with open(self.modules_json_path, "r") as fh: + with open(self.modules_json_path) as fh: try: self.modules_json = json.load(fh) except json.JSONDecodeError as e: diff --git a/nf_core/params_file.py b/nf_core/params_file.py index 5c50c53fb9..d9db5a3fd7 100644 --- a/nf_core/params_file.py +++ b/nf_core/params_file.py @@ -1,6 +1,5 @@ """ Create a YAML parameter file """ -from __future__ import print_function import json import logging diff --git a/nf_core/refgenie.py b/nf_core/refgenie.py index 6f09b75532..d52c9c5554 100644 --- a/nf_core/refgenie.py +++ b/nf_core/refgenie.py @@ -86,7 +86,7 @@ def _update_nextflow_home_config(refgenie_genomes_config_file, nxf_home): if os.path.exists(nxf_home_config): # look for include statement in config has_include_statement = False - with open(nxf_home_config, "r") as fh: + with open(nxf_home_config) as fh: lines = fh.readlines() for line in lines: if re.match(rf"\s*includeConfig\s*'{os.path.abspath(refgenie_genomes_config_file)}'", line): diff --git a/nf_core/schema.py b/nf_core/schema.py index 7e4726f189..32ced148a8 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -1,6 +1,5 @@ """ Code to deal with pipeline JSON Schema """ -from __future__ import print_function import copy import json @@ -107,7 +106,7 @@ def load_lint_schema(self): def load_schema(self): """Load a pipeline schema from a file""" - with open(self.schema_filename, "r") as fh: + with open(self.schema_filename) as fh: self.schema = json.load(fh) self.schema_defaults = {} self.schema_params = {} @@ -189,7 +188,7 @@ def load_input_params(self, params_path): """ # First, try to load as JSON try: - with open(params_path, "r") as fh: + with open(params_path) as fh: try: params = json.load(fh) except json.JSONDecodeError as e: @@ -200,7 +199,7 @@ def load_input_params(self, params_path): log.debug(f"Could not load input params as JSON: {json_e}") # This failed, try to load as YAML try: - with open(params_path, "r") as fh: + with open(params_path) as fh: params = yaml.safe_load(fh) self.input_params.update(params) log.debug(f"Loaded YAML input params: {params_path}") diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index ffba41f9da..dc8fa68c30 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -6,7 +6,6 @@ nf-core subworkflows lint """ -from __future__ import print_function import logging import os diff --git a/nf_core/subworkflows/lint/main_nf.py
b/nf_core/subworkflows/lint/main_nf.py index c7ce77490d..f52b7846c3 100644 --- a/nf_core/subworkflows/lint/main_nf.py +++ b/nf_core/subworkflows/lint/main_nf.py @@ -32,7 +32,7 @@ def main_nf(_, subworkflow): if lines is None: try: # Check whether file exists and load it - with open(subworkflow.main_nf, "r") as fh: + with open(subworkflow.main_nf) as fh: lines = fh.readlines() subworkflow.passed.append(("main_nf_exists", "Subworkflow file exists", subworkflow.main_nf)) except FileNotFoundError: diff --git a/nf_core/subworkflows/lint/meta_yml.py b/nf_core/subworkflows/lint/meta_yml.py index 4944b26188..c5f544a597 100644 --- a/nf_core/subworkflows/lint/meta_yml.py +++ b/nf_core/subworkflows/lint/meta_yml.py @@ -26,7 +26,7 @@ def meta_yml(subworkflow_lint_object, subworkflow): """ # Read the meta.yml file try: - with open(subworkflow.meta_yml, "r") as fh: + with open(subworkflow.meta_yml) as fh: meta_yaml = yaml.safe_load(fh) subworkflow.passed.append(("meta_yml_exists", "Subworkflow `meta.yml` exists", subworkflow.meta_yml)) except FileNotFoundError: @@ -36,9 +36,7 @@ def meta_yml(subworkflow_lint_object, subworkflow): # Confirm that the meta.yml file is valid according to the JSON schema valid_meta_yml = True try: - with open( - Path(subworkflow_lint_object.modules_repo.local_repo_dir, "subworkflows/yaml-schema.json"), "r" - ) as fh: + with open(Path(subworkflow_lint_object.modules_repo.local_repo_dir, "subworkflows/yaml-schema.json")) as fh: schema = json.load(fh) jsonschema.validators.validate(instance=meta_yaml, schema=schema) subworkflow.passed.append(("meta_yml_valid", "Subworkflow `meta.yml` is valid", subworkflow.meta_yml)) diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index 1ebced6d42..f7284320ea 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -52,14 +52,14 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): ) if subworkflow.nftest_main_nf.is_file(): - with open(subworkflow.nftest_main_nf, "r") as fh: + with open(subworkflow.nftest_main_nf) as fh: # Check if main.nf.test.snap file exists, if 'snap(' is inside main.nf.test if "snapshot(" in fh.read(): snap_file = subworkflow.nftest_testdir / "main.nf.test.snap" if snap_file.is_file(): subworkflow.passed.append(("test_snapshot_exists", "test `main.nf.test.snap` exists", snap_file)) # Validate no empty files - with open(snap_file, "r") as snap_fh: + with open(snap_file) as snap_fh: try: snap_content = json.load(snap_fh) for test_name in snap_content.keys(): @@ -158,7 +158,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): pytest_yml_path = subworkflow.base_dir / "tests" / "config" / "pytest_modules.yml" if pytest_yml_path.is_file() and not is_pytest: try: - with open(pytest_yml_path, "r") as fh: + with open(pytest_yml_path) as fh: pytest_yml = yaml.safe_load(fh) if "subworkflows/" + subworkflow.component_name in pytest_yml.keys(): subworkflow.failed.append( @@ -178,7 +178,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): if subworkflow.tags_yml.is_file(): # Check tags.yml exists and it has the correct entry subworkflow.passed.append(("test_tags_yml_exists", "file `tags.yml` exists", subworkflow.tags_yml)) - with open(subworkflow.tags_yml, "r") as fh: + with open(subworkflow.tags_yml) as fh: tags_yml = yaml.safe_load(fh) if "subworkflows/" + subworkflow.component_name in tags_yml.keys(): subworkflow.passed.append(("test_tags_yml", "correct entry in tags.yml", subworkflow.tags_yml)) 
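The nf_core/utils.py hunks below bundle three further modernisations: subprocess.run's capture_output flag, raising OSError instead of IOError, and zero-argument super(). A minimal sketch of each equivalence, assuming a POSIX system for the echo call; the Greeter classes are hypothetical stand-ins, unrelated to the real CustomDumper in utils.py:

import subprocess

# capture_output=True (Python 3.7+) is shorthand for passing
# stdout=subprocess.PIPE and stderr=subprocess.PIPE explicitly.
proc = subprocess.run(["echo", "hello"], capture_output=True, check=True)
assert proc.stdout == b"hello\n"

# Since PEP 3151 (Python 3.3), IOError is an alias of OSError, so
# "raise OSError(...)" is the modern spelling of "raise IOError(...)".
assert IOError is OSError

# Zero-argument super() resolves the enclosing class and instance
# implicitly; the two call forms below are equivalent.
class Greeter:
    def greet(self):
        return "hello"

class LoudGreeter(Greeter):
    def greet(self):
        # same as super(LoudGreeter, self).greet().upper()
        return super().greet().upper()

assert LoudGreeter().greet() == "HELLO"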
diff --git a/nf_core/sync.py b/nf_core/sync.py index 5402a6121d..085a6e6d65 100644 --- a/nf_core/sync.py +++ b/nf_core/sync.py @@ -96,7 +96,7 @@ def __init__( default=False, ).unsafe_ask() if overwrite_template or "template" not in self.config_yml: - with open(template_yaml_path, "r") as f: + with open(template_yaml_path) as f: self.config_yml["template"] = yaml.safe_load(f) with open(self.config_yml_path, "w") as fh: yaml.safe_dump(self.config_yml, fh) diff --git a/nf_core/utils.py b/nf_core/utils.py index bcc8faa3fd..e790e37d8a 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -188,7 +188,7 @@ def _load_pipeline_config(self): def _load_conda_environment(self): """Try to load the pipeline environment.yml file, if it exists""" try: - with open(os.path.join(self.wf_path, "environment.yml"), "r") as fh: + with open(os.path.join(self.wf_path, "environment.yml")) as fh: self.conda_config = yaml.safe_load(fh) except FileNotFoundError: log.debug("No conda `environment.yml` file found.") @@ -262,7 +262,7 @@ def fetch_wf_config(wf_path, cache_config=True): cache_path = os.path.join(cache_basedir, cache_fn) if os.path.isfile(cache_path) and cache_config is True: log.debug(f"Found a config cache, loading: {cache_path}") - with open(cache_path, "r") as fh: + with open(cache_path) as fh: try: config = json.load(fh) except json.JSONDecodeError as e: @@ -286,7 +286,7 @@ def fetch_wf_config(wf_path, cache_config=True): # Values in this file are likely to be complex, so don't both trying to capture them. Just get the param name. try: main_nf = os.path.join(wf_path, "main.nf") - with open(main_nf, "r") as fh: + with open(main_nf) as fh: for l in fh: match = re.match(r"^\s*(params\.[a-zA-Z0-9_]+)\s*=", l) if match: @@ -312,7 +312,7 @@ def run_cmd(executable: str, cmd: str) -> Union[Tuple[bytes, bytes], None]: full_cmd = f"{executable} {cmd}" log.debug(f"Running command: {full_cmd}") try: - proc = subprocess.run(shlex.split(full_cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) + proc = subprocess.run(shlex.split(full_cmd), capture_output=True, check=True) return (proc.stdout, proc.stderr) except OSError as e: if e.errno == errno.ENOENT: @@ -480,7 +480,7 @@ def __call__(self, r): gh_cli_config_fn = os.path.expanduser("~/.config/gh/hosts.yml") if self.auth is None and os.path.exists(gh_cli_config_fn): try: - with open(gh_cli_config_fn, "r") as fh: + with open(gh_cli_config_fn) as fh: gh_cli_config = yaml.safe_load(fh) self.auth = requests.auth.HTTPBasicAuth( gh_cli_config["github.com"]["user"], gh_cli_config["github.com"]["oauth_token"] @@ -792,7 +792,7 @@ def increase_indent(self, flow=False, indentless=False): See https://github.com/yaml/pyyaml/issues/234#issuecomment-765894586 """ - return super(CustomDumper, self).increase_indent(flow=flow, indentless=False) + return super().increase_indent(flow=flow, indentless=False) # HACK: insert blank lines between top-level objects # inspired by https://stackoverflow.com/a/44284819/3786245 @@ -1025,7 +1025,7 @@ def load_tools_config(directory: Union[str, Path] = "."): log.debug(f"No tools config file found: {CONFIG_PATHS[0]}") return Path(directory, CONFIG_PATHS[0]), {} - with open(config_fn, "r") as fh: + with open(config_fn) as fh: tools_config = yaml.safe_load(fh) # If the file is empty @@ -1145,7 +1145,7 @@ def validate_file_md5(file_name, expected_md5hex): if file_md5hex.upper() == expected_md5hex.upper(): log.debug(f"md5 sum of image matches expected: {expected_md5hex}") else: - raise IOError(f"{file_name} md5 does not match remote: 
{expected_md5hex} - {file_md5hex}") + raise OSError(f"{file_name} md5 does not match remote: {expected_md5hex} - {file_md5hex}") return True diff --git a/tests/components/generate_snapshot.py b/tests/components/generate_snapshot.py index 46fd63fe3f..c5067d7210 100644 --- a/tests/components/generate_snapshot.py +++ b/tests/components/generate_snapshot.py @@ -26,7 +26,7 @@ def test_generate_snapshot_module(self): snap_path = Path("modules", "nf-core-test", "fastqc", "tests", "main.nf.test.snap") assert snap_path.exists() - with open(snap_path, "r") as fh: + with open(snap_path) as fh: snap_content = json.load(fh) assert "versions" in snap_content assert "content" in snap_content["versions"] @@ -48,7 +48,7 @@ def test_generate_snapshot_subworkflow(self): snap_path = Path("subworkflows", "nf-core-test", "bam_sort_stats_samtools", "tests", "main.nf.test.snap") assert snap_path.exists() - with open(snap_path, "r") as fh: + with open(snap_path) as fh: snap_content = json.load(fh) assert "test_bam_sort_stats_samtools_paired_end_flagstats" in snap_content assert ( @@ -86,7 +86,7 @@ def test_update_snapshot_module(self): with set_wd(self.nfcore_modules): snap_path = Path("modules", "nf-core-test", "bwa", "mem", "tests", "main.nf.test.snap") - with open(snap_path, "r") as fh: + with open(snap_path) as fh: snap_content = json.load(fh) original_timestamp = snap_content["Single-End"]["timestamp"] # delete the timestamp in json @@ -103,7 +103,7 @@ def test_update_snapshot_module(self): ) snap_generator.run() - with open(snap_path, "r") as fh: + with open(snap_path) as fh: snap_content = json.load(fh) assert "Single-End" in snap_content assert snap_content["Single-End"]["timestamp"] != original_timestamp diff --git a/tests/lint/actions_awsfulltest.py b/tests/lint/actions_awsfulltest.py index 30293e31a4..bbda92a4d1 100644 --- a/tests/lint/actions_awsfulltest.py +++ b/tests/lint/actions_awsfulltest.py @@ -19,7 +19,7 @@ def test_actions_awsfulltest_pass(self): # Edit .github/workflows/awsfulltest.yml to use -profile test_full new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: awsfulltest_yml = fh.read() awsfulltest_yml = awsfulltest_yml.replace("-profile test ", "-profile test_full ") with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: @@ -44,7 +44,7 @@ def test_actions_awsfulltest_fail(self): # Edit .github/workflows/awsfulltest.yml to use -profile test_full new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: awsfulltest_yml = yaml.safe_load(fh) del awsfulltest_yml[True]["release"] with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: diff --git a/tests/lint/actions_awstest.py b/tests/lint/actions_awstest.py index 0e19f781aa..7bfa6052f8 100644 --- a/tests/lint/actions_awstest.py +++ b/tests/lint/actions_awstest.py @@ -20,7 +20,7 @@ def test_actions_awstest_fail(self): # Edit .github/workflows/awsfulltest.yml to use -profile test_full new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: awstest_yml = yaml.safe_load(fh) 
awstest_yml[True]["push"] = ["master"] with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: diff --git a/tests/lint/actions_ci.py b/tests/lint/actions_ci.py index d44dbb73b5..8734b2f78b 100644 --- a/tests/lint/actions_ci.py +++ b/tests/lint/actions_ci.py @@ -31,7 +31,7 @@ def test_actions_ci_fail_wrong_trigger(self): # Edit .github/workflows/actions_ci.yml to mess stuff up! new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml")) as fh: ci_yml = yaml.safe_load(fh) ci_yml[True]["push"] = ["dev", "patch"] ci_yml["jobs"]["test"]["strategy"]["matrix"] = {"nxf_versionnn": ["foo", ""]} diff --git a/tests/lint/actions_schema_validation.py b/tests/lint/actions_schema_validation.py index 48bb07e4dd..ad65d90018 100644 --- a/tests/lint/actions_schema_validation.py +++ b/tests/lint/actions_schema_validation.py @@ -9,7 +9,7 @@ def test_actions_schema_validation_missing_jobs(self): """Missing 'jobs' field should result in failure""" new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: awstest_yml = yaml.safe_load(fh) awstest_yml.pop("jobs") with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: @@ -27,7 +27,7 @@ def test_actions_schema_validation_missing_on(self): """Missing 'on' field should result in failure""" new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: awstest_yml = yaml.safe_load(fh) awstest_yml.pop(True) with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: @@ -46,7 +46,7 @@ def test_actions_schema_validation_fails_for_additional_property(self): """Missing 'jobs' field should result in failure""" new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh: + with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: awstest_yml = yaml.safe_load(fh) awstest_yml["not_jobs"] = awstest_yml["jobs"] with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: diff --git a/tests/lint/merge_markers.py b/tests/lint/merge_markers.py index be0d076757..64a62e25c3 100644 --- a/tests/lint/merge_markers.py +++ b/tests/lint/merge_markers.py @@ -7,7 +7,7 @@ def test_merge_markers_found(self): """Missing 'jobs' field should result in failure""" new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, "main.nf"), "r") as fh: + with open(os.path.join(new_pipeline, "main.nf")) as fh: main_nf_content = fh.read() main_nf_content = ">>>>>>>\n" + main_nf_content with open(os.path.join(new_pipeline, "main.nf"), "w") as fh: diff --git a/tests/lint/multiqc_config.py b/tests/lint/multiqc_config.py index 446b4378b0..721560ce81 100644 --- a/tests/lint/multiqc_config.py +++ b/tests/lint/multiqc_config.py @@ -18,7 +18,7 @@ def test_multiqc_config_exists_ignore(self): def test_multiqc_config_missing_report_section_order(self): """Test that linting fails if the multiqc_config.yml file is missing the report_section_order""" new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", 
"multiqc_config.yml"), "r") as fh: + with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: mqc_yml = yaml.safe_load(fh) mqc_yml_tmp = mqc_yml mqc_yml.pop("report_section_order") @@ -36,7 +36,7 @@ def test_multiqc_config_missing_report_section_order(self): def test_multiqc_incorrect_export_plots(self): """Test that linting fails if the multiqc_config.yml file has an incorrect value for export_plots""" new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: mqc_yml = yaml.safe_load(fh) mqc_yml_tmp = mqc_yml mqc_yml["export_plots"] = False @@ -54,7 +54,7 @@ def test_multiqc_incorrect_export_plots(self): def test_multiqc_config_report_comment_fail(self): """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment""" new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: mqc_yml = yaml.safe_load(fh) mqc_yml_tmp = mqc_yml mqc_yml["report_comment"] = "This is a test" @@ -73,7 +73,7 @@ def test_multiqc_config_report_comment_fail(self): def test_multiqc_config_report_comment_release_fail(self): """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment for a release version""" new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: mqc_yml = yaml.safe_load(fh) mqc_yml_tmp = mqc_yml with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: diff --git a/tests/lint/nextflow_config.py b/tests/lint/nextflow_config.py index 1542b8cf65..5d5f8e7345 100644 --- a/tests/lint/nextflow_config.py +++ b/tests/lint/nextflow_config.py @@ -43,7 +43,7 @@ def test_nextflow_config_missing_test_profile_failed(self): new_pipeline = self._make_pipeline_copy() # Change the name of the test profile so there is no such profile nf_conf_file = os.path.join(new_pipeline, "nextflow.config") - with open(nf_conf_file, "r") as f: + with open(nf_conf_file) as f: content = f.read() fail_content = re.sub(r"\btest\b", "testfail", content) with open(nf_conf_file, "w") as f: diff --git a/tests/modules/bump_versions.py b/tests/modules/bump_versions.py index 3c19041f63..99900e7509 100644 --- a/tests/modules/bump_versions.py +++ b/tests/modules/bump_versions.py @@ -12,7 +12,7 @@ def test_modules_bump_versions_single_module(self): """Test updating a single module""" # Change the bpipe/test version to an older version env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") - with open(env_yml_path, "r") as fh: + with open(env_yml_path) as fh: content = fh.read() new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content) with open(env_yml_path, "w") as fh: @@ -41,7 +41,7 @@ def test_modules_bump_versions_fail_unknown_version(self): """Fail because of an unknown version""" # Change the bpipe/test version to an older version env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") - with open(env_yml_path, "r") as fh: + with open(env_yml_path) as fh: content = fh.read() new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content) with open(env_yml_path, "w") as fh: diff --git a/tests/modules/create.py 
b/tests/modules/create.py
index 74e5ec3896..47f541a417 100644
--- a/tests/modules/create.py
+++ b/tests/modules/create.py
@@ -87,9 +87,9 @@ def test_modules_migrate(self, mock_rich_ask):
     # Clone modules repo with pytests
     shutil.rmtree(self.nfcore_modules)
     Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH)
-    with open(module_dir / "main.nf", "r") as fh:
+    with open(module_dir / "main.nf") as fh:
         old_main_nf = fh.read()
-    with open(module_dir / "meta.yml", "r") as fh:
+    with open(module_dir / "meta.yml") as fh:
         old_meta_yml = fh.read()

     # Create a module with --migrate-pytest
@@ -97,9 +97,9 @@ def test_modules_migrate(self, mock_rich_ask):
     module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True)
     module_create.create()

-    with open(module_dir / "main.nf", "r") as fh:
+    with open(module_dir / "main.nf") as fh:
         new_main_nf = fh.read()
-    with open(module_dir / "meta.yml", "r") as fh:
+    with open(module_dir / "meta.yml") as fh:
         new_meta_yml = fh.read()

     nextflow_config = module_dir / "tests" / "nextflow.config"
diff --git a/tests/modules/lint.py b/tests/modules/lint.py
index a8a775e6f6..a5d8567b76 100644
--- a/tests/modules/lint.py
+++ b/tests/modules/lint.py
@@ -333,7 +333,7 @@ def test_modules_lint_snapshot_file_missing_fail(self):

 def test_modules_lint_snapshot_file_not_needed(self):
     """Test linting a module which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "r") as fh:
+    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test")) as fh:
         content = fh.read()
     new_content = content.replace("snapshot(", "snap (")
     with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh:
@@ -372,7 +372,7 @@ def test_modules_environment_yml_file_sorted_correctly(self):

 def test_modules_environment_yml_file_sorted_incorrectly(self):
     """Test linting a module with an incorrectly sorted environment.yml file"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "r") as fh:
+    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh:
         yaml_content = yaml.safe_load(fh)
     # Add a new dependency to the environment.yml file and reverse the order
     yaml_content["dependencies"].append("z")
@@ -548,7 +548,7 @@ def test_modules_missing_test_main_nf(self):

 def test_modules_missing_required_tag(self):
     """Test linting a module with a missing required tag"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "r") as fh:
+    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test")) as fh:
         content = fh.read()
     new_content = content.replace("modules_nfcore", "foo")
     with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh:
@@ -581,7 +581,7 @@ def test_modules_missing_tags_yml(self):

 def test_modules_incorrect_tags_yml_key(self):
     """Test linting a module with an incorrect key in tags.yml file"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "r") as fh:
+    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml")) as fh:
         content = fh.read()
     new_content = content.replace("bpipe/test:", "bpipe_test:")
     with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh:
@@ -598,7 +598,7 @@ def test_modules_incorrect_tags_yml_key(self):

 def test_modules_incorrect_tags_yml_values(self):
     """Test linting a module with an incorrect path in tags.yml file"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "r") as fh:
+    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml")) as fh:
         content = fh.read()
     new_content = content.replace("modules/nf-core/bpipe/test/**", "foo")
     with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh:
diff --git a/tests/modules/modules_json.py b/tests/modules/modules_json.py
index 63ee4e743d..a054b6b131 100644
--- a/tests/modules/modules_json.py
+++ b/tests/modules/modules_json.py
@@ -17,7 +17,7 @@ def test_get_modules_json(self):
     """Checks that the get_modules_json function returns the correct result"""
     mod_json_path = os.path.join(self.pipeline_dir, "modules.json")

-    with open(mod_json_path, "r") as fh:
+    with open(mod_json_path) as fh:
         try:
             mod_json_sb = json.load(fh)
         except json.JSONDecodeError as e:
@@ -73,7 +73,7 @@ def test_mod_json_create(self):

 def modify_main_nf(path):
     """Modify a file to test patch creation"""
-    with open(path, "r") as fh:
+    with open(path) as fh:
         lines = fh.readlines()
     # Modify $meta.id to $meta.single_end
     lines[1] = '    tag "$meta.single_end"\n'
@@ -112,7 +112,7 @@ def test_mod_json_create_with_patch(self):
     assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"]

     # Check that fastqc/main.nf maintains the changes
-    with open(module_path / "main.nf", "r") as fh:
+    with open(module_path / "main.nf") as fh:
         lines = fh.readlines()
     assert lines[1] == '    tag "$meta.single_end"\n'

@@ -214,7 +214,7 @@ def test_mod_json_dump(self):
     assert os.path.exists(mod_json_path)

     # Check that the dump function writes the correct content
-    with open(mod_json_path, "r") as f:
+    with open(mod_json_path) as f:
         try:
             mod_json_new = json.load(f)
         except json.JSONDecodeError as e:
diff --git a/tests/modules/patch.py b/tests/modules/patch.py
index 338d890f2f..dc939c7ea7 100644
--- a/tests/modules/patch.py
+++ b/tests/modules/patch.py
@@ -43,7 +43,7 @@ def setup_patch(pipeline_dir, modify_module):

 def modify_main_nf(path):
     """Modify a file to test patch creation"""
-    with open(path, "r") as fh:
+    with open(path) as fh:
         lines = fh.readlines()
     # We want a patch file that looks something like:
     # -    tuple val(meta), path(reads)
@@ -99,7 +99,7 @@ def test_create_patch_change(self):
     )

     # Check that the correct lines are in the patch file
-    with open(module_path / patch_fn, "r") as fh:
+    with open(module_path / patch_fn) as fh:
         patch_lines = fh.readlines()
     module_relpath = module_path.relative_to(self.pipeline_dir)
     assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines, module_relpath / "main.nf"
@@ -157,7 +157,7 @@ def test_create_patch_try_apply_successful(self):
     )

     # Check that the correct lines are in the patch file
-    with open(module_path / patch_fn, "r") as fh:
+    with open(module_path / patch_fn) as fh:
         patch_lines = fh.readlines()
     module_relpath = module_path.relative_to(self.pipeline_dir)
     assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines
@@ -167,7 +167,7 @@ def test_create_patch_try_apply_successful(self):
     assert "+    tuple val(meta), path(reads), path(index)\n" in patch_lines

     # Check that 'main.nf' is updated correctly
-    with open(module_path / "main.nf", "r") as fh:
+    with open(module_path / "main.nf") as fh:
         main_nf_lines = fh.readlines()
     # These lines should have been removed by the patch
     assert "    tuple val(meta), path(reads)\n" not in main_nf_lines
@@ -258,7 +258,7 @@ def test_create_patch_update_success(self):
     ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME)

     # Check that the correct lines are in the patch file
-    with open(module_path / patch_fn, "r") as fh:
+    with open(module_path / patch_fn) as fh:
         patch_lines = fh.readlines()
     module_relpath = module_path.relative_to(self.pipeline_dir)
     assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines
@@ -268,7 +268,7 @@ def test_create_patch_update_success(self):
     assert "+    tuple val(meta), path(reads), path(index)\n" in patch_lines

     # Check that 'main.nf' is updated correctly
-    with open(module_path / "main.nf", "r") as fh:
+    with open(module_path / "main.nf") as fh:
         main_nf_lines = fh.readlines()
     # These lines should have been removed by the patch
     assert "    tuple val(meta), path(reads)\n" not in main_nf_lines
@@ -300,7 +300,7 @@ def test_create_patch_update_fail(self):
     )

     # Save the file contents for downstream comparison
-    with open(module_path / patch_fn, "r") as fh:
+    with open(module_path / patch_fn) as fh:
         patch_contents = fh.read()

     update_obj = nf_core.modules.ModuleUpdate(
@@ -317,14 +317,14 @@ def test_create_patch_update_fail(self):
     temp_module_dir = temp_dir / BISMARK_ALIGN
     for file in os.listdir(temp_module_dir):
         assert file in os.listdir(module_path)
-        with open(module_path / file, "r") as fh:
+        with open(module_path / file) as fh:
             installed = fh.read()
-        with open(temp_module_dir / file, "r") as fh:
+        with open(temp_module_dir / file) as fh:
             shouldbe = fh.read()
         assert installed == shouldbe

     # Check that the patch file is unaffected
-    with open(module_path / patch_fn, "r") as fh:
+    with open(module_path / patch_fn) as fh:
         new_patch_contents = fh.read()
     assert patch_contents == new_patch_contents

diff --git a/tests/modules/update.py b/tests/modules/update.py
index 399e9cc12c..5208070fa5 100644
--- a/tests/modules/update.py
+++ b/tests/modules/update.py
@@ -345,7 +345,7 @@ def test_update_only_show_differences_when_patch(self, mock_prompt):
     # We modify fastqc because it's one of the modules that can be updated and there's another one before it (custom/dumpsoftwareversions)
     module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc")
     main_path = Path(module_path, "main.nf")
-    with open(main_path, "r") as fh:
+    with open(main_path) as fh:
         lines = fh.readlines()
     for line_index in range(len(lines)):
         if lines[line_index] == "    label 'process_medium'\n":
diff --git a/tests/subworkflows/create.py b/tests/subworkflows/create.py
index fc628df34f..03a2fe1aaf 100644
--- a/tests/subworkflows/create.py
+++ b/tests/subworkflows/create.py
@@ -53,9 +53,9 @@ def test_subworkflows_migrate(self, mock_rich_ask):
     # Clone modules repo with pytests
    shutil.rmtree(self.nfcore_modules)
     Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH)
-    with open(subworkflow_dir / "main.nf", "r") as fh:
+    with open(subworkflow_dir / "main.nf") as fh:
         old_main_nf = fh.read()
-    with open(subworkflow_dir / "meta.yml", "r") as fh:
+    with open(subworkflow_dir / "meta.yml") as fh:
         old_meta_yml = fh.read()

     # Create a subworkflow with --migrate-pytest
@@ -65,9 +65,9 @@ def test_subworkflows_migrate(self, mock_rich_ask):
     )
     subworkflow_create.create()

-    with open(subworkflow_dir / "main.nf", "r") as fh:
+    with open(subworkflow_dir / "main.nf") as fh:
         new_main_nf = fh.read()
-    with open(subworkflow_dir / "meta.yml", "r") as fh:
+    with open(subworkflow_dir / "meta.yml") as fh:
         new_meta_yml = fh.read()

     nextflow_config = subworkflow_dir / "tests" / "nextflow.config"
diff --git a/tests/subworkflows/lint.py b/tests/subworkflows/lint.py
index 1380db2260..9f690cb234 100644
--- a/tests/subworkflows/lint.py
+++ b/tests/subworkflows/lint.py
@@ -87,9 +87,7 @@ def test_subworkflows_lint_snapshot_file_missing_fail(self):

 def test_subworkflows_lint_snapshot_file_not_needed(self):
     """Test linting a subworkflow which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file"""
-    with open(
-        Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test"), "r"
-    ) as fh:
+    with open(Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test")) as fh:
         content = fh.read()
     new_content = content.replace("snapshot(", "snap (")
     with open(
diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py
index 698086e186..a615f55f8f 100644
--- a/tests/subworkflows/update.py
+++ b/tests/subworkflows/update.py
@@ -73,7 +73,7 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self):
     assert update_obj.update("fastq_align_bowtie2") is True
     assert cmp_component(tmpdir, sw_path) is True

-    with open(patch_path, "r") as fh:
+    with open(patch_path) as fh:
         line = fh.readline()
     assert line.startswith(
         "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and"
diff --git a/tests/test_launch.py b/tests/test_launch.py
index 03c6a8b692..a2e02f2ae7 100644
--- a/tests/test_launch.py
+++ b/tests/test_launch.py
@@ -321,7 +321,7 @@ def test_build_command_params(self):
             == f'nextflow run {self.pipeline_dir} -params-file "{os.path.relpath(self.nf_params_fn)}"'
         )
         # Check saved parameters file
-        with open(self.nf_params_fn, "r") as fh:
+        with open(self.nf_params_fn) as fh:
             try:
                 saved_json = json.load(fh)
             except json.JSONDecodeError as e:
diff --git a/tests/test_lint.py b/tests/test_lint.py
index b2e7f3b574..6b62173d6e 100644
--- a/tests/test_lint.py
+++ b/tests/test_lint.py
@@ -134,7 +134,7 @@ def test_json_output(self, tmp_dir):
         self.lint_obj._save_json_results(json_fn)

         # Load created JSON file and check its contents
-        with open(json_fn, "r") as fh:
+        with open(json_fn) as fh:
             try:
                 saved_json = json.load(fh)
             except json.JSONDecodeError as e:
diff --git a/tests/test_modules.py b/tests/test_modules.py
index 92c8dfda3f..f7ada2a483 100644
--- a/tests/test_modules.py
+++ b/tests/test_modules.py
@@ -47,7 +47,7 @@ def create_modules_repo_dummy(tmp_dir):

     # Remove doi from meta.yml which makes lint fail
     meta_yml_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml")
-    with open(meta_yml_path, "r") as fh:
+    with open(meta_yml_path) as fh:
         meta_yml = yaml.safe_load(fh)
     del meta_yml["tools"][0]["bpipe"]["doi"]
     with open(meta_yml_path, "w") as fh:
@@ -60,7 +60,7 @@ def create_modules_repo_dummy(tmp_dir):

     # remove "TODO" statements from main.nf
     main_nf_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "main.nf")
-    with open(main_nf_path, "r") as fh:
+    with open(main_nf_path) as fh:
         main_nf = fh.read()
     main_nf = main_nf.replace("TODO", "")
     with open(main_nf_path, "w") as fh:
@@ -68,7 +68,7 @@ def create_modules_repo_dummy(tmp_dir):

     # remove "TODO" statements from main.nf.test
     main_nf_test_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test")
-    with open(main_nf_test_path, "r") as fh:
+    with open(main_nf_test_path) as fh:
         main_nf_test = fh.read()
     main_nf_test = main_nf_test.replace("TODO", "")
     with open(main_nf_test_path, "w") as fh:
diff --git a/tests/test_params_file.py b/tests/test_params_file.py
index 824e8fe345..e1777a0a96 100644
--- a/tests/test_params_file.py
+++ b/tests/test_params_file.py
@@ -31,7 +31,7 @@ def setup_class(cls):
         cls.invalid_template_schema = os.path.join(cls.template_dir, "nextflow_schema_invalid.json")

         # Remove the allOf section to make the schema invalid
-        with open(cls.template_schema, "r") as fh:
+        with open(cls.template_schema) as fh:
             o = json.load(fh)

         del o["allOf"]
@@ -49,7 +49,7 @@ def test_build_template(self):

         assert os.path.exists(outfile)

-        with open(outfile, "r") as fh:
+        with open(outfile) as fh:
             out = fh.read()

         assert "nf-core/testpipeline" in out