Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

run pyupgrade for python 3.8 compatibility #2623

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion docs/api/_src/conf.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
Expand Down
2 changes: 1 addition & 1 deletion nf_core/bump_version.py
Original file line number Diff line number Diff line change
Expand Up @@ -187,7 +187,7 @@ def update_file_version(filename: Union[str, Path], pipeline_obj: Pipeline, patt
fn = pipeline_obj._fp(filename)
content = ""
try:
with open(fn, "r") as fh:
with open(fn) as fh:
content = fh.read()
except FileNotFoundError:
log.warning(f"File not found: '{fn}'")
Expand Down
4 changes: 2 additions & 2 deletions nf_core/components/components_command.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,7 +227,7 @@ def check_patch_paths(self, patch_path: Path, module_name: str) -> None:
if patch_path.exists():
log.info(f"Modules {module_name} contains a patch file.")
rewrite = False
with open(patch_path, "r") as fh:
with open(patch_path) as fh:
lines = fh.readlines()
for index, line in enumerate(lines):
# Check if there are old paths in the patch file and replace
Expand Down Expand Up @@ -264,7 +264,7 @@ def check_if_in_include_stmts(self, component_path: str) -> Dict[str, List[Dict[
if self.repo_type == "pipeline":
workflow_files = Path(self.dir, "workflows").glob("*.nf")
for workflow_file in workflow_files:
with open(workflow_file, "r") as fh:
with open(workflow_file) as fh:
# Check if component path is in the file using mmap
with mmap.mmap(fh.fileno(), 0, access=mmap.ACCESS_READ) as s:
if s.find(component_path.encode()) != -1:
Expand Down
2 changes: 1 addition & 1 deletion nf_core/components/components_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str
"""
modules = []
subworkflows = []
with open(Path(subworkflow_dir, "main.nf"), "r") as fh:
with open(Path(subworkflow_dir, "main.nf")) as fh:
for line in fh:
regex = re.compile(
r"include(?: *{ *)([a-zA-Z\_0-9]*)(?: *as *)?(?:[a-zA-Z\_0-9]*)?(?: *})(?: *from *)(?:'|\")(.*)(?:'|\")"
Expand Down
7 changes: 3 additions & 4 deletions nf_core/components/create.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
The ComponentCreate class handles generating of module and subworkflow templates
"""

from __future__ import print_function

import glob
import json
Expand Down Expand Up @@ -440,7 +439,7 @@ def _copy_old_files(self, component_old_path):
pytest_dir = Path(self.directory, "tests", self.component_type, self.org, self.component_dir)
nextflow_config = pytest_dir / "nextflow.config"
if nextflow_config.is_file():
with open(nextflow_config, "r") as fh:
with open(nextflow_config) as fh:
config_lines = ""
for line in fh:
if "publishDir" not in line:
Expand All @@ -460,7 +459,7 @@ def _print_and_delete_pytest_files(self):
"[violet]Do you want to delete the pytest files?[/]\nPytest file 'main.nf' will be printed to standard output to allow migrating the tests manually to 'main.nf.test'.",
default=False,
):
with open(pytest_dir / "main.nf", "r") as fh:
with open(pytest_dir / "main.nf") as fh:
log.info(fh.read())
shutil.rmtree(pytest_dir)
log.info(
Expand All @@ -475,7 +474,7 @@ def _print_and_delete_pytest_files(self):
)
# Delete tags from pytest_modules.yml
modules_yml = Path(self.directory, "tests", "config", "pytest_modules.yml")
with open(modules_yml, "r") as fh:
with open(modules_yml) as fh:
yml_file = yaml.safe_load(fh)
yml_key = str(self.component_dir) if self.component_type == "modules" else f"subworkflows/{self.component_dir}"
if yml_key in yml_file:
Expand Down
4 changes: 2 additions & 2 deletions nf_core/components/info.py
Original file line number Diff line number Diff line change
Expand Up @@ -184,7 +184,7 @@ def get_local_yaml(self):
meta_fn = Path(comp_dir, "meta.yml")
if meta_fn.exists():
log.debug(f"Found local file: {meta_fn}")
with open(meta_fn, "r") as fh:
with open(meta_fn) as fh:
self.local_path = comp_dir
return yaml.safe_load(fh)

Expand All @@ -196,7 +196,7 @@ def get_local_yaml(self):
meta_fn = Path(comp_dir, "meta.yml")
if meta_fn.exists():
log.debug(f"Found local file: {meta_fn}")
with open(meta_fn, "r") as fh:
with open(meta_fn) as fh:
self.local_path = comp_dir
return yaml.safe_load(fh)
log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally")
Expand Down
1 change: 0 additions & 1 deletion nf_core/components/lint/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
in nf-core pipelines
"""

from __future__ import print_function

import logging
import operator
Expand Down
10 changes: 5 additions & 5 deletions nf_core/components/nfcore_component.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def __init__(
def _get_main_nf_tags(self, test_main_nf: Union[Path, str]):
"""Collect all tags from the main.nf.test file."""
tags = []
with open(test_main_nf, "r") as fh:
with open(test_main_nf) as fh:
for line in fh:
if line.strip().startswith("tag"):
tags.append(line.strip().split()[1].strip('"'))
Expand All @@ -90,7 +90,7 @@ def _get_main_nf_tags(self, test_main_nf: Union[Path, str]):
def _get_included_components(self, main_nf: Union[Path, str]):
"""Collect all included components from the main.nf file."""
included_components = []
with open(main_nf, "r") as fh:
with open(main_nf) as fh:
for line in fh:
if line.strip().startswith("include"):
# get tool/subtool or subworkflow name from include statement, can be in the form
Expand All @@ -107,7 +107,7 @@ def _get_included_components(self, main_nf: Union[Path, str]):
def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, str]):
"""Collect all included components from the main.nf file."""
included_components = []
with open(main_nf_test, "r") as fh:
with open(main_nf_test) as fh:
for line in fh:
if line.strip().startswith("script"):
# get tool/subtool or subworkflow name from script statement, can be:
Expand Down Expand Up @@ -151,7 +151,7 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st
def get_inputs_from_main_nf(self):
"""Collect all inputs from the main.nf file."""
inputs = []
with open(self.main_nf, "r") as f:
with open(self.main_nf) as f:
data = f.read()
# get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo
# regex matches:
Expand All @@ -178,7 +178,7 @@ def get_inputs_from_main_nf(self):

def get_outputs_from_main_nf(self):
outputs = []
with open(self.main_nf, "r") as f:
with open(self.main_nf) as f:
data = f.read()
# get output values from main.nf after "output:". the names are always after "emit:"
if "output:" not in data:
Expand Down
2 changes: 1 addition & 1 deletion nf_core/components/update.py
Original file line number Diff line number Diff line change
Expand Up @@ -499,7 +499,7 @@ def get_all_components_info(self, branch=None):
]
elif isinstance(self.update_config[repo_name], dict):
# If it is a dict, then there are entries for individual components or component directories
for component_dir in set([dir for dir, _ in components]):
for component_dir in {dir for dir, _ in components}:
if isinstance(self.update_config[repo_name][component_dir], str):
# If a string is given it is the commit SHA to which we should update to
custom_sha = self.update_config[repo_name][component_dir]
Expand Down
6 changes: 3 additions & 3 deletions nf_core/create.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ def __init__(
],
}
# Get list of files we're skipping with the supplied skip keys
self.skip_paths = set(sp for k in skip_paths_keys for sp in skippable_paths[k])
self.skip_paths = {sp for k in skip_paths_keys for sp in skippable_paths[k]}

# Set convenience variables
self.name = self.template_params["name"]
Expand Down Expand Up @@ -108,7 +108,7 @@ def create_param_dict(self, name, description, author, version, template_yaml_pa
# Obtain template customization info from template yaml file or `.nf-core.yml` config file
try:
if template_yaml_path is not None:
with open(template_yaml_path, "r") as f:
with open(template_yaml_path) as f:
template_yaml = yaml.safe_load(f)
elif "template" in config_yml:
template_yaml = config_yml["template"]
Expand Down Expand Up @@ -395,7 +395,7 @@ def remove_nf_core_in_bug_report_template(self):
"""
bug_report_path = self.outdir / ".github" / "ISSUE_TEMPLATE" / "bug_report.yml"

with open(bug_report_path, "r") as fh:
with open(bug_report_path) as fh:
contents = yaml.load(fh, Loader=yaml.FullLoader)

# Remove the first item in the body, which is the information about the docs
Expand Down
9 changes: 4 additions & 5 deletions nf_core/download.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
"""Downloads a nf-core pipeline to the local file system."""

from __future__ import print_function

import concurrent.futures
import io
Expand Down Expand Up @@ -640,7 +639,7 @@ def wf_use_local_configs(self, revision_dirname):
log.debug(f"Editing 'params.custom_config_base' in '{nfconfig_fn}'")

# Load the nextflow.config file into memory
with open(nfconfig_fn, "r") as nfconfig_fh:
with open(nfconfig_fn) as nfconfig_fh:
nfconfig = nfconfig_fh.read()

# Replace the target string
Expand Down Expand Up @@ -700,7 +699,7 @@ def find_container_images(self, workflow_directory):
if bool(config_findings_dsl2):
# finding will always be a tuple of length 2, first the quote used and second the enquoted value.
for finding in config_findings_dsl2:
config_findings.append((finding + (self.nf_config, "Nextflow configs")))
config_findings.append(finding + (self.nf_config, "Nextflow configs"))
else: # no regex match, likely just plain string
"""
Append string also as finding-like tuple for consistency
Expand All @@ -719,7 +718,7 @@ def find_container_images(self, workflow_directory):
for file in files:
if file.endswith(".nf"):
file_path = os.path.join(subdir, file)
with open(file_path, "r") as fh:
with open(file_path) as fh:
# Look for any lines with container "xxx" or container 'xxx'
search_space = fh.read()
"""
Expand All @@ -744,7 +743,7 @@ def find_container_images(self, workflow_directory):
for finding in local_module_findings:
# append finding since we want to collect them from all modules
# also append search_space because we need to start over later if nothing was found.
module_findings.append((finding + (search_space, file_path)))
module_findings.append(finding + (search_space, file_path))

# Not sure if there will ever be multiple container definitions per module, but beware DSL3.
# Like above run on shallow copy, because length may change at runtime.
Expand Down
1 change: 0 additions & 1 deletion nf_core/launch.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
""" Launch a pipeline, interactively collecting params """

from __future__ import print_function

import copy
import json
Expand Down
1 change: 0 additions & 1 deletion nf_core/licences.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
"""Lists software licences for a given workflow."""

from __future__ import print_function

import json
import logging
Expand Down
2 changes: 1 addition & 1 deletion nf_core/lint/actions_awsfulltest.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def actions_awsfulltest(self):
fn = os.path.join(self.wf_path, ".github", "workflows", "awsfulltest.yml")
if os.path.isfile(fn):
try:
with open(fn, "r") as fh:
with open(fn) as fh:
wf = yaml.safe_load(fh)
except Exception as e:
return {"failed": [f"Could not parse yaml file: {fn}, {e}"]}
Expand Down
2 changes: 1 addition & 1 deletion nf_core/lint/actions_awstest.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ def actions_awstest(self):
return {"ignored": [f"'awstest.yml' workflow not found: `{fn}`"]}

try:
with open(fn, "r") as fh:
with open(fn) as fh:
wf = yaml.safe_load(fh)
except Exception as e:
return {"failed": [f"Could not parse yaml file: {fn}, {e}"]}
Expand Down
2 changes: 1 addition & 1 deletion nf_core/lint/actions_ci.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def actions_ci(self):
return {"ignored": ["'.github/workflows/ci.yml' not found"]}

try:
with open(fn, "r") as fh:
with open(fn) as fh:
ciwf = yaml.safe_load(fh)
except Exception as e:
return {"failed": [f"Could not parse yaml file: {fn}, {e}"]}
Expand Down
2 changes: 1 addition & 1 deletion nf_core/lint/actions_schema_validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ def actions_schema_validation(self):

# load workflow
try:
with open(wf_path, "r") as fh:
with open(wf_path) as fh:
wf_json = yaml.safe_load(fh)
except Exception as e:
failed.append(f"Could not parse yaml file: {wf}, {e}")
Expand Down
6 changes: 3 additions & 3 deletions nf_core/lint/files_unchanged.py
Original file line number Diff line number Diff line change
Expand Up @@ -187,16 +187,16 @@ def _tf(file_path):
else:
for f in files:
try:
with open(_pf(f), "r") as fh:
with open(_pf(f)) as fh:
pipeline_file = fh.read()
with open(_tf(f), "r") as fh:
with open(_tf(f)) as fh:
template_file = fh.read()
if template_file in pipeline_file:
passed.append(f"`{f}` matches the template")
else:
if "files_unchanged" in self.fix:
# Try to fix the problem by overwriting the pipeline file
with open(_tf(f), "r") as fh:
with open(_tf(f)) as fh:
template_file = fh.read()
with open(_pf(f), "w") as fh:
fh.write(template_file)
Expand Down
4 changes: 2 additions & 2 deletions nf_core/lint/merge_markers.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ def merge_markers(self):

ignore = [".git"]
if os.path.isfile(os.path.join(self.wf_path, ".gitignore")):
with io.open(os.path.join(self.wf_path, ".gitignore"), "rt", encoding="latin1") as fh:
with open(os.path.join(self.wf_path, ".gitignore"), encoding="latin1") as fh:
for l in fh:
ignore.append(os.path.basename(l.strip().rstrip("/")))
for root, dirs, files in os.walk(self.wf_path, topdown=True):
Expand All @@ -41,7 +41,7 @@ def merge_markers(self):
if nf_core.utils.is_file_binary(os.path.join(root, fname)):
continue
try:
with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh:
with open(os.path.join(root, fname), encoding="latin1") as fh:
for l in fh:
if ">>>>>>>" in l:
failed.append(f"Merge marker '>>>>>>>' in `{os.path.join(root, fname)}`: {l[:30]}")
Expand Down
2 changes: 1 addition & 1 deletion nf_core/lint/multiqc_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def multiqc_config(self) -> Dict[str, List[str]]:
return {"ignored": ["'assets/multiqc_config.yml' not found"]}

try:
with open(fn, "r") as fh:
with open(fn) as fh:
mqc_yml = yaml.safe_load(fh)
except Exception as e:
return {"failed": [f"Could not parse yaml file: {fn}, {e}"]}
Expand Down
16 changes: 7 additions & 9 deletions nf_core/lint/nextflow_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -190,13 +190,11 @@ def nextflow_config(self):

# Check and warn if the process configuration is done with deprecated syntax
process_with_deprecated_syntax = list(
set(
[
re.search(r"^(process\.\$.*?)\.+.*$", ck).group(1)
for ck in self.nf_config.keys()
if re.match(r"^(process\.\$.*?)\.+.*$", ck)
]
)
{
re.search(r"^(process\.\$.*?)\.+.*$", ck).group(1)
for ck in self.nf_config.keys()
if re.match(r"^(process\.\$.*?)\.+.*$", ck)
}
)
for pd in process_with_deprecated_syntax:
warned.append(f"Process configuration is done with deprecated_syntax: {pd}")
Expand Down Expand Up @@ -300,7 +298,7 @@ def nextflow_config(self):
]
path = os.path.join(self.wf_path, "nextflow.config")
i = 0
with open(path, "r") as f:
with open(path) as f:
for line in f:
if lines[i] in line:
i += 1
Expand All @@ -320,7 +318,7 @@ def nextflow_config(self):
)

# Check for the availability of the "test" configuration profile by parsing nextflow.config
with open(os.path.join(self.wf_path, "nextflow.config"), "r") as f:
with open(os.path.join(self.wf_path, "nextflow.config")) as f:
content = f.read()

# Remove comments
Expand Down
4 changes: 2 additions & 2 deletions nf_core/lint/pipeline_todos.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def pipeline_todos(self, root_dir=None):

ignore = [".git"]
if os.path.isfile(os.path.join(root_dir, ".gitignore")):
with io.open(os.path.join(root_dir, ".gitignore"), "rt", encoding="latin1") as fh:
with open(os.path.join(root_dir, ".gitignore"), encoding="latin1") as fh:
for l in fh:
ignore.append(os.path.basename(l.strip().rstrip("/")))
for root, dirs, files in os.walk(root_dir, topdown=True):
Expand All @@ -52,7 +52,7 @@ def pipeline_todos(self, root_dir=None):
files[:] = [f for f in files if not fnmatch.fnmatch(os.path.join(root, f), i)]
for fname in files:
try:
with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh:
with open(os.path.join(root, fname), encoding="latin1") as fh:
for l in fh:
if "TODO nf-core" in l:
l = (
Expand Down
2 changes: 1 addition & 1 deletion nf_core/lint/readme.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def readme(self):
# Remove field that should be ignored according to the linting config
ignore_configs = self.lint_config.get("readme", [])

with open(os.path.join(self.wf_path, "README.md"), "r") as fh:
with open(os.path.join(self.wf_path, "README.md")) as fh:
content = fh.read()

if "nextflow_badge" not in ignore_configs:
Expand Down
2 changes: 1 addition & 1 deletion nf_core/lint/template_strings.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def template_strings(self):
if encoding is not None or (ftype is not None and any([ftype.startswith(ft) for ft in binary_ftypes])):
continue

with io.open(fn, "r", encoding="latin1") as fh:
with open(fn, encoding="latin1") as fh:
lnum = 0
for l in fh:
lnum += 1
Expand Down
Loading
Loading