update: add support for ruff linter #229

Merged
merged 13 commits on Jun 21, 2023
26 changes: 9 additions & 17 deletions .github/workflows/lint.yml
@@ -6,26 +6,18 @@ on:

jobs:
lint:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
python-version: ['3.11']

runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
- name: Install Python
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
python-version: "3.11"
- name: Install dependencies
run: |
python -m pip install --upgrade pip setuptools wheel
python -m pip install tox tox-gh-actions
- name: Check with flake8
run: |
tox -e flake8
- name: Check with codespell
run: |
tox -e codespell
python -m pip install --upgrade pip
python -m pip install tox ruff
- name: Run ruff
run: ruff check --format=github .
- name: Run codespell
run: tox -e codespell
1 change: 1 addition & 0 deletions README.md
@@ -9,6 +9,7 @@
![GitHub](https://img.shields.io/github/license/juaml/junifer?style=flat-square)
![Codecov](https://img.shields.io/codecov/c/github/juaml/junifer?style=flat-square)
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg?style=flat-square)](https://github.com/psf/black)
[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json)](https://github.com/charliermarsh/ruff)

## About

5 changes: 1 addition & 4 deletions conda-env.yml
@@ -22,13 +22,10 @@ dependencies:
- sphinxcontrib-mermaid=0.8.*
- tox
- ipykernel
- isort
- pytest-cov
- pytest
- black
- flake8
- flake8-docstrings
- flake8-bugbear
- ruff
- codespell
- pip
- pip:
1 change: 1 addition & 0 deletions docs/changes/newsfragments/229.misc
@@ -0,0 +1 @@
Adopt ``ruff`` as the only linter for the codebase by `Synchon Mandal`_
1 change: 0 additions & 1 deletion docs/conf.py
@@ -138,7 +138,6 @@
"julearn": ("https://juaml.github.io/julearn/main", None),
"nibabel": ("https://nipy.org/nibabel/", None),
"numpy": ("https://numpy.org/doc/stable/", None),
"numpy": ("https://numpy.org/doc/stable/", None),
"pandas": ("https://pandas.pydata.org/pandas-docs/dev", None),
# "sqlalchemy": ("https://docs.sqlalchemy.org/en/20/", None),
"scipy": ("https://docs.scipy.org/doc/scipy/", None),
5 changes: 5 additions & 0 deletions junifer/api/cli.py
@@ -135,6 +135,7 @@ def cli() -> None: # pragma: no cover
)
def run(filepath: click.Path, element: str, verbose: Union[str, int]) -> None:
"""Run command for CLI.

\f
Parameters
----------
@@ -182,6 +183,7 @@ def run(filepath: click.Path, element: str, verbose: Union[str, int]) -> None:
)
def collect(filepath: click.Path, verbose: Union[str, int]) -> None:
"""Collect command for CLI.

\f
Parameters
----------
@@ -224,6 +226,7 @@ def queue(
verbose: Union[str, int],
) -> None:
"""Queue command for CLI.

\f
Parameters
----------
@@ -261,6 +264,7 @@ def queue(
@click.option("--long", "long_", is_flag=True)
def wtf(long_: bool) -> None:
"""Wtf command for CLI.

\f
Parameters
----------
@@ -282,6 +286,7 @@ def wtf(long_: bool) -> None:
@click.argument("subpkg", type=str)
def selftest(subpkg: str) -> None:
"""Selftest command for CLI.

\f
Parameters
----------
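Note: the cli.py hunks only insert a blank line between each command's one-line summary and the `\f` marker, the separation that the pydocstyle-style checks bundled in ruff expect between a summary and the rest of a docstring. A minimal sketch of the resulting layout, using a hypothetical single-argument command rather than the real ones:

```python
import click


@click.command()
@click.argument("filepath")
def run(filepath: str) -> None:
    """Run command for CLI.

    \f
    Parameters
    ----------
    filepath : str
        Path to the YAML configuration file.

    """
    # Click truncates the --help text at the form-feed character (\f),
    # so only the summary line above it is shown to users.
    click.echo(f"running {filepath}")


if __name__ == "__main__":
    run()
```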
32 changes: 16 additions & 16 deletions junifer/api/functions.py
@@ -234,19 +234,19 @@ def queue(
# Create a folder within the CWD to store the job files / config
cwd = Path.cwd()
jobdir = cwd / "junifer_jobs" / jobname
logger.info(f"Creating job in {str(jobdir.absolute())}")
logger.info(f"Creating job in {jobdir.absolute()!s}")
if jobdir.exists():
if not overwrite:
raise_error(
f"Job folder for {jobname} already exists. "
"This error is raised to prevent overwriting job files "
"that might be scheduled but not yet executed. "
f"Either delete the directory {str(jobdir.absolute())} "
f"Either delete the directory {jobdir.absolute()!s} "
"or set overwrite=True."
)
else:
logger.info(
f"Deleting existing job directory at {str(jobdir.absolute())}"
f"Deleting existing job directory at {jobdir.absolute()!s}"
)
shutil.rmtree(jobdir)
jobdir.mkdir(exist_ok=True, parents=True)
@@ -268,7 +268,7 @@ def queue(
config["with"] = fixed_load

yaml_config = jobdir / "config.yaml"
logger.info(f"Writing YAML config to {str(yaml_config.absolute())}")
logger.info(f"Writing YAML config to {yaml_config.absolute()!s}")
yaml.dump(config, stream=yaml_config)

# Get list of elements
@@ -378,11 +378,11 @@ def _queue_condor(
"""
logger.debug("Creating HTCondor job")
run_junifer_args = (
f"run {str(yaml_config.absolute())} "
f"run {yaml_config.absolute()!s} "
f"--verbose {verbose} --element $(element)"
)
collect_junifer_args = (
f"collect {str(yaml_config.absolute())} --verbose {verbose} "
f"collect {yaml_config.absolute()!s} --verbose {verbose} "
)

if not isinstance(collect, str):
@@ -401,7 +401,7 @@
executable = "run_conda.sh"
arguments = f"{env_name} junifer"
exec_path = jobdir / executable
logger.info(f"Copying {executable} to {str(exec_path.absolute())}")
logger.info(f"Copying {executable} to {exec_path.absolute()!s}")
shutil.copy(Path(__file__).parent / "res" / executable, exec_path)
make_executable(exec_path)
elif env["kind"] == "venv":
@@ -441,7 +441,7 @@ def _queue_condor(
request_disk = {disk}

# Executable
initial_dir = {str(jobdir.absolute())}
initial_dir = {jobdir.absolute()!s}
executable = $(initial_dir)/{executable}
transfer_executable = False

@@ -450,9 +450,9 @@
{extra_preamble}

# Logs
log = {str(log_dir.absolute())}/junifer_run_$(log_element).log
output = {str(log_dir.absolute())}/junifer_run_$(log_element).out
error = {str(log_dir.absolute())}/junifer_run_$(log_element).err
log = {log_dir.absolute()!s}/junifer_run_$(log_element).log
output = {log_dir.absolute()!s}/junifer_run_$(log_element).out
error = {log_dir.absolute()!s}/junifer_run_$(log_element).err
"""

submit_run_fname = jobdir / f"run_{jobname}.submit"
@@ -475,7 +475,7 @@
request_disk = {disk}

# Executable
initial_dir = {str(jobdir.absolute())}
initial_dir = {jobdir.absolute()!s}
executable = $(initial_dir)/{executable}
transfer_executable = False

@@ -484,9 +484,9 @@
{extra_preamble}

# Logs
log = {str(log_dir.absolute())}/junifer_collect.log
output = {str(log_dir.absolute())}/junifer_collect.out
error = {str(log_dir.absolute())}/junifer_collect.err
log = {log_dir.absolute()!s}/junifer_collect.log
output = {log_dir.absolute()!s}/junifer_collect.out
error = {log_dir.absolute()!s}/junifer_collect.err
"""

# Now create the collect submit file
@@ -535,7 +535,7 @@ def _queue_condor(
subprocess.run(["condor_submit_dag", dag_fname])
logger.info("HTCondor job submitted")
else:
cmd = f"condor_submit_dag {str(dag_fname.absolute())}"
cmd = f"condor_submit_dag {dag_fname.absolute()!s}"
logger.info(
f"HTCondor job files created, to submit the job, run `{cmd}`"
)
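Note: most of the functions.py hunks replace `f"{str(x)}"` with the explicit conversion flag `f"{x!s}"`. Both spellings produce identical strings; ruff simply prefers the conversion flag over a redundant `str()` call inside an f-string. A minimal sketch of the equivalence, using a made-up job directory:

```python
from pathlib import Path

# Hypothetical job directory, standing in for the paths used in functions.py.
jobdir = Path("junifer_jobs") / "my_job"

# Both lines call str() on the Path; the !s conversion flag just makes the
# conversion explicit inside the f-string instead of wrapping the call.
old_style = f"Creating job in {str(jobdir.absolute())}"
new_style = f"Creating job in {jobdir.absolute()!s}"

assert old_style == new_style
```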
4 changes: 2 additions & 2 deletions junifer/api/parser.py
@@ -32,10 +32,10 @@ def parse_yaml(filepath: Union[str, Path]) -> Dict:
if not isinstance(filepath, Path):
filepath = Path(filepath)

logger.info(f"Parsing yaml file: {str(filepath.absolute())}")
logger.info(f"Parsing yaml file: {filepath.absolute()!s}")
# Filepath existence check
if not filepath.exists():
raise_error(f"File does not exist: {str(filepath.absolute())}")
raise_error(f"File does not exist: {filepath.absolute()!s}")
# Filepath reading
contents = yaml.load(filepath)
if "elements" in contents:
6 changes: 2 additions & 4 deletions junifer/api/tests/test_api_utils.py
@@ -32,7 +32,7 @@ def test_get_python_information() -> None:
def test_get_dependency_information_short() -> None:
"""Test short version of _get_dependency_information()."""
dependency_information = _get_dependency_information(long_=False)
assert [key for key in dependency_information.keys()] == [
assert list(dependency_information.keys()) == [
"click",
"numpy",
"datalad",
@@ -47,9 +47,7 @@ def test_get_dependency_information_short() -> None:
def test_get_dependency_information_long() -> None:
"""Test long version of _get_dependency_information()."""
dependency_information = _get_dependency_information(long_=True)
dependency_information_keys = [
key for key in dependency_information.keys()
]
dependency_information_keys = list(dependency_information.keys())
for key in [
"click",
"numpy",
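Note: these test hunks swap `[key for key in d.keys()]` for `list(d.keys())`, the rewrite that the flake8-comprehensions-style checks in ruff suggest when a comprehension only copies an iterable. A small sketch with placeholder version strings:

```python
# Hypothetical package versions, standing in for _get_dependency_information().
dependency_information = {"click": "8.1.3", "numpy": "1.24.0", "datalad": "0.18.0"}

# A comprehension that only iterates and collects keys is an identity copy;
# list() says the same thing directly, which is what the test now uses.
via_comprehension = [key for key in dependency_information.keys()]
via_list = list(dependency_information.keys())

assert via_comprehension == via_list == ["click", "numpy", "datalad"]
```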
8 changes: 4 additions & 4 deletions junifer/api/tests/test_functions.py
@@ -718,7 +718,7 @@ def test_queue_condor_assets_generation(
# Check junifer run submit file
assert run_submit_file_path.is_file()
# Read run submit file to check if resources are correct
with open(run_submit_file_path, "r") as f:
with open(run_submit_file_path) as f:
for line in f.read().splitlines():
if "request_cpus" in line:
assert int(line.split("=")[1].strip()) == cpus
@@ -744,7 +744,7 @@ def test_queue_condor_assets_generation(
element_count = 0
has_collect_job = False
has_final_collect_job = False
with open(dag_file_path, "r") as f:
with open(dag_file_path) as f:
for line in f.read().splitlines():
if "JOB" in line:
element_count += 1
@@ -812,7 +812,7 @@ def test_queue_condor_extra_preamble(
run_submit_file_path = Path(
tmp_path / "junifer_jobs" / jobname / f"run_{jobname}.submit"
)
with open(run_submit_file_path, "r") as f:
with open(run_submit_file_path) as f:
for line in f.read().splitlines():
if "FOO" in line:
assert line.strip() == extra_preamble
@@ -821,7 +821,7 @@ def test_queue_condor_extra_preamble(
collect_submit_file_path = Path(
tmp_path / "junifer_jobs" / jobname / f"collect_{jobname}.submit"
)
with open(collect_submit_file_path, "r") as f:
with open(collect_submit_file_path) as f:
for line in f.read().splitlines():
if "FOO" in line:
assert line.strip() == extra_preamble
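Note: these hunks drop the explicit `"r"` mode from `open()`, since reading is already the default; ruff's pyupgrade-derived checks flag the redundant argument. A short sketch, with a made-up submit file standing in for the ones the tests generate:

```python
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    # Hypothetical submit file, standing in for the HTCondor files in the tests.
    submit_file = Path(tmp) / "run_job.submit"
    submit_file.write_text("request_cpus = 8\n")

    # "r" is the default mode of open(), so both calls behave identically;
    # the second, shorter form is what the linted tests now use.
    with open(submit_file, "r") as f:
        explicit = f.read()
    with open(submit_file) as f:
        implicit = f.read()

    assert explicit == implicit
```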
2 changes: 1 addition & 1 deletion junifer/data/coordinates.py
@@ -143,7 +143,7 @@ def load_coordinates(name: str) -> Tuple[ArrayLike, List[str]]:
if isinstance(t_coord, Path):
df_coords = pd.read_csv(t_coord, sep="\t", header=None)
coords = df_coords.iloc[:, [0, 1, 2]].to_numpy()
names = [x for x in df_coords.iloc[:, [3]].values[:, 0]]
names = list(df_coords.iloc[:, [3]].values[:, 0])
else:
coords = t_coord["coords"]
coords = typing.cast(ArrayLike, coords)
17 changes: 12 additions & 5 deletions junifer/data/parcellations.py
@@ -103,6 +103,7 @@ def register_parcellation(
ValueError
If the parcellation name is already registered and overwrite is set to
False or if the parcellation name is a built-in parcellation.

"""
# Check for attempt of overwriting built-in parcellations
if name in _available_parcellations:
@@ -139,6 +140,7 @@ def list_parcellations() -> List[str]:
-------
list of str
A list with all available parcellations.

"""
return sorted(_available_parcellations.keys())

@@ -187,6 +189,7 @@ def load_parcellation(
Parcellation labels.
pathlib.Path
File path to the parcellation image.

"""
# Invalid parcellation name
if name not in _available_parcellations:
@@ -209,7 +212,7 @@
**parcellation_definition,
)

logger.info(f"Loading parcellation {str(parcellation_fname.absolute())}")
logger.info(f"Loading parcellation {parcellation_fname.absolute()!s}")

parcellation_img = None
if path_only is False:
@@ -254,12 +257,12 @@ def _retrieve_parcellation(
available, the closest resolution will be loaded. Preferably, use a
resolution higher than the desired one. By default, will load the
highest one (default None).
**kwargs
Use to specify parcellation-specific keyword arguments found in the
following section.

Other Parameters
----------------
**kwargs
Use to specify parcellation-specific keyword arguments:

* Schaefer :
``n_rois`` : {100, 200, 300, 400, 500, 600, 700, 800, 900, 1000}
Granularity of parcellation to be used.
@@ -289,6 +292,7 @@
------
ValueError
If the parcellation's name is invalid.

"""
if parcellations_dir is None:
parcellations_dir = (
@@ -364,6 +368,7 @@ def _retrieve_schaefer(
ValueError
If invalid value is provided for `n_rois` or `yeo_networks` or if
there is a problem fetching the parcellation.

"""
logger.info("Parcellation parameters:")
logger.info(f"\tn_rois: {n_rois}")
@@ -471,6 +476,7 @@ def _retrieve_tian(
ValueError
If invalid value is provided for `scale` or `magneticfield` or `space`
or if there is a problem fetching the parcellation.

"""
# show parameters to user
logger.info("Parcellation parameters:")
@@ -619,7 +625,7 @@ def _retrieve_suit(
see http://www.diedrichsenlab.org/imaging/suit.htm).

Returns
------
-------
pathlib.Path
File path to the parcellation image.
list of str
Expand All @@ -630,6 +636,7 @@ def _retrieve_suit(
ValueError
If invalid value is provided for `space` or if there is a problem
fetching the parcellation.

"""
logger.info("Parcellation parameters:")
logger.info(f"\tspace: {space}")
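Note: besides inserting a blank line before each closing `"""`, the parcellations.py hunks move the `**kwargs` description under the numpydoc "Other Parameters" section and lengthen the `Returns` underline to match its heading. A minimal sketch of the layout those docstring checks accept, with hypothetical parameters and return value:

```python
def _retrieve_example(resolution=None, **kwargs):
    """Retrieve a parcellation (illustrative placeholder only).

    Parameters
    ----------
    resolution : float, optional
        Desired resolution in mm (default None).

    Other Parameters
    ----------------
    **kwargs
        Parcellation-specific keyword arguments, e.g. ``n_rois``.

    Returns
    -------
    dict
        Metadata describing the requested parcellation.

    """
    # Placeholder body; the real retrieval functions download and load files.
    return {"resolution": resolution, **kwargs}
```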
2 changes: 1 addition & 1 deletion junifer/data/tests/test_parcellations.py
@@ -94,7 +94,7 @@ def test_parcellation_wrong_labels_values(tmp_path: Path) -> None:
load_parcellation("WrongLabels")

# Test wrong number of labels
register_parcellation("WrongLabels2", schaefer_path, labels + ["wrong"])
register_parcellation("WrongLabels2", schaefer_path, [*labels, "wrong"])

with pytest.raises(ValueError, match=r"has 100 parcels but 101"):
load_parcellation("WrongLabels2")
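Note: the final hunk rewrites `labels + ["wrong"]` as `[*labels, "wrong"]`, using iterable unpacking instead of list concatenation as ruff suggests. Both build a new list and leave `labels` untouched, which is what the test relies on. A sketch with placeholder label names:

```python
# Placeholder label names, standing in for the Schaefer parcellation labels.
labels = [f"ROI_{i}" for i in range(1, 101)]

# Concatenation and iterable unpacking build the same new list, and neither
# mutates the original `labels`.
concatenated = labels + ["wrong"]
unpacked = [*labels, "wrong"]

assert concatenated == unpacked
assert len(labels) == 100
```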