feature/configure ruff for faster linting #32

Merged
2 commits, merged on Oct 16, 2023

2 changes: 0 additions & 2 deletions .bandit

This file was deleted.

3 changes: 3 additions & 0 deletions .gitignore
@@ -97,6 +97,9 @@ venv.bak/
.dmypy.json
dmypy.json

# ruff
.ruff_cache

# PyCharm
.idea/
.idea_modules/
3 changes: 0 additions & 3 deletions .isort.cfg

This file was deleted.

41 changes: 12 additions & 29 deletions .pre-commit-config.yaml
@@ -2,45 +2,28 @@ fail_fast: false
default_language_version:
python: python3.11
repos:
- repo: https://github.com/pycqa/isort
rev: 5.12.0
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.0.292
hooks:
- id: isort
- repo: https://github.com/myint/autoflake
rev: v2.2.1
hooks:
- id: autoflake
args: [--in-place, --remove-all-unused-imports]
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- repo: https://github.com/psf/black
rev: 23.7.0
rev: 23.9.1
hooks:
- id: black
- repo: https://github.com/PyCQA/bandit
rev: 1.7.5
hooks:
- id: bandit
args: [--silent, --ini=.bandit]
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v4.5.0
hooks:
- id: pretty-format-json
name: json
args: [--autofix]
- id: end-of-file-fixer
- id: debug-statements
exclude: "cli\\.py$"
- repo: https://github.com/PyCQA/pydocstyle
rev: 6.3.0
hooks:
- id: pydocstyle
pass_filenames: true
additional_dependencies: [".[toml]"]
exclude: "test_"
- repo: https://github.com/python-poetry/poetry
rev: 1.6.1
rev: 1.6.0
hooks:
- id: poetry-check
name: poetry
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.5.1
rev: v1.6.0
hooks:
- id: mypy
name: mypy
@@ -50,6 +33,6 @@ repos:
- "click>=8.1.7,<9"
- "pydantic[dotenv,email]>=1.10.12,<2"
- "pytest>=7.4.2,<8"
- "types-pytz>=2023.3.0.1,<2024"
- "types-requests>=2.31.0.2,<3"
- "types-pytz>=2023.3.1.1,<2024"
- "types-requests>=2.31.0.8,<3"
- "types-setuptools>=68.2.0.0,<69"
10 changes: 0 additions & 10 deletions .pydocstyle

This file was deleted.

4 changes: 2 additions & 2 deletions mex/common/cli.py
@@ -1,5 +1,5 @@
import json
import pdb
import pdb # noqa: T100
import sys
from bdb import BdbQuit
from enum import Enum
@@ -76,7 +76,7 @@ def field_to_option(field: ModelField) -> Option:
return Option(
field_to_parameters(field),
default=default,
envvar=list(field.field_info.extra["env_names"])[0].upper(),
envvar=next(iter(field.field_info.extra["env_names"])).upper(),
help=field.field_info.description,
is_flag=field.type_ is bool and field.default is False,
show_default=True,
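
A note on the `envvar` line above: `list(...)[0]` materializes a whole list just to read one element, a pattern ruff can flag (RUF015-style), while `next(iter(...))` reads the first element directly. A minimal sketch with a stand-in value:

```python
# Minimal sketch with a stand-in for the pydantic `field_info.extra["env_names"]`
# set that `field_to_option` actually reads; the value here is hypothetical.
env_names = {"mex_debug"}

# Old style: builds a throwaway list just to take element 0.
old = list(env_names)[0].upper()
# New style: takes the first element straight from the iterator.
new = next(iter(env_names)).upper()

assert old == new == "MEX_DEBUG"
```
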
1 change: 1 addition & 0 deletions mex/common/connector/http.py
@@ -60,6 +60,7 @@ def request(
method: HTTP method to use
endpoint: Path to API endpoint to be prefixed with host and version
payload: Data to be serialized as JSON using the `MExEncoder`
params: Dictionary to be sent in the query string of the request
kwargs: Further keyword arguments passed to `requests`

Raises:
2 changes: 1 addition & 1 deletion mex/common/extract.py
@@ -44,7 +44,7 @@ def parse_csv(
Args:
path_or_buffer: Location of CSV file or read buffer with CSV content
into: Type of model to parse
chunkssize: Buffer size for chunked reading
chunksize: Buffer size for chunked reading
kwargs: Additional keywords arguments for pandas

Returns:
14 changes: 8 additions & 6 deletions mex/common/ldap/connector.py
@@ -89,7 +89,7 @@ def _paged_ldap_search(
return list(entries)

def get_functional_accounts(
self, mail: str = "*", sAMAccountName: str = "*", **filters: str
self, mail: str = "*", sAMAccountName: str = "*", **filters: str # noqa: N803
) -> Generator[LDAPActor, None, None]:
"""Get LDAP functional accounts that match provided filters.

@@ -145,7 +145,7 @@ def get_persons(
)

def get_units(
self, sAMAccountName: str = "*", mail: str = "*", **filters: str
self, sAMAccountName: str = "*", mail: str = "*", **filters: str # noqa: N803
) -> Generator[LDAPUnit, None, None]:
"""Get LDAP units that match the provided filters.

@@ -166,7 +166,7 @@ def get_units(
)

def get_functional_account(
self, objectGUID: str = "*", **filters: str
self, objectGUID: str = "*", **filters: str # noqa: N803
) -> LDAPActor:
"""Get a single LDAP functional account for the given filters.

@@ -188,16 +188,18 @@ def get_functional_account(
)
if not functional_accounts:
raise EmptySearchResultError(
f"Cannot find AD functional account for filters 'objectGUID: {objectGUID}, {filters}'"
"Cannot find AD functional account for filters "
f"'objectGUID: {objectGUID}, {filters}'"
)
if len(functional_accounts) > 1:
raise FoundMoreThanOneError(
f"Found multiple AD functional accounts for filters 'objectGUID: {objectGUID}, {filters}'"
"Found multiple AD functional accounts for filters "
f"'objectGUID: {objectGUID}, {filters}'"
)
return functional_accounts[0]

def get_person(
self, objectGUID: str = "*", employeeID: str = "*", **filters: str
self, objectGUID: str = "*", employeeID: str = "*", **filters: str # noqa: N803
) -> LDAPPerson:
"""Get a single LDAP person for the given filters.

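
The `# noqa: N803` markers silence pep8-naming's argument-name rule because parameters such as `sAMAccountName` and `objectGUID` intentionally mirror the LDAP attributes they are forwarded as. For contrast, a hypothetical alternative design (the mapping and helper below are assumptions, not part of this codebase):

```python
# Hypothetical alternative (not what this PR does): accept snake_case keywords
# and translate them to LDAP attribute names, instead of suppressing N803.
LDAP_ATTRIBUTE_MAP = {
    "sam_account_name": "sAMAccountName",
    "object_guid": "objectGUID",
    "employee_id": "employeeID",
}


def to_ldap_filters(**filters: str) -> dict[str, str]:
    """Translate snake_case keyword arguments to LDAP attribute names."""
    return {LDAP_ATTRIBUTE_MAP.get(key, key): value for key, value in filters.items()}


assert to_ldap_filters(object_guid="*", mail="*") == {"objectGUID": "*", "mail": "*"}
```
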
4 changes: 2 additions & 2 deletions mex/common/models/base.py
@@ -40,7 +40,7 @@ def transform(value: Any) -> str:

try:
value = super().__getitem__(key)
except:
except: # noqa: E722
value = None
return transform(value)

@@ -145,7 +145,7 @@ def fix_listyness(cls, values: ModelValuesT) -> ModelValuesT:

def checksum(self) -> str:
"""Calculate md5 checksum for this model."""
return hashlib.md5(pickle.dumps(self)).hexdigest() # nosec
return hashlib.md5(pickle.dumps(self)).hexdigest() # noqa: S324

def __str__(self) -> str:
"""Format this model as a string for logging."""
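
On the `checksum` change: `# nosec` was bandit's suppression and `# noqa: S324` is the ruff equivalent, since MD5 is used here only as a fingerprint, not for security. A hedged alternative, assuming Python 3.9+, is to pass `usedforsecurity=False`, which bandit-style checks generally honor and which documents the intent without a comment:

```python
# Sketch of an alternative to the suppression, assuming Python 3.9+ where
# hashlib accepts `usedforsecurity`; it states the non-security intent directly.
import hashlib
import pickle


def checksum(obj: object) -> str:
    """Calculate an md5 fingerprint for a picklable object."""
    return hashlib.md5(pickle.dumps(obj), usedforsecurity=False).hexdigest()


print(checksum({"label": "example"}))
```
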
2 changes: 1 addition & 1 deletion mex/common/organigram/transform.py
@@ -40,7 +40,7 @@ def transform_organigram_units_to_organizational_units(

for extracted_unit in extracted_unit_by_id_in_primary_source.values():
identifier_in_primary_source = extracted_unit.identifierInPrimarySource
if parent_identifier_in_primary_source := parent_id_in_primary_source_by_id_in_primary_source.get(
if parent_identifier_in_primary_source := parent_id_in_primary_source_by_id_in_primary_source.get( # noqa: E501
identifier_in_primary_source
):
if parent_unit := extracted_unit_by_id_in_primary_source.get(
5 changes: 2 additions & 3 deletions mex/common/public_api/connector.py
@@ -97,7 +97,7 @@ def echo_job_logs(self, job_id: str) -> None:
def wait_for_job(self, job_id: str) -> str:
"""Poll the status for this `job_id` until it is no longer 'RUNNING'."""
response = self.request("GET", f"jobs/{job_id}")
return response.get("status", "NONE")
return str(response.get("status", "NONE"))

def get_job_items(self, job_id: str) -> Generator[Identifier, None, None]:
"""Get the identifiers of the items created, updated or deleted during a job.
@@ -217,8 +217,7 @@ def get_item(self, identifier: Identifier | UUID) -> PublicApiItem | None:
) or error.response.status_code == 404:
return None
# Re-raise any unexpected errors
else:
raise error
raise error
else:
return PublicApiItem.parse_obj(response)

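
Two things happen in this file: `wait_for_job` now casts the status with `str(...)` so the annotated return type holds even for unexpected payloads, and `get_item` drops an `else:` after a branch that already returns, the pattern flake8-return style rules (RET505) prefer. A minimal sketch of both, with hypothetical names:

```python
# Minimal sketch with hypothetical names: the first branch returns, so no
# `else:` is needed afterwards, and the raise path stays un-nested as well.
def extract_status(response: dict[str, object]) -> str | None:
    """Return the status as a string, or None for a 404-style miss."""
    if response.get("error") == "not_found":
        return None
    if "status" not in response:
        raise KeyError("status")
    return str(response["status"])  # mirrors the explicit str(...) in wait_for_job


assert extract_status({"status": 200}) == "200"
assert extract_status({"error": "not_found"}) is None
```
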
4 changes: 2 additions & 2 deletions mex/common/public_api/models.py
@@ -71,7 +71,7 @@ class PublicApiItem(PublicApiBaseModel):
values: list[PublicApiField] = Field(..., include=True)

@property
def stableTargetId(self) -> Identifier:
def stableTargetId(self) -> Identifier: # noqa: N802
"""Return the stableTargetId of this item."""
return Identifier(self.businessId.removesuffix("#"))

@@ -100,7 +100,7 @@ class PublicApiItemWithoutValues(PublicApiBaseModel):
businessId: str

@property
def stableTargetId(self) -> Identifier:
def stableTargetId(self) -> Identifier: # noqa: N802
"""Return the stableTargetId of this item."""
return Identifier(self.businessId.removesuffix("#"))

7 changes: 3 additions & 4 deletions mex/common/settings.py
@@ -4,9 +4,8 @@
from pathlib import Path
from typing import Any, Optional, TypeVar, Union

from pydantic import AnyUrl
from pydantic import AnyUrl, Extra, Field, SecretStr
from pydantic import BaseSettings as PydanticBaseSettings
from pydantic import Extra, Field, SecretStr
from pydantic.env_settings import DotenvType, env_file_sentinel
from pydantic.typing import StrPath

@@ -191,8 +190,8 @@ def text(self) -> str:
return "\n".join(
[
f"{key.ljust(indent)} "
f"{', '.join(str(v) for v in value) if isinstance(value, list) else value}"
for key, value in dict_.items()
f"{', '.join(str(v) for v in val) if isinstance(val, list) else val}"
for key, val in dict_.items()
]
)

3 changes: 1 addition & 2 deletions mex/common/types/link.py
@@ -75,5 +75,4 @@ def __str__(self) -> str:
title = markdown_escape(title)
url = markdown_escape(self.url)
return f"[{title}]({url})"
else:
return self.url
return self.url
2 changes: 1 addition & 1 deletion mex/common/types/resolved_path.py
@@ -54,7 +54,7 @@ def resolve(self) -> Path:
decide what it is relative to. We read the settings from the current
`SettingsContext` and pick a base path from there.
"""
from mex.common.settings import SettingsContext # noqa
from mex.common.settings import SettingsContext

if self._path.is_absolute():
return self._path
4 changes: 2 additions & 2 deletions mex/common/types/timestamp.py
@@ -71,15 +71,15 @@ def __init__(
self,
*args: Union[str, date, datetime, "Timestamp"],
tzinfo: Literal[None] = None,
) -> None: # noqa: D107
) -> None:
... # pragma: no cover

@overload
def __init__(
self,
*args: int,
tzinfo: Optional[tzinfo] = None,
) -> None: # noqa: D107
) -> None:
... # pragma: no cover

def __init__(
2 changes: 1 addition & 1 deletion mex/common/utils.py
@@ -49,4 +49,4 @@ def jitter_sleep(min_seconds: float, jitter_seconds: float) -> None:
min_seconds: The minimum time to sleep
jitter_seconds: The variable sleep time added to the minimum
"""
sleep(min_seconds + random() * jitter_seconds) # nosec
sleep(min_seconds + random() * jitter_seconds) # noqa: S311
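
`# nosec` was bandit's marker and `# noqa: S311` is ruff's; S311 only warns that the `random` module is not cryptographically secure, which is irrelevant for sleep jitter. If one wanted to avoid the suppression entirely, a hedged alternative sketch:

```python
# Sketch only: an OS-entropy-backed generator would also satisfy bandit-style
# checks, at a small cost; plain `random` plus the noqa is fine for jitter.
from secrets import SystemRandom
from time import sleep

_rng = SystemRandom()


def jitter_sleep(min_seconds: float, jitter_seconds: float) -> None:
    """Sleep for a random duration between min and min + jitter seconds."""
    sleep(min_seconds + _rng.random() * jitter_seconds)


jitter_sleep(0.0, 0.01)
```
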
13 changes: 12 additions & 1 deletion mex/common/wikidata/connector.py
@@ -61,7 +61,18 @@ def get_wikidata_item_details_by_id(self, item_id: str) -> dict[str, str]:
"action": "wbgetentities",
"format": "json",
"ids": item_id,
"props": "info|aliases|labels|descriptions|datatype|claims|sitelinks|sitelinks/urls",
"props": "|".join(
[
"info",
"aliases",
"labels",
"descriptions",
"datatype",
"claims",
"sitelinks",
"sitelinks/urls",
]
),
"formatversion": "2",
}

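
The `props` rewrite is purely mechanical: the pipe-separated string exceeded the line-length limit, so it is now assembled with `"|".join(...)`. A quick equivalence check:

```python
# Quick check that the joined list reproduces the original one-line value; the
# rewrite only exists to satisfy the line-length limit.
props = "|".join(
    [
        "info",
        "aliases",
        "labels",
        "descriptions",
        "datatype",
        "claims",
        "sitelinks",
        "sitelinks/urls",
    ]
)
expected = "info|aliases|labels|descriptions|datatype|claims|sitelinks|sitelinks/urls"
assert props == expected
```
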
2 changes: 1 addition & 1 deletion mex/common/wikidata/extract.py
@@ -43,7 +43,7 @@ def search_organization_by_label(
) from exc
except requests.exceptions.RetryError as exc:
raise MExError(
f"RetryError: Max retries exceeded while processing results for {item_label}"
f"RetryError: Max retries exceeded processing results for {item_label}"
) from exc
except KeyError as exc:
raise MExError(