Skip to content

Commit

Permalink
Pre-commit: move yapf and mypy config to pyproject.toml
Browse files Browse the repository at this point in the history
The libraries `yapf` and `mypy` support the `pyproject.toml` file for
their configuration starting from v0.31 and v0.9, respectively.

Co-authored-by: Jason Eu <morty.yu@yahoo.com>
  • Loading branch information
sphuber and unkcpz committed Jul 27, 2021
1 parent e1abe0a commit c342428
Show file tree
Hide file tree
Showing 19 changed files with 86 additions and 105 deletions.
3 changes: 2 additions & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ repos:
]

- repo: https://github.com/pre-commit/mirrors-yapf
rev: v0.30.0
rev: v0.31.0
hooks:
- id: yapf
name: yapf
Expand All @@ -28,6 +28,7 @@ repos:
docs/.*|
)$
args: ['-i']
additional_dependencies: ['toml']

- repo: local

Expand Down
8 changes: 0 additions & 8 deletions .style.yapf

This file was deleted.

2 changes: 1 addition & 1 deletion aiida/engine/daemon/execmanager.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def upload_calculation(
link_label = 'remote_folder'
if node.get_outgoing(RemoteData, link_label_filter=link_label).first():
EXEC_LOGGER.warning(f'CalcJobNode<{node.pk}> already has a `{link_label}` output: skipping upload')
return calc_info
return

computer = node.computer

Expand Down
2 changes: 1 addition & 1 deletion aiida/engine/processes/calcjobs/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,7 +263,7 @@ class JobManager:

def __init__(self, transport_queue: 'TransportQueue') -> None:
self._transport_queue = transport_queue
self._job_lists: Dict[Hashable, 'JobInfo'] = {}
self._job_lists: Dict[Hashable, 'JobsList'] = {}

def get_jobs_list(self, authinfo: AuthInfo) -> JobsList:
"""Get or create a new `JobLists` instance for the given authinfo.
Expand Down
11 changes: 8 additions & 3 deletions aiida/manage/configuration/profile.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

if TYPE_CHECKING:
from aiida.repository import Repository # pylint: disable=ungrouped-imports
from aiida.repository.backend import DiskObjectStoreRepositoryBackend

__all__ = ('Profile',)

Expand Down Expand Up @@ -130,12 +131,16 @@ def __init__(self, name, attributes, from_config=False):

def get_repository(self) -> 'Repository':
"""Return the repository configured for this profile."""
from disk_objectstore import Container
from aiida.repository import Repository
backend = self.get_repository_backend()
return Repository(backend=backend)

def get_repository_backend(self) -> 'DiskObjectStoreRepositoryBackend':
"""Return the backend of the repository configured for this profile."""
from disk_objectstore import Container
from aiida.repository.backend import DiskObjectStoreRepositoryBackend
container = Container(self.repository_path / 'container')
backend = DiskObjectStoreRepositoryBackend(container=container)
return Repository(backend=backend)
return DiskObjectStoreRepositoryBackend(container=container)

@property
def uuid(self):
Expand Down
18 changes: 12 additions & 6 deletions aiida/manage/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,6 +158,8 @@ def _load_backend(self, schema_check: bool = True, repository_check: bool = True
elif backend_type == BACKEND_SQLA:
from aiida.orm.implementation.sqlalchemy.backend import SqlaBackend
self._backend = SqlaBackend()
else:
raise AssertionError('Could not load the backend.')

# Reconfigure the logging with `with_orm=True` to make sure that profile specific logging configuration options
# are taken into account and the `DbLogHandler` is configured.
Expand Down Expand Up @@ -206,7 +208,7 @@ def get_backend(self) -> 'Backend':
"""
if self._backend is None:
self._load_backend()
return self._load_backend()

return self._backend

Expand Down Expand Up @@ -269,7 +271,7 @@ def create_communicator(
# used by verdi status to get a communicator without needing to load the dbenv
from aiida.common import json
encoder = functools.partial(json.dumps, encoding='utf-8')
decoder = json.loads
decoder = json.loads # type: ignore[assignment]

return kiwipy.rmq.RmqThreadCommunicator.connect(
connection_params={'url': profile.get_rmq_url()},
Expand All @@ -289,14 +291,18 @@ def get_daemon_client(self) -> 'DaemonClient':
"""Return the daemon client for the current profile.
:return: the daemon client
:raises aiida.common.MissingConfigurationError: if the configuration file cannot be found
:raises aiida.common.ProfileConfigurationError: if the given profile does not exist
"""
from aiida.common import ConfigurationError
from aiida.engine.daemon.client import DaemonClient

profile = self.get_profile()
if profile is None:
raise ConfigurationError(
'Could not determine the current profile. Consider loading a profile using `aiida.load_profile()`.'
)

if self._daemon_client is None:
self._daemon_client = DaemonClient(self.get_profile())
self._daemon_client = DaemonClient(profile)

return self._daemon_client

Expand Down
11 changes: 6 additions & 5 deletions aiida/orm/nodes/node.py
Original file line number Diff line number Diff line change
Expand Up @@ -207,7 +207,7 @@ def validate_storability(self) -> None:
def class_node_type(cls) -> str:
"""Returns the node type of this node (sub) class."""
# pylint: disable=no-self-argument,no-member
return cls._plugin_type_string
return cls._plugin_type_string # type: ignore[attr-defined]

@property
def logger(self) -> Optional[Logger]:
Expand Down Expand Up @@ -324,7 +324,7 @@ def computer(self, computer: Optional[Computer]) -> None:
if computer is not None:
computer = computer.backend_entity

self.backend_entity.computer = computer
self.backend_entity.computer = computer  # type: ignore[misc] # See: https://github.com/python/mypy/issues/4165

@property
def user(self) -> User:
Expand All @@ -345,7 +345,8 @@ def user(self, user: User) -> None:
raise exceptions.ModificationNotAllowed('cannot set the user on a stored node')

type_check(user, User)
self.backend_entity.user = user.backend_entity
# See: https://github.com/python/mypy/issues/4165
self.backend_entity.user = user.backend_entity # type: ignore[misc]

@property
def ctime(self) -> datetime.datetime:
Expand Down Expand Up @@ -507,7 +508,7 @@ def get_stored_link_triples(
:param link_direction: `incoming` or `outgoing` to get the incoming or outgoing links, respectively.
:param only_uuid: project only the node UUID instead of the instance onto the `NodeTriple.node` entries
"""
if not isinstance(link_type, tuple):
if isinstance(link_type, LinkType):
link_type = (link_type,)

if link_type and not all([isinstance(t, LinkType) for t in link_type]):
Expand Down Expand Up @@ -565,7 +566,7 @@ def get_incoming(
"""
assert self._incoming_cache is not None, 'incoming_cache not initialised'

if not isinstance(link_type, tuple):
if isinstance(link_type, LinkType):
link_type = (link_type,)

if self.is_stored:
Expand Down
2 changes: 1 addition & 1 deletion aiida/tools/graph/graph_traversers.py
Original file line number Diff line number Diff line change
Expand Up @@ -250,7 +250,7 @@ def traverse_graph(
elif missing_pks and missing_callback is not None:
missing_callback(missing_pks)

rules = []
rules = [] # type: List[UpdateRule|RuleSaveWalkers|RuleSetWalkers]
basket = Basket(nodes=existing_pks)

# When max_iterations is finite, the order of traversal may affect the result
Expand Down
4 changes: 2 additions & 2 deletions aiida/tools/groups/paths.py
Original file line number Diff line number Diff line change
Expand Up @@ -184,7 +184,7 @@ def is_virtual(self) -> bool:

def get_or_create_group(self) -> Tuple[orm.Group, bool]:
"""Return the concrete group associated with this path or, create it, if it does not already exist."""
return self.cls.objects.get_or_create(label=self.path)
return self.cls.objects.get_or_create(label=self.path) # type: ignore[attr-defined]

def delete_group(self):
"""Delete the concrete group associated with this path.
Expand All @@ -196,7 +196,7 @@ def delete_group(self):
raise GroupNotFoundError(self)
if len(ids) > 1:
raise GroupNotUniqueError(self)
self.cls.objects.delete(ids[0])
self.cls.objects.delete(ids[0]) # type: ignore[attr-defined]

@property
def children(self) -> Iterator['GroupPath']:
Expand Down
2 changes: 1 addition & 1 deletion aiida/tools/importexport/archive/migrators.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def get_migrator(file_format: str) -> Type['ArchiveMigratorAbstract']:
f'Can only migrate in the formats: {tuple(migrators.keys())}, please specify one for "file_format".'
)

return cast(Type[ArchiveMigratorAbstract], migrators[file_format])
return cast(Type[ArchiveMigratorAbstract], migrators[file_format]) # type: ignore[index]


class ArchiveMigratorAbstract(ABC):
Expand Down
6 changes: 4 additions & 2 deletions aiida/tools/importexport/common/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
###########################################################################
# pylint: disable=invalid-name
""" Configuration file for AiiDA Import/Export module """
from typing import Dict
from enum import Enum
from aiida.orm import Computer, Group, Node, User, Log, Comment

Expand Down Expand Up @@ -105,7 +106,8 @@ class ExportFileFormat(str, Enum):
'dbnode': 'dbnode_id',
'user': 'user_id'
}
}
} # type: Dict[str, Dict[str, str]]

# As above but the opposite procedure
model_fields_to_file_fields = {
NODE_ENTITY_NAME: {
Expand All @@ -124,7 +126,7 @@ class ExportFileFormat(str, Enum):
'dbnode_id': 'dbnode',
'user_id': 'user'
}
}
} # type: Dict[str, Dict[str, str]]


def get_all_fields_info():
Expand Down
2 changes: 1 addition & 1 deletion aiida/tools/importexport/dbexport/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -586,7 +586,7 @@ def _write_node_repositories(

profile = get_manager().get_profile()
assert profile is not None, 'profile not loaded'
container_profile = profile.get_repository().backend.container
container_profile = profile.get_repository_backend().container

# This should be done more effectively, starting by not having to load the node. Either the repository
# metadata should be collected earlier when the nodes themselves are already exported or a single separate
Expand Down
6 changes: 3 additions & 3 deletions aiida/tools/importexport/dbimport/backends/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def _copy_node_repositories(*, repository_metadatas: List[Dict], reader: Archive

profile = get_manager().get_profile()
assert profile is not None, 'profile not loaded'
container_profile = profile.get_repository().backend.container
container_profile = profile.get_repository_backend().container

def collect_hashkeys(objects, hashkeys):
for obj in objects.values():
Expand All @@ -66,7 +66,7 @@ def collect_hashkeys(objects, hashkeys):
container_export.export(set(hashkeys), container_profile, compress=True, callback=callback)


def _make_import_group(*, group: Optional[ImportGroup], node_pks: List[int]) -> ImportGroup:
def _make_import_group(*, group: Optional[Group], node_pks: List[int]) -> Optional[Group]:
"""Make an import group containing all imported nodes.
:param group: Use an existing group
Expand All @@ -79,7 +79,7 @@ def _make_import_group(*, group: Optional[ImportGroup], node_pks: List[int]) ->
return group

# If user specified a group, import all things into it
if not group:
if group is None:
# Get an unique name for the import group, based on the current (local) time
basename = timezone.localtime(timezone.now()).strftime('%Y%m%d-%H%M%S')
counter = 0
Expand Down
1 change: 1 addition & 0 deletions docs/source/nitpick-exceptions
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ py:class ProcessSpec
py:class Port
py:class PortNamespace
py:class Repository
py:class DiskObjectStoreRepositoryBackend
py:class Runner
py:class Transport
py:class TransportQueue
Expand Down
67 changes: 0 additions & 67 deletions mypy.ini

This file was deleted.

40 changes: 40 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,46 @@ markers = [
"sphinx: set parameters for the sphinx `app` fixture"
]

[tool.yapf]
based_on_style = "google"
column_limit = 120
dedent_closing_brackets = true
coalesce_brackets = true
align_closing_bracket_with_visual_indent = true
split_arguments_when_comma_terminated = true
indent_dictionary_value = false

[tool.mypy]
show_error_codes = true
check_untyped_defs = true
scripts_are_modules = true
warn_unused_ignores = true
warn_redundant_casts = true

[[tool.mypy.overrides]]
module = [
'tqdm.*',
'circus.*',
'disk_objectstore.*',
'django.*',
'sqlalchemy.*',
'numpy.*',
'kiwipy.*',
'scipy.*',
'wrapt.*',
'ruamel.*',
'pymatgen',
]
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = 'aiida'
follow_imports = "skip"

[[tool.mypy.overrides]]
module = 'tests'
check_untyped_defs = false

[tool.tox]
legacy_tox_ini = """
[tox]
Expand Down
2 changes: 1 addition & 1 deletion setup.json
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@
],
"pre-commit": [
"astroid<2.5",
"mypy==0.790",
"mypy==0.910",
"packaging==20.3",
"pre-commit~=2.2",
"pylint~=2.5.0",
Expand Down
Loading

0 comments on commit c342428

Please sign in to comment.