From 73476232eb9a0e0f2d8fc2452fb2c9b422ae16dc Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Tue, 22 Feb 2022 16:23:49 +0100 Subject: [PATCH 01/26] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20REFACTOR:=20Move=20a?= =?UTF-8?q?rchive=20backend=20to=20`aiida/storage`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- aiida/cmdline/commands/cmd_archive.py | 4 +- aiida/manage/configuration/profile.py | 6 +- aiida/storage/psql_dos/__init__.py | 2 +- aiida/storage/sqlite_zip/__init__.py | 33 ++ .../sqlite => storage/sqlite_zip}/backend.py | 387 +++++++----------- aiida/storage/sqlite_zip/models.py | 119 ++++++ aiida/storage/sqlite_zip/utils.py | 85 ++++ .../archive/implementations/sqlite/common.py | 27 +- .../archive/implementations/sqlite/main.py | 3 +- .../sqlite/migrations/legacy_to_new.py | 3 +- .../archive/implementations/sqlite/reader.py | 58 +-- .../archive/implementations/sqlite/writer.py | 28 +- utils/make_all.py | 2 +- 13 files changed, 419 insertions(+), 338 deletions(-) create mode 100644 aiida/storage/sqlite_zip/__init__.py rename aiida/{tools/archive/implementations/sqlite => storage/sqlite_zip}/backend.py (65%) create mode 100644 aiida/storage/sqlite_zip/models.py create mode 100644 aiida/storage/sqlite_zip/utils.py diff --git a/aiida/cmdline/commands/cmd_archive.py b/aiida/cmdline/commands/cmd_archive.py index 2fdb40f933..2a8af352d1 100644 --- a/aiida/cmdline/commands/cmd_archive.py +++ b/aiida/cmdline/commands/cmd_archive.py @@ -23,6 +23,8 @@ from aiida.cmdline.params import arguments, options from aiida.cmdline.params.types import GroupParamType, PathOrUrl from aiida.cmdline.utils import decorators, echo +from aiida.cmdline.utils.common import get_database_summary +from aiida.common.exceptions import UnreachableStorage from aiida.common.links import GraphTraversalRules from aiida.common.log import AIIDA_LOGGER @@ -54,7 +56,7 @@ def inspect(archive, version, meta_data, database): latest_version = 
archive_format.latest_version try: current_version = archive_format.read_version(archive) - except UnreadableArchiveError as exc: + except (UnreadableArchiveError, UnreachableStorage) as exc: echo.echo_critical(f'archive file of unknown format: {exc}') if version: diff --git a/aiida/manage/configuration/profile.py b/aiida/manage/configuration/profile.py index fc5e9d96b4..a808efc668 100644 --- a/aiida/manage/configuration/profile.py +++ b/aiida/manage/configuration/profile.py @@ -127,9 +127,9 @@ def storage_cls(self) -> Type['StorageBackend']: if self.storage_backend == 'psql_dos': from aiida.storage.psql_dos.backend import PsqlDosBackend return PsqlDosBackend - if self.storage_backend == 'archive.sqlite': - from aiida.tools.archive.implementations.sqlite.backend import ArchiveReadOnlyBackend - return ArchiveReadOnlyBackend + if self.storage_backend == 'sqlite_zip': + from aiida.storage.sqlite_zip.backend import SqliteZipBackend + return SqliteZipBackend raise ValueError(f'unknown storage backend type: {self.storage_backend}') @property diff --git a/aiida/storage/psql_dos/__init__.py b/aiida/storage/psql_dos/__init__.py index eac0048fe9..8bea8e1e03 100644 --- a/aiida/storage/psql_dos/__init__.py +++ b/aiida/storage/psql_dos/__init__.py @@ -7,7 +7,7 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""Module with implementation of the storage backend using SqlAlchemy and the disk-objectstore.""" +"""Module with implementation of the storage backend using PostGreSQL and the disk-objectstore.""" # AUTO-GENERATED diff --git a/aiida/storage/sqlite_zip/__init__.py b/aiida/storage/sqlite_zip/__init__.py new file mode 100644 index 0000000000..85b3587914 --- /dev/null +++ b/aiida/storage/sqlite_zip/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- 
+########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Module with implementation of the storage backend, +using an SQLite database and repository files within a zipfile. + +The content of the zip file is:: + + |- storage.zip + |- metadata.json + |- db.sqlite3 + |- repo/ + |- hashkey1 + |- hashkey2 + ... + +For quick access, the metadata (such as the version) is stored in a `metadata.json` file, +at the "top" of the zip file, with the sqlite database, just below it, then the repository files. +Repository files are named by their SHA256 content hash. + +This storage method is primarily intended for the AiiDA archive, +as a read-only storage method. +This is because sqlite and zip are not suitable for concurrent write access. + +The archive format originally used a JSON file to store the database, +and these revisions are handled by the `version_profile` and `migrate` backend methods. 
+""" diff --git a/aiida/tools/archive/implementations/sqlite/backend.py b/aiida/storage/sqlite_zip/backend.py similarity index 65% rename from aiida/tools/archive/implementations/sqlite/backend.py rename to aiida/storage/sqlite_zip/backend.py index 934dd2bf1b..1482a06edb 100644 --- a/aiida/tools/archive/implementations/sqlite/backend.py +++ b/aiida/storage/sqlite_zip/backend.py @@ -9,7 +9,6 @@ ########################################################################### """The table models are dynamically generated from the sqlalchemy backend models.""" from contextlib import contextmanager -from datetime import datetime from functools import singledispatch from pathlib import Path import tempfile @@ -18,235 +17,25 @@ from zipfile import ZipFile from archive_path import extract_file_in_zip -import pytz -from sqlalchemy import CHAR, Text, orm, types -from sqlalchemy.dialects.postgresql import JSONB, UUID -from sqlalchemy.dialects.sqlite import JSON -from sqlalchemy.sql.schema import Table +from sqlalchemy.orm import Session from aiida.common.exceptions import UnreachableStorage from aiida.manage import Profile from aiida.orm.entities import EntityTypes from aiida.orm.implementation import StorageBackend from aiida.repository.backend.abstract import AbstractRepositoryBackend -# we need to import all models, to ensure they are loaded on the SQLA Metadata -from aiida.storage.psql_dos.models import authinfo, base, comment, computer, group, log, node, user from aiida.storage.psql_dos.orm import authinfos, comments, computers, entities, groups, logs, nodes, users from aiida.storage.psql_dos.orm.querybuilder import SqlaQueryBuilder from aiida.storage.psql_dos.orm.utils import ModelWrapper from aiida.tools.archive.exceptions import ArchiveClosedError, CorruptArchive, ReadOnlyError +from aiida.tools.archive.implementations.sqlite.common import DB_FILENAME, REPO_FOLDER -from .common import DB_FILENAME, REPO_FOLDER, create_sqla_engine - - -class SqliteModel: - """Represent a row 
in an sqlite database table""" - - def __repr__(self) -> str: - """Return a representation of the row columns""" - string = f'<{self.__class__.__name__}' - for col in self.__table__.columns: # type: ignore[attr-defined] # pylint: disable=no-member - # don't include columns with potentially large values - if isinstance(col.type, (JSON, Text)): - continue - string += f' {col.name}={getattr(self, col.name)}' - return string + '>' - - -class TZDateTime(types.TypeDecorator): # pylint: disable=abstract-method - """A timezone naive UTC ``DateTime`` implementation for SQLite. - - see: https://docs.sqlalchemy.org/en/14/core/custom_types.html#store-timezone-aware-timestamps-as-timezone-naive-utc - """ - impl = types.DateTime - cache_ok = True - - def process_bind_param(self, value: Optional[datetime], dialect): - """Process before writing to database.""" - if value is None: - return value - if value.tzinfo is None: - value = value.astimezone(pytz.utc) - value = value.astimezone(pytz.utc).replace(tzinfo=None) - return value - - def process_result_value(self, value: Optional[datetime], dialect): - """Process when returning from database.""" - if value is None: - return value - if value.tzinfo is None: - return value.replace(tzinfo=pytz.utc) - return value.astimezone(pytz.utc) - - -ArchiveDbBase = orm.declarative_base(cls=SqliteModel, name='SqliteModel') - - -def pg_to_sqlite(pg_table: Table): - """Convert a model intended for PostGreSQL to one compatible with SQLite""" - new = pg_table.to_metadata(ArchiveDbBase.metadata) - for column in new.columns: - if isinstance(column.type, UUID): - column.type = CHAR(32) - elif isinstance(column.type, types.DateTime): - column.type = TZDateTime() - elif isinstance(column.type, JSONB): - column.type = JSON() - return new - - -def create_orm_cls(klass: base.Base) -> ArchiveDbBase: - """Create an ORM class from an existing table in the declarative meta""" - tbl = ArchiveDbBase.metadata.tables[klass.__tablename__] - return type( # type: 
ignore[return-value] - klass.__name__, - (ArchiveDbBase,), - { - '__tablename__': tbl.name, - '__table__': tbl, - **{col.name if col.name != 'metadata' else '_metadata': col for col in tbl.columns}, - }, - ) - - -for table in base.Base.metadata.sorted_tables: - pg_to_sqlite(table) - -DbUser = create_orm_cls(user.DbUser) -DbComputer = create_orm_cls(computer.DbComputer) -DbAuthInfo = create_orm_cls(authinfo.DbAuthInfo) -DbGroup = create_orm_cls(group.DbGroup) -DbNode = create_orm_cls(node.DbNode) -DbGroupNodes = create_orm_cls(group.DbGroupNode) -DbComment = create_orm_cls(comment.DbComment) -DbLog = create_orm_cls(log.DbLog) -DbLink = create_orm_cls(node.DbLink) - -# to-do This was the minimum for creating a graph, but really all relationships should be copied -DbNode.dbcomputer = orm.relationship('DbComputer', backref='dbnodes') # type: ignore[attr-defined] -DbGroup.dbnodes = orm.relationship( # type: ignore[attr-defined] - 'DbNode', secondary='db_dbgroup_dbnodes', backref='dbgroups', lazy='dynamic' -) +from . 
import models +from .utils import create_sqla_engine, read_version -class ZipfileBackendRepository(AbstractRepositoryBackend): - """A read-only backend for an open zip file.""" - - def __init__(self, file: ZipFile): - self._zipfile = file - - @property - def zipfile(self) -> ZipFile: - if self._zipfile.fp is None: - raise ArchiveClosedError() - return self._zipfile - - @property - def uuid(self) -> Optional[str]: - return None - - @property - def key_format(self) -> Optional[str]: - return 'sha256' - - def initialise(self, **kwargs) -> None: - pass - - @property - def is_initialised(self) -> bool: - return True - - def erase(self) -> None: - raise ReadOnlyError() - - def _put_object_from_filelike(self, handle: BinaryIO) -> str: - raise ReadOnlyError() - - def has_object(self, key: str) -> bool: - try: - self.zipfile.getinfo(f'{REPO_FOLDER}/{key}') - except KeyError: - return False - return True - - def has_objects(self, keys: List[str]) -> List[bool]: - return [self.has_object(key) for key in keys] - - def list_objects(self) -> Iterable[str]: - for name in self.zipfile.namelist(): - if name.startswith(REPO_FOLDER + '/') and name[len(REPO_FOLDER) + 1:]: - yield name[len(REPO_FOLDER) + 1:] - - @contextmanager - def open(self, key: str) -> Iterator[BinaryIO]: - try: - handle = self.zipfile.open(f'{REPO_FOLDER}/{key}') - yield cast(BinaryIO, handle) - except KeyError: - raise FileNotFoundError(f'object with key `{key}` does not exist.') - finally: - handle.close() - - def iter_object_streams(self, keys: List[str]) -> Iterator[Tuple[str, BinaryIO]]: - for key in keys: - with self.open(key) as handle: # pylint: disable=not-context-manager - yield key, handle - - def delete_objects(self, keys: List[str]) -> None: - raise ReadOnlyError() - - def get_object_hash(self, key: str) -> str: - return key - - def maintain(self, dry_run: bool = False, live: bool = True, **kwargs) -> None: - raise NotImplementedError - - def get_info(self, statistics: bool = False, **kwargs) -> 
dict: - return {'objects': {'count': len(list(self.list_objects()))}} - - -class ArchiveBackendQueryBuilder(SqlaQueryBuilder): - """Archive query builder""" - - @property - def Node(self): - return DbNode - - @property - def Link(self): - return DbLink - - @property - def Computer(self): - return DbComputer - - @property - def User(self): - return DbUser - - @property - def Group(self): - return DbGroup - - @property - def AuthInfo(self): - return DbAuthInfo - - @property - def Comment(self): - return DbComment - - @property - def Log(self): - return DbLog - - @property - def table_groups_nodes(self): - return DbGroupNodes.__table__ # type: ignore[attr-defined] # pylint: disable=no-member - - -class ArchiveReadOnlyBackend(StorageBackend): # pylint: disable=too-many-public-methods - """A read-only backend for the archive.""" +class SqliteZipBackend(StorageBackend): # pylint: disable=too-many-public-methods + """A read-only backend for a sqlite/zip format.""" @classmethod def version_head(cls) -> str: @@ -254,7 +43,7 @@ def version_head(cls) -> str: @classmethod def version_profile(cls, profile: Profile) -> None: - raise NotImplementedError + return read_version(profile.storage_config['path']) @classmethod def migrate(cls, profile: Profile): @@ -267,13 +56,13 @@ def __init__(self, profile: Profile): raise UnreachableStorage(f'archive file `{self._path}` does not exist.') # lazy open the archive zipfile and extract the database file self._db_file: Optional[Path] = None - self._session: Optional[orm.Session] = None + self._session: Optional[Session] = None self._zipfile: Optional[zipfile.ZipFile] = None self._closed = False def __str__(self) -> str: state = 'closed' if self.is_closed else 'open' - return f'Aiida archive (read-only) [{state}] @ {self._path}' + return f'SqliteZip storage (read-only) [{state}] @ {self._path}' @property def is_closed(self) -> bool: @@ -292,7 +81,7 @@ def close(self): self._zipfile = None self._closed = True - def get_session(self) -> 
orm.Session: + def get_session(self) -> Session: """Return an SQLAlchemy session.""" if self._closed: raise ArchiveClosedError() @@ -305,18 +94,18 @@ def get_session(self) -> orm.Session: except Exception as exc: raise CorruptArchive(f'database could not be read: {exc}') from exc if self._session is None: - self._session = orm.Session(create_sqla_engine(self._db_file)) + self._session = Session(create_sqla_engine(self._db_file)) return self._session - def get_repository(self) -> ZipfileBackendRepository: + def get_repository(self) -> 'ZipfileBackendRepository': if self._closed: raise ArchiveClosedError() if self._zipfile is None: self._zipfile = ZipFile(self._path, mode='r') # pylint: disable=consider-using-with return ZipfileBackendRepository(self._zipfile) - def query(self) -> ArchiveBackendQueryBuilder: - return ArchiveBackendQueryBuilder(self) + def query(self) -> 'SqliteBackendQueryBuilder': + return SqliteBackendQueryBuilder(self) def get_backend_entity(self, res): # pylint: disable=no-self-use """Return the backend entity that corresponds to the given Model instance.""" @@ -325,31 +114,35 @@ def get_backend_entity(self, res): # pylint: disable=no-self-use @property def authinfos(self): - return create_backend_collection(authinfos.SqlaAuthInfoCollection, self, authinfos.SqlaAuthInfo, DbAuthInfo) + return create_backend_collection( + authinfos.SqlaAuthInfoCollection, self, authinfos.SqlaAuthInfo, models.DbAuthInfo + ) @property def comments(self): - return create_backend_collection(comments.SqlaCommentCollection, self, comments.SqlaComment, DbComment) + return create_backend_collection(comments.SqlaCommentCollection, self, comments.SqlaComment, models.DbComment) @property def computers(self): - return create_backend_collection(computers.SqlaComputerCollection, self, computers.SqlaComputer, DbComputer) + return create_backend_collection( + computers.SqlaComputerCollection, self, computers.SqlaComputer, models.DbComputer + ) @property def groups(self): - return 
create_backend_collection(groups.SqlaGroupCollection, self, groups.SqlaGroup, DbGroup) + return create_backend_collection(groups.SqlaGroupCollection, self, groups.SqlaGroup, models.DbGroup) @property def logs(self): - return create_backend_collection(logs.SqlaLogCollection, self, logs.SqlaLog, DbLog) + return create_backend_collection(logs.SqlaLogCollection, self, logs.SqlaLog, models.DbLog) @property def nodes(self): - return create_backend_collection(nodes.SqlaNodeCollection, self, nodes.SqlaNode, DbNode) + return create_backend_collection(nodes.SqlaNodeCollection, self, nodes.SqlaNode, models.DbNode) @property def users(self): - return create_backend_collection(users.SqlaUserCollection, self, users.SqlaUser, DbUser) + return create_backend_collection(users.SqlaUserCollection, self, users.SqlaUser, models.DbUser) def _clear(self, recreate_user: bool = True) -> None: raise ReadOnlyError() @@ -376,6 +169,122 @@ def get_global_variable(self, key: str): def set_global_variable(self, key: str, value, description: Optional[str] = None, overwrite=True) -> None: raise ReadOnlyError() + +class ZipfileBackendRepository(AbstractRepositoryBackend): + """A read-only backend for an open zip file.""" + + def __init__(self, file: ZipFile): + self._zipfile = file + + @property + def zipfile(self) -> ZipFile: + if self._zipfile.fp is None: + raise ArchiveClosedError() + return self._zipfile + + @property + def uuid(self) -> Optional[str]: + return None + + @property + def key_format(self) -> Optional[str]: + return 'sha256' + + def initialise(self, **kwargs) -> None: + pass + + @property + def is_initialised(self) -> bool: + return True + + def erase(self) -> None: + raise ReadOnlyError() + + def _put_object_from_filelike(self, handle: BinaryIO) -> str: + raise ReadOnlyError() + + def has_object(self, key: str) -> bool: + try: + self.zipfile.getinfo(f'{REPO_FOLDER}/{key}') + except KeyError: + return False + return True + + def has_objects(self, keys: List[str]) -> List[bool]: + 
return [self.has_object(key) for key in keys] + + def list_objects(self) -> Iterable[str]: + for name in self.zipfile.namelist(): + if name.startswith(REPO_FOLDER + '/') and name[len(REPO_FOLDER) + 1:]: + yield name[len(REPO_FOLDER) + 1:] + + @contextmanager + def open(self, key: str) -> Iterator[BinaryIO]: + try: + handle = self.zipfile.open(f'{REPO_FOLDER}/{key}') + yield cast(BinaryIO, handle) + except KeyError: + raise FileNotFoundError(f'object with key `{key}` does not exist.') + finally: + handle.close() + + def iter_object_streams(self, keys: List[str]) -> Iterator[Tuple[str, BinaryIO]]: + for key in keys: + with self.open(key) as handle: # pylint: disable=not-context-manager + yield key, handle + + def delete_objects(self, keys: List[str]) -> None: + raise ReadOnlyError() + + def get_object_hash(self, key: str) -> str: + return key + + def maintain(self, dry_run: bool = False, live: bool = True, **kwargs) -> None: + raise NotImplementedError + + def get_info(self, statistics: bool = False, **kwargs) -> dict: + return {'objects': {'count': len(list(self.list_objects()))}} + + +class SqliteBackendQueryBuilder(SqlaQueryBuilder): + """Archive query builder""" + + @property + def Node(self): + return models.DbNode + + @property + def Link(self): + return models.DbLink + + @property + def Computer(self): + return models.DbComputer + + @property + def User(self): + return models.DbUser + + @property + def Group(self): + return models.DbGroup + + @property + def AuthInfo(self): + return models.DbAuthInfo + + @property + def Comment(self): + return models.DbComment + + @property + def Log(self): + return models.DbLog + + @property + def table_groups_nodes(self): + return models.DbGroupNodes.__table__ # type: ignore[attr-defined] # pylint: disable=no-member + def maintain(self, full: bool = False, dry_run: bool = False, **kwargs) -> None: raise NotImplementedError @@ -434,36 +343,36 @@ def get_backend_entity(dbmodel) -> Type[entities.SqlaModelEntity]: # pylint: di 
raise TypeError(f'Cannot get backend entity for {dbmodel}') -@get_backend_entity.register(DbAuthInfo) # type: ignore[call-overload] +@get_backend_entity.register(models.DbAuthInfo) # type: ignore[call-overload] def _(dbmodel): return create_backend_cls(authinfos.SqlaAuthInfo, dbmodel.__class__) -@get_backend_entity.register(DbComment) # type: ignore[call-overload] +@get_backend_entity.register(models.DbComment) # type: ignore[call-overload] def _(dbmodel): return create_backend_cls(comments.SqlaComment, dbmodel.__class__) -@get_backend_entity.register(DbComputer) # type: ignore[call-overload] +@get_backend_entity.register(models.DbComputer) # type: ignore[call-overload] def _(dbmodel): return create_backend_cls(computers.SqlaComputer, dbmodel.__class__) -@get_backend_entity.register(DbGroup) # type: ignore[call-overload] +@get_backend_entity.register(models.DbGroup) # type: ignore[call-overload] def _(dbmodel): return create_backend_cls(groups.SqlaGroup, dbmodel.__class__) -@get_backend_entity.register(DbLog) # type: ignore[call-overload] +@get_backend_entity.register(models.DbLog) # type: ignore[call-overload] def _(dbmodel): return create_backend_cls(logs.SqlaLog, dbmodel.__class__) -@get_backend_entity.register(DbNode) # type: ignore[call-overload] +@get_backend_entity.register(models.DbNode) # type: ignore[call-overload] def _(dbmodel): return create_backend_cls(nodes.SqlaNode, dbmodel.__class__) -@get_backend_entity.register(DbUser) # type: ignore[call-overload] +@get_backend_entity.register(models.DbUser) # type: ignore[call-overload] def _(dbmodel): return create_backend_cls(users.SqlaUser, dbmodel.__class__) diff --git a/aiida/storage/sqlite_zip/models.py b/aiida/storage/sqlite_zip/models.py new file mode 100644 index 0000000000..990b2eeff7 --- /dev/null +++ b/aiida/storage/sqlite_zip/models.py @@ -0,0 +1,119 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. 
All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""This module contains the SQLAlchemy models for the SQLite backend. + +These models are intended to be identical to those of the `psql_dos` backend, +except for changes to the database specific types: + +- UUID +- DateTime +- JSONB +""" +from datetime import datetime +from typing import Optional + +import pytz +import sqlalchemy as sa +from sqlalchemy import orm as sa_orm +from sqlalchemy.dialects.postgresql import JSONB, UUID +from sqlalchemy.dialects.sqlite import JSON + +# we need to import all models, to ensure they are loaded on the SQLA Metadata +from aiida.storage.psql_dos.models import authinfo, base, comment, computer, group, log, node, user + + +class SqliteModel: + """Represent a row in an sqlite database table""" + + def __repr__(self) -> str: + """Return a representation of the row columns""" + string = f'<{self.__class__.__name__}' + for col in self.__table__.columns: # type: ignore[attr-defined] # pylint: disable=no-member + # don't include columns with potentially large values + if isinstance(col.type, (JSON, sa.Text)): + continue + string += f' {col.name}={getattr(self, col.name)}' + return string + '>' + + +class TZDateTime(sa.TypeDecorator): # pylint: disable=abstract-method + """A timezone naive UTC ``DateTime`` implementation for SQLite. 
+ + see: https://docs.sqlalchemy.org/en/14/core/custom_types.html#store-timezone-aware-timestamps-as-timezone-naive-utc + """ + impl = sa.DateTime + cache_ok = True + + def process_bind_param(self, value: Optional[datetime], dialect): + """Process before writing to database.""" + if value is None: + return value + if value.tzinfo is None: + value = value.astimezone(pytz.utc) + value = value.astimezone(pytz.utc).replace(tzinfo=None) + return value + + def process_result_value(self, value: Optional[datetime], dialect): + """Process when returning from database.""" + if value is None: + return value + if value.tzinfo is None: + return value.replace(tzinfo=pytz.utc) + return value.astimezone(pytz.utc) + + +SqliteBase = sa.orm.declarative_base(cls=SqliteModel, name='SqliteModel') + + +def pg_to_sqlite(pg_table: sa.Table): + """Convert a model intended for PostGreSQL to one compatible with SQLite""" + new = pg_table.to_metadata(SqliteBase.metadata) + for column in new.columns: + if isinstance(column.type, UUID): + column.type = sa.CHAR(32) + elif isinstance(column.type, sa.DateTime): + column.type = TZDateTime() + elif isinstance(column.type, JSONB): + column.type = JSON() + return new + + +def create_orm_cls(klass: base.Base) -> SqliteBase: + """Create an ORM class from an existing table in the declarative meta""" + tbl = SqliteBase.metadata.tables[klass.__tablename__] + return type( # type: ignore[return-value] + klass.__name__, + (SqliteBase,), + { + '__tablename__': tbl.name, + '__table__': tbl, + **{col.name if col.name != 'metadata' else '_metadata': col for col in tbl.columns}, + }, + ) + + +for table in base.Base.metadata.sorted_tables: + pg_to_sqlite(table) + +DbUser = create_orm_cls(user.DbUser) +DbComputer = create_orm_cls(computer.DbComputer) +DbAuthInfo = create_orm_cls(authinfo.DbAuthInfo) +DbGroup = create_orm_cls(group.DbGroup) +DbNode = create_orm_cls(node.DbNode) +DbGroupNodes = create_orm_cls(group.DbGroupNode) +DbComment = 
create_orm_cls(comment.DbComment) +DbLog = create_orm_cls(log.DbLog) +DbLink = create_orm_cls(node.DbLink) + +# to-do This was the minimum for creating a graph, but really all relationships should be copied +DbNode.dbcomputer = sa_orm.relationship('DbComputer', backref='dbnodes') # type: ignore[attr-defined] +DbGroup.dbnodes = sa_orm.relationship( # type: ignore[attr-defined] + 'DbNode', secondary='db_dbgroup_dbnodes', backref='dbgroups', lazy='dynamic' +) diff --git a/aiida/storage/sqlite_zip/utils.py b/aiida/storage/sqlite_zip/utils.py new file mode 100644 index 0000000000..cec7298867 --- /dev/null +++ b/aiida/storage/sqlite_zip/utils.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Utilities for this backend.""" +from pathlib import Path +import tarfile +from typing import Any, Dict, Optional, Union +import zipfile + +from archive_path import read_file_in_tar, read_file_in_zip +from sqlalchemy import event +from sqlalchemy.future.engine import Engine, create_engine + +from aiida.common import json +from aiida.common.exceptions import UnreachableStorage + +META_FILENAME = 'metadata.json' +"""The filename containing meta information about the storage instance.""" + + +def sqlite_enforce_foreign_keys(dbapi_connection, _): + """Enforce foreign key constraints, when using sqlite backend (off by default)""" + cursor = dbapi_connection.cursor() + cursor.execute('PRAGMA foreign_keys=ON;') + cursor.close() + + +def create_sqla_engine(path: Union[str, Path], *, enforce_foreign_keys: bool = True, **kwargs) -> 
Engine: + """Create a new engine instance.""" + engine = create_engine( + f'sqlite:///{path}', + json_serializer=json.dumps, + json_deserializer=json.loads, + encoding='utf-8', + future=True, + **kwargs + ) + if enforce_foreign_keys: + event.listen(engine, 'connect', sqlite_enforce_foreign_keys) + return engine + + +def extract_metadata(path: Union[str, Path], search_limit: Optional[int] = 10) -> Dict[str, Any]: + """Extract the metadata dictionary from the archive""" + # we fail if not one of the first record in central directory (as expected) + # so we don't have to iter all repo files to fail + return json.loads(read_file_in_zip(path, META_FILENAME, 'utf8', search_limit=search_limit)) + + +def read_version(path: Union[str, Path]) -> str: + """Read the version of the storage instance from the file. + + This is intended to work for all versions of the storage format. + + :param path: path to storage instance + + :raises: ``UnreachableStorage`` if a version cannot be read from the file + """ + path = Path(path) + if not path.is_file(): + raise UnreachableStorage('archive file not found') + + if zipfile.is_zipfile(path): + try: + metadata = extract_metadata(path, search_limit=None) + except Exception as exc: + raise UnreachableStorage(f'Could not read metadata for version: {exc}') from exc + elif tarfile.is_tarfile(path): + try: + metadata = json.loads(read_file_in_tar(path, META_FILENAME)) + except Exception as exc: + raise UnreachableStorage(f'Could not read metadata for version: {exc}') from exc + else: + raise UnreachableStorage('Not a zip or tar file') + + if 'export_version' in metadata: + return metadata['export_version'] + + raise UnreachableStorage("Metadata does not contain 'export_version' key") diff --git a/aiida/tools/archive/implementations/sqlite/common.py b/aiida/tools/archive/implementations/sqlite/common.py index a375cf7c26..06640ae0cc 100644 --- a/aiida/tools/archive/implementations/sqlite/common.py +++ 
b/aiida/tools/archive/implementations/sqlite/common.py @@ -12,13 +12,10 @@ from pathlib import Path import shutil import tempfile -from typing import Callable, Sequence, Union +from typing import Callable, Sequence from archive_path import TarPath, ZipPath -from sqlalchemy import event -from sqlalchemy.future.engine import Engine, create_engine -from aiida.common import json from aiida.common.progress_reporter import create_callback, get_progress_reporter META_FILENAME = 'metadata.json' @@ -27,28 +24,6 @@ REPO_FOLDER = 'repo' -def sqlite_enforce_foreign_keys(dbapi_connection, _): - """Enforce foreign key constraints, when using sqlite backend (off by default)""" - cursor = dbapi_connection.cursor() - cursor.execute('PRAGMA foreign_keys=ON;') - cursor.close() - - -def create_sqla_engine(path: Union[str, Path], *, enforce_foreign_keys: bool = True, **kwargs) -> Engine: - """Create a new engine instance.""" - engine = create_engine( - f'sqlite:///{path}', - json_serializer=json.dumps, - json_deserializer=json.loads, - encoding='utf-8', - future=True, - **kwargs - ) - if enforce_foreign_keys: - event.listen(engine, 'connect', sqlite_enforce_foreign_keys) - return engine - - def copy_zip_to_zip( inpath: Path, outpath: Path, diff --git a/aiida/tools/archive/implementations/sqlite/main.py b/aiida/tools/archive/implementations/sqlite/main.py index 85b1242991..74a3d458fd 100644 --- a/aiida/tools/archive/implementations/sqlite/main.py +++ b/aiida/tools/archive/implementations/sqlite/main.py @@ -11,10 +11,11 @@ from pathlib import Path from typing import Any, List, Literal, Union, overload +from aiida.storage.sqlite_zip.utils import read_version from aiida.tools.archive.abstract import ArchiveFormatAbstract from .migrations.main import ALL_VERSIONS, migrate -from .reader import ArchiveReaderSqlZip, read_version +from .reader import ArchiveReaderSqlZip from .writer import ArchiveAppenderSqlZip, ArchiveWriterSqlZip __all__ = ('ArchiveFormatSqlZip',) diff --git 
a/aiida/tools/archive/implementations/sqlite/migrations/legacy_to_new.py b/aiida/tools/archive/implementations/sqlite/migrations/legacy_to_new.py index c770e9f233..470e259a8f 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/legacy_to_new.py +++ b/aiida/tools/archive/implementations/sqlite/migrations/legacy_to_new.py @@ -24,11 +24,12 @@ from aiida.common.hashing import chunked_file_hash from aiida.common.progress_reporter import get_progress_reporter from aiida.repository.common import File, FileType +from aiida.storage.sqlite_zip.utils import create_sqla_engine from aiida.tools.archive.common import MIGRATE_LOGGER, batch_iter from aiida.tools.archive.exceptions import CorruptArchive, MigrationValidationError from . import v1_db_schema as db -from ..common import DB_FILENAME, META_FILENAME, REPO_FOLDER, create_sqla_engine +from ..common import DB_FILENAME, META_FILENAME, REPO_FOLDER from .utils import update_metadata _NODE_ENTITY_NAME = 'Node' diff --git a/aiida/tools/archive/implementations/sqlite/reader.py b/aiida/tools/archive/implementations/sqlite/reader.py index f3cdebbe74..2c9e59e998 100644 --- a/aiida/tools/archive/implementations/sqlite/reader.py +++ b/aiida/tools/archive/implementations/sqlite/reader.py @@ -8,20 +8,14 @@ # For further information please visit http://www.aiida.net # ########################################################################### """AiiDA archive reader implementation.""" -import json from pathlib import Path -import tarfile from typing import Any, Dict, Optional, Union -import zipfile - -from archive_path import read_file_in_tar, read_file_in_zip from aiida.manage import Profile +from aiida.storage.sqlite_zip.backend import SqliteZipBackend +from aiida.storage.sqlite_zip.utils import extract_metadata from aiida.tools.archive.abstract import ArchiveReaderAbstract -from aiida.tools.archive.exceptions import CorruptArchive, UnreadableArchiveError - -from . 
import backend as db -from .common import META_FILENAME +from aiida.tools.archive.exceptions import CorruptArchive class ArchiveReaderSqlZip(ArchiveReaderAbstract): @@ -31,7 +25,7 @@ def __init__(self, path: Union[str, Path], **kwargs: Any): super().__init__(path, **kwargs) self._in_context = False # we lazily create the storage backend, then clean up on exit - self._backend: Optional[db.ArchiveReadOnlyBackend] = None + self._backend: Optional[SqliteZipBackend] = None def __enter__(self) -> 'ArchiveReaderSqlZip': self._in_context = True @@ -51,7 +45,7 @@ def get_metadata(self) -> Dict[str, Any]: except Exception as exc: raise CorruptArchive('metadata could not be read') from exc - def get_backend(self) -> db.ArchiveReadOnlyBackend: + def get_backend(self) -> SqliteZipBackend: if not self._in_context: raise AssertionError('Not in context') if self._backend is not None: @@ -59,7 +53,7 @@ def get_backend(self) -> db.ArchiveReadOnlyBackend: profile = Profile( 'default', { 'storage': { - 'backend': 'archive.sqlite', + 'backend': 'sqlite_zip', 'config': { 'path': str(self.path) } @@ -70,43 +64,5 @@ def get_backend(self) -> db.ArchiveReadOnlyBackend: } } ) - self._backend = db.ArchiveReadOnlyBackend(profile) + self._backend = SqliteZipBackend(profile) return self._backend - - -def extract_metadata(path: Union[str, Path], search_limit: Optional[int] = 10) -> Dict[str, Any]: - """Extract the metadata dictionary from the archive""" - # we fail if not one of the first record in central directory (as expected) - # so we don't have to iter all repo files to fail - return json.loads(read_file_in_zip(path, META_FILENAME, 'utf8', search_limit=search_limit)) - - -def read_version(path: Union[str, Path]) -> str: - """Read the version of the archive from the file. - - Intended to work for all versions of the archive format. 
- - :param path: archive path - - :raises: ``FileNotFoundError`` if the file does not exist - :raises: ``UnreadableArchiveError`` if a version cannot be read from the archive - """ - path = Path(path) - if not path.is_file(): - raise FileNotFoundError('archive file not found') - # check the file is at least a zip or tar file - if zipfile.is_zipfile(path): - try: - metadata = extract_metadata(path, search_limit=None) - except Exception as exc: - raise UnreadableArchiveError(f'Could not read metadata for version: {exc}') from exc - elif tarfile.is_tarfile(path): - try: - metadata = json.loads(read_file_in_tar(path, META_FILENAME)) - except Exception as exc: - raise UnreadableArchiveError(f'Could not read metadata for version: {exc}') from exc - else: - raise UnreadableArchiveError('Not a zip or tar file') - if 'export_version' in metadata: - return metadata['export_version'] - raise UnreadableArchiveError("Metadata does not contain 'export_version' key") diff --git a/aiida/tools/archive/implementations/sqlite/writer.py b/aiida/tools/archive/implementations/sqlite/writer.py index 4ee7358c84..b6d99fe159 100644 --- a/aiida/tools/archive/implementations/sqlite/writer.py +++ b/aiida/tools/archive/implementations/sqlite/writer.py @@ -29,26 +29,26 @@ from aiida.common.hashing import chunked_file_hash from aiida.common.progress_reporter import get_progress_reporter from aiida.orm.entities import EntityTypes +from aiida.storage.sqlite_zip import models, utils from aiida.tools.archive.abstract import ArchiveFormatAbstract, ArchiveWriterAbstract from aiida.tools.archive.exceptions import CorruptArchive, IncompatibleArchiveVersionError -from . 
import backend as db -from .common import DB_FILENAME, META_FILENAME, REPO_FOLDER, create_sqla_engine +from .common import DB_FILENAME, META_FILENAME, REPO_FOLDER @functools.lru_cache(maxsize=10) def _get_model_from_entity(entity_type: EntityTypes): """Return the Sqlalchemy model and column names corresponding to the given entity.""" model = { - EntityTypes.USER: db.DbUser, - EntityTypes.AUTHINFO: db.DbAuthInfo, - EntityTypes.GROUP: db.DbGroup, - EntityTypes.NODE: db.DbNode, - EntityTypes.COMMENT: db.DbComment, - EntityTypes.COMPUTER: db.DbComputer, - EntityTypes.LOG: db.DbLog, - EntityTypes.LINK: db.DbLink, - EntityTypes.GROUP_NODE: db.DbGroupNodes + EntityTypes.USER: models.DbUser, + EntityTypes.AUTHINFO: models.DbAuthInfo, + EntityTypes.GROUP: models.DbGroup, + EntityTypes.NODE: models.DbNode, + EntityTypes.COMMENT: models.DbComment, + EntityTypes.COMPUTER: models.DbComputer, + EntityTypes.LOG: models.DbLog, + EntityTypes.LINK: models.DbLink, + EntityTypes.GROUP_NODE: models.DbGroupNodes }[entity_type] mapper = inspect(model).mapper column_names = {col.name for col in mapper.c.values()} @@ -106,10 +106,10 @@ def __enter__(self) -> 'ArchiveWriterSqlZip': info_order=(self.meta_name, self.db_name), name_to_info=self._central_dir, ) - engine = create_sqla_engine( + engine = utils.create_sqla_engine( self._work_dir / self.db_name, enforce_foreign_keys=self._enforce_foreign_keys, echo=self._debug ) - db.ArchiveDbBase.metadata.create_all(engine) + models.SqliteBase.metadata.create_all(engine) self._conn = engine.connect() self._in_context = True return self @@ -251,7 +251,7 @@ def __enter__(self) -> 'ArchiveAppenderSqlZip': except Exception as exc: raise CorruptArchive(f'database could not be read: {exc}') from exc # open a connection to the database - engine = create_sqla_engine( + engine = utils.create_sqla_engine( self._work_dir / self.db_name, enforce_foreign_keys=self._enforce_foreign_keys, echo=self._debug ) # to-do could check that the database has correct 
schema: diff --git a/utils/make_all.py b/utils/make_all.py index 9f8cc42174..daee5b9f12 100644 --- a/utils/make_all.py +++ b/utils/make_all.py @@ -157,7 +157,7 @@ def write_inits(folder_path: str, all_dict: dict, skip_children: Dict[str, List[ # skipped since this is for testing only not general use 'manage': ['tests'], # skipped since we don't want to expose the implementation at the top-level - 'storage': ['psql_dos'], + 'storage': ['psql_dos', 'sqlite_zip'], 'orm': ['implementation'], # skip all since the module requires extra requirements 'restapi': ['*'], From 064b42cbad82d85796d79463412bef49c0780293 Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Wed, 23 Feb 2022 16:00:04 +0100 Subject: [PATCH 02/26] move migrations --- aiida/storage/psql_dos/migrations/env.py | 14 +- aiida/storage/sqlite_zip/backend.py | 2 +- .../sqlite_zip}/migrations/__init__.py | 1 - aiida/storage/sqlite_zip/migrations/env.py | 49 ++++ .../sqlite_zip}/migrations/legacy/__init__.py | 3 +- .../sqlite_zip/migrations/legacy}/utils.py | 0 .../migrations/legacy/v04_to_v05.py | 2 +- .../migrations/legacy/v05_to_v06.py | 2 +- .../migrations/legacy/v06_to_v07.py | 2 +- .../migrations/legacy/v07_to_v08.py | 2 +- .../migrations/legacy/v08_to_v09.py | 2 +- .../migrations/legacy/v09_to_v10.py | 2 +- .../migrations/legacy/v10_to_v11.py | 2 +- .../migrations/legacy/v11_to_v12.py | 2 +- .../sqlite_zip}/migrations/legacy_to_new.py | 43 ++-- .../sqlite_zip}/migrations/main.py | 73 ++++-- .../sqlite_zip/migrations/script.py.mako | 24 ++ .../sqlite_zip/migrations/v1_db_schema.py | 211 ++++++++++++++++++ .../migrations/versions/__init__.py | 9 + .../migrations/versions/main_0001_initial.py | 197 ++++++++++++++++ aiida/storage/sqlite_zip/models.py | 12 +- aiida/tools/archive/abstract.py | 13 +- aiida/tools/archive/create.py | 2 +- .../tools/archive/implementations/__init__.py | 2 +- .../sqlite/migrations/v1_db_schema.py | 169 -------------- .../{sqlite => sqlite_zip}/__init__.py | 0 .../{sqlite => 
sqlite_zip}/common.py | 0 .../{sqlite => sqlite_zip}/main.py | 11 +- .../{sqlite => sqlite_zip}/reader.py | 0 .../{sqlite => sqlite_zip}/writer.py | 0 aiida/tools/archive/imports.py | 6 +- tests/cmdline/commands/test_archive_create.py | 5 +- tests/cmdline/commands/test_archive_import.py | 3 +- tests/static/export/compare/django.aiida | Bin 2967 -> 3497 bytes tests/static/export/compare/sqlalchemy.aiida | Bin 2966 -> 3495 bytes ...le.aiida => export_main_0001_simple.aiida} | Bin 47286 -> 49240 bytes tests/tools/archive/migration/conftest.py | 2 +- .../archive/migration/test_legacy_funcs.py | 4 +- .../archive/migration/test_v04_to_v05.py | 2 +- .../archive/migration/test_v05_to_v06.py | 4 +- .../archive/migration/test_v06_to_v07.py | 4 +- .../archive/migration/test_v07_to_v08.py | 5 +- .../archive/migration/test_v08_to_v09.py | 5 +- tests/tools/archive/test_backend.py | 2 +- tests/tools/archive/test_common.py | 2 +- 45 files changed, 618 insertions(+), 277 deletions(-) rename aiida/{tools/archive/implementations/sqlite => storage/sqlite_zip}/migrations/__init__.py (90%) create mode 100644 aiida/storage/sqlite_zip/migrations/env.py rename aiida/{tools/archive/implementations/sqlite => storage/sqlite_zip}/migrations/legacy/__init__.py (94%) rename aiida/{tools/archive/implementations/sqlite/migrations => storage/sqlite_zip/migrations/legacy}/utils.py (100%) rename aiida/{tools/archive/implementations/sqlite => storage/sqlite_zip}/migrations/legacy/v04_to_v05.py (95%) rename aiida/{tools/archive/implementations/sqlite => storage/sqlite_zip}/migrations/legacy/v05_to_v06.py (98%) rename aiida/{tools/archive/implementations/sqlite => storage/sqlite_zip}/migrations/legacy/v06_to_v07.py (98%) rename aiida/{tools/archive/implementations/sqlite => storage/sqlite_zip}/migrations/legacy/v07_to_v08.py (95%) rename aiida/{tools/archive/implementations/sqlite => storage/sqlite_zip}/migrations/legacy/v08_to_v09.py (95%) rename aiida/{tools/archive/implementations/sqlite => 
storage/sqlite_zip}/migrations/legacy/v09_to_v10.py (92%) rename aiida/{tools/archive/implementations/sqlite => storage/sqlite_zip}/migrations/legacy/v10_to_v11.py (93%) rename aiida/{tools/archive/implementations/sqlite => storage/sqlite_zip}/migrations/legacy/v11_to_v12.py (98%) rename aiida/{tools/archive/implementations/sqlite => storage/sqlite_zip}/migrations/legacy_to_new.py (90%) rename aiida/{tools/archive/implementations/sqlite => storage/sqlite_zip}/migrations/main.py (78%) create mode 100644 aiida/storage/sqlite_zip/migrations/script.py.mako create mode 100644 aiida/storage/sqlite_zip/migrations/v1_db_schema.py create mode 100644 aiida/storage/sqlite_zip/migrations/versions/__init__.py create mode 100644 aiida/storage/sqlite_zip/migrations/versions/main_0001_initial.py delete mode 100644 aiida/tools/archive/implementations/sqlite/migrations/v1_db_schema.py rename aiida/tools/archive/implementations/{sqlite => sqlite_zip}/__init__.py (100%) rename aiida/tools/archive/implementations/{sqlite => sqlite_zip}/common.py (100%) rename aiida/tools/archive/implementations/{sqlite => sqlite_zip}/main.py (90%) rename aiida/tools/archive/implementations/{sqlite => sqlite_zip}/reader.py (100%) rename aiida/tools/archive/implementations/{sqlite => sqlite_zip}/writer.py (100%) rename tests/static/export/migrate/{export_v1.0_simple.aiida => export_main_0001_simple.aiida} (81%) diff --git a/aiida/storage/psql_dos/migrations/env.py b/aiida/storage/psql_dos/migrations/env.py index 613d237c34..aacf26e98d 100644 --- a/aiida/storage/psql_dos/migrations/env.py +++ b/aiida/storage/psql_dos/migrations/env.py @@ -16,18 +16,8 @@ def run_migrations_online(): The connection should have been passed to the config, which we use to configue the migration context. 
""" + from aiida.storage.psql_dos.models.base import get_orm_metadata - # pylint: disable=unused-import - from aiida.common.exceptions import DbContentError - from aiida.storage.psql_dos.models.authinfo import DbAuthInfo - from aiida.storage.psql_dos.models.base import Base - from aiida.storage.psql_dos.models.comment import DbComment - from aiida.storage.psql_dos.models.computer import DbComputer - from aiida.storage.psql_dos.models.group import DbGroup - from aiida.storage.psql_dos.models.log import DbLog - from aiida.storage.psql_dos.models.node import DbLink, DbNode - from aiida.storage.psql_dos.models.settings import DbSetting - from aiida.storage.psql_dos.models.user import DbUser config = context.config # pylint: disable=no-member connection = config.attributes.get('connection', None) @@ -43,7 +33,7 @@ def run_migrations_online(): context.configure( # pylint: disable=no-member connection=connection, - target_metadata=Base.metadata, + target_metadata=get_orm_metadata(), transaction_per_migration=True, aiida_profile=aiida_profile, on_version_apply=on_version_apply diff --git a/aiida/storage/sqlite_zip/backend.py b/aiida/storage/sqlite_zip/backend.py index 1482a06edb..250a9b6d1e 100644 --- a/aiida/storage/sqlite_zip/backend.py +++ b/aiida/storage/sqlite_zip/backend.py @@ -28,7 +28,7 @@ from aiida.storage.psql_dos.orm.querybuilder import SqlaQueryBuilder from aiida.storage.psql_dos.orm.utils import ModelWrapper from aiida.tools.archive.exceptions import ArchiveClosedError, CorruptArchive, ReadOnlyError -from aiida.tools.archive.implementations.sqlite.common import DB_FILENAME, REPO_FOLDER +from aiida.tools.archive.implementations.sqlite_zip.common import DB_FILENAME, REPO_FOLDER from . 
import models from .utils import create_sqla_engine, read_version diff --git a/aiida/tools/archive/implementations/sqlite/migrations/__init__.py b/aiida/storage/sqlite_zip/migrations/__init__.py similarity index 90% rename from aiida/tools/archive/implementations/sqlite/migrations/__init__.py rename to aiida/storage/sqlite_zip/migrations/__init__.py index 84dbe1264d..2776a55f97 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/__init__.py +++ b/aiida/storage/sqlite_zip/migrations/__init__.py @@ -7,4 +7,3 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""Migration archive files from old export versions to newer ones.""" diff --git a/aiida/storage/sqlite_zip/migrations/env.py b/aiida/storage/sqlite_zip/migrations/env.py new file mode 100644 index 0000000000..2ee03a00b2 --- /dev/null +++ b/aiida/storage/sqlite_zip/migrations/env.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Upper level SQLAlchemy migration funcitons.""" +from alembic import context + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + The connection should have been passed to the config, which we use to configue the migration context. 
+ """ + from aiida.storage.sqlite_zip.models import SqliteBase + + config = context.config # pylint: disable=no-member + + connection = config.attributes.get('connection', None) + aiida_profile = config.attributes.get('aiida_profile', None) + on_version_apply = config.attributes.get('on_version_apply', None) + + if connection is None: + from aiida.common.exceptions import ConfigurationError + raise ConfigurationError('An initialized connection is expected for the AiiDA online migrations.') + + context.configure( # pylint: disable=no-member + connection=connection, + target_metadata=SqliteBase.metadata, + transaction_per_migration=True, + aiida_profile=aiida_profile, + on_version_apply=on_version_apply + ) + + context.run_migrations() # pylint: disable=no-member + + +try: + if context.is_offline_mode(): # pylint: disable=no-member + NotImplementedError('This feature is not currently supported.') + + run_migrations_online() +except NameError: + # This will occur in an environment that is just compiling the documentation + pass diff --git a/aiida/tools/archive/implementations/sqlite/migrations/legacy/__init__.py b/aiida/storage/sqlite_zip/migrations/legacy/__init__.py similarity index 94% rename from aiida/tools/archive/implementations/sqlite/migrations/legacy/__init__.py rename to aiida/storage/sqlite_zip/migrations/legacy/__init__.py index 5190ad4d96..f46a36c0bd 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/legacy/__init__.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/__init__.py @@ -24,8 +24,7 @@ from .v11_to_v12 import migrate_v11_to_v12 # version from -> version to, function which modifies metadata, data in-place -_vtype = Dict[str, Tuple[str, Callable[[dict, dict], None]]] -LEGACY_MIGRATE_FUNCTIONS: _vtype = { +LEGACY_MIGRATE_FUNCTIONS: Dict[str, Tuple[str, Callable[[dict, dict], None]]] = { '0.4': ('0.5', migrate_v4_to_v5), '0.5': ('0.6', migrate_v5_to_v6), '0.6': ('0.7', migrate_v6_to_v7), diff --git 
a/aiida/tools/archive/implementations/sqlite/migrations/utils.py b/aiida/storage/sqlite_zip/migrations/legacy/utils.py similarity index 100% rename from aiida/tools/archive/implementations/sqlite/migrations/utils.py rename to aiida/storage/sqlite_zip/migrations/legacy/utils.py diff --git a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v04_to_v05.py b/aiida/storage/sqlite_zip/migrations/legacy/v04_to_v05.py similarity index 95% rename from aiida/tools/archive/implementations/sqlite/migrations/legacy/v04_to_v05.py rename to aiida/storage/sqlite_zip/migrations/legacy/v04_to_v05.py index 2e872db20f..35107e48c8 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v04_to_v05.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v04_to_v05.py @@ -24,7 +24,7 @@ Where id is a SQLA id and migration-name is the name of the particular migration. """ # pylint: disable=invalid-name -from ..utils import remove_fields, update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from .utils import remove_fields, update_metadata, verify_metadata_version # pylint: disable=no-name-in-module def migration_drop_node_columns_nodeversion_public(metadata, data): diff --git a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v05_to_v06.py b/aiida/storage/sqlite_zip/migrations/legacy/v05_to_v06.py similarity index 98% rename from aiida/tools/archive/implementations/sqlite/migrations/legacy/v05_to_v06.py rename to aiida/storage/sqlite_zip/migrations/legacy/v05_to_v06.py index 3f6a7ea9c5..d949c0877e 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v05_to_v06.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v05_to_v06.py @@ -26,7 +26,7 @@ # pylint: disable=invalid-name from typing import Union -from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from .utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module def 
migrate_deserialized_datetime(data, conversion): diff --git a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v06_to_v07.py b/aiida/storage/sqlite_zip/migrations/legacy/v06_to_v07.py similarity index 98% rename from aiida/tools/archive/implementations/sqlite/migrations/legacy/v06_to_v07.py rename to aiida/storage/sqlite_zip/migrations/legacy/v06_to_v07.py index 4b764140f6..85c0b74cf2 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v06_to_v07.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v06_to_v07.py @@ -24,7 +24,7 @@ Where id is a SQLA id and migration-name is the name of the particular migration. """ # pylint: disable=invalid-name -from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from .utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module def data_migration_legacy_process_attributes(data): diff --git a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v07_to_v08.py b/aiida/storage/sqlite_zip/migrations/legacy/v07_to_v08.py similarity index 95% rename from aiida/tools/archive/implementations/sqlite/migrations/legacy/v07_to_v08.py rename to aiida/storage/sqlite_zip/migrations/legacy/v07_to_v08.py index 15ea832041..14e46658b0 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v07_to_v08.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v07_to_v08.py @@ -24,7 +24,7 @@ Where id is a SQLA id and migration-name is the name of the particular migration. 
""" # pylint: disable=invalid-name -from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from .utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module def migration_default_link_label(data: dict): diff --git a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v08_to_v09.py b/aiida/storage/sqlite_zip/migrations/legacy/v08_to_v09.py similarity index 95% rename from aiida/tools/archive/implementations/sqlite/migrations/legacy/v08_to_v09.py rename to aiida/storage/sqlite_zip/migrations/legacy/v08_to_v09.py index c3c12d616b..b206ea30e0 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v08_to_v09.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v08_to_v09.py @@ -24,7 +24,7 @@ Where id is a SQLA id and migration-name is the name of the particular migration. """ # pylint: disable=invalid-name -from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from .utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module def migration_dbgroup_type_string(data): diff --git a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v09_to_v10.py b/aiida/storage/sqlite_zip/migrations/legacy/v09_to_v10.py similarity index 92% rename from aiida/tools/archive/implementations/sqlite/migrations/legacy/v09_to_v10.py rename to aiida/storage/sqlite_zip/migrations/legacy/v09_to_v10.py index a005837005..578dc896b4 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v09_to_v10.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v09_to_v10.py @@ -9,7 +9,7 @@ ########################################################################### """Migration from v0.9 to v0.10, used by `verdi export migrate` command.""" # pylint: disable=invalid-name,unused-argument -from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from .utils import 
update_metadata, verify_metadata_version # pylint: disable=no-name-in-module def migrate_v9_to_v10(metadata: dict, data: dict) -> None: diff --git a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v10_to_v11.py b/aiida/storage/sqlite_zip/migrations/legacy/v10_to_v11.py similarity index 93% rename from aiida/tools/archive/implementations/sqlite/migrations/legacy/v10_to_v11.py rename to aiida/storage/sqlite_zip/migrations/legacy/v10_to_v11.py index 011a83d761..a0af93f926 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v10_to_v11.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v10_to_v11.py @@ -11,7 +11,7 @@ This migration applies the name change of the ``Computer`` attribute ``name`` to ``label``. """ -from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from .utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module def migrate_v10_to_v11(metadata: dict, data: dict) -> None: diff --git a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v11_to_v12.py b/aiida/storage/sqlite_zip/migrations/legacy/v11_to_v12.py similarity index 98% rename from aiida/tools/archive/implementations/sqlite/migrations/legacy/v11_to_v12.py rename to aiida/storage/sqlite_zip/migrations/legacy/v11_to_v12.py index fd6efd27ad..5bd2531ea0 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v11_to_v12.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v11_to_v12.py @@ -11,7 +11,7 @@ This migration is necessary after the `core.` prefix was added to entry points shipped with `aiida-core`. 
""" -from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from .utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module MAPPING_DATA = { 'data.array.ArrayData.': 'data.core.array.ArrayData.', diff --git a/aiida/tools/archive/implementations/sqlite/migrations/legacy_to_new.py b/aiida/storage/sqlite_zip/migrations/legacy_to_new.py similarity index 90% rename from aiida/tools/archive/implementations/sqlite/migrations/legacy_to_new.py rename to aiida/storage/sqlite_zip/migrations/legacy_to_new.py index 470e259a8f..2fb243ec4a 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/legacy_to_new.py +++ b/aiida/storage/sqlite_zip/migrations/legacy_to_new.py @@ -7,7 +7,7 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""Migration from legacy JSON format.""" +"""Migration from the "legacy" JSON format, to an sqlite database.""" from contextlib import contextmanager from datetime import datetime from hashlib import sha256 @@ -28,9 +28,9 @@ from aiida.tools.archive.common import MIGRATE_LOGGER, batch_iter from aiida.tools.archive.exceptions import CorruptArchive, MigrationValidationError -from . import v1_db_schema as db -from ..common import DB_FILENAME, META_FILENAME, REPO_FOLDER -from .utils import update_metadata +from . 
import v1_db_schema as v1_schema +from ....tools.archive.implementations.sqlite_zip.common import DB_FILENAME, META_FILENAME, REPO_FOLDER +from .legacy.utils import update_metadata _NODE_ENTITY_NAME = 'Node' _GROUP_ENTITY_NAME = 'Group' @@ -58,18 +58,20 @@ } aiida_orm_to_backend = { - _USER_ENTITY_NAME: db.DbUser, - _GROUP_ENTITY_NAME: db.DbGroup, - _NODE_ENTITY_NAME: db.DbNode, - _COMMENT_ENTITY_NAME: db.DbComment, - _COMPUTER_ENTITY_NAME: db.DbComputer, - _LOG_ENTITY_NAME: db.DbLog, + _USER_ENTITY_NAME: v1_schema.DbUser, + _GROUP_ENTITY_NAME: v1_schema.DbGroup, + _NODE_ENTITY_NAME: v1_schema.DbNode, + _COMMENT_ENTITY_NAME: v1_schema.DbComment, + _COMPUTER_ENTITY_NAME: v1_schema.DbComputer, + _LOG_ENTITY_NAME: v1_schema.DbLog, } +MIGRATED_TO_REVISION = 'main_0001' + def perform_v1_migration( # pylint: disable=too-many-locals inpath: Path, working: Path, archive_name: str, is_tar: bool, metadata: dict, data: dict, compression: int -) -> str: +) -> None: """Perform the repository and JSON to SQLite migration. 1. 
Iterate though the repository paths in the archive @@ -141,11 +143,9 @@ def in_archive_context(_inpath): metadata['compression'] = compression metadata['key_format'] = 'sha256' metadata['mtime'] = datetime.now().isoformat() - update_metadata(metadata, '1.0') + update_metadata(metadata, MIGRATED_TO_REVISION) (new_path / META_FILENAME).write_text(json.dumps(metadata)) - return '1.0' - def _json_to_sqlite( outpath: Path, data: dict, node_repos: Dict[str, List[Tuple[str, Optional[str]]]], batch_size: int = 100 @@ -154,7 +154,7 @@ def _json_to_sqlite( MIGRATE_LOGGER.report('Converting DB to SQLite') engine = create_sqla_engine(outpath) - db.ArchiveV1Base.metadata.create_all(engine) + v1_schema.ArchiveV1Base.metadata.create_all(engine) with engine.begin() as connection: # proceed in order of relationships @@ -169,6 +169,7 @@ def _json_to_sqlite( with get_progress_reporter()(desc=f'Adding {entity_type}s', total=length) as progress: for nrows, rows in batch_iter(_iter_entity_fields(data, entity_type, node_repos), batch_size): # to-do check for unused keys? + # to-do handle null values? 
try: connection.execute(insert(backend_cls.__table__), rows) # type: ignore except IntegrityError as exc: @@ -181,7 +182,9 @@ def _json_to_sqlite( with engine.begin() as connection: # get mapping of node IDs to node UUIDs - node_uuid_map = {uuid: pk for uuid, pk in connection.execute(select(db.DbNode.uuid, db.DbNode.id))} # pylint: disable=unnecessary-comprehension + node_uuid_map = { + uuid: pk for uuid, pk in connection.execute(select(v1_schema.DbNode.uuid, v1_schema.DbNode.id)) # pylint: disable=unnecessary-comprehension + } # links if data['links_uuid']: @@ -196,19 +199,21 @@ def _transform_link(link_row): with get_progress_reporter()(desc='Adding Links', total=len(data['links_uuid'])) as progress: for nrows, rows in batch_iter(data['links_uuid'], batch_size, transform=_transform_link): - connection.execute(insert(db.DbLink.__table__), rows) + connection.execute(insert(v1_schema.DbLink.__table__), rows) progress.update(nrows) # groups to nodes if data['groups_uuid']: # get mapping of node IDs to node UUIDs - group_uuid_map = {uuid: pk for uuid, pk in connection.execute(select(db.DbGroup.uuid, db.DbGroup.id))} # pylint: disable=unnecessary-comprehension + group_uuid_map = { + uuid: pk for uuid, pk in connection.execute(select(v1_schema.DbGroup.uuid, v1_schema.DbGroup.id)) # pylint: disable=unnecessary-comprehension + } length = sum(len(uuids) for uuids in data['groups_uuid'].values()) with get_progress_reporter()(desc='Adding Group-Nodes', total=length) as progress: for group_uuid, node_uuids in data['groups_uuid'].items(): group_id = group_uuid_map[group_uuid] connection.execute( - insert(db.DbGroupNodes.__table__), [{ + insert(v1_schema.DbGroupNodes.__table__), [{ 'dbnode_id': node_uuid_map[uuid], 'dbgroup_id': group_id } for uuid in node_uuids] diff --git a/aiida/tools/archive/implementations/sqlite/migrations/main.py b/aiida/storage/sqlite_zip/migrations/main.py similarity index 78% rename from aiida/tools/archive/implementations/sqlite/migrations/main.py 
rename to aiida/storage/sqlite_zip/migrations/main.py index b0a69ac0f9..f21b51ca84 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/main.py +++ b/aiida/storage/sqlite_zip/migrations/main.py @@ -8,6 +8,7 @@ # For further information please visit http://www.aiida.net # ########################################################################### """AiiDA archive migrator implementation.""" +import os from pathlib import Path import shutil import tarfile @@ -15,24 +16,44 @@ from typing import Any, Dict, List, Optional, Union import zipfile +from alembic.config import Config +from alembic.script import ScriptDirectory from archive_path import open_file_in_tar, open_file_in_zip from aiida.common import json from aiida.common.progress_reporter import get_progress_reporter from aiida.tools.archive.common import MIGRATE_LOGGER from aiida.tools.archive.exceptions import ArchiveMigrationError, CorruptArchive +from aiida.tools.archive.implementations.sqlite_zip.common import copy_tar_to_zip, copy_zip_to_zip -from ..common import copy_tar_to_zip, copy_zip_to_zip from .legacy import FINAL_LEGACY_VERSION, LEGACY_MIGRATE_FUNCTIONS -from .legacy_to_new import perform_v1_migration +from .legacy_to_new import MIGRATED_TO_REVISION, perform_v1_migration -ALL_VERSIONS = ['0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.10', '0.11', '0.12', '1.0'] + +def _alembic_config() -> Config: + """Return an instance of an Alembic `Config`.""" + config = Config() + config.set_main_option('script_location', os.path.dirname(os.path.realpath(__file__))) + return config + + +def get_schema_version_head() -> str: + """Return the head schema version for this storage, i.e. 
the latest schema this storage can be migrated to.""" + return ScriptDirectory.from_config(_alembic_config()).revision_map.get_current_head('main') + + +def list_versions() -> List[str]: + """Return all available schema versions (oldest to latest).""" + legacy_versions = list(LEGACY_MIGRATE_FUNCTIONS) + [FINAL_LEGACY_VERSION] + alembic_versions = [ + entry.revision for entry in reversed(list(ScriptDirectory.from_config(_alembic_config()).walk_revisions())) + ] + return legacy_versions + alembic_versions def migrate( # pylint: disable=too-many-branches,too-many-statements inpath: Union[str, Path], outpath: Union[str, Path], - current_version: str, version: str, *, force: bool = False, @@ -50,16 +71,39 @@ def migrate( # pylint: disable=too-many-branches,too-many-statements if outpath.exists() and not outpath.is_file(): raise IOError('Existing output path is not a file') + # the file should be either a tar (legacy only) or zip file + if tarfile.is_tarfile(str(inpath)): + is_tar = True + elif zipfile.is_zipfile(str(inpath)): + is_tar = False + else: + raise CorruptArchive(f'The input file is neither a tar nor a zip file: {inpath}') + + # read the metadata.json which should always be present + try: + metadata = _read_json(inpath, 'metadata.json', is_tar) + except FileNotFoundError: + raise CorruptArchive('No metadata.json file found') + except IOError as exc: + raise CorruptArchive(f'No input file could not be read: {exc}') from exc + + # opbtain the current version + if 'export_version' not in metadata: + raise CorruptArchive('No export_version found in metadata.json') + current_version = metadata['export_version'] + # check versions are valid # versions 0.1, 0.2, 0.3 are no longer supported, # since 0.3 -> 0.4 requires costly migrations of repo files (you would need to unpack all of them) if current_version in ('0.1', '0.2', '0.3') or version in ('0.1', '0.2', '0.3'): raise ArchiveMigrationError( - f"Migration from '{current_version}' -> '{version}' is not supported 
in aiida-core v2" + f"Legacy migration from '{current_version}' -> '{version}' is not supported in aiida-core v2" ) - if current_version not in ALL_VERSIONS: + + all_versions = list_versions() + if current_version not in all_versions: raise ArchiveMigrationError(f"Unknown current version '{current_version}'") - if version not in ALL_VERSIONS: + if version not in all_versions: raise ArchiveMigrationError(f"Unknown target version '{version}'") # if we are already at the desired version, then no migration is required @@ -70,16 +114,6 @@ def migrate( # pylint: disable=too-many-branches,too-many-statements shutil.copyfile(inpath, outpath) return - # the file should be either a tar (legacy only) or zip file - if tarfile.is_tarfile(str(inpath)): - is_tar = True - elif zipfile.is_zipfile(str(inpath)): - is_tar = False - else: - raise CorruptArchive(f'The input file is neither a tar nor a zip file: {inpath}') - - # read the metadata.json which should always be present - metadata = _read_json(inpath, 'metadata.json', is_tar) # data.json will only be read from legacy archives data: Optional[Dict[str, Any]] = None @@ -123,9 +157,8 @@ def path_callback(inpath, outpath) -> bool: if data is None: MIGRATE_LOGGER.report('Extracting data.json ...') data = _read_json(inpath, 'data.json', is_tar) - current_version = perform_v1_migration( - inpath, Path(tmpdirname), 'new.zip', is_tar, metadata, data, compression - ) + perform_v1_migration(inpath, Path(tmpdirname), 'new.zip', is_tar, metadata, data, compression) + current_version = MIGRATED_TO_REVISION if not current_version == version: raise ArchiveMigrationError(f"Migration from '{current_version}' -> '{version}' failed") diff --git a/aiida/storage/sqlite_zip/migrations/script.py.mako b/aiida/storage/sqlite_zip/migrations/script.py.mako new file mode 100644 index 0000000000..2c0156303a --- /dev/null +++ b/aiida/storage/sqlite_zip/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: 
${down_revision | comma,n} Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/aiida/storage/sqlite_zip/migrations/v1_db_schema.py b/aiida/storage/sqlite_zip/migrations/v1_db_schema.py new file mode 100644 index 0000000000..1e56b1024f --- /dev/null +++ b/aiida/storage/sqlite_zip/migrations/v1_db_schema.py @@ -0,0 +1,211 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""This is the sqlite DB schema, corresponding to the `main_0001` revision of the `psql_dos` backend. + +For normal operation of the archive, +we auto-generate the schema from the models in ``aiida.storage.psql_dos.models``. +However, when migrating an archive from the old format, we require a fixed revision of the schema. + +The only difference between the PostGreSQL schema and SQLite one, +is the replacement of ``JSONB`` with ``JSON``, and ``UUID`` with ``CHAR(36)``. 
+""" +from sqlalchemy import ForeignKey, MetaData, orm +from sqlalchemy.dialects.sqlite import JSON +from sqlalchemy.schema import Column, UniqueConstraint +from sqlalchemy.types import CHAR, Boolean, DateTime, Integer, String, Text + +from aiida.common import timezone +from aiida.common.utils import get_new_uuid + +# see https://alembic.sqlalchemy.org/en/latest/naming.html +naming_convention = ( + ('pk', '%(table_name)s_pkey'), + ('ix', 'ix_%(table_name)s_%(column_0_N_label)s'), + ('uq', 'uq_%(table_name)s_%(column_0_N_name)s'), + ('ck', 'ck_%(table_name)s_%(constraint_name)s'), + ('fk', 'fk_%(table_name)s_%(column_0_N_name)s_%(referred_table_name)s'), +) + +ArchiveV1Base = orm.declarative_base(metadata=MetaData(naming_convention=dict(naming_convention))) + + +class DbAuthInfo(ArchiveV1Base): + """Class that keeps the authentication data.""" + + __tablename__ = 'db_dbauthinfo' + __table_args__ = (UniqueConstraint('aiidauser_id', 'dbcomputer_id'),) + + id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + aiidauser_id = Column( + Integer, + ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), + nullable=False, + index=True + ) + dbcomputer_id = Column( + Integer, + ForeignKey('db_dbcomputer.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), + nullable=False, + index=True + ) + _metadata = Column('metadata', JSON, default=dict, nullable=False) + auth_params = Column(JSON, default=dict, nullable=False) + enabled = Column(Boolean, default=True, nullable=False) + + +class DbComment(ArchiveV1Base): + """Class to store comments.""" + + __tablename__ = 'db_dbcomment' + + id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + uuid = Column(CHAR(36), default=get_new_uuid, nullable=False, unique=True) + dbnode_id = Column( + Integer, + ForeignKey('db_dbnode.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), + nullable=False, + index=True + ) + ctime = 
Column(DateTime(timezone=True), default=timezone.now, nullable=False) + mtime = Column(DateTime(timezone=True), default=timezone.now, nullable=False) + user_id = Column( + Integer, + ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), + nullable=False, + index=True + ) + content = Column(Text, default='', nullable=True) + + +class DbComputer(ArchiveV1Base): + """Class to store computers.""" + __tablename__ = 'db_dbcomputer' + + id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + uuid = Column(CHAR(36), default=get_new_uuid, nullable=False, unique=True) + label = Column(String(255), unique=True, nullable=False) + hostname = Column(String(255), default='', nullable=False) + description = Column(Text, default='', nullable=True) + scheduler_type = Column(String(255), default='', nullable=False) + transport_type = Column(String(255), default='', nullable=False) + _metadata = Column('metadata', JSON, default=dict, nullable=False) + + +class DbGroupNodes(ArchiveV1Base): + """Class to store join table for group -> nodes.""" + + __tablename__ = 'db_dbgroup_dbnodes' + __table_args__ = (UniqueConstraint('dbgroup_id', 'dbnode_id'),) + + id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + dbnode_id = Column( + Integer, ForeignKey('db_dbnode.id', deferrable=True, initially='DEFERRED'), nullable=False, index=True + ) + dbgroup_id = Column( + Integer, ForeignKey('db_dbgroup.id', deferrable=True, initially='DEFERRED'), nullable=False, index=True + ) + + +class DbGroup(ArchiveV1Base): + """Class to store groups.""" + + __tablename__ = 'db_dbgroup' + __table_args__ = (UniqueConstraint('label', 'type_string'),) + + id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + uuid = Column(CHAR(36), default=get_new_uuid, nullable=False, unique=True) + label = Column(String(255), nullable=False, index=True) + type_string = Column(String(255), default='', nullable=False, index=True) + time = 
Column(DateTime(timezone=True), default=timezone.now, nullable=False) + description = Column(Text, default='', nullable=False) + extras = Column(JSON, default=dict, nullable=False) + user_id = Column( + Integer, + ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), + nullable=False, + index=True + ) + + +class DbLog(ArchiveV1Base): + """Class to store logs.""" + + __tablename__ = 'db_dblog' + + id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + uuid = Column(CHAR(36), default=get_new_uuid, nullable=False, unique=True) + time = Column(DateTime(timezone=True), default=timezone.now, nullable=False) + loggername = Column(String(255), default='', nullable=False, index=True) + levelname = Column(String(255), default='', nullable=False, index=True) + dbnode_id = Column( + Integer, + ForeignKey('db_dbnode.id', deferrable=True, initially='DEFERRED', ondelete='CASCADE'), + nullable=False, + index=True + ) + message = Column(Text(), default='', nullable=False) + _metadata = Column('metadata', JSON, default=dict, nullable=False) + + +class DbNode(ArchiveV1Base): + """Class to store nodes.""" + + __tablename__ = 'db_dbnode' + + id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + uuid = Column(CHAR(36), default=get_new_uuid, nullable=False, unique=True) + node_type = Column(String(255), default='', nullable=False, index=True) + process_type = Column(String(255), index=True) + label = Column(String(255), default='', index=True, nullable=False) + description = Column(Text(), default='', nullable=False) + ctime = Column(DateTime(timezone=True), default=timezone.now, nullable=False, index=True) + mtime = Column(DateTime(timezone=True), default=timezone.now, nullable=False, index=True) + attributes = Column(JSON) + extras = Column(JSON) + repository_metadata = Column(JSON, nullable=False, default=dict, server_default='{}') + dbcomputer_id = Column( + Integer, + ForeignKey('db_dbcomputer.id', deferrable=True, 
initially='DEFERRED', ondelete='RESTRICT'), + nullable=True + ) + user_id = Column( + Integer, ForeignKey('db_dbuser.id', deferrable=True, initially='DEFERRED', ondelete='restrict'), nullable=False + ) + + +class DbLink(ArchiveV1Base): + """Class to store links between nodes.""" + + __tablename__ = 'db_dblink' + + id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + input_id = Column( + Integer, ForeignKey('db_dbnode.id', deferrable=True, initially='DEFERRED'), nullable=False, index=True + ) + output_id = Column( + Integer, + ForeignKey('db_dbnode.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), + nullable=False, + index=True + ) + label = Column(String(255), default='', nullable=False, index=True) + type = Column(String(255), nullable=False, index=True) + + +class DbUser(ArchiveV1Base): + """Class to store users.""" + + __tablename__ = 'db_dbuser' + + id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + email = Column(String(254), nullable=False, unique=True) + first_name = Column(String(254), default='', nullable=False) + last_name = Column(String(254), default='', nullable=False) + institution = Column(String(254), default='', nullable=False) diff --git a/aiida/storage/sqlite_zip/migrations/versions/__init__.py b/aiida/storage/sqlite_zip/migrations/versions/__init__.py new file mode 100644 index 0000000000..2776a55f97 --- /dev/null +++ b/aiida/storage/sqlite_zip/migrations/versions/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### diff --git a/aiida/storage/sqlite_zip/migrations/versions/main_0001_initial.py b/aiida/storage/sqlite_zip/migrations/versions/main_0001_initial.py new file mode 100644 index 0000000000..8eadfb649d --- /dev/null +++ b/aiida/storage/sqlite_zip/migrations/versions/main_0001_initial.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Initial main branch schema + +Revision ID: main_0001 +Revises: +Create Date: 2021-02-02 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = 'main_0001' +down_revision = None +branch_labels = ('main',) +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.create_table( + 'db_dbcomputer', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('label', sa.String(length=255), nullable=False, unique=True), + sa.Column('hostname', sa.String(length=255), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('scheduler_type', sa.String(length=255), nullable=False), + sa.Column('transport_type', sa.String(length=255), nullable=False), + 
sa.Column('metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + ) + op.create_table( + 'db_dbuser', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('email', sa.String(length=254), nullable=False, unique=True), + sa.Column('first_name', sa.String(length=254), nullable=False), + sa.Column('last_name', sa.String(length=254), nullable=False), + sa.Column('institution', sa.String(length=254), nullable=False), + ) + op.create_table( + 'db_dbauthinfo', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('aiidauser_id', sa.Integer(), nullable=False, index=True), + sa.Column('dbcomputer_id', sa.Integer(), nullable=False, index=True), + sa.Column('metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('auth_params', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('enabled', sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint( + ['aiidauser_id'], + ['db_dbuser.id'], + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['dbcomputer_id'], + ['db_dbcomputer.id'], + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True, + ), + sa.UniqueConstraint('aiidauser_id', 'dbcomputer_id'), + ) + op.create_table( + 'db_dbgroup', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('label', sa.String(length=255), nullable=False, index=True), + sa.Column('type_string', sa.String(length=255), nullable=False, index=True), + sa.Column('time', sa.DateTime(timezone=True), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('extras', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False, index=True), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True, + 
), + sa.UniqueConstraint('label', 'type_string'), + ) + + op.create_table( + 'db_dbnode', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('node_type', sa.String(length=255), nullable=False, index=True), + sa.Column('process_type', sa.String(length=255), nullable=True, index=True), + sa.Column('label', sa.String(length=255), nullable=False, index=True), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('ctime', sa.DateTime(timezone=True), nullable=False, index=True), + sa.Column('mtime', sa.DateTime(timezone=True), nullable=False, index=True), + sa.Column('attributes', postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column('extras', postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column('repository_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('dbcomputer_id', sa.Integer(), nullable=True, index=True), + sa.Column('user_id', sa.Integer(), nullable=False, index=True), + sa.ForeignKeyConstraint( + ['dbcomputer_id'], + ['db_dbcomputer.id'], + ondelete='RESTRICT', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + ondelete='restrict', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dbcomment', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('dbnode_id', sa.Integer(), nullable=False, index=True), + sa.Column('ctime', sa.DateTime(timezone=True), nullable=False), + sa.Column('mtime', sa.DateTime(timezone=True), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False, index=True), + sa.Column('content', sa.Text(), nullable=False), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True, + ), + 
sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dbgroup_dbnodes', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('dbnode_id', sa.Integer(), nullable=False, index=True), + sa.Column('dbgroup_id', sa.Integer(), nullable=False, index=True), + sa.ForeignKeyConstraint(['dbgroup_id'], ['db_dbgroup.id'], initially='DEFERRED', deferrable=True), + sa.ForeignKeyConstraint(['dbnode_id'], ['db_dbnode.id'], initially='DEFERRED', deferrable=True), + sa.UniqueConstraint('dbgroup_id', 'dbnode_id'), + ) + op.create_table( + 'db_dblink', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('input_id', sa.Integer(), nullable=False, index=True), + sa.Column('output_id', sa.Integer(), nullable=False, index=True), + sa.Column('label', sa.String(length=255), nullable=False, index=True), + sa.Column('type', sa.String(length=255), nullable=False, index=True), + sa.ForeignKeyConstraint(['input_id'], ['db_dbnode.id'], initially='DEFERRED', deferrable=True), + sa.ForeignKeyConstraint( + ['output_id'], + ['db_dbnode.id'], + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dblog', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('time', sa.DateTime(timezone=True), nullable=False), + sa.Column('loggername', sa.String(length=255), nullable=False, index=True), + sa.Column('levelname', sa.String(length=50), nullable=False, index=True), + sa.Column('dbnode_id', sa.Integer(), nullable=False, index=True), + sa.Column('message', sa.Text(), nullable=False), + sa.Column('metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True, + ), + ) + + 
+def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of main_0001.') diff --git a/aiida/storage/sqlite_zip/models.py b/aiida/storage/sqlite_zip/models.py index 990b2eeff7..eee99c45d4 100644 --- a/aiida/storage/sqlite_zip/models.py +++ b/aiida/storage/sqlite_zip/models.py @@ -12,9 +12,11 @@ These models are intended to be identical to those of the `psql_dos` backend, except for changes to the database specific types: -- UUID -- DateTime -- JSONB +- UUID -> CHAR(32) +- DateTime -> TZDateTime +- JSONB -> JSON + +Also, `varchar_pattern_ops` indexes are not possible in sqlite. """ from datetime import datetime from typing import Optional @@ -69,7 +71,9 @@ def process_result_value(self, value: Optional[datetime], dialect): return value.astimezone(pytz.utc) -SqliteBase = sa.orm.declarative_base(cls=SqliteModel, name='SqliteModel') +SqliteBase = sa.orm.declarative_base( + cls=SqliteModel, name='SqliteModel', metadata=sa.MetaData(naming_convention=dict(base.naming_convention)) +) def pg_to_sqlite(pg_table: sa.Table): diff --git a/aiida/tools/archive/abstract.py b/aiida/tools/archive/abstract.py index b45eded9a6..371be5399c 100644 --- a/aiida/tools/archive/abstract.py +++ b/aiida/tools/archive/abstract.py @@ -180,13 +180,8 @@ class ArchiveFormatAbstract(ABC): @property @abstractmethod - def versions(self) -> List[str]: - """Return ordered list of versions of the archive format, oldest -> latest.""" - - @property def latest_version(self) -> str: - """Return the latest version of the archive format.""" - return self.versions[-1] + """Return the latest schema version of the archive format.""" @property @abstractmethod @@ -279,13 +274,13 @@ def migrate( """ -def get_format(name: str = 'sqlitezip') -> ArchiveFormatAbstract: +def get_format(name: str = 'sqlite_zip') -> ArchiveFormatAbstract: """Get the archive format instance. 
:param name: name of the archive format :return: archive format instance """ # to-do entry point for archive formats? - assert name == 'sqlitezip' - from aiida.tools.archive.implementations.sqlite.main import ArchiveFormatSqlZip + assert name == 'sqlite_zip' + from aiida.tools.archive.implementations.sqlite_zip.main import ArchiveFormatSqlZip return ArchiveFormatSqlZip() diff --git a/aiida/tools/archive/create.py b/aiida/tools/archive/create.py index edd60d5132..5a83a860b1 100644 --- a/aiida/tools/archive/create.py +++ b/aiida/tools/archive/create.py @@ -36,7 +36,7 @@ from .abstract import ArchiveFormatAbstract, ArchiveWriterAbstract from .common import batch_iter, entity_type_to_orm from .exceptions import ArchiveExportError, ExportValidationError -from .implementations.sqlite import ArchiveFormatSqlZip +from .implementations.sqlite_zip import ArchiveFormatSqlZip __all__ = ('create_archive', 'EXPORT_LOGGER') diff --git a/aiida/tools/archive/implementations/__init__.py b/aiida/tools/archive/implementations/__init__.py index 6f85411389..fed227acb2 100644 --- a/aiida/tools/archive/implementations/__init__.py +++ b/aiida/tools/archive/implementations/__init__.py @@ -14,7 +14,7 @@ # yapf: disable # pylint: disable=wildcard-import -from .sqlite import * +from .sqlite_zip import * __all__ = ( 'ArchiveFormatSqlZip', diff --git a/aiida/tools/archive/implementations/sqlite/migrations/v1_db_schema.py b/aiida/tools/archive/implementations/sqlite/migrations/v1_db_schema.py deleted file mode 100644 index 30baf8709f..0000000000 --- a/aiida/tools/archive/implementations/sqlite/migrations/v1_db_schema.py +++ /dev/null @@ -1,169 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""This is the sqlite DB schema, coresponding to the 34a831f4286d main DB revision. - -For normal operation of the archive, -we auto-generate the schema from the models in ``aiida.storage.psql_dos.models``. -However, when migrating an archive from the old format, we require a fixed revision of the schema. - -The only difference between the PostGreSQL schema and SQLite one, -is the replacement of ``JSONB`` with ``JSON``, and ``UUID`` with ``CHAR(36)``. -""" -from sqlalchemy import ForeignKey, orm -from sqlalchemy.dialects.sqlite import JSON -from sqlalchemy.schema import Column, Index, UniqueConstraint -from sqlalchemy.types import CHAR, Boolean, DateTime, Integer, String, Text - -ArchiveV1Base = orm.declarative_base() - - -class DbAuthInfo(ArchiveV1Base): - """Class that keeps the authernification data.""" - - __tablename__ = 'db_dbauthinfo' - __table_args__ = (UniqueConstraint('aiidauser_id', 'dbcomputer_id'),) - - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - aiidauser_id = Column( - Integer, ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED') - ) - dbcomputer_id = Column( - Integer, ForeignKey('db_dbcomputer.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED') - ) - _metadata = Column('metadata', JSON) - auth_params = Column(JSON) - enabled = Column(Boolean, default=True) - - -class DbComment(ArchiveV1Base): - """Class to store comments.""" - - __tablename__ = 'db_dbcomment' - - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - uuid = Column(CHAR(36), unique=True) - dbnode_id = Column(Integer, ForeignKey('db_dbnode.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED')) - 
ctime = Column(DateTime(timezone=True)) - mtime = Column(DateTime(timezone=True)) - user_id = Column(Integer, ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED')) - content = Column(Text, nullable=True) - - -class DbComputer(ArchiveV1Base): - """Class to store computers.""" - __tablename__ = 'db_dbcomputer' - - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - uuid = Column(CHAR(36), unique=True) - label = Column(String(255), unique=True, nullable=False) - hostname = Column(String(255)) - description = Column(Text, nullable=True) - scheduler_type = Column(String(255)) - transport_type = Column(String(255)) - _metadata = Column('metadata', JSON) - - -class DbGroupNodes(ArchiveV1Base): - """Class to store join table for group -> nodes.""" - - __tablename__ = 'db_dbgroup_dbnodes' - __table_args__ = (UniqueConstraint('dbgroup_id', 'dbnode_id', name='db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key'),) - - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - dbnode_id = Column(Integer, ForeignKey('db_dbnode.id', deferrable=True, initially='DEFERRED')) - dbgroup_id = Column(Integer, ForeignKey('db_dbgroup.id', deferrable=True, initially='DEFERRED')) - - -class DbGroup(ArchiveV1Base): - """Class to store groups.""" - - __tablename__ = 'db_dbgroup' - __table_args__ = (UniqueConstraint('label', 'type_string'),) - - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - uuid = Column(CHAR(36), unique=True) - label = Column(String(255), index=True) - type_string = Column(String(255), default='', index=True) - time = Column(DateTime(timezone=True)) - description = Column(Text, nullable=True) - extras = Column(JSON, default=dict, nullable=False) - user_id = Column(Integer, ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED')) - - Index('db_dbgroup_dbnodes_dbnode_id_idx', DbGroupNodes.dbnode_id) - Index('db_dbgroup_dbnodes_dbgroup_id_idx', 
DbGroupNodes.dbgroup_id) - - -class DbLog(ArchiveV1Base): - """Class to store logs.""" - - __tablename__ = 'db_dblog' - - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - uuid = Column(CHAR(36), unique=True) - time = Column(DateTime(timezone=True)) - loggername = Column(String(255), index=True) - levelname = Column(String(255), index=True) - dbnode_id = Column( - Integer, ForeignKey('db_dbnode.id', deferrable=True, initially='DEFERRED', ondelete='CASCADE'), nullable=False - ) - message = Column(Text(), nullable=True) - _metadata = Column('metadata', JSON) - - -class DbNode(ArchiveV1Base): - """Class to store nodes.""" - - __tablename__ = 'db_dbnode' - - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - uuid = Column(CHAR(36), unique=True) - node_type = Column(String(255), index=True) - process_type = Column(String(255), index=True) - label = Column(String(255), index=True, nullable=True, default='') - description = Column(Text(), nullable=True, default='') - ctime = Column(DateTime(timezone=True)) - mtime = Column(DateTime(timezone=True)) - attributes = Column(JSON) - extras = Column(JSON) - repository_metadata = Column(JSON, nullable=False, default=dict, server_default='{}') - dbcomputer_id = Column( - Integer, - ForeignKey('db_dbcomputer.id', deferrable=True, initially='DEFERRED', ondelete='RESTRICT'), - nullable=True - ) - user_id = Column( - Integer, ForeignKey('db_dbuser.id', deferrable=True, initially='DEFERRED', ondelete='restrict'), nullable=False - ) - - -class DbLink(ArchiveV1Base): - """Class to store links between nodes.""" - - __tablename__ = 'db_dblink' - - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - input_id = Column(Integer, ForeignKey('db_dbnode.id', deferrable=True, initially='DEFERRED'), index=True) - output_id = Column( - Integer, ForeignKey('db_dbnode.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), index=True - ) - label = Column(String(255), index=True, 
nullable=False) - type = Column(String(255), index=True) - - -class DbUser(ArchiveV1Base): - """Class to store users.""" - - __tablename__ = 'db_dbuser' - - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - email = Column(String(254), unique=True, index=True) - first_name = Column(String(254), nullable=True) - last_name = Column(String(254), nullable=True) - institution = Column(String(254), nullable=True) diff --git a/aiida/tools/archive/implementations/sqlite/__init__.py b/aiida/tools/archive/implementations/sqlite_zip/__init__.py similarity index 100% rename from aiida/tools/archive/implementations/sqlite/__init__.py rename to aiida/tools/archive/implementations/sqlite_zip/__init__.py diff --git a/aiida/tools/archive/implementations/sqlite/common.py b/aiida/tools/archive/implementations/sqlite_zip/common.py similarity index 100% rename from aiida/tools/archive/implementations/sqlite/common.py rename to aiida/tools/archive/implementations/sqlite_zip/common.py diff --git a/aiida/tools/archive/implementations/sqlite/main.py b/aiida/tools/archive/implementations/sqlite_zip/main.py similarity index 90% rename from aiida/tools/archive/implementations/sqlite/main.py rename to aiida/tools/archive/implementations/sqlite_zip/main.py index 74a3d458fd..4b05e94545 100644 --- a/aiida/tools/archive/implementations/sqlite/main.py +++ b/aiida/tools/archive/implementations/sqlite_zip/main.py @@ -9,12 +9,12 @@ ########################################################################### """The file format implementation""" from pathlib import Path -from typing import Any, List, Literal, Union, overload +from typing import Any, Literal, Union, overload +from aiida.storage.sqlite_zip.migrations.main import get_schema_version_head, migrate from aiida.storage.sqlite_zip.utils import read_version from aiida.tools.archive.abstract import ArchiveFormatAbstract -from .migrations.main import ALL_VERSIONS, migrate from .reader import ArchiveReaderSqlZip from .writer 
import ArchiveAppenderSqlZip, ArchiveWriterSqlZip @@ -37,8 +37,8 @@ class ArchiveFormatSqlZip(ArchiveFormatAbstract): """ @property - def versions(self) -> List[str]: - return ALL_VERSIONS + def latest_version(self) -> str: + return get_schema_version_head() def read_version(self, path: Union[str, Path]) -> str: return read_version(path) @@ -107,5 +107,4 @@ def migrate( :param path: archive path """ - current_version = self.read_version(inpath) - return migrate(inpath, outpath, current_version, version, force=force, compression=compression) + return migrate(inpath, outpath, version, force=force, compression=compression) diff --git a/aiida/tools/archive/implementations/sqlite/reader.py b/aiida/tools/archive/implementations/sqlite_zip/reader.py similarity index 100% rename from aiida/tools/archive/implementations/sqlite/reader.py rename to aiida/tools/archive/implementations/sqlite_zip/reader.py diff --git a/aiida/tools/archive/implementations/sqlite/writer.py b/aiida/tools/archive/implementations/sqlite_zip/writer.py similarity index 100% rename from aiida/tools/archive/implementations/sqlite/writer.py rename to aiida/tools/archive/implementations/sqlite_zip/writer.py diff --git a/aiida/tools/archive/imports.py b/aiida/tools/archive/imports.py index 7c0b002bf9..6d87d36704 100644 --- a/aiida/tools/archive/imports.py +++ b/aiida/tools/archive/imports.py @@ -29,7 +29,7 @@ from .abstract import ArchiveFormatAbstract from .common import batch_iter, entity_type_to_orm from .exceptions import ImportTestRun, ImportUniquenessError, ImportValidationError, IncompatibleArchiveVersionError -from .implementations.sqlite import ArchiveFormatSqlZip +from .implementations.sqlite_zip import ArchiveFormatSqlZip __all__ = ('IMPORT_LOGGER', 'import_archive') @@ -127,8 +127,8 @@ def import_archive( # its a bit weird at the moment because django/sqlalchemy have different versioning if not archive_format.read_version(path) == archive_format.latest_version: raise 
IncompatibleArchiveVersionError( - f'The archive version {archive_format.read_version(path)} ' - f'is not the latest version {archive_format.latest_version}' + f'The archive version {archive_format.read_version(path)!r} ' + f'is not the latest version {archive_format.latest_version!r}' ) IMPORT_LOGGER.report( diff --git a/tests/cmdline/commands/test_archive_create.py b/tests/cmdline/commands/test_archive_create.py index 51a2229ce0..895f9acc9f 100644 --- a/tests/cmdline/commands/test_archive_create.py +++ b/tests/cmdline/commands/test_archive_create.py @@ -15,6 +15,7 @@ from aiida.cmdline.commands import cmd_archive from aiida.orm import Code, Computer, Dict, Group +from aiida.storage.sqlite_zip.migrations.main import list_versions from aiida.tools.archive import ArchiveFormatSqlZip from tests.utils.archives import get_archive_file @@ -85,7 +86,7 @@ def test_create_basic(run_cli_command, tmp_path): assert archive.querybuilder().append(Dict, project=['uuid']).all(flat=True) == [node.uuid] -@pytest.mark.parametrize('version', ArchiveFormatSqlZip().versions[:-1]) +@pytest.mark.parametrize('version', ('0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.10', '0.11', '0.12')) def test_migrate_versions_old(run_cli_command, tmp_path, version): """Migrating archives with a version older than the current should work.""" archive = f'export_v{version}_simple.aiida' @@ -177,7 +178,7 @@ def test_migrate_low_verbosity(run_cli_command, tmp_path): assert ArchiveFormatSqlZip().read_version(filename_output) == ArchiveFormatSqlZip().latest_version -@pytest.mark.parametrize('version', ArchiveFormatSqlZip().versions) +@pytest.mark.parametrize('version', list_versions()) def test_inspect_version(run_cli_command, version): """Test the functionality of `verdi export inspect --version`.""" archive = f'export_v{version}_simple.aiida' diff --git a/tests/cmdline/commands/test_archive_import.py b/tests/cmdline/commands/test_archive_import.py index 7ea347e882..8e319cc17f 100644 --- 
a/tests/cmdline/commands/test_archive_import.py +++ b/tests/cmdline/commands/test_archive_import.py @@ -14,6 +14,7 @@ from aiida.cmdline.commands import cmd_archive from aiida.orm import Group +from aiida.storage.sqlite_zip.migrations.main import list_versions from aiida.tools.archive import ArchiveFormatSqlZip from tests.utils.archives import get_archive_file @@ -176,7 +177,7 @@ def test_import_old_local_archives(self): """ Test import of old local archives Expected behavior: Automatically migrate to newest version and import correctly. """ - for version in ArchiveFormatSqlZip().versions: + for version in list_versions(): archive, version = (f'export_v{version}_simple.aiida', f'{version}') options = [get_archive_file(archive, filepath=self.archive_path)] result = self.cli_runner.invoke(cmd_archive.import_archive, options) diff --git a/tests/static/export/compare/django.aiida b/tests/static/export/compare/django.aiida index 254e73e62d7de3eb7f76620fb0412143148a0fd9..08a035840e8ac852164b2ef2a6e42ee652c0fe18 100644 GIT binary patch delta 944 zcmbO(zEavFz?+#xgn|8)T&;W$dGwZ@bK>9$SkBNbS7ie^D zYDr>BVo9Q2R&jnF*mNhLsUYl(Y5LSlx48}(@UT4y=iEE(;=2|1cx$;Q25#qLUMCwn zllkM_wXa$DH*ct4?Qm$iz>oYTA)CZ^IjeF{*{T$6G2!kF<3&wTtJ#)WUQl3JoYVQB z>lsgyfs=P#l93=J@BD&g0X$7Fz@ushAiJ!M^*w!#^OrHeo1G)1V&U7JP`_gj6#-y4N&rJ8KHk^S**`c!UoRee0Qms}2ZYgr zKRH1(IlVq1DdB^!PuK_ku#SKRrU??v>H^Y?A8i%bK1s4lbTIWPDmn^?MI0AbnC2LK zh)0zDl(*{h=dV=7M4l{p^;AV{`V_I{%a*82TcX9z;(RuNQRog2Pmx0D&d!srKX*F3 zOxpYC%A6}tCY|}SXTgy@UqV(y)d?_e;q%RnOe;(a@>`W&ALo_D%G}&IJwnLiYf&y^ zq(E*)YFfrJZk1jiUSM)y=Lnyidoa+@2x1YF66+yJ z5!ql+%0mu)P|AaWhQ@ecO2r<`IMN}y(a8RhV2DFDyO^7S?AQ$OW@Q7pk{Jjkfk9Qp H1L6Sy10n|o delta 410 zcmZ1}JzZQQz?+#xgn@y9gF#H#F*w=eNDn&~kZZui0HRWo^ok2}GD}j8H+pb#)gvnn zJ+y;s3s6Z169WS;kj_mlNlZyBNz}_K&d-~A={DCP10J>q;hcM?U3|CV9&au8M8oZT z%(wd1ooW2>?pkWo@k8sFIOP=|?%(9KI&K!WjS!)xIniRgfUv$BCi Nn1HYtNb_-ncmVi_eEI+Y diff --git a/tests/static/export/compare/sqlalchemy.aiida b/tests/static/export/compare/sqlalchemy.aiida index 
bd78b641e28ff3b087a3930109d1aa88b7f59732..810d73ee4d33a490e370affa39e17476f0677d84 100644 GIT binary patch delta 934 zcmbOxzFgWPz?+#xgnt5)!>3-l&2`9thv7mzXYI6$cbDfo|948cvNp-#_Qk1| z0{`#s(x{l(da=IU;jpLEiK$nkzFsO>5!(7{!A-7hIkx{IZyq@K!r12Xfo6_1Wr}Uw zx0FvD^jds`bJwke?3+ygPR~d&idRz!o}~I{S&JP{#ewe|#h0;c=A2QH=w>jVx47x3 z{+CTnH$HXme4gr^r1-zcQ=oD#Gs|Iq3EO8P|EBc`^gg@%vFwhKTK&^=ftFit-FW4B zM{(0;htR@v$4@bB6nuYk`rf;Y0p9E!(szAN9fXDf_R!%12B!otbmHTE9i9DyBlPv+ zu?JKaFpxkPITq@HMkgnTCZ{JPC4BJp3H!hw))COaG(mz{T|k=gBd-G6C&`@>9ZY?S zijD$e5y!<9#GHc<@ra5iBuwx+cmArkj>ZYktEWA6^fYz-d_6sNJVW_7+GZVOQi+h1 zc;fJ?NO*Ghr{cy-N2`vmv|M>ob>`1X$Ii-^D+9JFh#MOjoUmQ6QunmZIiFLWU(a}Z zu1!eL_$=hjv~u13l3+F$>8P2J5)ur+RKd<6F8*c80-$3-z76nZWD;QprohR8+@kdg zKv4*2Xav#3r$4wkz?2M1Z^&T{N^daG&=|(WfGhRYV^4nQW+S^F6k9OR&{)LHKwjJd WjR^2&6=nk&#SDa!z@Vw%0r3DdsTUCd delta 385 zcmZ23Jx!cFz?+#xgn?mlBabK#14BxZUU6YgW=X2?LJzKbunYqSgORXf@M?v3zxOaO zFzjbyVBiJPxv3?IDTyVCdRfK!c~dW)f9Cnl_nlI%tUcVcZLxN- z!oJ<5GZz$#x$!l&^tVi~UODB>&UY=7mQLtZinf?=H^O*No7-w_pV|d+3~o0?4*AY7 z{&28B_a^JEdav$l6L|kjS3H%mwezGXg;p1`M6-;wY4H4;2u>WuX_k1srtaHWxK*6-{A4 zSH7{qBs#VtLl_y9QbU}{z{MdkZY;VBaq1t!9}Wjh+(k3|zLv^4@j%b!M9Fk3r1r^oS%FBiZa`5OTRbVO|r44G{M=n4_`1Jan=r= zS4ol$!KR&Gs%7`XMrOXXbheMoS6o{6w&?wVez_v7R@UgaP}Kd#S%tksa$j?;@aA6$ zyXwalf=7a%To;O}5_@Ei9#76){6*k!d>tAdHuGSEV?2JKZah#E3A(*4B`t-FssChM_Aq^o^nIm~&*g^1J9O<=+S3zyK0X@R zBlh!~)GI)u;j@oA;x5RCI$H-!43`Ig7;hRnpa-Z^w+Cnh)VOg#=N#xkrS6b^i)#!d za0Eig$6&=q!n8s}3PONahZH&9CY9w9hAs;S(Y45_kJv6MD7EjAd1Kl?=bmOn@%RzP zdmGzetEiToI>vHSLmMejRgEs{{(NOLqM}-IYSlS6wVY3ZYUfY+L#=0~3TJQoV;HI> zrz)U3&{yjQ?@+5&4w2=);NEUngbK-#9#fC7y)*5X*sk`szN`;g+tixntHX#suER8&i@wG++NU5iVEwRnDwwTL2JhAj zA^7NB;e-;FqH=QZ)n)K`2PN=!Flv@Suye{!k^@UoIXT#)xS5*bTnIfN`%%5N+8aCE z=6=BRuGT)mv?6n}zVLlwf##xGa;cv25)D$Iau_;9oju*h)k5m(|5*<;Kth3mU|gZ=$-oGz`q>$V1dFPpM@y=J zoMm~79=y|tiH{&6^Uh{)n420&83>e4aR6Qv=Xns)m`Ym_K4^%FV#2{eK7AlQW{AL! 
zct&k#}_m2py&m#HR)k z*t-+R&2;v_T2I`Ard^Ap q)I@BtP64K!bO6sdh=71S;I=gNCGYnF4j0{&g5c31z_3pp=Du<6|7|P|GlD& z43pP8xNR17Ol6cp*5|W_`&U0B1H)x&u)hAi?wdc`O+-~=Y>_WA1E}T)3s{ZpF89qJ zO=qF1QJ=872T9Gv?e3dnm1Z$YBips$cEsD8ybKJ*i$UfvO!ifC+q_xrHZO{UfbM5t z0uev}^oZSdcR6MTRKJ=#o#f0!xM%A=_s!hS*{JUMKfRBiHJ^du@FgygF}L@-Z=UGJ zo{wVH_S-z-{y=L^K&;B#<33ry&T4XotvHIZ%6t5$1As~{K$KnE4N~SjxzC0lMOnJS z-9#j1f54^%L1RE+Hz)?aTDonHwu)zj3Oeo(ot(16eRGEjT0j}*y4!yT+O{3&Nl-v7 zba9)^?)nqOI;%tvzBNGkQb?q107i<8#X6YL>$i&XFfgPf=@l2|WR|2FZ}eCwSC1SQ zJYObMt^!)|1{l`BC}GG=ElEsCEJ@VMD$dWFddZjTkbwZ}gVj8d-Y1s)f7f--X^F_~ zBr!7~XVD|OXP0eSksy9phw1Z=O*6lX{e7J`H-FhZ+aB4Yax7k|WeQQryUO-n5AwYF zXin@KiRNQH&Acx?Wv127eQzx>XRqTKM^Bdg!;2qg{t?Ye?lbk#ID95P*T`2VaZ^jg zsT9qPJtvKlEGIub!lkC&G1VtbX!VRWN^P%>uZ`MJTPUjeDKm#V%Pv+*_}1_6dG_%E z-i%Bl%!qt|?k}L%z@ULqLvjO}H{p6G1B1&Arjmi7fiZC+a^S($Va;pE`hal-%z2z} z4WOLIkE~!3Pz5j_>cCY%jYQ3c$T~8ZCId4ih6dC;iL4=tWil{_VrW3kpvW3R*(L+C zt1h~e&~htWM+0L32RQFySOHRk$hvS1!0_x517&0k4X7CzS;I%M$-r!lp#e2lBWu_z z0m|bT8c?%1vIfvM*Qst6%ZIvgH}Gkbu=*k=mnP+7_LMuE#Mk}Q4`cZ8CZ6R1$eWvfrN#C Pa4|ar!(U)#VPF6N>X&f} diff --git a/tests/tools/archive/migration/conftest.py b/tests/tools/archive/migration/conftest.py index a6b9bccc4d..7d285962cb 100644 --- a/tests/tools/archive/migration/conftest.py +++ b/tests/tools/archive/migration/conftest.py @@ -10,7 +10,7 @@ """Module with tests for export archive migrations.""" import pytest -from aiida.tools.archive.implementations.sqlite.migrations.utils import verify_metadata_version +from aiida.storage.sqlite_zip.migrations.legacy.utils import verify_metadata_version from tests.utils.archives import get_archive_file, read_json_files diff --git a/tests/tools/archive/migration/test_legacy_funcs.py b/tests/tools/archive/migration/test_legacy_funcs.py index 79aba89ab0..380c609109 100644 --- a/tests/tools/archive/migration/test_legacy_funcs.py +++ b/tests/tools/archive/migration/test_legacy_funcs.py @@ -12,8 +12,8 @@ import pytest from aiida import get_version -from 
aiida.tools.archive.implementations.sqlite.migrations.legacy import LEGACY_MIGRATE_FUNCTIONS -from aiida.tools.archive.implementations.sqlite.migrations.utils import verify_metadata_version +from aiida.storage.sqlite_zip.migrations.legacy import LEGACY_MIGRATE_FUNCTIONS +from aiida.storage.sqlite_zip.migrations.legacy.utils import verify_metadata_version from tests.utils.archives import get_archive_file, read_json_files diff --git a/tests/tools/archive/migration/test_v04_to_v05.py b/tests/tools/archive/migration/test_v04_to_v05.py index b22f0e2e48..f394426eff 100644 --- a/tests/tools/archive/migration/test_v04_to_v05.py +++ b/tests/tools/archive/migration/test_v04_to_v05.py @@ -8,7 +8,7 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Test archive file migration from export version 0.4 to 0.5""" -from aiida.tools.archive.implementations.sqlite.migrations.legacy import migrate_v4_to_v5 +from aiida.storage.sqlite_zip.migrations.legacy import migrate_v4_to_v5 def test_migrate_external(migrate_from_func): diff --git a/tests/tools/archive/migration/test_v05_to_v06.py b/tests/tools/archive/migration/test_v05_to_v06.py index 4a6a29342c..6d0c77a6f5 100644 --- a/tests/tools/archive/migration/test_v05_to_v06.py +++ b/tests/tools/archive/migration/test_v05_to_v06.py @@ -9,8 +9,8 @@ ########################################################################### """Test archive file migration from export version 0.5 to 0.6""" from aiida.storage.psql_dos.migrations.utils.calc_state import STATE_MAPPING -from aiida.tools.archive.implementations.sqlite.migrations.legacy import migrate_v5_to_v6 -from aiida.tools.archive.implementations.sqlite.migrations.utils import verify_metadata_version +from aiida.storage.sqlite_zip.migrations.legacy import migrate_v5_to_v6 +from aiida.storage.sqlite_zip.migrations.legacy.utils import verify_metadata_version from tests.utils.archives import 
get_archive_file, read_json_files diff --git a/tests/tools/archive/migration/test_v06_to_v07.py b/tests/tools/archive/migration/test_v06_to_v07.py index 96a80aee31..a80e1ea5d9 100644 --- a/tests/tools/archive/migration/test_v06_to_v07.py +++ b/tests/tools/archive/migration/test_v06_to_v07.py @@ -10,11 +10,11 @@ """Test archive file migration from export version 0.6 to 0.7""" import pytest -from aiida.tools.archive.exceptions import CorruptArchive -from aiida.tools.archive.implementations.sqlite.migrations.legacy.v06_to_v07 import ( +from aiida.storage.sqlite_zip.migrations.legacy.v06_to_v07 import ( data_migration_legacy_process_attributes, migrate_v6_to_v7, ) +from aiida.tools.archive.exceptions import CorruptArchive def test_migrate_external(migrate_from_func): diff --git a/tests/tools/archive/migration/test_v07_to_v08.py b/tests/tools/archive/migration/test_v07_to_v08.py index 34c9f0ece7..7308fa0ce4 100644 --- a/tests/tools/archive/migration/test_v07_to_v08.py +++ b/tests/tools/archive/migration/test_v07_to_v08.py @@ -8,10 +8,7 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Test archive file migration from export version 0.7 to 0.8""" -from aiida.tools.archive.implementations.sqlite.migrations.legacy.v07_to_v08 import ( - migrate_v7_to_v8, - migration_default_link_label, -) +from aiida.storage.sqlite_zip.migrations.legacy.v07_to_v08 import migrate_v7_to_v8, migration_default_link_label def test_migrate_external(migrate_from_func): diff --git a/tests/tools/archive/migration/test_v08_to_v09.py b/tests/tools/archive/migration/test_v08_to_v09.py index 23c5adb136..46049771f4 100644 --- a/tests/tools/archive/migration/test_v08_to_v09.py +++ b/tests/tools/archive/migration/test_v08_to_v09.py @@ -8,10 +8,7 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Test archive file migration 
from export version 0.8 to 0.9""" -from aiida.tools.archive.implementations.sqlite.migrations.legacy.v08_to_v09 import ( - migrate_v8_to_v9, - migration_dbgroup_type_string, -) +from aiida.storage.sqlite_zip.migrations.legacy.v08_to_v09 import migrate_v8_to_v9, migration_dbgroup_type_string def test_migrate_external(migrate_from_func): diff --git a/tests/tools/archive/test_backend.py b/tests/tools/archive/test_backend.py index d537c32d09..08eb48317e 100644 --- a/tests/tools/archive/test_backend.py +++ b/tests/tools/archive/test_backend.py @@ -21,7 +21,7 @@ @pytest.fixture() def archive(tmp_path): """Yield the archive open in read mode.""" - filepath_archive = get_archive_file('export_v1.0_simple.aiida', filepath='export/migrate') + filepath_archive = get_archive_file('export_main_0001_simple.aiida', filepath='export/migrate') archive_format = ArchiveFormatSqlZip() new_archive = tmp_path / 'out.aiida' archive_format.migrate(filepath_archive, new_archive, archive_format.latest_version) diff --git a/tests/tools/archive/test_common.py b/tests/tools/archive/test_common.py index 50512737f1..72af015270 100644 --- a/tests/tools/archive/test_common.py +++ b/tests/tools/archive/test_common.py @@ -10,7 +10,7 @@ """Test common functions.""" from archive_path import TarPath, ZipPath -from aiida.tools.archive.implementations.sqlite.common import copy_tar_to_zip, copy_zip_to_zip +from aiida.tools.archive.implementations.sqlite_zip.common import copy_tar_to_zip, copy_zip_to_zip def test_copy_zip_to_zip(tmp_path): From 4e72acbf674cd86db117aa83a401750f48e92eb4 Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Wed, 23 Feb 2022 16:24:04 +0100 Subject: [PATCH 03/26] Move utilities --- aiida/storage/sqlite_zip/backend.py | 3 +-- .../{legacy_to_new.py => legacy_to_main.py} | 21 +++++++++++-------- aiida/storage/sqlite_zip/migrations/main.py | 4 ++-- .../sqlite_zip/migrations/utils.py} | 5 ----- aiida/storage/sqlite_zip/utils.py | 6 ++++++ .../implementations/sqlite_zip/writer.py | 18 
+++++++--------- .../archive/{test_common.py => test_utils.py} | 4 ++-- 7 files changed, 31 insertions(+), 30 deletions(-) rename aiida/storage/sqlite_zip/migrations/{legacy_to_new.py => legacy_to_main.py} (94%) rename aiida/{tools/archive/implementations/sqlite_zip/common.py => storage/sqlite_zip/migrations/utils.py} (98%) rename tests/tools/archive/{test_common.py => test_utils.py} (94%) diff --git a/aiida/storage/sqlite_zip/backend.py b/aiida/storage/sqlite_zip/backend.py index 250a9b6d1e..9628f15ac1 100644 --- a/aiida/storage/sqlite_zip/backend.py +++ b/aiida/storage/sqlite_zip/backend.py @@ -28,10 +28,9 @@ from aiida.storage.psql_dos.orm.querybuilder import SqlaQueryBuilder from aiida.storage.psql_dos.orm.utils import ModelWrapper from aiida.tools.archive.exceptions import ArchiveClosedError, CorruptArchive, ReadOnlyError -from aiida.tools.archive.implementations.sqlite_zip.common import DB_FILENAME, REPO_FOLDER from . import models -from .utils import create_sqla_engine, read_version +from .utils import DB_FILENAME, REPO_FOLDER, create_sqla_engine, read_version class SqliteZipBackend(StorageBackend): # pylint: disable=too-many-public-methods diff --git a/aiida/storage/sqlite_zip/migrations/legacy_to_new.py b/aiida/storage/sqlite_zip/migrations/legacy_to_main.py similarity index 94% rename from aiida/storage/sqlite_zip/migrations/legacy_to_new.py rename to aiida/storage/sqlite_zip/migrations/legacy_to_main.py index 2fb243ec4a..6f921540c6 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy_to_new.py +++ b/aiida/storage/sqlite_zip/migrations/legacy_to_main.py @@ -7,7 +7,7 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""Migration from the "legacy" JSON format, to an sqlite database.""" +"""Migration from the "legacy" JSON format, to an sqlite database, and node uuid based repository to hash 
based.""" from contextlib import contextmanager from datetime import datetime from hashlib import sha256 @@ -29,7 +29,6 @@ from aiida.tools.archive.exceptions import CorruptArchive, MigrationValidationError from . import v1_db_schema as v1_schema -from ....tools.archive.implementations.sqlite_zip.common import DB_FILENAME, META_FILENAME, REPO_FOLDER from .legacy.utils import update_metadata _NODE_ENTITY_NAME = 'Node' @@ -66,6 +65,10 @@ _LOG_ENTITY_NAME: v1_schema.DbLog, } +_META_FILENAME = 'metadata.json' +_DB_FILENAME = 'db.sqlite3' +_REPO_FOLDER = 'repo' + MIGRATED_TO_REVISION = 'main_0001' @@ -103,7 +106,7 @@ def in_archive_context(_inpath): mode='w', compresslevel=compression, name_to_info=central_dir, - info_order=(META_FILENAME, DB_FILENAME) + info_order=(_META_FILENAME, _DB_FILENAME) ) as new_path: with in_archive_context(inpath) as path: length = sum(1 for _ in path.glob('**/*')) @@ -121,18 +124,18 @@ def in_archive_context(_inpath): if subpath.is_file(): with subpath.open('rb') as handle: hashkey = chunked_file_hash(handle, sha256) - if f'{REPO_FOLDER}/{hashkey}' not in central_dir: + if f'{_REPO_FOLDER}/{hashkey}' not in central_dir: with subpath.open('rb') as handle: - with (new_path / f'{REPO_FOLDER}/{hashkey}').open(mode='wb') as handle2: + with (new_path / f'{_REPO_FOLDER}/{hashkey}').open(mode='wb') as handle2: shutil.copyfileobj(handle, handle2) node_repos.setdefault(uuid, []).append((posix_rel.as_posix(), hashkey)) MIGRATE_LOGGER.report(f'Unique files written: {len(central_dir)}') - _json_to_sqlite(working / DB_FILENAME, data, node_repos) + _json_to_sqlite(working / _DB_FILENAME, data, node_repos) MIGRATE_LOGGER.report('Finalising archive') - with (working / DB_FILENAME).open('rb') as handle: - with (new_path / DB_FILENAME).open(mode='wb') as handle2: + with (working / _DB_FILENAME).open('rb') as handle: + with (new_path / _DB_FILENAME).open(mode='wb') as handle2: shutil.copyfileobj(handle, handle2) # remove legacy keys from metadata and store @@ 
-144,7 +147,7 @@ def in_archive_context(_inpath): metadata['key_format'] = 'sha256' metadata['mtime'] = datetime.now().isoformat() update_metadata(metadata, MIGRATED_TO_REVISION) - (new_path / META_FILENAME).write_text(json.dumps(metadata)) + (new_path / _META_FILENAME).write_text(json.dumps(metadata)) def _json_to_sqlite( diff --git a/aiida/storage/sqlite_zip/migrations/main.py b/aiida/storage/sqlite_zip/migrations/main.py index f21b51ca84..9e9bd2e6e4 100644 --- a/aiida/storage/sqlite_zip/migrations/main.py +++ b/aiida/storage/sqlite_zip/migrations/main.py @@ -22,12 +22,12 @@ from aiida.common import json from aiida.common.progress_reporter import get_progress_reporter +from aiida.storage.sqlite_zip.migrations.utils import copy_tar_to_zip, copy_zip_to_zip from aiida.tools.archive.common import MIGRATE_LOGGER from aiida.tools.archive.exceptions import ArchiveMigrationError, CorruptArchive -from aiida.tools.archive.implementations.sqlite_zip.common import copy_tar_to_zip, copy_zip_to_zip from .legacy import FINAL_LEGACY_VERSION, LEGACY_MIGRATE_FUNCTIONS -from .legacy_to_new import MIGRATED_TO_REVISION, perform_v1_migration +from .legacy_to_main import MIGRATED_TO_REVISION, perform_v1_migration def _alembic_config() -> Config: diff --git a/aiida/tools/archive/implementations/sqlite_zip/common.py b/aiida/storage/sqlite_zip/migrations/utils.py similarity index 98% rename from aiida/tools/archive/implementations/sqlite_zip/common.py rename to aiida/storage/sqlite_zip/migrations/utils.py index 06640ae0cc..8dc669d86b 100644 --- a/aiida/tools/archive/implementations/sqlite_zip/common.py +++ b/aiida/storage/sqlite_zip/migrations/utils.py @@ -18,11 +18,6 @@ from aiida.common.progress_reporter import create_callback, get_progress_reporter -META_FILENAME = 'metadata.json' -DB_FILENAME = 'db.sqlite3' -# folder to store repository files in -REPO_FOLDER = 'repo' - def copy_zip_to_zip( inpath: Path, diff --git a/aiida/storage/sqlite_zip/utils.py b/aiida/storage/sqlite_zip/utils.py 
index cec7298867..e0315140cd 100644 --- a/aiida/storage/sqlite_zip/utils.py +++ b/aiida/storage/sqlite_zip/utils.py @@ -23,6 +23,12 @@ META_FILENAME = 'metadata.json' """The filename containing meta information about the storage instance.""" +DB_FILENAME = 'db.sqlite3' +"""The filename of the SQLite database.""" + +REPO_FOLDER = 'repo' +"""The name of the folder containing the repository files.""" + def sqlite_enforce_foreign_keys(dbapi_connection, _): """Enforce foreign key constraints, when using sqlite backend (off by default)""" diff --git a/aiida/tools/archive/implementations/sqlite_zip/writer.py b/aiida/tools/archive/implementations/sqlite_zip/writer.py index b6d99fe159..0477bd9055 100644 --- a/aiida/tools/archive/implementations/sqlite_zip/writer.py +++ b/aiida/tools/archive/implementations/sqlite_zip/writer.py @@ -33,8 +33,6 @@ from aiida.tools.archive.abstract import ArchiveFormatAbstract, ArchiveWriterAbstract from aiida.tools.archive.exceptions import CorruptArchive, IncompatibleArchiveVersionError -from .common import DB_FILENAME, META_FILENAME, REPO_FOLDER - @functools.lru_cache(maxsize=10) def _get_model_from_entity(entity_type: EntityTypes): @@ -58,8 +56,8 @@ def _get_model_from_entity(entity_type: EntityTypes): class ArchiveWriterSqlZip(ArchiveWriterAbstract): """AiiDA archive writer implementation.""" - meta_name = META_FILENAME - db_name = DB_FILENAME + meta_name = utils.META_FILENAME + db_name = utils.DB_FILENAME def __init__( self, @@ -197,8 +195,8 @@ def put_object(self, stream: BinaryIO, *, buffer_size: Optional[int] = None, key if key is None: key = chunked_file_hash(stream, hashlib.sha256) stream.seek(0) - if f'{REPO_FOLDER}/{key}' not in self._central_dir: - self._stream_binary(f'{REPO_FOLDER}/{key}', stream, buffer_size=buffer_size) + if f'{utils.REPO_FOLDER}/{key}' not in self._central_dir: + self._stream_binary(f'{utils.REPO_FOLDER}/{key}', stream, buffer_size=buffer_size) return key def delete_object(self, key: str) -> None: @@ -210,9 
+208,9 @@ class ArchiveAppenderSqlZip(ArchiveWriterSqlZip): def delete_object(self, key: str) -> None: self._assert_in_context() - if f'{REPO_FOLDER}/{key}' in self._central_dir: + if f'{utils.REPO_FOLDER}/{key}' in self._central_dir: raise IOError(f'Cannot delete object {key!r} that has been added in the same append context') - self._deleted_paths.add(f'{REPO_FOLDER}/{key}') + self._deleted_paths.add(f'{utils.REPO_FOLDER}/{key}') def __enter__(self) -> 'ArchiveAppenderSqlZip': """Start appending to the archive""" @@ -226,7 +224,7 @@ def __enter__(self) -> 'ArchiveAppenderSqlZip': f'Archive is version {version!r} but expected {self._format.latest_version!r}' ) # load the metadata - self._metadata = json.loads(read_file_in_zip(self._path, META_FILENAME, 'utf8', search_limit=4)) + self._metadata = json.loads(read_file_in_zip(self._path, utils.META_FILENAME, 'utf8', search_limit=4)) # overwrite metadata self._metadata['mtime'] = datetime.now().isoformat() self._metadata['compression'] = self._compression @@ -247,7 +245,7 @@ def __enter__(self) -> 'ArchiveAppenderSqlZip': db_file = self._work_dir / self.db_name with db_file.open('wb') as handle: try: - extract_file_in_zip(self.path, DB_FILENAME, handle, search_limit=4) + extract_file_in_zip(self.path, utils.DB_FILENAME, handle, search_limit=4) except Exception as exc: raise CorruptArchive(f'database could not be read: {exc}') from exc # open a connection to the database diff --git a/tests/tools/archive/test_common.py b/tests/tools/archive/test_utils.py similarity index 94% rename from tests/tools/archive/test_common.py rename to tests/tools/archive/test_utils.py index 72af015270..ef9c6ad76a 100644 --- a/tests/tools/archive/test_common.py +++ b/tests/tools/archive/test_utils.py @@ -7,10 +7,10 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""Test common 
functions.""" +"""Test utility functions.""" from archive_path import TarPath, ZipPath -from aiida.tools.archive.implementations.sqlite_zip.common import copy_tar_to_zip, copy_zip_to_zip +from aiida.storage.sqlite_zip.migrations.utils import copy_tar_to_zip, copy_zip_to_zip def test_copy_zip_to_zip(tmp_path): From d04ce8e1444d49f78de83f42d78e779e0986ea07 Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Wed, 23 Feb 2022 16:34:45 +0100 Subject: [PATCH 04/26] Update backend.py --- aiida/storage/sqlite_zip/backend.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/aiida/storage/sqlite_zip/backend.py b/aiida/storage/sqlite_zip/backend.py index 9628f15ac1..51afd2fd2e 100644 --- a/aiida/storage/sqlite_zip/backend.py +++ b/aiida/storage/sqlite_zip/backend.py @@ -30,6 +30,7 @@ from aiida.tools.archive.exceptions import ArchiveClosedError, CorruptArchive, ReadOnlyError from . import models +from .migrations.main import get_schema_version_head from .utils import DB_FILENAME, REPO_FOLDER, create_sqla_engine, read_version @@ -38,7 +39,7 @@ class SqliteZipBackend(StorageBackend): # pylint: disable=too-many-public-metho @classmethod def version_head(cls) -> str: - raise NotImplementedError + return get_schema_version_head() @classmethod def version_profile(cls, profile: Profile) -> None: @@ -168,6 +169,12 @@ def get_global_variable(self, key: str): def set_global_variable(self, key: str, value, description: Optional[str] = None, overwrite=True) -> None: raise ReadOnlyError() + def maintain(self, dry_run: bool = False, live: bool = True, **kwargs) -> None: + raise NotImplementedError + + def get_info(self, statistics: bool = False, **kwargs) -> dict: + raise NotImplementedError + class ZipfileBackendRepository(AbstractRepositoryBackend): """A read-only backend for an open zip file.""" From 6f161a6f29c9b5179e71a39eb4fdf4a2fae6d22e Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Thu, 24 Feb 2022 00:52:12 +0100 Subject: [PATCH 05/26] consolidate use of 
exceptions --- aiida/cmdline/commands/cmd_archive.py | 11 ++--- aiida/storage/sqlite_zip/backend.py | 25 ++++++---- .../sqlite_zip/migrations/legacy/utils.py | 6 +-- .../migrations/legacy/v05_to_v06.py | 4 +- .../migrations/legacy/v06_to_v07.py | 12 ++--- .../sqlite_zip/migrations/legacy_to_main.py | 8 ++-- aiida/storage/sqlite_zip/migrations/main.py | 47 ++++++++++++++----- aiida/storage/sqlite_zip/utils.py | 10 ++-- aiida/tools/archive/__init__.py | 4 -- aiida/tools/archive/abstract.py | 6 +-- aiida/tools/archive/exceptions.py | 38 --------------- .../implementations/sqlite_zip/reader.py | 4 +- .../implementations/sqlite_zip/writer.py | 7 ++- aiida/tools/archive/imports.py | 12 ++--- .../tools/archive/migration/test_migration.py | 9 ++-- .../archive/migration/test_v06_to_v07.py | 8 ++-- .../archive/migration/test_v07_to_v08.py | 2 +- tests/tools/archive/orm/test_links.py | 1 - tests/tools/archive/test_simple.py | 6 +-- 19 files changed, 101 insertions(+), 119 deletions(-) diff --git a/aiida/cmdline/commands/cmd_archive.py b/aiida/cmdline/commands/cmd_archive.py index 2a8af352d1..42e0d80265 100644 --- a/aiida/cmdline/commands/cmd_archive.py +++ b/aiida/cmdline/commands/cmd_archive.py @@ -24,7 +24,7 @@ from aiida.cmdline.params.types import GroupParamType, PathOrUrl from aiida.cmdline.utils import decorators, echo from aiida.cmdline.utils.common import get_database_summary -from aiida.common.exceptions import UnreachableStorage +from aiida.common.exceptions import CorruptStorage, UnreachableStorage from aiida.common.links import GraphTraversalRules from aiida.common.log import AIIDA_LOGGER @@ -50,13 +50,12 @@ def inspect(archive, version, meta_data, database): The various options can be used to change exactly what information is displayed. 
""" from aiida.tools.archive.abstract import get_format - from aiida.tools.archive.exceptions import UnreadableArchiveError archive_format = get_format() latest_version = archive_format.latest_version try: current_version = archive_format.read_version(archive) - except (UnreadableArchiveError, UnreachableStorage) as exc: + except (UnreachableStorage, CorruptStorage) as exc: echo.echo_critical(f'archive file of unknown format: {exc}') if version: @@ -426,12 +425,12 @@ def _import_archive_and_migrate(archive: str, web_based: bool, import_kwargs: di :param archive: the path or URL to the archive :param web_based: If the archive needs to be downloaded first :param import_kwargs: keyword arguments to pass to the import function - :param try_migration: whether to try a migration if the import raises IncompatibleArchiveVersionError + :param try_migration: whether to try a migration if the import raises `IncompatibleStorageSchema` """ + from aiida.common.exceptions import IncompatibleStorageSchema from aiida.common.folders import SandboxFolder from aiida.tools.archive.abstract import get_format - from aiida.tools.archive.exceptions import IncompatibleArchiveVersionError from aiida.tools.archive.imports import import_archive as _import_archive archive_format = get_format() @@ -454,7 +453,7 @@ def _import_archive_and_migrate(archive: str, web_based: bool, import_kwargs: di echo.echo_report(f'starting import: {archive}') try: _import_archive(archive_path, archive_format=archive_format, **import_kwargs) - except IncompatibleArchiveVersionError as exception: + except IncompatibleStorageSchema as exception: if try_migration: echo.echo_report(f'incompatible version detected for {archive}, trying migration') diff --git a/aiida/storage/sqlite_zip/backend.py b/aiida/storage/sqlite_zip/backend.py index 51afd2fd2e..5ed605337b 100644 --- a/aiida/storage/sqlite_zip/backend.py +++ b/aiida/storage/sqlite_zip/backend.py @@ -19,7 +19,7 @@ from archive_path import extract_file_in_zip from 
sqlalchemy.orm import Session -from aiida.common.exceptions import UnreachableStorage +from aiida.common.exceptions import ClosedStorage, CorruptStorage from aiida.manage import Profile from aiida.orm.entities import EntityTypes from aiida.orm.implementation import StorageBackend @@ -27,10 +27,9 @@ from aiida.storage.psql_dos.orm import authinfos, comments, computers, entities, groups, logs, nodes, users from aiida.storage.psql_dos.orm.querybuilder import SqlaQueryBuilder from aiida.storage.psql_dos.orm.utils import ModelWrapper -from aiida.tools.archive.exceptions import ArchiveClosedError, CorruptArchive, ReadOnlyError from . import models -from .migrations.main import get_schema_version_head +from .migrations.main import get_schema_version_head, validate_storage from .utils import DB_FILENAME, REPO_FOLDER, create_sqla_engine, read_version @@ -52,8 +51,7 @@ def migrate(cls, profile: Profile): def __init__(self, profile: Profile): super().__init__(profile) self._path = Path(profile.storage_config['path']) - if not self._path.is_file(): - raise UnreachableStorage(f'archive file `{self._path}` does not exist.') + validate_storage(self._path) # lazy open the archive zipfile and extract the database file self._db_file: Optional[Path] = None self._session: Optional[Session] = None @@ -84,7 +82,7 @@ def close(self): def get_session(self) -> Session: """Return an SQLAlchemy session.""" if self._closed: - raise ArchiveClosedError() + raise ClosedStorage(str(self)) if self._db_file is None: _, path = tempfile.mkstemp() self._db_file = Path(path) @@ -92,14 +90,14 @@ def get_session(self) -> Session: try: extract_file_in_zip(self._path, DB_FILENAME, handle, search_limit=4) except Exception as exc: - raise CorruptArchive(f'database could not be read: {exc}') from exc + raise CorruptStorage(f'database could not be read: {exc}') from exc if self._session is None: self._session = Session(create_sqla_engine(self._db_file)) return self._session def get_repository(self) -> 
'ZipfileBackendRepository': if self._closed: - raise ArchiveClosedError() + raise ClosedStorage(str(self)) if self._zipfile is None: self._zipfile = ZipFile(self._path, mode='r') # pylint: disable=consider-using-with return ZipfileBackendRepository(self._zipfile) @@ -176,6 +174,13 @@ def get_info(self, statistics: bool = False, **kwargs) -> dict: raise NotImplementedError +class ReadOnlyError(IOError): + """Raised when a write operation is called on a read-only archive.""" + + def __init__(self, msg='Archive is read-only'): # pylint: disable=useless-super-delegation + super().__init__(msg) + + class ZipfileBackendRepository(AbstractRepositoryBackend): """A read-only backend for an open zip file.""" @@ -185,7 +190,7 @@ def __init__(self, file: ZipFile): @property def zipfile(self) -> ZipFile: if self._zipfile.fp is None: - raise ArchiveClosedError() + raise ClosedStorage(f'zipfile closed: {self._zipfile}') return self._zipfile @property @@ -332,7 +337,7 @@ def is_stored(self): return True def store(self): # pylint: disable=no-self-use - return ReadOnlyError() + raise ReadOnlyError() return ReadOnlyEntityBackend diff --git a/aiida/storage/sqlite_zip/migrations/legacy/utils.py b/aiida/storage/sqlite_zip/migrations/legacy/utils.py index e769de1bd4..fecd2d9bf2 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy/utils.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/utils.py @@ -9,7 +9,7 @@ ########################################################################### """Utility functions for migration of export-files.""" -from aiida.tools.archive import exceptions +from aiida.common import exceptions def verify_metadata_version(metadata, version=None): @@ -23,13 +23,13 @@ def verify_metadata_version(metadata, version=None): try: metadata_version = metadata['export_version'] except KeyError: - raise exceptions.ArchiveMigrationError("metadata is missing the 'export_version' key") + raise exceptions.StorageMigrationError("metadata is missing the 'export_version' key") 
if version is None: return metadata_version if metadata_version != version: - raise exceptions.MigrationValidationError( + raise exceptions.StorageMigrationError( f'expected archive file with version {version} but found version {metadata_version}' ) diff --git a/aiida/storage/sqlite_zip/migrations/legacy/v05_to_v06.py b/aiida/storage/sqlite_zip/migrations/legacy/v05_to_v06.py index d949c0877e..0f3eb3bc15 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy/v05_to_v06.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v05_to_v06.py @@ -31,7 +31,7 @@ def migrate_deserialized_datetime(data, conversion): """Deserialize datetime strings from export archives, meaning to reattach the UTC timezone information.""" - from aiida.tools.archive.exceptions import ArchiveMigrationError + from aiida.common.exceptions import StorageMigrationError ret_data: Union[str, dict, list] @@ -62,7 +62,7 @@ def migrate_deserialized_datetime(data, conversion): # Since we know that all strings will be UTC, here we are simply reattaching that information. ret_data = f'{data}+00:00' else: - raise ArchiveMigrationError(f"Unknown convert_type '{conversion}'") + raise StorageMigrationError(f"Unknown convert_type '{conversion}'") return ret_data diff --git a/aiida/storage/sqlite_zip/migrations/legacy/v06_to_v07.py b/aiida/storage/sqlite_zip/migrations/legacy/v06_to_v07.py index 85c0b74cf2..6cd4bd5aa2 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy/v06_to_v07.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v06_to_v07.py @@ -46,14 +46,14 @@ def data_migration_legacy_process_attributes(data): `process_state` attribute. If they have it, it is checked whether the state is active or not, if not, the `sealed` attribute is created and set to `True`. - :raises `~aiida.tools.archive.exceptions.CorruptArchive`: if a Node, found to have attributes, + :raises `~aiida.common.exceptions.CorruptStorage`: if a Node, found to have attributes, cannot be found in the list of exported entities. 
- :raises `~aiida.tools.archive.exceptions.CorruptArchive`: if the 'sealed' attribute does not exist and + :raises `~aiida.common.exceptions.CorruptStorage`: if the 'sealed' attribute does not exist and the ProcessNode is in an active state, i.e. `process_state` is one of ('created', 'running', 'waiting'). A log-file, listing all illegal ProcessNodes, will be produced in the current directory. """ + from aiida.common.exceptions import CorruptStorage from aiida.storage.psql_dos.migrations.utils.integrity import write_database_integrity_violation - from aiida.tools.archive.exceptions import CorruptArchive attrs_to_remove = ['_sealed', '_finished', '_failed', '_aborted', '_do_abort'] active_states = {'created', 'running', 'waiting'} @@ -68,7 +68,7 @@ def data_migration_legacy_process_attributes(data): if process_state in active_states: # The ProcessNode is in an active state, and should therefore never have been allowed # to be exported. The Node will be added to a log that is saved in the working directory, - # then a CorruptArchive will be raised, since the archive needs to be migrated manually. + # then a CorruptStorage will be raised, since the archive needs to be migrated manually. uuid_pk = data['export_data']['Node'][node_pk].get('uuid', node_pk) illegal_cases.append([uuid_pk, process_state]) continue # No reason to do more now @@ -81,7 +81,7 @@ def data_migration_legacy_process_attributes(data): for attr in attrs_to_remove: content.pop(attr, None) except KeyError as exc: - raise CorruptArchive(f'Your export archive is corrupt! Org. exception: {exc}') + raise CorruptStorage(f'Your export archive is corrupt! Org. exception: {exc}') if illegal_cases: headers = ['UUID/PK', 'process_state'] @@ -89,7 +89,7 @@ def data_migration_legacy_process_attributes(data): 'that should never have been allowed to be exported.' 
write_database_integrity_violation(illegal_cases, headers, warning_message) - raise CorruptArchive( + raise CorruptStorage( 'Your export archive is corrupt! ' 'Please see the log-file in your current directory for more details.' ) diff --git a/aiida/storage/sqlite_zip/migrations/legacy_to_main.py b/aiida/storage/sqlite_zip/migrations/legacy_to_main.py index 6f921540c6..8e4b29bef7 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy_to_main.py +++ b/aiida/storage/sqlite_zip/migrations/legacy_to_main.py @@ -21,12 +21,12 @@ from sqlalchemy import insert, select from sqlalchemy.exc import IntegrityError +from aiida.common.exceptions import CorruptStorage, StorageMigrationError from aiida.common.hashing import chunked_file_hash from aiida.common.progress_reporter import get_progress_reporter from aiida.repository.common import File, FileType from aiida.storage.sqlite_zip.utils import create_sqla_engine from aiida.tools.archive.common import MIGRATE_LOGGER, batch_iter -from aiida.tools.archive.exceptions import CorruptArchive, MigrationValidationError from . 
import v1_db_schema as v1_schema from .legacy.utils import update_metadata @@ -176,7 +176,7 @@ def _json_to_sqlite( try: connection.execute(insert(backend_cls.__table__), rows) # type: ignore except IntegrityError as exc: - raise MigrationValidationError(f'Database integrity error: {exc}') from exc + raise StorageMigrationError(f'Database integrity error: {exc}') from exc progress.update(nrows) if not (data['groups_uuid'] or data['links_uuid']): @@ -243,9 +243,9 @@ def _iter_entity_fields( extras = data.get('node_extras', {}) for pk, all_fields in data['export_data'].get(name, {}).items(): if pk not in attributes: - raise CorruptArchive(f'Unable to find attributes info for Node with Pk={pk}') + raise CorruptStorage(f'Unable to find attributes info for Node with Pk={pk}') if pk not in extras: - raise CorruptArchive(f'Unable to find extra info for Node with Pk={pk}') + raise CorruptStorage(f'Unable to find extra info for Node with Pk={pk}') uuid = all_fields['uuid'] repository_metadata = _create_repo_metadata(node_repos[uuid]) if uuid in node_repos else {} yield { diff --git a/aiida/storage/sqlite_zip/migrations/main.py b/aiida/storage/sqlite_zip/migrations/main.py index 9e9bd2e6e4..2279f0b91a 100644 --- a/aiida/storage/sqlite_zip/migrations/main.py +++ b/aiida/storage/sqlite_zip/migrations/main.py @@ -21,13 +21,14 @@ from archive_path import open_file_in_tar, open_file_in_zip from aiida.common import json +from aiida.common.exceptions import CorruptStorage, IncompatibleStorageSchema, StorageMigrationError from aiida.common.progress_reporter import get_progress_reporter -from aiida.storage.sqlite_zip.migrations.utils import copy_tar_to_zip, copy_zip_to_zip from aiida.tools.archive.common import MIGRATE_LOGGER -from aiida.tools.archive.exceptions import ArchiveMigrationError, CorruptArchive +from ..utils import read_version from .legacy import FINAL_LEGACY_VERSION, LEGACY_MIGRATE_FUNCTIONS from .legacy_to_main import MIGRATED_TO_REVISION, perform_v1_migration +from 
.utils import copy_tar_to_zip, copy_zip_to_zip def _alembic_config() -> Config: @@ -51,6 +52,26 @@ def list_versions() -> List[str]: return legacy_versions + alembic_versions +def validate_storage(inpath: Path) -> None: + """Validate that the storage is at the head version. + + :raises: :class:`aiida.common.exceptions.UnreachableStorage` if the file does not exist + :raises: :class:`aiida.common.exceptions.CorruptStorage` + if the version cannot be read from the storage. + :raises: :class:`aiida.common.exceptions.IncompatibleStorageSchema` + if the storage is not compatible with the code API. + """ + schema_version_code = get_schema_version_head() + schema_version_archive = read_version(inpath) + if schema_version_archive != schema_version_code: + raise IncompatibleStorageSchema( + f'Archive schema version `{schema_version_archive}` ' + f'is incompatible with the required schema version `{schema_version_code}`. ' + 'To migrate the archive schema version to the current one, ' + f'run the following command: verdi archive migrate {str(inpath)!r}' + ) + + def migrate( # pylint: disable=too-many-branches,too-many-statements inpath: Union[str, Path], outpath: Union[str, Path], @@ -77,34 +98,34 @@ def migrate( # pylint: disable=too-many-branches,too-many-statements elif zipfile.is_zipfile(str(inpath)): is_tar = False else: - raise CorruptArchive(f'The input file is neither a tar nor a zip file: {inpath}') + raise CorruptStorage(f'The input file is neither a tar nor a zip file: {inpath}') # read the metadata.json which should always be present try: metadata = _read_json(inpath, 'metadata.json', is_tar) except FileNotFoundError: - raise CorruptArchive('No metadata.json file found') + raise CorruptStorage('No metadata.json file found') except IOError as exc: - raise CorruptArchive(f'No input file could not be read: {exc}') from exc + raise CorruptStorage(f'No input file could not be read: {exc}') from exc - # opbtain the current version + # obtain the current version if 
'export_version' not in metadata: - raise CorruptArchive('No export_version found in metadata.json') + raise CorruptStorage('No export_version found in metadata.json') current_version = metadata['export_version'] # check versions are valid # versions 0.1, 0.2, 0.3 are no longer supported, # since 0.3 -> 0.4 requires costly migrations of repo files (you would need to unpack all of them) if current_version in ('0.1', '0.2', '0.3') or version in ('0.1', '0.2', '0.3'): - raise ArchiveMigrationError( + raise StorageMigrationError( f"Legacy migration from '{current_version}' -> '{version}' is not supported in aiida-core v2" ) all_versions = list_versions() if current_version not in all_versions: - raise ArchiveMigrationError(f"Unknown current version '{current_version}'") + raise StorageMigrationError(f"Unknown current version '{current_version}'") if version not in all_versions: - raise ArchiveMigrationError(f"Unknown target version '{version}'") + raise StorageMigrationError(f"Unknown target version '{version}'") # if we are already at the desired version, then no migration is required if current_version == version: @@ -161,7 +182,7 @@ def path_callback(inpath, outpath) -> bool: current_version = MIGRATED_TO_REVISION if not current_version == version: - raise ArchiveMigrationError(f"Migration from '{current_version}' -> '{version}' failed") + raise StorageMigrationError(f"Migration from '{current_version}' -> '{version}' failed") if outpath.exists() and force: outpath.unlink() @@ -196,9 +217,9 @@ def _perform_legacy_migrations(current_version: str, to_version: str, metadata: pathway: List[str] = [] while prev_version != to_version: if prev_version not in LEGACY_MIGRATE_FUNCTIONS: - raise ArchiveMigrationError(f"No migration pathway available for '{current_version}' to '{to_version}'") + raise StorageMigrationError(f"No migration pathway available for '{current_version}' to '{to_version}'") if prev_version in pathway: - raise ArchiveMigrationError( + raise 
StorageMigrationError( f'cyclic migration pathway encountered: {" -> ".join(pathway + [prev_version])}' ) pathway.append(prev_version) diff --git a/aiida/storage/sqlite_zip/utils.py b/aiida/storage/sqlite_zip/utils.py index e0315140cd..ceab594948 100644 --- a/aiida/storage/sqlite_zip/utils.py +++ b/aiida/storage/sqlite_zip/utils.py @@ -18,7 +18,7 @@ from sqlalchemy.future.engine import Engine, create_engine from aiida.common import json -from aiida.common.exceptions import UnreachableStorage +from aiida.common.exceptions import CorruptStorage, UnreachableStorage META_FILENAME = 'metadata.json' """The filename containing meta information about the storage instance.""" @@ -76,16 +76,16 @@ def read_version(path: Union[str, Path]) -> str: try: metadata = extract_metadata(path, search_limit=None) except Exception as exc: - raise UnreachableStorage(f'Could not read metadata for version: {exc}') from exc + raise CorruptStorage(f'Could not read metadata for version: {exc}') from exc elif tarfile.is_tarfile(path): try: metadata = json.loads(read_file_in_tar(path, META_FILENAME)) except Exception as exc: - raise UnreachableStorage(f'Could not read metadata for version: {exc}') from exc + raise CorruptStorage(f'Could not read metadata for version: {exc}') from exc else: - raise UnreachableStorage('Not a zip or tar file') + raise CorruptStorage('Not a zip or tar file') if 'export_version' in metadata: return metadata['export_version'] - raise UnreachableStorage("Metadata does not contain 'export_version' key") + raise CorruptStorage("Metadata does not contain 'export_version' key") diff --git a/aiida/tools/archive/__init__.py b/aiida/tools/archive/__init__.py index 4252c80745..e62ca09039 100644 --- a/aiida/tools/archive/__init__.py +++ b/aiida/tools/archive/__init__.py @@ -28,10 +28,8 @@ 'ArchiveFormatAbstract', 'ArchiveFormatSqlZip', 'ArchiveImportError', - 'ArchiveMigrationError', 'ArchiveReaderAbstract', 'ArchiveWriterAbstract', - 'CorruptArchive', 'EXPORT_LOGGER', 
'ExportImportException', 'ExportValidationError', @@ -39,9 +37,7 @@ 'ImportTestRun', 'ImportUniquenessError', 'ImportValidationError', - 'IncompatibleArchiveVersionError', 'MIGRATE_LOGGER', - 'MigrationValidationError', 'create_archive', 'get_format', 'import_archive', diff --git a/aiida/tools/archive/abstract.py b/aiida/tools/archive/abstract.py index 371be5399c..08a5cb9ad8 100644 --- a/aiida/tools/archive/abstract.py +++ b/aiida/tools/archive/abstract.py @@ -141,7 +141,7 @@ def __exit__(self, *args, **kwargs) -> None: def get_metadata(self) -> Dict[str, Any]: """Return the top-level metadata. - :raises: ``UnreadableArchiveError`` if the top-level metadata cannot be read from the archive + :raises: ``CorruptStorage`` if the top-level metadata cannot be read from the archive """ @abstractmethod @@ -196,8 +196,8 @@ def read_version(self, path: Union[str, Path]) -> str: :param path: archive path - :raises: ``FileNotFoundError`` if the file does not exist - :raises: ``UnreadableArchiveError`` if a version cannot be read from the archive + :raises: ``UnreachableStorage`` if the file does not exist + :raises: ``CorruptStorage`` if a version cannot be read from the archive """ @overload diff --git a/aiida/tools/archive/exceptions.py b/aiida/tools/archive/exceptions.py index 1ad358308f..05db839a36 100644 --- a/aiida/tools/archive/exceptions.py +++ b/aiida/tools/archive/exceptions.py @@ -19,11 +19,7 @@ 'ExportImportException', 'ArchiveExportError', 'ExportValidationError', - 'CorruptArchive', - 'ArchiveMigrationError', - 'MigrationValidationError', 'ArchiveImportError', - 'IncompatibleArchiveVersionError', 'ImportValidationError', 'ImportUniquenessError', 'ImportTestRun', @@ -42,22 +38,10 @@ class ExportValidationError(ArchiveExportError): """Raised when validation fails during export, e.g. 
for non-sealed ``ProcessNode`` s.""" -class UnreadableArchiveError(ArchiveExportError): - """Raised when the version cannot be extracted from the archive.""" - - -class CorruptArchive(ExportImportException): - """Raised when an operation is applied to a corrupt export archive, e.g. missing files or invalid formats.""" - - class ArchiveImportError(ExportImportException): """Base class for all AiiDA import exceptions.""" -class IncompatibleArchiveVersionError(ExportImportException): - """Raised when trying to import an export archive with an incompatible schema version.""" - - class ImportUniquenessError(ArchiveImportError): """Raised when the user tries to violate a uniqueness constraint. @@ -71,25 +55,3 @@ class ImportValidationError(ArchiveImportError): class ImportTestRun(ArchiveImportError): """Raised during an import, before the transaction is commited.""" - - -class ArchiveMigrationError(ExportImportException): - """Base class for all AiiDA export archive migration exceptions.""" - - -class MigrationValidationError(ArchiveMigrationError): - """Raised when validation fails during migration of export archives.""" - - -class ReadOnlyError(IOError): - """Raised when a write operation is called on a read-only archive.""" - - def __init__(self, msg='Archive is read-only'): # pylint: disable=useless-super-delegation - super().__init__(msg) - - -class ArchiveClosedError(IOError): - """Raised when the archive is closed.""" - - def __init__(self, msg='Archive is closed'): # pylint: disable=useless-super-delegation - super().__init__(msg) diff --git a/aiida/tools/archive/implementations/sqlite_zip/reader.py b/aiida/tools/archive/implementations/sqlite_zip/reader.py index 2c9e59e998..9f524621c1 100644 --- a/aiida/tools/archive/implementations/sqlite_zip/reader.py +++ b/aiida/tools/archive/implementations/sqlite_zip/reader.py @@ -11,11 +11,11 @@ from pathlib import Path from typing import Any, Dict, Optional, Union +from aiida.common.exceptions import CorruptStorage from 
aiida.manage import Profile from aiida.storage.sqlite_zip.backend import SqliteZipBackend from aiida.storage.sqlite_zip.utils import extract_metadata from aiida.tools.archive.abstract import ArchiveReaderAbstract -from aiida.tools.archive.exceptions import CorruptArchive class ArchiveReaderSqlZip(ArchiveReaderAbstract): @@ -43,7 +43,7 @@ def get_metadata(self) -> Dict[str, Any]: try: return extract_metadata(self.path) except Exception as exc: - raise CorruptArchive('metadata could not be read') from exc + raise CorruptStorage('metadata could not be read') from exc def get_backend(self) -> SqliteZipBackend: if not self._in_context: diff --git a/aiida/tools/archive/implementations/sqlite_zip/writer.py b/aiida/tools/archive/implementations/sqlite_zip/writer.py index 0477bd9055..7283b91a32 100644 --- a/aiida/tools/archive/implementations/sqlite_zip/writer.py +++ b/aiida/tools/archive/implementations/sqlite_zip/writer.py @@ -25,13 +25,12 @@ from sqlalchemy.future.engine import Connection from aiida import get_version -from aiida.common.exceptions import IntegrityError +from aiida.common.exceptions import CorruptStorage, IncompatibleStorageSchema, IntegrityError from aiida.common.hashing import chunked_file_hash from aiida.common.progress_reporter import get_progress_reporter from aiida.orm.entities import EntityTypes from aiida.storage.sqlite_zip import models, utils from aiida.tools.archive.abstract import ArchiveFormatAbstract, ArchiveWriterAbstract -from aiida.tools.archive.exceptions import CorruptArchive, IncompatibleArchiveVersionError @functools.lru_cache(maxsize=10) @@ -220,7 +219,7 @@ def __enter__(self) -> 'ArchiveAppenderSqlZip': # the file should be an archive with the correct version version = self._format.read_version(self._path) if not version == self._format.latest_version: - raise IncompatibleArchiveVersionError( + raise IncompatibleStorageSchema( f'Archive is version {version!r} but expected {self._format.latest_version!r}' ) # load the metadata @@ 
-247,7 +246,7 @@ def __enter__(self) -> 'ArchiveAppenderSqlZip': try: extract_file_in_zip(self.path, utils.DB_FILENAME, handle, search_limit=4) except Exception as exc: - raise CorruptArchive(f'database could not be read: {exc}') from exc + raise CorruptStorage(f'archive database could not be read: {exc}') from exc # open a connection to the database engine = utils.create_sqla_engine( self._work_dir / self.db_name, enforce_foreign_keys=self._enforce_foreign_keys, echo=self._debug diff --git a/aiida/tools/archive/imports.py b/aiida/tools/archive/imports.py index 6d87d36704..f5b0f332e6 100644 --- a/aiida/tools/archive/imports.py +++ b/aiida/tools/archive/imports.py @@ -16,6 +16,7 @@ from aiida import orm from aiida.common import timezone +from aiida.common.exceptions import IncompatibleStorageSchema from aiida.common.lang import type_check from aiida.common.links import LinkType from aiida.common.log import AIIDA_LOGGER @@ -28,7 +29,7 @@ from .abstract import ArchiveFormatAbstract from .common import batch_iter, entity_type_to_orm -from .exceptions import ImportTestRun, ImportUniquenessError, ImportValidationError, IncompatibleArchiveVersionError +from .exceptions import ImportTestRun, ImportUniquenessError, ImportValidationError from .implementations.sqlite_zip import ArchiveFormatSqlZip __all__ = ('IMPORT_LOGGER', 'import_archive') @@ -95,10 +96,9 @@ def import_archive( :returns: Primary Key of the import Group - :raises `~aiida.tools.archive.exceptions.IncompatibleArchiveVersionError`: if the provided archive's - version is not equal to the version of AiiDA at the moment of import. - :raises `~aiida.tools.archive.exceptions.ImportValidationError`: if parameters or the contents of - :raises `~aiida.tools.archive.exceptions.CorruptArchive`: if the provided archive cannot be read. + :raises `~aiida.common.exceptions.CorruptStorage`: if the provided archive cannot be read. 
+ :raises `~aiida.common.exceptions.IncompatibleStorageSchema`: if the archive version is not at head. + :raises `~aiida.tools.archive.exceptions.ImportValidationError`: if invalid entities are found in the archive. :raises `~aiida.tools.archive.exceptions.ImportUniquenessError`: if a new unique entity can not be created. """ archive_format = archive_format or ArchiveFormatSqlZip() @@ -126,7 +126,7 @@ def import_archive( # i.e. its not whether the version is the latest that matters, it is that it is compatible with the backend version # its a bit weird at the moment because django/sqlalchemy have different versioning if not archive_format.read_version(path) == archive_format.latest_version: - raise IncompatibleArchiveVersionError( + raise IncompatibleStorageSchema( f'The archive version {archive_format.read_version(path)!r} ' f'is not the latest version {archive_format.latest_version!r}' ) diff --git a/tests/tools/archive/migration/test_migration.py b/tests/tools/archive/migration/test_migration.py index c998e1504f..d2e22b356c 100644 --- a/tests/tools/archive/migration/test_migration.py +++ b/tests/tools/archive/migration/test_migration.py @@ -12,7 +12,8 @@ import pytest from aiida import orm -from aiida.tools.archive import ArchiveFormatSqlZip, ArchiveMigrationError +from aiida.common.exceptions import StorageMigrationError +from aiida.tools.archive import ArchiveFormatSqlZip from tests.utils.archives import get_archive_file # archives to test migration against @@ -84,10 +85,10 @@ def test_partial_migrations(core_archive, tmp_path): new_archive = tmp_path / 'out.aiida' - with pytest.raises(ArchiveMigrationError, match='Unknown target version'): + with pytest.raises(StorageMigrationError, match='Unknown target version'): archive_format.migrate(filepath_archive, new_archive, 0.2) - with pytest.raises(ArchiveMigrationError, match='No migration pathway available'): + with pytest.raises(StorageMigrationError, match='No migration pathway available'): 
archive_format.migrate(filepath_archive, new_archive, '0.4') archive_format.migrate(filepath_archive, new_archive, '0.7') @@ -119,7 +120,7 @@ def test_wrong_versions(core_archive, tmp_path, version): filepath_archive = get_archive_file('export_v0.4_simple.aiida', **core_archive) archive_format = ArchiveFormatSqlZip() new_archive = tmp_path / 'out.aiida' - with pytest.raises(ArchiveMigrationError, match='Unknown target version'): + with pytest.raises(StorageMigrationError, match='Unknown target version'): archive_format.migrate(filepath_archive, new_archive, version) assert not new_archive.exists() diff --git a/tests/tools/archive/migration/test_v06_to_v07.py b/tests/tools/archive/migration/test_v06_to_v07.py index a80e1ea5d9..b4f2e502b0 100644 --- a/tests/tools/archive/migration/test_v06_to_v07.py +++ b/tests/tools/archive/migration/test_v06_to_v07.py @@ -10,11 +10,11 @@ """Test archive file migration from export version 0.6 to 0.7""" import pytest +from aiida.common.exceptions import CorruptStorage from aiida.storage.sqlite_zip.migrations.legacy.v06_to_v07 import ( data_migration_legacy_process_attributes, migrate_v6_to_v7, ) -from aiida.tools.archive.exceptions import CorruptArchive def test_migrate_external(migrate_from_func): @@ -49,7 +49,7 @@ def test_migrate_external(migrate_from_func): def test_migration_0040_corrupt_archive(): - """Check CorruptArchive is raised for different cases during migration 0040""" + """Check CorruptStorage is raised for different cases during migration 0040""" # data has one "valid" entry, in the form of Node . # At least it has the needed key `node_type`. # data also has one "invalid" entry, in form of Node . @@ -73,7 +73,7 @@ def test_migration_0040_corrupt_archive(): } } - with pytest.raises(CorruptArchive, match='Your export archive is corrupt! Org. exception:'): + with pytest.raises(CorruptStorage, match='Your export archive is corrupt! Org. 
exception:'): data_migration_legacy_process_attributes(data) # data has one "valid" entry, in the form of Node . @@ -101,7 +101,7 @@ def test_migration_0040_corrupt_archive(): } } - with pytest.raises(CorruptArchive, match='Your export archive is corrupt! Please see the log-file'): + with pytest.raises(CorruptStorage, match='Your export archive is corrupt! Please see the log-file'): data_migration_legacy_process_attributes(data) diff --git a/tests/tools/archive/migration/test_v07_to_v08.py b/tests/tools/archive/migration/test_v07_to_v08.py index 7308fa0ce4..5c6dd52109 100644 --- a/tests/tools/archive/migration/test_v07_to_v08.py +++ b/tests/tools/archive/migration/test_v07_to_v08.py @@ -25,7 +25,7 @@ def test_migrate_external(migrate_from_func): def test_migration_0043_default_link_label(): - """Check CorruptArchive is raised for different cases during migration 0040""" + """Check link labels are migrated properly.""" # data has one "valid" link, in the form of . # data also has one "invalid" link, in form of . 
# After the migration, the "invalid" link should have been updated to the "valid" link diff --git a/tests/tools/archive/orm/test_links.py b/tests/tools/archive/orm/test_links.py index 242dea30e7..f8ed4a78e5 100644 --- a/tests/tools/archive/orm/test_links.py +++ b/tests/tools/archive/orm/test_links.py @@ -12,7 +12,6 @@ from aiida.common.links import LinkType from aiida.orm.entities import EntityTypes from aiida.tools.archive import ArchiveFormatSqlZip, create_archive, import_archive -# from aiida.tools.archive.exceptions import DanglingLinkError from tests.tools.archive.utils import get_all_node_links diff --git a/tests/tools/archive/test_simple.py b/tests/tools/archive/test_simple.py index 0f1002ee6e..43a58a1e7c 100644 --- a/tests/tools/archive/test_simple.py +++ b/tests/tools/archive/test_simple.py @@ -13,9 +13,9 @@ from aiida import orm from aiida.common import json -from aiida.common.exceptions import LicensingException +from aiida.common.exceptions import IncompatibleStorageSchema, LicensingException from aiida.common.links import LinkType -from aiida.tools.archive import create_archive, exceptions, import_archive +from aiida.tools.archive import create_archive, import_archive @pytest.mark.parametrize('entities', ['all', 'specific']) @@ -102,7 +102,7 @@ def test_check_for_export_format_version(aiida_profile_clean, tmp_path): # then try to import it aiida_profile_clean.clear_profile() - with pytest.raises(exceptions.IncompatibleArchiveVersionError): + with pytest.raises(IncompatibleStorageSchema): import_archive(filename2) From 6b7dc36011e308f4c501058be389ec0920e5fada Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Thu, 24 Feb 2022 13:14:06 +0100 Subject: [PATCH 06/26] Make ReadOnlyError subclass of AiidaException --- aiida/storage/sqlite_zip/backend.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/aiida/storage/sqlite_zip/backend.py b/aiida/storage/sqlite_zip/backend.py index 5ed605337b..995317cc21 100644 --- 
a/aiida/storage/sqlite_zip/backend.py +++ b/aiida/storage/sqlite_zip/backend.py @@ -19,7 +19,7 @@ from archive_path import extract_file_in_zip from sqlalchemy.orm import Session -from aiida.common.exceptions import ClosedStorage, CorruptStorage +from aiida.common.exceptions import AiidaException, ClosedStorage, CorruptStorage from aiida.manage import Profile from aiida.orm.entities import EntityTypes from aiida.orm.implementation import StorageBackend @@ -174,10 +174,10 @@ def get_info(self, statistics: bool = False, **kwargs) -> dict: raise NotImplementedError -class ReadOnlyError(IOError): +class ReadOnlyError(AiidaException): """Raised when a write operation is called on a read-only archive.""" - def __init__(self, msg='Archive is read-only'): # pylint: disable=useless-super-delegation + def __init__(self, msg='sqlite_zip storage is read-only'): # pylint: disable=useless-super-delegation super().__init__(msg) From 894807b4392ba17a8ba6cb2e1b32ab4bc5737132 Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Thu, 24 Feb 2022 17:32:38 +0100 Subject: [PATCH 07/26] Move get_model_from_entity --- aiida/storage/sqlite_zip/models.py | 23 +++++++++++++++++- .../implementations/sqlite_zip/writer.py | 24 ++----------------- 2 files changed, 24 insertions(+), 23 deletions(-) diff --git a/aiida/storage/sqlite_zip/models.py b/aiida/storage/sqlite_zip/models.py index eee99c45d4..a81faa3dd0 100644 --- a/aiida/storage/sqlite_zip/models.py +++ b/aiida/storage/sqlite_zip/models.py @@ -19,7 +19,8 @@ Also, `varchar_pattern_ops` indexes are not possible in sqlite. 
""" from datetime import datetime -from typing import Optional +import functools +from typing import Any, Optional, Set, Tuple import pytz import sqlalchemy as sa @@ -27,6 +28,7 @@ from sqlalchemy.dialects.postgresql import JSONB, UUID from sqlalchemy.dialects.sqlite import JSON +from aiida.orm.entities import EntityTypes # we need to import all models, to ensure they are loaded on the SQLA Metadata from aiida.storage.psql_dos.models import authinfo, base, comment, computer, group, log, node, user @@ -121,3 +123,22 @@ def create_orm_cls(klass: base.Base) -> SqliteBase: DbGroup.dbnodes = sa_orm.relationship( # type: ignore[attr-defined] 'DbNode', secondary='db_dbgroup_dbnodes', backref='dbgroups', lazy='dynamic' ) + + +@functools.lru_cache(maxsize=10) +def get_model_from_entity(entity_type: EntityTypes) -> Tuple[Any, Set[str]]: + """Return the Sqlalchemy model and column names corresponding to the given entity.""" + model = { + EntityTypes.USER: DbUser, + EntityTypes.AUTHINFO: DbAuthInfo, + EntityTypes.GROUP: DbGroup, + EntityTypes.NODE: DbNode, + EntityTypes.COMMENT: DbComment, + EntityTypes.COMPUTER: DbComputer, + EntityTypes.LOG: DbLog, + EntityTypes.LINK: DbLink, + EntityTypes.GROUP_NODE: DbGroupNodes + }[entity_type] + mapper = sa.inspect(model).mapper + column_names = {col.name for col in mapper.c.values()} + return model, column_names diff --git a/aiida/tools/archive/implementations/sqlite_zip/writer.py b/aiida/tools/archive/implementations/sqlite_zip/writer.py index 7283b91a32..2e4315b1da 100644 --- a/aiida/tools/archive/implementations/sqlite_zip/writer.py +++ b/aiida/tools/archive/implementations/sqlite_zip/writer.py @@ -9,7 +9,6 @@ ########################################################################### """AiiDA archive writer implementation.""" from datetime import datetime -import functools import hashlib from io import BytesIO import json @@ -20,7 +19,7 @@ import zipfile from archive_path import NOTSET, ZipPath, extract_file_in_zip, read_file_in_zip 
-from sqlalchemy import insert, inspect +from sqlalchemy import insert from sqlalchemy.exc import IntegrityError as SqlaIntegrityError from sqlalchemy.future.engine import Connection @@ -33,25 +32,6 @@ from aiida.tools.archive.abstract import ArchiveFormatAbstract, ArchiveWriterAbstract -@functools.lru_cache(maxsize=10) -def _get_model_from_entity(entity_type: EntityTypes): - """Return the Sqlalchemy model and column names corresponding to the given entity.""" - model = { - EntityTypes.USER: models.DbUser, - EntityTypes.AUTHINFO: models.DbAuthInfo, - EntityTypes.GROUP: models.DbGroup, - EntityTypes.NODE: models.DbNode, - EntityTypes.COMMENT: models.DbComment, - EntityTypes.COMPUTER: models.DbComputer, - EntityTypes.LOG: models.DbLog, - EntityTypes.LINK: models.DbLink, - EntityTypes.GROUP_NODE: models.DbGroupNodes - }[entity_type] - mapper = inspect(model).mapper - column_names = {col.name for col in mapper.c.values()} - return model, column_names - - class ArchiveWriterSqlZip(ArchiveWriterAbstract): """AiiDA archive writer implementation.""" @@ -147,7 +127,7 @@ def bulk_insert( return self._assert_in_context() assert self._conn is not None - model, col_keys = _get_model_from_entity(entity_type) + model, col_keys = models.get_model_from_entity(entity_type) if allow_defaults: for row in rows: if not col_keys.issuperset(row): From 72a6cb079ea8b75a06d72c4b0cb466030167eff4 Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Thu, 24 Feb 2022 20:38:03 +0100 Subject: [PATCH 08/26] Allow for profile creation via `SqliteZipBackend.create_profile` --- aiida/storage/sqlite_zip/backend.py | 32 +++++++++++++++---- .../implementations/sqlite_zip/reader.py | 16 +--------- 2 files changed, 27 insertions(+), 21 deletions(-) diff --git a/aiida/storage/sqlite_zip/backend.py b/aiida/storage/sqlite_zip/backend.py index 995317cc21..bc0548885e 100644 --- a/aiida/storage/sqlite_zip/backend.py +++ b/aiida/storage/sqlite_zip/backend.py @@ -8,11 +8,13 @@ # For further information please visit 
http://www.aiida.net # ########################################################################### """The table models are dynamically generated from the sqlalchemy backend models.""" +from __future__ import annotations + from contextlib import contextmanager from functools import singledispatch from pathlib import Path import tempfile -from typing import BinaryIO, Iterable, Iterator, List, Optional, Sequence, Tuple, Type, cast +from typing import BinaryIO, Iterable, Iterator, Optional, Sequence, Tuple, Type, cast import zipfile from zipfile import ZipFile @@ -40,6 +42,24 @@ class SqliteZipBackend(StorageBackend): # pylint: disable=too-many-public-metho def version_head(cls) -> str: return get_schema_version_head() + @staticmethod + def create_profile(path: str | Path) -> Profile: + """Create a new profile instance for this backend, from the path to the zip file.""" + return Profile( + 'default', { + 'storage': { + 'backend': 'sqlite_zip', + 'config': { + 'path': str(path) + } + }, + 'process_control': { + 'backend': 'null', + 'config': {} + } + } + ) + @classmethod def version_profile(cls, profile: Profile) -> None: return read_version(profile.storage_config['path']) @@ -152,10 +172,10 @@ def transaction(self): def in_transaction(self) -> bool: return False - def bulk_insert(self, entity_type: EntityTypes, rows: List[dict], allow_defaults: bool = False) -> List[int]: + def bulk_insert(self, entity_type: EntityTypes, rows: list[dict], allow_defaults: bool = False) -> list[int]: raise ReadOnlyError() - def bulk_update(self, entity_type: EntityTypes, rows: List[dict]) -> None: + def bulk_update(self, entity_type: EntityTypes, rows: list[dict]) -> None: raise ReadOnlyError() def delete_nodes_and_connections(self, pks_to_delete: Sequence[int]): @@ -221,7 +241,7 @@ def has_object(self, key: str) -> bool: return False return True - def has_objects(self, keys: List[str]) -> List[bool]: + def has_objects(self, keys: list[str]) -> list[bool]: return [self.has_object(key) 
for key in keys] def list_objects(self) -> Iterable[str]: @@ -239,12 +259,12 @@ def open(self, key: str) -> Iterator[BinaryIO]: finally: handle.close() - def iter_object_streams(self, keys: List[str]) -> Iterator[Tuple[str, BinaryIO]]: + def iter_object_streams(self, keys: list[str]) -> Iterator[Tuple[str, BinaryIO]]: for key in keys: with self.open(key) as handle: # pylint: disable=not-context-manager yield key, handle - def delete_objects(self, keys: List[str]) -> None: + def delete_objects(self, keys: list[str]) -> None: raise ReadOnlyError() def get_object_hash(self, key: str) -> str: diff --git a/aiida/tools/archive/implementations/sqlite_zip/reader.py b/aiida/tools/archive/implementations/sqlite_zip/reader.py index 9f524621c1..e5b73c18e4 100644 --- a/aiida/tools/archive/implementations/sqlite_zip/reader.py +++ b/aiida/tools/archive/implementations/sqlite_zip/reader.py @@ -12,7 +12,6 @@ from typing import Any, Dict, Optional, Union from aiida.common.exceptions import CorruptStorage -from aiida.manage import Profile from aiida.storage.sqlite_zip.backend import SqliteZipBackend from aiida.storage.sqlite_zip.utils import extract_metadata from aiida.tools.archive.abstract import ArchiveReaderAbstract @@ -50,19 +49,6 @@ def get_backend(self) -> SqliteZipBackend: raise AssertionError('Not in context') if self._backend is not None: return self._backend - profile = Profile( - 'default', { - 'storage': { - 'backend': 'sqlite_zip', - 'config': { - 'path': str(self.path) - } - }, - 'process_control': { - 'backend': 'null', - 'config': {} - } - } - ) + profile = SqliteZipBackend.create_profile(self.path) self._backend = SqliteZipBackend(profile) return self._backend From 6bed02b6ee91c8f19cf58ea1cecb85d6ff444f5e Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Thu, 24 Feb 2022 21:04:43 +0100 Subject: [PATCH 09/26] Fix rebase errors --- aiida/cmdline/commands/cmd_archive.py | 1 - aiida/storage/sqlite_zip/backend.py | 14 ++++---------- 2 files changed, 4 insertions(+), 11 
deletions(-) diff --git a/aiida/cmdline/commands/cmd_archive.py b/aiida/cmdline/commands/cmd_archive.py index 42e0d80265..5659b3aa16 100644 --- a/aiida/cmdline/commands/cmd_archive.py +++ b/aiida/cmdline/commands/cmd_archive.py @@ -23,7 +23,6 @@ from aiida.cmdline.params import arguments, options from aiida.cmdline.params.types import GroupParamType, PathOrUrl from aiida.cmdline.utils import decorators, echo -from aiida.cmdline.utils.common import get_database_summary from aiida.common.exceptions import CorruptStorage, UnreachableStorage from aiida.common.links import GraphTraversalRules from aiida.common.log import AIIDA_LOGGER diff --git a/aiida/storage/sqlite_zip/backend.py b/aiida/storage/sqlite_zip/backend.py index bc0548885e..c758a8d18b 100644 --- a/aiida/storage/sqlite_zip/backend.py +++ b/aiida/storage/sqlite_zip/backend.py @@ -190,8 +190,10 @@ def set_global_variable(self, key: str, value, description: Optional[str] = None def maintain(self, dry_run: bool = False, live: bool = True, **kwargs) -> None: raise NotImplementedError - def get_info(self, statistics: bool = False, **kwargs) -> dict: - raise NotImplementedError + def get_info(self, statistics: bool = False) -> dict: + results = super().get_info(statistics=statistics) + results['repository'] = self.get_repository().get_info(statistics) + return results class ReadOnlyError(AiidaException): @@ -316,14 +318,6 @@ def Log(self): def table_groups_nodes(self): return models.DbGroupNodes.__table__ # type: ignore[attr-defined] # pylint: disable=no-member - def maintain(self, full: bool = False, dry_run: bool = False, **kwargs) -> None: - raise NotImplementedError - - def get_info(self, statistics: bool = False) -> dict: - results = super().get_info(statistics=statistics) - results['repository'] = self.get_repository().get_info(statistics) - return results - def create_backend_cls(base_class, model_cls): """Create an archive backend class for the given model class.""" From 
999aec47d803206ab174e7a8de22210676818894 Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Thu, 24 Feb 2022 22:05:57 +0100 Subject: [PATCH 10/26] Introduce aiida.storage.log.MIGRATE_LOGGER --- aiida/cmdline/commands/cmd_archive.py | 2 +- aiida/storage/log.py | 1 + aiida/storage/psql_dos/migrator.py | 14 ++++++-------- aiida/storage/sqlite_zip/backend.py | 2 +- .../sqlite_zip/migrations/legacy_to_main.py | 6 ++++-- .../sqlite_zip/{migrations/main.py => migrator.py} | 12 ++++++------ aiida/tools/archive/__init__.py | 2 -- aiida/tools/archive/common.py | 5 ----- .../archive/implementations/sqlite_zip/main.py | 2 +- docs/source/nitpick-exceptions | 4 +++- tests/cmdline/commands/test_archive_create.py | 2 +- tests/cmdline/commands/test_archive_import.py | 2 +- 12 files changed, 25 insertions(+), 29 deletions(-) rename aiida/storage/sqlite_zip/{migrations/main.py => migrator.py} (95%) diff --git a/aiida/cmdline/commands/cmd_archive.py b/aiida/cmdline/commands/cmd_archive.py index 5659b3aa16..214e0f22b1 100644 --- a/aiida/cmdline/commands/cmd_archive.py +++ b/aiida/cmdline/commands/cmd_archive.py @@ -248,7 +248,7 @@ def migrate(input_file, output_file, force, in_place, version): f'{error.__class__.__name__}:{error}' ) - echo.echo_success(f'migrated the archive to version {version}') + echo.echo_success(f'migrated the archive to version {version!r}') class ExtrasImportCode(Enum): diff --git a/aiida/storage/log.py b/aiida/storage/log.py index 11ef376b36..24a037f442 100644 --- a/aiida/storage/log.py +++ b/aiida/storage/log.py @@ -12,3 +12,4 @@ from aiida.common.log import AIIDA_LOGGER STORAGE_LOGGER = AIIDA_LOGGER.getChild('storage') +MIGRATE_LOGGER = STORAGE_LOGGER.getChild('migrate') diff --git a/aiida/storage/psql_dos/migrator.py b/aiida/storage/psql_dos/migrator.py index ef97683548..64a7c210d7 100644 --- a/aiida/storage/psql_dos/migrator.py +++ b/aiida/storage/psql_dos/migrator.py @@ -33,6 +33,7 @@ from aiida.common import exceptions from 
aiida.manage.configuration.profile import Profile +from aiida.storage.log import MIGRATE_LOGGER from aiida.storage.psql_dos.models.settings import DbSetting from aiida.storage.psql_dos.utils import create_sqlalchemy_engine @@ -197,8 +198,6 @@ def migrate(self) -> None: :raises: :class:`~aiida.common.exceptions.UnreachableStorage` if the storage cannot be accessed """ - from aiida.cmdline.utils import echo - # the database can be in one of a few states: # 1. Completely empty -> we can simply initialise it with the current ORM schema # 2. Legacy django database -> we transfer the version to alembic, migrate to the head of the django branch, @@ -211,7 +210,7 @@ def migrate(self) -> None: if not inspect(connection).has_table(self.alembic_version_tbl_name): if not inspect(connection).has_table(self.django_version_table.name): # the database is assumed to be empty, so we need to initialise it - echo.echo_report('initialising empty storage schema') + MIGRATE_LOGGER.report('initialising empty storage schema') self.initialise() return # the database is a legacy django one, @@ -238,10 +237,10 @@ def migrate(self) -> None: if 'django' in branches or 'sqlalchemy' in branches: # migrate up to the top of the respective legacy branches if 'django' in branches: - echo.echo_report('Migrating to the head of the legacy django branch') + MIGRATE_LOGGER.report('Migrating to the head of the legacy django branch') self.migrate_up('django@head') elif 'sqlalchemy' in branches: - echo.echo_report('Migrating to the head of the legacy sqlalchemy branch') + MIGRATE_LOGGER.report('Migrating to the head of the legacy sqlalchemy branch') self.migrate_up('sqlalchemy@head') # now re-stamp with the comparable revision on the main branch with self._connection_context() as connection: @@ -251,7 +250,7 @@ def migrate(self) -> None: connection.commit() # finally migrate to the main head revision - echo.echo_report('Migrating to the head of the main branch') + MIGRATE_LOGGER.report('Migrating to the head 
of the main branch') self.migrate_up('main@head') def migrate_up(self, version: str) -> None: @@ -297,9 +296,8 @@ def _alembic_connect(self, _connection: Optional[Connection] = None): def _callback(step: MigrationInfo, **kwargs): # pylint: disable=unused-argument """Callback to be called after a migration step is executed.""" - from aiida.cmdline.utils import echo from_rev = step.down_revision_ids[0] if step.down_revision_ids else '' - echo.echo_report(f'- {from_rev} -> {step.up_revision_id}') + MIGRATE_LOGGER.report(f'- {from_rev} -> {step.up_revision_id}') config.attributes['on_version_apply'] = _callback # pylint: disable=unsupported-assignment-operation diff --git a/aiida/storage/sqlite_zip/backend.py b/aiida/storage/sqlite_zip/backend.py index c758a8d18b..10e19ef81f 100644 --- a/aiida/storage/sqlite_zip/backend.py +++ b/aiida/storage/sqlite_zip/backend.py @@ -31,7 +31,7 @@ from aiida.storage.psql_dos.orm.utils import ModelWrapper from . import models -from .migrations.main import get_schema_version_head, validate_storage +from .migrator import get_schema_version_head, validate_storage from .utils import DB_FILENAME, REPO_FOLDER, create_sqla_engine, read_version diff --git a/aiida/storage/sqlite_zip/migrations/legacy_to_main.py b/aiida/storage/sqlite_zip/migrations/legacy_to_main.py index 8e4b29bef7..e5ed01ad74 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy_to_main.py +++ b/aiida/storage/sqlite_zip/migrations/legacy_to_main.py @@ -25,10 +25,10 @@ from aiida.common.hashing import chunked_file_hash from aiida.common.progress_reporter import get_progress_reporter from aiida.repository.common import File, FileType -from aiida.storage.sqlite_zip.utils import create_sqla_engine -from aiida.tools.archive.common import MIGRATE_LOGGER, batch_iter +from aiida.storage.log import MIGRATE_LOGGER from . 
import v1_db_schema as v1_schema +from ..utils import create_sqla_engine from .legacy.utils import update_metadata _NODE_ENTITY_NAME = 'Node' @@ -154,6 +154,8 @@ def _json_to_sqlite( outpath: Path, data: dict, node_repos: Dict[str, List[Tuple[str, Optional[str]]]], batch_size: int = 100 ) -> None: """Convert a JSON archive format to SQLite.""" + from aiida.tools.archive.common import batch_iter + MIGRATE_LOGGER.report('Converting DB to SQLite') engine = create_sqla_engine(outpath) diff --git a/aiida/storage/sqlite_zip/migrations/main.py b/aiida/storage/sqlite_zip/migrator.py similarity index 95% rename from aiida/storage/sqlite_zip/migrations/main.py rename to aiida/storage/sqlite_zip/migrator.py index 2279f0b91a..4e3e25916d 100644 --- a/aiida/storage/sqlite_zip/migrations/main.py +++ b/aiida/storage/sqlite_zip/migrator.py @@ -23,18 +23,18 @@ from aiida.common import json from aiida.common.exceptions import CorruptStorage, IncompatibleStorageSchema, StorageMigrationError from aiida.common.progress_reporter import get_progress_reporter -from aiida.tools.archive.common import MIGRATE_LOGGER +from aiida.storage.log import MIGRATE_LOGGER -from ..utils import read_version -from .legacy import FINAL_LEGACY_VERSION, LEGACY_MIGRATE_FUNCTIONS -from .legacy_to_main import MIGRATED_TO_REVISION, perform_v1_migration -from .utils import copy_tar_to_zip, copy_zip_to_zip +from .migrations.legacy import FINAL_LEGACY_VERSION, LEGACY_MIGRATE_FUNCTIONS +from .migrations.legacy_to_main import MIGRATED_TO_REVISION, perform_v1_migration +from .migrations.utils import copy_tar_to_zip, copy_zip_to_zip +from .utils import read_version def _alembic_config() -> Config: """Return an instance of an Alembic `Config`.""" config = Config() - config.set_main_option('script_location', os.path.dirname(os.path.realpath(__file__))) + config.set_main_option('script_location', str(Path(os.path.realpath(__file__)).parent / 'migrations')) return config diff --git a/aiida/tools/archive/__init__.py 
b/aiida/tools/archive/__init__.py index e62ca09039..735e4dc43d 100644 --- a/aiida/tools/archive/__init__.py +++ b/aiida/tools/archive/__init__.py @@ -17,7 +17,6 @@ # pylint: disable=wildcard-import from .abstract import * -from .common import * from .create import * from .exceptions import * from .implementations import * @@ -37,7 +36,6 @@ 'ImportTestRun', 'ImportUniquenessError', 'ImportValidationError', - 'MIGRATE_LOGGER', 'create_archive', 'get_format', 'import_archive', diff --git a/aiida/tools/archive/common.py b/aiida/tools/archive/common.py index a6bdce8094..0411dd2bcc 100644 --- a/aiida/tools/archive/common.py +++ b/aiida/tools/archive/common.py @@ -13,14 +13,9 @@ import urllib.parse import urllib.request -from aiida.common.log import AIIDA_LOGGER from aiida.orm import AuthInfo, Comment, Computer, Entity, Group, Log, Node, User from aiida.orm.entities import EntityTypes -__all__ = ('MIGRATE_LOGGER',) - -MIGRATE_LOGGER = AIIDA_LOGGER.getChild('migrate') - # Mapping from entity names to AiiDA classes entity_type_to_orm: Dict[EntityTypes, Type[Entity]] = { EntityTypes.AUTHINFO: AuthInfo, diff --git a/aiida/tools/archive/implementations/sqlite_zip/main.py b/aiida/tools/archive/implementations/sqlite_zip/main.py index 4b05e94545..a86dc5dff1 100644 --- a/aiida/tools/archive/implementations/sqlite_zip/main.py +++ b/aiida/tools/archive/implementations/sqlite_zip/main.py @@ -11,7 +11,7 @@ from pathlib import Path from typing import Any, Literal, Union, overload -from aiida.storage.sqlite_zip.migrations.main import get_schema_version_head, migrate +from aiida.storage.sqlite_zip.migrator import get_schema_version_head, migrate from aiida.storage.sqlite_zip.utils import read_version from aiida.tools.archive.abstract import ArchiveFormatAbstract diff --git a/docs/source/nitpick-exceptions b/docs/source/nitpick-exceptions index bc87aa31aa..0464c9e354 100644 --- a/docs/source/nitpick-exceptions +++ b/docs/source/nitpick-exceptions @@ -52,6 +52,7 @@ py:class SelfType 
py:class Profile py:class PsqlDosBackend py:class str | list[str] +py:class str | Path ### AiiDA @@ -70,7 +71,6 @@ py:class aiida.tools.groups.paths.WalkNodeResult py:meth aiida.orm.groups.GroupCollection.delete py:class AbstractRepositoryBackend -py:class Backend py:class BackendEntity py:class BackendEntityType py:class BackendNode @@ -116,6 +116,7 @@ py:class ReturnType py:class Runner py:class Scheduler py:class SelfType +py:class StorageBackend py:class TransactionType py:class Transport py:class TransportQueue @@ -127,6 +128,7 @@ py:class orm.implementation.Backend py:class aiida.common.exceptions.UnreachableStorage py:class aiida.common.exceptions.IncompatibleDatabaseSchema py:class aiida.common.exceptions.DatabaseMigrationError +py:class aiida.storage.sqlite_zip.models.DbGroupNode py:class AuthInfoCollection py:class CommentCollection diff --git a/tests/cmdline/commands/test_archive_create.py b/tests/cmdline/commands/test_archive_create.py index 895f9acc9f..60c2194702 100644 --- a/tests/cmdline/commands/test_archive_create.py +++ b/tests/cmdline/commands/test_archive_create.py @@ -15,7 +15,7 @@ from aiida.cmdline.commands import cmd_archive from aiida.orm import Code, Computer, Dict, Group -from aiida.storage.sqlite_zip.migrations.main import list_versions +from aiida.storage.sqlite_zip.migrator import list_versions from aiida.tools.archive import ArchiveFormatSqlZip from tests.utils.archives import get_archive_file diff --git a/tests/cmdline/commands/test_archive_import.py b/tests/cmdline/commands/test_archive_import.py index 8e319cc17f..9666bc1355 100644 --- a/tests/cmdline/commands/test_archive_import.py +++ b/tests/cmdline/commands/test_archive_import.py @@ -14,7 +14,7 @@ from aiida.cmdline.commands import cmd_archive from aiida.orm import Group -from aiida.storage.sqlite_zip.migrations.main import list_versions +from aiida.storage.sqlite_zip.migrator import list_versions from aiida.tools.archive import ArchiveFormatSqlZip from tests.utils.archives 
import get_archive_file From dbbc5a05e44f3ffd7bf213195b1c96251aeaca8c Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Thu, 24 Feb 2022 22:32:23 +0100 Subject: [PATCH 11/26] fix test archive file naming --- tests/cmdline/commands/test_archive_create.py | 18 +++++++++--------- tests/cmdline/commands/test_archive_import.py | 6 +++--- tests/static/calcjob/arithmetic.add.aiida | Bin 7738 -> 9349 bytes tests/static/calcjob/container/config.json | 1 - ...72180cb56463c2897c7ff6fb93a5ebc5d64162e15b | Bin 34 -> 0 bytes ...d53541c3b4640a64133bbee2188444810cd3169f81 | 1 - ...cdbd021d8395e9ef993fec33cbb6c34a73d6cf5372 | 1 - ...f64df320ca1ce7d4a4d3a58f4f7920487a5ec7f532 | 1 - ...05c6b814f542bf6bb88d29ac4213e7bf6afce0501e | 6 ------ ...d63c5304c00bf94a8e6b6a0db33c940d1f49667879 | Bin 28 -> 0 bytes ...c8996fb92427ae41e4649b934ca495991b7852b855 | 0 ...5e656c7862d086e9245420892a7de62cd9ec582a06 | 1 - tests/static/calcjob/container/packs.idx | Bin 12288 -> 0 bytes tests/static/calcjob/container/packs/0 | Bin 1289 -> 0 bytes tests/static/calcjob/data.json | 1 - tests/static/calcjob/metadata.json | 1 - ..._simple.aiida => export_0.10_simple.aiida} | Bin ..._simple.aiida => export_0.11_simple.aiida} | Bin ..._simple.aiida => export_0.12_simple.aiida} | Bin ...1_simple.aiida => export_0.1_simple.aiida} | Bin ...2_simple.aiida => export_0.2_simple.aiida} | Bin ...simple.tar.gz => export_0.2_simple.tar.gz} | Bin ...3_simple.aiida => export_0.3_simple.aiida} | Bin ..._Nodes.aiida => export_0.4_no_Nodes.aiida} | Bin ...4_simple.aiida => export_0.4_simple.aiida} | Bin ...simple.tar.gz => export_0.4_simple.tar.gz} | Bin ...5_simple.aiida => export_0.5_simple.aiida} | Bin ...6_simple.aiida => export_0.6_simple.aiida} | Bin ...7_simple.aiida => export_0.7_simple.aiida} | Bin ...8_simple.aiida => export_0.8_simple.aiida} | Bin ...9_simple.aiida => export_0.9_simple.aiida} | Bin .../archive/migration/test_legacy_funcs.py | 4 ++-- .../tools/archive/migration/test_migration.py | 8 ++++---- 
.../archive/migration/test_v05_to_v06.py | 12 ++++++------ 34 files changed, 24 insertions(+), 37 deletions(-) delete mode 100644 tests/static/calcjob/container/config.json delete mode 100644 tests/static/calcjob/container/loose/04/bd777eeb8fb55b05d1ab72180cb56463c2897c7ff6fb93a5ebc5d64162e15b delete mode 100644 tests/static/calcjob/container/loose/33/7b794ce718a09a620090d53541c3b4640a64133bbee2188444810cd3169f81 delete mode 100644 tests/static/calcjob/container/loose/59/ad1048cf9741febe6085cdbd021d8395e9ef993fec33cbb6c34a73d6cf5372 delete mode 100644 tests/static/calcjob/container/loose/95/f819ef2ea203bed2cacaf64df320ca1ce7d4a4d3a58f4f7920487a5ec7f532 delete mode 100644 tests/static/calcjob/container/loose/d3/e14d6651a535b4689e0605c6b814f542bf6bb88d29ac4213e7bf6afce0501e delete mode 100644 tests/static/calcjob/container/loose/e1/49222b6bf7570a66c6d9d63c5304c00bf94a8e6b6a0db33c940d1f49667879 delete mode 100644 tests/static/calcjob/container/loose/e3/b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 delete mode 100644 tests/static/calcjob/container/loose/f0/b5c2c2211c8d67ed15e75e656c7862d086e9245420892a7de62cd9ec582a06 delete mode 100644 tests/static/calcjob/container/packs.idx delete mode 100644 tests/static/calcjob/container/packs/0 delete mode 100644 tests/static/calcjob/data.json delete mode 100644 tests/static/calcjob/metadata.json rename tests/static/export/migrate/{export_v0.10_simple.aiida => export_0.10_simple.aiida} (100%) rename tests/static/export/migrate/{export_v0.11_simple.aiida => export_0.11_simple.aiida} (100%) rename tests/static/export/migrate/{export_v0.12_simple.aiida => export_0.12_simple.aiida} (100%) rename tests/static/export/migrate/{export_v0.1_simple.aiida => export_0.1_simple.aiida} (100%) rename tests/static/export/migrate/{export_v0.2_simple.aiida => export_0.2_simple.aiida} (100%) rename tests/static/export/migrate/{export_v0.2_simple.tar.gz => export_0.2_simple.tar.gz} (100%) rename 
tests/static/export/migrate/{export_v0.3_simple.aiida => export_0.3_simple.aiida} (100%) rename tests/static/export/migrate/{export_v0.4_no_Nodes.aiida => export_0.4_no_Nodes.aiida} (100%) rename tests/static/export/migrate/{export_v0.4_simple.aiida => export_0.4_simple.aiida} (100%) rename tests/static/export/migrate/{export_v0.4_simple.tar.gz => export_0.4_simple.tar.gz} (100%) rename tests/static/export/migrate/{export_v0.5_simple.aiida => export_0.5_simple.aiida} (100%) rename tests/static/export/migrate/{export_v0.6_simple.aiida => export_0.6_simple.aiida} (100%) rename tests/static/export/migrate/{export_v0.7_simple.aiida => export_0.7_simple.aiida} (100%) rename tests/static/export/migrate/{export_v0.8_simple.aiida => export_0.8_simple.aiida} (100%) rename tests/static/export/migrate/{export_v0.9_simple.aiida => export_0.9_simple.aiida} (100%) diff --git a/tests/cmdline/commands/test_archive_create.py b/tests/cmdline/commands/test_archive_create.py index 60c2194702..b564fa8d1a 100644 --- a/tests/cmdline/commands/test_archive_create.py +++ b/tests/cmdline/commands/test_archive_create.py @@ -89,7 +89,7 @@ def test_create_basic(run_cli_command, tmp_path): @pytest.mark.parametrize('version', ('0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.10', '0.11', '0.12')) def test_migrate_versions_old(run_cli_command, tmp_path, version): """Migrating archives with a version older than the current should work.""" - archive = f'export_v{version}_simple.aiida' + archive = f'export_{version}_simple.aiida' filename_input = get_archive_file(archive, filepath='export/migrate') filename_output = tmp_path / 'archive.aiida' @@ -101,7 +101,7 @@ def test_migrate_versions_old(run_cli_command, tmp_path, version): def test_migrate_version_specific(run_cli_command, tmp_path): """Test the `-v/--version` option to migrate to a specific version instead of the latest.""" - archive = 'export_v0.5_simple.aiida' + archive = 'export_0.5_simple.aiida' target_version = '0.8' filename_input = 
get_archive_file(archive, filepath='export/migrate') filename_output = tmp_path / 'archive.aiida' @@ -118,7 +118,7 @@ def test_migrate_file_already_exists(run_cli_command, tmp_path): """Test that using a file that already exists will raise.""" outpath = tmp_path / 'archive.aiida' outpath.touch() - filename_input = get_archive_file('export_v0.6_simple.aiida', filepath='export/migrate') + filename_input = get_archive_file('export_0.6_simple.aiida', filepath='export/migrate') options = [filename_input, outpath] run_cli_command(cmd_archive.migrate, options, raises=True) @@ -127,7 +127,7 @@ def test_migrate_force(run_cli_command, tmp_path): """Test that using a file that already exists will work when the ``-f/--force`` parameter is used.""" outpath = tmp_path / 'archive.aiida' outpath.touch() - filename_input = get_archive_file('export_v0.6_simple.aiida', filepath='export/migrate') + filename_input = get_archive_file('export_0.6_simple.aiida', filepath='export/migrate') options = ['--force', filename_input, outpath] run_cli_command(cmd_archive.migrate, options) assert ArchiveFormatSqlZip().read_version(outpath) == ArchiveFormatSqlZip().latest_version @@ -135,7 +135,7 @@ def test_migrate_force(run_cli_command, tmp_path): def test_migrate_in_place(run_cli_command, tmp_path): """Test that passing the -i/--in-place option will overwrite the passed file.""" - archive = 'export_v0.6_simple.aiida' + archive = 'export_0.6_simple.aiida' target_version = '0.8' filename_input = get_archive_file(archive, filepath='export/migrate') filename_clone = tmp_path / 'archive.aiida' @@ -167,7 +167,7 @@ def test_migrate_low_verbosity(run_cli_command, tmp_path): Note that we use the ``config_with_profile`` fixture to create a dummy profile, since the ``--verbosity`` option will change the profile configuration which could potentially influence the other tests. 
""" - filename_input = get_archive_file('export_v0.6_simple.aiida', filepath='export/migrate') + filename_input = get_archive_file('export_0.6_simple.aiida', filepath='export/migrate') filename_output = tmp_path / 'archive.aiida' options = ['--verbosity', 'WARNING', filename_input, filename_output] @@ -181,7 +181,7 @@ def test_migrate_low_verbosity(run_cli_command, tmp_path): @pytest.mark.parametrize('version', list_versions()) def test_inspect_version(run_cli_command, version): """Test the functionality of `verdi export inspect --version`.""" - archive = f'export_v{version}_simple.aiida' + archive = f'export_{version}_simple.aiida' filename_input = get_archive_file(archive, filepath='export/migrate') options = ['--version', filename_input] result = run_cli_command(cmd_archive.inspect, options) @@ -190,7 +190,7 @@ def test_inspect_version(run_cli_command, version): def test_inspect_metadata(run_cli_command): """Test the functionality of `verdi export inspect --meta-data`.""" - archive = f'export_v{ArchiveFormatSqlZip().latest_version}_simple.aiida' + archive = f'export_{ArchiveFormatSqlZip().latest_version}_simple.aiida' filename_input = get_archive_file(archive, filepath='export/migrate') options = ['--meta-data', filename_input] result = run_cli_command(cmd_archive.inspect, options) @@ -199,7 +199,7 @@ def test_inspect_metadata(run_cli_command): def test_inspect_database(run_cli_command): """Test the functionality of `verdi export inspect --meta-data`.""" - archive = f'export_v{ArchiveFormatSqlZip().latest_version}_simple.aiida' + archive = f'export_{ArchiveFormatSqlZip().latest_version}_simple.aiida' filename_input = get_archive_file(archive, filepath='export/migrate') options = ['--database', filename_input] result = run_cli_command(cmd_archive.inspect, options) diff --git a/tests/cmdline/commands/test_archive_import.py b/tests/cmdline/commands/test_archive_import.py index 9666bc1355..a2441faee6 100644 --- a/tests/cmdline/commands/test_archive_import.py +++ 
b/tests/cmdline/commands/test_archive_import.py @@ -31,7 +31,7 @@ def init_cls(self, aiida_profile_clean): # pylint: disable=unused-argument self.url_path = 'https://raw.githubusercontent.com/aiidateam/aiida-core/' \ '0599dabf0887bee172a04f308307e99e3c3f3ff2/aiida/backends/tests/fixtures/export/migrate/' self.archive_path = 'export/migrate' - self.newest_archive = f'export_v{ArchiveFormatSqlZip().latest_version}_simple.aiida' + self.newest_archive = f'export_{ArchiveFormatSqlZip().latest_version}_simple.aiida' def test_import_no_archives(self): """Test that passing no valid archives will lead to command failure.""" @@ -178,7 +178,7 @@ def test_import_old_local_archives(self): Expected behavior: Automatically migrate to newest version and import correctly. """ for version in list_versions(): - archive, version = (f'export_v{version}_simple.aiida', f'{version}') + archive, version = (f'export_{version}_simple.aiida', f'{version}') options = [get_archive_file(archive, filepath=self.archive_path)] result = self.cli_runner.invoke(cmd_archive.import_archive, options) @@ -244,7 +244,7 @@ def test_migration(self): `migration` = True (default), Expected: No query, migrate `migration` = False, Expected: No query, no migrate """ - archive = get_archive_file('export_v0.4_simple.aiida', filepath=self.archive_path) + archive = get_archive_file('export_0.4_simple.aiida', filepath=self.archive_path) success_message = f'Success: imported archive {archive}' # Import "normally", but explicitly specifying `--migration`, make sure confirm message is present diff --git a/tests/static/calcjob/arithmetic.add.aiida b/tests/static/calcjob/arithmetic.add.aiida index 9166ab33c41bc6c1ab8f5a099bdc2aaeafea2f94..5fdfab954809a5ce94c77dd24d3a2845d93a6d93 100644 GIT binary patch delta 2683 zcmdmG)9UFF;LXe;!oUH99SV-Y3@~88#K6U%z>t!pS6rBrS(0iT8p6xKz91(o5u{?` zck!gs3T_5QmamKq3_$%144Zw~J`2^eb8y#9l>I0K6abol-RwI+Nl~EL@$tTn&i=s> z`g-wLtXU4U0))}5Nlp+=PESZm_~7dk_JKdFBcOq4f&{a=fHdPrEd{pvkM9ixSY@1@ 
znilv>=+ks~)3~ffQ1RUSIrCRfj|iABdDZmUGv+Xhq$HlW@hRcbu6RG`?}j#d#g89Y zxy{-8f&u7Yc8;uO&Ly!xCxV=b@PPQ{h!CIv7$h?>Fz^CBkegbPn37nMsFziop9c<~ z`uRW?gYZI3mruRq&2`8?fbGF*o=9(xSk`Kl%jBFlIs)E?{^`NoSGVkXwM?b19vmdGI}-x7 z0lg0LIQAgn0wx0q28DX$NW}~uP{@!Tue=IupCormbTIWPDmn^?MI0Ab5OWSb#3L%6 zkTAjP-1)2CIvOWDub%eQ(W}?g_4D=g)bR}E<7k_8kVz#%QsRljt0Lja-JgmZFCDEq zy3%sxN!6J@D;+y4U#<+;svvG`WN>1^O5M{s=X_3iem&#uxi%qT#*bJV&Lvjw(q=1m zm~AmKH8zAK4Th?dk_&+00P-y)96;%T50NyyL3{>QVA?23Ey&jgr-@#mr~7~yM585U zL|CHaJ)jY;3<{IyOS}1_pP$x(?c5{K@4Gw{OvTpSR z&3y_nHQdsc8Dt_zZ-6%=lL#}So;mnEXM0Ykc%@=ISvC2jhw)m412MKqfkdPI|R9e0G5X^lNlNs zr9`35uZL85*sUSp1(112#SPeaRwQfQIx2`U5GZ?$p%pf=(_zIAD5^mKm|4Iyh-hd` z7Mnaz+TDjxxS))M0pq(Qj+wF3v)6{QjIryd={!l zHo&Ie!FdBvb1cvVULc*DT9TNOSdyrhRh*wU_0n0cW&<9E2jQHy{g=xAy;Hj#-J|HI z{n}A`>yCprcCXiyn^VksQgOnEi2|Qgmgcc%RxQ09vvsQ)yT#w_fD+sZB6p zSRKptaNQeTqosb5Jag)ePOYon)coJTMPluN1Ki3jAJZh1ed-oYS3PlwwfJGGrqMYu zy#DLc7oqUS{^^-TDmGJZvI*A(#w)J z{pY^;$`;_w$Rxsy6!mDn1A35wK^}-380{p{JPX%585n48V3lB^fl(D0#fTt;tHYYS zko5uM1DM!!&^4f@HDnEY89^zJ6RrW2^7xSz*fD|AA%>Bt=@41RVy4N!REcCHFh!ym z$-@jzpBP4>rcY!eUoeAGD~1Nt)QYUZjCC?F?drn41r0c~vU;OPmFGmqC-?gt_xH)(k8D2Jh5aMHCU;qHeJPP*! 
diff --git a/tests/static/calcjob/container/loose/33/7b794ce718a09a620090d53541c3b4640a64133bbee2188444810cd3169f81 b/tests/static/calcjob/container/loose/33/7b794ce718a09a620090d53541c3b4640a64133bbee2188444810cd3169f81 deleted file mode 100644 index 654d526942..0000000000 --- a/tests/static/calcjob/container/loose/33/7b794ce718a09a620090d53541c3b4640a64133bbee2188444810cd3169f81 +++ /dev/null @@ -1 +0,0 @@ -2 3 diff --git a/tests/static/calcjob/container/loose/59/ad1048cf9741febe6085cdbd021d8395e9ef993fec33cbb6c34a73d6cf5372 b/tests/static/calcjob/container/loose/59/ad1048cf9741febe6085cdbd021d8395e9ef993fec33cbb6c34a73d6cf5372 deleted file mode 100644 index 6a5c41cdf4..0000000000 --- a/tests/static/calcjob/container/loose/59/ad1048cf9741febe6085cdbd021d8395e9ef993fec33cbb6c34a73d6cf5372 +++ /dev/null @@ -1 +0,0 @@ -{"uuid": "9d3fda4f-6782-4441-a276-b8965aa3f97f", "codes_info": [{"cmdline_params": ["/home/candersen/virtualenv/new_tests/aiida/aiida_core/.ci/add.sh", "-in", "aiida.in"], "stdout_name": "aiida.out", "code_uuid": "8052fd27-f3ee-46cb-b23e-4ce5e446483e"}], "retrieve_list": ["aiida.out", "_scheduler-stdout.txt", "_scheduler-stderr.txt"], "local_copy_list": [], "remote_copy_list": []} \ No newline at end of file diff --git a/tests/static/calcjob/container/loose/95/f819ef2ea203bed2cacaf64df320ca1ce7d4a4d3a58f4f7920487a5ec7f532 b/tests/static/calcjob/container/loose/95/f819ef2ea203bed2cacaf64df320ca1ce7d4a4d3a58f4f7920487a5ec7f532 deleted file mode 100644 index cc936f6430..0000000000 --- a/tests/static/calcjob/container/loose/95/f819ef2ea203bed2cacaf64df320ca1ce7d4a4d3a58f4f7920487a5ec7f532 +++ /dev/null @@ -1 +0,0 @@ -{"shebang": "#!/bin/bash", "submit_as_hold": false, "rerunnable": false, "job_environment": {}, "job_name": "aiida-30", "sched_output_path": "_scheduler-stdout.txt", "sched_error_path": "_scheduler-stderr.txt", "sched_join_files": false, "prepend_text": "", "append_text": "", "job_resource": {"num_cores_per_machine": null, 
"num_cores_per_mpiproc": null, "num_machines": 1, "num_mpiprocs_per_machine": 1}, "codes_info": [{"cmdline_params": ["/home/candersen/virtualenv/new_tests/aiida/aiida_core/.ci/add.sh", "-in", "aiida.in"], "stdout_name": "aiida.out", "code_uuid": "8052fd27-f3ee-46cb-b23e-4ce5e446483e"}], "codes_run_mode": 0, "import_sys_environment": true} \ No newline at end of file diff --git a/tests/static/calcjob/container/loose/d3/e14d6651a535b4689e0605c6b814f542bf6bb88d29ac4213e7bf6afce0501e b/tests/static/calcjob/container/loose/d3/e14d6651a535b4689e0605c6b814f542bf6bb88d29ac4213e7bf6afce0501e deleted file mode 100644 index 2012bdf8a1..0000000000 --- a/tests/static/calcjob/container/loose/d3/e14d6651a535b4689e0605c6b814f542bf6bb88d29ac4213e7bf6afce0501e +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash -exec > _scheduler-stdout.txt -exec 2> _scheduler-stderr.txt - - -'/home/candersen/virtualenv/new_tests/aiida/aiida_core/.ci/add.sh' '-in' 'aiida.in' > 'aiida.out' diff --git a/tests/static/calcjob/container/loose/e1/49222b6bf7570a66c6d9d63c5304c00bf94a8e6b6a0db33c940d1f49667879 b/tests/static/calcjob/container/loose/e1/49222b6bf7570a66c6d9d63c5304c00bf94a8e6b6a0db33c940d1f49667879 deleted file mode 100644 index 524686cd873d047db0975e2046a31789e73ba2b8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 28 jcmb2|=3saps+7R=zxdmCUXCJOJ~oC>fA)S31_lNIhE@l> diff --git a/tests/static/calcjob/container/loose/e3/b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 b/tests/static/calcjob/container/loose/e3/b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/static/calcjob/container/loose/f0/b5c2c2211c8d67ed15e75e656c7862d086e9245420892a7de62cd9ec582a06 b/tests/static/calcjob/container/loose/f0/b5c2c2211c8d67ed15e75e656c7862d086e9245420892a7de62cd9ec582a06 deleted file mode 100644 index 7ed6ff82de..0000000000 --- 
a/tests/static/calcjob/container/loose/f0/b5c2c2211c8d67ed15e75e656c7862d086e9245420892a7de62cd9ec582a06 +++ /dev/null @@ -1 +0,0 @@ -5 diff --git a/tests/static/calcjob/container/packs.idx b/tests/static/calcjob/container/packs.idx deleted file mode 100644 index e47a083397916957ba6c5f4b039976b3c5984639..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12288 zcmeI0O>Y}T7{_+#B7gzkpY*fa4Kdu7W!kI=$~cEL)pfTjlrf&y9T)mR~G8;9`S={_R11 zSz%*+>eGG)^(XZNj>ZRYbT}B?fE()<((v>CWcP0W%{h1S`02ASn*|SVkH>?$KYC;7 zZ2E1uQgizx##xweUpe_GJo$2deC5IBaRS^o$^8kk9m+h2risdf_S7v(1dYDkkZ6R9|Un; z2gemLNPx8#7`B#jlu9c#b3tNdkT!xKq><;8Gwi5kf*~UqrqtRH2v&+QrZ94zVxgnL z&9z2zcx@>cFw=w(D{PcprlrCOuf3p-Q^XuXHfm;65LOuEEu~H~JaNXED436wAZKv$?U`Yo8m=OXQWM0KG(0#NIVIpF7=f4j z&4pkEswz;69AiYQ=&(y>O|+3YrL~AQQ6tg>hC>QlTTPiWOmnTVm5LLqI0vI~W9g1- z<8j*0MJ*YQ!3Kd8ciwtLuvb*`ki2V6V{j=_YlWlCNa}@4<|HX!6u*`)hG0n#G3%w2 zAy^eH=N55~jg<*Ia+V90FcPI@iY=boO+g3ixKH^JBw%j0i?i)v`O064*3*;@F9gRX z3{^{PFRImOee%fVXac0XyX{cG{`h!q6>&49E!8zY$KiKs{LEh{kpT+{kZdE z_vg-o&fjSuw@e@t$OJNhOdu1;1Tuk4AQQ+0GJ*d#fx3CxE>|rgR11|=YX_}`%Bm%U lYYUZCD+Oz>ysTIrXf9M%tqU|3DytR)z@3!o^;K&C{{R6UxB&nF diff --git a/tests/static/calcjob/container/packs/0 b/tests/static/calcjob/container/packs/0 deleted file mode 100644 index 5bb5ef7be5829ae1ca369b429624d03b153d8577..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1289 zcmds1y^_-~5DpZOG2{wf0HZ6?;5f0JKMG!f>SmZ>7$Hk*tQ$-6NOH(cCe5`JydAH= z&{6PHf@H@W2~g3|$kOg=_xpA~>-f8wBIHR03j3Bm-?g#v86}Ln@G)Si&1@!B`U$&Kc=eOqGUsLU~LWo0xJOjw3GJ z+Qv-$)`2;k9Ex!mJbXJG+<%T<-TrRAej5$-=Nb%kFx%8w7%T2<1p10WThyn+^p)WpEwNTyTqt!KJGFhze2}Xff+XN|IN;3%&7D<=9DwYb6CpPi41vvEF>tuPB|i z$-QK;>s{SgH#%E-0LP>bLcWQ1ES3ICaNhxsyG?zmxWomoz)PYBS!<|4GN-D}ao4=oiXOWUcRZNc;fCnqPIJzD z+x{<+cWj(!uy8-GYr)h} Date: Thu, 24 Feb 2022 22:55:29 +0100 Subject: [PATCH 12/26] Update arithmetic.add_old.aiida --- tests/static/calcjob/arithmetic.add_old.aiida | Bin 6721 -> 8171 bytes 1 file changed, 0 
insertions(+), 0 deletions(-) diff --git a/tests/static/calcjob/arithmetic.add_old.aiida b/tests/static/calcjob/arithmetic.add_old.aiida index 7c3c1f985a844d40b08383601d580bfc41ceea40..b5e5b01959693b2435dac7c719b0002100fb9688 100644 GIT binary patch delta 2348 zcmX?T^4i`bz?+#xgn z`g-wLteFS20))}5Nlp+=PESZm_~7dk_JKdFBcOq4f&{a=fHdPrEd{pvkM9ixSY@1@ znilv>=+ks~)3~ffQ1RUSIrCRfj|iABdDZmUGv+Xhq$HlW@hRcbu6RG`?}j#d#g89Y zxy{-8f&u7Yc8;uO&Ly!xCxV=b@W7dE5g|YUFi2ryVBiIMAUCxnF(t7iQ7@}FKMx!} z^%HXX{%Qsdjv*yRDeiaimDt)A2K2u)x;%@V~;%TN6Vr6d3 zzi-*TQT-Il`;|E}9&kQxbztAI;Bv|JOEO%VS~H3dOygb~aJ0GWL*Jb|<|WKBvv<7- z(z?j^&)eTy{@t2-UB$FFGS2zYHx8}LZrRs%Fij#uqRv^F+q65(M^?GpWb?;UJFne) zA#&luzlL)=)D8PXT!PcBOWjtQb8DpX<_~@G*$tjy((v2T&-Whfh5ufVhB(LIO30F~bTJOr%FLuL9dA$(<4%Onr)q zjsjv4$Hf)IoP!VXh>9m9Oz^5dcmArkj>ZYktEWA6^fYz-d_6sNJVW_7+GZVOQi+h1 zc;fJ?NO*Ghr{cy-N2`vmv|M>ob>`1X$Ii-^D+9JFh#MOjoLI0@_q5JApHrS+&v<*T zO-NYrOMM;4NQ zu^Y+)lm!{uBjFZI$fd?X2G2kx4m-yW)v4iZK%F4n*v)N%nky+mgkv?Cl7Nm?@!uwt z0MrK3irr9P(10DghQ+O(pktpxoEmOv%M5ZRNN<2QBa;X-q9{f#NI^w03^X)?SXfF- zkT9fRu16`E(G5l};6UXt3^X)`3o?*bDkEC~EGg?lkP8V=Q3(SLjp9NKSPD)&OP$%T^cK7`T<+yH3xjGPaF83V~cVbO^)q??DF1yRl86raq;?5;_)11C>l zLiJ&x#N=)X(p`j{&rn^&B{^9_f)p1WltgvWU&+awaJoCfRQNPXiqPou_*+0|QO_RG* z5$5&W?&RU zqX6pHRJdn>GGNfaC@Y5Mb#%SxL5k4&4HzAW0EX)WB`bcIDh7rIV6=l1nGRe9)-;B! 
z;}|0-#p$AJKud9O9Sw|~OyKl~p#v@LA?f%8)B#M17>)udL8L^u2B0^zSU~9#Lj!8M eMAp#H0!pP~0p6^jYy$K&2ZJgz0|T2lhz9`s!_dqC From fed42e6af9ac1c381695a825644fbc093531e565 Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Fri, 25 Feb 2022 14:02:54 +0100 Subject: [PATCH 13/26] Implement alembic migrations --- .../sqlite_zip/migrations/legacy/utils.py | 75 ------ .../migrations/legacy/v04_to_v05.py | 22 +- .../migrations/legacy/v05_to_v06.py | 2 +- .../migrations/legacy/v06_to_v07.py | 2 +- .../migrations/legacy/v07_to_v08.py | 2 +- .../migrations/legacy/v08_to_v09.py | 2 +- .../migrations/legacy/v09_to_v10.py | 2 +- .../migrations/legacy/v10_to_v11.py | 2 +- .../migrations/legacy/v11_to_v12.py | 2 +- .../sqlite_zip/migrations/legacy_to_main.py | 107 ++++----- .../sqlite_zip/migrations/script.py.mako | 2 + aiida/storage/sqlite_zip/migrations/utils.py | 44 ++++ ...n_0001_initial.py => main_0000_initial.py} | 34 +-- .../migrations/versions/main_0001.py | 30 +++ aiida/storage/sqlite_zip/migrator.py | 218 ++++++++++++++---- tests/cmdline/commands/test_archive_import.py | 47 ++-- .../migrate/export_main_0000_simple.aiida | Bin 0 -> 49277 bytes tests/tools/archive/migration/conftest.py | 2 +- .../archive/migration/test_legacy_funcs.py | 2 +- .../archive/migration/test_v05_to_v06.py | 2 +- 20 files changed, 375 insertions(+), 224 deletions(-) delete mode 100644 aiida/storage/sqlite_zip/migrations/legacy/utils.py rename aiida/storage/sqlite_zip/migrations/versions/{main_0001_initial.py => main_0000_initial.py} (85%) create mode 100644 aiida/storage/sqlite_zip/migrations/versions/main_0001.py create mode 100644 tests/static/export/migrate/export_main_0000_simple.aiida diff --git a/aiida/storage/sqlite_zip/migrations/legacy/utils.py b/aiida/storage/sqlite_zip/migrations/legacy/utils.py deleted file mode 100644 index fecd2d9bf2..0000000000 --- a/aiida/storage/sqlite_zip/migrations/legacy/utils.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- 
-########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Utility functions for migration of export-files.""" - -from aiida.common import exceptions - - -def verify_metadata_version(metadata, version=None): - """Utility function to verify that the metadata has the correct version number. - - If no version number is passed, it will just extract the version number and return it. - - :param metadata: the content of an export archive metadata.json file - :param version: string version number that the metadata is expected to have - """ - try: - metadata_version = metadata['export_version'] - except KeyError: - raise exceptions.StorageMigrationError("metadata is missing the 'export_version' key") - - if version is None: - return metadata_version - - if metadata_version != version: - raise exceptions.StorageMigrationError( - f'expected archive file with version {version} but found version {metadata_version}' - ) - - return None - - -def update_metadata(metadata, version): - """Update the metadata with a new version number and a notification of the conversion that was executed. 
- - :param metadata: the content of an export archive metadata.json file - :param version: string version number that the updated metadata should get - """ - from aiida import get_version - - old_version = metadata['export_version'] - conversion_info = metadata.get('conversion_info', []) - - conversion_message = f'Converted from version {old_version} to {version} with AiiDA v{get_version()}' - conversion_info.append(conversion_message) - - metadata['aiida_version'] = get_version() - metadata['export_version'] = version - metadata['conversion_info'] = conversion_info - - -def remove_fields(metadata, data, entities, fields): - """Remove fields under entities from data.json and metadata.json. - - :param metadata: the content of an export archive metadata.json file - :param data: the content of an export archive data.json file - :param entities: list of ORM entities - :param fields: list of fields to be removed from the export archive files - """ - # data.json - for entity in entities: - for content in data['export_data'].get(entity, {}).values(): - for field in fields: - content.pop(field, None) - - # metadata.json - for entity in entities: - for field in fields: - metadata['all_fields_info'][entity].pop(field, None) diff --git a/aiida/storage/sqlite_zip/migrations/legacy/v04_to_v05.py b/aiida/storage/sqlite_zip/migrations/legacy/v04_to_v05.py index 35107e48c8..17402b4e85 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy/v04_to_v05.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v04_to_v05.py @@ -24,7 +24,27 @@ Where id is a SQLA id and migration-name is the name of the particular migration. """ # pylint: disable=invalid-name -from .utils import remove_fields, update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module + + +def remove_fields(metadata, data, entities, fields): + """Remove fields under entities from data.json and metadata.json. 
+ + :param metadata: the content of an export archive metadata.json file + :param data: the content of an export archive data.json file + :param entities: list of ORM entities + :param fields: list of fields to be removed from the export archive files + """ + # data.json + for entity in entities: + for content in data['export_data'].get(entity, {}).values(): + for field in fields: + content.pop(field, None) + + # metadata.json + for entity in entities: + for field in fields: + metadata['all_fields_info'][entity].pop(field, None) def migration_drop_node_columns_nodeversion_public(metadata, data): diff --git a/aiida/storage/sqlite_zip/migrations/legacy/v05_to_v06.py b/aiida/storage/sqlite_zip/migrations/legacy/v05_to_v06.py index 0f3eb3bc15..934c03d4c7 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy/v05_to_v06.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v05_to_v06.py @@ -26,7 +26,7 @@ # pylint: disable=invalid-name from typing import Union -from .utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module def migrate_deserialized_datetime(data, conversion): diff --git a/aiida/storage/sqlite_zip/migrations/legacy/v06_to_v07.py b/aiida/storage/sqlite_zip/migrations/legacy/v06_to_v07.py index 6cd4bd5aa2..c76d2f8e0c 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy/v06_to_v07.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v06_to_v07.py @@ -24,7 +24,7 @@ Where id is a SQLA id and migration-name is the name of the particular migration. 
""" # pylint: disable=invalid-name -from .utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module def data_migration_legacy_process_attributes(data): diff --git a/aiida/storage/sqlite_zip/migrations/legacy/v07_to_v08.py b/aiida/storage/sqlite_zip/migrations/legacy/v07_to_v08.py index 14e46658b0..15ea832041 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy/v07_to_v08.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v07_to_v08.py @@ -24,7 +24,7 @@ Where id is a SQLA id and migration-name is the name of the particular migration. """ # pylint: disable=invalid-name -from .utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module def migration_default_link_label(data: dict): diff --git a/aiida/storage/sqlite_zip/migrations/legacy/v08_to_v09.py b/aiida/storage/sqlite_zip/migrations/legacy/v08_to_v09.py index b206ea30e0..c3c12d616b 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy/v08_to_v09.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v08_to_v09.py @@ -24,7 +24,7 @@ Where id is a SQLA id and migration-name is the name of the particular migration. 
""" # pylint: disable=invalid-name -from .utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module def migration_dbgroup_type_string(data): diff --git a/aiida/storage/sqlite_zip/migrations/legacy/v09_to_v10.py b/aiida/storage/sqlite_zip/migrations/legacy/v09_to_v10.py index 578dc896b4..a005837005 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy/v09_to_v10.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v09_to_v10.py @@ -9,7 +9,7 @@ ########################################################################### """Migration from v0.9 to v0.10, used by `verdi export migrate` command.""" # pylint: disable=invalid-name,unused-argument -from .utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module def migrate_v9_to_v10(metadata: dict, data: dict) -> None: diff --git a/aiida/storage/sqlite_zip/migrations/legacy/v10_to_v11.py b/aiida/storage/sqlite_zip/migrations/legacy/v10_to_v11.py index a0af93f926..011a83d761 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy/v10_to_v11.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v10_to_v11.py @@ -11,7 +11,7 @@ This migration applies the name change of the ``Computer`` attribute ``name`` to ``label``. 
""" -from .utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module def migrate_v10_to_v11(metadata: dict, data: dict) -> None: diff --git a/aiida/storage/sqlite_zip/migrations/legacy/v11_to_v12.py b/aiida/storage/sqlite_zip/migrations/legacy/v11_to_v12.py index 5bd2531ea0..fd6efd27ad 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy/v11_to_v12.py +++ b/aiida/storage/sqlite_zip/migrations/legacy/v11_to_v12.py @@ -11,7 +11,7 @@ This migration is necessary after the `core.` prefix was added to entry points shipped with `aiida-core`. """ -from .utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module +from ..utils import update_metadata, verify_metadata_version # pylint: disable=no-name-in-module MAPPING_DATA = { 'data.array.ArrayData.': 'data.core.array.ArrayData.', diff --git a/aiida/storage/sqlite_zip/migrations/legacy_to_main.py b/aiida/storage/sqlite_zip/migrations/legacy_to_main.py index e5ed01ad74..c820a2cc9a 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy_to_main.py +++ b/aiida/storage/sqlite_zip/migrations/legacy_to_main.py @@ -11,7 +11,6 @@ from contextlib import contextmanager from datetime import datetime from hashlib import sha256 -import json from pathlib import Path, PurePosixPath import shutil import tarfile @@ -28,8 +27,8 @@ from aiida.storage.log import MIGRATE_LOGGER from . 
import v1_db_schema as v1_schema -from ..utils import create_sqla_engine -from .legacy.utils import update_metadata +from ..utils import DB_FILENAME, REPO_FOLDER, create_sqla_engine +from .utils import update_metadata _NODE_ENTITY_NAME = 'Node' _GROUP_ENTITY_NAME = 'Group' @@ -65,16 +64,18 @@ _LOG_ENTITY_NAME: v1_schema.DbLog, } -_META_FILENAME = 'metadata.json' -_DB_FILENAME = 'db.sqlite3' -_REPO_FOLDER = 'repo' - -MIGRATED_TO_REVISION = 'main_0001' +LEGACY_TO_MAIN_REVISION = 'main_0000' def perform_v1_migration( # pylint: disable=too-many-locals - inpath: Path, working: Path, archive_name: str, is_tar: bool, metadata: dict, data: dict, compression: int -) -> None: + inpath: Path, + working: Path, + new_zip: ZipPath, + central_dir: Dict[str, Any], + is_tar: bool, + metadata: dict, + data: dict, +) -> Path: """Perform the repository and JSON to SQLite migration. 1. Iterate though the repository paths in the archive @@ -84,10 +85,11 @@ def perform_v1_migration( # pylint: disable=too-many-locals :param inpath: the input path to the old archive :param metadata: the metadata to migrate :param data: the data to migrate + + :returns:the path to the sqlite database file """ MIGRATE_LOGGER.report('Initialising new archive...') node_repos: Dict[str, List[Tuple[str, Optional[str]]]] = {} - central_dir: Dict[str, Any] = {} if is_tar: # we cannot stream from a tar file performantly, so we extract it to disk first @contextmanager @@ -101,53 +103,44 @@ def in_archive_context(_inpath): shutil.rmtree(temp_folder) else: in_archive_context = ZipPath # type: ignore - with ZipPath( - working / archive_name, - mode='w', - compresslevel=compression, - name_to_info=central_dir, - info_order=(_META_FILENAME, _DB_FILENAME) - ) as new_path: - with in_archive_context(inpath) as path: - length = sum(1 for _ in path.glob('**/*')) - base_parts = len(path.parts) - with get_progress_reporter()(desc='Converting repo', total=length) as progress: - for subpath in path.glob('**/*'): - 
progress.update() - parts = subpath.parts[base_parts:] - # repository file are stored in the legacy archive as `nodes/uuid[0:2]/uuid[2:4]/uuid[4:]/path/...` - if len(parts) < 6 or parts[0] != 'nodes' or parts[4] not in ('raw_input', 'path'): - continue - uuid = ''.join(parts[1:4]) - posix_rel = PurePosixPath(*parts[5:]) - hashkey = None - if subpath.is_file(): + + with in_archive_context(inpath) as path: + length = sum(1 for _ in path.glob('**/*')) + base_parts = len(path.parts) + with get_progress_reporter()(desc='Converting repo', total=length) as progress: + for subpath in path.glob('**/*'): + progress.update() + parts = subpath.parts[base_parts:] + # repository file are stored in the legacy archive as `nodes/uuid[0:2]/uuid[2:4]/uuid[4:]/path/...` + if len(parts) < 6 or parts[0] != 'nodes' or parts[4] not in ('raw_input', 'path'): + continue + uuid = ''.join(parts[1:4]) + posix_rel = PurePosixPath(*parts[5:]) + hashkey = None + if subpath.is_file(): + with subpath.open('rb') as handle: + hashkey = chunked_file_hash(handle, sha256) + if f'{REPO_FOLDER}/{hashkey}' not in central_dir: with subpath.open('rb') as handle: - hashkey = chunked_file_hash(handle, sha256) - if f'{_REPO_FOLDER}/{hashkey}' not in central_dir: - with subpath.open('rb') as handle: - with (new_path / f'{_REPO_FOLDER}/{hashkey}').open(mode='wb') as handle2: - shutil.copyfileobj(handle, handle2) - node_repos.setdefault(uuid, []).append((posix_rel.as_posix(), hashkey)) - MIGRATE_LOGGER.report(f'Unique files written: {len(central_dir)}') - - _json_to_sqlite(working / _DB_FILENAME, data, node_repos) - - MIGRATE_LOGGER.report('Finalising archive') - with (working / _DB_FILENAME).open('rb') as handle: - with (new_path / _DB_FILENAME).open(mode='wb') as handle2: - shutil.copyfileobj(handle, handle2) - - # remove legacy keys from metadata and store - metadata.pop('unique_identifiers', None) - metadata.pop('all_fields_info', None) - # remove legacy key nesting - metadata['creation_parameters'] = 
metadata.pop('export_parameters', {}) - metadata['compression'] = compression - metadata['key_format'] = 'sha256' - metadata['mtime'] = datetime.now().isoformat() - update_metadata(metadata, MIGRATED_TO_REVISION) - (new_path / _META_FILENAME).write_text(json.dumps(metadata)) + with (new_zip / f'{REPO_FOLDER}/{hashkey}').open(mode='wb') as handle2: + shutil.copyfileobj(handle, handle2) + node_repos.setdefault(uuid, []).append((posix_rel.as_posix(), hashkey)) + MIGRATE_LOGGER.report(f'Unique files written: {len(central_dir)}') + + # convert the JSON database to SQLite + _json_to_sqlite(working / DB_FILENAME, data, node_repos) + + # remove legacy keys from metadata and store + metadata.pop('unique_identifiers', None) + metadata.pop('all_fields_info', None) + # remove legacy key nesting + metadata['creation_parameters'] = metadata.pop('export_parameters', {}) + metadata['key_format'] = 'sha256' + + # update the version in the metadata + update_metadata(metadata, LEGACY_TO_MAIN_REVISION) + + return working / DB_FILENAME def _json_to_sqlite( diff --git a/aiida/storage/sqlite_zip/migrations/script.py.mako b/aiida/storage/sqlite_zip/migrations/script.py.mako index 2c0156303a..b0e41c2687 100644 --- a/aiida/storage/sqlite_zip/migrations/script.py.mako +++ b/aiida/storage/sqlite_zip/migrations/script.py.mako @@ -17,8 +17,10 @@ depends_on = ${repr(depends_on)} def upgrade(): + """Migrations for the upgrade.""" ${upgrades if upgrades else "pass"} def downgrade(): + """Migrations for the downgrade.""" ${downgrades if downgrades else "pass"} diff --git a/aiida/storage/sqlite_zip/migrations/utils.py b/aiida/storage/sqlite_zip/migrations/utils.py index 8dc669d86b..dfd72ec6ca 100644 --- a/aiida/storage/sqlite_zip/migrations/utils.py +++ b/aiida/storage/sqlite_zip/migrations/utils.py @@ -16,9 +16,53 @@ from archive_path import TarPath, ZipPath +from aiida.common import exceptions from aiida.common.progress_reporter import create_callback, get_progress_reporter +def 
update_metadata(metadata, version): + """Update the metadata with a new version number and a notification of the conversion that was executed. + + :param metadata: the content of an export archive metadata.json file + :param version: string version number that the updated metadata should get + """ + from aiida import get_version + + old_version = metadata['export_version'] + conversion_info = metadata.get('conversion_info', []) + + conversion_message = f'Converted from version {old_version} to {version} with AiiDA v{get_version()}' + conversion_info.append(conversion_message) + + metadata['aiida_version'] = get_version() + metadata['export_version'] = version + metadata['conversion_info'] = conversion_info + + +def verify_metadata_version(metadata, version=None): + """Utility function to verify that the metadata has the correct version number. + + If no version number is passed, it will just extract the version number and return it. + + :param metadata: the content of an export archive metadata.json file + :param version: string version number that the metadata is expected to have + """ + try: + metadata_version = metadata['export_version'] + except KeyError: + raise exceptions.StorageMigrationError("metadata is missing the 'export_version' key") + + if version is None: + return metadata_version + + if metadata_version != version: + raise exceptions.StorageMigrationError( + f'expected archive file with version {version} but found version {metadata_version}' + ) + + return None + + def copy_zip_to_zip( inpath: Path, outpath: Path, diff --git a/aiida/storage/sqlite_zip/migrations/versions/main_0001_initial.py b/aiida/storage/sqlite_zip/migrations/versions/main_0000_initial.py similarity index 85% rename from aiida/storage/sqlite_zip/migrations/versions/main_0001_initial.py rename to aiida/storage/sqlite_zip/migrations/versions/main_0000_initial.py index 8eadfb649d..65b236ab0a 100644 --- a/aiida/storage/sqlite_zip/migrations/versions/main_0001_initial.py +++ 
b/aiida/storage/sqlite_zip/migrations/versions/main_0000_initial.py @@ -10,16 +10,16 @@ # pylint: disable=invalid-name,no-member """Initial main branch schema -Revision ID: main_0001 +Revision ID: main_0000 Revises: Create Date: 2021-02-02 """ from alembic import op import sqlalchemy as sa -from sqlalchemy.dialects import postgresql +from sqlalchemy.dialects.sqlite import JSON -revision = 'main_0001' +revision = 'main_0000' down_revision = None branch_labels = ('main',) depends_on = None @@ -30,13 +30,13 @@ def upgrade(): op.create_table( 'db_dbcomputer', sa.Column('id', sa.Integer(), nullable=False, primary_key=True), - sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('uuid', sa.CHAR(32), nullable=False, unique=True), sa.Column('label', sa.String(length=255), nullable=False, unique=True), sa.Column('hostname', sa.String(length=255), nullable=False), sa.Column('description', sa.Text(), nullable=False), sa.Column('scheduler_type', sa.String(length=255), nullable=False), sa.Column('transport_type', sa.String(length=255), nullable=False), - sa.Column('metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('metadata', JSON(), nullable=False), ) op.create_table( 'db_dbuser', @@ -51,8 +51,8 @@ def upgrade(): sa.Column('id', sa.Integer(), nullable=False, primary_key=True), sa.Column('aiidauser_id', sa.Integer(), nullable=False, index=True), sa.Column('dbcomputer_id', sa.Integer(), nullable=False, index=True), - sa.Column('metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=False), - sa.Column('auth_params', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('metadata', JSON(), nullable=False), + sa.Column('auth_params', JSON(), nullable=False), sa.Column('enabled', sa.Boolean(), nullable=False), sa.ForeignKeyConstraint( ['aiidauser_id'], @@ -73,12 +73,12 @@ def upgrade(): op.create_table( 'db_dbgroup', sa.Column('id', sa.Integer(), nullable=False, primary_key=True), - 
sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('uuid', sa.CHAR(32), nullable=False, unique=True), sa.Column('label', sa.String(length=255), nullable=False, index=True), sa.Column('type_string', sa.String(length=255), nullable=False, index=True), sa.Column('time', sa.DateTime(timezone=True), nullable=False), sa.Column('description', sa.Text(), nullable=False), - sa.Column('extras', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('extras', JSON(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=False, index=True), sa.ForeignKeyConstraint( ['user_id'], @@ -93,16 +93,16 @@ def upgrade(): op.create_table( 'db_dbnode', sa.Column('id', sa.Integer(), nullable=False, primary_key=True), - sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('uuid', sa.CHAR(32), nullable=False, unique=True), sa.Column('node_type', sa.String(length=255), nullable=False, index=True), sa.Column('process_type', sa.String(length=255), nullable=True, index=True), sa.Column('label', sa.String(length=255), nullable=False, index=True), sa.Column('description', sa.Text(), nullable=False), sa.Column('ctime', sa.DateTime(timezone=True), nullable=False, index=True), sa.Column('mtime', sa.DateTime(timezone=True), nullable=False, index=True), - sa.Column('attributes', postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column('extras', postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column('repository_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('attributes', JSON(), nullable=True), + sa.Column('extras', JSON(), nullable=True), + sa.Column('repository_metadata', JSON(), nullable=False), sa.Column('dbcomputer_id', sa.Integer(), nullable=True, index=True), sa.Column('user_id', sa.Integer(), nullable=False, index=True), sa.ForeignKeyConstraint( @@ -124,7 +124,7 @@ def upgrade(): op.create_table( 'db_dbcomment', sa.Column('id', 
sa.Integer(), nullable=False, primary_key=True), - sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('uuid', sa.CHAR(32), nullable=False, unique=True), sa.Column('dbnode_id', sa.Integer(), nullable=False, index=True), sa.Column('ctime', sa.DateTime(timezone=True), nullable=False), sa.Column('mtime', sa.DateTime(timezone=True), nullable=False), @@ -175,13 +175,13 @@ def upgrade(): op.create_table( 'db_dblog', sa.Column('id', sa.Integer(), nullable=False, primary_key=True), - sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('uuid', sa.CHAR(32), nullable=False, unique=True), sa.Column('time', sa.DateTime(timezone=True), nullable=False), sa.Column('loggername', sa.String(length=255), nullable=False, index=True), sa.Column('levelname', sa.String(length=50), nullable=False, index=True), sa.Column('dbnode_id', sa.Integer(), nullable=False, index=True), sa.Column('message', sa.Text(), nullable=False), - sa.Column('metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('metadata', JSON(), nullable=False), sa.ForeignKeyConstraint( ['dbnode_id'], ['db_dbnode.id'], @@ -194,4 +194,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" - raise NotImplementedError('Downgrade of main_0001.') + raise NotImplementedError('Downgrade of main_0000.') diff --git a/aiida/storage/sqlite_zip/migrations/versions/main_0001.py b/aiida/storage/sqlite_zip/migrations/versions/main_0001.py new file mode 100644 index 0000000000..bf266a18db --- /dev/null +++ b/aiida/storage/sqlite_zip/migrations/versions/main_0001.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Bring schema inline with psql_dos main_0001 + +Revision ID: main_0001 +Revises: +Create Date: 2021-02-02 + +""" +revision = 'main_0001' +down_revision = 'main_0000' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of main_0001.') diff --git a/aiida/storage/sqlite_zip/migrator.py b/aiida/storage/sqlite_zip/migrator.py index 4e3e25916d..45997e26ff 100644 --- a/aiida/storage/sqlite_zip/migrator.py +++ b/aiida/storage/sqlite_zip/migrator.py @@ -7,18 +7,24 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""AiiDA archive migrator implementation.""" +"""Versioning and migration implementation for the sqlite_zip format.""" +import contextlib +from datetime import datetime import os from pathlib import Path import shutil import tarfile import tempfile -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, Iterator, List, Optional, Union import zipfile +from alembic.command import upgrade from alembic.config import Config +from alembic.runtime.environment import EnvironmentContext +from alembic.runtime.migration import MigrationContext, MigrationInfo from alembic.script import ScriptDirectory -from archive_path import open_file_in_tar, open_file_in_zip +from archive_path import ZipPath, extract_file_in_zip, open_file_in_tar, open_file_in_zip +from sqlalchemy.future.engine import Connection from aiida.common 
import json from aiida.common.exceptions import CorruptStorage, IncompatibleStorageSchema, StorageMigrationError @@ -26,29 +32,20 @@ from aiida.storage.log import MIGRATE_LOGGER from .migrations.legacy import FINAL_LEGACY_VERSION, LEGACY_MIGRATE_FUNCTIONS -from .migrations.legacy_to_main import MIGRATED_TO_REVISION, perform_v1_migration -from .migrations.utils import copy_tar_to_zip, copy_zip_to_zip -from .utils import read_version - - -def _alembic_config() -> Config: - """Return an instance of an Alembic `Config`.""" - config = Config() - config.set_main_option('script_location', str(Path(os.path.realpath(__file__)).parent / 'migrations')) - return config +from .migrations.legacy_to_main import LEGACY_TO_MAIN_REVISION, perform_v1_migration +from .migrations.utils import copy_tar_to_zip, copy_zip_to_zip, update_metadata +from .utils import DB_FILENAME, META_FILENAME, REPO_FOLDER, create_sqla_engine, read_version def get_schema_version_head() -> str: """Return the head schema version for this storage, i.e. 
the latest schema this storage can be migrated to.""" - return ScriptDirectory.from_config(_alembic_config()).revision_map.get_current_head('main') + return _alembic_script().revision_map.get_current_head('main') def list_versions() -> List[str]: """Return all available schema versions (oldest to latest).""" legacy_versions = list(LEGACY_MIGRATE_FUNCTIONS) + [FINAL_LEGACY_VERSION] - alembic_versions = [ - entry.revision for entry in reversed(list(ScriptDirectory.from_config(_alembic_config()).walk_revisions())) - ] + alembic_versions = [entry.revision for entry in reversed(list(_alembic_script().walk_revisions()))] return legacy_versions + alembic_versions @@ -72,7 +69,7 @@ def validate_storage(inpath: Path) -> None: ) -def migrate( # pylint: disable=too-many-branches,too-many-statements +def migrate( # pylint: disable=too-many-branches,too-many-statements,too-many-locals inpath: Union[str, Path], outpath: Union[str, Path], version: str, @@ -80,17 +77,40 @@ def migrate( # pylint: disable=too-many-branches,too-many-statements force: bool = False, compression: int = 6 ) -> None: - """Migrate an archive to a specific version. - - :param path: archive path + """Migrate an sqlite_zip storage file to a specific version. + + Historically, this format could be a zip or a tar file, + contained the database as a bespoke JSON format, and the repository files in the "legacy" per-node format. + For these versions, we first migrate the JSON database to the final legacy schema, + then we convert this file to the SQLite database, whilst sequentially migrating the repository files. + + Once any legacy migrations have been performed, we can then migrate the SQLite database to the final schema, + using alembic. + + Note that, to minimise disk space usage, we never fully extract/uncompress the input file + (except when migrating from a legacy tar file, whereby we cannot extract individual files): + + 1. The sqlite database is extracted to a temporary location and migrated + 2. 
A new zip file is opened, within a temporary folder + 3. The repository files are "streamed" directly between the input file and the new zip file + 4. The sqlite database and metadata JSON are written to the new zip file + 5. The new zip file is closed (which writes its final central directory) + 6. The new zip file is moved to the output location, removing any existing file if `force=True` + + :param path: Path to the file + :param outpath: Path to output the migrated file + :param version: Target version + :param force: If True, overwrite the output file if it exists + :param compression: Compression level for the output file """ inpath = Path(inpath) outpath = Path(outpath) + # halt immediately, if we could not write to the output file if outpath.exists() and not force: - raise IOError('Output path already exists and force=False') + raise StorageMigrationError('Output path already exists and force=False') if outpath.exists() and not outpath.is_file(): - raise IOError('Existing output path is not a file') + raise StorageMigrationError('Existing output path is not a file') # the file should be either a tar (legacy only) or zip file if tarfile.is_tarfile(str(inpath)): @@ -108,10 +128,13 @@ def migrate( # pylint: disable=too-many-branches,too-many-statements except IOError as exc: raise CorruptStorage(f'No input file could not be read: {exc}') from exc - # obtain the current version + # obtain the current version from the metadata if 'export_version' not in metadata: raise CorruptStorage('No export_version found in metadata.json') current_version = metadata['export_version'] + # update the modified time of the file and the compression + metadata['mtime'] = datetime.now().isoformat() + metadata['compression'] = compression # check versions are valid # versions 0.1, 0.2, 0.3 are no longer supported, @@ -120,14 +143,13 @@ def migrate( # pylint: disable=too-many-branches,too-many-statements raise StorageMigrationError( f"Legacy migration from '{current_version}' -> 
'{version}' is not supported in aiida-core v2" ) - all_versions = list_versions() if current_version not in all_versions: raise StorageMigrationError(f"Unknown current version '{current_version}'") if version not in all_versions: raise StorageMigrationError(f"Unknown target version '{version}'") - # if we are already at the desired version, then no migration is required + # if we are already at the desired version, then no migration is required, so simply copy the file if necessary if current_version == version: if inpath != outpath: if outpath.exists() and force: @@ -135,10 +157,8 @@ def migrate( # pylint: disable=too-many-branches,too-many-statements shutil.copyfile(inpath, outpath) return - # data.json will only be read from legacy archives + # if the archive is a "legacy" format, i.e. has a data.json file, migrate it to the target/final legacy schema data: Optional[Dict[str, Any]] = None - - # if the archive is a "legacy" format, i.e. has a data.json file, migrate to latest one if current_version in LEGACY_MIGRATE_FUNCTIONS: MIGRATE_LOGGER.report('Legacy migrations required') MIGRATE_LOGGER.report('Extracting data.json ...') @@ -147,6 +167,7 @@ def migrate( # pylint: disable=too-many-branches,too-many-statements to_version = FINAL_LEGACY_VERSION if version not in LEGACY_MIGRATE_FUNCTIONS else version current_version = _perform_legacy_migrations(current_version, to_version, metadata, data) + # if we are now at the target version, then write the updated files to a new zip file and exit if current_version == version: # create new legacy archive with updated metadata & data def path_callback(inpath, outpath) -> bool: @@ -171,22 +192,90 @@ def path_callback(inpath, outpath) -> bool: ) return + # open the temporary directory, to perform further migrations with tempfile.TemporaryDirectory() as tmpdirname: - if current_version == FINAL_LEGACY_VERSION: - MIGRATE_LOGGER.report('aiida-core v1 -> v2 migration required') - if data is None: - 
MIGRATE_LOGGER.report('Extracting data.json ...') - data = _read_json(inpath, 'data.json', is_tar) - perform_v1_migration(inpath, Path(tmpdirname), 'new.zip', is_tar, metadata, data, compression) - current_version = MIGRATED_TO_REVISION - - if not current_version == version: - raise StorageMigrationError(f"Migration from '{current_version}' -> '{version}' failed") - + # open the new zip file, within which to write the migrated content + new_zip_path = Path(tmpdirname) / 'new.zip' + central_dir: Dict[str, Any] = {} + with ZipPath( + new_zip_path, + mode='w', + compresslevel=compression, + name_to_info=central_dir, + # this ensures that the metadata and database files are written above the repository files, + # in in the central directory, so that they can be accessed easily + info_order=(META_FILENAME, DB_FILENAME) + ) as new_zip: + + written_repo = False + if current_version == FINAL_LEGACY_VERSION: + # migrate from the legacy format, + # streaming the repository files directly to the new zip file + MIGRATE_LOGGER.report( + f'legacy {FINAL_LEGACY_VERSION!r} -> {LEGACY_TO_MAIN_REVISION!r} conversion required' + ) + if data is None: + MIGRATE_LOGGER.report('Extracting data.json ...') + data = _read_json(inpath, 'data.json', is_tar) + db_path = perform_v1_migration(inpath, Path(tmpdirname), new_zip, central_dir, is_tar, metadata, data) + # the migration includes adding the repository files to the new zip file + written_repo = True + current_version = LEGACY_TO_MAIN_REVISION + else: + if is_tar: + raise CorruptStorage('Tar files are not supported for this format') + # extract the sqlite database, for alembic migrations + db_path = Path(tmpdirname) / DB_FILENAME + with db_path.open('wb') as handle: + try: + extract_file_in_zip(inpath, DB_FILENAME, handle) + except Exception as exc: + raise CorruptStorage(f'database could not be read: {exc}') from exc + + # perform alembic migrations + # note, we do this before writing the repository files (unless a legacy migration), + 
# so that we don't waste time doing that (which could be slow), only for alembic to fail + if current_version != version: + MIGRATE_LOGGER.report('Performing SQLite migrations:') + with _migration_context(db_path) as context: + context.stamp(context.script, current_version) + context.connection.commit() + with _alembic_connect(db_path) as config: + upgrade(config, version) + update_metadata(metadata, version) + + if not written_repo: + # stream the repository files directly to the new zip file + with ZipPath(inpath, mode='r') as old_zip: + length = sum(1 for _ in old_zip.glob('**/*', include_virtual=False)) + title = 'Copying repository files' + with get_progress_reporter()(desc=title, total=length) as progress: + for subpath in old_zip.glob('**/*', include_virtual=False): + new_path_sub = new_zip.joinpath(subpath.at) + if subpath.parts[0] == REPO_FOLDER: + if subpath.is_dir(): + new_path_sub.mkdir(exist_ok=True) + else: + new_path_sub.putfile(subpath) + progress.update() + + MIGRATE_LOGGER.report('Finalising the migration ...') + + # write the final database file to the new zip file + with db_path.open('rb') as handle: + with (new_zip / DB_FILENAME).open(mode='wb') as handle2: + shutil.copyfileobj(handle, handle2) + + # write the final metadata.json file to the new zip file + (new_zip / META_FILENAME).write_text(json.dumps(metadata)) + + # on exiting the the ZipPath context, the zip file is closed and the central directory written + + # move the new zip file to the final location if outpath.exists() and force: outpath.unlink() - shutil.move(Path(tmpdirname) / 'new.zip', outpath) # type: ignore[arg-type] + shutil.move(new_zip_path, outpath) # type: ignore[arg-type] def _read_json(inpath: Path, filename: str, is_tar: bool) -> Dict[str, Any]: @@ -239,3 +328,50 @@ def _perform_legacy_migrations(current_version: str, to_version: str, metadata: progress.update() return to_version + + +def _alembic_config() -> Config: + """Return an instance of an Alembic `Config`.""" + 
config = Config() + config.set_main_option('script_location', str(Path(os.path.realpath(__file__)).parent / 'migrations')) + return config + + +def _alembic_script() -> ScriptDirectory: + """Return an instance of an Alembic `ScriptDirectory`.""" + return ScriptDirectory.from_config(_alembic_config()) + + +@contextlib.contextmanager +def _alembic_connect(db_path: Path) -> Iterator[Connection]: + """Context manager to return an instance of an Alembic configuration. + + The profiles's database connection is added in the `attributes` property, through which it can then also be + retrieved, also in the `env.py` file, which is run when the database is migrated. + """ + with create_sqla_engine(db_path).connect() as connection: + config = _alembic_config() + config.attributes['connection'] = connection # pylint: disable=unsupported-assignment-operation + + def _callback(step: MigrationInfo, **kwargs): # pylint: disable=unused-argument + """Callback to be called after a migration step is executed.""" + from_rev = step.down_revision_ids[0] if step.down_revision_ids else '' + MIGRATE_LOGGER.report(f'- {from_rev} -> {step.up_revision_id}') + + config.attributes['on_version_apply'] = _callback # pylint: disable=unsupported-assignment-operation + + yield config + + +@contextlib.contextmanager +def _migration_context(db_path: Path) -> Iterator[MigrationContext]: + """Context manager to return an instance of an Alembic migration context. + + This migration context will have been configured with the current database connection, which allows this context + to be used to inspect the contents of the database, such as the current revision. 
+ """ + with _alembic_connect(db_path) as config: + script = ScriptDirectory.from_config(config) + with EnvironmentContext(config, script) as context: + context.configure(context.config.attributes['connection']) + yield context.get_context() diff --git a/tests/cmdline/commands/test_archive_import.py b/tests/cmdline/commands/test_archive_import.py index a2441faee6..229c99f0f6 100644 --- a/tests/cmdline/commands/test_archive_import.py +++ b/tests/cmdline/commands/test_archive_import.py @@ -18,6 +18,8 @@ from aiida.tools.archive import ArchiveFormatSqlZip from tests.utils.archives import get_archive_file +ARCHIVE_PATH = 'export/migrate' + class TestVerdiImport: """Tests for `verdi import`.""" @@ -30,7 +32,6 @@ def init_cls(self, aiida_profile_clean): # pylint: disable=unused-argument # Helper variables self.url_path = 'https://raw.githubusercontent.com/aiidateam/aiida-core/' \ '0599dabf0887bee172a04f308307e99e3c3f3ff2/aiida/backends/tests/fixtures/export/migrate/' - self.archive_path = 'export/migrate' self.newest_archive = f'export_{ArchiveFormatSqlZip().latest_version}_simple.aiida' def test_import_no_archives(self): @@ -56,7 +57,7 @@ def test_import_archive(self): """ archives = [ get_archive_file('arithmetic.add.aiida', filepath='calcjob'), - get_archive_file(self.newest_archive, filepath=self.archive_path) + get_archive_file(self.newest_archive, filepath=ARCHIVE_PATH) ] options = [] + archives @@ -72,7 +73,7 @@ def test_import_to_group(self): """ archives = [ get_archive_file('arithmetic.add.aiida', filepath='calcjob'), - get_archive_file(self.newest_archive, filepath=self.archive_path) + get_archive_file(self.newest_archive, filepath=ARCHIVE_PATH) ] group_label = 'import_madness' @@ -116,7 +117,7 @@ def test_import_make_new_group(self): """Make sure imported entities are saved in new Group""" # Initialization group_label = 'new_group_for_verdi_import' - archives = [get_archive_file(self.newest_archive, filepath=self.archive_path)] + archives = 
[get_archive_file(self.newest_archive, filepath=ARCHIVE_PATH)] # Check Group does not already exist group_search = Group.objects.find(filters={'label': group_label}) @@ -135,7 +136,7 @@ def test_import_make_new_group(self): def test_no_import_group(self): """Test '--import-group/--no-import-group' options.""" - archives = [get_archive_file(self.newest_archive, filepath=self.archive_path)] + archives = [get_archive_file(self.newest_archive, filepath=ARCHIVE_PATH)] assert Group.objects.count() == 0, 'There should be no Groups.' @@ -166,27 +167,13 @@ def test_no_import_group(self): @pytest.mark.skip('Due to summary being logged, this can not be checked against `results.output`.') # pylint: disable=not-callable def test_comment_mode(self): """Test toggling comment mode flag""" - archives = [get_archive_file(self.newest_archive, filepath=self.archive_path)] + archives = [get_archive_file(self.newest_archive, filepath=ARCHIVE_PATH)] for mode in ['leave', 'newest', 'overwrite']: options = ['--comment-mode', mode] + archives result = self.cli_runner.invoke(cmd_archive.import_archive, options) assert result.exception is None, result.output assert result.exit_code == 0, result.output - def test_import_old_local_archives(self): - """ Test import of old local archives - Expected behavior: Automatically migrate to newest version and import correctly. 
- """ - for version in list_versions(): - archive, version = (f'export_{version}_simple.aiida', f'{version}') - options = [get_archive_file(archive, filepath=self.archive_path)] - result = self.cli_runner.invoke(cmd_archive.import_archive, options) - - assert result.exception is None, result.output - assert result.exit_code == 0, result.output - assert version in result.output, result.exception - assert f'Success: imported archive {options[0]}' in result.output, result.exception - def test_import_old_url_archives(self): """ Test import of old URL archives Expected behavior: Automatically migrate to newest version and import correctly. @@ -208,8 +195,8 @@ def test_import_url_and_local_archives(self): local_archive = self.newest_archive options = [ - get_archive_file(local_archive, filepath=self.archive_path), self.url_path + url_archive, - get_archive_file(local_archive, filepath=self.archive_path) + get_archive_file(local_archive, filepath=ARCHIVE_PATH), self.url_path + url_archive, + get_archive_file(local_archive, filepath=ARCHIVE_PATH) ] result = self.cli_runner.invoke(cmd_archive.import_archive, options) @@ -244,7 +231,7 @@ def test_migration(self): `migration` = True (default), Expected: No query, migrate `migration` = False, Expected: No query, no migrate """ - archive = get_archive_file('export_0.4_simple.aiida', filepath=self.archive_path) + archive = get_archive_file('export_0.4_simple.aiida', filepath=ARCHIVE_PATH) success_message = f'Success: imported archive {archive}' # Import "normally", but explicitly specifying `--migration`, make sure confirm message is present @@ -268,3 +255,17 @@ def test_migration(self): assert 'trying migration' not in result.output, result.exception assert success_message not in result.output, result.exception + + +@pytest.mark.usefixtures('aiida_profile_clean') +@pytest.mark.parametrize('version', list_versions()) +def test_import_old_local_archives(version, run_cli_command): + """ Test import of old local archives + Expected 
behavior: Automatically migrate to newest version and import correctly. + """ + archive, version = (f'export_{version}_simple.aiida', f'{version}') + options = [get_archive_file(archive, filepath=ARCHIVE_PATH)] + result = run_cli_command(cmd_archive.import_archive, options) + + assert version in result.output, result.exception + assert f'Success: imported archive {options[0]}' in result.output, result.exception diff --git a/tests/static/export/migrate/export_main_0000_simple.aiida b/tests/static/export/migrate/export_main_0000_simple.aiida new file mode 100644 index 0000000000000000000000000000000000000000..b24b062b1170e35975d0128845106845dd24ea37 GIT binary patch literal 49277 zcmbTd1z1#V_b-eENGM23N{BQw%nU;#(k88oxPvo3)}8yucd)Q9N=HX;Zv3u>FN1{f76YH<^7_wXPESoTlseE?&V+A-(PsUx!RPzb8v($ z42PWdKQkT+T6ZfoHfjhFa`=64wdWrI{+@Y2bDypVD2Hz# z4a&af0%a`L%vo{uYp^BCF7HF~d}AE1 zs4rUMdREdA8_Weq+Oy$m#b^O6C1IX7w_^W2pzOPeym7*K71{UxTYMhj?e`d6`uAr~ zwWa?ScK!I@Wh|_IzN;%Q`-@ooL8q*d^(NS(znYw1xwpo#8i)wgfgLPf*%5BRH@Z?# zRJ^73+Lc@*(U!}sR0o>vrAg5&rbJeZ`>96@T2!XN*+1~$sioQ6j2^_gWF!pgCA-rN zVbxnawiAM?LGK;d)w@qo$hwq|kdPmE#~#L~mb4jR7KT3_QT4Ja)3aO?CM$?_Wg+^T z75&)tC2f$B4?kHda!yoOZn<&Sk5vC&GAQ3vo#6{)BpE@3zEWft z@?ZN|v|*N=9^ZB#r(gb4N=%Vc?3Vs>qk4oV^L5D)ecXRA-8-klL`J2LJit-#V}EGM z6@1@Y3D4gBXt`_g=K8_)`0uWzcAr8Y_iL|)XNIw}kvA1bGAeZ`f3aszZpmWZY~RM{ z4Zyjr(}nuZeqi_^+^3I8w9owR_VI1qgLRR0oWWE8&R=7&=*?4j9Kx?Y4LD_FG*T<= z)~UK}-rjOGa9rmdjj7cA2lugrlkx^FN=|d`sI7;z-!$RQci-&L-=*QArGQQn#}u(% z)2CeJ9OD1S44qOGLS33$4cQ4Z4P3AG-{6pR`V1hdsgt#Hf83Rw zUON3Lt9$&AV1lv8Jicwra?yWEINOB1NXmkfNxwvQMoFe4MjAP!CQR?bQY=&5inxgJ zDd|5rqLn}lSTfcPOHY5$)b9-5N&D7^sFii#%#7pR*M$+6#48T9@@jcRJz?pn`23Aw zqi8Zl!hn6Mu}jZLrgm$i@wr>0QqU&DV>Z`3_R-tHV)9I{2v5U{*ukdHHw{PHgW;J? 
zQJ$I?H$$WJe=+|o`dua%^2C2}iefHt(M`b}hbOj?@4fH$R$n@Kuj4txRV>rw5$3+6cInQaF)8tN|2aZG~$AhXd5C_p)Z5me zt}dq+x90=9bda#Cp97zmc*1XXSf5VNderO{5??d1|M+3G4;5t`_#S|19XuK0zK11z z={}T9l^UPS<+<5TTIaA)1L-GZDqf2-GRo(+)aa&GC^jgFsheFXiz4yS7Hz6k^(d^> zGHR$-HEM9(Uvzh|O&@{Z=@zRhchrLR+dQUgikY<rAlCHfos7!0|oRqvrSn_q)8uUZ!)H zukmmdHmEkf_Q-JO+mp;HDoXLXHmh)tfhO-cA0Ox)Uzz6%4^H?P^nD)tTC6qpHNSi0 zxE?C#^GZjT^c4QtVO-C_1F?N|fX(CSV5ww3cRq4Ry%x)DhnChlO*~gMY<6{sTJGSc0;2%R;rTl=MbyH@EISJ9nY;*Im-&Afd)WTRSpZtK-5c0lZG ztXETC-dJC*<`J#75rqI1C$p1FTEDO!^^G2V#}V4*;T2#ea%y+chG0>1+v&PAM=XI% z`bg(k8mvr$D&PMK$F}MS5NaYhinyV;6&bse{O;>l+j~<@50CYfU0Owa4F`$x*{%0TK+=YsEK$Fw>7L7Um<&%W z8kxTNLQMFInm^acx^wd4m(_CfKsH5_(D<^_i>tovWO_1^OOa`HLAqH7nx|To!q;iR zF(#wbLJ_}p#R8+s$rrI{@xCs#zIqRk^DTeg5qk~09I-fBe6m4JmTVHBD)2M6b|$k%X`2mwJw!FAFg zlS9R2?%R7+8GS#NUTET2DOiw;{milDzx~Uvd7Gr>1sSu~^XqZTHgR|$E^d`?%hqle z&vzAXn!0wCQj*VB_UV-jsJ%?wPkuOs8Bkt_3>d!|z`{Bs74KdGjat|g^M9i?dfbw% zN5rRZ%C%Q3YAn1rZ~4s137T{n)9+Z{@8RKSfreF{t-j-SU#oC~Iypar2RObKJQb`u z{;>ZvKy&`bx8J#+ck-L_T34KycDpk-y>=$MfS$I$sQPl9Ua}3+q;$>iBU8j1Ror>5 z>-mAs-5cAbhkHc`M`tGy6|diYu2m`*%BijuTZis#VV6E`OE{JvPE&8~-8_H)e&OZg z6Hs4#FUv?wSaR`FRCdxm$h5zUVLOP=d~1#+^juD&a}yuubo1r3(VTQ*fqC7{T74?A zf1)dG?c(YE;15P*5#TV%TYo+f*ubs(?%B*=W2dS1_N@SnQ);4BYekk4^mh zzFvM6J;IZ#DwmLfq<24ZlgSW|x^F$W!_bE#iS5jtk*tgme>sceIbq|3N*)ks)7_2pa&Mm632 zW!mxhh_${y>Za%kVUR_W$f0a})LH>WCz?CSb6C~H*WYwEGivU-?fOIi$NnTyL-)(! 
z@B84#n@&y6^9$g|E@G{7J|b8AJl|>e{BeU?qD|(Wu8NXpOh!4Mnrx1XlPBgXoTA!` z#!Mb15#TU6;Y8I>8s6HC8S#IIlQfHjgwD|k2}g;CaP>c{8lR2sF!4k?-YVEKs@^7k zHk_k))Q_B3>S?aO@a@YRLV!jl0z{c#rA#`={~z9E@ACFPoYU65wf`<|di~YveByH~ z_E%B(a<#J>H%1p}M`1^n>;0L#3QcU+RXH=ne=T~C^WfFG zeTi4=!tedTS2l0MVeVhS`dd~UA89#~*&b~%(=05^^!g6059YynY}RWA71~cmzyEMv zKf6Eo@W?H8D} z3p-AdITP^O{d_afJT*AK{X=7Yxb5cjw*CC}an|Yl75!f$MMsx&F3!(y9X)TFl2dQ3 z*Vb;uJ#Tg~H~qz{CXE%jwZUhFLz{LXE1P!wxWb&~mFR@Awbgi;akF8nIEx1iO3tL8 z-K=D+xa4dmmCGF+9FDIx%w66?^~!@!DTMpV%i$~s9)lu*@I7!~LGpZs;NT(`=|HPYoeJnL%B&jDwyu9v`o*Mp& zSjb{z(@E$!(CP!Y4+VZ(Zzt5X1NARaT#T8N(Fo;@7jJ6=C&R zZusr|FOF0>0E4M+%r&dk{86_;y??iMQBgk+@so-cdrvkwMq5vm=Q<1O=F0Yg^`M{7 z=;+w_N`CC9Z)(nA_pw&wptEOuP&Ow+NJF#PU`-L1ho$F-j%K9|O($P_frH=f6=8`F zkH;b71k-|txD!e*jvz6c<~GA-ePszi=%o?L1LE~?_aN!8n>{>U2>PzHHF^8M-Y}W< z+>nj~*9CQ;K*ycL_)c@!>$BU7!>dbpMCM_>FlT(z6C72V%&N8G`8YPF&E@kkp5nuw zg#%Ej7D&RcxRFR9s{?~cZ8{SXRNk4B{MuMe!nZ`N!OXPI#o{^i_Sig|v)jl$x?19_ z>b{1eQtaxV_d|GAn`;Y#mhP8nA(ThEx?&w>G$L$PFOJ`ZW*72v9%>nIa`Z^t*t}Vf z8V=E0XS8!5V>aiI9o359)Q%{~c&1zQOjldHwDLXD@R@TX%^=Bb=#y87a!VegTO z;-yxs>Jf^pvMi*3M#bbyjH*WzVA{2bOYc1B!X{lh-RsHw8m`M%( zH81VI=Ow|5eE6Wt_CZ%IULt zK|3CkftO_M;_+s0zgn4`pLJb3l)cw6o7M9O9U;p${6RBR&Vx)q>+?AgRKjMFTXs5^ zD=VVq*QJj6ng&Ucg`Bs2cBF+_N2sJZr;yP`Z6U(^V8_0`Vo!&4Ms@$bqRy>p7Uu_992EJ~IF+=M#lEtr^qQS@GKH1L5=GXo zMb_ul@=MOZrY;jZ^y}fcLhp01=k|qxQNQq8uBHOKsW+bASASRG44<@5)93X!5{FCe zXplp=oM^ugx_tM}bC5XmW1x0E`yG==-N4%(9k2PY>XQ}FF6i3?TqaJu!Y`8b$kzO) zR&*WbiGc{NQ}wmx=1mqWe)8tbH&(*>sTX-%nOumJPBJeISB_w(`e&sEj;9Bp^Cw)Y^;QAOE_Y+ z^X(2BT1FzDO&6%AKGm%dq`q}=nxT}{>wL(8{amR2!a&mN@@u^ZB**g1L+&jhMgMmF z%ko#VhtuIIM~ZaTtaN*5`~cH@@hG(7#B{fG81u7UeV2vETr!fM)W7?@#gnF5d$f*h z@>3iwr}Uy8T6Gemxj}qutch==+VzE>TBGPgNdmWD~Wtaj#5reArjE;~8UxRLl@w3Y#EsmFkDt9^V>qkQ>lnmQZgTk6X5p_gk zaELu$0RuIn6(ZaYcnI;>n)tf#ti6&{(>{>M=;6gh3-Hs5>mzeFm8_IChLElUkywCF zu+{63(s&Avgk8$oT{9|GHdRAO+^iyBhj5BZQ=U|@LdDPqJxY!k{GkKn(s{p%N45^# zjPoSwf`9*{W!888L(9lmPIh)oOylulNrS?Nhs&4Pj-(2e78XB$NCj{4b4LbYK$)L( 
zz!}Zz3RQ-!{UUp>@U+rdo5H>Cvu&DMpQU^g1-FAX_lQo~9-J$BZ1)-1)-6ao*nm#) zR$1Dnxu`6xvRV`hS)px;sez{QRqVBBV0fS3X)t9EPL{=eI}g(c%@U=}mwB`doXrst z-PgPt8H@anTyrD3?VQdnhn~B-k$3Nw`v_Gy#olMLtP%&wx7kL_0WVz|r_UFkxag0Z zj8%z!=wNbc^jEWIcX}7>dr<8zdw1m<=OfuK!sUwFm5e>Vmp>J!c{ub%Wn9zI&QU`} zZSO7Y%ySW!bKaE$quxFag<3h`L*r$Il=o5dE zvAfI=(<-f=b2p!zd~;&^zhM`SX^;KGZNbVP>`f^-9NJ^W1)5`HJR!ePH{Gn^r_OjG zL+@(CO~xX&b&9voBdMvT?e*S-O*=z|+jhuJQym|M8llzdO2 zr-6ILY4Wh^AjuSvB|o)`ng>LYwE7^pBY|0 z_GiHoLpuD_%wAt#Q;Hmx%a~zdiH=Ttx$OG%vtUb5s-LRhBZ8@d5$fGBmX&%D#3RB_ znQ*!U^@k@|9F%65L=ZpmW%YCyjOlya^)O+4iQ{>C+};JtebM_YZPvvn=Z-do&8t`? z!J!$_g$v96QMJ`dTO{&A1%loVQ4NwenDWAN$34J+McD$xv;$g4e9{IRIq)y3&W55_5Kr`_4htyy0#txf#& zQJ-9hsy)4c&L@vbD^!Iec;c8#yYgGwW`THwjE1WD{*>kClkfsf?&UH1)=3lW-k5KU z6N+;wTTF8*Zv`!OMSZesyw113I=hF{n}l13LVDZot6Fw5aDTR(J3_Td{wUhfFI}68 zuBJPPNIxZgPpw7f*`u-GNPgRC7LL^XOqN`zO8%*@%Jy=qJwmgtU_69f5B$-p?!H_o zJ(V5xxQToVy}# z3HLYd42O6w1D$;Ctx0sD+4QjO+yf&xw}6*Vqpr1OaA-u@Oso4+#0)`n5FuL+ImZ~& z*5}7;FTGanRK6&j-`+w69cuyCsVukqzaOst32eO|on|!PFeIME`pK2Qpub$Tg|+N+ zCZGL?Hk?9`H0{>%o+znzwApfwh)YU%#JsVhT{e9MlLeJ;wthjNU9 z3HH=$L~J|%$kC`!%%rUCUmW+-z3ll~>Cro4h&JU2o_$fRKhR7A07giCGg}WfiC4@< zFljdbc#)F97@&*I$pe3j%NAzEkq{zm=w#5!=l)YmpBqXrTU&DaWxz1M;V|i&uVJPj zIXqdyy*-k+gf?E2tXt<;NuKjwUF8Am=Iy?l(C%kF4uZDIeVPVcBi5yNQH6!+*j2?X zTCG_&TnFa-p;W?Q!1*a%fw%5=R)(sHhij?6u#?T$N0?0|WKV_(PG`GsYXTvy1B4@d zN>Yf;aw9M~`(gX*c2&(AX+QK=-}o0w@^{m+1&xUgpuuM8(vS0g_2$k#xvbCbtLmi< za0oNay%OoV&f1JR7^yMZ6P;_cI8U=fc}Z-AH%;Y^SuOAr=%fjn>~8xWNYo2a2P%kv zD{VW~H(tP5Tj5|VcURBLg%3=}Iu}uOsv0y_kOB1L6kvB&+#3v(P2(hOP&;x3XwQYu z4LA;!zw+@2#T>U%WAI2shhSSwmA~u=l%y+_kg=12b>GZ3;|fu)Rhkd!XNv0Cx#`_? 
zHuEk%0-^?1`eq z{L0O;G++2vDh;RYKJk5@sn!NNLy70ZRpb1-ZWeKGlt%^t5LW&B9ARj9cXqHcBp5OW zC-kLlJ>isjQ44$4*=tNr1R12$W%}SiPMu?~(wdN_8x~;LvMdm6?Mhe5_4%_AqLti+ zw0(?lw5cc!);eU-`=&oFjB`>5^qj(9TR>z4eoZmv@h<2zWMebXg8cXcov5Lo5n>xO0U;aj>uWk2$pIPGOoKMu0PUPu@&2t?r`QRnk6l?(`#Ib^ z>cE$gTX3zxM+={_jc3 zYmMykt-N;ZO%5$$&8j#-J#2Hpz~*UE1C9`nJ>8Os;g-B}_F}2Y-7*~Ql6V_|sclJ&fDT&f4l-6gkfx#*@f2txB~DAs zYUKSkz4j+F(NN2QhpF750|&SDvHcW(7?Ci&)K!F``472&&j?FQ3WgHH2xDUq6Pv0diPA9r2=Dp zSm!C9Jh>0^u?OH`JuP`Fuk&T`CFkd))cjbT*AY=)W21Bo)bx@PA|rJoztRv9KqT(S zIs*WEOv5WIENd^9d7q)Xe3re_rOsJ&!+k5f5@%&wFk6QjhW}CGyvB z42s-@WhK*fH7ipK0EkQeEPFREWyw{XXz_LLhKjMnXZIW)bvGxqk-ghHi!E5RlzLC8 zDgUqq2e*4z(yJN6ra#R1Uu^oLConnf|IwE}|%qzf;MgS2&umB7VM}rak0s?$!xBxGh7YajxfhaH^SO6{n z0wZ8x0f>MA5Dw*s@WS~akbgR)aqnMr{aN+D6Ig%0sI&Y3u_!bI1ry+d!$5E-5+Q&< z3ZMn}_+dys1PlU1K|l}$KOZj=h(rSfP%zBe`H=`XFC2-6K|pX`Bm%+rPm9_fo-vyO zkmBsQiGA4L4zv@sKe}V{icQ&svP(8`#y{HSO@croLb@zxE^n!3eYE~%9=Dgw6E8U& zC3N_l(|&QOfX1bu0^kFZFO;RsY)&)@)WGVV@3^wsl79JhmamgDK6Rqs){cL4`(&S| z_~g=&s(n9Grzh6N!`Q3$$$RD^x>CO=8*M$F;JT?^_nOJ&5O)XlKhV>GBz=m_u5oB^7 z=6jBdb!!)C;`AO%oFMJ~L4$%kCwv^s?w8=HGTYP9U{)&gwnsPMAX=AQaqr&ENA?Fg z^v~Y9=Un2aml-NwjNP-@jdn#d{jLY-39szZB$qWmxMTSCTYsLimSL12>8sPN+EAHR zR{|Mt^t-ijzZ?>m<^=HkN6c~JsDUN8uPMj}B_7zE0P;>YL+KR^HpL-Qd4Xb7AS1qKW7fxvtK zga8-~LdI~^v&EeDWOh%sZcmyaB^JqIl(Vt;&NduHE z>67`9QtH-?EuU+VoHpr$bA6r6tdOW@zp%9Ro?iom0iC+z7!Q!fUyO+pnMV1sysUan z^deJF@{Vtb?5uPmyW?$)d*j^7uWhyjYyM=vhFi^qPvy~<_b)Syg0Q| zyLsQ$ORj)4JR@YB1DbkUe!)%GmjM~tXGbMJIY}t#{@olZD|6Io_hoqya?D@%CjD^j z^qno$x;H5VrAQVLd+(l+8TQ?pyB+tb&1P2|eC^5iU(!E_e~xX!&_m+v_ zl&gPrNzZDN*4cyG`za3gZho7mfo_K-U%Cu5PPeG#mA=zq>>w z?^VSv<3Cgd0EG%5P+)#uUVaoW1kMix z^P!MX0e}F47XbjHAW#?>f#3!4K|s71Q^*JBg8+e0J_s0y1VRNcz5hv7whb-q5@ny< zq!>4^Z*oOCe?n<@)qNO{&Hm_(C7;w#n~qH6ujP+aVa!|hye520iEevlL=dU=piWc# zWz(I+h1!$V>pRTg%=N&mZ63Q?o{i4!z7Re!`f1+F@NGmKw)Gu-P-nu%hfa}^a?3BZzdB?Gmtxz| z`;=#ilH{k05{f@wXO|eA%Wg4f?H7*!#F4TSHncG$ock$$Cl%_+`6gV@v41DlgpIE) z2p|5!(tS{Hdis~aTCH8#O{tGT4a^BpUBA(NPmc1(;u}^UuanE`@t{ji#7loDWEA}! 
zm*F?yz9G%x%3|!wWhaBTCI4M{(Wv zH_d!>TJ3p$V_89X3hHe`O-qmN>!7U}m=D?+56YIKhd4g*upDGn{ou*bW;A{!x=m}l zftxbOIi&%!|8eSi(q6JAwd2rrT>W6-I^fkTh5W2X*fIT+U9tME%atSf{ z8lIewUk1?DV$btZBmiy8pEBwAN$&1UAC5 z-z+%4P`n~xyy3JqdcS)rC@@W&`g!FionC1c?HOs`{Ctl1%1?VN)P%1-z)7~B%Ff^O zqCQi3&dEGT9{E-GhoM$Q#5Apu5y7Q{-djpG4W{5l?=m4djm~&M197t2hOxriYttDw znNpUYE5it@_^?&!74nSnK$<}^ep2@Nt_NcaUYrNaxR%oxGxg_zWaG04Z+ zyEwNQDE2dZ**bCXQMTceQy$E0BtQm-*(!dax|wOv95rnrBECkZ33T%r(OgOwB9@`t?>w z^q9>HD=9lDfGXQ0t_bSnz19N5E#PX!Clm1!PLv9wRPV$KI~)Q#3x7m8v(WUia{~v3 zzAz;gr5&&beezpVwby_)Ro)pUrg*i7mrgMFF(`0H@aG%|gh*+11nEP@%>bA#n zw3$(-NSmf#gqu%1R{zSts>bX1_V+ErRh{(J9%`?(>P&#kQ}T(J+Vd-1uOk)zRJHv8 z>;0&>ES4)-X3u53GoRmi6k=Hi+IeR~i#eWGKbSqSKkupAU9k}v%1h;vi#^{1$vUpQ zI(r&%R+H;T-zb2E@{6@)D|+xc3aj?n9We4vVBJryFXwt3zprpL$X;4LFckClXZGgi zxSPv%g&cu$dG=w8#WH&qv1|PbZKr1^9r6A~F6PX%C-I(T;xpdDTCmxZ6Ar9Eu_y1( z)Np_PwBdTcr$zD}{_7bokh-@Wsj3XzF>F4&c$<= zUpIXqn%#I$G!jJqwK84hlsy9(XGZJ?y(`Xa5jpd|xFy%9{B_n-TE^Vj68B>*70<%q zSGd*=;(=x|_&uKX%5+OAm#;FGy$k>MG@r=hxYlWKlL_Ak56k~q3dQ@}nDXyia`LH)}gqDorvLeYpUaGue-Ra$)-Lt38DlHX(z6= z7gb_he#%%3Uj?{I6Ng-g7W(K zFEu@=1Nv|zG<@}te7Uh4M-*P3A#x(y&_Y7RjWpL^jAb~4s)H+GBG7`Gnh#tbsl16t z0?Fe;f5QP^FT2AWgBN!1GkqoF;?*CJdZTe^Ub2d&ssxx}Noxq{AuBUNntxZt?+&%( z-t=!veSOXa)E{hrGrW8nyj>n@*ZE2EIP3X|@fxm;{m*p-MCH~rWDENpX2H+VPPMFI zo>97i|9awefN&h)M!M9=diLE45&`8avPQ|{+~?@+tz9DmA*Z%>b^w+5Oa6(&NrK=B4zR$=yuqhbC`U!%Ah@}fLiC>U4nYT+~0j~t;I zB^^q$ekN~SdFO57$yP zQ(DFse`9KHg^zt4PaeKV(@asCS*^%e2sAV4p4i?xj0@6o!Iss38*3^NbK`29J&75* zTiTBl)LSuqw*9?H_nGRo#raFIOU2bp%@pb>=}febq(k}2J=$$he4LicdkgAM2@lln zC(ncJl2_=T4Sb3Hd+c!pV5wI zk_;{W@%lBAjdITARoK}?75b4_n%k$<7mS_{T^3zPYi(JRa_tg(W&Tt3@Y*~)pl~Ts)YF(O zN_ARXGMC*zQ=>(F=XQ^!Y1E)T$x2bwY{FZwc>B|gbtP|+A>lVzZVAE-I4#NvXbR`? 
zef0gE5&hhIU*+>6pefa|2CExeaC-SlD{sUGO$7t$8NS8`RN&M2uYy(SsdDJ|Fp&eA zchf9vP~tV;F={`PWbS`jW$LZ* zt8s~o1pA0lRbN?ZJC0v-8D5mzLsm2Pd~Aul`YJxYV}^^9+@N1=SV)83oh*H2K-*aA zsF?I7QT(pC8cwx=?9lEz2D!6vNrGV+*^r{|GhP=$cJ0S%0YcLHv zWFkW}k%wD8>WuC>iq-b7o&BRHGrNV>pSLfE^q17bCPA}Fd%GNj`+(yeQ+Y8FCWR6h z3g5mrz8z9kNckk?^2ir5-)^nF+*T!T=G z_qPbXHp-7&8bOR7oI%# ziv1{&(VXL4RBK1=3{$Bc_^9~k+ZzJ)-uMTtI>1%a7hC&pUE`8*wG8aC^JY7P5{j#a zRo0Uua`*!tvArPx_JY$D_opEDqAs{TyDv*@3t^`oWNBjD@;WFXRFdqLO~E+NjqQl1 z$j^rruN7Y|`iC?}7uJdb-**{+!@VWn^!|NHdI}TgINz|Be(@)9sPEZxuYcjaX>fFq zaTJTu41JR#1n)5~sQxKAbcZ%{=apdE8c*u=NO-%#=~@-VU3W<3dO8D%8Ux*v9RbEX zgYdPukCD5UVKD7HG8(3oao}PKBJ+Y)y_8&xq*L~Vh)85MJ~9BY)y7Uk1p zzu%d@kwnKa>F1}q>%!VfDfS<%h4l6KDH{+?cU}pso^xy}z1~iNv65?9_Y|BimFOBq z))v1k%3r0aG4{1yi7DIvDc3_k3{(lohzhO!cwzJC^C?_J zM#Qb*(R{qqEzY04=9ej*?Fm9GtUo)-zu#+0{x__~3q}AC5F|em&Ibp=fI!S$0Eq$u zfN0E46D@#7p+R5}5`qK*fnW#>2nRr*P!x!l7m7ll`CZw*pB`_d8vmil z5;8L+{tJ@>U{(u83xJTkFkT2R6oaq85ikrE2*AKBNH9MX1GA!d1pr`# z004jnK`;cuA-pgMl9!(sgV+K8`M=`4c^kpjchZwv zkj$QYLp>8TpoHrSLmH@(%mq%z731N$yHiU5OWE44{&Bw9>2Xn4xtw@^b{A4>w>C$i z14=!&VOin1meE-|Q68Y!&$DGxF-xCdb$Hd%JyVp(BCix|+xWT{aR?d0)f-VM-LPq@ zU=InnJa#WJSVJ||a~!yhsAr2=O%@e?W#H!7IXLL<&G~%aK7d{`d&;bA#LW1%=%Qc< ziB!W5v+%VWctYKSLF9Px*rag!B=2Xx6DX?s8C5w6ST=VBz3SRow@^+~6{n`P;Vl49TUHM){U za5F_cFfXce>tzaf5#uybC3=zXc@_RbJ8Hk3@(58<-C*d^=)y|AQin)G+RlmQl$`C} z26pQheDs+XT3T3?yTPai4lan_n)LsU$vI%qy8prC1o*&EI1&S_Adx^65~CeZC;*N| z!Vz#V2mpehpeQ~JoD1MZ3qU|=L;z|1A(`Ow z0D@AioTA49`~=A`ICXehKnkU9pbne#N9LsE-OKwdS(a*@gDa+|+w#^=i&~J$mQh*0 zL>Xk}9AgT-Zi9y|W;vHolBIv4&2-WKaaC9$<}?#*F7dhR zap`;v=V#&N1U9qr*@J9V2e!gT)FZYQ43;mmI)8o2PabZm*9&QI-pcGp^&vHF(4JMw zN%==I2~r&M0_LHU3eSk~zNz?YP7~cqip3UWk$lcx$PTh7`id8xwOl>WNv9A7&|2;4N9X2aRgYKUr zO55zmhj}9|861k=!tagAF15&Q$-dk>?S;M&Z@41wUpr)z$38aJ>xV>$^4yS`q?4$a zoNdNF{n>`nPQ{*JHce^=2&Er0x%0SCqBkg>O6|$Lb=GOR@~Z0>m*9-0j0ho^eYzmV z!#{SN(tMAZx)>B#0Xe{GVzP5bLQAF|W~}heaaM+CMoUepo;8deEe)i{g2S6T5AXrmJ!;g%WTZdesmc<3DgWFnF*9;%Zm?u|1hOSi)t>KabsOSj 
z%Qv@G9pT!nv{Epe_r+SHv?XjZJVhVk?At5Xv_85I0*+J0t-xXfb+qyd^M8cAcX+1p zb9LwX<-_9V47GuBUw>uiYz|d1aO+zu<^krC*B__FG_cRuQn0NxSywAB&^s`CZv=me2F;>MW+^-y2>PEn}vej$YSX`P*>p>pa6^o||48-k<^AhFeI)`a0OJMlfq@7#0>O{s zNAp4W1khlNFX0!!xEvG;gTjIV7*m1i!;b**0(hZdel#x}1c&p&0Dyn=EQwnb8`MNy zKju4L7p6q&B^hK|Wqq-VLZ=mFd`XEh)=-O%qewN5j>LQ7q|n2r{2-XZgqb|^?m3k| zS};KnuL2MEyTjJmVW|So7!4&W)xRvF%k1Os*BB!H5c2mV{`gN4!Duub#)szP0}3Dk zASeKd0HFjBnEV-ZO$MBm$AQvZ4>d*@#iR!tF5s9GqkE*;U^}n)`CeLPI|92q{3H^s%5`X{^C^Qra1)-4`5-@)P1$bd72nyr700O*x zFepC?0K(WM7%v)%36?O?5kDrs078Hu5KPMHA7i@hkvaKR0A;|!p<#Px7-`4v)N*sj z(QGbU390?N<=J{kp~%9aItu%b6>aMVk2q5*A0tTYK4_Uq?#m9`&VI(=$7a zW__8hRkKtdNwzEty)+-$NT+^U&)I3+5L{;T5FtnPq)>Tas_MqUVPB$q?P=n;sg!2~ ze~ezzT3+CGxehm+r))S_G8&{`7>BCKiUAS#RE!2keK#@>a1txt3EqDd+UnW8ma#6_0?l*3yaRk}qEcaM^(;OtD?tDEp6NhM6S3Z)2G_Gc2#xCNT%JD!BTNb<(=Yg-BHPON5E;iO+2k zmU`4!mhw+I|8QL2Xh~CwDGovW=p=Mg)($`z6&b4tPwjR$cIxmbcc{l$A6!P@&vQ=s z8yr|wR46XCyF*g?v{aw;veJU(yF zx+r)&4W4-(3I3#-Jrini`h8T{)B8(dqW8;;4i^#<=>L*B#_pj2NEn14gv1y! z7y$FaW2DR{0LA>^m6eRT?RT5Wg}{HznFEb}G!%Qfe-%?leIM%&ed?77niy zL{^TobAQGmaNYTF(_yNRe2HwH4|VY5{%{LD@|E=24tM)E;pgsUT#4<6DK9if2<~>I zgQ-JxYdEbD%HMTM4wjs~v=dP4N3Nn&DywoaihV=N3EQlz6d|>FhxOYEgNMv@0u4#$ z{^-ky&iXzI^FF?Ig~midTvtk}Z{D6ypUdgWDq}tSJ@J~d&`&C}DqTzNF_u?#x-t$- zUpo!UBpH04^#j&~;JIgpILG!!DHAc;F6&=!y+1O(SC*MM0TFy4(-P#JPlB+{CV6h2 zTwl$y*{kvI$wd2UZS5Y%HuSvKisGR4+U}wxx11-c*~+PC z=)?4BS*{n6uOVlLiAh=hkf~r_SVxN(iM!!$$=Rvcq9{2IaPL;R>Du|d_8-|pUv)h+ z{lCh-3e);Ou`1@+3>tudK~b0}lph7hY;!dsB`z6v_xc!t++tXbR=ya+(O5eiHc=gogKa}VltDr08qF^g zXI8yDt_9;qY7Ye05a$f?->;pEI?I`N$>))8D<7@PS=G2z19~2s?v&YWp7*$W4%Ri% z&laXH{|j*eTFQu>VuFu9{Qi5}+d7E%mn2{qfC7v}p)oiF5)K7na{4e#7>Nm~_~A$} z=8y8{fO!)(y= z!{lfcL34+?nW!lH&c5-e8V;u!XB&lSH@gC^^}xuPl+W~DFTEpQ5|Q2^%4Gc-*JGv2 z_;N3gf)ww=R|?i9QH!zgsk5gZXWgAuO<#)Dz06cBqqi5gxg|EqUd=Y`XosOLS!Wrh zJP+e<_-oNe?HQGMu|2@2`CN zZQ%a?7X0>;7Y_8EGk$fqhc>uUODU8yrg4IQZDOgQ`-jJerrvr;&JVDVt&C_GjXw0rQ{frVy*W 
zixZPEyd2GdnJ(5N%Q+v3iIr3HLtK)}ldUtITUV_c?rK*SYVFi$-$71q|FyixmW-7O8wRR~Ne*G-tJ`iemPxLCA?z1A_jyiZAHp~qB z`2Wv!9mm2U-rGneZ1?tEED=AI_0NW|Bg?OgK0)sk;Ql{cp9~Ho9v>~R^!wFwFSgxq zn5H}A%ezllkS#9++o@^QnQlL74fAkHki^W+yEMsN;&o_KkM|w*(^7;65ggmL$vDf6 zAE-BLWIw(?sWG^CmyRUoU(l_vS#&-VC5WT=%GB|~eV;tyyns361bLDQ$atyD?~s0v z_w2v+0Pj6CTKnGnqE+iGy)f@8L7`zMz6W$Hb5{RKu)>ANf^LpArFwgR) z#DOb`#^nTx!_=N0qfk&Aw!buOvgg)Fzp{7Qp}BOtLp17Hv}ish;;7SpaOx25w9X}= z)YLNNT-_VHvL~xmZatn1e5xpDd9{45P4@5z&bYGtqZwW0$=A24J*DWwl1y1*@OVc& zdfRf{1GmftS*%8Xn08*$d%?CQ!4kf5pfTCxjl(^fRbFwg?#^8RS;icjwrNz3R~F2* zfV*a&QeV=j5lkIITsjLaW2w9`7vdSY^CXdb?+qbG9>X>`C z7MWmB=$0vo7wqr+Tya3v9eO~vJk(MZ>3I9D5u?%e>x#HGa*WHx(^UI#hA08}GD4eg z#~kS!+V_h(^@rw}C>pEHkPfsVVjPfVhyG6{`6qXu{l5uUE-fi~ck|1A?9TSPtujs} zLD;Pu8(|kE8x39(h(GzmT+bmCxS3}9`xA+#WQSr43pnh!*D+lAWes5dQLeM(wK<^_ zXx+KzS;Uz;VZP!sCH~p-g`wOn@&kIk1O1l|o{{spmV%KotpM{UGw(nGmUXxnx_)aC zri0TRk!Jp54q5GK;*NR2zUfiV=(2#I-o^VKKpu~N!Porac-THDWF2n19#=1^gZ6^#TJ(3gxpWv+O#3!De zq1)Kcq06nB)-@dDDJL$+w2#f*6)%p#Mt>9wrvzhemR+JA^+*~Z;^!E>Ymx|{cM1c}0oRAE%otfSLsW>r%}2H=>FU+>Wnxkn1?F*8e2b)E_AHs75Z^! zz@A>byEDA|TPy>^K==5K#~0L1=r<=m6l-}A8(SLuMi&VbHV@DM0S7}_P?l7O88iBk( zrP?ICtcdlDf`e!3l=f`q+!FG42ROK)&xehl7HkY11^F$u@3D2iYZ*?9D4X6q^U!?M zR4)i2;;&z7 z)5%;8(0dSmwiwHhgt=RgJM(uyLc1sy=vY~a7^L9~HDMx&A#`iZKbX`Kpx!^&l*b>N zhEvE%2c`>o&T0u99&9E}DWB|aGk}6Z>XU%RJ3WGvA~BQ8A&P7u4=cpd!Gn`N$!Or) zZ@GVgSOKI&pm!LcU-+xPlg;zReq;qA<$U^K9}Ue^Coq}g&X7AMbn~->=>8Ho+`Fob z^T5v=SiI-YeznLKEAzyun^pe4n-dO{r@v<|t{9ur|E`_wDo~L(AT18S5FYJJ#x7w6 z87bZu3W?t}jtve@_T=|iXAb0wMzVq|O8x+2!Ve}Xm8jf^#1=~@{UeSnmNp^st-oU0 ziyJB|mPtX<$%ck*3;8=e=e0f4zUAYeAoAAQi{V0^hIUJI)4uBrQ}KWvBUU*8y_w4SW3GzpzC3!bX#LaIt?gWC19OBkgFt+3aid(KQvLV5(N}A3vjhYky+%+0v@O_ zFXbCvYnBS-7dN@p`o7_HrSm_ld zCCaOAJHkG)NAy$_12lYct5vc$bUP=j?x%UTNjtlnqmnVa(82*dZ=O^x8?9yqwK~0& ztnI*NIRZm_R;vV046vTrkz3;u&_f%Der9jgF_(2@FC9_jfQ~B$$T_+iJJ7ZAT>Ao> zOpS1$|9D!Cm`Nlst*?hzjOWcgcZ2rP#B~!2C|s5y92h0>YGn)!3cvz5#{(Ta8N~oq z9q_`=3m ztCew!0oYgiPg5SQj22j5Qh-06yY<=`w?eQf^$5E7%cz+#Jho5%-xFe%Ojq`C)#>Q! 
zba3$B+8Q3%qI7Q^kZEPma|`e$I}Jg(TLjKNuZgX890BL|F&`vNw41i>oIq|4xPX16 zt>sa8YJ`-cXH@iy2{6?{`JCMHtrr27?T;O<0;U4 zCmQiTLn?sWxy>sfp`ePG|c_{?jzoIa&KN4QCIM0mmS9CD| zWHtYY>h^k%*yfSmIPUyyprybdW#=2-72EBVVXC2o)000XL-@Yy&Az5bMu9MY=pOO? zr?%1g4_)f}XIC9fGKcM>%2w>J8s{vp)DMZ{eO3V^p+3h~@7nGzYMxboh@LT>c5S*P zGJBV-E4RB|oZ7FwwM%qo*sJJys!Md2U2nY;UGFYi=@FPWOt-S#?gepc0DPQn?=fU0 zxfdR(DQWg!_QX$9B1^rD2F_2f+A+Y0z6O(W1Yl3B=oln420@Q(Yg=5fgg-8WV5$mG zE^-`f^$246;w~N>rYM9to>Az5egAcx_@FORM6sPI=|BuP z$u{{wtr@yMh;-6NQK-w3QLW4E==nEq`SZBpNN#{!!tQbe{aYQoG8=tm@M6D&mZ zQ<(Rgk=*CVpzq1iHoPR|i0rExe zQ5Cw!c}q>0LUTHbb!)^CI*NC@7h+rDsvxXvrpa*S(~(l(P%g99jhUxV$M4@~l!@z_ zmC_G$Sn3h9IsXRk^Yd3 z4albQM*9|QQoOD0b>I7o-8)8b>PnmVm8f@zqKwPoI2aez?0Cpgi}>{`3m)yBPiCSN z#Qa=uB>)dDxamJd@A@<{6Opm)d7d3+&~@;f_3=bJiI$E(A8qg8F<7wUa{f>iZ&Mv@ z=N$)oD&hKE)0N8cX=h{)%^LnfiLD?MZVo@9*XA%|LWLlVr&! zF#lAE`rVc`tgD;;Ydz1R4yWf3(}4)ykS1m!AP06?cxPBvnu)$%K2T^eoz+9m3Y;sOoI6MT)1IcOr34FJV=Br7 zUavExR&_{;%40gW2)4u?UhZ2Xr)yM-iJ6wumt_6pD+Yg`cIIe4y$4#Jo6OfBxzvV5 zwK))6c#1D^{8;-5uR?2W>edF>{gU@5E;>Tq`frqXgzf8|gJ}@@FUm6V85D+JlzBbM zW#VcYTs7MWk4BmhUxC}vus?p1E-Ez|0+BDG9NTTqv5g-mQ7vz`C$P#b+L3( z8*tj2)BcjHCc6T%6%?(IvANaxlWE}8@!q3zrA?WTPkRJU$mCxQBD`(p~nsF>#3~f)c{I)O< z8S{Bzpt0K%rT)QbmJ{ZAjpAMA#L>#4*}=q{{!XHNCnORZA`&*jFM9oMD;&xUq1r;t>x-6UD?E7;EoR z^fu0V4wOgRJQ!}mXAS%!K<}y*ks~%-$wm3|Tk&WzM0afFEFNfap$fhdCjtrkLJt@| zQ8o^YzEz(`S=p|I%r*{meN+~z@k75K$1&JpWS|t^9pMc4HTLt%t`utA%r}#vR>)0m zUC*3d0{_}@QOIEC{dx}rO1f9o+()^5uqwXNEQN~Vv_OB-H4*EHFl>99TK51&>>#aX zb`7RUo9fQgIvj{>Y-$;;lRu(!Hlq_!M>udct#D}(@u^&MbfIC(+221Kw=!)~zj7#X z1xaGnx>1_${ubY!bzM4Od1zLj6D47bwxBCSM9&cQG#E}9K0d7%hsX!`i!&4sgoHZ; zAxmL}XSjq*#iOTsOU~(rp%0nlRmqHaZ)e4!(&_MybZ~SHBSPz&)lI(gPloldoh?@`|ozGp`; ziR2{og9ZQR?d3nqib#ZQBQ`x@!P0@VW1l}c^gx$0qp&co8vakPkX`A#@*B!nPL@0* zztI`U+dLU~!waZdbCyk2r!8eY3z1l~8J#!LUsUj)gX>Mv3g0$&!V&ai&3NFuBOz-=+X}QT)iTc*n9Fnk@m9b)3 zG57%!qy|5D1P*L2nAh)CH#Cp6ftqM1)xe@RWb4^kW!P$xFRm62G#h=kA}g#=@4Cwt zn*5=y3|tThy(gQ88BD2;0@tMX@VULMgO@j-Jf++^@7@0zkyDiHU)@UPydVGI!q{ow 
zwMKfHVQ>biCql%N=Mr=`!UUAS5L0Jv3B8q!Cm6`y;6Ma^hHPLtEm9BeIb z-Bg;iM%JPzb7cA?ohrgKRtQ(Qn(mH=)_B70Hg3XUrE^e97>3AE-EUC?EbLW}IX43< zO*;chAre-52W0&4OJ}lSejc^Rikuq?rDPPj7$$Tik>4}RlQ2P~VZYfcBfr1Vqz?Ov z3frY1q9$>%5?g((vyYlSz!<20WOM1kJn55h2@JNU=`e*ear5Iq7 zfIMcI#BntCKprGwuy8>}l;`Jxuv}!Ft-w3)f(dQGX32~X=QKkCE~)xowOq~R&tkk|9F#xd@L|D zu-6VTFF^PcvocTd?{y4ma97dbGp0y!R~eCDEKehUFFQ3Z#260j-iQR^&WDZBST!jm z&gV$iy)l**Md(Vx!g1?N!uod4?76qYJJw+9ME@So>0ripqg&0*fkb0a&Hg0V*+J{} z)MONg1tq$La%ZcQhvoVs?$AN1jL}MuQg*4MC(4&RvG(FNC8ls-0e%1((OuCZn_#FN z!bh*MlO#S1lcF6u7)-fjv9SrLEt_I9rv_}IKnQeD87auLTvRT)vyE(Iv*v1j?U5Ay z%zmE?Il|kjw-I&_gUT#&l0rhx=DsUP8PIam93@H{EG&|_4ycg#SOLI%mw!>r$G-4I zN$?f1xryTOP%?=CxUJvshmEoOUVVq-wswfytVCx*yVxRL*oCh7K6g^C#eqip76IMA z8$;4Q71xjyzqQ5V<8l~FxBmwiMKn^Rh(6jD**kJTZ9dqdi=f~;Le6*1@>IJ;H_Iqy zc=?p)7q>UC;ZwVRE8y=MNkVPZ;lJNHj?5aq%P|D-*c?`QcA5YN!dIQ%3bqmRIW7Js z0gIr)zQymn@I4LB7k(J8nFn#>k+COQ{*f0;7 z`Q27ClDQ)NH;->bjgo8H=C|Y|>wFrF#|}$q2GaCG{cSAXc`0=8Dv~ z-)`Q2EKuyLqWwH4$rS=)O2+!L9UKo;dJcb^h2r>0z7aC0O{G4tsB#Annjq?sAqBr2 z*~$M%8+BPrl{K$>U&q4$!~KI znXFK`z-gS=i$9`DVNP#)wH1Hpw)@}F6S#%)ybS4d2A+pui@ABKGp#+Wv~t<><-tg$qyj*3#;0{?7b{{)-Hhg`_!U^J5O_W9wHYuO|yZ z^58#f+AF?2*UY*vN*xs*S6i$5t#CWcw<;srf%oPtx2f%aR%nP%dgeKN*#>Rk7h65P zTBH!gqc1B-0rcHB15g17nWotTG-g4Kai-}U#2Wp-bxyO4dSM03AcPT&1TNOZf}Q>e zAO}7u#@^drKjimk zsaL^feJHP~$Mx%zZZQ^!zQ&1X^8Zlb-^YNp$J4=I(DfOY(Wt@KNSq=m?<{J@&&8|kocXDCJu<)L&JEP2oLqJ z_$_X>+|u_9qH_3WRhn{gu=S7htsAvug%zz#QKiTIM}v?#zyb1gq_N;c7E&h-o`1;U zfCrCIzVMF4)cpO3i^aA7dCb#$ms=#yM;|XzZ61eM4Q*Kw?U?Q5Do_9Jb`2qIEO_F$ zcC;C+hKi%3^YA+6nk)3o;-Jrd;gb>Qy#1UC*&pARn7lQgFv>mgl2uamnb_vkYz4ob zEJj)92+FooU#+^OM7ew2?_#Otj^B-~1bN3epWRo4d!uQ3dT3QR%N3McFriR2+odTF z2%-!767CiTtZ4dAbfVQtxOJkjAAfZ&tQaHNevYJi?0e`T1l*pTyMsy_nko!adXb@( znu0lj?0|N%23c+E!O;ru;DVG#N_8fhMI(+ytBxnf49_)#VY+(wf>VQrHJS5{Dfi*J zeD9ZYWXtE}ae-t(sw*biWx)=i%@N6W?wk>)aK^xKET+&W8!wW;F7jr%Q{&?U=HX01 zCg){tb;08aK|N&8lpXJS7tF4eOQh%dd6&PJNK4b*`Oo^*L$}(|AC`p?5EM~lRc)SKPid_Oy=wo{fMNPp=1e-$f4{Hp&;tYD!Zp9lAd@XBF5 
zg%7xtW7cr{sh&&T@&AurVUS?(|F2&016#gc;vCTA8euD^bVd1k!ZTLbC6{DSv$mO! zU%P!}PxA_?h6wOgui6~)W*Oq#wo+m(nJ&3&13kW~lPzbA8P({yW3=t7drB^TuUDwO z>^8+FO24K9)n<=Yf-f%~udv(J4W^Z`1N6NLl~tH;O8P?7&VE9M4!v9!`mgA^LtBUP zQ@eht5JSktEN?aUSW>(E5y9Qeh^{0C zC^$mVttEaRqU_uE=}p}3FUZiCW87Hej5{o$eKG1i`m6rE$^&nCPNAzD16c!#rHC3pdP+O9-q)B>; zK0{0t(m1A%fgb4DOp4ZWA%m!C{TWR?z8;+di6uL2AgG%GxrV_{Bjc=tuI08+pC<<&rmeZTeQZe z${6n@1IRwq`|>V$-1~X|Cl@|K;1Ok|OTb-JyEkw)disjz85FU{tyK@IOGEmi9racb z#KfK}&*@Ir#62FuoO_{{lzha`^hIjkdCwK5y2O?zjA%*1W?Uxks6~>h09!Bkecq9x zuooW%g&4q$Rnl0MQaK6eqyTv+$E2e3?_Ks8x#Vb9-`wW4XTE~cYDuyVlQ+TOjzHha zB)l|C`=I~hyr;7Vk;k`+R>cMPP3An)?OElg!NQz5AZL_;=mJdon6hPayb82rx9TS) z7=|+Aap^0Sy10q=jS|@hMTO)9&ZB}xsGayiFZG(-DI?Ao`VQ4t!!TiwVdlGl8(vk= z*kA^&D=+Hu*2d(Bb^C;na4QY>0Sd!~0N9?Z*4q`f$ugCV$5XC8{w7gBj3)sMAF+do zPM3O@?|8u5?0>CaP92=Nub)izK3((nZaL{<@8j?fff)UxpMpCj`iqyYr28Ox~%POfwkFeIs>_LoL(z)8oe1jo%m+iMq_x6&Ks9RwWj|A9l9L!I>!^{1EJ8D*+T|f`yj4FY^5Y#g{f>$}q=+b3=v0s1j*Ffhk{gAb;R>OJZc%Zhm!Ltl6lt zbEf;?a;en>cdV;n4Q+G?R@YqJ=qC~F_fzjpZ2}VD#b{VNO$i7lyq>8W(CvdYD56~{ z!F}MrUbe?s_`>&&f1ZR3N~C&9M&ga(DBT7LXdxB0!EX@34#_)P3qKR-mMj=ko_y}M zt9nU9nkycp*a@%C848l|R?mf{xh1b9st`U#O9S1%2F(g=wH_pl_jumD^Er|0x`_nK zwyB0q>d?jz=dh09CY+HWHkNkTNw3Pyy41}d_7#OH^e&;>-N-B$e$K^Zry`|Vy9s0) zw0G4es^9pXOKab1>2Ni}U|nO4bdM6JS||&O$6sA3cW4MLS6Wy9!8*gDy54f-12^g593x6t&;YG`eirSwM#u zNrJrw8}7XRyq~W}dD&Id4ZEarfC0pra*Oo0O8L~WrfXQgk3xNAHm0+hO$ z;$hFFDQ(RnuRcc0Gl`ACjxghzqj z;KCmk!VMqa9q{NZ0m&H$h3?TQ#;Odq?*tml~Q-08;hsjE@dVkfk+rfg(L>>Z(|h4Vi5c8m{Fyb ztivKqoClyC78}MF19|41>R%?}i>W&B-2Yf0`cpgl^D^se|KZVf*UHH|nCVF0anA_u zijqz3Dp-u3gwudKo}vgPC&58RA=Ln|_?OBe$4o~RASoikztG7Fj-BL+wQW9w_Aq2{ zeK@|(WTU*(K(FN50n4j?EhkH`*JO*cdApn1_3=Zu-yg6~FP(!fej*t9nPW7T+`QFE zR|izEXx2fVpsuFP?rSflEQWP`pA;=4N`2o>loPTY(5(F4EYyqBsxUA&u~m?ag#8=cRm(2UDuP9n|dGI4alMO7)Mk(N~S{`=)_VLCcFEC=VNRTq+m=T3vyf3 z2y0OK2)z6`cbV>?H6$YTW3uIi_3*)W#NXn_T;q|%ZALne89@If9hZ|$#6zkbQ!GAMPH{bjRHga#iyU3s^s3wMfhYauM_1efZ@EmE!eSQ}1RjBMfcO zJU(fCS7dGMG}<*SUm{Kvn;4BHhU#V#m*}%4G;}$senDEuYoDL=peI{#IHLHACkqvP 
z&jD@JJmWfe-lu_Hx^4-q;~29$$$>eASY9$Lv+h~Psww$iV8l(|6+hh;WBd5{M_|BZ z_*2;0-kj#I5z&^D&WNXK7+J*2_Wb6@#LPp!WuHg& z(s}wZ#NDP5NIkNCcpGOuVshCPPe*szQ1M1o<~0<9A$lOoE>KnTK62| z;~tO43iuM@HDrpK`WQ0h#3={tV`n?L;OeL;lCIb z`;nF8@*mOr)D+*vqFgstG4vj+&15sTXz?%5vG2o+CjSIiZmsl^qP^c@%vo-!Q7<;M z{j-EPvmx@0U)$DJ#utV8Y(j)1`2G;$s| z4iXHRJgI0eUR{>pmE4inv|Fia$Ea<-ol-)c)>9z<9tAO@gpDX`Z{$~ROn8}ao94cn z!mePGH?)X^SjhwW(>AXcEFN?AIIJ}&mc~lJ% z$&+r^A_cB6;Rc4@vD+$Y&2Pk7Jj=X@Gs&@OJ#CKl-TunkwqVt7_8twR$;{=fwH6*J5RkD_GB%V|Tzk7~aeUg(1 zny*gZB$^U74sXOj0XC6;65>CBe=m3rwcXH&_S+T;A!J-4-4gx;%&{w^DDGk%)d_xK z^}9mk*Ge|}!z%(Ygl`Rh?{q(Vx*=vukt&B^O+&mSdLpulTdR$+A3mgg;8W>lhx*Q+otQ@Z)(;-;H@3BNKzD*O;yu%wHK=fYaxiKoKyW1cKyOPX{$a9wGhyWUs_ zZ!>5IA$4^lhS+N?agy(C1n75&IRXJUr73cH;uzbvP;U_R@1`q1_-(P-iKfRVpV`l) zO1gDRL{A&g=hQe9@gHrOsF_`0`zNJSkcKP}v+r>Dsc&-$f<* zgst>iQhdw5*NC8ieh;xv^2GT`pk%;+Rol7&9PKt3Qs8+9Ssl3T(9^25pZ=^J_H9sj zPH1wEQj5Q%_Uw{I{(JS)_WT%e8Y>!>E@pC&FEr;JRG;*Q&(S|4S0JCwGQq^9B4-;G7?%WQ_D!44oFx~>k-M{eKRUm zKImxIW8Ll3FZeM0$}-Q@%e`paRQvkpupX7*~s#aX$+!cdZd1Nxzr&gy^kSr&nS$l0CXrFgxUU#N4ivNTSCulv(2|R-_ zp0H!R9wHK!`OGABeRYN4BW5Q&()jsvudq{;k`n*9o?Wh;(pA?;T>Q7O-F*sBq_d2a zY)>8^{m)vue2-e#-2W_6Bb?A8p2AtPIFl1A2?=SJ>Ww~OmAHuL;%H{K_#I?S!Do9* zE}I9DI(-rBhr8Tv!lBgF5!)n}#EsYAG=^oy2G~51w-5(36p|sivn_8xVpLM`o%Iqo zk6lglqK8z5D6qjEIAL)J?FF;_s{JibUi1e!0 zJSe~HS#gx*5a-K?ZqIDq2wp_D=A7k`9;F7DqFro2(LzNzSmYpVes8AcLXw(>aC&ePj7_3%$hs)aAv|8@+`=aWT?(l8myUOUCh# z{$|>c$;G88&0X%f#Q5A*=Fp_CFX2`s8Bb^ryQyAu$l0*N>xAyVAZPQurYxF9;)&9| zru5CtfULH^9wUk}nws(}CAV3@_Od_R@1bRie^Fpn|EnoFB4wmUrEJKAnO#V!Ui@8N zv!mVM!#r2xiJl#Y8m`9rOnr#%85&dL%qUYzb%I8!Dy6nai~ynD!$UB{H*rqaRR32~ zGn6yoI;zR6z$oFmRUms=qnfS+Jen$iX-xa`ZT;~3xDm(a3kQmjM*n-$M3((hpm_tR z)LgmfkpOa2X~Al1XX_^23M$p9yoqXBv}yvCiUAgPnMKFf4WO++i@Xe-`r``^qzb-} zA825|+np^3V+C3;3B@19Zf0~3&W^!l0Nh&Rq;uY}teb{jm_h(YTK?K=EHy&q9Vk7D7AGaF9p(F{% zOrMD_V@lh5Dq4a-Df6j3QAhUKkm-n*%;OS5KeQa8cLV7$rh#7obfzda(8!2ozfu~_ zD0#LciJqE3?vyvkU=I3-YBmmIMxt7Ej)8cTxYRTr4m`BrMKhGVZCa{>w{#J_diBEv z{Dr+H96p_)My3UqTe8Ro{PeBnF5liTdue=&UCo6+bD 
zq<%%cJVi~kB+_L*>^qPzASvl8#LF4rc|Mrpikgn-V^_})qz~U{X~h>I6b0FZQlf4t zj<9#;te%@7swhWNoc}CIt7&cwx&b>@9lr-dw&rYoJQqG&amZF~KhugE4Z(OB4dkRKi_PzW*a?pTi*pa*6hF6yNjYC4YxWCfSbl8trj9C=LmZU_qI1~-f+LRF3 zC~)WYo!P8SG8dJ1{VFqszZ9yVj{@k{wBN0O0;;4M4g4>EQc2m%HF?oFP}>X@Q_L4Ne)CERNjh#wk2EvwA?$T+{W7U5Jsjyqbfi#9vgt)aH?Cp zp-nijX4u`2a&Eo``G`=$4V&(-;X#+t2U^$xui-P>Z}MW`6?p$b-`APAm4ActSCsRW zPx{;<0f75J&M|6NhkdlIRp%pW{+18zuMzV_>F^^%G7Ark#pfH27C?>wqmvF78UnA? z25@fWtw)c@MSlN1zGJci7oiPfs>GXA2fv$?!#i$8*5reCT3d|Y5Y7G~X7>&=SQ^a? z$yc)U-E^@V9ZruvQ`oiaaI278eOo^P2ko=~!tx8XnR>`l*wp@9D7p#y1S#AUX&#WD zdN-*w(e@?%W#B| zLUY6vyhvoXQJt}_*?Cn^y7ej>c~w!Oy?v00VEfp=6YTknBpS#jrR-@``WCY+sh;1*icz|K)@oWz*+=aKnExcj(EN4JwpM<=U$ zAf)R*HQ~T5kF?biF%GxXQ7J=%HL?aR+OHpQk1KVo;ctTRj(21i9(>9w)Yl)Hw@j%! z;O#bu-#srHYY9wr3^A!qmpKqFHN%w+nh>h_nnzzcm@}f5sV1Px0gBjb_tEev#xr@$ z?80DK5XM799LK`j5FxzvFeo<#$(9}grXsG6Hi70*`{ksga-s5H<_6yRH0GO$o$B2D zr09xHQ)Tr^zOs?&oF5ge1C{0nWZ@v9M5a+5b@d6qv+oPiiwS!~o+&fv6*p>a+-`Si z6*mn9N$&bu)$Q z#-cYby2U6^2>mPCFTY@Z$4_LaQ=68Cb%6laLsjaR^Q+(^#{*$Vjar>?ZPdhp!ju`F zROOm@e-CEscG>ZRQM7@>LMJi!?wu8e5xc9gvVF2XqpNWP^anjH4Xx-&A_J}Scfty= z`s@PAd&2k4Ij+^WqpWW$b;Nv#UF*G2r^_{!c26+@R}Q(djEIjloYX&4Yal-zN<;oy zB-Ox-REaJ;11hSjJ*t{_5{m7m4C*%lX}@xWT(=GnFs`@%w(#mVP?dJ>sdW3$%hSRX z8iH1)pgA=#`oab-l~W=oEDaW0RAl2qL?pP;L$2k`$sDT0vxjEn1WK)oh6DSGo1_8C zBbC(q<;V+_mbM=RI`}huxiTsC+6hYFWYLG=mU|>&n7#LU)C>Yk64A(Mh&8WGJ!(rn zC>(R0=+p}^mhj2PEtFDIKbWeNX=mle=$%musPD-pSM)CuVpG|9G-a}b=_+R z`)75R(YwWJK?(V3KQkc(zrlV>>yzv*5AO6Q4u=0tXv+Z^_rqC~sTkzWz$dCWS`|Ya zID+?V4PW!Hj2iwiR*+*S5J`)LX?PPud*$HRRQNgNlkxOiVWM{cUQ_Emjb;9f!gsSy&1*3YR>J&vkEQDpYKQgWTXPO+N zreoXocx#J|@xB+Mw`vqIPpUs;rtLGsy3YOF_+{DYE~!-4LN#^}@Z(4BW#rc?V(gMX z$tG>7COXEb6~3xHy>lSJkmjs&s#QNt!)8WPX+VA3$1VcdEP-QoH_$9Wke&6D@C(7Ad>1%Fje{;;e5 zAPP~G;^|}~v$ZNG#YFdz6^Epih-x%a*z4%?h}+(G;{vPI8pXtx;M};2zU!l(c${!_ z@UY8@La71!{H?hyMr9=~EOpRw&%+m+g&JZ@Pp22)c3iS)T5aGr!W!J^l+uGE%RvXn z*Q+4oCbIX1s@a@Q5$;^4#V$U&kvS(g_$0u8DE=oOmZ&mY$$-xftX!CweF9diLd@3j 
zaiW|ubgN|-f*xdH&?_dA%5X@HmmVieqYKc8mME}RiU9baB4a$_XQ0Nandrw5VPhHU z+p`hNEkdc&^|IyOIJr}AsNsL-dVTGM<$8IAj~HIQ4n0(JyNxv4(i0-+qSsC%+`z-k z)VV;F3DTT-QuLGA3i3A{@9uDTFX_)k@&&&}vzX%gQ0hC52E-Xi&sgLR+>u_#o3Sm* z_?k7Y21= z?Kp6@$>8oti!2JJb5iwA7Dm=kvB=#cYlHC&zVB# zU_|0<61U{9l#ndE=T4_h7Ah^n9&DZgDxhkJ*f;qUMbJ3Hd`^H9ldbU)Jr~7&wOoxjMQ`Didx@?XWH!LCS zqqhntb#As$?$JIG1mX(YCF1WqeiIk=vV4JH@-Lsp$0vIZG%VtQ%C?nAH&gVL;%|}N zAzal7V~x$p#nm_H#JuH7Rn0$OIkB**)j{c!MZtQ>6cHUu4E?c7yy-87e#|Eef zAw7hmRF$HLASf-Q(2H~skd7in0hO+xf(p{Bp^AXg5roh|r1xH=+&6Cb`S#xTp6A^2 z$Co^r^qDnltywe4`>y%@P-!oAKInTi%X+=i?Gzr~+C5FBG0)apA4R_7cx#Y`|NAR+ z`|>26Mzrl`4haYP%%9`ko^Y!n@sm+%3ZoYHn$Aq2>|jk+(ATw=5SMkg**f%&8EmCp>9WG~6~ z9(lFR{F?8C7{(bCAxC{o2tpm)-^_r20uVy>pK{=V9GawzZtfrHn5*hzk3zOi&@A#T z7`vu7x=ls}ynHSEg&|WYA@NB8lOJkAa(&8lzD8q6`ocHQGIBt)%Yoaoss};};qQMy z6pZs%zLSf zI5!7xW$n{`jkJBE746Osy{oA>R=^KEO>oixzQRry+G%ffxbiQNQ|94s;Cn! z;soMn6Jtk~3JOhfk1>aN=}&j=f^!Gr3M#XPI8IlN3#^&Dk_5jOoEbQMH5te}Xu8z> z&EbAbSi1s--L)8VT14PO6=P5+ag;z}lS$g^%JHSC%-~1$NzSeNgW2=-2^%EHmArsS_H{lqn(f3rgpSh5;TrhaZ^y<1nckkgNBbYEPuT_ z7LkR8ULb4vHVRp!5vJ2*+28CKsjd}#@v8Yz@LFC*Fp1%a%tfHCFf_ zh+|g#TwY;9OfPK&@u|XAt#x3+<*=CAgaRNJOxlN50|AwKnLxlT1AkCuJ4~n5Ia1LZ*jxqkkK=I1cgGjc^odJ092TC3nKc+a#W$LD?422ISTiY5BW?R7 zRG<@CmOhV6+biB&W_x&s zDqzJ9iW0nST=_iG-mXW=!0FmWc0+A%SerrEx5fJFDW47auo=RA=6Vh$BXV_CqCX8y zo2@f*ebu59-0?x$Xz!wSa6h+#z(B%aWaug1(ab3g^cAIjiP>b-oLIt*LBZ^#DNZ+? zWVc6JXQg?1T3uH0*um7;p-l|1JJ5Nmb9MbkPOt1e_)t0Aad$-c@r%O@w(~xZ4>`yt zyDV;Z#~#Kzm+xO#b$L8kT{Ju~?{xAt&xv#2<&Ql$6?HxT_zXY z^~{ZSEyGV}JQc)u1$p*Ah^;@Ls-|%FejBjGf~$<<@_K3Of%HF^QvryM@2h&-3kQ$ z4xIAwy{pKQz`a&T@DZ?EGZ{jN3T0U@1qb8cxVGu8y)vRqbL%Tn|8d=S>t&tex~C;H zN!FRAhOE&k?x#-9q9ruDin>&mOM*EIr_7P=qTHB=;Zf8h1i#bzPn5r2C$1EOtOu{Gq)?(m11ZYnq%iFqm|D8c5^? 
z&d)9sXqbF>cdfoHH4%Bb?M-scigUT&U;`zccOmvHicn?h$gAg78fR!5e%ww>X3gPy z^lH)jxhp|=z&Y4AxJz>%aix7b*ir0D-=Nu59v3MToxVnQLS6doL7aQJO8hp@RaPkKh{CCzkuQX)Ea@yymIRR*iw*FDt6xywOuj^Qz{b zsOaxr0Wu*~)X93vrFogIc3UBX(xPUr|2p%#+p>0J0=5oE@5+&@l(A+xFLIg2&*{$0 zD(177}dh*Ixn32*2zlk&E#@#h1~d|{P@<9Xx#w?oJCTU*6cPN|LxIDZv* zP!n>pCow}tC||wQ=~I8VpE$8h+uPMso*iMjUPg~F?Kp2&VH{>cEa0aV?44PDNGn(;ZFXLPp480H zcxK|#3v4wM%KGV~JBr}SAI->MOB#ETNOa2E>}Tsyj(E$Frdb~!*j0G;V02xIYJfq( zJR|$WDJrR!xOFp?)HCRp$3C8Tje3L01luJt`iE$9@)&BXfupoJQMxy0YU8(#eO&sm zUUqTYL|8$rxOpsah=<$TYN4d2{M+U^((&Zm+=@AcL*Dm4*(+1_2l>Q&;!bNP4*a<6 zW`Wo3F1Agc99u`Kc~BlrQ)>-dnK}lPo_WI>gg#~t1tDm*iOx;*)LU)GuA}f{m7i;f zs|&P?wIyM%d}>NG&MVv~Rj4VYuHt4gkK8pd_v5C%`(*9hCoR6=!(&m`1I?29Yo$xd z`pVq%TjkkXhUTCi)|xT>BLCQ&;Qm3`-76(b;VXxB4|@>W1>-VSz_v-_JJNmWCO*|# z!$L;k28yrYm#ppeZk5FuTzDO#Zf$?<)^_Yufzt#vv%T#;Kh^thhu-b-ZjCxANv0?i*I+_7O#ZBGposL@!ZvG5hJGEJR8}DPM4}a!b#RB;Q z-i5=bvrGhJt_nXwbafnaKL$>zXrALeoSIwc62NjNOUyJEZs(_ro~G~m(o!P)+}7{X zx&|e4Jn;kDK=LVm`9Dr$K8^J0A3R%4ZR)S?L4eH1OtpFoI#y@Jjy3Kt%|Kd{{5XMW zRL%mTDj(Q)pg8ae*y^o$E_xb0Y~M9j~ zTkmD_l_M)BeGeHTEOpwa*0)?YJgK`%GE4N*CeA^`hPA?`OV}1*pHE*)*O*bOJ?u#f zU}Qniu#{Zwj>EPv23#CpB{sdnAL+Z>ZH~|e&N68|lN0t1t-s$AH3lRK1+7hX2g_!L z`!!8=i@@XcDA_x@+rP7Sf+FC|0X%hSbiwcohBPDDxp|?XmLB6OG`IQf2u$W`c=;XM z<@o2Z!lnT^$S;VD=62VR7$1d8$r#7!%B3jR^ur7X!5cR=Pf1OGHGAX2z2z4L+B(A;cu<+fe+dB9#WyER<6Qwg8or1ixs+hc2pIq_a ze{y?}Or5try!sf%$@B8Fg4Ry;n%`x5m4ZgCz%ySGytbB2MQ5BYe`k-e547`(5k?+k zp5tjxKRBnF()!8&^oc7^dGLo1oESUzOfOwp;aO9SdA)Ll6l`lc7yI-8?-_{Y)7>|C z`g~4tgenu1`PrG7cI$1Ms?*nz2sfr4KkhUE^DvtQ6)MmCg33thl;)9)_+ybqi=B683w&Q;`-beRR5AWx`SGj8w4f)@$v7Dl|1X;$R$3uI}Zo)&@3*)Re0;%)W zuL;lB_zyPd3g1!~y~q~sPPbBC7ttQ#^yp*^4I=|OPu|Wr(qTLtA*v>}ci(iviSyI? 
zT*G>_o43;A)Z?n8N3?;by9Djd!m6xAlbFSRI;Do8+o6fQ0d(PS>6)09IX8uuq=OEt z&K>S^7$k+I1(Ftg%xrX8?qrVzoagHMzL_kd;Eyo2?reA2x3IY-kulBemQ0 z+P>4n(yaCbm5Fz3UgdjLdPdt6sBOw|J7poWG5WpnumLT@T$McDfr0CF#wU_k?(kJV zG!&z~nQtMeVLZ+u&pbKlk9FSeD4}(*uGD912=M%@S+5#d*AMze|q7xt`cY%>FcWvFOn+h&{+W_qE&*QO$SrS^`&)t|%HLkBUsm{Ho;~*tC=DnrPZmXj=Xmy>d|;2#ZW5tA5FziQ<_BjrpSK3Ak|Ft^}Rp#`-|Tk4TyKNeq57t*jAdAqpkt4C;q0z%wgNu=NJ zZhWkY=qVm6!Cam>%ZhCyj=rl?JMH{M{yfO;Kt$hr4^QW0$4#rSXNQu&;rL?<$a1Oo zJtNn=jps;RjD#=w!l@b(NekYoc>Y^HX7y#a2isj%D~A0f{O&0w_j{iAC39bT$`0&~ z_vE#a;_;WIE6!DBG5&4eqg-#&dZt%n<~1eO_M_(Wk0z+!OHYmU**FY>v$8N9q7M?K zA%k|08=~j94e0K@9P-ihxN5Zz?~@*HW!WfYyd$I1HWsXFPQ2IW9;>XRXW*e1opA@!vGZX)j!171tbf}i3%pmZ zi>x-l)C#o}K|puYcG#ZWoOyiWgYwHg9h>WekH1#W)_)UIzxZ=N!G4XgbdPV0^x#Qj zY`vN2%32S5W~1eL6l3Cr*a7?WH|Z7D*NS;#^$UEZ7>XWkUx(dMwv{XNRm!u3O7#?V zR(~3>Pt^7^T6$&{BNct|^7C<%k1*#9Q$D%E&u-C?Y#!&YA?{sR0obw)E~_o~&Zb&j zwM<>Q8!$ByN&BH;Mt%H@xx{h=$i2oR1vfC=IOqX6BK_|w} zjvU(W6UI@dqmt7C*`3+oRpXJ^UU*b=@vZ5fC4xQE0=-2!x((9Q^F{f3h3p_om9e#K zprh1ZBk%E=SIJfB8M*sc2`Y}bJrE{0)Oh$0xnUV>AsrrKh5kcNEHr+`y6=pY28wt5 za8Ht)O}qf_sY|`3JKZWJbFBlu*}byYQ8JiNk#n)9J4r2d3FCIxHr3jMhP-h<<+~rs zoPkW`evC{dc0S%Zdo*&r*c%&OB|^2EBkIebF1RdAmV8;S(>lUR4zQt3pqr-;5?)w} z|603}WbDUA>Ei&<$Kcry4S*#QjckB>ODy>10RHQT2IoMrB9$!l-t7!GR`cGI?%|g4 zzxqZqpCdxb4;&s@?tkpT)p80fuimJ6<@Sva>-^F{@;=-kV_JoSRL(7#e)XtfEFfh; zwO}5`f&sz;vF#rb)(To4`n1|!zIVckTHB%YL3+-EjMR5IOhKR7Q);e%g^#sFo!Z2zVF>%u=7SM#$OIuQg3n( z>h{1jvWfePB+OiiKoA(|>DR~vo;;fQhR~rWZyGLIF!(U|^j237q}=MDO&oOKR5O8g z8OrTcE%iOk%&=8%%(qiB!Fdf!Im=h8_XhuJbQWmTqoxg5ZDODY&(G1QB7x~{=ri8k zCq64GSNOGhoz_{O%;pR4T63DDg_gJbSK6Jn9lH0RdF9kV%OKL+M>ZcSf8lLX8WW5^ zuX>=wwWmvW8k3frVN2aytROxX6PJ@O-&|7pM!i?{SCa_9r2H#=78hUJq|+w5e=XkhftZnIOWbTpcsnC-k4VD4FgRCJknO~Sc8|doOM0&u1 zyYe-uFtmWFSbTbqx{uSVZfVgk4{bC_wG@JHg53Ru0pZ}d%<)3Hd5)Gs%e^5sw;j3- zX{CF!Dx6l%B<(o2NN0sPNhcqSll^HE&Yi-~Y$r6ddr_&5YVNIHEC(R(Z2>1*Je#Mt zi3PdSxkr7_RXOg$in%0TZbU`n_oO$ zJS^MBE`%~)dY8MK!*Y4`>bQV<99uW2n^!*yn2g6$lo(AuleF$XbQD7% 
ztt*p@X&i%FT=_3>Nxi4SAq`Tm`N~Yx*FHJZ_LLP4jgIe~9U7E#koEVXc<8Sllg`e| zZ}3g5{E7&-`+O!%t-fk=8rp!aM!$bhkZO<<`LGX~C!}HTc%C=-GvKVLB`-Tx-wUL= zYp=m5BUBMEP687!G%mz>QE%s%Rq3g#)+KqORb5SLnIkemDGkIGec9NvKg0@yx;$#q ze_4~t-QR@MUEnSqu0CoLd0~nVHZz#wsn2qPBPh!;{qu_|@05@PUizG+w~#lYb)b7t}*ggsV z*-6`vI=Ec(Y;xy8j#A&!Hm^KR>@2K&LOlG0@S9$vny>s`wNlr)X27n#l%a9yZq!n6 zl-|j&Ro0K`|MUx+JkISl*8x`EHLB=bVLhuN!7SIuxkc=^_;^((#w4$);;xFUn*Xk(IsvlY(j1Sekk7Pdb?h=3h%uQ{e>4C&4&{&HNu&>1gd%+UJN2O;Ve!A-;lbK)*BUiRZAg-eOhiV zUz|TC*aUi=bGEO}p|`~+WvpSQ;h&t1f;hmzg^-s=5+_%SA@EK@tD24`Z9#y zut;Oxe~B`5KjhK)WIQTC46gKyyf{X_aSk(N4*|fqMv8;@zBo5564K)(m6z8FLGKzqQslLZ^=q1 zhyVaR!;v>}FX|%$JDwZmynbaX)R;dc_6}V*YM8;z@Jq8N8+AHIe5vN;YaD`oGN(PH zaG86s`X_U*C0}p6a{nQmF2?qB$$Kn#Y?d^JLtgi#82)8F+xigSj*hIlSBnp2*1lan zn<{;?u?y~#`MZ1pVi0AZ z&MMxnVcQ0-c_f|r?%tRq?)!@Gm9AJTycPBe`%H1FT@mij^HijBkJEomTR`i*A;Qxb z-+3-v92JHh<92o3Dj&$44K9=Wb1oaV0=5w7*dkJ<7T^y#n%5Cu9-}(%mH3Euf_3^0D@jdTl;*)9D9nDM{TTS_JIQ3yWM-wo0L(4BG+XcG3@ktUR zkUG}s>>=uu8Au6i?rxd=Ioz00k`Si$*7bKO>J|NJ+irteo#!bI zCtwM=Fv3lq`7-<@A7PF}&9CEm@<4Ws28{&*pKjTsT9^ z^JTf-Ax}HB)xNHWp^0pLs)qlZdl2{ifbbOW6uRC``+>+CMiNDqQD^6ZtCU=3qwKbM zc`-FQyco0MR5;*zLyYcj>gUZpT75FBZaVQ-*)pV3iwQgkX`r;c3y$XC7$x1QJpvf~ zCU&GYPQJ;h=jiH1M@Torpum}U(BjxbVHkh9gWgtOf0ZRV>bt>%Im4>)Gt9TSzLylf zP$yFZ+bu+-?B3y}I9MJ@eLiqFFKa-c3g#XVS}nM}=%2DHw+Hd*WvdVY@gM&+z%4t} zSzH{+wZ>GGr35|6Ho>wnc!+T)YBTeioHklcw~&cZHCI(Xl;mVu>ce%-%G`oMJG4*L zND=N+5-{Oe@?PA|9glJU@s=rxv+v?`FVk(STjXC;8nCn$I9L?IXv%6Fub1wwk$zJ+ zlgon{#Yl9oV2y*9`U5gS_Z#HwvRhymssvBA02-ZN`Yi}}^s3jl>^^)EeGT{lGX$p0 zFYxBoJ4+Gj%fj-yKynaBw^4F%mYXB{b1996?CEM@vfr!tn2`d@)H!cmR#DZA9Z=rf zQOpLH7A2~ofstc)%Y2HR2eaYJGl~f}c{PS19T_R6$z`~p{#i@q2kGd??ChoPtt}9# zgj_4n;^V=xHXolSczmSufJ>P*MHjA16n69YZdISdUiQty1+;jM17r2I{0Fc^vA@gH zcH$elEnjLtZ;+v0+c>v`cA=%{$FCI=(s{mne+Uqwmxq*~lRa$+3|wwvB*x9xt|yrJ ziPn!qNtdnKW|+ZO8`fN5rHF?F%(#QSfiS9(_1aP3r_4JLvc{fvF)Lx0fq5$uh~|h$ zzKMM2B#67fltM%JkU3pCJO5Wy1-4zNSvPX6)bg>g+B+X}HB-Ynh8(v${M3|*b5T6c zMSrSN)*{o?kdER8>TKzq7MVvH4^+9f(e>hz;)Vx6^VGncHfOcKZ{+^t-Qrzx31{EC 
z7*5py-kwWQ_P3lVi?Oa%3^nkZ?Bx_GQQp*lNPjJ*M@}e@bZkEKTwz_KLzz#eTNE=e zE2u{cPm|7#V){q;lDX<6Y#ga&dD8p6^H;`Cy(_q2wL@zBqdnIgWth!{zCv)`jd~E0 zIbk)gU_cSr?CR7g5b7*INUhsxCa_DLdxvS6NSLIl$^kxLSlydi{d2l3pY3zMKJaV>awt= zcLJxrO;YF0s;eGX=$#Icd_^^)eN5l6o#T^3M`*%u>SGMcw+BrK8*v+*%rjo&0_nRM z517dtXE{K~yjepdeV5y|5l%GjmDs7791|)`M9OeE^&cyE%0ixW8PH1z+Md&;zhth} zI(p0Y$Xh1zZIvk+0`p9LLv)NXFzOczBaV{%$^;W#bJ)Gzg5UO<*FV#%>k6<=rI<$3 zT;nH=tmx6f%3ZrKR-}2apu1o7oYNm|kXS4-y?Cr5D65cGfk=BFf0FRvK8$J~YR5nO zO^waTmVSnMP{qKgQto@e8ybi77P&+TS?|6&rc5e7Tvn)?a#uDD5O1H=L~SxyDBds- zcDAdKore*{{qLG*3mlPKoG-ZLMjQ*nFn<}9vrX6PHm%gE6F&Wp6xSrB0)+jiYg(aVE;v2N# zJ9~|Blzy@5dYPa$tYfHda?z<{fTb?%wu;!U*z6;dm%SR@(vq+r_8D_8ABB<=o`3kb z+Uf6+r8aE;R7zCbWzaTpVfE>nCwfHYOHjDrmkm0lIQnImL$3!Kg5}WlF_}J-D5r{B z_-VrG#v-e=;!5CK-cad`QYfpXgXFIHUA^`sqiqKt7jPX{@^%-vXn&kR?wIXt=tZ$T z4yM-y^dRHhdFIuT`jct zPiia7$!pU@Ujwholf+&NLqx&aCXLaI+A0I*tY*F0c1G!8o=(T;M-SUY1MY$ z*-I`P>uwAW+~?3X8Kl|g*n#%z!#2PZWcEVqQH{lwI_=IPG2(-aDwouZgMbV}rz@f# z88rzydX3HXtX*mD8t$*H6vatIL_h614Z-1e(a_CE)T{#S(D=Tsd*(h|*OKMOhtz)lwNb)us-ssk|81dGmAK{= zPtCUm zRAk{}q+)j*9x_Foe>8-s9O1Nyy_kMEUScqMu%nTA68t&SlQbZHdFOyrD4yR^Psy1W zF!B6K=Ak0*U^^^vn>@N2BpLRDwz1T+oTKC%xai~Csdi=L`^7-Ae$X3Z>%O%P)WNsRFEAge!o;iq83~2B z3h-2Md2@zgO3l13fR*z7I?DHd7r^&{S2G>!LjK*mX9k?chy3U$UHz6*6H4Mv3}i)~ zpcEoN!Ql5x0bPsCc4fEI&XAo>rUS(|ZR*e&x}n>tXCl95EK!JKytU?QpH}OXANqBbo=k z(48DLSa&&?o9SX%Jb(_RJKwFkT9pYMI?;Ja$Crl#$Qb=1og(Tl2O9G%d4Ai?&Cz8u zz4RvOk)7c}I^Q`~PL(~w{M{7nWX5@kMn!Q*w3I`x`|-ZKel^*iZ{8tcn2E-48F>wk zpzPJE)nz9W$#L)L?`lS@%Q)6$Eg0)g{*_vnvXlj7_Ffg$lInG+?OT@Cd>)eub8eW-ZO+CGUk^FkG=?S< zN!fO6cB3@g-mOoi`UK5SGyNyN7K~lAe9yCk_h7~C-@A7Zxwj?I@WGR;ZO~jBUCg&& zRx%@v?CbeCM66bM=*jys%r)T!N}G2D_OM!9hS^2lDNVjwA_O=Mjsw;Q|cm|>Z-KAsxa5BaFFTvjMRv*XZo}coe2^CZYReQuefa6*0#B_ zv9>$5wrSnEIi@9C`ss#rBgbOI+&GhPF6W>Dze83UdxiJtZq2w;TU6b-Si2`ax0iie zntWR_M53*|gxY78G*CKA-$P$}d^tSR5GQf>;v_PHcB!yk=dDY-f&^MRwYiU~Emor< z+Y8SSpNro=)>*jUljx=iOL`ik=9TsNO26dcoj(ow+BI9#KhTqWgrzh7Y_E|td%5~B5 zkz&;*H320>1!rGcf+_Wb^w--KaFbo^ZXLs4<{T+!8|`*S~^ 
zceF1Z?hkeQEFJPv2#JT!qS)yiK_Ci!@0unRH5UyRh!MEb&|K7$eR=NQa}faAXHE@b z28jU={B{hd=`|OI&gFIWC2mpZU7WPS}vf9e^%Yxt$ME(c-xLraUn- z)4r&3RnJgH+U)<`iWblU<@Rq{0V)PzFW$i6P8J7Xn6X3)A`FK@qY*>^ zJ`IBabW)fs0fqqZ#Aql0M1-N?Xtb;a5(S4?py2TTM5p|#E-B`hc$5_pPxS{l_3t7b z{wWd>PQaoO7FehSfM7%eaA+bLfx_Yu0Fx0wqe9_06as>m#S_730v3=Q1rSvs7I-2S z4uCZAI2_`?6^VG`nlweEI+{PIo&Teyv!{M59s&34p>-C;|}&B_jVLoco_zdNE+Q(8me_xxD{(*xl>pVK0=K^#t`_!NBmxuy!+~*dJPL1tu#m;d$^!RjJV6#LO9TWW zqKO26eG0|H@v^eAFgRA$0t`nYfp0+o;3g6EAL-Zs)Djfs$o2pBrC9i%S^}B_K;uw6 z1Pg&fkbu1`Z~$~04F=4HhoO)F3X}i=xZ^l97)*r10SPVO5G))IK|ugdll`xv5XHUa zu$S1f0bg>X{il|ykiSJjBVb4i00WK2<7EkWKnf%PfhFQCZ~zGw422T_q&wj5UJ`}h2sBP!=>A6M4JF2r2&!P?}pn){}u`8IT-L20t^axHvx=5 z13H4h!7wCT7LP~cpt5L+a|76NJQjFc7692|ad-p~0HneZfb;)HDD^)L=QYgU{3liY z?;7EmGF!4lvEz=r|GH3Ex75x`Kum#`2b5*VDoXhZ>V3m{^{ z;XugzuY;H7`zXGEFF4BADKYi(c83_JQk^V|b{<}zTfq?#N s#Qkq`s{bj{Ul)mg7YVBVTcrPOVDmLiI(o`%&jnnQnL(iDV#>4s1Cqj5=l}o! literal 0 HcmV?d00001 diff --git a/tests/tools/archive/migration/conftest.py b/tests/tools/archive/migration/conftest.py index 7d285962cb..ef4ee36d0a 100644 --- a/tests/tools/archive/migration/conftest.py +++ b/tests/tools/archive/migration/conftest.py @@ -10,7 +10,7 @@ """Module with tests for export archive migrations.""" import pytest -from aiida.storage.sqlite_zip.migrations.legacy.utils import verify_metadata_version +from aiida.storage.sqlite_zip.migrations.utils import verify_metadata_version from tests.utils.archives import get_archive_file, read_json_files diff --git a/tests/tools/archive/migration/test_legacy_funcs.py b/tests/tools/archive/migration/test_legacy_funcs.py index ac4d78910e..ba576f9c76 100644 --- a/tests/tools/archive/migration/test_legacy_funcs.py +++ b/tests/tools/archive/migration/test_legacy_funcs.py @@ -13,7 +13,7 @@ from aiida import get_version from aiida.storage.sqlite_zip.migrations.legacy import LEGACY_MIGRATE_FUNCTIONS -from aiida.storage.sqlite_zip.migrations.legacy.utils import verify_metadata_version +from 
aiida.storage.sqlite_zip.migrations.utils import verify_metadata_version from tests.utils.archives import get_archive_file, read_json_files diff --git a/tests/tools/archive/migration/test_v05_to_v06.py b/tests/tools/archive/migration/test_v05_to_v06.py index d589722888..7223e53ca2 100644 --- a/tests/tools/archive/migration/test_v05_to_v06.py +++ b/tests/tools/archive/migration/test_v05_to_v06.py @@ -10,7 +10,7 @@ """Test archive file migration from export version 0.5 to 0.6""" from aiida.storage.psql_dos.migrations.utils.calc_state import STATE_MAPPING from aiida.storage.sqlite_zip.migrations.legacy import migrate_v5_to_v6 -from aiida.storage.sqlite_zip.migrations.legacy.utils import verify_metadata_version +from aiida.storage.sqlite_zip.migrations.utils import verify_metadata_version from tests.utils.archives import get_archive_file, read_json_files From 98dfa092a5cffc409296f924159e24ffe5ef5ccb Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Sun, 27 Feb 2022 02:31:34 +0100 Subject: [PATCH 14/26] Deprecate `verdi archive inspect` Replaced by `verdi archive version` and `verdi archive info`, to bring it inline with `verdi storage` --- aiida/cmdline/commands/cmd_archive.py | 110 +++++++++--------- aiida/storage/sqlite_zip/backend.py | 12 +- aiida/tools/archive/create.py | 3 +- docs/source/reference/command_line.rst | 3 +- tests/cmdline/commands/test_archive_create.py | 36 +++--- 5 files changed, 85 insertions(+), 79 deletions(-) diff --git a/aiida/cmdline/commands/cmd_archive.py b/aiida/cmdline/commands/cmd_archive.py index 214e0f22b1..50ad0cb28f 100644 --- a/aiida/cmdline/commands/cmd_archive.py +++ b/aiida/cmdline/commands/cmd_archive.py @@ -11,19 +11,19 @@ """`verdi archive` command.""" from enum import Enum import logging +from pathlib import Path import traceback from typing import List, Tuple import urllib.request import click from click_spinner import spinner -import tabulate from aiida.cmdline.commands.cmd_verdi import verdi from aiida.cmdline.params import 
arguments, options from aiida.cmdline.params.types import GroupParamType, PathOrUrl from aiida.cmdline.utils import decorators, echo -from aiida.common.exceptions import CorruptStorage, UnreachableStorage +from aiida.common.exceptions import CorruptStorage, IncompatibleStorageSchema, UnreachableStorage from aiida.common.links import GraphTraversalRules from aiida.common.log import AIIDA_LOGGER @@ -37,65 +37,68 @@ def verdi_archive(): """Create, inspect and import AiiDA archives.""" -@verdi_archive.command('inspect') +@verdi_archive.command('version') +@click.argument('path', nargs=1, type=click.Path(exists=True, readable=True)) +def archive_version(path): + """Print the current version of the archive schema.""" + # note: this mirrors `cmd_storage:storage_version` + # it is currently hardcoded to the `SqliteZipBackend`, but could be generalized in the future + from aiida.storage.sqlite_zip.backend import SqliteZipBackend + storage_cls = SqliteZipBackend + profile = storage_cls.create_profile(path) + head_version = storage_cls.version_head() + try: + profile_version = storage_cls.version_profile(profile) + except (UnreachableStorage, CorruptStorage) as exc: + echo.echo_critical(f'archive file version unreadable: {exc}') + echo.echo(f'Latest archive schema version: {head_version!r}') + echo.echo(f'Archive schema version of {Path(path).name!r}: {profile_version!r}') + + +@verdi_archive.command('info') +@click.argument('path', nargs=1, type=click.Path(exists=True, readable=True)) +@click.option('--statistics', is_flag=True, help='Provides more in-detail statistically relevant data.') +def archive_info(path, statistics): + """Summarise the contents of the archive.""" + # note: this mirrors `cmd_storage:storage_info` + # it is currently hardcoded to the `SqliteZipBackend`, but could be generalized in the future + from aiida.storage.sqlite_zip.backend import SqliteZipBackend + try: + storage = SqliteZipBackend(SqliteZipBackend.create_profile(path)) + except 
(UnreachableStorage, CorruptStorage) as exc: + echo.echo_critical(f'archive file unreadable: {exc}') + except IncompatibleStorageSchema as exc: + echo.echo_critical(f'archive version incompatible: {exc}') + with spinner(): + try: + data = storage.get_info(statistics=statistics) + finally: + storage.close() + + echo.echo_dictionary(data, sort_keys=False, fmt='yaml') + + +@verdi_archive.command('inspect', hidden=True) @click.argument('archive', nargs=1, type=click.Path(exists=True, readable=True)) @click.option('-v', '--version', is_flag=True, help='Print the archive format version and exit.') @click.option('-m', '--meta-data', is_flag=True, help='Print the meta data contents and exit.') @click.option('-d', '--database', is_flag=True, help='Include information on entities in the database.') -def inspect(archive, version, meta_data, database): +@decorators.deprecated_command( + 'This command has been deprecated and will be removed soon. ' + 'Please call `verdi archive version` or `verdi archive info` instead.\n' +) +@click.pass_context +def inspect(ctx, archive, version, meta_data, database): # pylint: disable=unused-argument """Inspect contents of an archive without importing it. - By default a summary of the archive contents will be printed. - The various options can be used to change exactly what information is displayed. + .. deprecated:: v2.0.0, use `verdi archive version` or `verdi archive info` instead. """ - from aiida.tools.archive.abstract import get_format - - archive_format = get_format() - latest_version = archive_format.latest_version - try: - current_version = archive_format.read_version(archive) - except (UnreachableStorage, CorruptStorage) as exc: - echo.echo_critical(f'archive file of unknown format: {exc}') - if version: - echo.echo(current_version) - return - - if current_version != latest_version: - echo.echo_critical( - f"Archive version is not the latest: '{current_version}' != '{latest_version}'. 
" - 'Use `verdi migrate` to upgrade to the latest version' - ) - - with archive_format.open(archive, 'r') as archive_reader: - metadata = archive_reader.get_metadata() - - if meta_data: - echo.echo_dictionary(metadata, sort_keys=False) - return - - statistics = { - name: metadata[key] for key, name in [ - ['export_version', 'Version archive'], - ['aiida_version', 'Version aiida'], - ['compression', 'Compression'], - ['ctime', 'Created'], - ['mtime', 'Modified'], - ] if key in metadata - } - if 'conversion_info' in metadata: - statistics['Conversion info'] = '\n'.join(metadata['conversion_info']) - - echo.echo(tabulate.tabulate(statistics.items())) - - if database: - echo.echo('') - echo.echo('Database statistics') - echo.echo('-------------------') - with spinner(): - with archive_format.open(archive, 'r') as archive_reader: - data = archive_reader.get_backend().get_info(statistics=True) - echo.echo_dictionary(data, sort_keys=False, fmt='yaml') + ctx.invoke(archive_version, path=archive) + elif database: + ctx.invoke(archive_info, path=archive, statistics=True) + else: + ctx.invoke(archive_info, path=archive, statistics=False) @verdi_archive.command('create') @@ -427,7 +430,6 @@ def _import_archive_and_migrate(archive: str, web_based: bool, import_kwargs: di :param try_migration: whether to try a migration if the import raises `IncompatibleStorageSchema` """ - from aiida.common.exceptions import IncompatibleStorageSchema from aiida.common.folders import SandboxFolder from aiida.tools.archive.abstract import get_format from aiida.tools.archive.imports import import_archive as _import_archive diff --git a/aiida/storage/sqlite_zip/backend.py b/aiida/storage/sqlite_zip/backend.py index 10e19ef81f..8d8642c893 100644 --- a/aiida/storage/sqlite_zip/backend.py +++ b/aiida/storage/sqlite_zip/backend.py @@ -32,7 +32,7 @@ from . 
import models from .migrator import get_schema_version_head, validate_storage -from .utils import DB_FILENAME, REPO_FOLDER, create_sqla_engine, read_version +from .utils import DB_FILENAME, REPO_FOLDER, create_sqla_engine, extract_metadata, read_version class SqliteZipBackend(StorageBackend): # pylint: disable=too-many-public-methods @@ -45,8 +45,9 @@ def version_head(cls) -> str: @staticmethod def create_profile(path: str | Path) -> Profile: """Create a new profile instance for this backend, from the path to the zip file.""" + profile_name = Path(path).name return Profile( - 'default', { + profile_name, { 'storage': { 'backend': 'sqlite_zip', 'config': { @@ -191,8 +192,11 @@ def maintain(self, dry_run: bool = False, live: bool = True, **kwargs) -> None: raise NotImplementedError def get_info(self, statistics: bool = False) -> dict: - results = super().get_info(statistics=statistics) - results['repository'] = self.get_repository().get_info(statistics) + # since extracting the database file is expensive, we only do it if statistics is True + results = {'metadata': extract_metadata(self._path)} + if statistics: + results.update(super().get_info(statistics=statistics)) + results['repository'] = self.get_repository().get_info(statistics) return results diff --git a/aiida/tools/archive/create.py b/aiida/tools/archive/create.py index 5a83a860b1..acb5a200fe 100644 --- a/aiida/tools/archive/create.py +++ b/aiida/tools/archive/create.py @@ -281,13 +281,12 @@ def create_archive( writer.update_metadata({ 'ctime': datetime.now().isoformat(), 'creation_parameters': { - 'entities_starting_set': + 'entities_starting_set': None if entities is None else {etype.value: list(unique) for etype, unique in starting_uuids.items() if unique}, 'include_authinfos': include_authinfos, 'include_comments': include_comments, 'include_logs': include_logs, 'graph_traversal_rules': full_traversal_rules, - 'entity_counts': dict(count_summary), # type: ignore } }) # stream entity data to the archive 
diff --git a/docs/source/reference/command_line.rst b/docs/source/reference/command_line.rst index 7043a13b40..7979b1ffb9 100644 --- a/docs/source/reference/command_line.rst +++ b/docs/source/reference/command_line.rst @@ -27,8 +27,9 @@ Below is a list with all available subcommands. Commands: create Write subsets of the provenance graph to a single file. import Import data from an AiiDA archive file. - inspect Inspect contents of an archive without importing it. + info Summarise the contents of the archive. migrate Migrate an export archive to a more recent format version. + version Print the current version of the archive schema. .. _reference:command-line:verdi-calcjob: diff --git a/tests/cmdline/commands/test_archive_create.py b/tests/cmdline/commands/test_archive_create.py index b564fa8d1a..86b285399b 100644 --- a/tests/cmdline/commands/test_archive_create.py +++ b/tests/cmdline/commands/test_archive_create.py @@ -7,7 +7,7 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""Tests for `verdi export`.""" +"""Tests for `verdi archive`.""" import shutil import zipfile @@ -179,35 +179,35 @@ def test_migrate_low_verbosity(run_cli_command, tmp_path): @pytest.mark.parametrize('version', list_versions()) -def test_inspect_version(run_cli_command, version): - """Test the functionality of `verdi export inspect --version`.""" +def test_version(run_cli_command, version): + """Test the functionality of `verdi archive version`.""" archive = f'export_{version}_simple.aiida' filename_input = get_archive_file(archive, filepath='export/migrate') - options = ['--version', filename_input] - result = run_cli_command(cmd_archive.inspect, options) - assert result.output.strip() == f'{version}' + options = [filename_input] + result = run_cli_command(cmd_archive.archive_version, options) + assert version in result.output -def 
test_inspect_metadata(run_cli_command): - """Test the functionality of `verdi export inspect --meta-data`.""" +def test_info(run_cli_command): + """Test the functionality of `verdi archive info`.""" archive = f'export_{ArchiveFormatSqlZip().latest_version}_simple.aiida' filename_input = get_archive_file(archive, filepath='export/migrate') - options = ['--meta-data', filename_input] - result = run_cli_command(cmd_archive.inspect, options) + options = [filename_input] + result = run_cli_command(cmd_archive.archive_info, options) assert 'export_version' in result.output -def test_inspect_database(run_cli_command): - """Test the functionality of `verdi export inspect --meta-data`.""" +def test_info_detailed(run_cli_command): + """Test the functionality of `verdi archive info --statistics`.""" archive = f'export_{ArchiveFormatSqlZip().latest_version}_simple.aiida' filename_input = get_archive_file(archive, filepath='export/migrate') - options = ['--database', filename_input] - result = run_cli_command(cmd_archive.inspect, options) + options = ['--statistics', filename_input] + result = run_cli_command(cmd_archive.archive_info, options) assert 'Nodes:' in result.output -def test_inspect_empty_archive(run_cli_command): - """Test the functionality of `verdi export inspect` for an empty archive.""" +def test_info_empty_archive(run_cli_command): + """Test the functionality of `verdi archive info` for an empty archive.""" filename_input = get_archive_file('empty.aiida', filepath='export/migrate') - result = run_cli_command(cmd_archive.inspect, [filename_input], raises=True) - assert 'archive file of unknown format' in result.output + result = run_cli_command(cmd_archive.archive_info, [filename_input], raises=True) + assert 'archive file unreadable' in result.output From 8aa1f88eabab4d5a8411b75798df5ea673dc904c Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Sun, 27 Feb 2022 02:56:04 +0100 Subject: [PATCH 15/26] Improve `verdi archive` docstrings --- 
aiida/cmdline/commands/cmd_archive.py | 10 +++++----- docs/source/reference/command_line.rst | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/aiida/cmdline/commands/cmd_archive.py b/aiida/cmdline/commands/cmd_archive.py index 50ad0cb28f..94728e3012 100644 --- a/aiida/cmdline/commands/cmd_archive.py +++ b/aiida/cmdline/commands/cmd_archive.py @@ -40,7 +40,7 @@ def verdi_archive(): @verdi_archive.command('version') @click.argument('path', nargs=1, type=click.Path(exists=True, readable=True)) def archive_version(path): - """Print the current version of the archive schema.""" + """Print the current version of an archive's schema.""" # note: this mirrors `cmd_storage:storage_version` # it is currently hardcoded to the `SqliteZipBackend`, but could be generalized in the future from aiida.storage.sqlite_zip.backend import SqliteZipBackend @@ -59,7 +59,7 @@ def archive_version(path): @click.argument('path', nargs=1, type=click.Path(exists=True, readable=True)) @click.option('--statistics', is_flag=True, help='Provides more in-detail statistically relevant data.') def archive_info(path, statistics): - """Summarise the contents of the archive.""" + """Summarise the contents of an archive.""" # note: this mirrors `cmd_storage:storage_info` # it is currently hardcoded to the `SqliteZipBackend`, but could be generalized in the future from aiida.storage.sqlite_zip.backend import SqliteZipBackend @@ -139,7 +139,7 @@ def create( create_backward, return_backward, call_calc_backward, call_work_backward, include_comments, include_logs, include_authinfos, compress, batch_size, test_run ): - """Write subsets of the provenance graph to a single file. + """Create an archive from all or part of a profiles's data. Besides Nodes of the provenance graph, you can archive Groups, Codes, Computers, Comments and Logs. 
@@ -217,7 +217,7 @@ def create( help='Archive format version to migrate to (defaults to latest version).', ) def migrate(input_file, output_file, force, in_place, version): - """Migrate an export archive to a more recent format version.""" + """Migrate an archive to a more recent schema version.""" from aiida.common.progress_reporter import set_progress_bar_tqdm, set_progress_reporter from aiida.tools.archive.abstract import get_format @@ -336,7 +336,7 @@ def import_archive( ctx, archives, webpages, extras_mode_existing, extras_mode_new, comment_mode, include_authinfos, migration, batch_size, import_group, group, test_run ): - """Import data from an AiiDA archive file. + """Import archived data to a profile. The archive can be specified by its relative or absolute file path, or its HTTP URL. """ diff --git a/docs/source/reference/command_line.rst b/docs/source/reference/command_line.rst index 7979b1ffb9..a01950b053 100644 --- a/docs/source/reference/command_line.rst +++ b/docs/source/reference/command_line.rst @@ -25,11 +25,11 @@ Below is a list with all available subcommands. --help Show this message and exit. Commands: - create Write subsets of the provenance graph to a single file. - import Import data from an AiiDA archive file. - info Summarise the contents of the archive. - migrate Migrate an export archive to a more recent format version. - version Print the current version of the archive schema. + create Create an archive from all or part of a profiles's data. + import Import archived data to a profile. + info Summarise the contents of an archive. + migrate Migrate an archive to a more recent schema version. + version Print the current version of an archive's schema. .. 
_reference:command-line:verdi-calcjob: From 89fb338b3d4ff4783a74e6b6ddb5b9f22354cf73 Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Sun, 27 Feb 2022 11:19:38 +0100 Subject: [PATCH 16/26] Add tests to ensure psql_dos and sqlite_zip are in-sync --- .../sqlite_zip/migrations/v1_db_schema.py | 18 +-- aiida/storage/sqlite_zip/models.py | 2 +- ...migration.py => test_legacy_migrations.py} | 0 tests/tools/archive/test_schema.py | 127 ++++++++++++++++++ 4 files changed, 137 insertions(+), 10 deletions(-) rename tests/tools/archive/migration/{test_migration.py => test_legacy_migrations.py} (100%) create mode 100644 tests/tools/archive/test_schema.py diff --git a/aiida/storage/sqlite_zip/migrations/v1_db_schema.py b/aiida/storage/sqlite_zip/migrations/v1_db_schema.py index 1e56b1024f..6a1a0f60c6 100644 --- a/aiida/storage/sqlite_zip/migrations/v1_db_schema.py +++ b/aiida/storage/sqlite_zip/migrations/v1_db_schema.py @@ -14,7 +14,7 @@ However, when migrating an archive from the old format, we require a fixed revision of the schema. The only difference between the PostGreSQL schema and SQLite one, -is the replacement of ``JSONB`` with ``JSON``, and ``UUID`` with ``CHAR(36)``. +is the replacement of ``JSONB`` with ``JSON``, and ``UUID`` with ``CHAR(32)``. 
""" from sqlalchemy import ForeignKey, MetaData, orm from sqlalchemy.dialects.sqlite import JSON @@ -66,7 +66,7 @@ class DbComment(ArchiveV1Base): __tablename__ = 'db_dbcomment' id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - uuid = Column(CHAR(36), default=get_new_uuid, nullable=False, unique=True) + uuid = Column(CHAR(32), default=get_new_uuid, nullable=False, unique=True) dbnode_id = Column( Integer, ForeignKey('db_dbnode.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), @@ -81,7 +81,7 @@ class DbComment(ArchiveV1Base): nullable=False, index=True ) - content = Column(Text, default='', nullable=True) + content = Column(Text, default='', nullable=False) class DbComputer(ArchiveV1Base): @@ -89,10 +89,10 @@ class DbComputer(ArchiveV1Base): __tablename__ = 'db_dbcomputer' id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - uuid = Column(CHAR(36), default=get_new_uuid, nullable=False, unique=True) + uuid = Column(CHAR(32), default=get_new_uuid, nullable=False, unique=True) label = Column(String(255), unique=True, nullable=False) hostname = Column(String(255), default='', nullable=False) - description = Column(Text, default='', nullable=True) + description = Column(Text, default='', nullable=False) scheduler_type = Column(String(255), default='', nullable=False) transport_type = Column(String(255), default='', nullable=False) _metadata = Column('metadata', JSON, default=dict, nullable=False) @@ -120,7 +120,7 @@ class DbGroup(ArchiveV1Base): __table_args__ = (UniqueConstraint('label', 'type_string'),) id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - uuid = Column(CHAR(36), default=get_new_uuid, nullable=False, unique=True) + uuid = Column(CHAR(32), default=get_new_uuid, nullable=False, unique=True) label = Column(String(255), nullable=False, index=True) type_string = Column(String(255), default='', nullable=False, index=True) time = Column(DateTime(timezone=True), default=timezone.now, 
nullable=False) @@ -140,10 +140,10 @@ class DbLog(ArchiveV1Base): __tablename__ = 'db_dblog' id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - uuid = Column(CHAR(36), default=get_new_uuid, nullable=False, unique=True) + uuid = Column(CHAR(32), default=get_new_uuid, nullable=False, unique=True) time = Column(DateTime(timezone=True), default=timezone.now, nullable=False) loggername = Column(String(255), default='', nullable=False, index=True) - levelname = Column(String(255), default='', nullable=False, index=True) + levelname = Column(String(50), default='', nullable=False, index=True) dbnode_id = Column( Integer, ForeignKey('db_dbnode.id', deferrable=True, initially='DEFERRED', ondelete='CASCADE'), @@ -160,7 +160,7 @@ class DbNode(ArchiveV1Base): __tablename__ = 'db_dbnode' id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - uuid = Column(CHAR(36), default=get_new_uuid, nullable=False, unique=True) + uuid = Column(CHAR(32), default=get_new_uuid, nullable=False, unique=True) node_type = Column(String(255), default='', nullable=False, index=True) process_type = Column(String(255), index=True) label = Column(String(255), default='', index=True, nullable=False) diff --git a/aiida/storage/sqlite_zip/models.py b/aiida/storage/sqlite_zip/models.py index a81faa3dd0..a3922bc715 100644 --- a/aiida/storage/sqlite_zip/models.py +++ b/aiida/storage/sqlite_zip/models.py @@ -83,7 +83,7 @@ def pg_to_sqlite(pg_table: sa.Table): new = pg_table.to_metadata(SqliteBase.metadata) for column in new.columns: if isinstance(column.type, UUID): - column.type = sa.CHAR(32) + column.type = sa.String(32) elif isinstance(column.type, sa.DateTime): column.type = TZDateTime() elif isinstance(column.type, JSONB): diff --git a/tests/tools/archive/migration/test_migration.py b/tests/tools/archive/migration/test_legacy_migrations.py similarity index 100% rename from tests/tools/archive/migration/test_migration.py rename to 
tests/tools/archive/migration/test_legacy_migrations.py diff --git a/tests/tools/archive/test_schema.py b/tests/tools/archive/test_schema.py new file mode 100644 index 0000000000..175cc062cd --- /dev/null +++ b/tests/tools/archive/test_schema.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test the schema of the sqlite file within the archive.""" +from contextlib import suppress + +from archive_path import extract_file_in_zip +import pytest +from sqlalchemy import String, inspect +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.engine import Inspector +import yaml + +from aiida.manage.manager import get_manager +from aiida.storage.sqlite_zip import models, utils +from aiida.storage.sqlite_zip.migrator import get_schema_version_head, migrate +from tests.utils.archives import get_archive_file + + +@pytest.mark.usefixtures('aiida_profile_clean') +def test_psql_sync_init(tmp_path): + """Test the schema is in-sync with the ``psql_dos`` backend, when initialising a new archive.""" + psql_insp = inspect(get_manager().get_profile_storage().get_session().bind) + + engine = utils.create_sqla_engine(tmp_path / 'archive.sqlite') + models.SqliteBase.metadata.create_all(engine) + sqlite_insp = inspect(engine) + + diffs = diff_schemas(psql_insp, sqlite_insp) + if diffs: + raise AssertionError(f'Schema is not in-sync with the psql backend:\n{yaml.safe_dump(diffs)}') + + +@pytest.mark.usefixtures('aiida_profile_clean') +def test_psql_sync_migrate(tmp_path): + """Test the schema is in-sync with the 
``psql_dos`` backend, when migrating an old archive to the latest version.""" + psql_insp = inspect(get_manager().get_profile_storage().get_session().bind) + + # migrate an old archive + filepath_archive = get_archive_file('export_0.4_simple.aiida', 'export/migrate') + migrate(filepath_archive, tmp_path / 'archive.aiida', get_schema_version_head()) + + # extract the database + with tmp_path.joinpath('archive.sqlite').open('wb') as handle: + extract_file_in_zip(tmp_path / 'archive.aiida', 'db.sqlite3', handle) + + engine = utils.create_sqla_engine(tmp_path / 'archive.sqlite') + sqlite_insp = inspect(engine) + + diffs = diff_schemas(psql_insp, sqlite_insp) + if diffs: + raise AssertionError(f'Schema is not in-sync with the psql backend:\n{yaml.safe_dump(diffs)}') + + +def diff_schemas(psql_insp: Inspector, sqlite_insp: Inspector): # pylint: disable=too-many-branches + """Compare the reflected schemas of the two databases.""" + diffs = {} + + for table_name in sqlite_insp.get_table_names(): + if not table_name.startswith('db_') or table_name == 'db_dbsetting': + continue # not an aiida table + if table_name not in psql_insp.get_table_names(): + diffs[table_name] = 'additional' + for table_name in psql_insp.get_table_names(): + if not table_name.startswith('db_') or table_name == 'db_dbsetting': + continue # not an aiida table + if table_name not in sqlite_insp.get_table_names(): + diffs[table_name] = 'missing' + continue + psql_columns = {col['name']: col for col in psql_insp.get_columns(table_name)} + sqlite_columns = {col['name']: col for col in sqlite_insp.get_columns(table_name)} + for column_name in psql_columns: + # check existence + if column_name not in sqlite_columns: + diffs.setdefault(table_name, {})[column_name] = 'missing' + continue + # check type + psql_type = psql_columns[column_name]['type'] + sqlite_type = sqlite_columns[column_name]['type'] + # standardise types + with suppress(NotImplementedError): + psql_type = psql_type.as_generic() + with 
suppress(NotImplementedError): + sqlite_type = sqlite_type.as_generic() + if isinstance(psql_type, UUID): + psql_type = String(length=32) + if not isinstance(sqlite_type, type(psql_type)): + diffs.setdefault(table_name, {}).setdefault(column_name, {})['type'] = f'{sqlite_type} != {psql_type}' + elif isinstance(psql_type, String): + if psql_type.length != sqlite_type.length: + diffs.setdefault(table_name, + {}).setdefault(column_name, + {})['length'] = f'{sqlite_type.length} != {psql_type.length}' + # check nullability + psql_nullable = psql_columns[column_name]['nullable'] + sqlite_nullable = sqlite_columns[column_name]['nullable'] + if psql_nullable != sqlite_nullable: + diffs.setdefault(table_name, {}).setdefault(column_name, + {})['nullable'] = f'{sqlite_nullable} != {psql_nullable}' + + # compare unique constraints + psql_uq_constraints = [c['name'] for c in psql_insp.get_unique_constraints(table_name)] + sqlite_uq_constraints = [c['name'] for c in sqlite_insp.get_unique_constraints(table_name)] + for uq_constraint in psql_uq_constraints: + if uq_constraint not in sqlite_uq_constraints: + diffs.setdefault(table_name, {}).setdefault('uq_constraints', {})[uq_constraint] = 'missing' + for uq_constraint in sqlite_uq_constraints: + if uq_constraint not in psql_uq_constraints: + diffs.setdefault(table_name, {}).setdefault('uq_constraints', {})[uq_constraint] = 'additional' + + # compare foreign key constraints + psql_fk_constraints = [c['name'] for c in psql_insp.get_foreign_keys(table_name)] + sqlite_fk_constraints = [c['name'] for c in sqlite_insp.get_foreign_keys(table_name)] + for fk_constraint in psql_fk_constraints: + if fk_constraint not in sqlite_fk_constraints: + diffs.setdefault(table_name, {}).setdefault('fk_constraints', {})[fk_constraint] = 'missing' + for fk_constraint in sqlite_fk_constraints: + if fk_constraint not in psql_fk_constraints: + diffs.setdefault(table_name, {}).setdefault('fk_constraints', {})[fk_constraint] = 'additional' + + return diffs 
From 3ed4dc3faff61e325304b6fbbafbad8bc24c9bda Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Sun, 27 Feb 2022 18:37:34 +0100 Subject: [PATCH 17/26] test commenting out new tests --- tests/tools/archive/test_schema.py | 50 +++++++++++++++--------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/tests/tools/archive/test_schema.py b/tests/tools/archive/test_schema.py index 175cc062cd..ffa3d296ad 100644 --- a/tests/tools/archive/test_schema.py +++ b/tests/tools/archive/test_schema.py @@ -23,39 +23,39 @@ from tests.utils.archives import get_archive_file -@pytest.mark.usefixtures('aiida_profile_clean') -def test_psql_sync_init(tmp_path): - """Test the schema is in-sync with the ``psql_dos`` backend, when initialising a new archive.""" - psql_insp = inspect(get_manager().get_profile_storage().get_session().bind) +# @pytest.mark.usefixtures('aiida_profile_clean') +# def test_psql_sync_init(tmp_path): +# """Test the schema is in-sync with the ``psql_dos`` backend, when initialising a new archive.""" +# psql_insp = inspect(get_manager().get_profile_storage().get_session().bind) - engine = utils.create_sqla_engine(tmp_path / 'archive.sqlite') - models.SqliteBase.metadata.create_all(engine) - sqlite_insp = inspect(engine) +# engine = utils.create_sqla_engine(tmp_path / 'archive.sqlite') +# models.SqliteBase.metadata.create_all(engine) +# sqlite_insp = inspect(engine) - diffs = diff_schemas(psql_insp, sqlite_insp) - if diffs: - raise AssertionError(f'Schema is not in-sync with the psql backend:\n{yaml.safe_dump(diffs)}') +# diffs = diff_schemas(psql_insp, sqlite_insp) +# if diffs: +# raise AssertionError(f'Schema is not in-sync with the psql backend:\n{yaml.safe_dump(diffs)}') -@pytest.mark.usefixtures('aiida_profile_clean') -def test_psql_sync_migrate(tmp_path): - """Test the schema is in-sync with the ``psql_dos`` backend, when migrating an old archive to the latest version.""" - psql_insp = 
inspect(get_manager().get_profile_storage().get_session().bind) +# @pytest.mark.usefixtures('aiida_profile_clean') +# def test_psql_sync_migrate(tmp_path): +# """Test the schema is in-sync with the ``psql_dos`` backend, when migrating an old archive to the latest version.""" +# psql_insp = inspect(get_manager().get_profile_storage().get_session().bind) - # migrate an old archive - filepath_archive = get_archive_file('export_0.4_simple.aiida', 'export/migrate') - migrate(filepath_archive, tmp_path / 'archive.aiida', get_schema_version_head()) +# # migrate an old archive +# filepath_archive = get_archive_file('export_0.4_simple.aiida', 'export/migrate') +# migrate(filepath_archive, tmp_path / 'archive.aiida', get_schema_version_head()) - # extract the database - with tmp_path.joinpath('archive.sqlite').open('wb') as handle: - extract_file_in_zip(tmp_path / 'archive.aiida', 'db.sqlite3', handle) +# # extract the database +# with tmp_path.joinpath('archive.sqlite').open('wb') as handle: +# extract_file_in_zip(tmp_path / 'archive.aiida', 'db.sqlite3', handle) - engine = utils.create_sqla_engine(tmp_path / 'archive.sqlite') - sqlite_insp = inspect(engine) +# engine = utils.create_sqla_engine(tmp_path / 'archive.sqlite') +# sqlite_insp = inspect(engine) - diffs = diff_schemas(psql_insp, sqlite_insp) - if diffs: - raise AssertionError(f'Schema is not in-sync with the psql backend:\n{yaml.safe_dump(diffs)}') +# diffs = diff_schemas(psql_insp, sqlite_insp) +# if diffs: +# raise AssertionError(f'Schema is not in-sync with the psql backend:\n{yaml.safe_dump(diffs)}') def diff_schemas(psql_insp: Inspector, sqlite_insp: Inspector): # pylint: disable=too-many-branches From 7035711bb4ef43e23439b71289b62eb933b0d7b6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 27 Feb 2022 17:39:46 +0000 Subject: [PATCH 18/26] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci 
--- tests/tools/archive/test_schema.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/tools/archive/test_schema.py b/tests/tools/archive/test_schema.py index ffa3d296ad..33e1c449cf 100644 --- a/tests/tools/archive/test_schema.py +++ b/tests/tools/archive/test_schema.py @@ -22,7 +22,6 @@ from aiida.storage.sqlite_zip.migrator import get_schema_version_head, migrate from tests.utils.archives import get_archive_file - # @pytest.mark.usefixtures('aiida_profile_clean') # def test_psql_sync_init(tmp_path): # """Test the schema is in-sync with the ``psql_dos`` backend, when initialising a new archive.""" @@ -36,7 +35,6 @@ # if diffs: # raise AssertionError(f'Schema is not in-sync with the psql backend:\n{yaml.safe_dump(diffs)}') - # @pytest.mark.usefixtures('aiida_profile_clean') # def test_psql_sync_migrate(tmp_path): # """Test the schema is in-sync with the ``psql_dos`` backend, when migrating an old archive to the latest version.""" From 7111d903c5a9ccc9390c928224f0bd2ded21e6ff Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Mon, 28 Feb 2022 14:41:34 +0100 Subject: [PATCH 19/26] re-instate test_schema --- tests/tools/archive/test_schema.py | 59 +++++++++++++++++------------- 1 file changed, 33 insertions(+), 26 deletions(-) diff --git a/tests/tools/archive/test_schema.py b/tests/tools/archive/test_schema.py index 33e1c449cf..d08c7e5fd4 100644 --- a/tests/tools/archive/test_schema.py +++ b/tests/tools/archive/test_schema.py @@ -17,43 +17,50 @@ from sqlalchemy.engine import Inspector import yaml -from aiida.manage.manager import get_manager +from aiida import get_profile +from aiida.storage.psql_dos.utils import create_sqlalchemy_engine from aiida.storage.sqlite_zip import models, utils from aiida.storage.sqlite_zip.migrator import get_schema_version_head, migrate from tests.utils.archives import get_archive_file -# @pytest.mark.usefixtures('aiida_profile_clean') -# def test_psql_sync_init(tmp_path): -# """Test the schema is in-sync with the ``psql_dos`` 
backend, when initialising a new archive.""" -# psql_insp = inspect(get_manager().get_profile_storage().get_session().bind) -# engine = utils.create_sqla_engine(tmp_path / 'archive.sqlite') -# models.SqliteBase.metadata.create_all(engine) -# sqlite_insp = inspect(engine) +@pytest.mark.usefixtures('aiida_profile_clean') +def test_psql_sync_init(tmp_path): + """Test the schema is in-sync with the ``psql_dos`` backend, when initialising a new archive.""" + # note, directly using the global profile's engine here left connections open + with create_sqlalchemy_engine(get_profile().storage_config).connect() as conn: + psql_insp = inspect(conn) -# diffs = diff_schemas(psql_insp, sqlite_insp) -# if diffs: -# raise AssertionError(f'Schema is not in-sync with the psql backend:\n{yaml.safe_dump(diffs)}') + engine = utils.create_sqla_engine(tmp_path / 'archive.sqlite') + models.SqliteBase.metadata.create_all(engine) + sqlite_insp = inspect(engine) -# @pytest.mark.usefixtures('aiida_profile_clean') -# def test_psql_sync_migrate(tmp_path): -# """Test the schema is in-sync with the ``psql_dos`` backend, when migrating an old archive to the latest version.""" -# psql_insp = inspect(get_manager().get_profile_storage().get_session().bind) + diffs = diff_schemas(psql_insp, sqlite_insp) + if diffs: + raise AssertionError(f'Schema is not in-sync with the psql backend:\n{yaml.safe_dump(diffs)}') -# # migrate an old archive -# filepath_archive = get_archive_file('export_0.4_simple.aiida', 'export/migrate') -# migrate(filepath_archive, tmp_path / 'archive.aiida', get_schema_version_head()) -# # extract the database -# with tmp_path.joinpath('archive.sqlite').open('wb') as handle: -# extract_file_in_zip(tmp_path / 'archive.aiida', 'db.sqlite3', handle) +@pytest.mark.usefixtures('aiida_profile_clean') +def test_psql_sync_migrate(tmp_path): + """Test the schema is in-sync with the ``psql_dos`` backend, when migrating an old archive to the latest version.""" + # note, directly using the global 
profile's engine here left connections open + with create_sqlalchemy_engine(get_profile().storage_config).connect() as conn: + psql_insp = inspect(conn) -# engine = utils.create_sqla_engine(tmp_path / 'archive.sqlite') -# sqlite_insp = inspect(engine) + # migrate an old archive + filepath_archive = get_archive_file('export_0.4_simple.aiida', 'export/migrate') + migrate(filepath_archive, tmp_path / 'archive.aiida', get_schema_version_head()) -# diffs = diff_schemas(psql_insp, sqlite_insp) -# if diffs: -# raise AssertionError(f'Schema is not in-sync with the psql backend:\n{yaml.safe_dump(diffs)}') + # extract the database + with tmp_path.joinpath('archive.sqlite').open('wb') as handle: + extract_file_in_zip(tmp_path / 'archive.aiida', 'db.sqlite3', handle) + + engine = utils.create_sqla_engine(tmp_path / 'archive.sqlite') + sqlite_insp = inspect(engine) + + diffs = diff_schemas(psql_insp, sqlite_insp) + if diffs: + raise AssertionError(f'Schema is not in-sync with the psql backend:\n{yaml.safe_dump(diffs)}') def diff_schemas(psql_insp: Inspector, sqlite_insp: Inspector): # pylint: disable=too-many-branches From 8078d9945bd0263e08e055aa95fe5ff535d84b79 Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Fri, 4 Mar 2022 14:05:57 +0100 Subject: [PATCH 20/26] improve typing --- aiida/storage/psql_dos/migrator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aiida/storage/psql_dos/migrator.py b/aiida/storage/psql_dos/migrator.py index 64a7c210d7..fc85d30bf8 100644 --- a/aiida/storage/psql_dos/migrator.py +++ b/aiida/storage/psql_dos/migrator.py @@ -283,7 +283,7 @@ def _alembic_script(cls): return ScriptDirectory.from_config(cls._alembic_config()) @contextlib.contextmanager - def _alembic_connect(self, _connection: Optional[Connection] = None): + def _alembic_connect(self, _connection: Optional[Connection] = None) -> Iterator[Config]: """Context manager to return an instance of an Alembic configuration. 
The profiles's database connection is added in the `attributes` property, through which it can then also be @@ -304,7 +304,7 @@ def _callback(step: MigrationInfo, **kwargs): # pylint: disable=unused-argument yield config @contextlib.contextmanager - def _migration_context(self, _connection: Optional[Connection] = None) -> MigrationContext: + def _migration_context(self, _connection: Optional[Connection] = None) -> Iterator[MigrationContext]: """Context manager to return an instance of an Alembic migration context. This migration context will have been configured with the current database connection, which allows this context From f0991b340f4944cf802445a1993e7620689c640b Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Fri, 4 Mar 2022 15:26:02 +0100 Subject: [PATCH 21/26] Handle dangling nodes in links/groups for legacy_to_main --- .../sqlite_zip/migrations/legacy_to_main.py | 32 ++++++++++++------ .../export/migrate/0.10_dangling_link.aiida | Bin 0 -> 2783 bytes .../migrate/0.10_unknown_nodes_in_group.aiida | Bin 0 -> 4452 bytes .../archive/migration/test_legacy_to_main.py | 29 ++++++++++++++++ 4 files changed, 51 insertions(+), 10 deletions(-) create mode 100644 tests/static/export/migrate/0.10_dangling_link.aiida create mode 100644 tests/static/export/migrate/0.10_unknown_nodes_in_group.aiida create mode 100644 tests/tools/archive/migration/test_legacy_to_main.py diff --git a/aiida/storage/sqlite_zip/migrations/legacy_to_main.py b/aiida/storage/sqlite_zip/migrations/legacy_to_main.py index c820a2cc9a..2645367a29 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy_to_main.py +++ b/aiida/storage/sqlite_zip/migrations/legacy_to_main.py @@ -125,7 +125,7 @@ def in_archive_context(_inpath): with (new_zip / f'{REPO_FOLDER}/{hashkey}').open(mode='wb') as handle2: shutil.copyfileobj(handle, handle2) node_repos.setdefault(uuid, []).append((posix_rel.as_posix(), hashkey)) - MIGRATE_LOGGER.report(f'Unique files written: {len(central_dir)}') + 
MIGRATE_LOGGER.report(f'Unique repository files written: {len(central_dir)}') # convert the JSON database to SQLite _json_to_sqlite(working / DB_FILENAME, data, node_repos) @@ -143,7 +143,7 @@ def in_archive_context(_inpath): return working / DB_FILENAME -def _json_to_sqlite( +def _json_to_sqlite( # pylint: disable=too-many-branches,too-many-locals outpath: Path, data: dict, node_repos: Dict[str, List[Tuple[str, Optional[str]]]], batch_size: int = 100 ) -> None: """Convert a JSON archive format to SQLite.""" @@ -188,9 +188,17 @@ def _json_to_sqlite( if data['links_uuid']: def _transform_link(link_row): + try: + input_id = node_uuid_map[link_row['input']] + except KeyError: + raise StorageMigrationError(f'Database contains link with unknown input node: {link_row}') + try: + output_id = node_uuid_map[link_row['output']] + except KeyError: + raise StorageMigrationError(f'Database contains link with unknown output node: {link_row}') return { - 'input_id': node_uuid_map[link_row['input']], - 'output_id': node_uuid_map[link_row['output']], + 'input_id': input_id, + 'output_id': output_id, 'label': link_row['label'], 'type': link_row['type'] } @@ -207,16 +215,20 @@ def _transform_link(link_row): uuid: pk for uuid, pk in connection.execute(select(v1_schema.DbGroup.uuid, v1_schema.DbGroup.id)) # pylint: disable=unnecessary-comprehension } length = sum(len(uuids) for uuids in data['groups_uuid'].values()) + unknown_nodes = {} with get_progress_reporter()(desc='Adding Group-Nodes', total=length) as progress: for group_uuid, node_uuids in data['groups_uuid'].items(): group_id = group_uuid_map[group_uuid] - connection.execute( - insert(v1_schema.DbGroupNodes.__table__), [{ - 'dbnode_id': node_uuid_map[uuid], - 'dbgroup_id': group_id - } for uuid in node_uuids] - ) + rows = [] + for uuid in node_uuids: + if uuid in node_uuid_map: + rows.append({'dbnode_id': node_uuid_map[uuid], 'dbgroup_id': group_id}) + else: + unknown_nodes.setdefault(group_uuid, set()).add(uuid) + 
connection.execute(insert(v1_schema.DbGroupNodes.__table__), rows) progress.update(len(node_uuids)) + if unknown_nodes: + MIGRATE_LOGGER.warning(f'Dropped unknown nodes in groups: {unknown_nodes}') def _convert_datetime(key, value): diff --git a/tests/static/export/migrate/0.10_dangling_link.aiida b/tests/static/export/migrate/0.10_dangling_link.aiida new file mode 100644 index 0000000000000000000000000000000000000000..7bb644795f32b13266c5f351151aa22c2229f156 GIT binary patch literal 2783 zcmWIWW@h1H0D(=8;UQoKlwf0!VaUr*NiEh74dG;9UT{w|X+02^R&X;gvb3=9)ZbkkB2Q*=$z3{rG0%#w|DO^nTr6HSvW zERxNVAl7J+?_VP`B3+6eW~W^$Yx9NT@xK&1A5{KhAh!JjUg&8!r15fl`>qCT-` zvYOwP%hSwSrzZZ}?OA45woCWKH&xZ=^JW(p`SX>3f9RYWKAUTg{G3^PBrksZH=%(g z=$y;BBWKf?+?ab;9^NOfR@~V4^8JqwYc^MYVg8eUoomkOg|`mw;8suyW7xWY=lUE8 z)iq3ft<~O=kT~OGzDthgU7?wrdMhWqc&M5?D`t-^%N~^$r@mZX({s8jH|m%k zw-i6PYEsbjtj_)MmecY%&$B2`%Ab~zsq!y>{))dBGL8FYwz>K9R$Slu@_+gAP1_Uy z-dI_ocr$3Rn$OZsU$L{jX&e%&+!1R_w!e42l;mn{dN!z6^6P_N8X0dpCV$&J{l<%B zFDhjJPVL=i&pG@2be;ZfjTggpZSH<<_hfz7B=ov1JvF|UONae}-ql@Z33XP|)!(1~ zUB1+4Gy8Ns^L6>h>cr2Vi+X%<*74aD=Ij0%hb?dUyF=t`%x*IuMb#BL5|_^;o#H*X za}rDQQkm?fOVwuIGV)5-3N-&I{9BZ9Uz_^g1CkYf>6gNf+MJvJ@%-fT@y_O7#TU0f zzWY%gRMJ3F2Lpp5LlEPFIxi*$hH`cW23}z5$W1LtL`@+drk@kqh?zoqLw)ma8}R(q zZawEcK`B%0&91|Ghx4pOTNS;xEDoC%mD?ZPW4^0RdUW}+vg=;siq*Qm z-mqTK%)Zetz3ukBcQ>|6ADcRZE?i1IB{D7P{hRg7J=|yKp66Mw z5Fx*1cYwu5jmLXF_4@C)$SN9cC3Wyu=IUJ^br-nj#i+def7SNbO)L8&Qy#Fas_J-o zL^5SDqkE@lh2GBRbMKmXmA#fTkf>kN!zLUT`u3F6rN!!pIM{E9EQz|G$5G|t^;xFm z{EEp!4|fJ~=sj_()$CZn=~`B0G?U#xJTXH z|J*E_q{8=sPfK1cmp*(hK4Z33-map54t<*Mjh^f*S>73NKAavp~qVV%oSw=5{<{kyH934eONiU0b? 
zz1NTN{-X7tL^@A0T|Zx)sA*p6yeA|1N-^`yNn89cGB}&d#J=T`>YuN2B30^3+4W6( zl|}cN>n~CZf3?8kSXSSWTjj^6pLkvDwpY|m_qybz6JCO;9d2qn#3B#e9o)pHr!y6(%|JR;> z|LL-)O!2n~`}7t{OWmv8{#9m|M@{zh%a?a^#AkdDzh__bhdscXkx7mjSLG=ItwseH z{yKt~u&R_5Qk7yK%|s%q zTcDMo>Xz&FQhY0zoyd~;Y zzRI~0B7{aR>Z>&-u-}OGb{@1nbx^_R$@A=)&eLu6a5)wuMaPZ|4 z=IsRhH;#({fEb_%cvD$aoIf)t(AjAT0Mn6Grs;x@Dfom&AOHd3F#!Moe+~eNPTl^G z@sd$3F2V*D=0)C(L+6eC+$m~@&e^UxdB4_e&Gv3rg0VtHPd*ixnGQ}$tF3MMiXK=`tbqZ*rTh7shFn%4N=cFH*EeO+(IVfKAlORGnh#xMJ7O@^1)j`?JLS2o{p4^C;>UyRB3wP2B6MVy;+BceJFFKgO>5_m^4HJ>-h}P!X zE*-Dv$75r!>!ylJ@&6Ls=!VHw@VJ?9ExA2{W%gTkT;Q;+oUyDPcYIpKQ$|U@Q~!Y2 z(&@OA)Ke!sR_hJu>1_)`t#v9L3H8fXwhxWfE4WsPaL5eKLRTFfL^4lcD(bJ+BsFNQ z_4~v0l1=G^0Wuq>;NNPJETOJnHl4W3)8iU;Vj^VkIO?Ay?M*X(k=0JY(b8U&{|211|Huf;HQ{R>vivzM^z7*G- z=SJ!$PGuLQx!Q4(^V5=DJsjLmWh5oJCgsaZBDGBfEFxE+F zEhSXlQ?d~c-7mVF%1F-eNJ`IMk#aKapW@nUS6JdT@%1?uu#M`?n;XbuO;+SfUS7c* zCyL;OV+e!uh>$$S*!jXT%8jzE?+clk! z8|pjjn~-3qU~p9lW=AJDR#1QDItKtsya5HmVF0iM?A$IN7WM0vjifb8U3r408GYZf zL}%-uZai{o{h?+nj=JB_Qr) zwIB->T@|jD+Y#TX_En4~+LqiGLA&V7RgL=c>T*KZ!B7M~+#78XfJhv8RHeQ<|wFKOFr)}i>4(C#Zj)P0T!azclXB^__h z*hCBA+P`28p2t)-d-`VT2bS&rY;;VGOXx3Uoj>BF#H^I8`iI^bzlMM*{JO^Rtm;3W z>L@3AweRuWn#!(@ZXXehe3h3cTH&#?Wv3m$#WVJG+PHD|BMgbypmKO-t_1fHHL z97P?uE7;Mpm#U^4YEt1#f5;R{Pr7P*A289C-}VM6cRYJ_y^q{qVI4)4tI#)eH=I#% z8O$Nx@jq766<(KCKg2YzM4MwfWFOR1mife+tCUE)QQw`Qt6EoGizS*)cs18LV#YcT z<`^mN5p|-3}NZOWxG6~?$ph2*XGj1hjx?-{4b?oL@p-=K++ z1M&uye>IW(KU_ceF@bx$ya#KzQ~d6Wmv+PbFIlLGd(`Qu7Vc;pS)TcUa|3r8Blje# zcuc;`a$i7NTU$BY)Aysx6T^LzFo>iM0y)tXaJ(TZ&QDSlNM9L{qTd8==$nmx?dWDx4htk~g$Vy$cp8Tkhtd3K|+E|mx>SSjL z_~Cp5p_-LQV#XmQdSfhkC|YmVFMlF1>$X*Sn!K+AkNeMd~)q+PK^#@>v#`BRU>`u<&5_VEP+ z6Fy}lspt0C-gWI{Z0^P5hXv|3w&gll?&o}#ltQ`-(T@n!zmnX8fg4Yzq7j6rEqio z&EDqz2cM@2`={3)oTOg8s5|B!jT2t}VEa(qXyjYP&U-$QaMj_ohp`^dS4sp2y(Fa+XRUBX#h93ldn6HenG+ z%x|Uy`%O)F4XPHF;Z60%YBBKMSfUPH6RV}Et4Y_SDLi?Ksy~T*fGAKNz4w(A~rn!4AAbwZ#dBI!wYuI z86jYCo6xt={Q3?#7BAQ_7NTDjzbOCWWr5K6l>_<=c?B<~NSdIBF4Q?@+7A8p+eziW 
z%_$P(%ZqT5x!Z(F<(EImOL&nRb7K~Eew~}gS$KWm85O^LK?=%?+?X|DeBEBOp7QcQ zct7vWZ7|Oa{DP$M$1R7%krz`WF>xO3LZxz{$d0qf3zg0} Date: Fri, 4 Mar 2022 18:28:25 +0100 Subject: [PATCH 22/26] Handle null fields in legacy archives --- .../sqlite_zip/migrations/legacy_to_main.py | 2 +- .../sqlite_zip/migrations/v1_db_schema.py | 71 +++++---- .../migrations/versions/main_0000_initial.py | 7 + .../versions/main_0000a_replace_nulls.py | 146 ++++++++++++++++++ .../versions/main_0000b_non_nullable.py | 79 ++++++++++ .../migrations/versions/main_0001.py | 2 +- aiida/storage/sqlite_zip/migrator.py | 8 +- aiida/storage/sqlite_zip/models.py | 2 + .../export/migrate/export_v0.10_simple.aiida | Bin 84109 -> 0 bytes .../export/migrate/0.10_null_fields.aiida | Bin 0 -> 4357 bytes .../archive/migration/test_legacy_to_main.py | 6 + tests/tools/archive/test_schema.py | 16 +- 12 files changed, 300 insertions(+), 39 deletions(-) create mode 100644 aiida/storage/sqlite_zip/migrations/versions/main_0000a_replace_nulls.py create mode 100644 aiida/storage/sqlite_zip/migrations/versions/main_0000b_non_nullable.py delete mode 100644 tests/fixtures/export/migrate/export_v0.10_simple.aiida create mode 100644 tests/static/export/migrate/0.10_null_fields.aiida diff --git a/aiida/storage/sqlite_zip/migrations/legacy_to_main.py b/aiida/storage/sqlite_zip/migrations/legacy_to_main.py index 2645367a29..1b03651489 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy_to_main.py +++ b/aiida/storage/sqlite_zip/migrations/legacy_to_main.py @@ -232,7 +232,7 @@ def _transform_link(link_row): def _convert_datetime(key, value): - if key in ('time', 'ctime', 'mtime'): + if key in ('time', 'ctime', 'mtime') and value is not None: return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f') return value diff --git a/aiida/storage/sqlite_zip/migrations/v1_db_schema.py b/aiida/storage/sqlite_zip/migrations/v1_db_schema.py index 6a1a0f60c6..bad4f14ac0 100644 --- a/aiida/storage/sqlite_zip/migrations/v1_db_schema.py +++ 
b/aiida/storage/sqlite_zip/migrations/v1_db_schema.py @@ -7,7 +7,8 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""This is the sqlite DB schema, coresponding to the `main_0001` revision of the `psql_dos` backend. +"""This is the sqlite DB schema, coresponding to the `main_0000` revision of the `sqlite_zip` backend, +see: `versions/main_0000_initial.py` For normal operation of the archive, we auto-generate the schema from the models in ``aiida.storage.psql_dos.models``. @@ -46,18 +47,18 @@ class DbAuthInfo(ArchiveV1Base): aiidauser_id = Column( Integer, ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), - nullable=False, + nullable=True, index=True ) dbcomputer_id = Column( Integer, ForeignKey('db_dbcomputer.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), - nullable=False, + nullable=True, index=True ) - _metadata = Column('metadata', JSON, default=dict, nullable=False) - auth_params = Column(JSON, default=dict, nullable=False) - enabled = Column(Boolean, default=True, nullable=False) + _metadata = Column('metadata', JSON, default=dict, nullable=True) + auth_params = Column(JSON, default=dict, nullable=True) + enabled = Column(Boolean, default=True, nullable=True) class DbComment(ArchiveV1Base): @@ -70,18 +71,18 @@ class DbComment(ArchiveV1Base): dbnode_id = Column( Integer, ForeignKey('db_dbnode.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), - nullable=False, + nullable=True, index=True ) - ctime = Column(DateTime(timezone=True), default=timezone.now, nullable=False) - mtime = Column(DateTime(timezone=True), default=timezone.now, nullable=False) + ctime = Column(DateTime(timezone=True), default=timezone.now, nullable=True) + mtime = Column(DateTime(timezone=True), default=timezone.now, nullable=True) user_id = Column( Integer, 
ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), - nullable=False, + nullable=True, index=True ) - content = Column(Text, default='', nullable=False) + content = Column(Text, default='', nullable=True) class DbComputer(ArchiveV1Base): @@ -91,11 +92,11 @@ class DbComputer(ArchiveV1Base): id = Column(Integer, primary_key=True) # pylint: disable=invalid-name uuid = Column(CHAR(32), default=get_new_uuid, nullable=False, unique=True) label = Column(String(255), unique=True, nullable=False) - hostname = Column(String(255), default='', nullable=False) - description = Column(Text, default='', nullable=False) - scheduler_type = Column(String(255), default='', nullable=False) - transport_type = Column(String(255), default='', nullable=False) - _metadata = Column('metadata', JSON, default=dict, nullable=False) + hostname = Column(String(255), default='', nullable=True) + description = Column(Text, default='', nullable=True) + scheduler_type = Column(String(255), default='', nullable=True) + transport_type = Column(String(255), default='', nullable=True) + _metadata = Column('metadata', JSON, default=dict, nullable=True) class DbGroupNodes(ArchiveV1Base): @@ -122,9 +123,9 @@ class DbGroup(ArchiveV1Base): id = Column(Integer, primary_key=True) # pylint: disable=invalid-name uuid = Column(CHAR(32), default=get_new_uuid, nullable=False, unique=True) label = Column(String(255), nullable=False, index=True) - type_string = Column(String(255), default='', nullable=False, index=True) - time = Column(DateTime(timezone=True), default=timezone.now, nullable=False) - description = Column(Text, default='', nullable=False) + type_string = Column(String(255), default='', nullable=True, index=True) + time = Column(DateTime(timezone=True), default=timezone.now, nullable=True) + description = Column(Text, default='', nullable=True) extras = Column(JSON, default=dict, nullable=False) user_id = Column( Integer, @@ -141,17 +142,17 @@ class 
DbLog(ArchiveV1Base): id = Column(Integer, primary_key=True) # pylint: disable=invalid-name uuid = Column(CHAR(32), default=get_new_uuid, nullable=False, unique=True) - time = Column(DateTime(timezone=True), default=timezone.now, nullable=False) - loggername = Column(String(255), default='', nullable=False, index=True) - levelname = Column(String(50), default='', nullable=False, index=True) + time = Column(DateTime(timezone=True), default=timezone.now, nullable=True) + loggername = Column(String(255), default='', nullable=True, index=True) + levelname = Column(String(50), default='', nullable=True, index=True) dbnode_id = Column( Integer, ForeignKey('db_dbnode.id', deferrable=True, initially='DEFERRED', ondelete='CASCADE'), nullable=False, index=True ) - message = Column(Text(), default='', nullable=False) - _metadata = Column('metadata', JSON, default=dict, nullable=False) + message = Column(Text(), default='', nullable=True) + _metadata = Column('metadata', JSON, default=dict, nullable=True) class DbNode(ArchiveV1Base): @@ -163,20 +164,24 @@ class DbNode(ArchiveV1Base): uuid = Column(CHAR(32), default=get_new_uuid, nullable=False, unique=True) node_type = Column(String(255), default='', nullable=False, index=True) process_type = Column(String(255), index=True) - label = Column(String(255), default='', index=True, nullable=False) - description = Column(Text(), default='', nullable=False) - ctime = Column(DateTime(timezone=True), default=timezone.now, nullable=False, index=True) - mtime = Column(DateTime(timezone=True), default=timezone.now, nullable=False, index=True) + label = Column(String(255), default='', index=True, nullable=True) + description = Column(Text(), default='', nullable=True) + ctime = Column(DateTime(timezone=True), default=timezone.now, nullable=True, index=True) + mtime = Column(DateTime(timezone=True), default=timezone.now, nullable=True, index=True) attributes = Column(JSON) extras = Column(JSON) repository_metadata = Column(JSON, 
nullable=False, default=dict, server_default='{}') dbcomputer_id = Column( Integer, ForeignKey('db_dbcomputer.id', deferrable=True, initially='DEFERRED', ondelete='RESTRICT'), - nullable=True + nullable=True, + index=True ) user_id = Column( - Integer, ForeignKey('db_dbuser.id', deferrable=True, initially='DEFERRED', ondelete='restrict'), nullable=False + Integer, + ForeignKey('db_dbuser.id', deferrable=True, initially='DEFERRED', ondelete='restrict'), + nullable=False, + index=True ) @@ -206,6 +211,6 @@ class DbUser(ArchiveV1Base): id = Column(Integer, primary_key=True) # pylint: disable=invalid-name email = Column(String(254), nullable=False, unique=True) - first_name = Column(String(254), default='', nullable=False) - last_name = Column(String(254), default='', nullable=False) - institution = Column(String(254), default='', nullable=False) + first_name = Column(String(254), default='', nullable=True) + last_name = Column(String(254), default='', nullable=True) + institution = Column(String(254), default='', nullable=True) diff --git a/aiida/storage/sqlite_zip/migrations/versions/main_0000_initial.py b/aiida/storage/sqlite_zip/migrations/versions/main_0000_initial.py index 65b236ab0a..d45772daaa 100644 --- a/aiida/storage/sqlite_zip/migrations/versions/main_0000_initial.py +++ b/aiida/storage/sqlite_zip/migrations/versions/main_0000_initial.py @@ -10,6 +10,13 @@ # pylint: disable=invalid-name,no-member """Initial main branch schema +This schema is mainly equivalent to the `main_0001` schema of the `psql_dos` backend. +The difference are: + +1. Data types: the replacement of ``JSONB`` with ``JSON``, and ``UUID`` with ``CHAR(32)``. +2. Some more fields are nullable, to allow migrations from legacy to main. + The nullable fields are then filled with default values, and set to non-nullable, in subsequent migrations. 
+ Revision ID: main_0000 Revises: Create Date: 2021-02-02 diff --git a/aiida/storage/sqlite_zip/migrations/versions/main_0000a_replace_nulls.py b/aiida/storage/sqlite_zip/migrations/versions/main_0000a_replace_nulls.py new file mode 100644 index 0000000000..3769d29b20 --- /dev/null +++ b/aiida/storage/sqlite_zip/migrations/versions/main_0000a_replace_nulls.py @@ -0,0 +1,146 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Replace null values with defaults + +Revision ID: main_0000a +Revises: main_0000 +Create Date: 2022-03-04 + +""" +from alembic import op +import sqlalchemy as sa + +from aiida.common import timezone + +# revision identifiers, used by Alembic. +revision = 'main_0000a' +down_revision = 'main_0000' +branch_labels = None +depends_on = None + + +def upgrade(): # pylint: disable=too-many-statements + """Convert null values to default values. + + This migration is performed in preparation for the next migration, + which will make these fields non-nullable. 
+ """ + db_dbauthinfo = sa.sql.table( + 'db_dbauthinfo', + sa.sql.column('aiidauser_id', sa.Integer), + sa.sql.column('dbcomputer_id', sa.Integer), + sa.Column('enabled', sa.Boolean), + sa.Column('auth_params', sa.JSON), + sa.Column('metadata', sa.JSON()), + ) + + # remove rows with null values, which may have previously resulted from deletion of a user or computer + op.execute(db_dbauthinfo.delete().where(db_dbauthinfo.c.aiidauser_id.is_(None))) + op.execute(db_dbauthinfo.delete().where(db_dbauthinfo.c.dbcomputer_id.is_(None))) + + op.execute(db_dbauthinfo.update().where(db_dbauthinfo.c.enabled.is_(None)).values(enabled=True)) + op.execute(db_dbauthinfo.update().where(db_dbauthinfo.c.auth_params.is_(None)).values(auth_params={})) + op.execute(db_dbauthinfo.update().where(db_dbauthinfo.c.metadata.is_(None)).values(metadata={})) + + db_dbcomment = sa.sql.table( + 'db_dbcomment', + sa.sql.column('dbnode_id', sa.Integer), + sa.sql.column('user_id', sa.Integer), + sa.Column('content', sa.Text), + sa.Column('ctime', sa.DateTime(timezone=True)), + sa.Column('mtime', sa.DateTime(timezone=True)), + sa.Column('uuid', sa.CHAR(32)), + ) + + # remove rows with null values, which may have previously resulted from deletion of a node or user + op.execute(db_dbcomment.delete().where(db_dbcomment.c.dbnode_id.is_(None))) + op.execute(db_dbcomment.delete().where(db_dbcomment.c.user_id.is_(None))) + + op.execute(db_dbcomment.update().where(db_dbcomment.c.content.is_(None)).values(content='')) + op.execute(db_dbcomment.update().where(db_dbcomment.c.ctime.is_(None)).values(ctime=timezone.now())) + op.execute(db_dbcomment.update().where(db_dbcomment.c.mtime.is_(None)).values(mtime=timezone.now())) + + db_dbcomputer = sa.sql.table( + 'db_dbcomputer', + sa.Column('description', sa.Text), + sa.Column('hostname', sa.String(255)), + sa.Column('metadata', sa.JSON()), + sa.Column('scheduler_type', sa.String(255)), + sa.Column('transport_type', sa.String(255)), + sa.Column('uuid', sa.CHAR(32)), 
+ ) + + op.execute(db_dbcomputer.update().where(db_dbcomputer.c.description.is_(None)).values(description='')) + op.execute(db_dbcomputer.update().where(db_dbcomputer.c.hostname.is_(None)).values(hostname='')) + op.execute(db_dbcomputer.update().where(db_dbcomputer.c.metadata.is_(None)).values(metadata={})) + op.execute(db_dbcomputer.update().where(db_dbcomputer.c.scheduler_type.is_(None)).values(scheduler_type='')) + op.execute(db_dbcomputer.update().where(db_dbcomputer.c.transport_type.is_(None)).values(transport_type='')) + + db_dbgroup = sa.sql.table( + 'db_dbgroup', + sa.Column('description', sa.Text), + sa.Column('label', sa.String(255)), + sa.Column('time', sa.DateTime(timezone=True)), + sa.Column('type_string', sa.String(255)), + sa.Column('uuid', sa.CHAR(32)), + ) + + op.execute(db_dbgroup.update().where(db_dbgroup.c.description.is_(None)).values(description='')) + op.execute(db_dbgroup.update().where(db_dbgroup.c.time.is_(None)).values(time=timezone.now())) + op.execute(db_dbgroup.update().where(db_dbgroup.c.type_string.is_(None)).values(type_string='core')) + + db_dblog = sa.sql.table( + 'db_dblog', + sa.Column('levelname', sa.String(255)), + sa.Column('loggername', sa.String(255)), + sa.Column('message', sa.Text), + sa.Column('metadata', sa.JSON()), + sa.Column('time', sa.DateTime(timezone=True)), + sa.Column('uuid', sa.CHAR(32)), + ) + + op.execute(db_dblog.update().where(db_dblog.c.levelname.is_(None)).values(levelname='')) + op.execute(db_dblog.update().where(db_dblog.c.loggername.is_(None)).values(loggername='')) + op.execute(db_dblog.update().where(db_dblog.c.message.is_(None)).values(message='')) + op.execute(db_dblog.update().where(db_dblog.c.metadata.is_(None)).values(metadata={})) + op.execute(db_dblog.update().where(db_dblog.c.time.is_(None)).values(time=timezone.now())) + + db_dbnode = sa.sql.table( + 'db_dbnode', + sa.Column('ctime', sa.DateTime(timezone=True)), + sa.Column('description', sa.Text), + sa.Column('label', sa.String(255)), + 
sa.Column('mtime', sa.DateTime(timezone=True)), + sa.Column('node_type', sa.String(255)), + sa.Column('uuid', sa.CHAR(32)), + ) + + op.execute(db_dbnode.update().where(db_dbnode.c.ctime.is_(None)).values(ctime=timezone.now())) + op.execute(db_dbnode.update().where(db_dbnode.c.description.is_(None)).values(description='')) + op.execute(db_dbnode.update().where(db_dbnode.c.label.is_(None)).values(label='')) + op.execute(db_dbnode.update().where(db_dbnode.c.mtime.is_(None)).values(mtime=timezone.now())) + + db_dbuser = sa.sql.table( + 'db_dbuser', + sa.Column('email', sa.String(254)), + sa.Column('first_name', sa.String(254)), + sa.Column('last_name', sa.String(254)), + sa.Column('institution', sa.String(254)), + ) + + op.execute(db_dbuser.update().where(db_dbuser.c.first_name.is_(None)).values(first_name='')) + op.execute(db_dbuser.update().where(db_dbuser.c.last_name.is_(None)).values(last_name='')) + op.execute(db_dbuser.update().where(db_dbuser.c.institution.is_(None)).values(institution='')) + + +def downgrade(): + """Downgrade database schema.""" + raise NotImplementedError('Downgrade of main_0000a.') diff --git a/aiida/storage/sqlite_zip/migrations/versions/main_0000b_non_nullable.py b/aiida/storage/sqlite_zip/migrations/versions/main_0000b_non_nullable.py new file mode 100644 index 0000000000..69d0119c8e --- /dev/null +++ b/aiida/storage/sqlite_zip/migrations/versions/main_0000b_non_nullable.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Alter columns to be non-nullable (to bring inline with psql_dos main_0001). + +Revision ID: main_0000b +Revises: main_0000a +Create Date: 2022-03-04 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = 'main_0000b' +down_revision = 'main_0000a' +branch_labels = None +depends_on = None + + +def upgrade(): + """Upgrade database schema.""" + # see https://alembic.sqlalchemy.org/en/latest/batch.html#running-batch-migrations-for-sqlite-and-other-databases + # for why we run these in batches + with op.batch_alter_table('db_dbauthinfo') as batch_op: + batch_op.alter_column('aiidauser_id', existing_type=sa.INTEGER(), nullable=False) + batch_op.alter_column('dbcomputer_id', existing_type=sa.INTEGER(), nullable=False) + batch_op.alter_column('metadata', existing_type=sa.JSON(), nullable=False) + batch_op.alter_column('auth_params', existing_type=sa.JSON(), nullable=False) + batch_op.alter_column('enabled', existing_type=sa.BOOLEAN(), nullable=False) + + with op.batch_alter_table('db_dbcomment') as batch_op: + batch_op.alter_column('dbnode_id', existing_type=sa.INTEGER(), nullable=False) + batch_op.alter_column('user_id', existing_type=sa.INTEGER(), nullable=False) + batch_op.alter_column('content', existing_type=sa.TEXT(), nullable=False) + batch_op.alter_column('ctime', existing_type=sa.DateTime(timezone=True), nullable=False) + batch_op.alter_column('mtime', existing_type=sa.DateTime(timezone=True), nullable=False) + + with op.batch_alter_table('db_dbcomputer') as batch_op: + batch_op.alter_column('description', existing_type=sa.TEXT(), nullable=False) + 
batch_op.alter_column('hostname', existing_type=sa.String(255), nullable=False) + batch_op.alter_column('metadata', existing_type=sa.JSON(), nullable=False) + batch_op.alter_column('scheduler_type', existing_type=sa.String(255), nullable=False) + batch_op.alter_column('transport_type', existing_type=sa.String(255), nullable=False) + + with op.batch_alter_table('db_dbgroup') as batch_op: + batch_op.alter_column('description', existing_type=sa.TEXT(), nullable=False) + batch_op.alter_column('time', existing_type=sa.DateTime(timezone=True), nullable=False) + batch_op.alter_column('type_string', existing_type=sa.String(255), nullable=False) + + with op.batch_alter_table('db_dblog') as batch_op: + batch_op.alter_column('levelname', existing_type=sa.String(50), nullable=False) + batch_op.alter_column('loggername', existing_type=sa.String(255), nullable=False) + batch_op.alter_column('message', existing_type=sa.TEXT(), nullable=False) + batch_op.alter_column('time', existing_type=sa.DateTime(timezone=True), nullable=False) + batch_op.alter_column('metadata', existing_type=sa.JSON(), nullable=False) + + with op.batch_alter_table('db_dbnode') as batch_op: + batch_op.alter_column('ctime', existing_type=sa.DateTime(timezone=True), nullable=False) + batch_op.alter_column('description', existing_type=sa.TEXT(), nullable=False) + batch_op.alter_column('label', existing_type=sa.String(255), nullable=False) + batch_op.alter_column('mtime', existing_type=sa.DateTime(timezone=True), nullable=False) + + with op.batch_alter_table('db_dbuser') as batch_op: + batch_op.alter_column('first_name', existing_type=sa.String(254), nullable=False) + batch_op.alter_column('last_name', existing_type=sa.String(254), nullable=False) + batch_op.alter_column('institution', existing_type=sa.String(254), nullable=False) + + +def downgrade(): + """Downgrade database schema.""" + raise NotImplementedError('Downgrade of main_0000b.') diff --git a/aiida/storage/sqlite_zip/migrations/versions/main_0001.py 
b/aiida/storage/sqlite_zip/migrations/versions/main_0001.py index bf266a18db..706fc1c25e 100644 --- a/aiida/storage/sqlite_zip/migrations/versions/main_0001.py +++ b/aiida/storage/sqlite_zip/migrations/versions/main_0001.py @@ -16,7 +16,7 @@ """ revision = 'main_0001' -down_revision = 'main_0000' +down_revision = 'main_0000b' branch_labels = None depends_on = None diff --git a/aiida/storage/sqlite_zip/migrator.py b/aiida/storage/sqlite_zip/migrator.py index 45997e26ff..edc4099e14 100644 --- a/aiida/storage/sqlite_zip/migrator.py +++ b/aiida/storage/sqlite_zip/migrator.py @@ -241,7 +241,9 @@ def path_callback(inpath, outpath) -> bool: with _migration_context(db_path) as context: context.stamp(context.script, current_version) context.connection.commit() - with _alembic_connect(db_path) as config: + # see https://alembic.sqlalchemy.org/en/latest/batch.html#dealing-with-referencing-foreign-keys + # for why we do not enforce foreign keys here + with _alembic_connect(db_path, enforce_foreign_keys=False) as config: upgrade(config, version) update_metadata(metadata, version) @@ -343,13 +345,13 @@ def _alembic_script() -> ScriptDirectory: @contextlib.contextmanager -def _alembic_connect(db_path: Path) -> Iterator[Connection]: +def _alembic_connect(db_path: Path, enforce_foreign_keys=True) -> Iterator[Connection]: """Context manager to return an instance of an Alembic configuration. The profiles's database connection is added in the `attributes` property, through which it can then also be retrieved, also in the `env.py` file, which is run when the database is migrated. 
""" - with create_sqla_engine(db_path).connect() as connection: + with create_sqla_engine(db_path, enforce_foreign_keys=enforce_foreign_keys).connect() as connection: config = _alembic_config() config.attributes['connection'] = connection # pylint: disable=unsupported-assignment-operation diff --git a/aiida/storage/sqlite_zip/models.py b/aiida/storage/sqlite_zip/models.py index a3922bc715..d099100378 100644 --- a/aiida/storage/sqlite_zip/models.py +++ b/aiida/storage/sqlite_zip/models.py @@ -88,6 +88,8 @@ def pg_to_sqlite(pg_table: sa.Table): column.type = TZDateTime() elif isinstance(column.type, JSONB): column.type = JSON() + # remove any postgresql specific indexes, e.g. varchar_pattern_ops + new.indexes.difference_update([idx for idx in new.indexes if idx.dialect_kwargs]) return new diff --git a/tests/fixtures/export/migrate/export_v0.10_simple.aiida b/tests/fixtures/export/migrate/export_v0.10_simple.aiida deleted file mode 100644 index dbeea937c17b6f266242b8a6e94c4541a5ff91b3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 84109 zcmbTc1z1$w+CNN*lt_aD(kU^+05dd5gLIcLNtd*wlqf9-NGjdkDbgh>-3`*+_04n6 z`@Y}zy#Mn**LA*`J$vt2`@Z*Dd#^ix_tI3w#-YIY>!Sld(fr5Bf9(j+N4TSdE7aNn z`I=wzzv%G(LAT8v+`vGNfzj)Pfq{>vLqJ`jd^RqQ4!dUCjx$Oh?aHK>2s$TI3*(PJ;(*}66$U%Suwi&42QGD6e67V=XPOe|^pF6_$L4iUL`Y5eZW^{7Dp`a3>=dXOQw)$N47@CkXQu&7pPiuheEmk-ip=`j%Kni&@q#Hj`|wPD%8IfW9eC1j{o3E!*3$_XL?V)&s6`@M@h6?d;i?mA@nG zgvZ(UDYk`;O~P_sv#7YFDhU&Zlz$OV3C3An=8g#kd2j&ou(XEG!gSXQF0D?2NEHQA zJBj4qR$@HLg`8++%QKcoMqnAS?eBeUO!|d+c2d%%;`;IvQj4(xm%}!PC^#}xIv?`h z`8x&ljJ-lGJ$&1dCAQ7mK<9jDjAvjVmMRBh`BT`f3%Ho5E@WMBUisw7kcCGIX3w#$ zPQG~BCu`x;SH~lZg`Zv)+F_|+($b$f5w`d5yT=3gMZGwXqKDipc<(R?um=|yB|GhC zLa|q+?AZ#1gsCj(7_V?gn6hM_?m|=6J`2bLjZbuFhbTm$%8OT4iwf86I_F~w87W5k z*~kR*=x++0>SYd8eC%eF*FGsxiJ!8FlOL}?eVuoFSRcd~*GBtb#mnHi_^(A@tks^Ii*n7lxa)v*TJ0JzzbnV@)J>VxBZu3NRp4n`v6uSn-e8wVtj>80 
z@lGEa_J_&aq;_>6gS5ie2ZO#6$=g{Cwb|95pD#F;*5MII+-Eg7{Mt#jrN4W8x5+v1$KuI>o*!R^d%=&@E?oP*L9nBk^!e%+>D>*V{8Dz0urYkHijF6s-6h{7A^DxY+m{%pjBEqSJHFo+#j7sLG55)G%GJLj& zQyLXLYi{6Ng~uXf_N%1!P0b$Cw1G0wQ^7XON=nOegavYB?7a%cpl9%4B4rXC{1*wK z0QLTuFwG859K%lVll#iXcW;H=S*NEJ`W|g5R?~+Fr!lItkz;j{QV}c}I%n%fyui`H z@3xPCMUva(Fq$%lH&kPha1s$e`g9{jXn&zpZO^Z|uPK$e%UNAv_#|7tw;)u8Ec2B- zrkNI5M@B?QZ1V9^e5zNM<-`y>mtFIHaNW~$S^wXm63kx%;OHe+O0tszLInD~ngfrG0RVYvF zxgh>bmLhfDL-q2Y#g@?p_L68;Y5EPuzld{w7B9<(hgQ&Vk0P0 zQef(KrVuE0PQ^Gin3!P?sPyupzTus0>t+s+BY3eMZ1R&F^}hUfV^=Y2r2tbzfQH3n zIo`Jf&0L%yY0h?mb*5NxqPE6o^2s*9-gU)44qdPBugFN~1nhY_1B|;VezJe(j1^zf z2E|!qIfVg;hAQ6DC6xs910vh&S4b8;Ryq%8`IBeM`JKauWQP5BGW+Hw-d`I6WjX*) zm|b^1^bOh7nE(sU)gG4Ju9eloj_Q9-oi#|MM4rv;@VZM#+$RjR0_CS%dhplbOTXV_ z{64BQc$GXq>;mN7h?}44Dw?pQrsnr{7%884{Uw+0%tGv3=+0-n>H67fn!a-T`ejT7 zi(Tt8$8lFZqWgR|#0Mkd_$iCCO@*GH@r|#d+U_Odg7Tbh4cLe8VPZ*&kz0yU8~#5` z5AYW1Rz;>C@`>yN9^dp!IvdSpNIs_X+!lwi7H%?#|4y}4$`hu&dFA=emvvHjs!ehp zud_S&*H4?UU?xBm(c_9G&R^dZo=#~hgjTR-^1R@NQWbT6iw|;vQ`Q`|vpdtxJMdLM z-t*8^N3ED_dBxq*?!^BjX>XnY5ZDNFs>CZPg&ESpO33A%m#gid=u_IvYH7T{2F{LZzA(urNAl zRboWPV$IZ>5W)O3c~Xv)I&6|PTc0d*_Cqj2dLA$dVK~>)w=r-%fPlAIGN7`wXiP!=2Jt=N-7)!Sc0J) z0&}?Hv9CC7E8`o%iQ3`Y&$id^`*RxRm$kHq@2yHr1f$odr?wPb>@}=`6EwTv2@`RV ztxJZ?2FY?ok*&px_t$qtA)P3jqw|TN&7CW!EVGpc|Eveq1cA(^%vzZ;ODG2Xx3@Zu^-=VYv#vvsVpWj z(<|wX59wQst~nZb=t{yM%UEcTOr zRQF%4tNSGFw8{5dd;}Mb5I>D89D}FCdA^+ZMs`j=?mpt}s z%=bSSxMbyxN&NvmkU$Y=Amf+Lz0OM}8<>fWjhnHCf!2D$!WN*vYZAjP@&VrXU3#>d zo`2hyT?zW(@YfrUB+@qop7*DHe~3c$V2{<}Pw=BH#Lcz_RV@5hqsc^@2!x{Q^XDxs z>zlrdA2iMas>heEII)*T?>0b*?aBBv>-ELO|biPfO6vWXgjzF9a%i!-|`e_8^H=#dx_ zTl^0J1}3ilV9%O%;IF~etoNFUo1+g2e(gbKe)|qQB+oA$4w| z0*I+H&f>}S@)8KK=}yWPuOu$x4U}8Il8zQ4@%xxNoL-%>`E(2fWiRP*zS^XSs88KL z)<-dGJ07(1-+QNW0p0aSxu!g9k4_hwoRF8vO~c^`DN1rSnhcz_a*; zj8?E*Qq;~%uce!@f2AT!JG^kcLVimd-L)t0uh=n8J9b4-7**FG=zL}+spi`}4QBAp zlK}cY##XG2JeI7-!LN-m`jca~{HH^m0N*eW@}FIwLL8^IRo 
zc9H%wI-C|3L1Dr8kATsPB!ZX@mYDtD>}80^B1nS|S-;Ok@}d-So;kaQHWpO{Ov32T=|5@!oI^z>dzN#B`rTsE}K3*FFzt0QW6GUzT1Y`Q)C^*$BxiufdAP2 z-0L7303SqlTWi%mM=X%Px^3q|jH=O5EN6+dY#Mc*Il<~uH~jA(J?Lv1>0)W}UpTgd zx7lwF`u3{ZsIodAtoYLp7dT`>32psYZzSRfKN*!Wm1*WXe{V|Sdfjc2u2@~vvYK>ko>?gYsh*7!-A&7%pN;pEO& zS?90VnVR~wff&tGa`E?`%d9?ETZ!Ru94g3TYp)}mZi&;CsyGgY zN7+pS&kyG*_SR3kLYi)_M&z#?U494OZ~#VIy%S66-a>Ea--t6zVrimF}r zyTwFk!;PSVrX%MCbh6(m{CHmb2N>TP58x}|p6_{4{3(jd==@aF&|1%-n>+)~yjXNd zIpf)|!8$Irg!I2}IxX>J;H#~#(OX`9+O z3wFqNR&EQxGL7GAW5jG66hUDc2bug?U1q4Ezun!P*|gR9acn(oP1?<>vEuc{j~Q zwB&#vQ;4ZFhLK%}sm+U_J%&+`fKl)?K=+Y!Aui1r0pl1I`h;-|i;)r|+jt#In)`7~ zKX$ean$js>AEN-hB}nHBV`$r>@35pwKEfyv|HnnLzb^i@{p0qpC-c!A|1^RW_N&HGQa;vNlN`8o*JBdw^z1dHWYPc1 z^cw%*A%4nTAKN<J?)2v{=Lu9N_drJiCXbYbTcF5nVh(@N42_2C=>0MU_A(H(ox}A z249*2HtwvtiEY0zl|s6KGR5HrC6oC29!lq3P5)wJeLG&ej zFGq@~Egg;BdTN2a0E*pm#fL!mZq1d*anGV#ZaK}4D@BQ9S?MM7vBqiPN>& z9$XGuV9y)zfE3kgwQ8yb&#HXKUokOPr4Jstbt~kb{>Xcox9FMbQPdHsm+~4Ls<#Bo z-ZLb232wC;R+^o;Fyj$6ZXxlm-`JfbcdpTw)$!DEzy2*d^J203eaN8qndY1=;6Cb`%>94cZqyC9MpefPuHt8y~&o;Q)1fO`(4aJy+H{_fkpq~c%4GrRmS*$kZy70T^xOZow#tJ9sL&xHIacl z;em6KQCfo;GDqcI<>6PU5)YmX^NR)#<}xU*CAE7qoC@t*>zu znKn;N52Z7!C~Zc;HJ03!5=`}4|b;?s@;L)|D9z3ESx zo9B8LO_3vy)(Z#aulqvBhpsx#`Aoo+CYJss>Um?jCTit1L9|+J*GKn%N70yk8s&B| zF)*Gy#K3rjM$zn%uF(HM(ms7sbIbyegx(V@o60MX?3;|KcpIIL>*z5()aX^}&OL)P zQ7*TYN#Oyfe&eo@&t2FG``@fJFyw+a1D>sPcDTws>mKI(;h zLBUtDm@D0&XMH5RVWATFNd8yJ`9{i7Qp1oCp29c6zO65bj@dzx!?{ZxnYiY|S|yLQ z$i{(w1=-v9lqXYi(_I97Uad~_z!H_t5D(Bv#Wq-_TBP*XQTTU)Zw{iC%hly1TZ!Z++sAvy?6YJ`| zeTEE`2piRGa<&*lq&d@Y2#Lr|791hR|9@)SG;fyoweb$Va@`J`WP> z>i)I+hU>P*quzYp^iT$TU{|Dy|OOy2M$VBb^vVtcCfAEQIpc$QL{h_`Q7c#xx7APi)#8ZrJJxZ^;|v{X++#+bC%TZ zy!(Dd};Ly0S+ecWw5kW!i){;hOrxTDYnzM|k&Y2H3~f z!R>BvcErWucseo#YWw_R*|n{0G{wZv=8f%2np6Jz`yraT?`Q92Im$w}9UaOTMBO^- zpWFIFH~P1h8ydz`yQb!IV5p8^;_mg~ll5ULkVaBNz3JeWxCiNJ_JhRpLwQfzkaNRL z6=HLjg;gV7r7a7sBi$t##`|@y$I#=fb@9}g|9THfH?&5dlLsB@w!MVPs%T0`weX51 zGReFYgWUDW1Dj6`6=c&C^{x3hK@{O6*;N8~s#&Cxh{wqbWRyZ+8RW7^zWN?#YqY>9{wD71); 
zjQ5HNrp8f-MUIkn#sgtL6~5gtSTWhF#(@O8>P06W zjERf$3EwvzW25131MSc#r*EdXNwM=lOTec^);^hJ;no)aorjYrv{@k=Ui1_a;mL1c zcpPLf7=67e3tpt9%J&kiUtuDVn%Sm379zk|a(T#F(l~Lhe<}S5M^B(Jz*?es#G6js zO3BImWc=-8(lty0dCnlE(NqOhi$jZ=2j;;RHD6n6j|Xd})!QVg5WgR+J~9~fwtN)G zsS+KMa8UCbhZd{TRI4Aq1L8zvWF23PtN5gVu_ovTu^|LVB^VWy+y0T6zsutE0~BHC z`U;+l3|`}YXcMCvW7a94QBZ4oNW+;Fvs)uIbatiK6R2(@8)GpkzoDzCC9TU$+iMl` z7!zxNgopZvRuq{Wt4^$;4KEPymtEPZ1m3eJgbL3hsKzx3fuk1sql+MZ#{6>W{xq^q zI72LPC@}LIC(Kunu5wh2!{#Bm)c23J(GR#3W9;5)<$ntZ4y7TFN?M)t-sKz*!U_&N z#LWd#ymfvhr^LwjMOv@bdv(p{+|-TJur(b6GPb6>Y?l`E>wkRLl(N5e|3Q zn>}E61g#xuB3UVOOtRMJkm>gC4L%44A4YSEnn4=Y2g1ZWP6Q$xq>3a8P6=`(E|2ix zVU?UPB#S2>P4B66Ng@3xG1YU~224OO9R#=VsSP2_CNpx7t3*DS;|a?mPAE3{0pKUa zhhUj*Tf3a)PVOJH2TG|nd#K%4+u`dSXW*f^al+kmW0R5vv5~ZaxwJjiIexJ=*S))u z=-vM|GRyulW$sO8Tr&kpPYdIB)m`6lBgM$lVpaP1Yma~a)hr~aSQ~H0FR0+h zHKmh8x2D>s@WOXB@#9?3`>!ff3GKhod&%}#G}pScJ(`O}lfE$C9%q&Ci-6JD?fv8b z_ALK2&9&hSM8;^7j6(nZZu`-dK9sfPKWccCeX0Zjj3gL8i#m&ESW%P6P^R2?xvtnh zvL3^=`x52ZhJA{Bo=&70%!6}3JxH|4x=lS#IdJccgnm@-aNePcb%jeKYbvLcm5oN` zVr{+UqgIQz82>$(|I=;#-B0|}ZJ~tzjT`z`3?h*KkO2Vx4;i5F|BwM8@*gsY z2>*u+0Kxx|0SWpK8ASfmc@RYXhdsdm(|JJsr_UhxpY|Z|@4}{kC45l8|2*_Rb&VsY zC?W|R-Cy8fU}*g<4Ei7OMnXk|VNehZjDU;45h5rNVF(l<42KEAqe3}_9~kN$y87a zr%#dl+WPm*>%w{A9-hw`Q^T)aAmh7dhx}z{*X~cd4zmph6P zNj-}+m)LZ1krYdeq)!S%TJVVL;aT^4OB`s>B_77T%STI(Knxk8uQ*37swzCNNeq{t zqUY*dPsTrRKDF&6z7u>;@3k)zFtknVdSuAJXyuc4O_*6_rhYYn?YtlV2F3EH8DK2F zwojK<)&2m}%&L62KwaM~PL$%s`A%b`V#gaI#Q>Dk#$<3FxmWv=DRr7ob)%`HOw~sP znpsa(1P?8_+`@()I3h^waYAu7M~fd(OHbUP5v{9zMRV&b|JKIm3E|=PUnd^&_Z0BX z-$vHgb{WzBX#?#^rT=;S-&XFQVgR&PlsZRGe;ryB{*@S@jTQ<8fJ4DZFc<|v!T>N7 zNDu)82qT0A(5(QNAbPq1f)FTF2quI82noZH!YC*L0u&Mv1VF$5pj(WpDmIX$XaAC- zLSl!jtb|qt`hfDQ<>@PJkLq7@h!n-S=zTkRCBj#stLOfJ!6Sv85^`_y$H6@2&-Tn2 z>f(VLMgj0pq6ar8-s^)HyWb6Gp6Hj^mQaT*VUrDzVqtr5mk(PH7R+_kVY@acO5_qV zY3Ojl&HD|(X_m4*K~0ZfawgFUeee`1iN`WyWxK3Y|l{Wc};6jJEV(5jav1 zA|L=k3JAdGbNT(IhQ`^bt0)yepZ z=%e<7COsMHwx72@=SBeOzs#lDTAZYAhTI!onRx^|v>i`m>?`>5PP7{wl`ExPH2WO3 
z5M{nwv#GdroBK@CbGPk1V_sx&2$Hlbn`qD2hd8(r#skYc%r4-tX;9Z15fZmlY`^{X z<-QX`z^+v;sY>kE)Mt&lHE)-bg&W7O{^XM{Psf&K=o6;@sN=LdIc+$go+h^K ze9`_-I-HBUH#dXv&5=##B5u73*E4S%F+x(*lLG1JU3e0%Bc=(cCwVKRM`Enn{!8P} zZl&KViCw5enu`=Us`F*pm%W;~6|XLpc9`@JOD6jt%Daf0Ih#FN=$FCF0Q>P+M2or) z?H(;4-WtGGrs-X_;C$dEq+9-*Hj4d!yywA0$hzJX5vVHKx;G87bD+|wFU zY4bRa7bny@oA|um@AA&R=FvH5s1rG>Fn(x=a%5sX>S8{sT8$rh9?Q>ulvCHspJ%{q z{z7t>{`D4K`fr{YU8rmC`J1z@iVgWax4zT*2TQl_Ud&Ug%=<>2GLY>{HTPYwov5sG z%==q)UfZ>hT~1!Vqi-g}`SpwxH{x;!cQRe%(BgYq*IWArf*VT#?w&flIvKhX>wXHU zp1`dDX1W{-MY(WK_Ix6?b_$RV-9rmTzQ%>dg{FG<$zSf|y1{bk*M0qJqF+56sOV~W z*gK0~v=6e9w;MlIS&ng!3;P_BofN$ma5*`s7TBTmb#FVU&8+QUW`eS9%I9|YyyU#m zck>K7#(vl?y0}!fCTqUsu`!M_F!Mg_s|+n$ZLHyt0=vPSLQqlB^Ut-h2kf+uUWCBN zcb_ONV*ArxXt43H{-*j^Xw++_UlTJ+|LPUdwVSaO4VNyl#MMD zCHJ@1bKZ)T?EPz_aEH%P>k4aBS(9OOzbPRU+>3n=CYA)@M0+d^?aSA094y%hqO(g0 z87&h+5%X9N@3Ypz-~0S=;W2-4l(XmL$@W<85C)(lSYGQ=*DUCGxTgBv=Ry~D31hpY z+%qp_mFl>o-1B!b^y892u13FcCZ-q3KsjYwaBO~1f?w$RQN{C*7y(jT#2MdY1q;%$ zDuaGniAkPv`D3K!zJK>L*D|RT>=Cf>9g0`X+d)Vv;V+&d|DHythal>BM9@<_0_n+4 zH^j{c{4Mr{C8hN15%>Gp;0-NTU2t11<|rxkivxm8qTdnk-#_IZ;P9v92{Nbu@n|e` z7+1c%2$1uM5^{-^hU?wM|y6mAZX2OV`6BD{WbWK!B}H+cRh z8lf55hwmH@=BO|6D0yC_spZCk9@$)}7c)FC*_7BXY z0zz2>_?~0sbKM}uz`TAxaHSGi{Yts@e@1pOa8ry0lv7E0vK+_;vQK>?SlaktLw5EY zBTR}c@Io7}zu%cR@Bj@rz}ELjo51NzMGigWlsOZs(PFj{sp%O zdMT5?s@WDoGQSl_GWMRTur^cUoI4AVWJ4MZ#*$&R`#2X^_C5bqwPwx}dglE33ZGMb z4Zo7n7kG|tGGR7~goA#r>d#g*u3u!W29*5QZDOgCcsJ?r(jEnV7*+XeDAa-Z3Dw_r zlnG(4NS3#MAjaCRtOqv1L1FpDgb5?MCdL+LIma@(E)r(99U1PL{#bZgY1VJ&k|l|l zzMj2lysDGp4OYisDiq=oMftOueW9@7i~UsOI<0k!50G=qlXv%Mlx7yg4gC-=YELJM8Jd4r-Mgnj z{bYjSTF;mE-H(T|xABa z&-)|a^Y4ate zc{FlH`NbK9{iDa*cj8Hpwld|v{%DGkM^r!7y<$WF#SIoa9%Qm9ZDzW2g`s91kfD+O$uyY>eiOPRfn>sfjc@|ui z%%NUqU7|(Pr~BOGhXXRhecWg*`gBJ0+K^#;Ko^8=LuI_)GFtPJ-L1d3f@C&M9ldop z!{5lzOaDH({0H4~FMj&g{S2Sut6sX=+zC`3NOy{{Qxf@h}i*U;D0e2FSp!_T@m)ybOhp$U-z6z3ede`Zi*Q4f6Fg8xU zci>niKR{w5QJcwP3_(2I52fi$u#YD(S$8{QbDrZKNOCjKY~vKUOr*YC4mosy_D)?$ 
zzM|P*k&7(r)zn0A(JXkqh`N}nLlH}T^^RTVVD`iJTK3{Mx^sNf)AQVMLt3St&Xm>9 zvQnBXx8E+;GxsZl;W)6{=e7OL>icaqd;7r>;TP2y{IjGh^jy>Lc_kioG0VTMPW*aS z5u#iP85w*z(sHUa1;wLeS@P7F-aWO-Bs0c4a-y+q{}Sa$=#6L~ZhgZ)FgCHRUUz=e z;ZHQ4Vt;N;YLORl*M7W5owU1nPsrCCPdILR(6u4SZ|>vJ@Fpyjk<}aiG^cynX?0mc^^G%*_O(G`a0X zxus8}-$tip7P)Yr)0=|h)SfIn)%^h8r5)n!Nl;5fi3(f3OCUs+hjVJSzMu+<-WtUy zJ@a7g9I<2uOx|k zl8{5ALyt%4wI62vYT3lI%XeiK`P+nitu$^v*~v`6uT7blexCMrHvUtnG@9N1Y zYR}0-6wcn5Nj`R=NV_c0o_sD8t;@_%Mg>A8 zj_8~mhp=+4p2^sWLAW&?+9yLjOyYHtaSig$Ep|v7*N;_VcZe$d7rVz@=TdD$&+Dww z74X&-UUJ+MW-SwS`Q0Rl-YTp#e~_|af^A}js{ZEl=k7UP9xBse?NKpZ1|Q1IwOnyiMe#0YQGbkRs5pnXqIp2QSKp0 z_cj|5u7^0@d8@PpT3+C%@GF1_kjXR;PIN0X<0F3pOSz)hHqAZ@MVRDFWQESAsH!4`~>vEj^QhqNIaApCuu`rFyvqSvXUk|OU>;&6|s-k@n{gxfr2O;hHK)7YVT zavJ<6JLn_HX+q}ZxzVP$fm*t2h@+T^F@&ZC-iG-?Wc~8_wwmT{I+T-2-*K?`e5Jz3 z>|tFwy!6CouL=+{(HhMXni9emg zBormQTZk7wd))u8JkpYJ4yuXH)_kO)i&B7{PM&^?g zKdSU?u&;#7c=oz)Zhil%mQWTB@9lZyt#0rR{;JemLwsg`ptZ-4U%gu=!SU!ihH#N* zCe-xEp{C~La+i-#`VW0AvLSY7K}*Vzn>S|h^2y_FTH^0NHZ2YPri|~&6I_*VoOgEb zo&UWx-Z~+(5K!?Yo~WALnz0cJqm29l5u(>kU`=HBQR^Xw|BjW7L8~;$hcFCeKyXb{ z1u0gHwiN|8*RnNQ*7V&syrFQ)tYm^R<9%r;nMXzc?c|}fslyeBX4^(eq@L~5{~g#6g(?vjqi59Z|AHc-@nS(R41oqR5C|X= zfj|g?(U=hm0fVD~BLGMU2}TN|QAL0NN<;`fs{&|r0|^Jh&|n7=2@`>W5TA6~U#9>l znvW?(+1?RVV&s)R7J(3@L1DDfU*DzE7=;;fDMYZQuI^vsu;_6L>y}uO@TZOwJ$WYPoTh5YvvmwqvrF&guCXTa-5Xz1cXt#d9Zn82JOC z=Zx~JQ%@~AQB0P9z9?cFIjzb_N>Hv5sy9n=FDI2)oRyapVg1;uLgxgJ^=)JB*#{Go z&l9-)-(FGhW|90h$rF||VsNQ4NJ{~`yGURSh4UD7LDL;F0a4l;>C8>d7NOYwp?h2o z?xq9%V`{tHr^f|jURlpymPcbxD6M=~-cgc1I3EIY$h6#04R0KCsocM0%;?Gh=r02WLI^+wgap7M2q+8=6%i5vpoGzK0s#ZjiYFKN?i=RgO&^c%@7Sw0_DxQUY|c+h`>K^?hI9K6`umM} zs@-7Pg)RFUzm2S(#;NLeFNgVeENkW&k{yn3z7Ncmrm(B1eR$ofIRrlz8o@Ij)2Q5X zZmZ!Ae|LTAQ(?M+Y;Asi-UG%%F+3CHyvgI?~8P1Tr<`F-hC z@dyH;jT>bbCKmO}Fd7%j)FVo;UiFuW#-UZa z;%YmvCSBr9t)wA2-Y{S6cM3XHH$}^b&bXC|HKgjLov|&WXFO)23mffiX^$SObJ4Jb zw5u||2Bxn7%LV^LupAL@re;V$6krOj!JcEh*X?Xl69F;ypdRpfUQ&0!FC*hO;!uc&V^^KJFwq*`l&e?`1rkj$&N 
z*uM`OFf@&LJ1e%bw5)vhzid9*Q(<8JwL^dYmCHicdtd-Kx_}@8L7|~dw3C9OU?@R2 zL_|aw1rrev6aYh!fAW%>U3WNcKz+fauKmd${ zqaaZFei?!Q01?i&G9J%U5kJPsl1I6@f`^Oyl+dUu3)`m`c%gki*7YG;WA)3#RM}Yr z%5=$LC(6AK&SD>LWtjh6OI|h;UPYq~=!n?Aem&^w77&g^fe~O33W4_6=u@DG02C>N zL|aLKh=4E@3_$`wNC6~N00l-X6k3BIXyh0K2MGzG-xzdvY(b?1K=W?t*sQB3ilX~Z zMzyW`crGuVto$KXb*^!0B%)-biP|-yrgPJjxcg=;pMF!7)4Mf6Wg8`#<02nk>zQ4d ztg4;7_nbFeVe?$3VY(uwOj}j-Fmr_K{ZyBmg0^jr;^?@Ztli1!&OxqU;aKG};veab z46JVBIbY@*we5Ar((J1uuWiS+GHIVQ^Yl2je5iVb4_AIlR-*oErtZ$o?ND}L<4MY- zwY*;pB*8d!qabXz+K>;%Up4wcE*@l3l7wu?NdS=!){K9M`}WHAorhG}-iN~%ksW>m z8|nM?Mo+WHe{}iYZR=ax2G?VvdTaG*kWnl<)q(NvS}lhpJnV1J7oY7dr7!wLQ_Xhx zZs6Cy^R5S{67DUbTm4M_dA##JPa?{tV#j6=`syYZhgo=TBO|2iSW7G?)ViLF=4<(S zS~d$8NHR_$N+Jp!Cso7EpD%harr9zjN-}nloMP+86s*8w4C<-Ut|?sszir6!YE;3pa=p00s|0m5K;t=Rto_D0*1!^ zQBV*Z0f0e7goHtYP!LcUi3G!8Xr~GTp`gMD5l%V?`F&)XbE@|4I(n^^B zYxG|8j|cu;G>Wd9ia-Sb!h%3J3J!-LAt+%XVG)$zUoof%+Vmrl=o*F~0PXnEG!QsQ z03ZNH$9MuT5DX@O4(mYw|408n(Wu+b#c`!7{{$Tk=hOc+{7ierXfw3xeMOt6e`T4_ zRa_wey6A!w1cA^t2p}wic25FA070-25P=YZgMcD}f+E6b)k8qhmx1U485E8XMj_yW zLc#!)5X3E3O``)q6nd{R2D*J(oLtZ1yxy-FFZ66cDK#d##A@2V2j9dfwq9_|_WBfl zt*c@mNjT{5h3gsf;|ng)8l2QLxG8g%_PyP6O3&W_Zuw+43(N`y?iiL?wB`#QWwc({seUu%u=Zr3`f+AIcPY|y-Z)Mj>$I(~TQ zs@~8+i~|Br(j=`x6T=Mk3rdQ5!vozIb^F)%Zl&?d*q9o_lna06=53GEG3DD^-7Wce z?O7HN)o)FW9F<>a(Rvs6*l|^X615MK?-tnApAOXf1Gt2t(^G-R>dOiP-|8*JvTzDa zO2uDxSBa^86~A_irhcaVF=L%Y>~#(ei{(sB@MUQ)tz4!v2(MDK`eI`ExxmXZ<0iDx(ea?`9PIq7UEt z-((i(0y`WDKtP2cAOt$ifdbI>5uHg0i-6G^3_ZIrBob{+As`SsHv*#T+d#B7K_Orm zI_&_XK*9pMqiU)h(j>Tx-hN$`2G*jW>qpArN`;upIkm4;!%1J*W?GYMmwA*F=&Q9? 
z)%RGU-p#EcvzCr;lO@(pbMyO?;CMa|yiBN;P?2mde@GK7Z8+M_m})I^zT3yuBKp#& ziZ`X}IGsapj0mecQ;;^&sDZ~3uKvxa;%LRw-yj*edGbc`sm8i;!pk2ctI4~Z>(t?m z1;@?1s=tp}n?zbtFGEq+F+I&csBM1;cd53fh~mAWd0HOe_k>N^NJ$-o@z0bdO-ZnP zc3q~v@?#AD`b_nQP!of%7?x>*IGiCEQ=*rCSu)ReBjhcm==&UhS_MQf2dXR1oq>o# zDD_1J7E^^d=TrT*&u(w#xm>j&2a55*`aAolb2WRe_G;VA6za5_W|$&N08o^XV8+Hi39I=4B|; z!HO;@SMF_g1daT?H&+qwG8KJlh-Y(?;=Guo->|rX<3VtX5ECk6&;2t%fp9q?Nn#cm z%r0H0Ny3qBEQc9Xp){|M!tH*a;M2OW_H&mj*%6W!+;Xp%99tcKjdS&dIG;NF*^ANl z>dP~0joUbH{FRF-qO-y$pMx{*tFQQ;{m2p;IpjvBk)0)14E$-2R8l1F_h5b`BuOd>Y+D;r#_2CQxzTATSbw;)%lLurNwpV@FNnFh%{{e8?{ zNIg`2txbmeOI}Hg#U&vwu}oveb0Z+!9qe+xz3Mos4X!TC|7{vz=Yrq7Lg^YtN5E#R z4~oou4(1(RUom!A^OFnb=FH98Or~lKuZNgq?Bgjl)J6iXe=J zdF%dpLu7-wTHR$sMrT-`(?o`Hr08dgYa>r!=zt|QsN{1A&(dia68L6Kpl`t8C*4-F z<~Ogn)I7Hbksm7y#=aS|RWV5YFZI4pjC%aQ5iJJ43I3(2Mz3xWL@!G~qR=@50=<|S zT{VZIT@BhVLSP6%q{v@Mr4Rs!MhVa%99l}yCK?TrfWYXG3M_;!ql0!^?CWMAulcU# z+ORy1nC9QJ9jC=>h}ya}&BevJ_WYQPYk2N4=jp6E>+Mp^yBYRzF8vdOzjVMyX%Y%d zl5Ebxq(KKGX6b_hY6^moLTb)7NxO;YnTsdB7Xv+YZC}cC{B1Ps<9C;L`DC{#U(C1d z8AKtkIp>*XeBp|Y;tVaGGEe>AsDP@u-t>s#FhxNG1WJANEvHQBE_#<}g0e6@Qv z{AaG`vHzR%!xiuN-4U}F*Pe!szwWvGhJSC|3e4wRG}I?^r*+Y<||zA02IpIN6+DTt{zjq{RsOO%<9ep62G>?dxf z5>W(I%U_!H*%q_-miVd(2=}a$=$iJ6TEBX_(;R|*8j}* z=EuqW<3r(Bd~s}7$#zwA*@mrv)cU6CjW4M#iH9| ziBS558YPBjWWPn{OEN}8`i1+%)dWsn*Ic(=-V)yi*r|>l3?Qo14VrGchZ1juSB)-2 z=bn~519sT`I({0>z40X#VpM5f45q0ap6$hJOssf1aW+Toy2x#N>8ZdlEFJ9s;7g0xioU+KBZqgJWFM})q2wQF(<=SG z3na8Zlxk(?G~qvg_-a5zSeYSiY{Iia)LL``-OLJidoaZmsYQNBN}8(}u1F zt;{S<&LhPg1#&%}45vhFnk}j=s!3}*gmbTW#W}YOdCntV?h7P?xf0aQXD)10QE|xT z4wjb(O*|hL`uL^F=O{{hJIqo%X{9(C_%D$WZB1fx^y2W%F)sY+1BUbBg>YvZ9@3omt@2t`d`4&vs{+4yKX#KP28s)wKR@QH5`t4EK z89%$O{;Je*IQiL4Szgn<{cUx!kI#pUWBa#eA1i#ty5`J(>jeUnS&I1^mj!*jVz77ef8w`?LU3;{xoEu57$6dZ@Qu`ng zv24xYhgRp?vOjF!BmOYW^fy(+xSeCwlh!*+&nlS{#(5sz{B9k{kbBa16m2fHYD*wb ztZ-Oka@9B*%i*vT-bUO{Pbp#F2G{1-y73O&c%OLe*^~min3(Aj^l({gR&y^3qi9}S zj65t_tn*XG*A|cTI@n~P`eFm$_?cwQw52#XNhwHm8VAlTsp}IT0`Di!kIST-)ExNk 
zNAG(-&DFbQrP;qdL~5K9UXj$gKIeVl8#9q@FBPNK{Mh!!#{b?@C!;+4bYop1cIv$lK`|KCyn=Na`BU1hs zA+>kAmJ9>#0{4NHx5u;;KZ;q)Esy_5I;|gyAAawBym9VK!;^Wv>en5`xA61s`2V5p zEu-QH`n|y<5G=sp&fpdZ?l2G}!7T)L*Wfxp2u^T!cXxMpcXu1yU3T)^dw1XW-LoI} z%%|=?T|HHGrsw~&D!cm7>YHxV7mB=2P9f2Zy+;04;kO&?Xu9>~6dZ=Pe$92=)S3<` zUVS1d06xQ5)rwENIA3pLy@V{cXjxZtkSCwG9Me8Eb(Oz51{r%P6-)_6-!8jEKIxOx zL&*273744zPZQdNX)}g)^xBgYU(Ua*f?5kF;J(m_5aowY$;rORF}@7qJnziD?M$Z= zb;=1r^50wv;Z*JJk8HYR{TMrp)}oJn<}2{zY7Df6f5 zal0U8ZSGL-?rMSD0OguQ+|2Ox^!$St>g2X8=A2^k4+q#dAuopwgbOxCj)MG_+YeZJ z-?fdVMO4ggUwCNXwKVdBi8w6rhcz2M=~v5KnQX=LBnSnJ4Hzj2C|lyYx9*&k-jdFi zEQz2pag+^Z^C!@UpeR17UUBbf>j6ebeL*X&8<5S3_&|mh)lMW`ne4HN9GAp<=aQIL z!K2ulq1QLS_~wCajoD-l2M88~mnFtBBw+y#bYuPwh;I|c1Rbj=6N5B;u1%Rpq6xqa zc?Xl)0@V8ln+kYi(=dwJX}~lgk6CSj!-LJlDV3AmZ3a+aa9twMWT#tjQY3nEIarAe zFOPm)ERn0#gcQZ}KyebjY^2EOSWce>m$ z0oeB!MDM4-;oen6tUF$A|KbCG)|+LXSg8kg?X1f8-Rv-+0{sJXQTf=E!FQc3SAp`} zera(4y6|X65>_!Q$XMy2KuG+)VQgSvvOBNaI-@^FG=ddmS?mRj4m+5nRHgzGi7l2) z`h_1^E^R^-T7E{i71ftpE|Y?!lZ=eO3wb-;=QZ8aK4s&C5CvN^a^t%CtVgJoPDp8x2;7_7H|`C}514u6&lgK=wQyUV$iAsj%nO)*50aMV z;|Co)wg~CSNTOnf0ox?r$~w*Rr2*>+p7SMYegApPf$f;55j^y++C9BD!9Q2^ZV3X zkB3QgliSCSD?Os5M7dS1M_5Pph#pE}fO_oN5>UZrT8}GkdG{xy&Pb>F`1aG#oKN z_R-bYfu5De+85|xYK8&*##6J!Oe27)z1_rOJntU3>vfJMZW@t6VX_Qiz(|QVE0gO$ ze@uXL9MHjoQ4CP&4*kBk!FPq(VvZiaD_zptj3TqV=;#W#waI?aSvF4fuNC&v zaks#L=!^he4}qS0(eVH2QW^C0oq~G0+sWL1YwwfG@oS^uze&D$bl1HQSX;bt1zb>7 z-7+={#Q$gP!mLSD`BP{Dt-1Knp<^=)|34Q(tk1}RvZ27)d#7$pFPj*QOCc*i_JZ8L z2sovpTyuh-QAGfd)%+6*`0W9))jh3Y-1%F7bG~8n&NtjEw!15%6e9_zXFp1YuzlCt zeJ%I&d}02OJ>mz#*3tQoof`XRSM7~5hi#)OR_t$@=PYm3j|t(? 
zwm5?lemD$*DXKuZi19(IClJ#YH}RlQB_WLQ^a6LRhp+3zuvOxM5g;xWQB?CgT zyD-=iMRsPS{n3Mown+#2P7RTaN2YaY?m^W1#)En?c3bO=Y2S8HCk=ddEU|bZL}W`N zC(P}DzC^+{K|(}7gn7Ri%YBXr{GJqL!%M^PJ@S*;>sgBqhPY_UgJH1W@u%j1yGQW= zDQ^vN(T}!lnCsMwr1rrk3fO-N;zJuS1|DO%T?46vQ&Kc)CkH0jTsA~ zbDLpH2S#M=Y_o=H)wbObdc~{2MeX>dqY0>#Wc!S3(Tzdryd{t0($=Wc8fkXb2ce0* z?s@Q=CV5=(kS}siYS(+5chm&QG^eAOcg7qcqqui_!M4S&ioz=9S`1g-?aBELWio4B z7`ckI{C>U0891(q&X}Gt`MjbtkF{MD!Q$17QQVl9G)x1h?$IfFC(R$bkJ=U#KC70C6>?5zd+)7(nc{kr&*T@h; zMv>p)c#W!j+G7$HAdAWq^;?i>(YDU-hn}D8p3#C+S31ORL_IqcrCiVjzHm^?kB1z! ziQm4m;L`4SXCz2LEY9^;{BdD|8vk?ZL$78=!Y3?yo)?E1G+kV016&agqNU@{N83BN z43_LTobqbot!krfyyJr&$~fLPbR}|pI_X(MvqnFWW6BAHo5GIhbvVqKkYaY5QdUIN z7;~#MuU*^saGdHy9+dA>7*4lOIA3Ms;Ur!!@wKfoZ$1vex{$Qs z!7Jm|(E9b+!Sn6cJBP$QnRdj%4QXN)d~#r?rDyuD3Ukr7%SQ@rrn5T8S-x{cqjSfo zU+VKTwUl7~TXcD;z}ro_)T%BiQCW1y7XFsl%Wnn;kIvk4rYO9 zKPgMeXOJ0wQs#E6l!~isa#e35JQ-_2d<5=Blf1BxU<%slzj+k2YeN1L z9WGC%Vzmra`r~J}U^kLi2FxU?BP(5+DY1%=%UB!-3x|<=pyl5k&9=0H zQ`*ZMY9Ui0lnW15NeoD(T#z_4p($Ll)Lh0GDxY7G*56`L0ZtvW{#9}KX)$1oO2cIz zB%8AKSF1n#{aY@yX`ecU4+w~yHWQ|CYTdu-P4B{kq+DuCYlHwq3zxC>7soa z(6Fv&C%N|3@v%TE{j4VZN0SJYRAUZsJ)%O!^-|_Iy2Id5%nwjBRT{+ju=9Dk+ia=` zG~-yX7}A#b>)S$qMD*u{{)R3O~W#{9^9oVb*H_}a8k znQxhOAEMABD}nQY3|B1tb~detIf{XIUx1!M`h~C)DSsc*v zLT&I$oCqZBbA7--NZHUo`d(umd1bo>GTYGK39lkl?Thv>j%~QZ$UrH+JHqMzbL_{L zT`82cBF!r+07hxKj-$bB1qVVl8M*WMSTCIO2hb?6t3z1lqIh`lbUz9d^JkJ<;@>ybbzyx?gGu>HPr>j&QkaP)+k-W=8k3>)S?gunS zKP9J09z<}CHiQ<8s=m$tskulte@lChOi~>B1C6h0NyHY_u#^?qB*4s_p;ZzQM*zbc zG;DaIiybe&Di?$zzZ+H-k=hCjP=kedztYC>HA-5lKg=(ij9>ilOS*qe#tglww6&@= zbhaw5xz*Z2rI1FAN3WSpP2vZ+*>JL~Ig9>M7-Yg34X8#Fc2xhX^CZ6oC}@{VrRF4E zB^X#&aY#aIR>tyWrJzR)kUHGJ(O^JR{=7k#hLJ^#&9$jcVl}jS`(!;ks{)-(^2ODH z{wCwkR%8X`8lCr9LX+}3D!>JSkO#7PsD+d2C}2%`50Bf^I%s+G*+UB4@!R=~N!7xk9kY)p&nAw8j%=w{aT=Jv(=g;|Gx2}Htb?|_USZs|-m)YrY{lM?5KVhI^V4!S8FNyPVzvP29J zY3LvJiiq#;w5UVBqEwh4g;xjcf@uqAjtUGY=%_4~?T!E!Ju5F@c{8>ZOz9D0Vv;W7 z!y_Tis2^2_lwyEQeDdgJ635Y)0|k(X;lc$OQLe8$!g8T?mLl)acdE!>HkV$LPGnwb 
z^Q1!jmM`PpH@kOL!PUA^jCyv^@|pM3oDazleIZ)zPn9ZBH1Cz)xyy^QTrGi5Oz^!L7;7MmmovTDNv5yMIJNZsgG6kYxRXw} z=1R^Iua_rDFnqp|p}kJ9MLvR8^vXQNzt=IO-c41L&x9h`O?5Z#QgTa?6J4QJ63P&L=TJWbTH$y(WMS{ zAkiF9w?7GTcF?{%H66ufL5^ys+}SGOVYz|F89GRjF<$9b&MI;AK>m^|)>hQ2%oGO9 z$MYv6x-VR06AZCKfd4&qlE`OiTDU_8g(;UTHa7k>rBiGc)PPN72)-@~BL$hZi|R#J zma&a&=3I@BJ(7~I`Ja;^M>t!JR)Th7P^o2hVsP-;+;>GOLt1W{qXcQgg+(&gepT{r zE5IP%9j3=oq`t)pr_BHpgC0)uA`2v zJRI<@a!u?oGZ5QSS2@zKp^79Q?xN4a7#(YxDS_0~9wuZ{Zam_|8QqoKKAH9 z1{|D=bC&EcF|wmN^~RHm_MvJdMj8>|XVFw255zuPRo_$cMa{XqdKzR5J{Q zwP!piwuJ6C8wNXFys*03uQuRj)e)V52Md9lmFV&>A=tH^%_REeOlro@MJW4lUj{!(M%7LQp;vc@I*>^Xm+g^@VJJs2h*3JWY4HaqjRo#4ljS!L zM(RhM`ib5}!+7ufOj1N9|MLClzSrijOsJ_mHKL+~rD>#;(vbcoP>B4pGE>=qu@+Bn+!w{P#S+f`4i z8j8VHZ|j5piuqd11P5-XOQWWs)%K_&?>x0N+3?j3xH(}+QI!E*eaC1`<~dT3_=AuZ zHi+C^(`1A0yd*iJ*Gwee5Ox+?}1f zflBHd%Z-wIK3!L631$bd1KP;yWp%6vM$0{e@{^w^HJE4?jX4&r+MgZMJ=P3|>FVI} zPYvtWWX{{C+=grOJYUa0{d!p*7f2GMx?-YT7Hk*V9FhFs#u9&7d@1@P@6Ad`rakai1 zdh5Q1hvee-I>nmHE;B5mv>Q55P1b0|;N`{B6;|uI;j{{tzrpVU6;o*c`@8!Rcyogg=(R$ebdxL>afTk=4$Uj2Xt09IBR`Ka2d>6a>6EZaB7&{g*IW(rVD=V`1nJ;c0NKuXJ>i55LlB$TL##{?i((H)~vSA97xy)-w#f;QyuJD-#PCTEAk)MGQ8FO zR+kYF=p8>`x|rXoiT~Yj{!aLM8T24+(l`Ln-aQ+&e0qH`jVIr5_SyTf;{RTGOJkgFfpC%Grxa39i}X+=oSaq5@XJlKx4Z_X zZLe~^yj-Jl-k~-$Rz!O)8AA52JumNr#ywy5e{kU;1RPOTxcJ{kws`_)qo%KTUO?e{ z+}d@Z+Ek=3I+5>{Kuqj83Y>0qjojnG%sChOiAhKNOkbqto%dX!s!L2+{D`(Bbi`$H zkD4W^^0D-TU~`X@gn#o

2D{StU)>C{+@HPKuDnG7Ks@zn*39kxPy?jm>Rdd*-Wa zT5UO+!5$qnS_&uY9I7o&U?Ch5CuG|C^ekH-pTA2x;?ABRA`wq2jq-26kUKy zA5*q$j#mM{*sc0V@rSROak&hXOI*O>y`x0-fsw)40rM!J5o#yCkW2kWH_Gtyh2BGT z)=&)SW0?6aV1`!}H8+?+>ne+Syfx9;VqM zx1VXGyvZbh0Uj%m=ya)P`JQKRoBgl#>#2h?_sxr`zdZjcZc5bfj$s-@{J{~p(Nk77 z6{qXuf7Rl(ad%#~k2xTjyv#Zd&b(sHXM}bOdG5}+XNG@SmeW!k`$0|Y@eZ-NA=D3gMg3={ZR@2Zz=bx@Pz1sYgzr}*e5(NUf_6vE5~6y8Y;G^F zpuv^6Ec!`Woy2X8HsgYcuEFkV599Zr+~)(X#g~qFf_rYok9CmK29vt+sBiy|`W{qT zvwrbZ0*prFv8c=0le?$^2(~`mzp9J`I|;n(w1p!qA_y|m~A}t?v<+j75kRV zaxlLH{Z>dPT1)9V~~;dz^d31C~s%`DJUcPqu=6ANxy=&xff0IMqXx46+Y_V zC=sZKhwbN|6SAv_RmPZ~DgraaKOB4{IJ?Q9b05f+fTcX_1e4{8|b-CJRsQIi_9+v;5LZOd0BUaBie{7+EY0$T#Dw3g8d8 zX-?AO zYvD%%-I66^^0W8-c4ZHVNK^Tv6g$DqIYWLD?&`U)G`Hl9L?r@zlr+%oYv8QFR?9*B zc(=#>2k#TP&fADV*;cjCNnP4#;%wG2ocJ>`#D{O&wYqtSx1NN>uM0Fc~a%k;aemPvtFj!YxBRwF;sujqB;_y~i${gxL%9PhN zTE=0Rlo+J?oADx(v)bHdkCs4{ zzW~9HO%=t>;K0S?p#XQ$)FDGb

QFg(lUiT>I%5DXvIE?68iPsj3OT1QhrC;OvUqj zoa$9Fl(=s=B?6Ry&2X{iQkA!6KdnARDKLqRZO#O9(@IrO1CQ!e$8DLt*j;MV={<+cTBI;PSRx&Ce8)W4vP)riGe(FPYo^;@Wj*{cpl^zh`ee>e_UpM?K?cW z>0CMa05u&MIPMw4Tv4*AUj>QLlW-bx$59j_XU99pD5e+!7XMONW}EA(0VG94_!l}@ z2V*9=Vr-kvuDco1xjr7>WUx`*Yob+f?F=fYeJvx4x7T8ewRyjr(h2{u%hzkryNAv} z4=-Wx`ZLF947o*%lb$Xpf6=_1JYGXxhuz0sN<|Fw<{>dkMwI%YjVL>K+rLQ#);#1l zr&U2fPC)Gyd`^@=Snk!aCWdtPFg#%)`9oWZd*G@OjQmHc2egJ^UY5z-VX$!A_O!0Q zE@z6E_?*S)cVY3ap26se);XU;`RM#2w^AJ??euS~?HQVUmhHyh1x~z*)sdg zV>j>Sih)-)&U?F?54Jt{VPxpm0gsvA5>Kn;uw-wzV$9=iSz>m|xzaGN`aTWaCEciI zsa2cfIsaaMCXh^;v(3v4(NXToiBwfSEy^$1ijQqLNcw#mwzEQgU3o6z${2c9{ z4nmOyFJVho99MIXu8IRb?|>yN9-NPb&DQm0<)&bRx&Ya*1r%h@YkWT*HyXU3z?^HQdAA z4n=Cy&BoE(U0a>q6o~rLAPEJew8$~Dyoc}9@-ZzrS<4=YJ8b;s$4|)nW&2%A8C}=! z)>?)5Q7*OQ+YQCqTz~ZW)x$A8Q$tK6q9p!UA}Z5zeSx>ofoErD(-Oj<@cKaa{*z-^ z?jOILA0CG7_`(~b6kHyxE<=ov+bHDGS{Pbh;9IoKxA4q=L~ z9x9NE*1)HE#=d>ZcFOn*w^Y1ju8JOPD8t`q`q6$$$7W{{@sR377l{v)QQS-+Rcd|x zB1hLcy$b%5A0YchZU8HnQU9h)ns${oI1ko&+Kp^9TF*jzL0bPcmLzj~p7o-Ke_IZhq z`m*JRBT7GcGEoK}IIbJC&bSU<_Gzv!UAF|*u}#>W}Jydq#jv6yo`Jqocd>)m95}PM6ps3J{vqf8`y~ z32xlK9u1a@8ME32#<4Prkn8~^bD@XQfDh))&j^Y6Uu^*rWl>4t6&t4tK7VA#UPeF0 zA7y@Sm>p*jTK5>^;~tO0^#2n4d&n};**5MA4R)UXEZ`%`q22XSK+$w*I;~u)5=L}T zk{*puG_jY&<9{(M_9H7vW%5z`)D+*vB3(CD(e)p#&1EyTXz?!4upYt+C%u9ywpMyb zQDL_jvzMEzHHwUEy_OJXHblPh>sWn@&J|R%jv~frCMB=MK-YRe6A2ng={cu1+xlC8 zdC2|I5fG4*O3nkrL4q!mD;4#dSC1uVC1>O<^-ikFF>;%4r-Xo~Q% z={IrVuosogwSR;PmUy3SlHb@&mFI{b-O`+h?jwx^w$p`xXN5b}{nQ3=Uj4I!=OkhD z&V&gV?YA-5K5^|58Rna{mA!RVrR&xjzpb@$_kz2vbsJuN9v4VkKK4T*LKtaMpmCX2 z8A<)c9$l&hea=>>S&+2rB>FhFx?MMO=)D0C5_S;gPl38{M|1h_j+7V*%|e)emn{m( zY@dFp)RZfNpvrt$eik_39hco~PUx4aKA?sN8R_si^?xbzdznrLw2qEq#gAyu+q@dI zSLC*T9}!~2W2JkDBB{md^!{SmiDr2yVPM8D72i0beX@OOYC|1S9Bwc9ah9yr%&WqG zS%lfk@!RQzN6iS4Jn?QVLf{Gmrhn)IyRDM;{6>uBi_EJylN_7&^X6FZ-Ot=@OSa9X zeO}355qyFI<@3$?m`eXhl!7f@D6xW!eey>Vv5$6mTBQ8Phn7yU?l(6yxCum*`&vX-gyAaaux?wF6Zj)E~wrV(_*@j0sjvrxJO|lA5BWoT< z;z?ofec-4uAUO%A`RepdqA`Bs@Ky}uZxi7aA4fR&XTf8r^_E7o&$d7aA^j5Rj^GDi 
zj$JWXX&3XTR`3g}?-e4yc9O9duL#5lt|biC>0$PCL(G;UMGnE5hIpekr{d_iVcf!2JKRruNpqsYo8tIg@QP3ME})U1QRd%NpGNm1xh z0M(sW+u;{w+Pf5pDQRBF9{$Hi-J>0)pI6LxM7<+~a`H9VHt%*g7uEt!JQSCoa%CBt zQ>EL0>&o-obtbyFn}ItBDXSaN#J|T9Ci&ingZ>0tAmDRTnte)37-Rbu;t8Vu(|F|z zw=Fh1(fIW2J^Q&tS+919=-ItJw7t)avs1B?vG**x3s-aL)s?1AKcjz4eYYZk9J>qcpTNGHOoMu#tU;EiS><7*7524c2|^c zGT;K#5!}^Knbg%gnJA^xvg=d^7C|dHew&tMK0nRWQ-QxMb;T8fBey45&Ylt~h<&C} zB~d5a&g|=Fa{z$lI#8+Zzfc*|6A5O`K%?V-`=a@c<_-QgUvx)ti8E$1oQ!d7x4DZT~WwhKR>eh;x%^2GUBpt#?VRmZx1FbX^nobPcDS?#}T z*VnGGpZ=^9`fWgXPH1wEQk%cM=IoM2;d|Bd_WT%eDl00c-NI_Bbyw)}%< ze)mU5GTg=Rc8@hldLRG&^#y9C`Sy^s&Oc;kWL^&0KTTLPNgkOV#TmCju|CS^awutm z+MPY*x@(HkVYgd72gM?hp3v%cwx50f+xX0L-A&NGw66Dkq?Sgz*)rP23s~nub{(-- z#$|VO&5>%O^M)nIbth4$En78eUbXuB;Yi@AWiU?BvQv6>(5f|g!!!3T&O+wxc@uZ( zu|C~{VR`WRQ`(ob^$Z(hDlbs7*qs+!zhVIH_ut&@;oZ9Et2B3PP7buA6Y25Izfwv` zoDN7>Q0frLKYTMTQ8{RD(`Vi7HOT)s{KhiR)x*7L(pdB6b=diT@QIesCI1Vb2u*(^ zeN@hre3M?X-u}qRTwwd>fl!XfsdB|7)=eppl}9FHeQI?&8R^$V6l;&|Bkjx1%-haX zdXZP?aJ=@5oWRQ<`ZHF{@5k`?Wj=FBJs&+GxbWEt_f&rVoGYwk<-~-4u4k7Ur!=)S z5*NR1EI03bWa&&}W!sadC%?1iPM@P@HaD+DYJ?M7#8Vh+7H4u|Wg#J*68+I<%wiW2 zJ#4M4X5WMKDYz_8$z_W`Ql~G1eK42Xjo6fWx?-E;k~nb&nJ64IhmF4#8b&V8D1=#Z^$Zt8xR0H=O6*lmJ3O`V-~Xu- zt`kfhHSR6sxlXgbp)(>}l${*h=gqS-TBGz>1HS#!?vNRLtj%up{jAPQJtB1VG$ zkw^IyGFtne%6?G`0Zh0D&Bv!int&EsQ1@mH&_cybDY6kmOu9=bl4h7?;x^WeHMR{cZOlOid{rn-ZRqRHZ)JL+)hCBPG*W{dn$J5UXp-_xdY zC(ZG?0B&4|p>bagJh9>W-&+AO34Wf8Ir}KpxkE9t{{BeGfpi- z%_1QvNI3aF(0wwyaKcz+@G{1`Eiz*uS9rqN*>LATsJlMrN<0cm5;Pa`8$K~u@GhIr z_R|v2Rec-`wlIDNCA^aJTpqERs1lu_O~0tqs6z%q=%=%d@O#Jd=oWg2`>4x`qBeR8 z0%N14aU~gLLzYb9Abm}=!IO(iky^Xla|v-dtIQ#ZUthzlNYbBCA9quJ(|yWPcuN*~ z`0^=>=Ph~BECN@Q?k%}@ZU$tv{q-18l+nzLUpcAODreF1aKD?DDehH~RpYOg=!lfD zK9!0Q6Gm16rAEoax1dS|Fi4Kt%m$yM>1DQc8DBGCc_ z`j3x65TArOJu`!!jZN2_@i&o;=K03)H!T8L%bHbm#e<_M0vIN=Ki=03!^Vy{zFas^ z1UL9Sm?g06mjEs5K_wO{g-`g9+X_ooTRU5@bPK3Nx8gRkanY&~R3Zjg++`LWUpKsN z0b1s!>((7#xFePG1@?8+3->I5#N<)gMd9lOx&g15%_t| zWScdVMr4Z^1W@x+4)?9KE@Dh-T%~6!vF#Q_A!Rq30p>rFIw`Ad1S`>?C5sr6$c)}4 
zkF%pI(!xeic?r*$KVOn`t2ROIJDJn7FxQh%M(cYJZK{uB|lkBg&MvJ**Ywp73 zj814Eo%I4&6AUFvIA(ZHd>K>T-c!{U1WH*<<%&A8&jwG2zh)d46ZoQL6Mg7Ui#7}R z0-!TPwz-Z7U-m7b(TY@HJCf+G4&Y9H_i2!WKD>&J!-SEjMuVe24mma@m4^ctHE7Ws zId_|u>fk+HIIn))a6bQ`#p^p90*p@_{7uWb;oUe=u_@C$D1<2H5XF4G>`jKb@hKm; z_QA<*_U*su-mvC0y8S6%Q7%tW5`GcsF(39GDCCnAcNXAg_w&3QOmRg{hxf8;_R9}wvc^OUTp$7D5A!+=5miNZZC2Rw^mAqAl3Ke4epM6gb$$rjDOMsBP$pRW; z>>-TpCB?-VVv~bt7S@;h)O5LUsdkNd_&M^7(3j0>zI>biBHq2bs=GFHN63rK?`l6+ zq$UI!sPc_nV9{0}-Dw!o4Yu`EgJw>1+Gh}$F zGay@>9Nz3uI6P}pjBlgJozr_}vo^_GSl0Qo)C}%YsGL3$pjX{?zXI~Flxi^az3?sL z)-H0>iBD>!4~#$j4apD!P={#6E%Ja7vWY{Oc01HRx1aQu<_g4p{OF*P4r+B5QZ~I! z%)Pj*u8^%HEx=cFXV;a!xSlA&XFDt+nQvTr*GS-_IZK*jn6I4u)osCU;(WL$RRrVL zZW4dMOyZs`_88e;;QXSUPkz92p&!L^SU!R5!w0vrxwPV%pT(Cf2gL7H1NP>7=QCFt z9vO~4`hE6SF37r6{KYz{Z6Y;#=q~S2N(qL8Oqxt`AnKyyZ20KUU1;5Z|jFktntn=j?ueD$Xz0tqk{-Cv`FPUDZXI=Yi#Gu!WSqu~^J z|6aq^nu5!|!T2f3`6wiQZk7PRd?e=>wX4NC+Sac17PWZKhx*r;`J!a_i6M!F2g>5} z4Mz!l3Liux9WF2e-l+Fu-ziv+9+8Xu`FniNWHnfbI*g$jXId5XVNwqFxaE@;ADq+L zV%&yk))z6m4;VqxsOCsMk|iIei@WVcbAF>ly;Rgt^&D;jv!kfS`kKM_F}Q2!v<{TWF# zfK5up!?@7pi=x8ce$lC3<1_MfJmwl!mBj|35o2pE`6=okBG$GlJ%FqZkZu2y@8WvCq~eXfnr zLg2|yB~%MC5-HUkXpvE#{eW>^l%9gTT@*<`}rSqE;RM)vO<#l%XC>3klU zUxayxy>tYhTsk^gJpdt{|G5$d+;UG{Ef(Vdr;JJ&8LpAlYtw%Hh;v+_YYlfBgnPUr zyYT2;TCTDF*tBIv-418BLHyxm(L`HdqJ4--eY(_vV5teFbikBA-Nz#8(!qierBp2* zMGjENUbByiM=_qkV{R7;O+lCp5pf&~Z$pG|*F&$lDM+^T@iCNeban8xj@m9K9aRcc z{xa9|&ZjcpPV7|W9h!U7ac{DU8e9yiwNH50k z5qTufpq1aMw{nB;Q_FA54Fy9qGn~Jk&Aj^r-q#da%=u%94l4oKN_pGzWfNHlHb{N* z_GN254IQ%TtN4aSnG4mF&bc)JLH|Mu+mpGpNsK8v*YGMSoR;!3dl-eG!A% zVdkrp1oW61XnGmKwPR767hPf$$OL}nZI@p#zT+h@)T&QQLz_T=>82|2&HkAW&+$kQ zT&-SfQWH6Gpg3iYD^;;3-q(%MvR!)oXdGqeu+Tv~c>lo)-I(3gM8!VIfYH^Y{#u@% zmWEdJB!Pj}`8z@RpvLS1GAsdXQ?_f>-6-q(3SBX8V%NIgDAQ$H%Dbl+fGdX_Sw_UC zYEJ4ODbZwXP_S8EFasx7mxuvA#lDw- zpdk^>?D`msnv|o~q=SMn*NF~&e-pXi9zPJ2r>7czw+~rqC6-grS4xM+{SD*2vN^xb zNNSD*-aIMQx$n6@9#qCs1t>24Q=M=jBaLNWt5GJY9fKWU9=xUIFo(YwYUJA-8SKyh zQnRmN%5NB+fsMTBv4j4yx=QK6G1}Jz{Is8$kb>S}y{Gj~a#I-W@FNa_`$J&M0U7tj 
zUX&>x;7-RQDnD8kL+n3-^JocMbN>}NEI*c?Z6^>xi-}=$8%=xV;MiF3IhoL8dafYB z(;v67<$>l`-psGn#qXn7k%V8f5(7hwzzJ-M$Qq$Upqvq`BTZZVa%d<^TS> zYlI`4XBt4*7Qb?Ca(=J>4ah>RhP8 z3IxK#b1#2-t0cxM_DV8sO)=FqK`Hl9>+YEY2?kHMR=_=^$8gd#s;zKIc=!wlu!7tT zX1EBKx~6@`!cnfd>s2fXN9}S=lsMwW>PC$kd1**I(t=1sb%$~4EcJ%#ypD6zN17%d zWUuGy7#020IQc`b`T{9Lk&C92jLp|7os<&Xf>#_8SHi1MNujT!&m-Wy?F4w&)cVIMC?euRV7kEEZ~rEj^vSKlr$K)2zzS zcZ4;l!zsBNTb6?khOb9a1}w7og{sMdP6_5*x7jW(s)0E>DCorBZz%2uAEu}ZTXDa) zJZ275%sv@ZuSCq!^>(71G6L7I3tb;%qSGrSkjijKjh7rJNuvqSh!!idR)_%ju0_Up z#Lqwtl`~OK!NMjo)OTkizqSaZPS;D9dt&8IJ+F=aJL~ne2b%Tz9X4Wg`8M=e#SI>5 zvZW_L&_k=4M!1E8nyGVusuQHy^Q34ev*qOP+CSW5^Ip=Qi{uG@jbbsw@uoCz9QBVi zl%BE7>Axqvi8E(gl<~2UXr(gE69sUG(V$qG^k0}SIa2f*88IvnaM7z|86OTU(1pb2 zd3Z~ddhD{}(}xZZ^_;{)?}*{q1R!(HilZ=Jm1{8;)ZIsT0r-bqF&D ze9jhO4iK$2=F=VNB%+$%I__erivp(UYpC|sswyHbRX5pwNeZn5LI1ra;Hk9Ev@7dn zG|!@}xWa&?@%tXCq}sz#AGOqdN&B!5V-e_xs?N1x^^x^0wFR(noc6T=Tgx?D%ZGnH z3Vygp$J&MY(JAz~L9!aPu5we?#19yee8OMhczv*~OF~o=19!`bsy+DnEI+8`ov76~}%D10T>~ zfGV~XNVik;72@wdxk0$9;>Q}Al8UNs(M)JJ4>3l1MM9pg6M|5y#weg}8I&5}GnQms za5k?~r) z*TslKlpn2*l!hFmGY9X*j#1}(!xn6<)mLEfw{m#K&-XrxgX^ zRY`qwYq4E?$Qa;CE~D;lEV1{1DWp|}Aqx=ADa8R-o!+oOuiiEA2Qgo@aPNx5Ca#p! 
zF)ENBG;RABjw_elk(ZU3*r5MLB#sOcWD^j zFc!K8$4eZVAv8mioHv8Llf@_ZeKW%EjvVF`??lrftKV@i%qu&Wco06HC^0Fv3h_7h zF75Tc_Hl>UD&yB#mxcweaQj!s__n`9prFznR*+`p1#<-qNm)laU(X9_JgN@0!P5m6 zs#y43pU;mGE?CE!D1C%Ow2lu##0(Xw9~}=ssNMk5`weMm0KqiTOOqrbRLuC)+kmK> zw=c62$FGL>&>YD!O2JKJRVzEYvbrWbOG6Lpv^mcR8jy}Jt5H^_9jw*tJ1;bBXjw3 zOjT>~DjHSmHo=vyLo?x472PtGTnaRKFeE-RYZVw3Fb?7aIMnh{0STJg5PbZNH;kH5 znUbHU%RFFO^YFPB%Z3N@7LmOnWC^eTdQpR#M`E>xkI*&G24lsCpi@uGrNtt_Wdu;N zpN?&~%@MLM%S&9ssL+}kJuN{^1CF#%5MuX3$s7koYvD77Hy^j#chPq}S|6zj2lU6A(ZC(_R}I>=-J6g;SjUR4dc zcI;H;@-q=8!y_|MxPk}9Vd5H!>IfZ00M%;B;n~LwKl|_3xVW)oreKJIl)xQ*ZA9VT z^H=)Xa^lVtw7uT_LVqd{!3ay8o9%kUuRdsp)bthAiBa>gA;xi_yG}FjM2Uc?&crHc z7L3syR*aL4Av@8?;2cU&l~^5Qc2`6wm(3t+TGbTR4TlPVp^K~V{U16U+2i&p-6Az@ z-&^$;cAY-8IKzIOiD$aCzALS&Z_W3=6I*zaKAjD$AN&&Vgtty@AAj4k4|wAAY>#;+ z_%9*O?c#q+v%UKf^Z+=LpMt?8k^Eaw=MZRGzi%H9I1t*&|W z#+-J!)PR!VVqmqG~;yiiLA%o0md%%#l7hB5gs7i=nuzLLIW*-01KrAv7`Pg$I{N#v=X z?)gdeITy*~pvSS5`Fv*L!kIWw9f%W|y<_&#v!k2H#^%Kl-zDnL-|zGBIsHE06HXgn z;d=V6L?^p9+`c&W_&ncIy|}*T#+XOKz`iQ+B$6qPfu%=z!v^9_;#ois{{2C2@{=3> zlQj)i_4J0$kgca7O3|ZY0lY@5wzA`y^rL0!dNB;=&Fl9=*Uuxn|d4W{=px#L2H`75cQ*Ln$K8NAoc}y zdU){}AST8gQ5F?gHB9!fHk7DRY#;GyS|fyJubPoub9gk@es+7sEd)&xEsa@n1hp0v zN9wd+ZCOh>J^L88<|e8N)7TzGUrjfnyF;#N93Egb%C9@$nogUWPkXinB=Q3Lv#Th& zH==z{x`r}Sg`W)-rnersHw4UgKcw^SCq5UkhMKsNYWdV9={ZKxJJE5UxU9w>SC{O* z3MmKnV811{C{&Aj**7W^$5~I)|`^uxT?@1_4-S#TpHZ0DM^7h@aU13JYmMPrFC?Z=wGES!E zb5m6))VX!ejt4M{`zU8(uf<@$BmQ(CY4)nrOXD@Voc!+T2C^j!cQ3y=;2O1GfJHJ3 z6yhC-35mh)y(I{QOc3=LLM9`ePEAJP#x|AYXp~dihtX)19bBfIk{FPF%pSINrSIk~ z{QP*Gj35!~_vGt@JU3;h&L| zI=Uk7A(f-{4YuOfD~KcI6uh-Z5`ALEk0gY8VdLJ1fpR|dm*z2P@u%vX0cZ~W&y+~zF$t*J5|XR@1MlI`Cyth+s0K}KT{8?7?dCw zSlH$FvR6r%rTPC9Cy2PK_kC519d&f?6g?@VR=LJ)2b4`(yX2b5G4^XwU8K^F(viv& zRI+#0GN@0|d6D-~!QT0`!A0U%if7jHrZ*Qe0df&P7k*ulo-eyemTQ~*Z%PvUA$C0B z8qz72{{pTW5D6C3eUV|hwr`oR|Jvlrk_MiD&fEa;Ht%EMaC4| z8q}YJU?WdJPk1t#`gck1H+T186z0U~EZhB67Zn-H&oE%U1GUsyjsZ#t#fQxC)JFWd zbS83%f3`6{6ZV^(f8K&(5~Sc|&9|4ZIS8h7Z7E7z>A8h(^9`m25CLvf%z{r&;=|0H 
z)LbV}>aBk_eoJ)G8MdSWH}il8rQwQS7N~s!Ie}C$Y0;?^k)yiIH=c8Lx4d1H8-xv7(>~)UfwtU>bUEf3tT&Vgzx5NjkF8Lj_;xp)M7dm4}7ddacQ#8j?F%W1GoaNnAP}CTAs5n*^5% z_X_3r55NDI;3uh+F7CS343B%CbD!@*`S$G@6UWw=X`u%u@iShJTboVkxrS%9iERV; z!X&4{kpt_L{Gi27X0>CN{Z;*O`_TK0CA&@WIQ7N&4B>ajQt3Bc=}nRUliMSVJ!;VS zqdlC6B=@(Z>SfDGfHH<`Wsm9yw%!z<^FtHbZ8zmrf&}LeP5}wj!cTB^NrrRo*ySK{XtJC5_0G!sgE^Vzt%T+SLR?HEMyHf6x)VPkWeRZ;|nS%U=dP zPxjMPnk4-_^P}u!y}M%KH7*m1Es|}L=vvsMsjlEhf~q9@(+?;W3a_d6T7%}hHK`3G zmtW#XfzXc{I%0-Dx_x3yK*hooEthaIigj6w5~7u-yNNJacO&XV>~ch zE|J5)a?Id&%%FFYp)y^;xLFIIQ`IYzEhn9ud(&aWkcRt?tR-5H4o$bhf>P0FjZgw- zVCj2?Sb541##H{9_dsR^BH*FR0o;ks6&e@9C)Un@3+3=|ubIym^k8KrrQG0@kD6 zBeH~VS>WKh;JSu%Dp~We&q~uPT+7ZQbNQ!da|Kp+>v#sw;*Yp)Q=8PY)_UH5VH7ZR zwZECROi0rsa+v+B7M`1;`~JA)=oOL&quB8?@N{VwuR@xY@1f&5Zfv-70^zx?&DQp8 zJZ&3<0I9VfdQJAJ#euGxR*H^qz8t0D)86^g;V<$M>kcPUSFZ-`7nl*pc0t=5?lz0; z)oMDs`yNfI{7%{MNmha<$(0=+yN_cHYoKFZVcn8K%+As@vjM)ziL$gzBzBZ#B)hgv z7y5)V8k?8H3j z6-0K0W1N5(&@9_oc`0qN?!?p>UdxB^rh%?>yF%^rU^kq zt8wpj(Y$k7#P?KKPHyA&DjugO1mL^R_F>-&F3c7U z7#UG^^rr^SfD&b-wRF6+;`86}ja)V&l58+^LbVNzi+FAtG=*Dq#M`I_s`-$7voG<# z=x%>zXq3slQFnMV|9Pxsr)z>v;pO$5r1J?@-3{3aB=Sp7VwWlH(a9u1L60pW4lDIV z;+%6%VNR3%>l)HT?MiX_$!@rp(aKyh< z&RR2W7jVxvArq_m?HM17@5T9ADDcG*P+7Ly<9HzOd8VC)Z6@3|cym1#t+9JsVU5j< zb)asRCxrquqd145;B}Swn#jji#ytFxGLz%8=bt%SJR5C4kru9fcu>H{w0{$)W zR{U~GQP6jI=beiy1v;Ght0DaO#D|++AegysTuyW9BC6JvP^~0oO~hoGb&H~Sw3z4E zXen_@FfP8vVC%Y;a&n7es=8FOn+s*Hx&f9Mh3+#EY&fXyJ>6;&meEa;5GA^T;1i8tYE#3&F{bOaslWK@RTM zBZ6n+M>iw2^C?ZGFDJ*-s0WAf645 z6OV*KkIE~*(GY#ki&(} zv0-;4>a48$@BzZOwRAPuUM9LAO?P?$o?_l)HO z^?6X4Vfxu3iIbMY@Oqmmx?%n$pzj2v@lwgkAK?mpppbDd%N~5@AexlzS204?SQr>5 z)7B*AsC(UE3ttMmUs_=LVfszAw9I7~jG(2?yAa76^TYJSE}`(5uLb#zGCdVe^2U}v ziyZC}Eecbie}(Pk|1OIiRZbXicMHF*Kz_+Zab?tb3LUD?{R;YD{2DCn znH4XM*d^qgYzRl%xPm%5M5QP*SFS#S6^L2I`OS)xdQ8~*}&L<(1twbzDAq;Y)6~3 zfs~)y6oEDn^Y1$Bt(C=YB%+|$(v*}*!60MTX>%J4z<^;DD#+q!z)<1(O+5y!VaT1U zVjM1<_HhhU@#rFff~bL;6SOQ?t3ux+9H_FrS-|(aL_g({4&0F?vU7*1C3(iWOYX9| 
z`H8zZKQkt}ton`5pr|$_lIbcB8tk>5<129ljOgHbxz?FE5(pg>*Qh3KvG>AGei@mj zBT5q^*}N19&^0iH9+XFKu~~TSgbZN>l{3Rmy&zhc|12iOj!BMd3Q%kGY~o+rsjb*I zJg+5GRvS&8ih4G73jrb^G!7M+=YtGfV1s0uuC1%7u<6 z+*t-Cmd-gf-nkB*qVylPQy8x$KPWMNnk@|xwD4Hm8iJ6Su0eGT9e7Xv;weW7vZb(2 z=4*fM^{%!rY(ro11o`8Wr*;$YD{-=|U?dq0gX2sK^ccs!Z@C#B2 zOSl|x%UNpBAdG_-w6{51T?D2dZsB3cE%1nQ_yNfM zv7i$Nwqg+&wa*!ViCy9b6<`iyS~>v#P>6!lQ(WascSv-eV#F17!(2qtE^9CeQD72( zyZB)I$e^$aTu+HM-H+5rIm4>P#~BJHqZRFsIB=$o(ScbA=+w)2+I|lnODsN{^5x(Du5 zFA&1{D=KK=)ST3Qg1?b@FR8=;9SZxI=Akz2+2i%^$aS>46bG@MpdA{Q`4t3F@4K2U zrYiLa;yk5*PPu%a7djKfb(@e$QU79pB^JkK}sQihDuuhpF3Gj_O|!y`b75 zj5SStq3)k$UHza`aab1I2y}hf&120w{Q>$zokNHJ z6%JkrUD<)GSPH-Ke;D}OE_}oS*`+$Kg3o~8gV#1}*vaJ5?STpUrFmOD1su4QnL(>g zNmY>Iipa%1syyei_*dFX2ZL688R2pUhyw0(eht#YMBR}xY{kC-9_0}Az457pN1$7& z66sg=l0T_^!hb&))fEl#?>%G)`{$g$ZVQk;^^tl@+<)(>WfHPDhPbB&!ma|yoDHoP z`;RYMvja*9jGWs%xEA0GIn*iLF+rW>u*|gyPIv16D*d2+dSXWBTMwXkJU_2ib$?DD zW4{8Y@pv4_%5g^H{bu7usR}UE-2;D(>M`{W% zIn5u&6HzSQ4Jvdb` zAu0GE&w|-fV)11Nm3W)Mn_n3UO|$YBp7W}soDT*^VaxYoiFkUuckxSwkeRT^9>R*G z9Fl+OE-@XO0JjSaIhnMRTaNz%xehX9w4+_7fhImpjmeM9QSudo@^e>rq`WFY8nfGw zgIYKnNw;5xNGa2eW!it%=^lFUaHvq!J%#v;+z{goY!A!n*ciBgADM2K7qDz`l=k$g zwgO=ps;wsAfgs5aNv0@sT15rS9n3rB#78gK&`5d@yIjiE!Uml?CNcY_?U`E1*+C)1 z5y4Rz;0*MsGv~S3xB4vAwsFsQL!r_hJ8@1Iq@?1l>ZIbPHJO5d=`CP^JQX<2yaxRW zpt^44P)E#66grB?kKGTFuXy4K36~-7+8zM|u4}&3mL$>VHu?DYr7Pb!<~#*kGSb0H zG_?SEj*Hg$%q-MaG;UQVa#s(!#)f0~bhWnXyTUXIP;Mb6`tuVzjcDfO z))cm(ox${9mj;B^6n6(y;wrp~^j%!ztz3mw+hru;%i=8KN9IPM@Y&!3q=;@Yr{V#D z7f?#Z0YIaB@PBWFXIaB%qIkxt8l#mb5{FMfhMc1C$1htN6lH1&O&~c4NwbG_eutQ_ z_;($uBJZ;n>gj+V$q7r9wwb$NO*~q;{7aTuw>9GZmi)C5TZEyk_<6Shz?b7%xTSIcBvVa)uQbWRR8%K zF$ufxjo&T+H!(#5Nl`D}(gP+gu}!SFr@zx=D9#tvKdG!9=WYE0I%5uaWz^C!(bnQ? 
zF3vjCB0YGo9|Ip{ejy3BI-?yFrCed+oW}yIxlkqQFEC{lN?r@N-5j3La%n zD)iT2JD+>Ud`@|Lfp0b$MlJ`s`0EV5m1}mhR#mrH7H03Kdo^SIDp@cf5_Iw4lqC2g z(x9gQrlz*0I}#9&0{FCDyM1zf(4SuizSFftGrtmuGhv*K7+XK?INQ@{sMyD*#t%fL zj;S&aCWgId7f81uYpvZ3^br_seDRWL?}v5d&0d`T#fLER5lNC?f(QVqb2cdTJHoT!`wDGa#1A z!V8}1z%D=zuq+Dmlz|na0_*L!*0?iF3#qp{)b37Ayn6yR%t5|p#T1Xk4)!VyJZ)CO z#c_I<#T;`~$4#15JD8sXi@7l}2_0B=pvkjERVu?B#p)bh1Uy=Ha z6u|=!ek14~!~7O4FHSxwBx?44;*T9l*N;%u!UEPO2z?g+4!QWOX`KbW8)}gWb`idh zqs>;an3XWDnU^5Ysi$8tq*^82>ci8Y^^wgxkUf8mwTy89eN#`VCNQ$lv2oxwGKbp{ zZYWE4MYr?GICn~MoQqZ9k8}QR?x(Qnl&r?~0h|`dGK^RO}~KgRNt;)ial}M342L*LuhVN$*>YWGk#>Wdl3xsycnxJ41ax? z?R_a|(}rgq_4gM)n}|Cz*p%ye_I>Hl*n11LTShg>T?sYR_}4tg5)isPYCc*XHRGOm zEH&9VcDo%g{^c@yod5Tq@Hz8KS*zjWAF6HR?{3oTSH^pyKHL(j8PB8M5?=DpY73n5 ztWOhE*)J;|9CfITR@2!;=0iO)^Y4Q5_1s?3wqvPSt84Z2cj3XtYnx;woR=g6MEZWh zG1dw6qD~%P!^l@~RE@-|WsZ@T)-+ea+5R_f-|fwOl0KL`y%-2LQqSe7$4mVhsh@A} zmYJWd8k*$tCsN8Rb=F*|3+9FOu7SXc>Eg2sVOS19a=sq<04rNaN`C-WL9))u5vu>s zj&GpFj)9^8GGVo>u^twu&5{+iUOVeyy}f5wh@_;R z2)sDzJFYqD%?JosXzUoyPYXiLOGFNdFLRWW zS?1&CM|yBJczzdnL*|&3FYkz`lF4ngq}^?T*Rx&~+)I(p4-2GTOfMgYu!jFZ>#6f@ zAgpEQsc!$dIV>X`Lnd-~JMHABs&j%CFW5n1TiLEBm6zO&o3tub9t`GNZw*7G#CH-Y zUug}alvZmCqa*;Df8VNxpimy@*x?AxM?5s%Ktna{mX3Gc4;JrvAGHagdUo@}yDp$R z=cWK&w&u?@XtoplvtX7u8C)l$#dE442Q=NXK+`%do(e{2E-1|Un4JaBc!lMgoyA7{ zw+RPcv8DOrC;M{_NYM^R@mvp$JRN{O9{o>q1pF&?c(#qXDR0&hjQ4PU$HV#mHo*D7 zy9KTtAOCkp+2-8V76Q;8x;jzX8U`Uw{eUOT@X*iz4Q9Y!1;7q)oazm;*{0nXbAV>; z;nl+hF@@4Wx@H{`=oCXbV)#InKY(*?)LA_ z??~VKz0D?VdV5{wZmK@|&G9>T-Bv&E$Tt?)lw2wIQfs~YopFh7;yO4pZ8T{fW^`cp z?M04`zw+n8;Ca!=^70=|Pm4a=xo><<3{EHn8A&Q>I`4`-`NE_WEjPs~uPdK(30Apt zIx0>(t~wC0%L`RTEvbGR_OyiJs?YwE`}+FY8)BCK!;r9GMfKH_Rm9Ogcd@|wq;rw7 z@|&vhPg_*sHH@FFOy9g=x{RZsh~F>z{#gH8gh}*e3Zum(RJNhk>SJVDnOWuSEhzSM z9jDCgP==|0kdY#o?F)k_A>=LoYHAvM2#u{z1CZ_5f?!+Xw;k^3{PBEt9%78 zmVX<-VjkXw5k#gx=zgC3zhm`3114l7%Jd`yD;Ds{TkBcS7O1JT^Zg3#y!hK@lYoZ! 
zIz%{uG+}~;+U06Fuao6qGMxF_Sxo<2FFND04#MMZcN>O#aL2!pi8MHxSNGsm8S%#jTt{!JOb-IvGPI~(}WbAlEvNw7aR~hCT`J600Qp|ku z94=)b`kC@sdEUth@W*iR^`0nh2}Qz?;$!IXp_?)58hC3}ULWt!67Nt|0PnKmRhvhB z#=Z=}lq_kuTh!8~cW_O~VUT3nN3AKMQ~G|?FI?k3-?vr^p|z2A#ijf8TH>t8d0~z} zaR)0%jGN4e1-K1~dQSbl#2m9R`;1w6hs}$7xbSZbI_tEpRXVq48OADCUfPzBTv~bI zS>6T)-o@UWQ*<|aPpj$lx5d|x?VHwaa|zE(UN6|R$VPjU=7zsd`7WYHdL$6t!px21 zAz_>c8J?4@iC-LnIu+mZAle8^ znvJK#FTI-d_NwIDcarBx+jhe39DFJX?@;|$EP^z$g!TJIML1w&_f^GcAtRjqvEK=B zx&2;olk3@Ddp|uh{}x=C@rJfxsYb3(o@gogh`z)ymu#2rg=TZk6zv&NCYuVg)CjAHcsD#ya6M>*SQiP9MrJm}u9 zBvndt;^(={HYj_z28HANYY7IXvRV%KUwL`fW@p^+dMz#2I6OWy_GUihY-oL#PWWXs zx^GL~#mAd~pvy`dEYHr-oMWu>D0uV4-^!0xZ?S*g>Yaheqf061k^jXyc~@2=$GEri zpCoOi)bMWCNtIPqapkFE4&$oXcOxcJopb5g-sML~(zYkw;k0c?*1p!?^~$N)4<(={ zEq`9#?Ky5>@9U-XZhp<(YW}`2C7DOw=UnglPGSE2c2qse6#M+geQ1TPYr3D>-tBI{ zx?XL3@@`i&Ewe%y?eXK=-RrE8Vfg*+!nhy&p7j5XUhH_CZ(AOLg!F6z7wIv;r?qr- zd2ep-V(rfDW^U=|!OgSRyj!`Zl)zkjm8S8yk9Tf}Kf`J@pm%?#;#aZOLbH~3idKqX zNyb8kr3llI=++M3)sG|T+7wQC2fc|X}G!j(tg&R+x3Zb!1Y?$McBfW+Qs0B z$Rm>BQK2hGCEwfIsz@nM!Ko>yxnruHYNP$Ly3~BhG;b}Ku;1J%UcaZ{Qnd?9YQ9Qf z8Kn38Y0L#t_|{tVs7WuG(Qtq!efdEh&{YJIe-L;L3BaNaLY+@k@ML!&OSU)y>J zB^AW07fRL$(b=DJ2}8=Zf){VfhI7|#sNv;KbBiB~G&Gw`I6Ec|E*-0*&+F4R1N9Y) z#K+c20_$eENiHC2R4(lkH!h?B^j=b?rJL9C5hS~2aXAxQMjXPU?2b{khk=H-b=qfB zfisvz3jw0ob@lQ4pxeG~ukA9s+(;Jq7yPGB0yVWU!o@X7M zEqNXDe~|K%0si6W0oU~6W3e??+7Deqs^UYVY`2lnYvdj)i3`fHCB6&yb5T7Go9NR$ zpg3WAiLS?(&ba{ROUjljFzv9?T%bR6mK*-HZI8`hRB`Kq=4yg@wsUvp%!Q~se5rRB zKUTzvPg2eU5L0{v(?4 zE_2?hF>KQ(wIr_(^}HFRXT^)`C*enW?z-!(4@>M4yKf#Q5pRs}&9^tlYpXiKDXlsJ z1bb3v74O$)=cR40Ixat(?_u;jYX1r8fxi>ZbV<1oG(u(P+CDFJG?4OP93JAlS_O>r zV0nWKT|x(yfc}=o(J;*1{A2&Ivnnl6@9Cv}`w}E`gAAVr8oV;)uQ$JPEf#%!e{)g> zl-fmYU>Rd|GiqRK*9Aye52y=h8fFS84lN<=96p zrdvb(Y2UV%MvuT}E%&td@Phm6`)kv?I*;4B{qxJfyQ#p-7Lua-lQfT8ub|z8#nHw; z_F_<@rkrV$?ydLKbBOl~bkbrorz|JU&R8u|18a>|4$w`*w;;>@llmdE{{B%a$};rK z1K*U>o>n}ng^5R`hObi(eHSXmJbMv`V%wC{W2ch3!Z1(e4d3|KtWp)I_0=?@1C)1= zayD8&;13sRhf@!$O<}^kjG~Jhyn{PbDh_>B&byXqC(SE|%$yEeoqOnUzjECzo_|~_ 
z0HYPF95!!VAA4>~Lw&AK!c7#O;?7? zetS)afg_-w;Du0)mf5iD%PX$tFSlunu_bZTByYW@T^{wu6C8mf@$y8b2Qz=eA!<$S zDj0Y6ubjr&E*&6@HU?+x9MoT#FB!u_SVWwCb>Z%}87T3%A`z9SCZr-CZ4o+q%v)Mh@!G z?oMp&XA4g0WbN@>tw=0g$jVU~9*lb!A?V-uzCUJwFGBB$5}&v2JcchKP`@C*vAg zxgcfQaH^w)ac@7XcJ1zU<2sp`4y5fM7=E7H(5ko>KKr3GOZqbeU#;K<(w>l&DIO%rLS{xrk^Oe9V zJCA@<+;0A05;3@CrNKL>)+H_9l2Xx_<{bfu8#b?SVN2K0KP)CE*yy%v?_}k@g!p1k`_b#GP#+%HM$4Q(Q(6Gub zrVWVqEg7Z?;$RrQbQ2_{_LH4LliEJI{NQ8peUqvAb+3Zn=E&ew-+1w1@1cdQj7eYe>JW?t=!Bjs?}uPZG ze|r0L&rR|7$m6f?r^jA$mmjPz-4s}AY)zR4$USgwPm6T3qysaL@vpYU0{rftRA7wa zIG>$I+bLj1y^M;)X~e5=vo%F~R~@SpA5r*~3@4_4)AQzWe2CNMfevFr&Oe!Q0Mb^%z~c2xwDgZ^+j*kS6-UsyMjL50;3|0cE=qk6hlFF3 z$U4NiuMHMHeNltxknq&rQHIw-o#`!Utg0~_B5O7x049}(rQr#cOq z<1=(-GcRCt)XqLWc*#xDzl`gTQybQOA;c42=7;~ka%xS_-^7>(Wt)KU3rd-&G-xdicFrfuBK&%%qq zKUW=pv_$62)$AbAHnepY+XRkh{c>&bui!amQVH=e?6EHgG_mDV|Gj~R5bVh!>r8|CWk><(}0I+1J|#Z zO||Y*A%JPi+uU!r$i$nbL{UIDt4`8}AJ5YX=ObttF@^Sm66+a&I!zx$^ujv7-7vH+ zEl|Nh1-1K^(*dam1;v8_#QV%ugmday{nXSH^~G z%W526g+w>v?s(HtrxrtLpe{?T9tK@#z-6DoK<0-F-X8)iY)v?vs<|T;%~zqm1iHyj zAq67y2^_sMwp&{JAa^%G?p*H9b${A}O18pE*CI9Jl8f5N3DwqY>{<^caKxXLIGq%h z^0PYAL!gpwN3Rogk9lSIXNmmU#mr`fi~w#K5aDx4W4cb=@{ z-iAp;@+R06m{KBkW%wW6y_M|sR2LV%QUemgP&GtE?!C<5Z$EJ4-8K<3(ac;rGsit9gKN2J z>RQ7qn`;kd%&aw?6T*c+`6czy^ZLOw&n9C4G!WMlSDU4CF(Iw?8EzNJdi$j|K1B4W zOR9S`{je_1ldKb@YRd43$NU(m^`7i^LA9T0LTVR65<@?>&Ul`vd6(E5ForIzzb-wq z$0B=}n0UQSYtkv~#4UWqjuG>SCOUgY%+EW*MpFs&@#BESYnzv@?8)++*Le}zs<(Fm zE_3-OX0!Qa!@P4Mr3W8}nk@%y!*s*7Kfy?g?4bTu#Ez`RZ@z6YKf4Mq{IK=QUPa!@ z3q`96Rf0}D*^KT#~oE9aDVMd|e5+ zRLWUdDw-;JqQ>d^B#TUj+g@$Bjd9f9=8WFxidAf9D4#7b^=unLwYkzb*Kxr>7EhYQ zuIzieu2g**@V%#~ysPlswhD%9^(%%Tlhw!yAX_Myc&c_UJX|DM?Q!o&O9oA2RITZ^ zP^y92xD7PH;=d@`UsY5G|92^NG;(THq#}X0Wxn>=}>B+QbpJ@cin=h+><%bWZv_{1&`WXQH zOY?}gqm~KuSc(LSas5$9f|9RJ*~@-5IRHmSI~)b14Zi5IY_C*(D>NBa*}(eCNL^x(lqM4j8|4f6sGEkf`8-|f8A>D@vPlDL@1g3 zdS&uuSGRGIL2~wO|1n}@E|r#(8>kPM>~EqR?|%DA*dR&QnM>RUe3&L*v#Hqc0N`lt zubZ51M+|F*4Vf%6Y4&F}_tn`mY2KWPW-F#Ziwps<>``SRpd@6;Sv`<9nCH4l#zf8i 
z0Y?i;yIUanmLVazH+ciu|7HDG&dYeMJNAX`Fq+tjKN6DZ_*iTSX0uUKP?}jW z5-68tm&J}e7i%b@5pQ$mvv8RW&E&ssN~Hn+yrr)lCPvdxbh(m{q?p}2-O`j@QrkO$ zAx1PxHnq=uPGtZbt&|1;QC%$k2S=-5k)=Co`9@4S=J|Eg8xzm!lH}ZQ&FW#p#ovTN zS_PQCM3I-8Hi2Xo*G*lqQ5{lRM{Aja`nh9D+cNksLyUX zbD*_rKHr4^msa^*#0O3WG*FS;4+|mq?CeiZ8283Mfa$Cu25KlnP(u}g; z*>Miwxt55%cz$NkBm$o1yiyU1wiN*=MMqKvXO4I_S(aecyuWx>8lgu3pkt z4-b>TwG`rW`~zyev$POAGXPwOafWFC*70<16E*sr7yWuuHinxw@d6}6@E775yMH3J zx-#(Z8M_U=dmj2=EChR1ga0C?B-+^};Tn;eO;&&D1B`b6L`eARRVv#|hz&3@541J2 zGXRe}_CSNRxHf7#8XW~pqDT3|WQCc+3i;Rl_xuB{CGdzB7XnCaN+gFOarc#9~5^~kS{gs|8(2vl~%X!g51s1mIKz5;mm%F_&xo=t_9NPd+N?#KA4}^1DLq->@Pz$9a z8kL6The_H%lrAt+yY^Kxg-U7o2j2voBc4>lt0`aEJ(SJZBg*4%TR%1^EgC9rJcfLa zW6v{-yEaIq$wcQMqTsXrjJxF5KOQheZKpiFH=F^c7aKGC)Z0kpDZbSMri>@L{qr6- z8#9<^BsMFVdme)WFhQ5}dd!gBmxo-;((TIlnEs|8|NI#@{qP5h)4RI4N9F=Q`=X@I zr^I~w=kvTSy(ruAr}c1xJx+B7QMM6=egeg>J-C1P=DSRl?Pp})IC0>#o1JxTC?*gf z&Uok(5s5J;nh9d@v$o3k?n2V96MjSuV1Ol3GRbWtxo$dQ2Z}9cz`viU4G)fWq#`<7 zG9{u1#XIBh#;P@Y77ZZy0qW5jZX(AKR^ne-?2r#+A>SE zZ^p8}1P&o=tiX(H!ikG~MadZeT1XU@ma-lD<5m^SU*t;^JNV?>Cej!7yxS()x2miD zOA}ijJB*9u$1;3_Ep>v~H7Bw%&mgniLa>~~hab*-$Eq<`S?WNjGY=)v?%Ky?8k~H5 zk^BQ)y-yM=$rAjfP(SkPm)a|(Q~%3epOYv4cM-|zj^&!}{=`pJOUsn6Kr>Jyb5nGG z{wb4^GSt(~{r)SlCQG(~8BvtNNtsh$9gc_}(EP3tz1}R@&kSF8F2ZT0te&T~FvnN# zecR6J5bzn^!OJtQ$a(q2IHkKELGaWg*n$fys^Je5gS*?)oA)n(7I^FR0Qv)!_S>4q z&rqc1ddg<9owafsQhnNRBeUso9;fe>pTJcK{+^kM*To!YXKd328FK}NZl2`CsOv>x zpGwz``*dSxqfx9IhJ@kS9N*sPbDcHOMus&n&{q;m<#ziM_O$8RP=x+jubZBi;SOkQ z$u^}v4;?$AiLH~y%xcf=%ESjdK(-35*C@9$_sxiC4M9EM?k*J<>G};>#B@48W{9My zyC~ZYNJFEtCb3{&oy;{iW%7w&!=TmOMstrrf;qEC;qznyPRKTCFwT0*lj`*|Y<460 zwJ3vx8QmB8n+tz>%W=P@c~W(n0vy`?JC*7PCD^bGADNb{Dn`7b8>D5)rQA*~(C?IH zt{);fpzMG~Ejqn%m;K61Fg0xLrSQ0FntYrXq8JWI|5gz)CJcFmt{Ec>C(D zFiy3fGUj_!n*Bd4hjt-~kaTUJfE~q*hb}vmK)D&C&#(^(cb#*t_B^{OPoVtGiRHMV z1!Bo^zD3kTFdx9sax%Qd12DAK4;b21Bi(;6w7m})+F!xM{r|<#zIwpWmi&vMr6eM+ z?Z#=95p@!^(ISn1jSqSD(_o_5=>bDK^A|%q^FJ8ciQM9UFtmOur9zoBwq<+-;>zWT z$ 
zB%t=2n((I6CsENae%1cVq5Z7%z@bI-!8!^P9yqkD0Eag9FNZcwyFC6~@HN@Y&Yb*0 zVfc>xj*}%%t2J)$NOa<_*dj?UEBTG>(bvVGTI(SgV`N4G2bv5ZV-Pr!Tl~m5tL!fN z`A^+)dQG`P3ccx1TOP6u+|M-}BqX0Qe3tvIBtX+kA&C6*Z(;sVTfWFUJWf zFy%OHOPtww{z&H(tJq<(N2DrK_5nkSn*!1(2a4i(=;BR93#W7Nuz1SX5-?%~c=e8=+Zy(dbGqb~bd&4qE>>tb{#TLf7#Pkx=un{#$9tylMw-RT3mi7T~k zr^$F7Rw8}MhkrilnS4lFZT%8#&BBWw{vGoR-r9@!(2rP>R$ARb3IRbUPI*jbQLzD0 z*4v{Ze@V19yn5gF(O!H?Yi6Wrj+o#3GRU-r+)&_%JokmM@OmCbsOE=0+KHegeT;%u z1V)YY>C<+sBv#ewe-g@(HYkg9{XLux?a)Z)w=%e#SanjHJtv$8?KN9%Wt#n$0>!H3 z^aG$0cL@6C?-53|mq6_hs&vM&hF3xRzmw6hQJ$bNVJUrLe!`>@pSa?0oM)gV9cqye zkZ6wq60IOWqTLwMvr5( zrtEmf^z-X+&*w^dQNm`a^exX3ulNx1574x{@E5*?wge!<;no2v+s}jN1EaEqlH=9G z;ziu%BsJlwC%MjfsL2X)3T70<;6nNO=yWG%h59<>ZV5?)Xt#etu6Q?%L^=<|VPm!; z3K2_pHZJ)xRFld`NZ!}NO{vl1K;6;kPa+$y6lfi}?&)=$nh(2Jv)7yPmI+zoVv zGpybbEF>}_l!@wWwpvrb7aXZOxi_oU#331%>YAslS*0&1X)s=OVudio9d#9cV9_!> zuxJw~TgM(`eu=`*l`zZ`SLU^~j%)6nWL?=Bt#5MCW!v#U*pbnhx&{>bf=g7ri=1Mg z0Mn7(;|z%2R!ThAzo1EeR{{yIRurd18Dj11lJH15z7`mt`hr)%L)N|W&=qg(` zQ50insjh3^?G8#WxqQYxt<1*X@B0rwhrtr!) 
zuexg|IK1oYD&OeT27Rz90alRJVyTyTvFTIIcJJ2 z<8AS6R?JfPmXks6uS~@1)~LheW$7Mrb93=Y#w@BB?(`>Z$!!2O6x#FRdg15iWZM6S zvAd3{s_Pqt4Jw@?-AH$LcXw{OyIWG}?ruR!x*MdVyGy!L*dV2T3%tGG=Xt+##yJ1T zKp1YTLQ4(Afj^S3X$29M$0EUTe~gFB@=!} zVq4r9K7K%jmn_Dpu!)fiWiw@@GA$>+Y3GSq&e)ZjlLeLN3O5s3AUOxBCVY%TgZ{Wd zw}>kG1ChfAl*-g`31<2{9&o);TvZgRj2NMsK~$pGfUmIF1GmC)2uW$Hb7=sdDJ`uY zwt|UvDHmB&4YJ@M?kcH$J#=&b*G!STaP7f%{CYp0VC*i?~Cv1={R z$u29(_N+TCk*zYC>{+7)2Kz~)jXIXue`9e*c63dpN)H&cuSFd9#DWQdKb-vP0gaXg z&}hB>)o4|oG}^Sey$p==gZ4AM&tv4z8ZCIM{*Oj$nI-o}qjk>?NvS0@=+}<~JX{mR zUR31Aq?2?aU^lFng2gwZ(f_T{mQLDe$`khBOuzZkXelAmeo?&@EIzi8B~oe)vyUDD zn+&m%e2lo&=%wz*>!qzPX~W3SsY4)7AOCS9t9;7*ZSOep5MBj2%gIe#J4x8D> zOvGol`ys>NP-CIhrE#R}>3z8y3TH6t!XDayaMrP|9vr`%Fn%BEO9^=npB8Pa3?^mI zZP$jyUPh(p+((9}jK{D?vTC}4n0$6F)GyC&hEp)%Du)m3X z>m3l7_<2hU9mB2=Z<|IjHnE0EFPo?Gqni->*B2t(?}=Ho70_JGPjXmUv|nh21sMP4 zF+f+dpe1Ca`KV;=i`jZBQaT5ml?6SiwYUri&9y%stqr7iuyFXv9xd$UMt)@dcBTkU zE^;a@)%MHqa++(1ur8XsggeZwD$@hzz1uR(mjXc!pSXr)E04#*xd*+c3vu%lVr9p? zUpT4JM{mVf{CUuETxwA3-Wu5LFvo3U&7<02ZlF^N0$`!c!>+$^}2R9u_ zV?`2S#c6O;xu+0>JO;l2lXxE;-4$rAdz1<_-8&8U`v>Z`>4dM2!iH#Z(u+~eEMsYs za)wH~#BjE}sAxp{CzhC3zr`af(26ETu_S>Ll|uxoo4CSvKD`&!D17+xZHp`+@dbS? 
z&6=`JKVlc6$9GL23B_!7LBHpX4qZoCwAE?;<6Dtj|F5wm&45>TqVy_>KYP@4lhp7i z;OWN@WB@%r;`Ogh3G>0)+A`DOsi0)Gk=OFsRg@FPpIMFX+z$@zy*{NCd4HVx34Tw^ zpKMMR7?v=F>&~M4S(OwsNm^77krM}Ms_toR=t>zs?njeQT4wJszUM9?=ZG2DXJ?+2gWN(uAu< z^_^zB-*HqQoTxpd$+*MlQC;m!*+Fl@f{mZ^dq?yY*bhU11?BDrk3$Xe7HsUud?z&u z*NXnuOQ}&jtQ?KM-Jrn3R{o`qsDLG zc^!l2LcTp7wHdaypfnx#o3?@Ew{Eu$ZdTgH+?o5AH)gX==G)+YX}VUWFM}661gqx- zeH{vxRJk6)8XtCTcJ8$g4t|Zcc|?Jj5a<``)|PxXcwEi}+JDp)^wI_T>78sDw)ri7 zTsKDIM;^+>W_+8bqo418k4}eLn#V8ix-1w(Tl*Vr94POpAe?g93W_kFa4LHjcGi{e zP<-IMx`(76I1lTav21!``Ct`nK*0J5NKbTKX)X&O@4fEKrZefYpFwITWSKc7UrK60 z@qqMNtTG=hxaMBw?~s6MrPEiUeW5CR@qf)wgsE`+$xzf?+7bxxiPcf{ou_?u*-h)u z8x5e}oEg2m`9h0de2}Qu+>id*97DCMD|e;wkT0$A@YrRPBc{yzqszJ0Rr=DBBwaniANA@Cv=!iTA`(f@8RO&(PVK?IE85X#NZX}^^SDe+8 zWJxfUmDl({bA_#7B$EDY(Mm}!rSGZEm5cFjAI9&TCd)~P18*Ax{GlYKu(SoCQ*I$l z^T+}Hk=dh;pc-0xw6foIK+xQU#+7RvMBGZ9=OgVZ2Xh4I~YGgVsk)$tqY>_{Zgjm3ClUk9sWYJgsq*%onXnHeRI#YS9To1hAYtPs3~ z9Y1LFeu}j;qTr$lHBK3b0;%)|y!$z3LNTHun6Br`n4A4F462S zjmutsOSfCk_1^(D0@SIwpPdVOj=6-3{T$E)s#J{HGJ>k6?S2CkZI&s|cel0bXNopN z)E;peJ@S*B3$CBHZBFB=KyakE6J>fO>XPOcmC3fDL?bK)&SS1GHULH2x#k~^`P%9p zBn53^9X7P_9oGKGj}>_)gef9}4#^QiEJ=fy^SpmHaauFSTND2+u|<3K>+(StkZ1#s z`=&fYZc{o0!>vicx)KIbY?A*Ic~JwmSI0>#ofLw%_!xWQzSfypGhDT^(dn(H?*|u4 z0%OSHO$`jUT5{z%O>&k>7&Tf_r6%0nX;c(Gf7&orwpqOXJFf%vC#N>)6FOQI0?SD1 zt$lFp@o4wx4Cy=SA-c$?cl}#*7fC#(cV2AeufC-RrWZ@*H%ps?J^}BTd&OyOAooi! 
z7s$Ignv4_(8-wFwGO2Y0!5Z7<+RrJQ{;_)Aif4)T6L;T1|5Wc05DR~nXlab`ilLI7KJEHd_*6c6oiT$k== zVxGY_wcJ@Grb6}kxBH5FQz4IHt`Uq)o&kaS#-Vk+172Oj(XE0yhK?0u66d?N*kAMX z`U}1PkwbsuoObk`HsjReI5gCD9n(bWIfWjVw=jTujF6_mD~vEhRj-xE$`K1jpE{#P zZc;q%=X*dbFJDyvn~H_MjLm5pHVFN0(zy`ffOk60U-y$EYd;>0x=a^fnfy+qHu(ms zYvPDGYqHK9q39QxqW6!r(}yNOWBu8oOHm&p!gEsBjQ8vy(UmlZWE2R*Ds`NlDK;bYc?_des&Nv=O#zeI>Z1CS z|6Kh9D%x_ywA;EaKe9tgqN@u2(c#~nt2!)jHS2k9aB)*rjbbD#U>04Ca+3iH?AZ%h zZu&rJqg1`7vBZ}L-a>>g#tY0}+bKV!nlRfK!d<0HQUk2nj`V_1E{hQOYJO9~>CE7S z`D~&iWa`Flvdqmm;LP{o$-$?%5T&<(L-6E)#9FUW=Cecl>0gJ|Y~ZM_uF3kznzctO z%QWPz{eE&vY*pSJ!kDQN3(R6{^_6)39d5ey{vca=&EyJlO`Fc&8jjLt zipT4(qTA}Ow6lQ4nKFry#xJHfX2TJIg&Q%g;$H)W250K>+Ya%&>>L)|Z2qJS{R z4E-v<$u%;e!b<3bm>Z8u@JXNka^zEDHZX zv^@z$XBNioi#Ff4#J(~c+re@_P=Hu5R1g~3g`L~&P%Pq#FFUc90$wJlrf;8Wa4N3s zo}Ofi2#A_t(!7b~mT+*;Y}Jv_EIX&C!m+OZ4~Z6D!lA;^Yh(m@;@~oPM%H>8SAW6T zUh6CwtvxhN0DO;zmcm~t#?5zehFW`JC4sjydMR$rhYUoZUP7eOi^$Ro@xBxJ9Z`XNg?&>&?KlvkGLKGeq3TG|5I|Z( z0Tiw6Jc@FHE<_%&Xp7os5`E$h%-FaKr-m^sc5ad|9Nts{XBCC&F+5Olk(2_g$=9TS z@2#K_;;Y}(cW*a8O}M~+iliMS-`s9>;WO=*t;?%E`-H}*zxXXR%2S#<^bO-yUbR=G ziUs%SXyR+mJG0EjP{d6s22n0D>2hPL&F%Y)jfMoWMlR!Pw^6ykaWXp6P3U;)Z$u~h ziW=Gjaz^mwy#w>}%u?U{+o2QMiJaryDfp~+iS72M%%3dURcGgCi}qJVkqic4(bk@S zZ0?7M&=CsJcN;%r5v@9yVn-BgC{c<-AJ@D^KRiO!TvZd2b%<0Uk#z=bo zbIufbEK>CMW6JxW`~BQ^;fd&>(iG4*PHFsYB9$M>Q9>VUfvtwG9&1zF{W|KkBM2ql zaa`%CL};hyDTDGI!AgtA0c_pQrfNTM7G&ShNt6_TA@A;{n7-EJw5$AJ2Bkk?v@LfZ ztGQoTWfty*zQBpBT7YrkL>Mh=!!Bd>;9yc@DPU*xwBGFvK^$-7T7igka zQWS`Cm}Clh2)$pH<*I=A4t4EUYfR{QmyC)%%$6)G-+9DwBs~K4*?rAn+qW!3KX$S8 z@HuBYh?PvhiB6*YWc+k`g2c(LJSpRKF49a$f`Ya2GzsS;V<FanTp~+w7c@#`69_$0FHL(nWObojr0pW zG8YM-qv}0x(B$9SJR2S9z1j|A#TR}14S82IkY$ZBmrD1jifWGT*o11xpo))jFBvQ8 zoc*{)tj>Ch>Flv_=O2z1Q6C-PXgw=e(G@{ZjPOoVPq*Zqs9sQgcuiDQ+7z_Rcq4j< z&+h=n@I>FeQh=k~_z#XY6SJ0usH!nYu>t)9xK7+~so-OemjlN9%8F!_>1{lv;cn#! 
zgPvj~;peyoS?!T76D*MjTovHaC8#~(X{NMXjKxX_#Sh|@JiaT^V>!Lg0#s0 zg0$A=tpQ{bFCY_MVjkhkLLx{P-Vge*29ZfTK4QBV87jAVxsOcF@Ff$^z8v5vZ$@;p z)tMBmqEuF^N)HcFBGSs92vt|QUUuPBCuM6)s2l#m!8ipslsTL^H>rEHgB=i2>a3PT z3bSLQl8sjDZrV{Vvc!;=Qk=AUI8F%AY{e;*rV%Q+#=HNDv~_?;t2(mWUY*`|-+gAM zBK1ZF3cdvPF!ku-odaC2^_*e*nXyALl+?R6cRS>qn1+Zx;c)7fL#;r<1{pcw@5H-z z|B$r#>8;F~07)yZ#%+Xj2#~bcRX^~+H)n7iS_-4P^3D<_Px*BCi{GwDV}JAMFPaS{ z65gH#csmPRFeN?b)9Y3LF7=A9lHOdp#i}CxZWLA6qa=SE9#8?HDb*X=&MA9&U zp;zw_qk_|FZ{9LkBq1}3hCq7n!wS0bjYE+_-VPP^H7d|1nLhm?q1rg#SPABZTWZaz z-tP=+P}WEH7pk{PXcLn4me%*G-ic>%2XL>`Q^L}AMKP#bomKiXi^7XlvsDJ9O1!zq z?8t_2((c2wX!J6`GmeSMsL2VBnPAyEx2bJja5*l2yFCEX^5ZecJBqRPO$zzrLd(ZG zW(?J(^+aevP$A*wJ7Kf6#_%8=+Ox0C*Xa%$J22gBa=>e_b?NrFtyp5^W01q~-xvgW z0U7woQX$ruIyzUo^q(dSa#Qt$B1QfgYDQQmLxH=r-TTlEKpMXI0Z7A3BcIBl^h~6G z8lK>L1g&LS+NLyf%C2T=FmvdvX3~GHIFg@1_^gu05kOi(W}I$=bv(ee$R}R6 z{DS(ONy=MQzoRhCEbS|bUbd~+J1R9Q1*O|XzWGuS?BsYUu~T?yGJ6AA#PF5lld-dD|2Wn^}?3G5tp*YN>#T^XzGV!48EQ$m*w3(9GEapyueh!)a6jw*(` z%t{)H(U`v73uBXBqL3v^B}F?fi%a8rO4|3emdcYE9N zb)s@Tw9GEda<5~dUQxxExPtBrnr3_}^nmGc)etFtf;rz^Z+7H)sm#ETxk%eSi;UeQ z?>ghc@!Cv0P3vweni?|@M#)Wya2F~Jng>MM0{y~QgWT({`Mt)t4WMmNz}W9(T&2#} z-V;_7{|JeiUYmM(`9`)Sjfa#}5oK_RcGm3fPc0g@6OXpk;$n>#MvWdsex3K9+@ z*>Ca=j3XoGu?bZa7xaFZI~;w+0-7BJb&Of{h|#-9;ma(@A&!UmpK%zq59M*aLn2a@ z9|;~T{rDhjX=TQ};VSL@W=&vWaBi2J$#GEI6k8S^g9f>f*h0pl0on&We-TTXV;%zwkM&xXA@xI{45Qzk@>yA zq`3+);j1*>mA4IdI9E9BMtyj*I5x)8M^K45?u*aBl^fUgd7M%xMqmy{IliE3ZR76y zfG1YfWBeCin%-aEHnx7P z;eXi%OPxUK=pLx(e#>nlasEoq8v<(qmBmqN8k3JX+adzH>tB+#q0r^W&I=oia`kGB zvcTN)?G05_J4#RLMe$qT{5d{TlUoW&Z1*3Vl}H?FHQ!XRm519|G#1mS+b0VP3U=muJ!H*+6$)GyUrG>dY@HmM`v` z9f4?Nj0aIM^L$zLaMtOH)Y&iW3X!NOGTq?s+nQ`EPa^G7Cq0Gj>-$|=8{yGk1E^zg zO=qv3>zn0O-1$RjHIy$u7(o5@m^v7SI$M;4s{r)ERK%N?up&NkoQH;KC zo^!;W@eGe%_w}QJu|y82G#m`&VDg`^cvvYn34Zp|qXJG`ksQqWn>63%g~E@tJl7w< zzOlWt5zicL_kVD-lMAMH>r?FDwI`0&P=ajw%?g7PHjsghLi!jQ*8J_DVn)-p+xJ?J zX>B!h<=VJ*t(?N?@jK(g>6&?4nlX~OwSxQc+NccDFY6=@W@af&Bv%w^ACGd220iME$75GlfW{#;z(#+$mB#lwvP7dm1JGHm%D=>{e7bEB5z% 
zz0RZ#PmAcn<4G4{etfY>k|D0uupE*86h^FQ`ZtXDrrp#KUcD}&bbWooZpD>QZUubJ zZH_lE*D=}RQGPdiaa0ZOd~b{~))ZTAj1;VXgtqoc>^G`9>QB*GT@kJirAv^6aRj4m zl&%D8F8lgi3y4U9E44P#umy~@$CcjEXm6(gVWizhCHl>_0tTxOeIUIm9d{eO?=KJT zXgP}*=TDgo90j`f>8n=(l_mqbH+Fs($Tq>Q-bW99&eq5syE?vMwb8DmR%R`#6WssD z;^z$do4?^hNiq)%yN(^NzwyJnyvV*T>@hg&wkk9Jol0P@m0|o*TzhAZzA||;!rTvH_-KBQ_EuhCfE%b+Cpi{Es>%%|2u)VS(mqux1 zrvLMkpna6_l-0T@!v2lBI`SWU(kPgGjQ{-TzE)!nC++Vgom%z2fGD#osc9gg z-Tb)!C1E{3B$&gyKvKn0l_i$!Dcpszd_n`tQJLgKLvK-oy+4Zq!d+UoDXxUlUb}}M zHIqYabIdBn8@0bP%Y-(#3SVv|UxCMK} zF&6L0`n_#D^0#sNwMS{}{j146RoL&X%-wput^ArR9*nUfv2$rk-^jRmS?1EJ{S!w! z@rR?$dFE&fHI4p(QnkUv3{tU^*cj&oG&c z*EyFj-8kxX$gE8aHinudhR;MMnrxjKK?OI&j*fZto^E;pG(;$g*Dsoa{p0SG2RMxp z!inOGT2{E-I5|&6Mg@0`U}tdLCtVRrtRUxlJQL~jpgJ=8iM=;kUjil%1LE3_#2T<| zDTY%7@-+49vDKBOf%Ja~CcOmna7pN;@TD~iejgfyR=XEpm^i}|NQ+VQL%K&=`m0DS zIom+I)6n5)-T4mnxC zy{~jfY9SMtfFCwClf;OuZByN8&y(v>CLalgb!6`gejTdB!ytzwr}{>aEceLV#$c|-xVewI%?Orb$Cu`Bj2D%Z_y7Dw$a33)PP z4B@3yX595BT9)}yFO}~nB$X6jM4ZPi3r^%l8?ca)kybaGn|U7)X=Tt0U6atj z?~^16m{;yp39lRcAP|tkgJ}rYDh^FdBMW6J53_=+R=E_aQS|!hzQVb;6G08h zy$cG8JyyWhG-e4S-)5uuV!Q7Kold7Aslf+-b08O7z2*g*%&v+S8(=s{lHp9TvYYDW zMh=$Z!7)CS|Fue1Fq@~!kjfT_J`AAM1rO-MsnxhIb5SBETx64&1lrXxF*sH0N-}~! 
zx;NE+mqkaYj05GX{Mu84cEYyi*DJ3pKi(T+Qj2fo>AOz#TT|5Dj+9Q9`;D+89(vJFfqs2nXJre@j0fT_I*RO!`$t8=uuOn4G zZV^zzV`)1k2*c=#vD)k=5_;F$EEPy;nkH$Q_{W@m>Vx&@It{1@w z&Xq|FziHK;FCT{yUvO5f688dwT;*RZVrDOFH#P>x27-L(>$of?-tFgpnA0jMz;A}> z8U2T({dLDLf_Bs3Zw?}rjwK_nLFBImL0)26){|ay2_1gwPEyF{7pBk1v6&Jwn%g#1 zOBuGG?iKnlm|@+b9dYfXQ^8aKkhC#4+1-89>>rUxp@9E0rLLfjAV^<|} zT$RnSUN#z7I4_n5V}74xWE~l%ya%Zys-uVbwHxm19V}ZJ?enjoWl5FqN+}45#19G6 z8!3gW=V@aw78-jlm4$T7FgYmkuXX-o(yslY`NyQyh(?t{jU#@UK-@xDE;r1&WcBsV zJ0n-;tap8eZWp9SRKp@(VLQw^WCgg6G+l})HUU9`^=a$HFyWekgiDVC2{Qmns}7*F zTTdu07Y`G3H?~}*LX_~mqT`3aCzh5{OZ>xuqxoU}1RZ(RoQ9flh7`}zNp@l|q$sV+ zZZcld(#ffWO_C*rirq(zM^x*ZoQh$vr@LV-hJVDNnRggSsVN;%!VT`KAu2lht2zab zv-r;86ceYO#dKJ)E-m;%YG`kOVb2OxuzE?{^$Et5oi0({_Wgj{W1GB%u=@?XByPfJ7V6~6N*$~5gBbgL@ z7`=kI$GR(=cc#q_X*?04wPr`Mi{y~@Yq4WQvTI|&13GT3EENfuBs3zAEHgQba|ktb zjx0wQ*Z`R9w(`>eP=&>iLZWO2d6PU(Fw5-K~AlSdy=m#L zVHL7P^#$EvwkncY;tTUgVySV4@jFHZF4mxq=JG3pILD%e4&|k>G{CDJS^`=*JhLib zgn@R2EvH65teojP<+;BqtyP`#UzIlMNu?e650y4CRvcSRq4RRMNebI&(!6vo!Q2{5 zC4pV9+atpzNweyFKvUs!7PCv@Rd7i(2f}c6T6F48#b;aSH@Fn8lE-6z$O%j>bq69IpNGEm^YBM-oH(vzN4~X zcZrOPGnz2ZOQwbr58=-&_>B&9t^xH~QssMm{Bq2_jV7ioTK$sE2n%ae;DT%u(R6ZZ8?nuz@FAL3LAM zmF5&eWB@7BSqyS0Y|amiS1ncVSsExfv8uii1Gw6$f_Lyul?UJDN6485GzxV z4WCdd?|na-E*UPvqXb;q5`TKUmiBwfilQXDwZn?f)NeW8i?P0bY7jY z9W-eUhMm_Yoyi^erqtLvDKDnBHy|vohH@JNjoFpnRHVK^T2k*lGMuwCInFWYU((m= zf6UzhU|RK6`^0|>##vOBXn%DK8k1hwjyg`);W3d8? 
z;*%29W(Ycs)n+=vH>--LPcrS%Qlohhe4VW)?tb)Z$*_7V1=)Jd%yGmG+r~EGi!+iw5Nwqe0W>W;dE+xpo7d)?@l4Z( z0W|Gph6*djr%F?eSSTYJDRzGFbYg=shL|wasU={RO({W@O2*>WsD#6-sMmpm%rpxF zF1Sf>vlBiLcrAV;avm3UKktq%Oj$KR=UFj#{yn}*VQw?@v&}6t?Q-a+ssW}X$`dw> z7nT;|ys;WCZ0qL^^6bD!BzQE`9+PV>u{ZRwXQOHS$xHStum>JP6HRelbasA$jxJ4xzn->Jxpn-!XIQ$E#Y-%y}EaF{W} zNNm3vCrGCUQub;ZNgsj@k<}$a4FN%Z&g)BBAh&a+`0vqYo7VK%rd5Gko(62%Mm!U5 z?A@gVWSQ1)6{ZfEt>4Jk?D6?=GxvQhi8Xowo7OsrDH>sR7>*w1RQ;J6k$)+w4K+8YzB3y znM7UI^x9p(J1;Wp_fhmpqizdS$ce*%_Fqe)Y(i_J_`#E} z4O(kVeQYA|r)|UhPoCDCdDOTPXLL(8vp>KckZB>6;USebv=43lW?d?>{5U>&1*$m^ z$k#qSJ$$ib z_4c%AiHe%94fV(z8H}4C)K7@)xmeC3dhP{fAg$yZ$V09YhD8XfW}AOe#S~NsRl&FK z%MV=pFqC1oGzBNl42RS^?N!E-eP4_n=s%F=>-_<0`P#B%z2LdMUVb$)e`@LH*u$(^r^3;VLWv*zYqgHxmTCoxiU!4k_rhto%f6N~zCf}z zIjcC7T7wYUeWMrs)+Mxg-KchR#MD+%aT5Sv%lWAKeMFTc3qXj%1TVCROGpuHErbcK z31PZnP<%g z_P4s5OjYpwxI}yGWh#vx4S4xv9;5So*lTAYX-i3`36u&zJj9Ib$eL4ZsMjJ#7p8SN z8w~|XPd`Xlob{qsFC@CPl)H)1FnF zD+-R=7vf*^!x=9=-*k3gbEz$mnxd#?6QO?L-GR_|XUuZ410ew|(d*2iT9!Covsy5- zk97HNEn{wZ@^$203{8#5^|lh1;5Y*qHYatvnWuJIbCA5`5~xJs*n53L$u^O2Ly2zs zCCAOKCwhC1#g&&xYv3@5&J+I?Y?CnXM#S~VS>Ef9Dxy~i<=&`rnLtdMZZ_fTnDE76 z=Hu*^OuE74BP&sN{1{?p{Aew+=Q}SgZBB5>lTypD04TNPR(7d)-e7Zr3iktM{d3NP z)3rBVHCbsRczu4W{J~CPFCGQ;MC-(>*FsjnSZv<#TZWAYCiMtsSewUcdd-FY9h{~= z-M?XXk@ycS8+DSLV@(-k|JcDh=U3UPRp)RHmh8Lu{9j4AckbSjJVFwC zg!ESngO778Kk&QW-F)Z>1~PFEx~x~O(N9S2#uHLI6ziDKE4&Ugv~pjk61P-|)ZTLQ zhOD&tk|!i0CP)k7klBn&!rqmTAMmg?e1Xz$3CQ|MXO1+3^ZKZm6;}OL?+u zfxZ{kD0lJLVRyST3DE?Bi4p7(xvBrLm9O>PM;^8!) 
zHiSu&p~>TmE5^(K(Wy(iJq6K|IH>6IkDK^xUvpKmcw9lNYd`m^RN&gro_G z78oD_mDJxm;{6l-4nruABJ$07kaYg(veovRUCqlRoSjj~rZWfry(LlGipIf7`soA) zjCzqYas8RMoS>&R+tIA2Bww0vuboKtbLF&x5c45|3x_e2TJgsNVBw6xv zfc}=+nchukz+~o-_sWB@J^chAwQMGhw11FVob)21{8=btd_CeD$cEa9bmg@4Z{PCk zIqO-%pc~F+5F@VGTF*OU%HKJ{GtNKRN2Z-6J@x}8ubURO51g0 z=-B&FHR7JDwlC}Hk3T~j^`F`(tFLwnsRU^}?D}FC07&iN7GcwR^+)(3?2bjSMLj99 z(UFeWR|+q2-DvujLR3nd;LOmv~Q>{FLmdfq=f@+*sk11240vkU>oO>Ma zx$)hTAu*BLIEq6{K+#$yqawuKl@T+9K?uSqAeSV}7iCK#lRAzG$?O;86Y9X5CgZD@ z2nw|d46iDqV)K`OnB~`z^iT7F zw`g-~FZ}U%(F?;Cn{unLRq7*8Xtl&RBM1}Y_rHt3bC3TEsh#_S)Hd#XvLkZd7zB6aHAY}h0o zOxWPAv$1OJGGY(AuK|>n+BOlsbt*$e1Hf7smN~|J`Lt-4Y$;MuVbR46DO^gA0fIG{c_ z{VlYJp5yrc_p?yT)(jVyLsOU84a29_!k3g!Qk5YM!~55&6x5d=Jm0{BSQ>QHe{T^Y zR)*RpFKqn?cF0)+dQ0NsTO>yw*;)J>M)?OEr24g|`-dWc`Y#6#R)xHAD5d=YVn4(t zUNIT}(EvTJfxfW7D!MT}saJErHlktrkFNfrT|DL;vKFWRRnaVPqRFZOPnJxhx*{&P zM$`wSoh*16hmLIqN`GI)TGk=`xVkp)K+tTX(!%G>vn#5kIO!X5b(THC`n_FtYZYA$ zOCfzPh1q3Gx!5=8`cGSV_%Cea7G_J;nL&#$QKvO@WV{#PQx{3@#G6wdD_QoR0KSrc|OIm*guuJ&3mxQyRp}! z0BpG_;W5><7TUIP#@IFC7WjdwSph%VBFtgwLC}y7jz4uO)$-c&mFhxqt8(71EDy21 z{F7084H&g?1(IjaMr{BrvnVX|VxLvOW@9P$c3$&b(%M3u8? z{KjK8Z(>{>wX-u!XG=fjw(~SdVjRsrHS|zF{%Pn55J^%0-$rf3vr(&uFF6|1ufb-AQwn|nEvkSLjQcp(P|4N!!E$yZ<>G;7v2Wg z3gQ=SU)Gq~heFk)3{2PG;m)whk14X!-=VL>nxg~MPthtr%~R6;Oy3Mg>b7hJ_c!lw;+KUsV245-&M$E@}_`{;cAE36PfT!N;OCUb_ zG5nTGC{8ZICNcGY0k!P^0n{!;9}*p3`jPpDppJ>4QKh7RGgSR}DgUsj_PzyW{~gQz zJBceE!@PR4JbXw&_BHp9gBHk|j+o;X~hp7>rqcb-&&+y(=J? 
zvt{D2raju^I;lNoWfdwA)Nf$FKLgV;p-r}M!et)IxV|?$&Z>3`sfLU1?7_GlKxu0D z*nuo;U|T7RG$Uru*k|;}VBAgC3>#^O?-VV1K~>)bjhNUQ+DPswf1@FQ4YNfuf$@eN z*{*%6PU1w;5XG*|ZMy<{f3uaujGn+Bm`*~`Z9&Y3Ph)CT}KJDl` zb~Y^Pj^pK|IHTGSh`mU(mR>P8%QyB~6tAfgww1{yy1*|SUOKpNbBFXGlBOGlw!Lo> zwr`?uTIUO{i>;cX@j-N-D!x#!Mghp|=;uB1>_<4A7ioCP z!>I=(`iuZi%Or5y5ccsqYAA`DznzYk*Xcm)#u^O7#=CW2#6bf#)P^UXR<{O%duH{# zYg3$6yEkM-5A{4u;|p|{6x)~CABgK-;aiRkF{#~emyMBpn{{D86Pt{+ise8r!7xrB7t%YZLPPKjZ zYrJn$QJK8yl-V9@7NYlxEzheIblQOdhb00+KzCdK3{)|YYH~|JFw7+?1t0>)HRbZ=I-L<&V7Xg^P%F^)s@)`^` zgMm!zP9y9T6RJU6x+%7WI=PjERQ8bQ!VH_0*29!R+&Vi@K~y%(;@npSdBJB}QvsS6 zLJU@l^OxTHyRBP={u79IoIah$XuOlrAI>4#MtYEKS>imHEFjwu`eUvsPcM5Npycz0 zt~r#G*ki2ALj~se7B1OGl;ha%XRB>B7vv1|=ZWpC?{dsmB$mEq%r%xnJ|Tuqg80wQ zAKVE<9P}ug#)ZVab_bKVK`g|(h1m9&g-Ex^u1NqvwFhW^LTo)*&p7Qdqx_-aqB?`% zOF;TOw#%oo(RBlv90GeuW@MDtxUReTG|E%Z+pzUI>8{Pmm`1Pt^I_i+-vA!Z6GPb# zSeug%c>1iDF#+kXA_fjTrL7`AGx&x?3ZMSKrBwwpzkb4LRdU}N{>5o!YXZ3uTpI2C zn^@geSHjJ6o$uZOs-CXH8*$LKJiR(-nQZpMt2>|H>1oYD+2%JIK_-BMft5w zR-@SyPHXdo(=NPlr!dsyB>Q2EU3F%|{?fPE6Wk2|v79Q6c%Rzwc-^ZZz=LvF`SsMI zOiXD50v3?K3ASC-3$w&019+@29zP%2#!8|84z?fC%uu79>;fG-Xd5@Ah;|rvtk!3u z`UDNM-AHefRHJIJ4bk?}9U3_DhV9=iOhQLR$`GA+OIZX&d!M&$Bub~465-e9%!udA zu?n$}J$=S*Ex3F*t$*PxYC}E`mYESuC&U-z$2H(^5f|4hY}`()CK`|yG!ofk%#Spl z2+r~;I{5|qDzSM_yAfQo6ITFTvgx;oge?7NoAVOYEC-_VI2o8VM*j{wa(w#zM%#zF z3F3XagJJ<|)yE9q+4^Lb1Up%eFGf#`oTRn7i51cv3 zKW8q5eYnAiUuOni(`C*y+Jvj|m@0>yS$^ayiUm~AME^fLZQ)|+gFi%D^VRMtYc1#h-KD?ay>A0SF+A{Eay@>2ueJ_fWj1Qb5<~%6G z@EP;(Yw&0lOFC0)og9q<>Wq3b(j}KupLp6cD~MydKi3s7D$0wXBj(}l=X*lq^xbI3 zKZbCqEHy_1a{f@-S2pR5_I7u^w1V?1z=m*;|* z;d-XfP8A4S)ZNdiY^tQwF&^O15@V(F8PS@zwSxA1^Pti!gB;PiVJz?Ym>24v<=C_G z#W}#IMF)J^*~N=?Dk1c=QrS-6!e7jO$(#1=pULwc0BqTB;J-!?9G_n%{)}1Qz=U)GeiTVaE@Y{kAg z4F$O7(()zRYQ`Dv)lUO4D-@rF+D~6|_y*fvwM8cAykE1de)>-U{#dYHwu>6+pW$Vh zCFT7nolP;!r$^}zc>&(IGnA80debuUs|9+!>Z07F~ z0P-A*yFG#8g}4#S`&o-3hP%rDA!=W_1NU`61FWqstLfs@IMMQlx2N5eaSME6(byZ) z-@AoOz)YHkB~~c#ldjij^=k(Ynd27iQ7Lfra?rJb2SZkFOrMO}N(DRgfBdzP-tjW^ 
ziGze{)p=`-+e(GCEZLq)UVz#BLqKA>n?Q-BQ@%g<+U11;KJew$yr-LPfzZcI?^Ebu zJCi2R`xS=Ha2Ca~{p?!1k!xQAHy3GET?6*+_Pz90J&nLQo6P@h)Y?58wV3V@QoiO# z2>{x-BZuDYcU0Id#`buVT@<@TdDoms0B{uU!#_jV&X;4kxl6X)_Ct5;JoUX zt8DVB-+#%fh+QkZcyhb{2)Yy}h!p^~Sp_L~tK| zr?Or9aw$>xir#*wxv?reBcYrbQIwOQYG*XyV*@?cUpzD7*BPs!1*ALa4zGBZ2_f%p zF|~H(e!f}-B|GUkD&>~pT_u(Rc4a%BfIDgO6WGeBeIoQNI{qABVh2V&$_1^Ol1tAH zS<hL#p1yMu|M#=jGrJKD%LFP$9nj-BmyZWh+|8uOknC5M#rWQJg9T*=M$I8@P^k-hb2(2OYt~Xr6FfFs$J9+m{tnDvG98UQj3r-4w&PBHVXVE0&kqh zb&YD2GV}?{VySn&WF*0{B1zxedy2^&uS}|8+=7tvQCGVW4?A{{30KHzT4O@_6#h&D zV%g{*`=7h-(&Xmdy_;_Hc{ExGkau|z4xj}Rzqawf9oB;_pPq~ zb-fSg>3MGVcaQr%I-U;b<9WGlmdcF)i4L&6@9tRsq=-%xUeStt{&yVddol;WzYTI*s zjE2&SFE+5Pg_ua>9!?(a3)GWgsRn-zTt?b0sm5s5`%X-bD>LJMf1uRPwdvO^h4r_5 zHbGygB5dz%zosL;$K(0v7{e&idljHb%A8MnWfpVv5+yU_W9v(>L4&$A9oh!&c(f0{w$TJ~Hqg`JravV-QQ5z?W zxw{K(J#^-21=DklqJ$XM*tB}Smo&*7W{2t@JNj6{ZdeHx%W5`etL%x(!=ne8pq-Rd zW7FDROCz-0<$ha-2~CplK(X7?g=T-ZRSMW7Hnd;K`hn-I=WP_N%`#{Nn*eCgaRDD= z08;C@doPs4Lo%ZS!xRvYMAU9NtE8$(JOgx{uQBCfUL%PMK8n!s}xHj(hPs^?yJUkFhDRPayxq zXWhj_<}d838x?fSy+Q@T{=RleyMG9ZLn{MsOFq1$6LRVK5bcp4K(#Wq#^@hFwLNSx ztOa+zY(1YUa6r^|I~2W2aVbl3i5MorWyfq6^Iy#r%MJZiY{;!KS{ZeKR9oMk)Xbbv_;TSD5LDW7K=A#N9{7SYi!d>TO2F3lFFMulDPace=G%<*Nf zDE=2zt^6)jt$>N?Ur@FF@p|=6jv_z>V|LHr`_cZ*PU61$`s3*zKTHK9}rTPzaR zKG&CCEru^;w`I`0FQsP2*RWmXoun_l=Cz5B`0iEbHm&~fMV3nNm#Z(-`OE*rs+Bk+ zYM0hHwRa4SIE`$-{!MOdS$AD=`9z5?yV~M{9peuzGHcp(jlMMo#yjJ9ck3T{lj9zr z4C#}(Wuf8lvqh0mC=6_DsauP1{*p4TyTIh-cC$wbaRtw-5xtt|OXbG2ef`;4cLYkJ z4gLVDy`9oO+%F*E=o8NYRyXa`{)7i9E-7~_&=GpC(s z0aKL@r^B}^!1ipW+`tiI;&fneuTwjlD8R`!%CghlZMM#!*&0zz84U!y(7P_e{Hono2?b**@75O zp!YO#Ti7t^b2hb8VM6Dc|FMc$-aThw&c*pbGOmB_`5lUQDi z3Gp*UA~5h;x!D&sqB(ATANVI z;l7av5TliMr0isi*RF{a<00IsOiOvH-Mzzja{A4^Q0X|6 zB=E`Tu-5z{e_f$qA^wVWNAp)BhbLsg%iNDTDHkS6ebXFA&d@D5$N!00TQpPLzaPN< zI@h;nSd{N^++Xhep7}>3__1wSItJ7`JXd(Bk76XHmwIwM2m0(tqV?W*_J@+XAifWI zv>UT$dG8fh{=>41f9%Da*uY)~(xyu5lp{Zas|P^n{_$fu z!fe33Aa$Hn7R(dq1jHS|5?z$ih?1`vV_kBO@!aBH75V{MJHXmaR~m34-`U|MjTXb` 
znP>;|gqO1<_NI90wR=Te{k9`yVX*&_VE`PW0O)Yvfu_b1f1`Vkm?lN`XG;5o$jIdA zt{}H;P9NYmG*y3iXE-yGG6-BiRnAh=o*mnHBX}_HyW*JyCM)5|D*Gk+tZenHOL*nN zPY+M6CF)IV&#ou3Fz;QjX!3QJMgkX8k7yKG*i1remfatr^?L?35ftpYJj_y2$42f?Km0 zh~jYLaA?l%Y7sZz$0P6s)ibS?)>vra@!{sODvcF@j#@++%Bqn|2h7UFPny=^i?4Sw z97cD~i@P6XnSB3!c~59R+zmjlj0k$$gR4I6#??as-^-=X_uN4h>BJZeQ(~m~k`qn!os}<)wYk;TilN$I!;AN(m97FS?$4kKy`&z5^p z@mmbCcV7iArfa;|!>#9pjC8O0b~U&(A~RK#4dH;etJV2MNM_lqRj0!t+b88Veugp^ zmm+zD`*89H{at|CvibZE6v2n3qX4{j>s*hmQ5S=n&Iwjml?zSi0?|>)lt;1dCQ=S- z+7fbWS3sYqZ1Bj*eNz`}tq|$L>zV2rmUV$ky94lV!ZQ+_iH7=#lRC z>G@0(4Kh5+NixPGHa7qAD<{WyNfIP=LvwI z?vC~iOd0OlA)}9`(f^02_JwZe)bIR961J)^6)+RmKIpA2 zvIr2ezg+iKx@)@+zzQcyROoKxxL#N3jvP|HPVSEEQobIccAfi0)dFAR*)^vkV|BiH zeyykcFH(u)d8fS2+}j8{s73dJ>CPwOoi+XEDobVe~D;J`4J2N7h??1VG_PXyCkI{(Sh4RS;~!|6Sg|@_emC9fl=3Uhord#gdGjK?q08|D!Z=G;IUaUv-g;> zl>9gYbaQG9{qWgUZF!j+tn*i4VPo$`6KvgOusGV$Ah#pcA_{VEmlV2AzAkyc`OZbU z>m=8`cY~&TJ~x^1ET;5&c^UM`ycBj{&yw`8tOJvrca+{u-D${IS>1-&d*gx4cH~GkH z?hE0O_j?*i1rZ3ia$164;ZF6VFu08hnzS<;ur>kyBEIXxP^+9{c{9M)5rt#NX3 z5J1*uy)SQ)wq$#`9kv=K{6t|TUQKijxgGB2T6Rrgv_WJ3hG=6i#7>QLlNGmND5`O^ zGQ`H>c4<+NeC|rkidPdZPAK4!#L%%V+4q~DmfhBL?lvJ5LP%c^`@LN|l@jZ+?~4XB zMRtfAR^8q@xxL!n>|hn0N)EX*@MVF@bV?j$AyPndDB^j%c-bS-#E_HaVbmhGW*rie zjVA&MSO?~BXAiVEr|z)LEN?rCkV&kc&l0D7(0wQL8FwNoZD2L2l?8L$Fab?fU3THa3% z2w08r!lvswMMOlo5CV{Xf!rOGz#;-;7-NDJdP;rz7-y%`yHAuG{VXS+)Y@|F4Ur3M z)Gmk*q$fr$sxo2>Bf8|^P3rFJdr!n1MB1K_$xfk<(*73Hk~%d*m$&Ypk_cX_-BhJl zsx23?#_raWFTQI$IpJB9u~SoWJ(^10FyNwC5}id3_-?3qGd8gq&Wm{I%q?(`j=`V4 z#Nrpd3**>J5$hsP#Fr`88Rg-rP`cSoQ7$M3^^+bRy zxI-nHJ{$lijl}rIY#yR_BAgeJG}m~z%=A)VbIe1#7D7KqY<2Qm`0iHiQBkBZX`s2L zjNlS376?jD?%}2Pa#!hQ6lKXjy7Dr6&RDNJ>;#Ju-R$6dga_?+8HP(-pX(x|YiL<+ zg|t3eDi~8ewm+!V<6XlY=}F$RtO5CvwUx;XtX!2e%QqC#3C8po@R6}|5h4R@DB(M= z-(@&j${ToQY$gj1m=EZOBw-?EVuC+a*P;b$0?&t0i;XF*yi9`JJb*cm>m5{;$)MeA z5(MvIAKo7m<@A(q7%bxctzcsk@ASOoT3|K9wmT6N5;N?w3uL; z!PK%}rGLd!B3+BqV}s+nzH87c-p{Gx-&-x#_JtpR)3Js+{Efee#_A41f6jaMzV*sN 
zqt`U$hNa1&l!oUDGR+&hwO#McJoxlIR*%i^Qi$q%IP%P=w(x#t&#IoN8Bv^!Z>Xy= zldj++UY^S|sS~F6YHRy0@h0+HrlR&Df4`W$GXBpD8`Hys87xv>b*c}l*`I4& z6zJyH@TF+Hj;D=HP<-i}GBU$e0w9|66Wqs2Gv6Pk=Ch;qkxB9K(P(_$-h1amspyxH ze8xoTm;fpM~bOoAfslcS87k=VQijE+Bw!%J+SxJp0joqnAC38Qh_sTJ6)?s$owqNqA>? zk{Fv$j`?qidC0}~^+>Q9$=JkKk4PA@2gvF&JtyBjT--e(PH?UkPu9~44%QzJ4KbQ; zaP$nu9i%mQ*)3+<8@c5jl-H>jGF8zFY2X!HJAdL%&Vr zWiFPhO&15E*-W1*7HalawYr?8H#vWHMm1c?@XLkpPjP1z%fyo6>NIpdiz{%b1PRbw zRxUDBYrNubPGAp(c1a4x-%L8klrSYEa=L--LXDPBIr~7%g^-@HSCHS zMQf4>58faJMuon&0r%bp9QS)1^tOY#D_PT5c}>#u@nB@(g3{X5n>EVAI(BziDw1y` zUPow8CzQH3kR~mfUhH{%tm4~8M~n!|m3ue*Wm zR^df*r|Mk=&7&DqI7Wpj&Uy_YM3GF{{bqw{Z8q|=!brgz?Sh~jnFol{mAW1SNP5b*{+^hXcmmW&HJR#f^is;GK$>ISChuyh5~ zrp-ynbn?|Ja0Mr7gLy&nv?`B0PC8b^`~NX5Bt@qMOq*pdm|&<)+h|&(q#bH|7V(VT zrwjU6@5~5lTqb0H+Nri@JJJDnpXykNX5QLlzmOm5DV`?X_UuY3dqbBfao<7jE>08L zo%^chUK^cdpB=dgcRMzko+NY{12`EX`>&68^CC3j634@W7v2EF;>J|TjAoL}`D-D8A(niG^ zy_R3hF6d^Te=56~CDL!2ga>1-^3(kV-JAZMWjnhf3x?$S7Pyxbj#v6|PR)Xm9odzH zo2yLdq(|fG_hyQAb#EsZvnC!M}V}0R2{x(mR#bM=(l6* zMcW64Ru-J{C9=MbU*&la!g46>N!wDoS#|h5N;10nNua%<*zL|C_rP(50Zw}5eXesE zZd&`lZi$p^o$<3?d-L%&dc^AS{=5WC1byAowGO-4Mf>IR&hNHMNTzjs&V7Q@I_h7) ze%QO-@y14)b;3K^`$(uji4UXfEkfF6m1b0+hRbG#{I_@qw9CHB{3#B{2hBe2{z5_fE*aX*yycr8@&Bh%6)-t-A}<6tKi<&fn0sk@)$SIU*{ zvQmFQQ2RVk9dmR3lJ)R(=s|(Jl^C|WKygE{{`1@6Q_vWTs{cD9O9_ybqxa zy;QbSero^S_w8%(sKuHcff-bT5VxI(!@~I0xu`q?ss@>jJ#n}7P6tKJU?KoYrE~~` z(((~p)2sWUtl7yO_l?O(T6J(TUv%>6BeJP*{<~f@>__C}MY*hwSJ$5tm@=Y>h0uzW z3M)Qg;UT&YHxuliJ$Yx*{YWEhc;CCEG-h$7syh`<8iPZN z1$$*bd`=N}T^irRzxAAl`F3U1n}QZs85tvnxTb-+GXf@^Q!F&+*7UE^$AGak=$4-VIyQ)B6QoP;PVr+el7BxHRl z{;{a6*7$svo<`x0)Z6c|U3BlBaVyo1%C88_INI*u@GL7KQglkV$JJYL>y0a5&oC&V zRPo}nrwMIj>BnqE$6Gz5TW>$Gqz2wzhRn1Y&+r=_d)sdHVjnTQLtb@R{Jh8;ZPA_d zUM{JH%9RwOj~{|>^An5f6ysQZ;;zl8ibaQ?oxV7Fye6Oj*~K?2g_ALx95-d$Wh0Wa zA<;s)Ja=s`%vS~EOVjH$axpgK5IDPEy*=!WJ)7not$H*xwoYToib#>p_9mP=asBZH zI&R=fjKD1pVz7QpRpQ9PQXPjGwd-B33=RtLh zh`no4&W-xpcd~+xU*qI`(O7TUCv!xv4ItZL*OD#2AkHjDt^WjwY3yq*qxVX@6J0bbKPY 
zQ!>Heta^Q%{L{hxEpMucuUa@xW{jKZ2&wC|(VvEYR!T*r$}fsT~R}JtVk-K;jw}0cMe-gJpE(5Jr zYq%A^-yfTYi}J+{e(_H?d+_y=GQSJHKG$8HF~NT8JmW^ho3#nk#df>tHRz(v;Lzx3 z!>rK;TJgKk@~4NZ>YCFL=Q&&3#y zyhbv(UtS8Xj|2C1-O{>#BVw{@@kGtpy7i@(F&`pG26wJZ8a(NHZ8*Ei%Uk`%Y&HB|juQK~8Q#9Y zF?&`q>q_@my;8LqOA8A__7uWJ9Xp4)*_QW3O8K;BA)hKIP{Xf>WZ#xm7Tb@Y&a)LJ zoy%dlN1$zw;t* z2J!&zL}WUqb>((j{y-(Y|GM^}VL!d?a3KS=YbM}S*{Fan0Trh(v$mD5-QKHfF7LkW-kNU81+h{JHHnH34*2zZXb}~Sw~rY#XFkwtqEvF{ zs1)25IJrAh$85X$iwh5bw^>rN=fTs=SvyafJu5aY7v9$s&k5;qmN33YUPc(~vP|t% z{vlBA%cfl6{RtO_zhsG5OPyu`x^42j2wK}#u!vZ?O8<%CKpDKW$hDcwXSJSXWPL+Qpj>!|1CN}D(?=kget!_S+1A^-Dh8FGOx3f6sqpe~u zfs0gAY@v&obP3dF|lc~xB2ucsxMj^D*UZv&S+$VjN@C{?CW!_%|pZ-YI?9t zmpN5%#m&FKGNOK@to==|p#^j=ImyD(S%$CSs6xaOx0Ybir3caw_41**5gwMt?q^t6 z)ME;!&p?y4E#I)xwp#cuPhdEQf1U7{LR?+z;bE8)U+X(W=(qJ>WnmC2Q~OXV%MyUlOykzoigCQy#|6?#f}&AVAr<|oUx)3O{s zt*g%Mq2Wd?N3twjcMrsDPGOp=mj$*&H7`={;i2K7+C6j#Up0SR{CQ&j`-X#RQEB&v zU~OfOqgat{O8t+H@dw@fG2a)W+*DM}HdIthf74M>J@YyW?ua;h+$nQAYpb8h4!D+` zS)`_-;%A_u+WRjuE5OW>WRLp|>93k{{Y|>7sXy;OHMPT85%Jcy{-N=&^c?@9Fa8(( z4Qp#V(*H{hrvG{DN`Ln+YNCV94GIBgPqO=eS^QgpeSH5kzy?RK_$T!r4lj>u$ug${ zav@Yyz*T=A&%Dn5N_~n#y!Er|i|>lE8UQjfc>T|U-H1&J{!=`NIhkVmQ}ljy;s12l zb%LePKUw_YLNfCIb-}+GB>aW}7V{q%bo_<^1pOZvPznEP0*>$>7(jo+fQbDM4Di2U zfJOZW2I0S9073i*1}}fZfCT>!3|4-_0Q+C^;E2etQ9zLYg9duPVL9M|?Eh~`|1Ks$(O;d2gZ#sZf3y^d`gR?8wNzF32E>*C75*(==fJP}gJwV`LkKuD35_OWNO%aI3`Y>55EKyw1EUZ; z0*HYiFgP3%k0e5nC;|yZ#t|`4Bo+a|pdrwo#V3wjT^s^ykloXt#lJZCt9Z~#&{zTq zfq}s=Bp4Epfg(^OA{q<95?}-ff`mll5Cj4YjzYp=1SkQ8MBG7mua5j+$B@u~41R4aKOvDpF)FE&r2~9$QRD{6DSR_yZ1|mQrK=B|TBoZEr zgA;!i564*Z{66!ukiUutEfl`%aX1l%gCSvPED?t%;IK$6gp2~ZV9-!tDGUUFfQ3NF za3q#Y!XsfgBoT&zfx3Xge-^)6YkPK2;P`j!3(>!d2MrhxA%Kd)V#px1K(ujWJQ+d2 zV6iAN9t%Uj&^QtTN1qC4b%rD{(V}2_h zh9E!)NFs)aN8zD3C=~pQC6b^JC>i8E8JG)75l%!Bp-?CSiG$)HNHm%Rhr!S!0vUt* zS-e^W%SI@OeUHq~*7y8IJXk%T1QHodM8nBMklEl8iiP1wND>Gz1Pepq&=?W~PJ)qe zFftm%2?PcM3KLF%Bay%%zlbjxVcn+zEN}kB`Y!2Ttp{rl$Y~TAg@R&<5I7n_B)~~n z0&qJFLd1jGCgb1)A_R}YB2jPz4h}_;NN55cBqSbA#-WI$pRK<=&ffC-x(@~Z_}_;6 
zk3AGvxv)4G1ciVS$OHn0gdwAlC@dMVTkTkolOz&oG6)FBdN2n=fWsg#G^j=x9*)Pu zKmq=edQU7>Lyds*lR*CcJM}Kf{wf}3NI06wxCK3=x6oicYrS8$nub3#mZ94+;zlsN|CV>PY;*c0P5%e$| z1Oyed859-`zVSdIJc$HS00W1E#s>w95EMiJhe6{(PePO7DA>==C+ljS+O7L~nxC!z z0t(`1=YusD3xy%zNHUQK%txY8Bn(*UFrW?LK;;Ab@F>!*{cr>dLck)(Py&_&QVxS5 zkqK}z`j^=6M24`_Sp)Ii%s)AQUFlcxV8up&C5}V}?S+U(!@)v>1HlKWhQSjNBv<_qEeFvo}>CdQ@zx@ diff --git a/tests/static/export/migrate/0.10_null_fields.aiida b/tests/static/export/migrate/0.10_null_fields.aiida new file mode 100644 index 0000000000000000000000000000000000000000..59d1c07dbcf13f86f3b6155b17848717a126aa7c GIT binary patch literal 4357 zcmb`J2|U#K8^?dcU>rp!VYg+HV@QoFl51uhLmA1L!c2pZYf*~)DQ$<8 zwC#$h6**FrTSYlSxs_FN?EKgnYAO9+`*{7l1}~rI`90sy^L*a~Q%oqzzq{QnZz8aHt^pK0*2HHc~p zGFcsPQXcbJQ3eLUJ}U((4gi8|8PpC-Crdx?L$2(az`B+Z8#-;as|+n<=)PSP*B(?| zZ=-1JkiO+?mKaT4SjD1Hn3Q?PC&lrCPoHv}hP zyHcL}Mpi!pbB;D**{5!}p6pJqoF)_)r5#Bd&Y5`;(ETy5X!5n2RNyD`Zp#@|vwy<_ z!_n8G$4B~1b2H+GbTQo})s~ud1l{yLk@RY{a?D|vY<2aDl>@kjlM)nKLvOIVGSFq*5h z5@_Tv9Vx)J64Q~I=>J5CS&#PR;iq6<>p4ef%&YLeBE>S|hQ zL=6%aOV%VUq}uYWNdjO0(2A)RipG#JZb(%%S2R+cgheBXZmMcXGKq{OqqSY!FsiBx zsTBX8R2cSKtuoa?q90|I`9Sw-CN1u(r%(U@ca00tkBIX0^`_3RheD=FiNJ*+m` zkxri}BTpIYQVz&M-CYFwtKW5pa=Rst`8!-6`0bTI5cGu31ICG<@QWY4gLT>@oEVIR z;b+HGC7&TbcyaI!c{*<}?uoHX@mHWT2=-oSaTjwhT31TRUeGhIx4=LvY!#HNA%&L{ zRfNh~d=8##5_)yEwV?b%T9a_QgjMO_A+wFwLW&~E>TN%86rWGQ?e&xNGY_y5Gy65j zd|U^gWR|@JPi-6-_SW4eJsYbX5lbcX&zR7ypKeF%>(0$d_Pq7AySEm@Z{My{DW5vD zK|;o1d)pdgQ;oq2bscJgls#+S&su9>D{>`!qhzCbYTMP_=4~33>@m zN)N>z(W-p^^J$@xn$iuY2NkOa6m+TCI||;aCJJ7j>WaiVQV_=+`9td3B5HqmkT2IZ z+kB}nD{OD;7`onqToe)`8slM}l5?N5>6Xgo;dUR9cYkB!Ty&en=s?5odHeRb}my*ffXqRhqHZ3{LKIu%R zSjy?t|M6)kh#8~R&{bJikToi`J0IdF9+~2=IXmANvohhh(#&_tH5_a8+f;*%!w)at z)HHTvSZPd$(IRBYH4OYfsJ7RO%F)}8;ur(sszegW&DDi+z?pY`$7UC=iEm}@1F%1f zodW<_a7oNZAP)enVRp{T)sOgj%SKach4Yucvg54Hyq}{5W|SjpO%vrjLt{(BUk)Vn?uIYk;7%;SvZf1FV#kcY6{cWy`%NeL&^GVh5Im`R*?&KFR8HL9VYLj{pv0!3wIWp!%#hjAxcqMyaoZmm@_DI(5zm( 
z(g>!f(C<>Tm8@@xQ0Xt)=RF?opFwY_naU`;{!&ve&bj$8W#1`US!nYpM{xh;%iP8G z2fD4wxiVm(7cCBq4F7KJJRSR&*wie4$u7vt^LgVU(bXI+bw`LwTK@XQ6mp|4=b5Bi zyPpF3TG9?f0+I=to2y(U-v_kZEZGFVtF`sKyv^GzY=vih`Ln>v)T#kr!xFe5vPGi4 z%4D5ul%f1>(Y?g6lVnA+(%%`_9iNGGd0{*x)|tZ6)s?;XMEPes!5!Jb|}kx!3_b0Kvj;|+Le-@g*?qRvT4c7zm6NEaC&-Jjafa0li@nY8K9Jvz+; zj<*FQE7XJo06;4)Z&DGaeZ`q3T{7M)`}KQ995S{E!96ZvARfgLFDr`^61A z-iN})oShR*YU`RzRTQi6O*MEGloE2686K~Kw_Xd2%-P0SE+Zf?4f8LIuGZGW@!)^m zoOsNH;a!ACTV^hOh7EY;2NJc@Bp{MGc;FimOd%X1oGVlKKNAYf;J{C-W%5PMFBu5D zDOoXt+ridc6{9c6vhZ;!Sb~(!iVO4;Y}w@rav5f4^?@b66bwN&WyJ;hI0RVqz;}}> zD-Wpm_1+{f-MApXD(GCfVgq{1vEW`I;rW^wI~Rh!r$?A@`E``3^){ngN7|r5hWB2UgJN@0dG$<< Date: Fri, 4 Mar 2022 19:21:28 +0100 Subject: [PATCH 23/26] fix tests --- tests/cmdline/commands/test_archive_create.py | 2 +- tests/cmdline/commands/test_archive_import.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/cmdline/commands/test_archive_create.py b/tests/cmdline/commands/test_archive_create.py index 86b285399b..9f4ebf5cb9 100644 --- a/tests/cmdline/commands/test_archive_create.py +++ b/tests/cmdline/commands/test_archive_create.py @@ -178,7 +178,7 @@ def test_migrate_low_verbosity(run_cli_command, tmp_path): assert ArchiveFormatSqlZip().read_version(filename_output) == ArchiveFormatSqlZip().latest_version -@pytest.mark.parametrize('version', list_versions()) +@pytest.mark.parametrize('version', [v for v in list_versions() if v not in ('main_0000a', 'main_0000b')]) def test_version(run_cli_command, version): """Test the functionality of `verdi archive version`.""" archive = f'export_{version}_simple.aiida' diff --git a/tests/cmdline/commands/test_archive_import.py b/tests/cmdline/commands/test_archive_import.py index 229c99f0f6..ddad778313 100644 --- a/tests/cmdline/commands/test_archive_import.py +++ b/tests/cmdline/commands/test_archive_import.py @@ -258,7 +258,7 @@ def test_migration(self): @pytest.mark.usefixtures('aiida_profile_clean') -@pytest.mark.parametrize('version', 
list_versions()) +@pytest.mark.parametrize('version', [v for v in list_versions() if v not in ('main_0000a', 'main_0000b')]) def test_import_old_local_archives(version, run_cli_command): """ Test import of old local archives Expected behavior: Automatically migrate to newest version and import correctly. From 1b39db982b5f7413f25d0a66ffb9471f7c39f331 Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Fri, 4 Mar 2022 20:59:10 +0100 Subject: [PATCH 24/26] Update migrator.py --- aiida/storage/sqlite_zip/migrator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiida/storage/sqlite_zip/migrator.py b/aiida/storage/sqlite_zip/migrator.py index edc4099e14..97f1a4fc93 100644 --- a/aiida/storage/sqlite_zip/migrator.py +++ b/aiida/storage/sqlite_zip/migrator.py @@ -160,7 +160,7 @@ def migrate( # pylint: disable=too-many-branches,too-many-statements,too-many-l # if the archive is a "legacy" format, i.e. has a data.json file, migrate it to the target/final legacy schema data: Optional[Dict[str, Any]] = None if current_version in LEGACY_MIGRATE_FUNCTIONS: - MIGRATE_LOGGER.report('Legacy migrations required') + MIGRATE_LOGGER.report(f'Legacy migrations required from {"tar" if is_tar else "zip"} format') MIGRATE_LOGGER.report('Extracting data.json ...') # read the data.json file data = _read_json(inpath, 'data.json', is_tar) From ae28d23cc558297f504a2d1e5f031d527e0577ce Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Sun, 6 Mar 2022 11:29:37 +0100 Subject: [PATCH 25/26] allow reading as folder --- aiida/storage/sqlite_zip/__init__.py | 2 +- aiida/storage/sqlite_zip/backend.py | 99 ++++++++++++++++++++-------- aiida/storage/sqlite_zip/migrator.py | 2 +- aiida/storage/sqlite_zip/utils.py | 53 +++++++++------ 4 files changed, 105 insertions(+), 51 deletions(-) diff --git a/aiida/storage/sqlite_zip/__init__.py b/aiida/storage/sqlite_zip/__init__.py index 85b3587914..d79b5e11c6 100644 --- a/aiida/storage/sqlite_zip/__init__.py +++ 
b/aiida/storage/sqlite_zip/__init__.py @@ -8,7 +8,7 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Module with implementation of the storage backend, -using an SQLite database and repository files within a zipfile. +using an SQLite database and repository files, within a zipfile. The content of the zip file is:: diff --git a/aiida/storage/sqlite_zip/backend.py b/aiida/storage/sqlite_zip/backend.py index 8d8642c893..278b8eaead 100644 --- a/aiida/storage/sqlite_zip/backend.py +++ b/aiida/storage/sqlite_zip/backend.py @@ -15,8 +15,7 @@ from pathlib import Path import tempfile from typing import BinaryIO, Iterable, Iterator, Optional, Sequence, Tuple, Type, cast -import zipfile -from zipfile import ZipFile +from zipfile import ZipFile, is_zipfile from archive_path import extract_file_in_zip from sqlalchemy.orm import Session @@ -36,7 +35,20 @@ class SqliteZipBackend(StorageBackend): # pylint: disable=too-many-public-methods - """A read-only backend for a sqlite/zip format.""" + """A read-only backend for a sqlite/zip format. + + The storage format uses an SQLite database and repository files, within a folder or zipfile. + + The content of the folder/zipfile is:: + + |- metadata.json + |- db.sqlite3 + |- repo/ + |- hashkey1 + |- hashkey2 + ... 
+ + """ @classmethod def version_head(cls) -> str: @@ -63,11 +75,11 @@ def create_profile(path: str | Path) -> Profile: @classmethod def version_profile(cls, profile: Profile) -> None: - return read_version(profile.storage_config['path']) + return read_version(profile.storage_config['path'], search_limit=None) @classmethod def migrate(cls, profile: Profile): - raise ReadOnlyError() + raise NotImplementedError('use the migrate function directly.') def __init__(self, profile: Profile): super().__init__(profile) @@ -76,7 +88,7 @@ def __init__(self, profile: Profile): # lazy open the archive zipfile and extract the database file self._db_file: Optional[Path] = None self._session: Optional[Session] = None - self._zipfile: Optional[zipfile.ZipFile] = None + self._repo: Optional[ZipfileBackendRepository] = None self._closed = False def __str__(self) -> str: @@ -93,35 +105,42 @@ def close(self): self._session.close() if self._db_file and self._db_file.exists(): self._db_file.unlink() - if self._zipfile: - self._zipfile.close() + if self._repo: + self._repo.close() self._session = None self._db_file = None - self._zipfile = None + self._repo = None self._closed = True def get_session(self) -> Session: """Return an SQLAlchemy session.""" if self._closed: raise ClosedStorage(str(self)) - if self._db_file is None: - _, path = tempfile.mkstemp() - self._db_file = Path(path) - with self._db_file.open('wb') as handle: - try: - extract_file_in_zip(self._path, DB_FILENAME, handle, search_limit=4) - except Exception as exc: - raise CorruptStorage(f'database could not be read: {exc}') from exc if self._session is None: - self._session = Session(create_sqla_engine(self._db_file)) + if is_zipfile(self._path): + _, path = tempfile.mkstemp() + db_file = self._db_file = Path(path) + with db_file.open('wb') as handle: + try: + extract_file_in_zip(self._path, DB_FILENAME, handle, search_limit=4) + except Exception as exc: + raise CorruptStorage(f'database could not be read: {exc}') from exc 
+ else: + db_file = self._path / DB_FILENAME + if not db_file.exists(): + raise CorruptStorage(f'database could not be read: non-existent {db_file}') + self._session = Session(create_sqla_engine(db_file)) return self._session def get_repository(self) -> 'ZipfileBackendRepository': if self._closed: raise ClosedStorage(str(self)) - if self._zipfile is None: - self._zipfile = ZipFile(self._path, mode='r') # pylint: disable=consider-using-with - return ZipfileBackendRepository(self._zipfile) + if self._repo is None: + if is_zipfile(self._path): + self._repo = ZipfileBackendRepository(self._path) + else: + self._repo = ZipfileBackendRepository(self._path / REPO_FOLDER, folder=None) + return self._repo def query(self) -> 'SqliteBackendQueryBuilder': return SqliteBackendQueryBuilder(self) @@ -208,13 +227,35 @@ def __init__(self, msg='sqlite_zip storage is read-only'): # pylint: disable=us class ZipfileBackendRepository(AbstractRepositoryBackend): - """A read-only backend for an open zip file.""" + """A read-only backend for a zip file. + + The zip file should contain repository files with the key format: ``/``, + i.e. files named by the sha256 hash of the file contents, inside a ```` directory. + """ - def __init__(self, file: ZipFile): - self._zipfile = file + def __init__(self, path: str | Path, folder: str | None = REPO_FOLDER): + """Initialise the repository backend. 
+ + :param path: the path to the zip file + :param folder: the folder inside the zip file to look for repository files + """ + self._path = Path(path) + self._zipfile: None | ZipFile = None + self._prefix = folder + '/' if folder else '' + + def close(self): + """Close the zip file.""" + if self._zipfile: + self._zipfile.close() @property def zipfile(self) -> ZipFile: + """Return the zip file.""" + if self._zipfile is None: + try: + self._zipfile = ZipFile(self._path, mode='r') # pylint: disable=consider-using-with + except Exception as exc: + raise CorruptStorage(f'repository could not be read: {exc}') from exc if self._zipfile.fp is None: raise ClosedStorage(f'zipfile closed: {self._zipfile}') return self._zipfile @@ -242,7 +283,7 @@ def _put_object_from_filelike(self, handle: BinaryIO) -> str: def has_object(self, key: str) -> bool: try: - self.zipfile.getinfo(f'{REPO_FOLDER}/{key}') + self.zipfile.getinfo(f'{self._prefix}{key}') except KeyError: return False return True @@ -252,13 +293,13 @@ def has_objects(self, keys: list[str]) -> list[bool]: def list_objects(self) -> Iterable[str]: for name in self.zipfile.namelist(): - if name.startswith(REPO_FOLDER + '/') and name[len(REPO_FOLDER) + 1:]: - yield name[len(REPO_FOLDER) + 1:] + if name.startswith(self._prefix) and name[len(self._prefix):]: + yield name[len(self._prefix):] @contextmanager def open(self, key: str) -> Iterator[BinaryIO]: try: - handle = self.zipfile.open(f'{REPO_FOLDER}/{key}') + handle = self.zipfile.open(f'{self._prefix}{key}') yield cast(BinaryIO, handle) except KeyError: raise FileNotFoundError(f'object with key `{key}` does not exist.') @@ -277,7 +318,7 @@ def get_object_hash(self, key: str) -> str: return key def maintain(self, dry_run: bool = False, live: bool = True, **kwargs) -> None: - raise NotImplementedError + pass def get_info(self, statistics: bool = False, **kwargs) -> dict: return {'objects': {'count': len(list(self.list_objects()))}} diff --git 
a/aiida/storage/sqlite_zip/migrator.py b/aiida/storage/sqlite_zip/migrator.py index 97f1a4fc93..6cd3819f3d 100644 --- a/aiida/storage/sqlite_zip/migrator.py +++ b/aiida/storage/sqlite_zip/migrator.py @@ -77,7 +77,7 @@ def migrate( # pylint: disable=too-many-branches,too-many-statements,too-many-l force: bool = False, compression: int = 6 ) -> None: - """Migrate an sqlite_zip storage file to a specific version. + """Migrate an `sqlite_zip` storage file to a specific version. Historically, this format could be a zip or a tar file, contained the database as a bespoke JSON format, and the repository files in the "legacy" per-node format. diff --git a/aiida/storage/sqlite_zip/utils.py b/aiida/storage/sqlite_zip/utils.py index ceab594948..e6e835cb4b 100644 --- a/aiida/storage/sqlite_zip/utils.py +++ b/aiida/storage/sqlite_zip/utils.py @@ -52,39 +52,52 @@ def create_sqla_engine(path: Union[str, Path], *, enforce_foreign_keys: bool = T return engine -def extract_metadata(path: Union[str, Path], search_limit: Optional[int] = 10) -> Dict[str, Any]: - """Extract the metadata dictionary from the archive""" - # we fail if not one of the first record in central directory (as expected) - # so we don't have to iter all repo files to fail - return json.loads(read_file_in_zip(path, META_FILENAME, 'utf8', search_limit=search_limit)) +def extract_metadata(path: Union[str, Path], *, search_limit: Optional[int] = 10) -> Dict[str, Any]: + """Extract the metadata dictionary from the archive. - -def read_version(path: Union[str, Path]) -> str: - """Read the version of the storage instance from the file. - - This is intended to work for all versions of the storage format. - - :param path: path to storage instance - - :raises: ``UnreachableStorage`` if a version cannot be read from the file + :param search_limit: the maximum number of records to search for the metadata file in a zip file. 
""" path = Path(path) - if not path.is_file(): - raise UnreachableStorage('archive file not found') + if not path.exists(): + raise UnreachableStorage(f'path not found: {path}') if zipfile.is_zipfile(path): try: - metadata = extract_metadata(path, search_limit=None) + metadata = json.loads(read_file_in_zip(path, META_FILENAME, search_limit=search_limit)) except Exception as exc: - raise CorruptStorage(f'Could not read metadata for version: {exc}') from exc + raise CorruptStorage(f'Could not read metadata: {exc}') from exc elif tarfile.is_tarfile(path): try: metadata = json.loads(read_file_in_tar(path, META_FILENAME)) except Exception as exc: - raise CorruptStorage(f'Could not read metadata for version: {exc}') from exc + raise CorruptStorage(f'Could not read metadata: {exc}') from exc + elif path.is_dir(): + if not path.joinpath(META_FILENAME).is_file(): + raise CorruptStorage('Could not find metadata file') + try: + metadata = json.loads(path.joinpath(META_FILENAME)) + except Exception as exc: + raise CorruptStorage(f'Could not read metadata: {exc}') from exc else: - raise CorruptStorage('Not a zip or tar file') + raise CorruptStorage('Path not a folder, zip or tar file') + + if not isinstance(metadata, dict): + raise CorruptStorage(f'Metadata is not a dictionary: {type(metadata)}') + return metadata + + +def read_version(path: Union[str, Path], *, search_limit: Optional[int] = None) -> str: + """Read the version of the storage instance from the path. + + This is intended to work for all versions of the storage format. + + :param path: path to storage instance, either a folder, zip file or tar file. + :param search_limit: the maximum number of records to search for the metadata file in a zip file. 
+ + :raises: ``UnreachableStorage`` if a version cannot be read from the file + """ + metadata = extract_metadata(path, search_limit=search_limit) if 'export_version' in metadata: return metadata['export_version'] From b624c19146396e7a48b8e9c170139230a10ba9cf Mon Sep 17 00:00:00 2001 From: Chris Sewell Date: Sun, 6 Mar 2022 20:41:41 +0100 Subject: [PATCH 26/26] Add `aiida/storage/sqlite_zip` to mypy type checking --- .pre-commit-config.yaml | 1 + aiida/storage/psql_dos/backend.py | 2 +- aiida/storage/sqlite_zip/backend.py | 139 +++++++++++------- .../sqlite_zip/migrations/legacy_to_main.py | 2 +- .../versions/main_0000a_replace_nulls.py | 8 +- aiida/storage/sqlite_zip/migrator.py | 22 ++- aiida/storage/sqlite_zip/models.py | 25 +++- aiida/storage/sqlite_zip/utils.py | 16 +- tests/tools/archive/orm/test_links.py | 8 +- tests/tools/archive/test_backend.py | 10 +- 10 files changed, 144 insertions(+), 89 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index aa07afaf39..541518a9c7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -103,6 +103,7 @@ repos: aiida/storage/psql_dos/backend.py| aiida/storage/psql_dos/orm/querybuilder/.*py| aiida/storage/psql_dos/utils.py| + aiida/storage/sqlite_zip/.*.py| aiida/tools/graph/graph_traversers.py| aiida/tools/groups/paths.py| aiida/tools/archive/.*py| diff --git a/aiida/storage/psql_dos/backend.py b/aiida/storage/psql_dos/backend.py index 7f0fe3d59f..683484845a 100644 --- a/aiida/storage/psql_dos/backend.py +++ b/aiida/storage/psql_dos/backend.py @@ -55,7 +55,7 @@ def version_head(cls) -> str: return cls.migrator.get_schema_version_head() @classmethod - def version_profile(cls, profile: Profile) -> None: + def version_profile(cls, profile: Profile) -> Optional[str]: return cls.migrator(profile).get_schema_version_profile(check_legacy=True) @classmethod diff --git a/aiida/storage/sqlite_zip/backend.py b/aiida/storage/sqlite_zip/backend.py index 278b8eaead..ff931cdb9d 100644 --- 
a/aiida/storage/sqlite_zip/backend.py +++ b/aiida/storage/sqlite_zip/backend.py @@ -39,7 +39,7 @@ class SqliteZipBackend(StorageBackend): # pylint: disable=too-many-public-metho The storage format uses an SQLite database and repository files, within a folder or zipfile. - The content of the folder/zipfile is:: + The content of the folder/zipfile should be:: |- metadata.json |- db.sqlite3 @@ -74,7 +74,7 @@ def create_profile(path: str | Path) -> Profile: ) @classmethod - def version_profile(cls, profile: Profile) -> None: + def version_profile(cls, profile: Profile) -> Optional[str]: return read_version(profile.storage_config['path'], search_limit=None) @classmethod @@ -88,7 +88,7 @@ def __init__(self, profile: Profile): # lazy open the archive zipfile and extract the database file self._db_file: Optional[Path] = None self._session: Optional[Session] = None - self._repo: Optional[ZipfileBackendRepository] = None + self._repo: Optional[_RoBackendRepository] = None self._closed = False def __str__(self) -> str: @@ -132,14 +132,16 @@ def get_session(self) -> Session: self._session = Session(create_sqla_engine(db_file)) return self._session - def get_repository(self) -> 'ZipfileBackendRepository': + def get_repository(self) -> '_RoBackendRepository': if self._closed: raise ClosedStorage(str(self)) if self._repo is None: if is_zipfile(self._path): self._repo = ZipfileBackendRepository(self._path) + elif (self._path / REPO_FOLDER).exists(): + self._repo = FolderBackendRepository(self._path / REPO_FOLDER) else: - self._repo = ZipfileBackendRepository(self._path / REPO_FOLDER, folder=None) + raise CorruptStorage(f'repository could not be read: non-existent {self._path / REPO_FOLDER}') return self._repo def query(self) -> 'SqliteBackendQueryBuilder': @@ -226,39 +228,20 @@ def __init__(self, msg='sqlite_zip storage is read-only'): # pylint: disable=us super().__init__(msg) -class ZipfileBackendRepository(AbstractRepositoryBackend): - """A read-only backend for a zip file. 
- - The zip file should contain repository files with the key format: ``/``, - i.e. files named by the sha256 hash of the file contents, inside a ```` directory. - """ +class _RoBackendRepository(AbstractRepositoryBackend): # pylint: disable=abstract-method + """A backend abstract for a read-only folder or zip file.""" - def __init__(self, path: str | Path, folder: str | None = REPO_FOLDER): + def __init__(self, path: str | Path): """Initialise the repository backend. :param path: the path to the zip file - :param folder: the folder inside the zip file to look for repository files """ self._path = Path(path) - self._zipfile: None | ZipFile = None - self._prefix = folder + '/' if folder else '' - - def close(self): - """Close the zip file.""" - if self._zipfile: - self._zipfile.close() + self._closed = False - @property - def zipfile(self) -> ZipFile: - """Return the zip file.""" - if self._zipfile is None: - try: - self._zipfile = ZipFile(self._path, mode='r') # pylint: disable=consider-using-with - except Exception as exc: - raise CorruptStorage(f'repository could not be read: {exc}') from exc - if self._zipfile.fp is None: - raise ClosedStorage(f'zipfile closed: {self._zipfile}') - return self._zipfile + def close(self) -> None: + """Close the repository.""" + self._closed = True @property def uuid(self) -> Optional[str]: @@ -281,47 +264,101 @@ def erase(self) -> None: def _put_object_from_filelike(self, handle: BinaryIO) -> str: raise ReadOnlyError() + def has_objects(self, keys: list[str]) -> list[bool]: + return [self.has_object(key) for key in keys] + + def iter_object_streams(self, keys: list[str]) -> Iterator[Tuple[str, BinaryIO]]: + for key in keys: + with self.open(key) as handle: # pylint: disable=not-context-manager + yield key, handle + + def delete_objects(self, keys: list[str]) -> None: + raise ReadOnlyError() + + def get_object_hash(self, key: str) -> str: + return key + + def maintain(self, dry_run: bool = False, live: bool = True, **kwargs) -> 
None: + pass + + def get_info(self, statistics: bool = False, **kwargs) -> dict: + return {'objects': {'count': len(list(self.list_objects()))}} + + +class ZipfileBackendRepository(_RoBackendRepository): + """A read-only backend for a zip file. + + The zip file should contain repository files with the key format: ``/``, + i.e. files named by the sha256 hash of the file contents, inside a ```` directory. + """ + + def __init__(self, path: str | Path): + super().__init__(path) + self._folder = REPO_FOLDER + self.__zipfile: None | ZipFile = None + + def close(self) -> None: + if self._zipfile: + self._zipfile.close() + super().close() + + @property + def _zipfile(self) -> ZipFile: + """Return the open zip file.""" + if self._closed: + raise ClosedStorage(f'repository is closed: {self._path}') + if self.__zipfile is None: + try: + self.__zipfile = ZipFile(self._path, mode='r') # pylint: disable=consider-using-with + except Exception as exc: + raise CorruptStorage(f'repository could not be read {self._path}: {exc}') from exc + return self.__zipfile + def has_object(self, key: str) -> bool: try: - self.zipfile.getinfo(f'{self._prefix}{key}') + self._zipfile.getinfo(f'{self._folder}/{key}') except KeyError: return False return True - def has_objects(self, keys: list[str]) -> list[bool]: - return [self.has_object(key) for key in keys] - def list_objects(self) -> Iterable[str]: - for name in self.zipfile.namelist(): - if name.startswith(self._prefix) and name[len(self._prefix):]: - yield name[len(self._prefix):] + prefix = f'{self._folder}/' + prefix_len = len(prefix) + for name in self._zipfile.namelist(): + if name.startswith(prefix) and name[prefix_len:]: + yield name[prefix_len:] @contextmanager def open(self, key: str) -> Iterator[BinaryIO]: try: - handle = self.zipfile.open(f'{self._prefix}{key}') + handle = self._zipfile.open(f'{self._folder}/{key}') yield cast(BinaryIO, handle) except KeyError: raise FileNotFoundError(f'object with key `{key}` does not exist.') 
finally: handle.close() - def iter_object_streams(self, keys: list[str]) -> Iterator[Tuple[str, BinaryIO]]: - for key in keys: - with self.open(key) as handle: # pylint: disable=not-context-manager - yield key, handle - def delete_objects(self, keys: list[str]) -> None: - raise ReadOnlyError() +class FolderBackendRepository(_RoBackendRepository): + """A read-only backend for a folder. - def get_object_hash(self, key: str) -> str: - return key + The folder should contain repository files, named by the sha256 hash of the file contents. + """ - def maintain(self, dry_run: bool = False, live: bool = True, **kwargs) -> None: - pass + def has_object(self, key: str) -> bool: + return self._path.joinpath(key).is_file() - def get_info(self, statistics: bool = False, **kwargs) -> dict: - return {'objects': {'count': len(list(self.list_objects()))}} + def list_objects(self) -> Iterable[str]: + for subpath in self._path.iterdir(): + if subpath.is_file(): + yield subpath.name + + @contextmanager + def open(self, key: str) -> Iterator[BinaryIO]: + if not self._path.joinpath(key).is_file(): + raise FileNotFoundError(f'object with key `{key}` does not exist.') + with self._path.joinpath(key).open('rb') as handle: + yield handle class SqliteBackendQueryBuilder(SqlaQueryBuilder): diff --git a/aiida/storage/sqlite_zip/migrations/legacy_to_main.py b/aiida/storage/sqlite_zip/migrations/legacy_to_main.py index 1b03651489..27566bccc1 100644 --- a/aiida/storage/sqlite_zip/migrations/legacy_to_main.py +++ b/aiida/storage/sqlite_zip/migrations/legacy_to_main.py @@ -215,7 +215,7 @@ def _transform_link(link_row): uuid: pk for uuid, pk in connection.execute(select(v1_schema.DbGroup.uuid, v1_schema.DbGroup.id)) # pylint: disable=unnecessary-comprehension } length = sum(len(uuids) for uuids in data['groups_uuid'].values()) - unknown_nodes = {} + unknown_nodes: Dict[str, set] = {} with get_progress_reporter()(desc='Adding Group-Nodes', total=length) as progress: for group_uuid, node_uuids in 
data['groups_uuid'].items(): group_id = group_uuid_map[group_uuid] diff --git a/aiida/storage/sqlite_zip/migrations/versions/main_0000a_replace_nulls.py b/aiida/storage/sqlite_zip/migrations/versions/main_0000a_replace_nulls.py index 3769d29b20..7d5fa87463 100644 --- a/aiida/storage/sqlite_zip/migrations/versions/main_0000a_replace_nulls.py +++ b/aiida/storage/sqlite_zip/migrations/versions/main_0000a_replace_nulls.py @@ -43,8 +43,8 @@ def upgrade(): # pylint: disable=too-many-statements ) # remove rows with null values, which may have previously resulted from deletion of a user or computer - op.execute(db_dbauthinfo.delete().where(db_dbauthinfo.c.aiidauser_id.is_(None))) - op.execute(db_dbauthinfo.delete().where(db_dbauthinfo.c.dbcomputer_id.is_(None))) + op.execute(db_dbauthinfo.delete().where(db_dbauthinfo.c.aiidauser_id.is_(None))) # type: ignore[arg-type] + op.execute(db_dbauthinfo.delete().where(db_dbauthinfo.c.dbcomputer_id.is_(None))) # type: ignore[arg-type] op.execute(db_dbauthinfo.update().where(db_dbauthinfo.c.enabled.is_(None)).values(enabled=True)) op.execute(db_dbauthinfo.update().where(db_dbauthinfo.c.auth_params.is_(None)).values(auth_params={})) @@ -61,8 +61,8 @@ def upgrade(): # pylint: disable=too-many-statements ) # remove rows with null values, which may have previously resulted from deletion of a node or user - op.execute(db_dbcomment.delete().where(db_dbcomment.c.dbnode_id.is_(None))) - op.execute(db_dbcomment.delete().where(db_dbcomment.c.user_id.is_(None))) + op.execute(db_dbcomment.delete().where(db_dbcomment.c.dbnode_id.is_(None))) # type: ignore[arg-type] + op.execute(db_dbcomment.delete().where(db_dbcomment.c.user_id.is_(None))) # type: ignore[arg-type] op.execute(db_dbcomment.update().where(db_dbcomment.c.content.is_(None)).values(content='')) op.execute(db_dbcomment.update().where(db_dbcomment.c.ctime.is_(None)).values(ctime=timezone.now())) diff --git a/aiida/storage/sqlite_zip/migrator.py b/aiida/storage/sqlite_zip/migrator.py 
index 6cd3819f3d..52cd81a91a 100644 --- a/aiida/storage/sqlite_zip/migrator.py +++ b/aiida/storage/sqlite_zip/migrator.py @@ -24,7 +24,6 @@ from alembic.runtime.migration import MigrationContext, MigrationInfo from alembic.script import ScriptDirectory from archive_path import ZipPath, extract_file_in_zip, open_file_in_tar, open_file_in_zip -from sqlalchemy.future.engine import Connection from aiida.common import json from aiida.common.exceptions import CorruptStorage, IncompatibleStorageSchema, StorageMigrationError @@ -34,12 +33,12 @@ from .migrations.legacy import FINAL_LEGACY_VERSION, LEGACY_MIGRATE_FUNCTIONS from .migrations.legacy_to_main import LEGACY_TO_MAIN_REVISION, perform_v1_migration from .migrations.utils import copy_tar_to_zip, copy_zip_to_zip, update_metadata -from .utils import DB_FILENAME, META_FILENAME, REPO_FOLDER, create_sqla_engine, read_version +from .utils import DB_FILENAME, META_FILENAME, REPO_FOLDER, create_sqla_engine, extract_metadata, read_version def get_schema_version_head() -> str: """Return the head schema version for this storage, i.e. 
the latest schema this storage can be migrated to.""" - return _alembic_script().revision_map.get_current_head('main') + return _alembic_script().revision_map.get_current_head('main') or '' def list_versions() -> List[str]: @@ -121,16 +120,11 @@ def migrate( # pylint: disable=too-many-branches,too-many-statements,too-many-l raise CorruptStorage(f'The input file is neither a tar nor a zip file: {inpath}') # read the metadata.json which should always be present - try: - metadata = _read_json(inpath, 'metadata.json', is_tar) - except FileNotFoundError: - raise CorruptStorage('No metadata.json file found') - except IOError as exc: - raise CorruptStorage(f'No input file could not be read: {exc}') from exc + metadata = extract_metadata(inpath, search_limit=None) # obtain the current version from the metadata if 'export_version' not in metadata: - raise CorruptStorage('No export_version found in metadata.json') + raise CorruptStorage('No export_version found in metadata') current_version = metadata['export_version'] # update the modified time of the file and the compression metadata['mtime'] = datetime.now().isoformat() @@ -141,7 +135,8 @@ def migrate( # pylint: disable=too-many-branches,too-many-statements,too-many-l # since 0.3 -> 0.4 requires costly migrations of repo files (you would need to unpack all of them) if current_version in ('0.1', '0.2', '0.3') or version in ('0.1', '0.2', '0.3'): raise StorageMigrationError( - f"Legacy migration from '{current_version}' -> '{version}' is not supported in aiida-core v2" + f"Legacy migration from '{current_version}' -> '{version}' is not supported in aiida-core v2. " + 'First migrate them to the latest version in aiida-core v1.' 
) all_versions = list_versions() if current_version not in all_versions: @@ -239,8 +234,9 @@ def path_callback(inpath, outpath) -> bool: if current_version != version: MIGRATE_LOGGER.report('Performing SQLite migrations:') with _migration_context(db_path) as context: + assert context.script is not None context.stamp(context.script, current_version) - context.connection.commit() + context.connection.commit() # type: ignore # see https://alembic.sqlalchemy.org/en/latest/batch.html#dealing-with-referencing-foreign-keys # for why we do not enforce foreign keys here with _alembic_connect(db_path, enforce_foreign_keys=False) as config: @@ -345,7 +341,7 @@ def _alembic_script() -> ScriptDirectory: @contextlib.contextmanager -def _alembic_connect(db_path: Path, enforce_foreign_keys=True) -> Iterator[Connection]: +def _alembic_connect(db_path: Path, enforce_foreign_keys=True) -> Iterator[Config]: """Context manager to return an instance of an Alembic configuration. The profiles's database connection is added in the `attributes` property, through which it can then also be diff --git a/aiida/storage/sqlite_zip/models.py b/aiida/storage/sqlite_zip/models.py index d099100378..7e637e4bb1 100644 --- a/aiida/storage/sqlite_zip/models.py +++ b/aiida/storage/sqlite_zip/models.py @@ -120,11 +120,32 @@ def create_orm_cls(klass: base.Base) -> SqliteBase: DbLog = create_orm_cls(log.DbLog) DbLink = create_orm_cls(node.DbLink) -# to-do This was the minimum for creating a graph, but really all relationships should be copied -DbNode.dbcomputer = sa_orm.relationship('DbComputer', backref='dbnodes') # type: ignore[attr-defined] +# to-do ideally these relationships should be auto-generated in `create_orm_cls`, but this proved difficult +DbAuthInfo.aiidauser = sa_orm.relationship( # type: ignore[attr-defined] + 'DbUser', backref=sa_orm.backref('authinfos', passive_deletes=True, cascade='all, delete') +) +DbAuthInfo.dbcomputer = sa_orm.relationship( # type: ignore[attr-defined] + 'DbComputer', 
backref=sa_orm.backref('authinfos', passive_deletes=True, cascade='all, delete') +) +DbComment.dbnode = sa_orm.relationship('DbNode', backref='dbcomments') # type: ignore[attr-defined] +DbComment.user = sa_orm.relationship('DbUser') # type: ignore[attr-defined] +DbGroup.user = sa_orm.relationship( # type: ignore[attr-defined] + 'DbUser', backref=sa_orm.backref('dbgroups', cascade='merge') +) DbGroup.dbnodes = sa_orm.relationship( # type: ignore[attr-defined] 'DbNode', secondary='db_dbgroup_dbnodes', backref='dbgroups', lazy='dynamic' ) +DbLog.dbnode = sa_orm.relationship( # type: ignore[attr-defined] + 'DbNode', backref=sa_orm.backref('dblogs', passive_deletes='all', cascade='merge') +) +DbNode.dbcomputer = sa_orm.relationship( # type: ignore[attr-defined] + 'DbComputer', backref=sa_orm.backref('dbnodes', passive_deletes='all', cascade='merge') +) +DbNode.user = sa_orm.relationship('DbUser', backref=sa_orm.backref( # type: ignore[attr-defined] + 'dbnodes', + passive_deletes='all', + cascade='merge', +)) @functools.lru_cache(maxsize=10) diff --git a/aiida/storage/sqlite_zip/utils.py b/aiida/storage/sqlite_zip/utils.py index e6e835cb4b..cd2838314e 100644 --- a/aiida/storage/sqlite_zip/utils.py +++ b/aiida/storage/sqlite_zip/utils.py @@ -61,21 +61,21 @@ def extract_metadata(path: Union[str, Path], *, search_limit: Optional[int] = 10 if not path.exists(): raise UnreachableStorage(f'path not found: {path}') - if zipfile.is_zipfile(path): + if path.is_dir(): + if not path.joinpath(META_FILENAME).is_file(): + raise CorruptStorage('Could not find metadata file') try: - metadata = json.loads(read_file_in_zip(path, META_FILENAME, search_limit=search_limit)) + metadata = json.loads(path.joinpath(META_FILENAME).read_text(encoding='utf8')) except Exception as exc: raise CorruptStorage(f'Could not read metadata: {exc}') from exc - elif tarfile.is_tarfile(path): + elif path.is_file() and zipfile.is_zipfile(path): try: - metadata = json.loads(read_file_in_tar(path, META_FILENAME)) 
+ metadata = json.loads(read_file_in_zip(path, META_FILENAME, search_limit=search_limit)) except Exception as exc: raise CorruptStorage(f'Could not read metadata: {exc}') from exc - elif path.is_dir(): - if not path.joinpath(META_FILENAME).is_file(): - raise CorruptStorage('Could not find metadata file') + elif path.is_file() and tarfile.is_tarfile(path): try: - metadata = json.loads(path.joinpath(META_FILENAME)) + metadata = json.loads(read_file_in_tar(path, META_FILENAME)) except Exception as exc: raise CorruptStorage(f'Could not read metadata: {exc}') from exc else: diff --git a/tests/tools/archive/orm/test_links.py b/tests/tools/archive/orm/test_links.py index f8ed4a78e5..d29dbaac42 100644 --- a/tests/tools/archive/orm/test_links.py +++ b/tests/tools/archive/orm/test_links.py @@ -532,10 +532,10 @@ def test_link_flags(tmp_path, aiida_profile_clean, aiida_localhost_factory): ) ) - link_flags_import_helper(input_links_forward, aiida_profile_clean.reset_db) - link_flags_import_helper(create_return_links_backward, aiida_profile_clean.reset_db) - link_flags_import_helper(call_links_backward_calc1, aiida_profile_clean.reset_db) - link_flags_import_helper(call_links_backward_work2, aiida_profile_clean.reset_db) + link_flags_import_helper(input_links_forward, aiida_profile_clean.clear_profile) + link_flags_import_helper(create_return_links_backward, aiida_profile_clean.clear_profile) + link_flags_import_helper(call_links_backward_calc1, aiida_profile_clean.clear_profile) + link_flags_import_helper(call_links_backward_work2, aiida_profile_clean.clear_profile) def test_double_return_links_for_workflows(tmp_path, aiida_profile_clean): diff --git a/tests/tools/archive/test_backend.py b/tests/tools/archive/test_backend.py index 08eb48317e..62a01f83e7 100644 --- a/tests/tools/archive/test_backend.py +++ b/tests/tools/archive/test_backend.py @@ -19,13 +19,13 @@ @pytest.fixture() -def archive(tmp_path): +def archive(): """Yield the archive open in read mode.""" - 
filepath_archive = get_archive_file('export_main_0001_simple.aiida', filepath='export/migrate') archive_format = ArchiveFormatSqlZip() - new_archive = tmp_path / 'out.aiida' - archive_format.migrate(filepath_archive, new_archive, archive_format.latest_version) - with archive_format.open(new_archive, 'r') as reader: + filepath_archive = get_archive_file( + f'export_{archive_format.latest_version}_simple.aiida', filepath='export/migrate' + ) + with archive_format.open(filepath_archive, 'r') as reader: yield reader