From 0d1ae622e54910d7dd1fac4d7ea20f5c0b44a921 Mon Sep 17 00:00:00 2001 From: Alex Wolf Date: Fri, 24 Mar 2023 22:21:14 +0100 Subject: [PATCH 01/11] :truck: Rename DObject to File --- docs/guide/1-data-validation.ipynb | 32 ++++----- lnschema_core/__init__.py | 4 +- lnschema_core/_core.py | 102 ++++++++++++++--------------- lnschema_core/_link.py | 24 +++---- lnschema_core/dev/_id.py | 2 +- lnschema_core/dev/_storage.py | 6 +- lnschema_core/dev/id.py | 4 +- lnschema_core/link.py | 6 +- tests/test_base.py | 6 +- 9 files changed, 93 insertions(+), 93 deletions(-) diff --git a/docs/guide/1-data-validation.ipynb b/docs/guide/1-data-validation.ipynb index 61ba5641..56157d17 100644 --- a/docs/guide/1-data-validation.ipynb +++ b/docs/guide/1-data-validation.ipynb @@ -314,7 +314,7 @@ "source": [ "import nbproject as nb\n", "import pytest\n", - "from lnschema_core import DObject, Transform, Run\n", + "from lnschema_core import File, Transform, Run\n", "from lnschema_core.dev.sqlmodel import SQLModelPrefix\n", "from sqlmodel import Relationship, Field, ForeignKeyConstraint\n", "from typing import Optional\n", @@ -381,7 +381,7 @@ "source": [ "# relationship validation works correctly (strict type checking, valid model)\n", "with pytest.raises(TypeError):\n", - " dobject = DObject(source=Transform(name=\"My test pipeline\"))" + " file = File(source=Transform(name=\"My test pipeline\"))" ] }, { @@ -391,14 +391,14 @@ "outputs": [], "source": [ "# relationship validation works correctly (strict type checking, resolved ForwardRef)\n", - "class DObjectRel(SQLModelPrefix, table=True):\n", + "class FileRel(SQLModelPrefix, table=True):\n", " id: str = Field(primary_key=True, default=None)\n", - " dobject_id: Optional[str] = Field(foreign_key=\"core.dobject.id\", index=True)\n", - " dobject: Optional[\"DObject\"] = Relationship()\n", + " file_id: Optional[str] = Field(foreign_key=\"core.file.id\", index=True)\n", + " file: Optional[\"File\"] = Relationship()\n", "\n", "\n", - "dobject = DObject(name=\"mydobject\", source_id=\"raf\", storage_id=\"g4s\")\n", - "dobject_real = DObjectRel(dobject=dobject)" + "file = File(name=\"myfile\", source_id=\"raf\", storage_id=\"g4s\")\n", + "file_real = FileRel(file=file)" ] }, { @@ -408,14 +408,14 @@ "outputs": [], "source": [ "# auto-population of required fk constraints works correctly\n", - "class DObjectRelFK(SQLModelPrefix, table=True):\n", + "class FileRelFK(SQLModelPrefix, table=True):\n", " id: str = Field(primary_key=True, default=None)\n", - " dobject_id: str = Field(foreign_key=\"core.dobject.id\", index=True)\n", - " dobject: DObject = Relationship()\n", + " file_id: str = Field(foreign_key=\"core.file.id\", index=True)\n", + " file: File = Relationship()\n", "\n", "\n", - "dobject = DObject(name=\"mydobject\", source_id=\"raf\", storage_id=\"g4s\")\n", - "dobject_rel = DObjectRelFK(dobject=dobject)\n", + "file = File(name=\"myfile\", source_id=\"raf\", storage_id=\"g4s\")\n", + "file_rel = FileRelFK(file=file)\n", "\n", "\n", "# auto-population of required fk constraints works correctly (composite fk)\n", @@ -437,9 +437,9 @@ "metadata": {}, "outputs": [], "source": [ - "# dobject validation works correctly (missing field errors are raised even when dobject is instantiated with custom constructor)\n", + "# file validation works correctly (missing field errors are raised even when file is instantiated with custom constructor)\n", "with pytest.raises(ValidationError):\n", - " dobject = DObject(name=\"no data objects\")" + " file = File(name=\"no data objects\")" 
] }, { @@ -448,8 +448,8 @@ "metadata": {}, "outputs": [], "source": [ - "# dobject validation works correctly (no errors raised when dobject is conformantly instantiated with custom constructor)\n", - "dobject = DObject(name=\"mydobject\", source_id=nb.meta.store.id, storage_id=\"123\")" + "# file validation works correctly (no errors raised when file is conformantly instantiated with custom constructor)\n", + "file = File(name=\"myfile\", source_id=nb.meta.store.id, storage_id=\"123\")" ] } ], diff --git a/lnschema_core/__init__.py b/lnschema_core/__init__.py index 5ec3e5e9..47b69084 100644 --- a/lnschema_core/__init__.py +++ b/lnschema_core/__init__.py @@ -14,7 +14,7 @@ .. autosummary:: :toctree: . - DObject + File Run Transform DFolder @@ -37,4 +37,4 @@ __version__ = "0.30rc2" from . import dev, link -from ._core import DFolder, DObject, Features, Project, Run, Storage, Transform, User +from ._core import DFolder, Features, File, Project, Run, Storage, Transform, User diff --git a/lnschema_core/_core.py b/lnschema_core/_core.py index aeec716a..f25568c8 100644 --- a/lnschema_core/_core.py +++ b/lnschema_core/_core.py @@ -12,11 +12,11 @@ from sqlmodel import Field, ForeignKeyConstraint, Relationship from . import _name as schema_name -from ._link import DFolderDObject, DObjectFeatures, ProjectDFolder, RunIn # noqa +from ._link import DFolderFile, FileFeatures, ProjectDFolder, RunIn # noqa from ._timestamps import CreatedAt, UpdatedAt from ._users import CreatedBy from .dev import id as idg -from .dev._storage import filepath_from_dfolder, filepath_from_dobject +from .dev._storage import filepath_from_dfolder, filepath_from_file from .dev.sqlmodel import schema_sqlmodel from .dev.type import TransformType @@ -45,7 +45,7 @@ class User(SQLModel, table=True): # type: ignore class Storage(SQLModel, table=True): # type: ignore """Storage locations. - A dobject or run-associated file can be stored in any desired S3, + A file or run-associated file can be stored in any desired S3, GCP, Azure or local storage location. This table tracks these locations along with metadata. """ @@ -65,16 +65,16 @@ class Storage(SQLModel, table=True): # type: ignore class DFolder(SQLModel, table=True): # type: ignore """Data folders, collections of data objects. - In LaminDB, a data folder is a collection of data objects (`DObject`). + In LaminDB, a data folder is a collection of data objects (`File`). 
""" id: str = Field(default_factory=idg.dfolder, primary_key=True) name: str = Field(index=True) - dobjects: List["DObject"] = Relationship( # type: ignore # noqa + files: List["File"] = Relationship( # type: ignore # noqa back_populates="dfolders", - sa_relationship_kwargs=dict(secondary=DFolderDObject.__table__), + sa_relationship_kwargs=dict(secondary=DFolderFile.__table__), ) - """Collection of :class:`~lamindb.DObject`.""" + """Collection of :class:`~lamindb.File`.""" created_by: str = CreatedBy """Auto-populated link to :class:`~lamindb.schema.User`.""" created_at: datetime = CreatedAt @@ -107,10 +107,10 @@ def tree( ) def get(self, relpath: Union[str, Path, List[Union[str, Path]]], **fields): - """Get dobjects via relative path to dfolder.""" - from lamindb._folder import get_dobject + """Get files via relative path to dfolder.""" + from lamindb._folder import get_file - return get_dobject(dfolder=self, relpath=relpath, **fields) + return get_file(dfolder=self, relpath=relpath, **fields) @overload def __init__( @@ -127,7 +127,7 @@ def __init__( self, id: Optional[str] = None, name: Optional[str] = None, - dobjects: List["DObject"] = [], + files: List["File"] = [], ): """Initialize from fields.""" ... @@ -139,7 +139,7 @@ def __init__( # type: ignore # continue with fields id: Optional[str] = None, name: Optional[str] = None, - x: List["DObject"] = [], + x: List["File"] = [], ): if folder is not None: from lamindb._folder import get_dfolder_kwargs_from_data @@ -175,17 +175,17 @@ class Project(SQLModel, table=True): # type: ignore """Time of last update.""" -class DObject(SQLModel, table=True): # type: ignore +class File(SQLModel, table=True): # type: ignore """See lamindb.schema for docstring.""" - id: str = Field(default_factory=idg.dobject, primary_key=True) - """Base62 char ID, generated by :func:`~lamindb.schema.dev.id.dobject`.""" + id: str = Field(default_factory=idg.file, primary_key=True) + """Base62 char ID, generated by :func:`~lamindb.schema.dev.id.file`.""" name: Optional[str] = Field(index=True) """Semantic name or title. Defaults to `None`.""" suffix: Optional[str] = Field(default=None, index=True) """Suffix to construct the storage key. Defaults to `None`. - This is a file extension if the `dobject` is stored in a file format. + This is a file extension if the `file` is stored in a file format. It's `None` if the storage format doesn't have a canonical extension. 
""" @@ -200,26 +200,26 @@ class DObject(SQLModel, table=True): # type: ignore # We need the fully module-qualified path below, as there might be more # schema modules with an ORM called "Run" source: "lnschema_core._core.Run" = Relationship(back_populates="outputs") # type: ignore # noqa - """Link to :class:`~lamindb.Run` that generated the `dobject`.""" + """Link to :class:`~lamindb.Run` that generated the `file`.""" source_id: str = Field(foreign_key="core.run.id", index=True) """The source run id.""" storage_id: str = Field(foreign_key="core.storage.id", index=True) - """The id of :class:`~lamindb.schema.Storage` location that stores the `dobject`.""" + """The id of :class:`~lamindb.schema.Storage` location that stores the `file`.""" features: List["Features"] = Relationship( - back_populates="dobjects", - sa_relationship_kwargs=dict(secondary=DObjectFeatures.__table__), + back_populates="files", + sa_relationship_kwargs=dict(secondary=FileFeatures.__table__), ) """Link to feature sets :class:`~lamindb.Features`""" dfolders: List[DFolder] = Relationship( - back_populates="dobjects", - sa_relationship_kwargs=dict(secondary=DFolderDObject.__table__), + back_populates="files", + sa_relationship_kwargs=dict(secondary=DFolderFile.__table__), ) - """Collection of :class:`~lamindb.DFolder` that contain this dobject.""" + """Collection of :class:`~lamindb.DFolder` that contain this file.""" targets: List["lnschema_core._core.Run"] = Relationship( # type: ignore # noqa back_populates="inputs", sa_relationship_kwargs=dict(secondary=RunIn.__table__), ) - "Runs that use this dobject as input." + "Runs that use this file as input." created_at: datetime = CreatedAt """Time of creation.""" updated_at: Optional[datetime] = UpdatedAt @@ -232,7 +232,7 @@ class DObject(SQLModel, table=True): # type: ignore def path(self) -> Union[Path, CloudPath]: """Path on storage.""" - return filepath_from_dobject(self) + return filepath_from_file(self) def load(self, stream: bool = False, is_run_input: bool = False): """Load data object. @@ -245,7 +245,7 @@ def load(self, stream: bool = False, is_run_input: bool = False): """ from lamindb._load import load as lnload - return lnload(dobject=self, stream=stream, is_run_input=is_run_input) + return lnload(file=self, stream=stream, is_run_input=is_run_input) @overload def __init__( @@ -298,9 +298,9 @@ def __init__( # type: ignore features_ref: Optional[Any] = None, ): if data is not None: - from lamindb._record import get_dobject_kwargs_from_data + from lamindb._record import get_file_kwargs_from_data - kwargs, privates = get_dobject_kwargs_from_data( + kwargs, privates = get_file_kwargs_from_data( data=data, name=name, source=source, @@ -323,8 +323,8 @@ def __init__( # type: ignore self.features += features -DObject._objectkey = sa.Column("_objectkey", sqlmodel.sql.sqltypes.AutoString(), index=True) -DObject.__table__.append_constraint(sa.UniqueConstraint("storage_id", "_objectkey", "suffix", name="uq_storage__objectkey_suffix")) +File._objectkey = sa.Column("_objectkey", sqlmodel.sql.sqltypes.AutoString(), index=True) +File.__table__.append_constraint(sa.UniqueConstraint("storage_id", "_objectkey", "suffix", name="uq_storage__objectkey_suffix")) class Transform(SQLModel, table=True): # type: ignore @@ -376,27 +376,27 @@ class Transform(SQLModel, table=True): # type: ignore class Run(SQLModel, table=True): # type: ignore """Runs of data transforms. - A `run` is any transform of a `dobject`. + A `run` is any transform of a `file`. 
Args: id: Optional[str] = None name: Optional[str] = None load_latest: bool = False - Load latest run for given notebook or pipeline. transform: Optional[Transform] = None - inputs: List[DObject] = None - outputs: List[DObject] = None + inputs: List[File] = None + outputs: List[File] = None It typically has inputs and outputs: - - References to outputs are stored in the `dobject` table in the + - References to outputs are stored in the `file` table in the `source_id` column as a foreign key the `run` - table. This is possible as every given `dobject` has a unique data source: - the `run` that produced the `dobject`. However, note that a given - `run` may output several `dobjects`. + table. This is possible as every given `file` has a unique data source: + the `run` that produced the `file`. However, note that a given + `run` may output several `files`. - References to inputs are stored in the `run_in` table, a - many-to-many link table between the `dobject` and `run` tables. Any - `dobject` might serve as an input for many `runs`. Similarly, any - `run` might have many `dobjects` as inputs. + many-to-many link table between the `file` and `run` tables. Any + `file` might serve as an input for many `runs`. Similarly, any + `run` might have many `files` as inputs. """ __table_args__ = ( @@ -414,10 +414,10 @@ class Run(SQLModel, table=True): # type: ignore transform_v: Optional[str] = Field(default=None, index=True) transform: Transform = Relationship() """Link to :class:`~lamindb.schema.Notebook`.""" - outputs: List["DObject"] = Relationship(back_populates="source") - """Output data :class:`~lamindb.DObject`.""" - inputs: List["DObject"] = Relationship(back_populates="targets", sa_relationship_kwargs=dict(secondary=RunIn.__table__)) - """Input data :class:`~lamindb.DObject`.""" + outputs: List["File"] = Relationship(back_populates="source") + """Output data :class:`~lamindb.File`.""" + inputs: List["File"] = Relationship(back_populates="targets", sa_relationship_kwargs=dict(secondary=RunIn.__table__)) + """Input data :class:`~lamindb.File`.""" created_by: str = CreatedBy """Auto-populated link to :class:`~lamindb.schema.User`.""" created_at: datetime = CreatedAt @@ -434,8 +434,8 @@ def __init__( # type: ignore load_latest: bool = False, external_id: Optional[str] = None, transform: Optional[Transform] = None, - inputs: List[DObject] = None, - outputs: List[DObject] = None, + inputs: List[File] = None, + outputs: List[File] = None, ): kwargs = {k: v for k, v in locals().items() if v and k != "self"} @@ -485,9 +485,9 @@ class Features(SQLModel, table=True): # type: ignore type: str # was called entity_type created_by: str = CreatedBy created_at: datetime = CreatedAt - dobjects: List["DObject"] = Relationship( + files: List["File"] = Relationship( back_populates="features", - sa_relationship_kwargs=dict(secondary=DObjectFeatures.__table__), + sa_relationship_kwargs=dict(secondary=FileFeatures.__table__), ) @overload @@ -504,7 +504,7 @@ def __init__( self, id: Optional[str] = None, type: Optional[str] = None, - dobjects: List["DObject"] = [], + files: List["File"] = [], ): """Initialize from fields.""" ... 
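The Run docstring earlier in this diff describes the two link directions: outputs hang off the file table via its source_id foreign key, while inputs go through the run_in many-to-many table. A minimal usage sketch of that data model with the renamed ORMs, assuming an existing SQLModel engine with at least one run and file recorded (the database URL below is illustrative, not part of this patch):

from sqlmodel import Session, create_engine, select

from lnschema_core import File, Run

engine = create_engine("sqlite:///example.lndb")  # illustrative path, not from this patch

with Session(engine) as session:
    run = session.exec(select(Run)).first()
    # Outputs: files whose source_id foreign key points at this run.
    output_files = run.outputs
    # Inputs: files linked through the core.run_in table; one file can feed many runs.
    input_files = run.inputs

    # The reverse direction from a file: the run that produced it and the runs that consumed it.
    file = session.exec(select(File)).first()
    producing_run = file.source
    consuming_runs = file.targets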
@@ -517,7 +517,7 @@ def __init__( # type: ignore id: str = None, type: Any = None, # continue with fields - dobjects: List["DObject"] = [], + files: List["File"] = [], ): kwargs = {k: v for k, v in locals().items() if v and k != "self"} super().__init__(**kwargs) @@ -530,7 +530,7 @@ def __new__( id: str = None, type: Any = None, # continue with fields - dobjects: List["DObject"] = [], + files: List["File"] = [], ): if data is not None: from lamindb._record import get_features_from_data diff --git a/lnschema_core/_link.py b/lnschema_core/_link.py index a340eab1..e720d9d9 100644 --- a/lnschema_core/_link.py +++ b/lnschema_core/_link.py @@ -10,27 +10,27 @@ class RunIn(SQLModel, table=True): # type: ignore """Inputs of runs. - This is a many-to-many link table for `run` and `dobject` storing the + This is a many-to-many link table for `run` and `file` storing the inputs of data transformations. A data transformation can have an arbitrary number of data objects as inputs. - - The same `dobject` can be used as input in many different `runs`. - - One `run` can have several `dobjects` as inputs. + - The same `file` can be used as input in many different `runs`. + - One `run` can have several `files` as inputs. """ __tablename__ = f"{prefix}run_in" run_id: str = Field(foreign_key="core.run.id", primary_key=True) - dobject_id: str = Field(foreign_key="core.dobject.id", primary_key=True) + file_id: str = Field(foreign_key="core.file.id", primary_key=True) -class DObjectFeatures(SQLModel, table=True): # type: ignore - """Links `DObject` and `Features`.""" +class FileFeatures(SQLModel, table=True): # type: ignore + """Links `File` and `Features`.""" - __tablename__ = f"{prefix}dobject_features" + __tablename__ = f"{prefix}file_features" - dobject_id: str = Field(foreign_key="core.dobject.id", primary_key=True) + file_id: str = Field(foreign_key="core.file.id", primary_key=True) features_id: str = Field(foreign_key="core.features.id", primary_key=True) @@ -43,10 +43,10 @@ class ProjectDFolder(SQLModel, table=True): # type: ignore dfolder_id: str = Field(foreign_key="core.dfolder.id", primary_key=True) -class DFolderDObject(SQLModel, table=True): # type: ignore - """Link table of dfolder and dobject.""" +class DFolderFile(SQLModel, table=True): # type: ignore + """Link table of dfolder and file.""" - __tablename__ = f"{prefix}dfolder_dobject" + __tablename__ = f"{prefix}dfolder_file" dfolder_id: str = Field(foreign_key="core.dfolder.id", primary_key=True) - dobject_id: str = Field(foreign_key="core.dobject.id", primary_key=True) + file_id: str = Field(foreign_key="core.file.id", primary_key=True) diff --git a/lnschema_core/dev/_id.py b/lnschema_core/dev/_id.py index 35cfeea2..15e69338 100644 --- a/lnschema_core/dev/_id.py +++ b/lnschema_core/dev/_id.py @@ -33,7 +33,7 @@ def schema(): return base26(4) -def dobject() -> str: +def file() -> str: """Data object: 20 base62. 20 characters (62**20=7e+35 possibilities) roughly matches UUID (2*122=5e+36). 
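The renamed file() id generator above keeps the 20-character base62 scheme: 62**20 is roughly 7e+35, the same order of magnitude as the 2**122 (roughly 5e+36) random values in a UUID4. A minimal sketch of that scheme under those assumptions; the real helper lives in lnschema_core.dev._id and may differ in detail:

import secrets
import string

ALPHABET = string.digits + string.ascii_letters  # 62 characters in total

def base62(n_char: int) -> str:
    # Draw n_char characters uniformly at random from the base62 alphabet.
    return "".join(secrets.choice(ALPHABET) for _ in range(n_char))

def file() -> str:
    # 20 base62 characters: 62**20 ~ 7e+35 possible ids, comparable to a UUID4.
    return base62(n_char=20)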
diff --git a/lnschema_core/dev/_storage.py b/lnschema_core/dev/_storage.py index 6f913197..b364a38f 100644 --- a/lnschema_core/dev/_storage.py +++ b/lnschema_core/dev/_storage.py @@ -11,7 +11,7 @@ def get_name_suffix_from_filepath(filepath: Union[Path, CloudPath]): # add type annotations back asap when re-organizing the module -def storage_key_from_dobject(dobj): +def storage_key_from_file(dobj): if dobj._objectkey is None: return f"{dobj.id}{dobj.suffix}" else: @@ -19,10 +19,10 @@ def storage_key_from_dobject(dobj): # add type annotations back asap when re-organizing the module -def filepath_from_dobject(dobj): +def filepath_from_file(dobj): from lndb import settings - storage_key = storage_key_from_dobject(dobj) + storage_key = storage_key_from_file(dobj) filepath = settings.instance.storage.key_to_filepath(storage_key) return filepath diff --git a/lnschema_core/dev/id.py b/lnschema_core/dev/id.py index ceb1cee8..53043fa0 100644 --- a/lnschema_core/dev/id.py +++ b/lnschema_core/dev/id.py @@ -20,7 +20,7 @@ user instance storage - dobject + file dfolder run usage @@ -34,7 +34,7 @@ base62, base64, dfolder, - dobject, + file, instance, notebook, pipeline, diff --git a/lnschema_core/link.py b/lnschema_core/link.py index 3d7a6df5..ab38b25c 100644 --- a/lnschema_core/link.py +++ b/lnschema_core/link.py @@ -4,9 +4,9 @@ :toctree: . RunIn - DObjectFeatures + FileFeatures ProjectDFolder - DFolderDObject + DFolderFile """ -from ._link import DFolderDObject, DObjectFeatures, ProjectDFolder, RunIn # noqa +from ._link import DFolderFile, FileFeatures, ProjectDFolder, RunIn # noqa diff --git a/tests/test_base.py b/tests/test_base.py index a8da41bb..3adf585f 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -1,9 +1,9 @@ -from lnschema_core import DObject +from lnschema_core import File from lnschema_core.dev import id def test_id(): - assert len(id.dobject()) == 20 + assert len(id.file()) == 20 assert len(id.user()) == 8 assert len(id.secret()) == 40 assert len(id.usage()) == 24 @@ -11,4 +11,4 @@ def test_id(): def test_objectkey(): - assert "_objectkey" in DObject.__table__.columns.keys() + assert "_objectkey" in File.__table__.columns.keys() From 7f165e6d59714535cd9b6194b31bc3ba94d9a75f Mon Sep 17 00:00:00 2001 From: Alex Wolf Date: Fri, 24 Mar 2023 22:24:20 +0100 Subject: [PATCH 02/11] :truck: Rename DFolder to Folder --- lnschema_core/__init__.py | 4 ++-- lnschema_core/_core.py | 30 +++++++++++++++--------------- lnschema_core/_link.py | 16 ++++++++-------- lnschema_core/dev/_id.py | 2 +- lnschema_core/dev/_storage.py | 4 ++-- lnschema_core/dev/id.py | 4 ++-- lnschema_core/link.py | 6 +++--- 7 files changed, 33 insertions(+), 33 deletions(-) diff --git a/lnschema_core/__init__.py b/lnschema_core/__init__.py index 47b69084..b106b86e 100644 --- a/lnschema_core/__init__.py +++ b/lnschema_core/__init__.py @@ -17,7 +17,7 @@ File Run Transform - DFolder + Folder User Storage Project @@ -37,4 +37,4 @@ __version__ = "0.30rc2" from . import dev, link -from ._core import DFolder, Features, File, Project, Run, Storage, Transform, User +from ._core import Features, File, Folder, Project, Run, Storage, Transform, User diff --git a/lnschema_core/_core.py b/lnschema_core/_core.py index f25568c8..e4c9cc1a 100644 --- a/lnschema_core/_core.py +++ b/lnschema_core/_core.py @@ -12,11 +12,11 @@ from sqlmodel import Field, ForeignKeyConstraint, Relationship from . 
import _name as schema_name -from ._link import DFolderFile, FileFeatures, ProjectDFolder, RunIn # noqa +from ._link import FileFeatures, FolderFile, ProjectFolder, RunIn # noqa from ._timestamps import CreatedAt, UpdatedAt from ._users import CreatedBy from .dev import id as idg -from .dev._storage import filepath_from_dfolder, filepath_from_file +from .dev._storage import filepath_from_file, filepath_from_folder from .dev.sqlmodel import schema_sqlmodel from .dev.type import TransformType @@ -62,17 +62,17 @@ class Storage(SQLModel, table=True): # type: ignore updated_at: Optional[datetime] = UpdatedAt -class DFolder(SQLModel, table=True): # type: ignore +class Folder(SQLModel, table=True): # type: ignore """Data folders, collections of data objects. In LaminDB, a data folder is a collection of data objects (`File`). """ - id: str = Field(default_factory=idg.dfolder, primary_key=True) + id: str = Field(default_factory=idg.folder, primary_key=True) name: str = Field(index=True) files: List["File"] = Relationship( # type: ignore # noqa - back_populates="dfolders", - sa_relationship_kwargs=dict(secondary=DFolderFile.__table__), + back_populates="folders", + sa_relationship_kwargs=dict(secondary=FolderFile.__table__), ) """Collection of :class:`~lamindb.File`.""" created_by: str = CreatedBy @@ -88,7 +88,7 @@ class DFolder(SQLModel, table=True): # type: ignore def path(self) -> Union[Path, CloudPath]: """Path on storage.""" - return filepath_from_dfolder(self) + return filepath_from_folder(self) def tree( self, @@ -107,10 +107,10 @@ def tree( ) def get(self, relpath: Union[str, Path, List[Union[str, Path]]], **fields): - """Get files via relative path to dfolder.""" + """Get files via relative path to folder.""" from lamindb._folder import get_file - return get_file(dfolder=self, relpath=relpath, **fields) + return get_file(folder=self, relpath=relpath, **fields) @overload def __init__( @@ -142,9 +142,9 @@ def __init__( # type: ignore x: List["File"] = [], ): if folder is not None: - from lamindb._folder import get_dfolder_kwargs_from_data + from lamindb._folder import get_folder_kwargs_from_data - kwargs, privates = get_dfolder_kwargs_from_data( + kwargs, privates = get_folder_kwargs_from_data( folder=folder, name=name, ) @@ -159,7 +159,7 @@ def __init__( # type: ignore self._cloud_filepath = privates["_cloud_filepath"] -DFolder._objectkey = sa.Column("_objectkey", sqlmodel.sql.sqltypes.AutoString(), index=True) +Folder._objectkey = sa.Column("_objectkey", sqlmodel.sql.sqltypes.AutoString(), index=True) class Project(SQLModel, table=True): # type: ignore @@ -210,11 +210,11 @@ class File(SQLModel, table=True): # type: ignore sa_relationship_kwargs=dict(secondary=FileFeatures.__table__), ) """Link to feature sets :class:`~lamindb.Features`""" - dfolders: List[DFolder] = Relationship( + folders: List[Folder] = Relationship( back_populates="files", - sa_relationship_kwargs=dict(secondary=DFolderFile.__table__), + sa_relationship_kwargs=dict(secondary=FolderFile.__table__), ) - """Collection of :class:`~lamindb.DFolder` that contain this file.""" + """Collection of :class:`~lamindb.Folder` that contain this file.""" targets: List["lnschema_core._core.Run"] = Relationship( # type: ignore # noqa back_populates="inputs", sa_relationship_kwargs=dict(secondary=RunIn.__table__), diff --git a/lnschema_core/_link.py b/lnschema_core/_link.py index e720d9d9..ee697dd3 100644 --- a/lnschema_core/_link.py +++ b/lnschema_core/_link.py @@ -34,19 +34,19 @@ class FileFeatures(SQLModel, table=True): # type: ignore 
features_id: str = Field(foreign_key="core.features.id", primary_key=True) -class ProjectDFolder(SQLModel, table=True): # type: ignore - """Link table of project and dfolder.""" +class ProjectFolder(SQLModel, table=True): # type: ignore + """Link table of project and folder.""" - __tablename__ = f"{prefix}project_dfolder" + __tablename__ = f"{prefix}project_folder" project_id: str = Field(foreign_key="core.project.id", primary_key=True) - dfolder_id: str = Field(foreign_key="core.dfolder.id", primary_key=True) + folder_id: str = Field(foreign_key="core.folder.id", primary_key=True) -class DFolderFile(SQLModel, table=True): # type: ignore - """Link table of dfolder and file.""" +class FolderFile(SQLModel, table=True): # type: ignore + """Link table of folder and file.""" - __tablename__ = f"{prefix}dfolder_file" + __tablename__ = f"{prefix}folder_file" - dfolder_id: str = Field(foreign_key="core.dfolder.id", primary_key=True) + folder_id: str = Field(foreign_key="core.folder.id", primary_key=True) file_id: str = Field(foreign_key="core.file.id", primary_key=True) diff --git a/lnschema_core/dev/_id.py b/lnschema_core/dev/_id.py index 15e69338..81d3d31c 100644 --- a/lnschema_core/dev/_id.py +++ b/lnschema_core/dev/_id.py @@ -43,7 +43,7 @@ def file() -> str: return base62(n_char=20) -def dfolder() -> str: +def folder() -> str: """Data folder: 20 base62.""" return base62(n_char=20) diff --git a/lnschema_core/dev/_storage.py b/lnschema_core/dev/_storage.py index b364a38f..d498bf3f 100644 --- a/lnschema_core/dev/_storage.py +++ b/lnschema_core/dev/_storage.py @@ -28,7 +28,7 @@ def filepath_from_file(dobj): # add type annotations back asap when re-organizing the module -def filepath_from_dfolder(dfolder): +def filepath_from_folder(folder): from lndb import settings - return settings.instance.storage.key_to_filepath(dfolder._objectkey) + return settings.instance.storage.key_to_filepath(folder._objectkey) diff --git a/lnschema_core/dev/id.py b/lnschema_core/dev/id.py index 53043fa0..da1cb662 100644 --- a/lnschema_core/dev/id.py +++ b/lnschema_core/dev/id.py @@ -21,7 +21,7 @@ instance storage file - dfolder + folder run usage notebook @@ -33,8 +33,8 @@ base26, base62, base64, - dfolder, file, + folder, instance, notebook, pipeline, diff --git a/lnschema_core/link.py b/lnschema_core/link.py index ab38b25c..2744bb42 100644 --- a/lnschema_core/link.py +++ b/lnschema_core/link.py @@ -5,8 +5,8 @@ RunIn FileFeatures - ProjectDFolder - DFolderFile + ProjectFolder + FolderFile """ -from ._link import DFolderFile, FileFeatures, ProjectDFolder, RunIn # noqa +from ._link import FileFeatures, FolderFile, ProjectFolder, RunIn # noqa From f03740ca89faa14876f770729fc2ee4c6b2cd074 Mon Sep 17 00:00:00 2001 From: Alex Wolf Date: Fri, 24 Mar 2023 22:32:58 +0100 Subject: [PATCH 03/11] :green_heart: Add migration script --- .../2023-03-24-5846a15d9241-0_30rc3.py | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py diff --git a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py new file mode 100644 index 00000000..a59a5f07 --- /dev/null +++ b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py @@ -0,0 +1,20 @@ +"""v0.30rc3.""" +import sqlalchemy as sa # noqa +import sqlmodel as sqm # noqa +from alembic import op + +from lnschema_core.dev.sqlmodel import get_sqlite_prefix_schema_delim_from_alembic + +revision = "5846a15d9241" 
+down_revision = "ebafd37fd6e1" + + +def upgrade() -> None: + sqlite, prefix, schema, delim = get_sqlite_prefix_schema_delim_from_alembic() + + op.rename_table(old_table_name=f"{prefix}dobject", new_table_name=f"{prefix}file", schema=schema) + op.rename_table(old_table_name=f"{prefix}dfolder", new_table_name=f"{prefix}folder", schema=schema) + + +def downgrade() -> None: + pass From 55b0b358f4bb0619e84d2217bad5bec0684b3cfa Mon Sep 17 00:00:00 2001 From: Alex Wolf Date: Fri, 24 Mar 2023 22:47:19 +0100 Subject: [PATCH 04/11] :green_heart: Update migr --- .../2023-03-24-5846a15d9241-0_30rc3.py | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py index a59a5f07..8fc608ff 100644 --- a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py +++ b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py @@ -14,6 +14,43 @@ def upgrade() -> None: op.rename_table(old_table_name=f"{prefix}dobject", new_table_name=f"{prefix}file", schema=schema) op.rename_table(old_table_name=f"{prefix}dfolder", new_table_name=f"{prefix}folder", schema=schema) + op.rename_table(old_table_name=f"{prefix}project_dfolder", new_table_name=f"{prefix}project_folder", schema=schema) + op.rename_table(old_table_name=f"{prefix}dfolder_dobject", new_table_name=f"{prefix}folder_file", schema=schema) + op.rename_table(old_table_name=f"{prefix}dobject_features", new_table_name=f"{prefix}file_features", schema=schema) + + op.drop_index(f"ix_core{delim}dobject__objectkey", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_created_at", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_hash", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_name", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_size", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_source_id", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_storage_id", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_suffix", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_updated_at", table_name=f"{prefix}file", schema=schema) + op.create_index(op.f(f"ix_core{delim}file__objectkey"), "file", ["_objectkey"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_created_at"), "file", ["created_at"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_hash"), "file", ["hash"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_name"), "file", ["name"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_size"), "file", ["size"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_source_id"), "file", ["source_id"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_storage_id"), "file", ["storage_id"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_suffix"), "file", ["suffix"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_updated_at"), "file", ["updated_at"], unique=False, schema=schema) + op.drop_index(f"ix_core{delim}dfolder__objectkey", table_name=f"{prefix}folder", schema=schema) + op.drop_index(f"ix_core{delim}dfolder_created_at", 
table_name=f"{prefix}folder", schema=schema) + op.drop_index(f"ix_core{delim}dfolder_created_by", table_name=f"{prefix}folder", schema=schema) + op.drop_index(f"ix_core{delim}dfolder_name", table_name=f"{prefix}folder", schema=schema) + op.drop_index(f"ix_core{delim}dfolder_updated_at", table_name=f"{prefix}folder", schema=schema) + op.create_index(op.f(f"ix_core{delim}folder__objectkey"), "folder", ["_objectkey"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}folder_created_at"), "folder", ["created_at"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}folder_created_by"), "folder", ["created_by"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}folder_name"), "folder", ["name"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}folder_updated_at"), "folder", ["updated_at"], unique=False, schema=schema) + + with op.batch_alter_table(f"{prefix}run_in", schema=schema) as batch_op: + batch_op.alter_column(column_name="dobject_id", new_column_name="file_id") + op.drop_constraint("fk_run_in_dobject_id_dobject", type_="foreignkey") + op.create_foreign_key(op.f("fk_run_in_file_id_file"), f"{prefix}file", ["file_id"], ["id"], referent_schema=schema) def downgrade() -> None: From 49cb60cbdd900767d572359e94081b3d8b70dff7 Mon Sep 17 00:00:00 2001 From: Alex Wolf Date: Fri, 24 Mar 2023 22:52:59 +0100 Subject: [PATCH 05/11] :green_heart: Fix --- lnschema_core/__init__.py | 2 +- .../migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lnschema_core/__init__.py b/lnschema_core/__init__.py index b106b86e..55cc9807 100644 --- a/lnschema_core/__init__.py +++ b/lnschema_core/__init__.py @@ -33,7 +33,7 @@ """ _schema_id = "yvzi" _name = "core" -_migration = "ebafd37fd6e1" +_migration = "5846a15d9241" __version__ = "0.30rc2" from . 
import dev, link diff --git a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py index 8fc608ff..07989c3e 100644 --- a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py +++ b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py @@ -49,8 +49,8 @@ def upgrade() -> None: with op.batch_alter_table(f"{prefix}run_in", schema=schema) as batch_op: batch_op.alter_column(column_name="dobject_id", new_column_name="file_id") - op.drop_constraint("fk_run_in_dobject_id_dobject", type_="foreignkey") - op.create_foreign_key(op.f("fk_run_in_file_id_file"), f"{prefix}file", ["file_id"], ["id"], referent_schema=schema) + batch_op.drop_constraint("fk_run_in_dobject_id_dobject", type_="foreignkey") + batch_op.create_foreign_key(op.f("fk_run_in_file_id_file"), f"{prefix}file", ["file_id"], ["id"], referent_schema=schema) def downgrade() -> None: From f025705e4d1570bf419f7fbaa6489fc23aa4429a Mon Sep 17 00:00:00 2001 From: Alex Wolf Date: Fri, 24 Mar 2023 23:04:11 +0100 Subject: [PATCH 06/11] :green_heart: Fix --- .../2023-03-24-5846a15d9241-0_30rc3.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py index 07989c3e..f7807544 100644 --- a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py +++ b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py @@ -47,6 +47,24 @@ def upgrade() -> None: op.create_index(op.f(f"ix_core{delim}folder_name"), "folder", ["name"], unique=False, schema=schema) op.create_index(op.f(f"ix_core{delim}folder_updated_at"), "folder", ["updated_at"], unique=False, schema=schema) + with op.batch_alter_table(f"{prefix}folder_file", schema=schema) as batch_op: + batch_op.alter_column(column_name="dobject_id", new_column_name="file_id") + batch_op.alter_column(column_name="dfolder_id", new_column_name="folder_id") + batch_op.drop_constraint("fk_dfolder_dobject_dobject_id_dobject", type_="foreignkey") + batch_op.drop_constraint("fk_dfolder_dobject_dfolder_id_dfolder", type_="foreignkey") + batch_op.create_foreign_key(op.f("fk_folder_file_file_id_file"), f"{prefix}file", ["file_id"], ["id"], referent_schema=schema) + batch_op.create_foreign_key(op.f("fk_folder_file_folder_id_folder"), f"{prefix}folder", ["folder_id"], ["id"], referent_schema=schema) + + with op.batch_alter_table(f"{prefix}file_features", schema=schema) as batch_op: + batch_op.alter_column(column_name="dobject_id", new_column_name="file_id") + batch_op.drop_constraint("fk_dobject_features_dobject_id_dobject", type_="foreignkey") + batch_op.create_foreign_key(op.f("fk_file_features_file_id_file"), f"{prefix}file", ["file_id"], ["id"], referent_schema=schema) + + with op.batch_alter_table(f"{prefix}project_folder", schema=schema) as batch_op: + batch_op.alter_column(column_name="dfolder_id", new_column_name="folder_id") + batch_op.drop_constraint("fk_project_dfolder_dfolder_id_dfolder", type_="foreignkey") + batch_op.create_foreign_key(op.f("fk_project_folder_folder_id_folder"), f"{prefix}folder", ["folder_id"], ["id"], referent_schema=schema) + with op.batch_alter_table(f"{prefix}run_in", schema=schema) as batch_op: batch_op.alter_column(column_name="dobject_id", new_column_name="file_id") batch_op.drop_constraint("fk_run_in_dobject_id_dobject", type_="foreignkey") From 60e5bd95dc372d00f2e9e5867a4393f1ec9854e1 Mon Sep 17 
00:00:00 2001 From: Alex Wolf Date: Fri, 24 Mar 2023 23:12:31 +0100 Subject: [PATCH 07/11] :green_heart: Fix --- .../2023-03-24-5846a15d9241-0_30rc3.py | 34 +++++++++++-------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py index f7807544..7c6ca167 100644 --- a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py +++ b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py @@ -18,15 +18,18 @@ def upgrade() -> None: op.rename_table(old_table_name=f"{prefix}dfolder_dobject", new_table_name=f"{prefix}folder_file", schema=schema) op.rename_table(old_table_name=f"{prefix}dobject_features", new_table_name=f"{prefix}file_features", schema=schema) - op.drop_index(f"ix_core{delim}dobject__objectkey", table_name=f"{prefix}file", schema=schema) - op.drop_index(f"ix_core{delim}dobject_created_at", table_name=f"{prefix}file", schema=schema) - op.drop_index(f"ix_core{delim}dobject_hash", table_name=f"{prefix}file", schema=schema) - op.drop_index(f"ix_core{delim}dobject_name", table_name=f"{prefix}file", schema=schema) - op.drop_index(f"ix_core{delim}dobject_size", table_name=f"{prefix}file", schema=schema) - op.drop_index(f"ix_core{delim}dobject_source_id", table_name=f"{prefix}file", schema=schema) - op.drop_index(f"ix_core{delim}dobject_storage_id", table_name=f"{prefix}file", schema=schema) - op.drop_index(f"ix_core{delim}dobject_suffix", table_name=f"{prefix}file", schema=schema) - op.drop_index(f"ix_core{delim}dobject_updated_at", table_name=f"{prefix}file", schema=schema) + try: + op.drop_index(f"ix_core{delim}dobject__objectkey", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_created_at", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_hash", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_name", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_size", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_source_id", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_storage_id", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_suffix", table_name=f"{prefix}file", schema=schema) + op.drop_index(f"ix_core{delim}dobject_updated_at", table_name=f"{prefix}file", schema=schema) + except Exception: + pass op.create_index(op.f(f"ix_core{delim}file__objectkey"), "file", ["_objectkey"], unique=False, schema=schema) op.create_index(op.f(f"ix_core{delim}file_created_at"), "file", ["created_at"], unique=False, schema=schema) op.create_index(op.f(f"ix_core{delim}file_hash"), "file", ["hash"], unique=False, schema=schema) @@ -36,11 +39,14 @@ def upgrade() -> None: op.create_index(op.f(f"ix_core{delim}file_storage_id"), "file", ["storage_id"], unique=False, schema=schema) op.create_index(op.f(f"ix_core{delim}file_suffix"), "file", ["suffix"], unique=False, schema=schema) op.create_index(op.f(f"ix_core{delim}file_updated_at"), "file", ["updated_at"], unique=False, schema=schema) - op.drop_index(f"ix_core{delim}dfolder__objectkey", table_name=f"{prefix}folder", schema=schema) - op.drop_index(f"ix_core{delim}dfolder_created_at", table_name=f"{prefix}folder", schema=schema) - op.drop_index(f"ix_core{delim}dfolder_created_by", table_name=f"{prefix}folder", schema=schema) - 
op.drop_index(f"ix_core{delim}dfolder_name", table_name=f"{prefix}folder", schema=schema) - op.drop_index(f"ix_core{delim}dfolder_updated_at", table_name=f"{prefix}folder", schema=schema) + try: + op.drop_index(f"ix_core{delim}dfolder__objectkey", table_name=f"{prefix}folder", schema=schema) + op.drop_index(f"ix_core{delim}dfolder_created_at", table_name=f"{prefix}folder", schema=schema) + op.drop_index(f"ix_core{delim}dfolder_created_by", table_name=f"{prefix}folder", schema=schema) + op.drop_index(f"ix_core{delim}dfolder_name", table_name=f"{prefix}folder", schema=schema) + op.drop_index(f"ix_core{delim}dfolder_updated_at", table_name=f"{prefix}folder", schema=schema) + except Exception: + pass op.create_index(op.f(f"ix_core{delim}folder__objectkey"), "folder", ["_objectkey"], unique=False, schema=schema) op.create_index(op.f(f"ix_core{delim}folder_created_at"), "folder", ["created_at"], unique=False, schema=schema) op.create_index(op.f(f"ix_core{delim}folder_created_by"), "folder", ["created_by"], unique=False, schema=schema) From 42fb43ea52831f6499fccea4113b6e8c6d5cc7ba Mon Sep 17 00:00:00 2001 From: Alex Wolf Date: Fri, 24 Mar 2023 23:16:16 +0100 Subject: [PATCH 08/11] :green_heart: Fix --- .../2023-03-24-5846a15d9241-0_30rc3.py | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py index 7c6ca167..4076fe99 100644 --- a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py +++ b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py @@ -30,15 +30,15 @@ def upgrade() -> None: op.drop_index(f"ix_core{delim}dobject_updated_at", table_name=f"{prefix}file", schema=schema) except Exception: pass - op.create_index(op.f(f"ix_core{delim}file__objectkey"), "file", ["_objectkey"], unique=False, schema=schema) - op.create_index(op.f(f"ix_core{delim}file_created_at"), "file", ["created_at"], unique=False, schema=schema) - op.create_index(op.f(f"ix_core{delim}file_hash"), "file", ["hash"], unique=False, schema=schema) - op.create_index(op.f(f"ix_core{delim}file_name"), "file", ["name"], unique=False, schema=schema) - op.create_index(op.f(f"ix_core{delim}file_size"), "file", ["size"], unique=False, schema=schema) - op.create_index(op.f(f"ix_core{delim}file_source_id"), "file", ["source_id"], unique=False, schema=schema) - op.create_index(op.f(f"ix_core{delim}file_storage_id"), "file", ["storage_id"], unique=False, schema=schema) - op.create_index(op.f(f"ix_core{delim}file_suffix"), "file", ["suffix"], unique=False, schema=schema) - op.create_index(op.f(f"ix_core{delim}file_updated_at"), "file", ["updated_at"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file__objectkey"), f"{prefix}file", ["_objectkey"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_created_at"), f"{prefix}file", ["created_at"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_hash"), f"{prefix}file", ["hash"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_name"), f"{prefix}file", ["name"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_size"), f"{prefix}file", ["size"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_source_id"), f"{prefix}file", ["source_id"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_storage_id"), f"{prefix}file", ["storage_id"], 
unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_suffix"), f"{prefix}file", ["suffix"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}file_updated_at"), f"{prefix}file", ["updated_at"], unique=False, schema=schema) try: op.drop_index(f"ix_core{delim}dfolder__objectkey", table_name=f"{prefix}folder", schema=schema) op.drop_index(f"ix_core{delim}dfolder_created_at", table_name=f"{prefix}folder", schema=schema) @@ -47,11 +47,11 @@ def upgrade() -> None: op.drop_index(f"ix_core{delim}dfolder_updated_at", table_name=f"{prefix}folder", schema=schema) except Exception: pass - op.create_index(op.f(f"ix_core{delim}folder__objectkey"), "folder", ["_objectkey"], unique=False, schema=schema) - op.create_index(op.f(f"ix_core{delim}folder_created_at"), "folder", ["created_at"], unique=False, schema=schema) - op.create_index(op.f(f"ix_core{delim}folder_created_by"), "folder", ["created_by"], unique=False, schema=schema) - op.create_index(op.f(f"ix_core{delim}folder_name"), "folder", ["name"], unique=False, schema=schema) - op.create_index(op.f(f"ix_core{delim}folder_updated_at"), "folder", ["updated_at"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}folder__objectkey"), f"{prefix}folder", ["_objectkey"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}folder_created_at"), f"{prefix}folder", ["created_at"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}folder_created_by"), f"{prefix}folder", ["created_by"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}folder_name"), f"{prefix}folder", ["name"], unique=False, schema=schema) + op.create_index(op.f(f"ix_core{delim}folder_updated_at"), f"{prefix}folder", ["updated_at"], unique=False, schema=schema) with op.batch_alter_table(f"{prefix}folder_file", schema=schema) as batch_op: batch_op.alter_column(column_name="dobject_id", new_column_name="file_id") From 8d0e2daca8cc33edd7d056783a4746f76cd94bb2 Mon Sep 17 00:00:00 2001 From: Alex Wolf Date: Fri, 24 Mar 2023 23:22:28 +0100 Subject: [PATCH 09/11] :green_heart: Fix --- .../2023-03-24-5846a15d9241-0_30rc3.py | 22 ++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py index 4076fe99..a66f971f 100644 --- a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py +++ b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py @@ -56,24 +56,36 @@ def upgrade() -> None: with op.batch_alter_table(f"{prefix}folder_file", schema=schema) as batch_op: batch_op.alter_column(column_name="dobject_id", new_column_name="file_id") batch_op.alter_column(column_name="dfolder_id", new_column_name="folder_id") - batch_op.drop_constraint("fk_dfolder_dobject_dobject_id_dobject", type_="foreignkey") - batch_op.drop_constraint("fk_dfolder_dobject_dfolder_id_dfolder", type_="foreignkey") + try: + batch_op.drop_constraint("fk_dfolder_dobject_dobject_id_dobject", type_="foreignkey") + batch_op.drop_constraint("fk_dfolder_dobject_dfolder_id_dfolder", type_="foreignkey") + except Exception: + pass batch_op.create_foreign_key(op.f("fk_folder_file_file_id_file"), f"{prefix}file", ["file_id"], ["id"], referent_schema=schema) batch_op.create_foreign_key(op.f("fk_folder_file_folder_id_folder"), f"{prefix}folder", ["folder_id"], ["id"], referent_schema=schema) with op.batch_alter_table(f"{prefix}file_features", schema=schema) as 
batch_op: batch_op.alter_column(column_name="dobject_id", new_column_name="file_id") - batch_op.drop_constraint("fk_dobject_features_dobject_id_dobject", type_="foreignkey") + try: + batch_op.drop_constraint("fk_dobject_features_dobject_id_dobject", type_="foreignkey") + except Exception: + pass batch_op.create_foreign_key(op.f("fk_file_features_file_id_file"), f"{prefix}file", ["file_id"], ["id"], referent_schema=schema) with op.batch_alter_table(f"{prefix}project_folder", schema=schema) as batch_op: batch_op.alter_column(column_name="dfolder_id", new_column_name="folder_id") - batch_op.drop_constraint("fk_project_dfolder_dfolder_id_dfolder", type_="foreignkey") + try: + batch_op.drop_constraint("fk_project_dfolder_dfolder_id_dfolder", type_="foreignkey") + except Exception: + pass batch_op.create_foreign_key(op.f("fk_project_folder_folder_id_folder"), f"{prefix}folder", ["folder_id"], ["id"], referent_schema=schema) with op.batch_alter_table(f"{prefix}run_in", schema=schema) as batch_op: batch_op.alter_column(column_name="dobject_id", new_column_name="file_id") - batch_op.drop_constraint("fk_run_in_dobject_id_dobject", type_="foreignkey") + try: + batch_op.drop_constraint("fk_run_in_dobject_id_dobject", type_="foreignkey") + except Exception: + pass batch_op.create_foreign_key(op.f("fk_run_in_file_id_file"), f"{prefix}file", ["file_id"], ["id"], referent_schema=schema) From 1c69b75b7b8aa9b6899f8ba4ca542ffa7974dd42 Mon Sep 17 00:00:00 2001 From: Alex Wolf Date: Fri, 24 Mar 2023 23:41:07 +0100 Subject: [PATCH 10/11] :green_heart: Try --- .../2023-03-24-5846a15d9241-0_30rc3.py | 35 ++++++++++--------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py index a66f971f..ba440a81 100644 --- a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py +++ b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py @@ -2,6 +2,7 @@ import sqlalchemy as sa # noqa import sqlmodel as sqm # noqa from alembic import op +from sqlalchemy.engine.reflection import Inspector from lnschema_core.dev.sqlmodel import get_sqlite_prefix_schema_delim_from_alembic @@ -10,6 +11,9 @@ def upgrade() -> None: + engine = op.get_bind().engine + inspector = Inspector.from_engine(engine) + sqlite, prefix, schema, delim = get_sqlite_prefix_schema_delim_from_alembic() op.rename_table(old_table_name=f"{prefix}dobject", new_table_name=f"{prefix}file", schema=schema) @@ -56,36 +60,33 @@ def upgrade() -> None: with op.batch_alter_table(f"{prefix}folder_file", schema=schema) as batch_op: batch_op.alter_column(column_name="dobject_id", new_column_name="file_id") batch_op.alter_column(column_name="dfolder_id", new_column_name="folder_id") - try: - batch_op.drop_constraint("fk_dfolder_dobject_dobject_id_dobject", type_="foreignkey") - batch_op.drop_constraint("fk_dfolder_dobject_dfolder_id_dfolder", type_="foreignkey") - except Exception: - pass + for constraint in inspector.get_foreign_keys(f"{prefix}folder_file", schema=schema): + if constraint["name"] == "fk_dfolder_dobject_dobject_id_dobject": + batch_op.drop_constraint("fk_dfolder_dobject_dobject_id_dobject", type_="foreignkey") + if constraint["name"] == "fk_dfolder_dobject_dfolder_id_dfolder": + batch_op.drop_constraint("fk_dfolder_dobject_dfolder_id_dfolder", type_="foreignkey") batch_op.create_foreign_key(op.f("fk_folder_file_file_id_file"), f"{prefix}file", ["file_id"], ["id"], referent_schema=schema) 
batch_op.create_foreign_key(op.f("fk_folder_file_folder_id_folder"), f"{prefix}folder", ["folder_id"], ["id"], referent_schema=schema) with op.batch_alter_table(f"{prefix}file_features", schema=schema) as batch_op: batch_op.alter_column(column_name="dobject_id", new_column_name="file_id") - try: - batch_op.drop_constraint("fk_dobject_features_dobject_id_dobject", type_="foreignkey") - except Exception: - pass + for constraint in inspector.get_foreign_keys(f"{prefix}file_features", schema=schema): + if constraint["name"] == "fk_dobject_features_dobject_id_dobject": + batch_op.drop_constraint("fk_dobject_features_dobject_id_dobject", type_="foreignkey") batch_op.create_foreign_key(op.f("fk_file_features_file_id_file"), f"{prefix}file", ["file_id"], ["id"], referent_schema=schema) with op.batch_alter_table(f"{prefix}project_folder", schema=schema) as batch_op: batch_op.alter_column(column_name="dfolder_id", new_column_name="folder_id") - try: - batch_op.drop_constraint("fk_project_dfolder_dfolder_id_dfolder", type_="foreignkey") - except Exception: - pass + for constraint in inspector.get_foreign_keys(f"{prefix}project_folder", schema=schema): + if constraint["name"] == "fk_project_dfolder_dfolder_id_dfolder": + batch_op.drop_constraint("fk_project_dfolder_dfolder_id_dfolder", type_="foreignkey") batch_op.create_foreign_key(op.f("fk_project_folder_folder_id_folder"), f"{prefix}folder", ["folder_id"], ["id"], referent_schema=schema) with op.batch_alter_table(f"{prefix}run_in", schema=schema) as batch_op: batch_op.alter_column(column_name="dobject_id", new_column_name="file_id") - try: - batch_op.drop_constraint("fk_run_in_dobject_id_dobject", type_="foreignkey") - except Exception: - pass + for constraint in inspector.get_foreign_keys(f"{prefix}run_in", schema=schema): + if constraint["name"] == "fk_run_in_dobject_id_dobject": + batch_op.drop_constraint("fk_run_in_dobject_id_dobject", type_="foreignkey") batch_op.create_foreign_key(op.f("fk_run_in_file_id_file"), f"{prefix}file", ["file_id"], ["id"], referent_schema=schema) From f6dd839c3ff8b26696d6096d745a2fa7b2194d8c Mon Sep 17 00:00:00 2001 From: Alex Wolf Date: Fri, 24 Mar 2023 23:46:28 +0100 Subject: [PATCH 11/11] :green_heart: Try --- .../migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py index ba440a81..19ea5eb3 100644 --- a/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py +++ b/lnschema_core/migrations/versions/2023-03-24-5846a15d9241-0_30rc3.py @@ -60,7 +60,7 @@ def upgrade() -> None: with op.batch_alter_table(f"{prefix}folder_file", schema=schema) as batch_op: batch_op.alter_column(column_name="dobject_id", new_column_name="file_id") batch_op.alter_column(column_name="dfolder_id", new_column_name="folder_id") - for constraint in inspector.get_foreign_keys(f"{prefix}folder_file", schema=schema): + for constraint in inspector.get_foreign_keys(f"{prefix}dfolder_dobject", schema=schema): if constraint["name"] == "fk_dfolder_dobject_dobject_id_dobject": batch_op.drop_constraint("fk_dfolder_dobject_dobject_id_dobject", type_="foreignkey") if constraint["name"] == "fk_dfolder_dobject_dfolder_id_dfolder": @@ -70,14 +70,14 @@ def upgrade() -> None: with op.batch_alter_table(f"{prefix}file_features", schema=schema) as batch_op: batch_op.alter_column(column_name="dobject_id", new_column_name="file_id") - for 
constraint in inspector.get_foreign_keys(f"{prefix}file_features", schema=schema): + for constraint in inspector.get_foreign_keys(f"{prefix}dobject_features", schema=schema): if constraint["name"] == "fk_dobject_features_dobject_id_dobject": batch_op.drop_constraint("fk_dobject_features_dobject_id_dobject", type_="foreignkey") batch_op.create_foreign_key(op.f("fk_file_features_file_id_file"), f"{prefix}file", ["file_id"], ["id"], referent_schema=schema) with op.batch_alter_table(f"{prefix}project_folder", schema=schema) as batch_op: batch_op.alter_column(column_name="dfolder_id", new_column_name="folder_id") - for constraint in inspector.get_foreign_keys(f"{prefix}project_folder", schema=schema): + for constraint in inspector.get_foreign_keys(f"{prefix}project_dfolder", schema=schema): if constraint["name"] == "fk_project_dfolder_dfolder_id_dfolder": batch_op.drop_constraint("fk_project_dfolder_dfolder_id_dfolder", type_="foreignkey") batch_op.create_foreign_key(op.f("fk_project_folder_folder_id_folder"), f"{prefix}folder", ["folder_id"], ["id"], referent_schema=schema)
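The later patches in this series settle on an inspector-guarded pattern for the link-table foreign keys: reflect the live database, drop the old constraint only if it is actually present, then recreate it against the renamed table. A condensed, self-contained sketch of that pattern, not the actual migration: it uses run_in as the example table and omits the sqlite prefix/schema handling that get_sqlite_prefix_schema_delim_from_alembic provides.

import sqlalchemy as sa
from alembic import op


def upgrade() -> None:
    # Reflect the live database; older instances may not have the named constraint at all.
    inspector = sa.inspect(op.get_bind())

    with op.batch_alter_table("run_in") as batch_op:
        # Rename the column, then repoint the foreign key at the renamed "file" table.
        batch_op.alter_column(column_name="dobject_id", new_column_name="file_id")
        for fk in inspector.get_foreign_keys("run_in"):
            if fk["name"] == "fk_run_in_dobject_id_dobject":
                batch_op.drop_constraint("fk_run_in_dobject_id_dobject", type_="foreignkey")
        batch_op.create_foreign_key("fk_run_in_file_id_file", "file", ["file_id"], ["id"])

Guarding on the reflected constraint name keeps the column rename working on instances that never had the named foreign keys, which is what the blanket try/except blocks in the intermediate patches were approximating.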