Skip to content

Commit

Permalink
Merge pull request #44 from BioImageTools/pre-commit
Browse files Browse the repository at this point in the history
Fix pre-commit
  • Loading branch information
dstansby authored Nov 22, 2024
2 parents fbf9231 + 0cd8261 commit d3b17c2
Show file tree
Hide file tree
Showing 11 changed files with 147 additions and 165 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.15
rev: v0.23
hooks:
- id: validate-pyproject

Expand Down
13 changes: 9 additions & 4 deletions docs/tutorial.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,14 +25,17 @@

# ## Updating models
#
# All the fields in the models can be updated in place. When you do this, any validation on the individual field you are updating will take place.
# All the fields in the models can be updated in place. When you do this, any
# validation on the individual field you are updating will take place.
#
# For example, there is no name for the first multiscales entry, so let's add it

multiscales_meta[0].name = "The first multiscales entry"
pprint(multiscales_meta)

# One constraint in the OME-zarr spec is that the coordinate transforms have to be a scale, or a scale then translation (strictly in that order). So if we try and make a transformation just a translation, it will raise an error.
# One constraint in the OME-zarr spec is that the coordinate transforms have to be a
# scale, or a scale then translation (strictly in that order). So if we try and make a
# transformation just a translation, it will raise an error.

multiscales_meta[0].datasets[0].coordinateTransformations = VectorTranslation(
type="translation", translation=[1, 2, 3]
Expand All @@ -44,11 +47,13 @@

# ## Accessing data
#
# Although these models do not handle reading or writing data, they do expose the zarr arrays.
# Although these models do not handle reading or writing data, they do expose the zarr
# arrays.

zarr_arr = ome_zarr_image.group[multiscales_meta[0].datasets[0].path]
pprint(zarr_arr)

# ## Not using validation
#
# If you want to create models that are not validated against the OME-zarr specification, you can use the ``model_construct`` method on the models.
# If you want to create models that are not validated against the OME-zarr
# specification, you can use the ``model_construct`` method on the models.
6 changes: 5 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ dev = [

test = ["pytest"]

pydantic=["pydantic", "pydantic-zarr"]
pydantic = ["pydantic", "pydantic-zarr"]

[tool.uv]
default-groups = ["docs", "dev", "pydantic", "test"]
Expand Down Expand Up @@ -77,6 +77,10 @@ select = [
ignore = [
"D401", # First line should be in imperative mood (remove to opt in)
"D200", # One line docstring should fit on one line.
"D205", # 1 blank line required between summary line and description
"D400", # First line should end with a period
"D100", # Missing docstring in public module
"D104", # Missing docstring in public package
]

[tool.ruff.lint.per-file-ignores]
Expand Down
2 changes: 0 additions & 2 deletions src/ome_zarr_models/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
""""""

from importlib.metadata import PackageNotFoundError, version

try:
Expand Down
4 changes: 4 additions & 0 deletions src/ome_zarr_models/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,8 @@ class Base(pydantic.BaseModel):
"""

class Config:
"""
Pydantic config.
"""

validate_assignment = True
16 changes: 12 additions & 4 deletions src/ome_zarr_models/v04/coordinate_transformations.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
from __future__ import annotations

from collections.abc import Iterable, Sequence
from typing import Literal
from typing import TYPE_CHECKING, Literal

from typing_extensions import Self

from ome_zarr_models.base import Base

if TYPE_CHECKING:
from collections.abc import Iterable, Sequence

__all__ = [
"Identity",
"VectorScale",
Expand Down Expand Up @@ -51,6 +53,9 @@ def build(cls, data: Iterable[float]) -> Self:

@property
def ndim(self) -> int:
"""
Number of dimensions.
"""
return len(self.scale)


Expand Down Expand Up @@ -90,6 +95,9 @@ def build(cls, data: Iterable[float]) -> Self:

@property
def ndim(self) -> int:
"""
Number of dimensions.
"""
return len(self.translation)


Expand Down Expand Up @@ -123,8 +131,8 @@ def _build_transforms(
scale: Sequence[float], translation: Sequence[float] | None
) -> tuple[VectorScale] | tuple[VectorScale, VectorTranslation]:
"""
Create a `VectorScale` and optionally a `VectorTranslation` from a scale and a translation
parameter.
Create a `VectorScale` and optionally a `VectorTranslation` from a scale and a
translation parameter.
"""
vec_scale = VectorScale.build(scale)
if translation is None:
Expand Down
28 changes: 22 additions & 6 deletions src/ome_zarr_models/v04/labels.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,16 @@

import warnings
from collections import Counter
from collections.abc import Hashable, Iterable
from typing import Annotated, Literal
from typing import TYPE_CHECKING, Annotated, Literal

from pydantic import AfterValidator, Field, model_validator

from ome_zarr_models.base import Base
from ome_zarr_models.v04.multiscales import MultiscaleGroupAttrs

if TYPE_CHECKING:
from collections.abc import Hashable, Iterable

__all__ = ["ConInt", "RGBA", "Color", "Source", "Property", "ImageLabel", "GroupAttrs"]

ConInt = Annotated[int, Field(strict=True, ge=0, le=255)]
Expand All @@ -28,27 +30,40 @@ def _duplicates(values: Iterable[Hashable]) -> dict[Hashable, int]:

class Color(Base):
"""
A label value and RGBA as defined in https://ngff.openmicroscopy.org/0.4/#label-md
A label value and RGBA.
References
----------
https://ngff.openmicroscopy.org/0.4/#label-md
"""

label_value: int = Field(..., serialization_alias="label-value")
rgba: RGBA | None


class Source(Base):
"""
Source data for the labels.
"""

# TODO: add validation that this path resolves to something
image: str | None = "../../"


class Property(Base):
"""
A single property.
"""

label_value: int = Field(..., serialization_alias="label-value")


def _parse_colors(colors: list[Color] | None) -> list[Color] | None:
if colors is None:
msg = (
"The field `colors` is `None`. Version 0.4 of"
"the OME-NGFF spec states that `colors` should be a list of label descriptors."
"the OME-NGFF spec states that `colors` should be a list of "
"label descriptors."
)
warnings.warn(msg, stacklevel=1)
else:
Expand All @@ -67,7 +82,8 @@ def _parse_version(version: Literal["0.4"] | None) -> Literal["0.4"] | None:
if version is None:
_ = (
"The `version` attribute is `None`. Version 0.4 of "
"the OME-NGFF spec states that `version` should either be unset or the string 0.4"
"the OME-NGFF spec states that `version` should either be unset or "
"the string 0.4"
)
return version

Expand Down Expand Up @@ -106,7 +122,7 @@ class ImageLabel(Base):
source: Source | None = None

@model_validator(mode="after")
def parse_model(self) -> ImageLabel:
def _parse_model(self) -> ImageLabel:
return _parse_imagelabel(self)


Expand Down
82 changes: 56 additions & 26 deletions src/ome_zarr_models/v04/multiscales.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,8 @@
from __future__ import annotations

from collections import Counter
from collections.abc import Sequence
from typing import Annotated, Any, get_args
from typing import TYPE_CHECKING, Annotated, Any, get_args

import zarr
from pydantic import AfterValidator, Field, model_validator
from pydantic_zarr.v2 import ArraySpec, GroupSpec

Expand All @@ -15,9 +13,16 @@
ScaleTransform,
TranslationTransform,
VectorTransform,
_build_transforms,
_ndim,
)
from ome_zarr_models.v04.omero import Omero

if TYPE_CHECKING:
from collections.abc import Iterable, Sequence

import zarr

from ome_zarr_models.v04.omero import Omero

__all__ = ["VALID_NDIM", "Dataset", "Multiscale", "MultiscaleGroup"]

Expand Down Expand Up @@ -82,7 +87,10 @@ def _ensure_axis_length(axes: Sequence[Axis]) -> Sequence[Axis]:
Ensures that there are between 2 and 5 axes (inclusive)
"""
if (len_axes := len(axes)) not in VALID_NDIM:
msg = f"Incorrect number of axes provided ({len_axes}). Only 2, 3, 4, or 5 axes are allowed."
msg = (
f"Incorrect number of axes provided ({len_axes}). "
"Only 2, 3, 4, or 5 axes are allowed."
)
raise ValueError(msg)
return axes

Expand All @@ -93,7 +101,10 @@ def _ensure_axis_names(axes: Sequence[Axis]) -> Sequence[Axis]:
"""
name_dupes = duplicates(a.name for a in axes)
if len(name_dupes) > 0:
msg = f"Axis names must be unique. Axis names {tuple(name_dupes.keys())} are repeated."
msg = (
f"Axis names must be unique. Axis names {tuple(name_dupes.keys())} are "
"repeated."
)
raise ValueError(msg)
return axes

Expand All @@ -112,7 +123,10 @@ def _ensure_axis_types(axes: Sequence[Axis]) -> Sequence[Axis]:
type_census = Counter(axis_types)
num_spaces = type_census["space"]
if num_spaces not in [2, 3]:
msg = f"Invalid number of space axes: {num_spaces}. Only 2 or 3 space axes are allowed."
msg = (
f"Invalid number of space axes: {num_spaces}. "
"Only 2 or 3 space axes are allowed."
)
raise ValueError(msg)

if not all(a == "space" for a in axis_types[-num_spaces:]):
Expand All @@ -124,12 +138,18 @@ def _ensure_axis_types(axes: Sequence[Axis]) -> Sequence[Axis]:
raise ValueError(msg)

if (num_channels := type_census["channel"]) > 1:
msg = f"Invalid number of channel axes: {num_channels}. Only 1 channel axis is allowed."
msg = (
f"Invalid number of channel axes: {num_channels}. "
"Only 1 channel axis is allowed."
)
raise ValueError(msg)

custom_axes = set(axis_types) - set(get_args(AxisType))
if (num_custom := len(custom_axes)) > 1:
msg = f"Invalid number of custom axes: {num_custom}. Only 1 custom axis is allowed."
msg = (
f"Invalid number of custom axes: {num_custom}. "
"Only 1 custom axis is allowed."
)
raise ValueError(msg)
return axes

Expand All @@ -151,28 +171,27 @@ class Dataset(Base):
]

@classmethod
def build(
cls,
*,
path: str,
scale: Iterable[float],
translation: Iterable[float] ):
def build(cls, *, path: str, scale: Iterable[float], translation: Iterable[float]):
"""
Construct a `Dataset` from a path, a scale, and a translation.
"""
return cls(
path=path,
coordinateTransformations=_build_transforms(scale=scale, translation=translation))
path=path,
coordinateTransformations=_build_transforms(
scale=scale, translation=translation
),
)


def _ensure_top_transforms_dimensionality(data: Multiscale) -> Multiscale:
"""
Ensure that the dimensionality of the top-level coordinateTransformations, if present,
is consistent with the rest of the model.
Ensure that the dimensionality of the top-level coordinateTransformations,
if present, is consistent with the rest of the model.
"""
ctx = data.coordinateTransformations
if ctx is not None:
# check that the dimensionality of the coordinateTransformations is internally consistent
# check that the dimensionality of the coordinateTransformations is
# internally consistent
_ = _ensure_transform_dimensionality(ctx)

return data
Expand Down Expand Up @@ -213,7 +232,8 @@ def _ensure_axes_dataset_transforms(data) -> Multiscale:
if self_ndim != tx.ndim:
msg = (
f"The length of axes does not match the dimensionality of "
f"the {tx.type} transform in datasets[{ds_idx}].coordinateTransformations. "
f"the {tx.type} transform in "
f"datasets[{ds_idx}].coordinateTransformations. "
f"Got {self_ndim} axes, but the {tx.type} transform has "
f"dimensionality {tx.ndim}"
)
Expand Down Expand Up @@ -246,8 +266,9 @@ class Multiscale(Base):
@property
def ndim(self) -> int:
"""
Report the dimensionality of the data described by this metadata, which is determined
by the length of the axes attribute.
Dimensionality of the data described by this metadata.
Determined by the length of the axes attribute.
"""
return len(self.axes)

Expand Down Expand Up @@ -278,11 +299,17 @@ class MultiscaleGroupAttrs(Base):


class MultiscaleGroup(GroupSpec[MultiscaleGroupAttrs, ArraySpec | GroupSpec]):
"""
A multiscale group.
"""

@classmethod
def from_zarr(cls, node: zarr.Group) -> MultiscaleGroup:
"""
Create an instance of `Group` from a `node`, a `zarr.Group`. This method discovers Zarr arrays in the hierarchy rooted at `node` by inspecting the OME-NGFF
multiscales metadata.
Create an instance of `Group` from a `node`, a `zarr.Group`.
This method discovers Zarr arrays in the hierarchy rooted at `node` by
inspecting the OME-NGFF multiscales metadata.
Parameters
----------
Expand All @@ -296,14 +323,16 @@ def from_zarr(cls, node: zarr.Group) -> MultiscaleGroup:
"""
# on unlistable storage backends, the members of this group will be {}
raise NotImplementedError
"""
guess = GroupSpec.from_zarr(node, depth=0)
try:
multi_meta_maybe = guess.attributes["multiscales"]
except KeyError as e:
store_path = get_path(node.store)
msg = (
"Failed to find mandatory `multiscales` key in the attributes of the Zarr group at "
"Failed to find mandatory `multiscales` key in the attributes of the "
"Zarr group at "
f"{node.store}://{store_path}://{node.path}."
)
raise KeyError(msg) from e
Expand Down Expand Up @@ -335,3 +364,4 @@ def from_zarr(cls, node: zarr.Group) -> MultiscaleGroup:
update={"members": members_normalized.members}
)
return cls(**guess_inferred_members.model_dump())
"""
Loading

0 comments on commit d3b17c2

Please sign in to comment.