Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

add tests to all connectivity features (many are buggy) #161

Merged
merged 3 commits into from
Mar 2, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/siibra-testing.yml
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ jobs:
- name: Install test dependencies
run: pip install pytest pytest-cov coverage
- name: Run test with pytest
run: pytest
run: pytest -rx

coverage:
runs-on: ubuntu-latest
Expand Down
6 changes: 5 additions & 1 deletion siibra/core/atlas.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,9 +150,13 @@ def _from_json(cls, obj):
return atlas
return obj

@property
def model_id(self):
    """The @id used when serializing this atlas; identical to its native id."""
    identifier = self.id
    return identifier

def to_model(self, **kwargs) -> SiibraAtlasModel:
return SiibraAtlasModel(
id=self.id,
id=self.model_id,
type="juelich/iav/atlas/v1.0.0",
name=self.name,
spaces=[SiibraAtIdModel(id=spc.to_model().id) for spc in self.spaces],
Expand Down
6 changes: 5 additions & 1 deletion siibra/core/datasets.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,9 +105,13 @@ def extract_type_id(cls, spec):
return spec[key]
raise RuntimeError(f"No type defined in dataset specification: {spec}")

@property
def model_id(self):
    """The dataset's @id: its own id when set, otherwise an md5 digest
    of its string form concatenated with its description."""
    if self.id:
        return self.id
    fallback_source = f"{str(self)}{self.description}".encode("utf-8")
    return hashlib.md5(fallback_source).hexdigest()

def to_model(self, **kwargs) -> DatasetJsonModel:
metadata=DatasetVersionModel(
id=self.id or hashlib.md5(f"{str(self)}{self.description}".encode("utf-8")).hexdigest(),
id=self.model_id,
type="https://openminds.ebrains.eu/core/DatasetVersion",
accessibility={
"@id": "https://openminds.ebrains.eu/instances/productAccessibility/freeAccess",
Expand Down
6 changes: 5 additions & 1 deletion siibra/core/parcellation.py
Original file line number Diff line number Diff line change
Expand Up @@ -530,9 +530,13 @@ def get_brain_atlas_version_id(self, space: Space) -> str:
def get_brain_atlas_version_name(self, space: Space) -> str:
return f"{self.name} in {space.to_model().full_name}"

@property
def model_id(self):
    """The parcellation's @id for serialized models — simply its native id."""
    native_id = self.id
    return native_id

def to_model(self, **kwargs) -> SiibraParcellationModel:
return SiibraParcellationModel(
id=self.id,
id=self.model_id,
type=SIIBRA_PARCELLATION_MODEL_TYPE,
name=self.name,
datasets=[ds.to_model() for ds in self.datasets if isinstance(ds, OriginDescription) or isinstance(ds, EbrainsDataset)],
Expand Down
16 changes: 11 additions & 5 deletions siibra/core/region.py
Original file line number Diff line number Diff line change
Expand Up @@ -804,6 +804,16 @@ def _from_json(cls, jsonstr, parcellation):

return result

@property
def model_id(self):
    """@id of the serialized region model.

    Regions of the superficial-fibre-bundle parcellation get a fixed
    SWMA_2018 identifier; all other regions get an md5-hashed openMINDS
    parcellationEntityVersion id derived from their native id.
    """
    from .. import parcellations
    if self.parcellation is parcellations.SUPERFICIAL_FIBRE_BUNDLES:
        return f"https://openminds.ebrains.eu/instances/parcellationEntityVersion/SWMA_2018_{self.name}"
    import hashlib
    hashed_id = hashlib.md5(self.id.encode("utf-8")).hexdigest()
    return f"https://openminds.ebrains.eu/instances/parcellationEntityVersion/{hashed_id}"

def to_model(self, detail=False, space: Space=None, **kwargs) -> ParcellationEntityVersionModel:
if detail:
assert isinstance(self.parent, JSONSerializable), f"Region.parent must be a JSONSerializable"
Expand All @@ -813,11 +823,8 @@ def to_model(self, detail=False, space: Space=None, **kwargs) -> ParcellationEnt
centroids = self.centroids(space)
assert len(centroids) == 1, f"expect a single centroid as return for centroid(space) call, but got {len(centroids)} results."

import hashlib
def get_unique_id(id):
return hashlib.md5(id.encode("utf-8")).hexdigest()
pev = ParcellationEntityVersionModel(
id=f"https://openminds.ebrains.eu/instances/parcellationEntityVersion/{get_unique_id(self.id)}",
id=self.model_id,
type=OPENMINDS_PARCELLATION_ENTITY_VERSION_TYPE,
has_parent=[{
'@id': self.parent.to_model(detail=False).id
Expand Down Expand Up @@ -902,7 +909,6 @@ def vol_to_id_dict(vol: VolumeSrc):
if is_rh:
pev.version_identifier = f"2018, rh"

pev.id = f"https://openminds.ebrains.eu/instances/parcellationEntityVersion/SWMA_2018_{self.name}"
pev.lookup_label = f"SWMA_2018_{self.name}"


Expand Down
20 changes: 19 additions & 1 deletion siibra/core/serializable_concept.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from abc import ABC, abstractmethod
from abc import ABC, abstractmethod, abstractproperty
import numpy as np
import zlib
import base64
Expand All @@ -8,6 +8,24 @@

class JSONSerializable(ABC):

@property
@abstractmethod
def model_id(self):
    """
    The model_id abstract property should populate the @id attribute of the model returned by to_model.
    It also allows unified indexing of a list of JSONSerializable without first calling the
    (potentially expensive) to_model method.

    e.g.

    without model_id abstract property:
        # finding a feature based on feature_id
        found_feature = [feat for feat in features if feat.to_model().id == feature_id]

    with model_id abstract property:
        # finding a feature based on feature_id
        found_feature = [feat for feat in features if feat.model_id == feature_id]
    """
    # abc.abstractproperty is deprecated since Python 3.3; stacking
    # @property over @abstractmethod is the modern, equivalent spelling.
    raise AttributeError("model_id property needs to be overwritten by subclass!")

@abstractmethod
def to_model(self, **kwargs):
    """Serialize this concept into its pydantic model; must be overridden by subclasses."""
    raise AttributeError("JSONSerializable needs to have to_model overwritten")
Expand Down
30 changes: 24 additions & 6 deletions siibra/core/space.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,9 +165,13 @@ def _from_json(cls, obj):

return result

@property
def model_id(self):
    """The reference space's @id in serialized models — its native id."""
    identifier = self.id
    return identifier

def to_model(self, **kwargs) -> commonCoordinateSpace.Model:
return commonCoordinateSpace.Model(
id=self.id,
id=self.model_id,
type="https://openminds.ebrains.eu/sands/CoordinateSpace",
anatomical_axes_orientation={
"@id": "https://openminds.ebrains.eu/vocab/anatomicalAxesOrientation/XYZ"
Expand Down Expand Up @@ -215,8 +219,12 @@ def __init__(self, space: Space):
else:
self.space: Space = Space.REGISTRY[space]

@property
def model_id(self):
    """Pseudo-id for a bare location, derived from its reference space's id."""
    if self.space is None:
        space_part = "None"
    else:
        space_part = self.space.model_id
    return f"spy/location/space:{space_part}"

def to_model(self, **kwargs) -> LocationModel:
return LocationModel(space={ "@id": self.space.to_model().id })
return LocationModel(space={ "@id": self.space.model_id })

@abstractmethod
def intersects(self, mask: Nifti1Image):
Expand Down Expand Up @@ -330,7 +338,6 @@ def __iter__(self):
def __str__(self):
return f"{self.__class__.__name__} in {self.space.name}"


class Point(Location, JSONSerializable):
"""A single 3D point in reference space."""

Expand Down Expand Up @@ -603,13 +610,19 @@ def bigbrain_section(self):
)
return int((coronal_position + 70.0) / 0.02 + 1.5)

@property
def model_id(self):
    """md5 digest over the space's model id and the comma-joined coordinates."""
    coordinate_string = ",".join(str(val) for val in self)
    digest_input = f"{self.space.model_id}{coordinate_string}".encode("utf-8")
    return hashlib.md5(digest_input).hexdigest()


def to_model(self, **kwargs) -> CoordinatePointModel:
if self.space is None:
raise RuntimeError(f"Point.to_model cannot be done on Location entity that does not have space defined!")
space_id = self.space.to_model().id
point_id = hashlib.md5(f"{space_id}{','.join(str(val) for val in self)}".encode("utf-8")).hexdigest()
space_id = self.space.model_id

return CoordinatePointModel(
id=point_id,
id=self.model_id,
type="https://openminds.ebrains.eu/sands/CoordinatePoint",
coordinate_space={
"@id": space_id
Expand Down Expand Up @@ -822,11 +835,16 @@ def __init__(self, point1, point2, space: Space, minsize: float =None):
for d in range(3):
if self.shape[d] < minsize:
self.maxpoint[d] = self.minpoint[d] + minsize
@property
def model_id(self):
    """md5 digest of the bounding box's string representation."""
    # The function-local `import hashlib` was redundant: hashlib is already
    # used at module level elsewhere in this file (e.g. Point.model_id).
    return hashlib.md5(str(self).encode("utf-8")).hexdigest()

def to_model(self, **kwargs) -> BoundingBoxModel:
super_model = super().to_model(**kwargs)
return BoundingBoxModel(
**super_model.dict(),
id=self.model_id,
center=self.center.to_model(**kwargs),
minpoint=self.minpoint.to_model(**kwargs),
maxpoint=self.maxpoint.to_model(**kwargs),
Expand Down
42 changes: 31 additions & 11 deletions siibra/features/connectivity.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@

import pandas as pd
import numpy as np
from typing import Dict, List
from typing import Dict, List, Union
import hashlib
from pydantic import Field

Expand Down Expand Up @@ -83,23 +83,43 @@ def get_profile(self, regionspec):
def __str__(self):
return ParcellationFeature.__str__(self) + " " + str(self.src_info)

@property
def model_id(self):
    """md5 digest of this feature's string representation."""
    text = str(self)
    return hashlib.md5(text.encode("utf-8")).hexdigest()

def to_model(self, **kwargs) -> ConnectivityMatrixDataModel:
    """Serialize this connectivity matrix into its data model.

    A matrix with a single integer dtype is exported as int32; a single
    float dtype as float32; mixed dtypes are cast to float32 with a
    warning.  Columns indexed by Region objects are exported by name.
    """
    from ..core import Region
    dtype_set = {dtype for dtype in self.matrix.dtypes}

    if len(dtype_set) == 0:
        raise TypeError("dtype is an empty set!")

    force_float = False
    if len(dtype_set) == 1:
        dtype, = list(dtype_set)
        is_int = np.issubdtype(dtype, int)
        is_float = np.issubdtype(dtype, float)
        assert is_int or is_float, f"expect datatype to be subdtype of either int or float, but is neither: {str(dtype)}"

    if len(dtype_set) > 1:
        logger.warning(f"expect only 1 type of data, but got {len(dtype_set)}, will cast everything to float")
        force_float = True

    def get_column_name(col: Union[str, Region]) -> str:
        # Plain strings pass through; Region columns serialize by name.
        if isinstance(col, str):
            return col
        if isinstance(col, Region):
            return col.name
        # typo fixed: message previously read "can be be converted"
        raise TypeError(f"matrix column value {col} of instance {col.__class__} cannot be converted to str.")

    return ConnectivityMatrixDataModel(
        id=self.model_id,
        name=str(self),
        columns=[get_column_name(name) for name in self.matrix.columns.values],
        parcellations=[{
            "@id": parc.to_model().id,
        } for parc in self.parcellations],
        # `or` short-circuits, so is_float is only read when exactly one
        # dtype was found (the branch where it is defined).
        matrix=NpArrayDataModel(self.matrix.to_numpy(dtype="float32" if force_float or is_float else "int32")),
    )


Expand Down Expand Up @@ -341,7 +361,7 @@ def _matrixloader(self, jsonloader):
assert "data" in data
col_names = data["data"]["field names"]
row_names = list(data["data"]["profiles"].keys())
assert col_names == row_names
assert col_names == row_names, f"{data['name']} assertion error: expected col_names == row_names"
matrix = pd.DataFrame(
data=[data["data"]["profiles"][r] for r in col_names],
columns=col_names,
Expand Down
18 changes: 15 additions & 3 deletions siibra/features/ieeg.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,10 +129,14 @@ def _update_location(self):
self.location = PointSet(points, points[0].space)
self.dataset._update_location()

@property
def model_id(self):
    """Session id: the parent dataset's model id suffixed with the subject id."""
    parent_id = self.dataset.model_id
    return f"{parent_id}:{self.sub_id}"

def to_model(self, **kwargs) -> IEEGSessionModel:
dataset = self.dataset.to_model()
return IEEGSessionModel(
id=f"{dataset.id}:{self.sub_id}",
id=self.model_id,
dataset=dataset,
sub_id=self.sub_id,
electrodes={
Expand Down Expand Up @@ -179,6 +183,10 @@ def _update_location(self):
self.location = PointSet(points, self.session.space)
self.session._update_location()

@property
def model_id(self):
    """Electrode id: the owning session's model id plus this electrode's id."""
    session_part = self.session.model_id
    return f"{session_part}:{self.electrode_id}"

def to_model(self, **kwargs) -> IEEGElectrodeModel:
return IEEGElectrodeModel(
electrode_id=self.electrode_id,
Expand All @@ -197,7 +205,7 @@ class IEEG_ContactPoint(SpatialFeature, JSONSerializable):
def __init__(self, electrode, id, coord):
point = Point(coord, electrode.space)
SpatialFeature.__init__(self, point)
self.electrode = electrode
self.electrode: IEEG_Electrode = electrode
self.id = id
self.point = point
electrode.register_contact_point(self)
Expand Down Expand Up @@ -228,9 +236,13 @@ def prev(self):
else:
return None

@property
def model_id(self):
    """Contact-point id: the owning electrode's model id plus this point's id."""
    electrode_part = self.electrode.model_id
    return f"{electrode_part}:{self.id}"

def to_model(self, **kwargs) -> IEEGContactPointModel:
return IEEGContactPointModel(
id=self.id,
id=self.model_id,
point=self.point.to_model(**kwargs)
)

Expand Down
4 changes: 4 additions & 0 deletions siibra/features/receptors.py
Original file line number Diff line number Diff line change
Expand Up @@ -458,6 +458,10 @@ def img_from_bytes(b):
)
self._autoradiograph_loaders[rtype] = HttpRequest(url, img_from_bytes)

@property
def model_id(self):
    # NOTE(review): this override adds nothing over the inherited property;
    # presumably it exists only to make the concrete implementation explicit — confirm.
    return super().model_id

def to_model(self, detail=False, **kwargs) -> ReceptorDatasetModel:
base_dict = dict(super().to_model(detail=detail, **kwargs).dict())
base_dict["type"] = "siibra/receptor"
Expand Down
4 changes: 4 additions & 0 deletions siibra/features/voi.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,10 @@ def _from_json(cls, definition):
return result
return definition

@property
def model_id(self):
    # NOTE(review): delegates straight to the parent class's model_id;
    # presumably kept to make the concrete implementation explicit — confirm.
    return super().model_id

def to_model(self, **kwargs) -> VOIDataModel:
super_model = super().to_model(**kwargs)
return VOIDataModel(
Expand Down
11 changes: 1 addition & 10 deletions siibra/retrieval/requests.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,15 +27,6 @@
from io import BytesIO
import urllib
import pandas as pd
from tempfile import mktemp


def bytes_to_temporary_zipfile(b):
    """Wrap raw zip-archive bytes in a ZipFile without touching the filesystem.

    The previous implementation wrote to a path from tempfile.mktemp, which
    is race-prone (the name can be claimed between creation and open) and
    leaked the temp file, since it was never deleted.  Reading from an
    in-memory BytesIO has the same interface and no filesystem side effects.
    """
    return ZipFile(BytesIO(b))


DECODERS = {
".nii.gz": lambda b: Nifti1Image.from_bytes(gzip.decompress(b)),
Expand All @@ -45,7 +36,7 @@ def bytes_to_temporary_zipfile(b):
".tck": lambda b: streamlines.load(BytesIO(b)),
".csv": lambda b: pd.read_csv(BytesIO(b), delimiter=";"),
".txt": lambda b: pd.read_csv(BytesIO(b), delimiter=" ", header=None),
".zip": bytes_to_temporary_zipfile,
".zip": lambda b: ZipFile(BytesIO(b)),
".png": lambda b: io.imread(BytesIO(b))
}

Expand Down
4 changes: 4 additions & 0 deletions siibra/volumes/volume.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,10 @@ def _from_json(cls, obj):
result.map_type = MapType[maptype.upper()]
return result

@property
def model_id(self):
    # NOTE(review): pure pass-through to the inherited model_id property;
    # presumably kept to make the concrete implementation explicit — confirm.
    return super().model_id

def to_model(self, **kwargs) -> VolumeModel:
super_model = super().to_model(**kwargs)
return VolumeModel(
Expand Down
Loading