Commit 4f2f68e

Merge branch 'main' into enh_use_precalc_img_bboxes
2 parents d4d66c5 + 0662f69 commit 4f2f68e

6 files changed: +106 −60 lines changed

e2e/features/image/test_image.py

+32 −46

@@ -3,65 +3,51 @@
 from siibra.features.image.image import Image
 import time
 
-# Update this as new configs are added
-results = [
-    (
-        siibra.features.get(siibra.get_template("big brain"), "CellbodyStainedSection"),
-        145,
-    ),
-    (
-        siibra.features.get(
-            siibra.get_template("big brain"), "CellBodyStainedVolumeOfInterest"
-        ),
-        2,
-    ),
-    (
-        siibra.features.get(
-            siibra.get_template("mni152"), "image", restrict_space=True
-        ),
-        4,
-    ),
-    (
-        siibra.features.get(
-            siibra.get_template("mni152"), "image", restrict_space=False
-        ),
-        13,
-    ),
-    (
-        siibra.features.get(
-            siibra.get_region("julich 3.1", "hoc1 left"), "CellbodyStainedSection"
-        ),
-        45,
-    ),
-    (
-        siibra.features.get(
-            siibra.get_region("julich 2.9", "hoc1 left"), "CellbodyStainedSection"
-        ),
-        41,
-    ),
+PRERELEASE_FEATURES_W_NO_DATASET = [
+    "The Enriched Connectome - Block face images of full sagittal human brain sections (blockface)",
+    "The Enriched Connectome - 3D polarized light imaging connectivity data of full sagittal human brain sections (HSV fibre orientation map)",
 ]
-features = [f for fts, _ in results for f in fts]
+all_image_features = [f for ft in siibra.features.Feature._SUBCLASSES[siibra.features.image.image.Image] for f in ft._get_instances()]
 
 
-@pytest.mark.parametrize("feature", features)
+@pytest.mark.parametrize("feature", all_image_features)
 def test_feature_has_datasets(feature: Image):
-    assert len(feature.datasets) > 0
-
-
-@pytest.mark.parametrize("features, result_len", results)
-def test_image_query_results(features: Image, result_len: int):
-    assert len(features) == result_len
+    if feature.name in PRERELEASE_FEATURES_W_NO_DATASET:
+        if len(feature.datasets) > 0:
+            pytest.fail(f"Feature '{feature}' was listed as prerelease previously but now has dataset information. Please update `PRERELEASE_FEATURES_W_NO_DATASET`")
+        pytest.skip(f"Feature '{feature}' has no datasets yet as it is prerelease data.")
+    assert len(feature.datasets) > 0, f"{feature} has no datasets"
 
 
 def test_images_datasets_names():
     start = time.time()
-    all_ds_names = {ds.name for f in features for ds in f.datasets}
+    all_ds_names = {ds.name for f in all_image_features for ds in f.datasets}
     end = time.time()
     duration = start - end
-    assert len(all_ds_names) == 9, "expected 9 distinct names"
+    assert len(all_ds_names) == 10, "expected 10 distinct names"  # this must be updated if new datasets are added
     assert duration < 1, "Expected getting dataset names to be less than 1s"
 
 
+# Update this as new configs are added
+
+query_and_results = [
+    (siibra.features.get(siibra.get_template("big brain"), "CellbodyStainedSection"), 145),
+    (siibra.features.get(siibra.get_template("big brain"), "CellBodyStainedVolumeOfInterest"), 2),
+    (siibra.features.get(siibra.get_template("mni152"), "image", restrict_space=True), 4),
+    (siibra.features.get(siibra.get_template("mni152"), "image", restrict_space=False), 13),  # TODO: should this query find all the images or is it okay if bigbrain sections fail?
+    (siibra.features.get(siibra.get_region('julich 3.1', 'hoc1 left'), "CellbodyStainedSection"), 45),
+    (siibra.features.get(siibra.get_region('julich 2.9', 'hoc1 left'), "CellbodyStainedSection"), 41)
+]
+
+
+@pytest.mark.parametrize("query_results, result_len", query_and_results)
+def test_image_query_results(
+    query_results: Image,
+    result_len: int
+):
+    assert len(query_results) == result_len
+
+
 def test_color_channel_fetching():
     dti_rgb_vol = [
         f
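
The rewritten test above enumerates every preconfigured image feature instead of relying on a hard-coded list. A minimal interactive sketch of the same enumeration, assuming the private `Feature._SUBCLASSES` registry and `_get_instances()` behave as used in the test (private API, may change between releases):

    import siibra
    from siibra.features.image.image import Image

    # Walk the subclass registry to collect every preconfigured image feature,
    # mirroring the list built for the parametrized test above.
    image_feature_types = siibra.features.Feature._SUBCLASSES[Image]
    all_image_features = [f for ft in image_feature_types for f in ft._get_instances()]
    print(f"{len(all_image_features)} image features found")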

e2e/volumes/test_compute_centroids.py

+11 −0

@@ -0,0 +1,11 @@
+import pytest
+import siibra
+
+from siibra.volumes import Map
+
+preconfigured_maps = list(siibra.maps)
+
+
+@pytest.mark.parametrize("mp", preconfigured_maps)
+def test_compute_centroids(mp: Map):
+    _ = mp.compute_centroids()

siibra/livequeries/allen.py

+4 −1

@@ -292,7 +292,10 @@ def _retrieve_microarray(cls, donor_id: str, probe_ids: str) -> Iterable[GeneExp
         url = AllenBrainAtlasQuery._QUERY["microarray"].format(
             probe_ids=",".join([str(id) for id in probe_ids]), donor_id=donor_id
         )
-        response = HttpRequest(url, json.loads).get()
+        try:
+            response = HttpRequest(url, json.loads).get()
+        except json.JSONDecodeError as e:
+            raise RuntimeError(f"Allen institute site produced an empty response - please try again later.\n{e}")
         if not response["success"]:
             raise Exception(
                 "Invalid response when retrieving microarray data: {}".format(url)

siibra/locations/boundingbox.py

+13 −6

@@ -115,12 +115,12 @@ def is_planar(self) -> bool:
     def __str__(self):
         if self.space is None:
             return (
-                f"Bounding box from ({','.join(f'{v:.2f}' for v in self.minpoint)}) mm "
-                f"to ({','.join(f'{v:.2f}' for v in self.maxpoint)}) mm"
+                f"Bounding box from ({','.join(f'{v:.2f}' for v in self.minpoint)})mm "
+                f"to ({','.join(f'{v:.2f}' for v in self.maxpoint)})mm"
             )
         else:
             return (
-                f"Bounding box from ({','.join(f'{v:.2f}' for v in self.minpoint)}) mm "
+                f"Bounding box from ({','.join(f'{v:.2f}' for v in self.minpoint)})mm "
                 f"to ({','.join(f'{v:.2f}' for v in self.maxpoint)})mm in {self.space.name} space"
             )

@@ -188,12 +188,18 @@ def _intersect_bbox(self, other: 'BoundingBox', dims=[0, 1, 2]):
                 result_minpt.append(A[dim])
                 result_maxpt.append(B[dim])
 
+        if result_minpt == result_maxpt:
+            return result_minpt
+
         bbox = BoundingBox(
             point1=point.Point(result_minpt, self.space),
             point2=point.Point(result_maxpt, self.space),
             space=self.space,
         )
-        return bbox if bbox.volume > 0 else None
+
+        if bbox.volume == 0 and sum(cmin == cmax for cmin, cmax in zip(result_minpt, result_maxpt)) == 2:
+            return None
+        return bbox
 
     def _intersect_mask(self, mask: 'Nifti1Image', threshold=0):
         """Intersect this bounding box with an image mask. Returns None if they do not intersect.

@@ -291,9 +297,10 @@ def warp(self, space):
             return self
         else:
             try:
-                return self.corners.warp(spaceobj).boundingbox
-            except ValueError:
+                warped_corners = self.corners.warp(spaceobj)
+            except SpaceWarpingFailedError:
                 raise SpaceWarpingFailedError(f"Warping {str(self)} to {spaceobj.name} not successful.")
+            return warped_corners.boundingbox
 
     def transform(self, affine: np.ndarray, space=None):
         """Returns a new bounding box obtained by transforming the

siibra/locations/point.py

+9 −4

@@ -18,6 +18,7 @@
 
 from ..commons import logger
 from ..retrieval.requests import HttpRequest
+from ..exceptions import SpaceWarpingFailedError
 
 from urllib.parse import quote
 import re

@@ -55,10 +56,14 @@ def parse(spec, unit="mm") -> Tuple[float, float, float]:
             if len(digits) == 3:
                 return tuple(float(d) for d in digits)
         elif isinstance(spec, (tuple, list)) and len(spec) in [3, 4]:
+            if any(v is None for v in spec):
+                raise RuntimeError("Cannot parse coordinates containing None values.")
             if len(spec) == 4:
                 assert spec[3] == 1
             return tuple(float(v.item()) if isinstance(v, np.ndarray) else float(v) for v in spec[:3])
         elif isinstance(spec, np.ndarray) and spec.size == 3:
+            if any(np.isnan(v) for v in spec):
+                raise RuntimeError("Cannot parse coordinates containing NaN values.")
             return tuple(float(v.item()) if isinstance(v, np.ndarray) else float(v) for v in spec[:3])
         elif isinstance(spec, Point):
             return spec.coordinate

@@ -125,7 +130,7 @@ def warp(self, space):
         if spaceobj == self.space:
             return self
         if any(_ not in location.Location.SPACEWARP_IDS for _ in [self.space.id, spaceobj.id]):
-            raise ValueError(
+            raise SpaceWarpingFailedError(
                 f"Cannot convert coordinates between {self.space.id} and {spaceobj.id}"
             )
         url = "{server}/transform-point?source_space={src}&target_space={tgt}&x={x}&y={y}&z={z}".format(

@@ -137,9 +142,9 @@ def warp(self, space):
             z=self.coordinate[2],
         )
         response = HttpRequest(url, lambda b: json.loads(b.decode())).get()
-        if any(map(np.isnan, response['target_point'])):
-            logger.info(f'Warping {str(self)} to {spaceobj.name} resulted in NaN')
-            return None
+        if np.any(np.isnan(response['target_point'])):
+            raise SpaceWarpingFailedError(f'Warping {str(self)} to {spaceobj.name} resulted in NaN')
+
         return self.__class__(
             coordinatespec=tuple(response["target_point"]),
             space=spaceobj.id,
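
In practice, malformed coordinate specs now fail fast and unresolved warps raise SpaceWarpingFailedError instead of silently returning None. A brief hedged sketch of the parsing side, assuming `Point.parse` is exposed as the static-looking signature in the hunk above suggests (the example coordinates are arbitrary):

    from siibra.locations.point import Point

    # A coordinate spec containing None is rejected immediately instead of
    # producing a malformed point further downstream.
    try:
        Point.parse((1.0, None, 3.0))
    except RuntimeError as exc:
        print("rejected:", exc)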

siibra/locations/pointset.py

+37 −3

@@ -18,6 +18,7 @@
 
 from ..retrieval.requests import HttpRequest
 from ..commons import logger
+from ..exceptions import SpaceWarpingFailedError
 
 from typing import List, Union, Tuple
 import numbers

@@ -149,7 +150,7 @@ def warp(self, space, chunksize=1000):
         if spaceobj == self.space:
             return self
         if any(_ not in location.Location.SPACEWARP_IDS for _ in [self.space.id, spaceobj.id]):
-            raise ValueError(
+            raise SpaceWarpingFailedError(
                 f"Cannot convert coordinates between {self.space.id} and {spaceobj.id}"
             )
 

@@ -178,6 +179,10 @@ def warp(self, space, chunksize=1000):
             ).data
             tgt_points.extend(list(response["target_points"]))
 
+        # TODO: consider using np.isnan(np.dot(arr, arr)). see https://stackoverflow.com/a/45011547
+        if np.any(np.isnan(response['target_points'])):
+            raise SpaceWarpingFailedError(f'Warping {str(self)} to {spaceobj.name} resulted in NaN')
+
         return self.__class__(coordinates=tuple(tgt_points), space=spaceobj, labels=self.labels)
 
     def transform(self, affine: np.ndarray, space=None):

@@ -276,7 +281,34 @@ def homogeneous(self):
         """Access the list of 3D point as an Nx4 array of homogeneous coordinates."""
         return np.c_[self.coordinates, np.ones(len(self))]
 
-    def find_clusters(self, min_fraction=1 / 200, max_fraction=1 / 8):
+    def find_clusters(
+        self,
+        min_fraction: float = 1 / 200,
+        max_fraction: float = 1 / 8
+    ) -> List[int]:
+        """
+        Find clusters using the HDBSCAN (https://dl.acm.org/doi/10.1145/2733381)
+        implementation of scikit-learn (https://dl.acm.org/doi/10.5555/1953048.2078195).
+
+        Parameters
+        ----------
+        min_fraction: min cluster size as a fraction of total points in the PointSet
+        max_fraction: max cluster size as a fraction of total points in the PointSet
+
+        Returns
+        -------
+        List[int]
+            Returns the cluster labels found by sklearn.cluster.HDBSCAN.
+
+        Note
+        ----
+        Replaces the labels of the PointSet instance with these labels.
+
+        Raises
+        ------
+        RuntimeError
+            If a sklearn version without HDBSCAN is installed.
+        """
         if not _HAS_HDBSCAN:
             raise RuntimeError(
                 f"HDBSCAN is not available with your version {sklearn.__version__} "

@@ -289,7 +321,9 @@ def find_clusters(self, min_fraction=1 / 200, max_fraction=1 / 8):
             max_cluster_size=int(N * max_fraction),
         )
         if self.labels is not None:
-            logger.warn("Existing labels of PointSet will be overwritten with cluster labels.")
+            logger.warning(
+                "Existing labels of PointSet will be overwritten with cluster labels."
+            )
         self.labels = clustering.fit_predict(points)
         return self.labels
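
To illustrate the behavior documented in the new `find_clusters` docstring, a hedged usage sketch (the coordinates are made up, it assumes a scikit-learn version that ships HDBSCAN, and the PointSet import path and constructor arguments are taken from the file shown above):

    import numpy as np
    from siibra.locations.pointset import PointSet

    # Two well-separated blobs of random points in MNI152 space (made-up data).
    rng = np.random.default_rng(0)
    coords = np.vstack([
        rng.normal(loc=(10, 10, 10), scale=1.0, size=(300, 3)),
        rng.normal(loc=(40, 40, 40), scale=1.0, size=(300, 3)),
    ])
    ps = PointSet(coords, space="mni152")

    # Returns one cluster label per point (-1 marks noise) and, per the docstring,
    # also overwrites ps.labels with these cluster labels.
    labels = ps.find_clusters(min_fraction=1 / 200, max_fraction=1 / 8)
    print(set(labels))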
