siibra 1.0.1a1__py3-none-any.whl → 1.0.1a4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of siibra might be problematic.

Files changed (67)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +7 -16
  3. siibra/commons.py +19 -8
  4. siibra/configuration/configuration.py +5 -6
  5. siibra/configuration/factory.py +13 -8
  6. siibra/core/__init__.py +1 -1
  7. siibra/core/assignment.py +19 -7
  8. siibra/core/atlas.py +3 -3
  9. siibra/core/concept.py +4 -2
  10. siibra/core/parcellation.py +5 -5
  11. siibra/core/region.py +24 -25
  12. siibra/core/space.py +4 -6
  13. siibra/core/structure.py +2 -2
  14. siibra/explorer/url.py +2 -2
  15. siibra/features/anchor.py +3 -7
  16. siibra/features/connectivity/regional_connectivity.py +51 -40
  17. siibra/features/dataset/ebrains.py +1 -1
  18. siibra/features/feature.py +29 -20
  19. siibra/features/image/__init__.py +6 -3
  20. siibra/features/image/image.py +2 -4
  21. siibra/features/image/sections.py +81 -2
  22. siibra/features/image/volume_of_interest.py +8 -7
  23. siibra/features/tabular/__init__.py +1 -1
  24. siibra/features/tabular/bigbrain_intensity_profile.py +2 -1
  25. siibra/features/tabular/cell_density_profile.py +8 -9
  26. siibra/features/tabular/cortical_profile.py +6 -6
  27. siibra/features/tabular/gene_expression.py +34 -16
  28. siibra/features/tabular/layerwise_bigbrain_intensities.py +4 -3
  29. siibra/features/tabular/layerwise_cell_density.py +83 -24
  30. siibra/features/tabular/receptor_density_fingerprint.py +34 -9
  31. siibra/features/tabular/receptor_density_profile.py +1 -2
  32. siibra/features/tabular/regional_timeseries_activity.py +7 -7
  33. siibra/features/tabular/tabular.py +14 -7
  34. siibra/livequeries/allen.py +23 -22
  35. siibra/livequeries/bigbrain.py +239 -51
  36. siibra/livequeries/ebrains.py +13 -10
  37. siibra/livequeries/query.py +3 -3
  38. siibra/locations/__init__.py +17 -8
  39. siibra/locations/boundingbox.py +10 -8
  40. siibra/{experimental/plane3d.py → locations/experimental.py} +113 -13
  41. siibra/locations/location.py +17 -13
  42. siibra/locations/point.py +14 -19
  43. siibra/locations/pointcloud.py +57 -12
  44. siibra/retrieval/cache.py +1 -0
  45. siibra/retrieval/datasets.py +19 -13
  46. siibra/retrieval/repositories.py +10 -11
  47. siibra/retrieval/requests.py +26 -24
  48. siibra/vocabularies/__init__.py +1 -2
  49. siibra/volumes/__init__.py +4 -3
  50. siibra/volumes/parcellationmap.py +33 -17
  51. siibra/volumes/providers/freesurfer.py +4 -4
  52. siibra/volumes/providers/gifti.py +4 -4
  53. siibra/volumes/providers/neuroglancer.py +19 -22
  54. siibra/volumes/providers/nifti.py +6 -6
  55. siibra/volumes/providers/provider.py +3 -2
  56. siibra/volumes/sparsemap.py +19 -26
  57. siibra/volumes/volume.py +21 -28
  58. {siibra-1.0.1a1.dist-info → siibra-1.0.1a4.dist-info}/METADATA +37 -17
  59. siibra-1.0.1a4.dist-info/RECORD +80 -0
  60. {siibra-1.0.1a1.dist-info → siibra-1.0.1a4.dist-info}/WHEEL +1 -1
  61. siibra/experimental/__init__.py +0 -19
  62. siibra/experimental/contour.py +0 -61
  63. siibra/experimental/cortical_profile_sampler.py +0 -57
  64. siibra/experimental/patch.py +0 -98
  65. siibra-1.0.1a1.dist-info/RECORD +0 -84
  66. {siibra-1.0.1a1.dist-info → siibra-1.0.1a4.dist-info/licenses}/LICENSE +0 -0
  67. {siibra-1.0.1a1.dist-info → siibra-1.0.1a4.dist-info}/top_level.txt +0 -0

siibra/livequeries/bigbrain.py

@@ -14,19 +14,23 @@
  # limitations under the License.
  """Matches BigBrain intensity profiles extracted by Wagstyl et al. to volumes."""

- from . import query
+ from typing import List
+ from os import path

- from ..features.tabular import bigbrain_intensity_profile, layerwise_bigbrain_intensities
+ import numpy as np
+ from scipy.spatial import KDTree
+
+ from . import query
  from ..features import anchor as _anchor
- from ..commons import logger
- from ..locations import point, pointcloud, location
+ from ..features.tabular import bigbrain_intensity_profile, layerwise_bigbrain_intensities
+ from ..features.image import CellbodyStainedSection, BigBrain1MicronPatch
+ from ..commons import logger, siibra_tqdm
+ from ..locations import point, pointcloud, location, experimental
  from ..core import structure
+ from ..core.concept import get_registry
  from ..retrieval import requests, cache
  from ..retrieval.datasets import GenericDataset
-
- import numpy as np
- from typing import List
- from os import path
+ from ..volumes import Volume, from_array


  class WagstylProfileLoader:
@@ -41,23 +45,23 @@ class WagstylProfileLoader:
      DATASET = GenericDataset(
          name="HIBALL workshop on cortical layers",
          contributors=[
-             'Konrad Wagstyl',
-             'Stéphanie Larocque',
-             'Guillem Cucurull',
-             'Claude Lepage',
-             'Joseph Paul Cohen',
-             'Sebastian Bludau',
-             'Nicola Palomero-Gallagher',
-             'Lindsay B. Lewis',
-             'Thomas Funck',
-             'Hannah Spitzer',
-             'Timo Dickscheid',
-             'Paul C. Fletcher',
-             'Adriana Romero',
-             'Karl Zilles',
-             'Katrin Amunts',
-             'Yoshua Bengio',
-             'Alan C. Evans'
+             "Konrad Wagstyl",
+             "Stéphanie Larocque",
+             "Guillem Cucurull",
+             "Claude Lepage",
+             "Joseph Paul Cohen",
+             "Sebastian Bludau",
+             "Nicola Palomero-Gallagher",
+             "Lindsay B. Lewis",
+             "Thomas Funck",
+             "Hannah Spitzer",
+             "Timo Dickscheid",
+             "Paul C. Fletcher",
+             "Adriana Romero",
+             "Karl Zilles",
+             "Katrin Amunts",
+             "Yoshua Bengio",
+             "Alan C. Evans",
          ],
          url="https://github.com/kwagstyl/cortical_layers_tutorial/",
          description="Cortical profiles of BigBrain staining intensities computed by Konrad Wagstyl, "
@@ -67,7 +71,7 @@ class WagstylProfileLoader:
          "http://dx.doi.org/10.1371/journal.pbio.3000678."
          "The data is taken from the tutorial at "
          "https://github.com/kwagstyl/cortical_layers_tutorial. Each vertex is "
-         "assigned to the regional map when queried."
+         "assigned to the regional map when queried.",
      )

      def __init__(self):
@@ -76,15 +80,22 @@ class WagstylProfileLoader:

      @property
      def profile_labels(self):
-         return np.arange(0., 1., 1. / self._profiles.shape[1])
+         return np.arange(0.0, 1.0, 1.0 / self._profiles.shape[1])

      @classmethod
      def _load(cls):
          # read thicknesses, in mm, and normalize by their last column which is the total thickness
-         thickness = requests.HttpRequest(f"{cls.REPO}/{cls.THICKNESSES_FILE_LEFT}").data.T
-         total_thickness = thickness[:, :-1].sum(1)  # last column is the computed total thickness
+         thickness = requests.HttpRequest(
+             f"{cls.REPO}/{cls.THICKNESSES_FILE_LEFT}"
+         ).data.T
+         total_thickness = thickness[:, :-1].sum(
+             1
+         )  # last column is the computed total thickness
          valid = np.where(total_thickness > 0)[0]
-         cls._boundary_depths = np.c_[np.zeros_like(valid), (thickness[valid, :] / total_thickness[valid, None]).cumsum(1)]
+         cls._boundary_depths = np.c_[
+             np.zeros_like(valid),
+             (thickness[valid, :] / total_thickness[valid, None]).cumsum(1),
+         ]
          cls._boundary_depths[:, -1] = 1  # account for float calculation errors

          # find profiles with valid thickness
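
Note on the computation above: the boundary depths are cumulative per-layer thicknesses normalized to [0, 1], and they later drive the layer-label assignment in LayerwiseBigBrainIntensityQuery further down. A toy sketch with made-up thickness values (the real loader also drops the precomputed total-thickness column):

import numpy as np

# six layer thicknesses (mm) for one vertex, turned into relative cumulative depths
thickness = np.array([[0.2, 0.3, 0.2, 0.4, 0.3, 0.6]])
total = thickness.sum(1)
boundary_depths = np.c_[np.zeros(1), (thickness / total[:, None]).cumsum(1)]
# boundary_depths[0] == [0., 0.1, 0.25, 0.35, 0.55, 0.7, 1.]

# a profile sampled at N depths is labeled by counting, per sample, how many
# layer boundaries still lie below it, mirroring the 7 - (...) expression below
N = 10
prange = np.arange(N)
T = (boundary_depths[0] * N).astype(int)  # boundary sample indices: [0 1 2 3 5 7 10]
layer_labels = 7 - np.array([(prange < t) for t in T]).sum(0)
# layer_labels == [1 2 3 4 4 5 5 6 6 6]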
@@ -112,15 +123,23 @@ class WagstylProfileLoader:
  cache.Warmup.register_warmup_fn()(lambda: WagstylProfileLoader._load())


- class BigBrainProfileQuery(query.LiveQuery, args=[], FeatureType=bigbrain_intensity_profile.BigBrainIntensityProfile):
+ class BigBrainProfileQuery(
+     query.LiveQuery,
+     args=[],
+     FeatureType=bigbrain_intensity_profile.BigBrainIntensityProfile,
+ ):

      def __init__(self):
          query.LiveQuery.__init__(self)

-     def query(self, concept: structure.BrainStructure, **kwargs) -> List[bigbrain_intensity_profile.BigBrainIntensityProfile]:
+     def query(
+         self, concept: structure.BrainStructure, **kwargs
+     ) -> List[bigbrain_intensity_profile.BigBrainIntensityProfile]:
          loader = WagstylProfileLoader()
-         mesh_vertices = pointcloud.PointCloud(loader._vertices, space='bigbrain')
-         matched = concept.intersection(mesh_vertices)  # returns a reduced PointCloud with og indices as labels
+         mesh_vertices = pointcloud.PointCloud(loader._vertices, space="bigbrain")
+         matched = concept.intersection(
+             mesh_vertices
+         )  # returns a reduced PointCloud with og indices as labels
          if matched is None:
              return []
          if isinstance(matched, point.Point):
@@ -134,21 +153,21 @@ class BigBrainProfileQuery(query.LiveQuery, args=[], FeatureType=bigbrain_intens
          features = []
          for i in indices:
              anchor = _anchor.AnatomicalAnchor(
-                 location=point.Point(loader._vertices[i], space='bigbrain'),
+                 location=point.Point(loader._vertices[i], space="bigbrain"),
                  region=str(concept),
-                 species='Homo sapiens'
+                 species="Homo sapiens",
              )
              prof = bigbrain_intensity_profile.BigBrainIntensityProfile(
                  anchor=anchor,
                  depths=loader.profile_labels,
                  values=loader._profiles[i],
-                 boundaries=loader._boundary_depths[i]
+                 boundaries=loader._boundary_depths[i],
              )
              prof.anchor._assignments[concept] = _anchor.AnatomicalAssignment(
                  query_structure=concept,
                  assigned_structure=concept,
                  qualification=_anchor.Qualification.CONTAINED,
-                 explanation=f"Surface vertex of BigBrain cortical profile was filtered using {concept}"
+                 explanation=f"Surface vertex of BigBrain cortical profile was filtered using {concept}",
              )
              prof.datasets = [WagstylProfileLoader.DATASET]
              features.append(prof)
@@ -156,16 +175,24 @@ class BigBrainProfileQuery(query.LiveQuery, args=[], FeatureType=bigbrain_intens
          return features


- class LayerwiseBigBrainIntensityQuery(query.LiveQuery, args=[], FeatureType=layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities):
+ class LayerwiseBigBrainIntensityQuery(
+     query.LiveQuery,
+     args=[],
+     FeatureType=layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities,
+ ):

      def __init__(self):
          query.LiveQuery.__init__(self)

-     def query(self, concept: structure.BrainStructure, **kwargs) -> List[layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities]:
+     def query(
+         self, concept: structure.BrainStructure, **kwargs
+     ) -> List[layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities]:

          loader = WagstylProfileLoader()
-         mesh_vertices = pointcloud.PointCloud(loader._vertices, space='bigbrain')
-         matched = concept.intersection(mesh_vertices)  # returns a reduced PointCloud with og indices as labels if the concept is a region
+         mesh_vertices = pointcloud.PointCloud(loader._vertices, space="bigbrain")
+         matched = concept.intersection(
+             mesh_vertices
+         )  # returns a reduced PointCloud with og indices as labels if the concept is a region
          if matched is None:
              return []
          if isinstance(matched, point.Point):
@@ -180,27 +207,188 @@ class LayerwiseBigBrainIntensityQuery(query.LiveQuery, args=[], FeatureType=laye
          # compute array of layer labels for all coefficients in profiles_left
          N = matched_profiles.shape[1]
          prange = np.arange(N)
-         layer_labels = 7 - np.array([
-             [np.array([[(prange < T) * 1] for i, T in enumerate((b * N).astype('int'))]).squeeze().sum(0)]
-             for b in boundary_depths
-         ]).reshape((-1, 200))
+         layer_labels = 7 - np.array(
+             [
+                 [
+                     np.array(
+                         [
+                             [(prange < T) * 1]
+                             for i, T in enumerate((b * N).astype("int"))
+                         ]
+                     )
+                     .squeeze()
+                     .sum(0)
+                 ]
+                 for b in boundary_depths
+             ]
+         ).reshape((-1, 200))

          anchor = _anchor.AnatomicalAnchor(
-             location=pointcloud.PointCloud(loader._vertices[indices, :], space='bigbrain'),
+             location=pointcloud.PointCloud(
+                 loader._vertices[indices, :], space="bigbrain"
+             ),
              region=str(concept),
-             species='Homo sapiens'
+             species="Homo sapiens",
          )
          result = layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities(
              anchor=anchor,
-             means=[matched_profiles[layer_labels == layer].mean() for layer in range(1, 7)],
-             stds=[matched_profiles[layer_labels == layer].std() for layer in range(1, 7)],
+             means=[
+                 matched_profiles[layer_labels == layer].mean() for layer in range(1, 7)
+             ],
+             stds=[
+                 matched_profiles[layer_labels == layer].std() for layer in range(1, 7)
+             ],
          )
          result.anchor._assignments[concept] = _anchor.AnatomicalAssignment(
              query_structure=concept,
              assigned_structure=concept,
              qualification=_anchor.Qualification.CONTAINED,
-             explanation=f"Surface vertices of BigBrain cortical profiles were filtered using {concept}"
+             explanation=f"Surface vertices of BigBrain cortical profiles were filtered using {concept}",
          )
          result.datasets = [WagstylProfileLoader.DATASET]

          return [result]
+
+
+ class BigBrain1MicronPatchQuery(
+     query.LiveQuery, args=[], FeatureType=BigBrain1MicronPatch
+ ):
+     """
+     Sample approximately orthogonal cortical image patches from BigBrain
+     1 micron sections, guided by an image volume in a supported reference
+     space. The image volume is used as a weighted mask to extract patches
+     along the cortical midsurface with nonzero weights in the input image.
+     An optional lower_threshold can be used to narrow down the search.
+     The weight is stored with the resulting features.
+     """
+
+     def __init__(self, lower_threshold=0.):
+         self.layermap = get_registry("Map").get("cortical layers bigbrain")
+         self.lower_threshold = lower_threshold
+         query.LiveQuery.__init__(self)
+
+     def query(
+         self, concept: structure.BrainStructure, **kwargs
+     ) -> List[BigBrain1MicronPatch]:
+
+         # make sure input is an image volume
+         # TODO function should be extended to deal with other concepts as well
+         if not isinstance(concept, Volume):
+             logger.warning(
+                 "Querying BigBrain1MicronPatch features requires to "
+                 "query with an image volume."
+             )
+             return []
+
+         # threshold image volume, if requested
+         if self.lower_threshold > 0.0:
+             logger.info(
+                 f"Applying lower threshold of {self.lower_threshold} "
+                 "for BigBrain 1 micron patch query."
+             )
+             img = concept.fetch()
+             arr = np.asanyarray(img.dataobj)
+             arr[arr < self.lower_threshold] = 0
+             query_vol = from_array(arr, img.affine, space=concept.space, name="filtered volume")
+         else:
+             query_vol = concept
+         bb_bbox = query_vol.get_boundingbox().warp('bigbrain')
+
+         # find 1 micron BigBrain sections intersecting the thresholded volume
+         sections: List[CellbodyStainedSection] = [
+             s
+             for s in CellbodyStainedSection._get_instances()
+             if isinstance(s, CellbodyStainedSection)
+             and s.get_boundingbox(clip=False).intersects(query_vol)
+         ]
+         if not sections:
+             return []
+
+         # extract relevant patches
+         features = []
+         for hemisphere in ["left", "right"]:
+
+             # get layer 4 mesh in the hemisphere
+             l4 = self.layermap.parcellation.get_region("4 " + hemisphere)
+             l4mesh = self.layermap.fetch(l4, format="mesh")
+             layerverts = {
+                 n: self.layermap.fetch(region=n, format="mesh")["verts"]
+                 for n in self.layermap.regions if hemisphere in n
+             }
+             l4verts = pointcloud.PointCloud(layerverts[l4.name], "bigbrain")
+             if not l4verts.boundingbox.intersects(bb_bbox):
+                 continue
+
+             # for each relevant BigBrain 1 micron section, intersect the layer IV
+             # mesh to obtain midcortex locations, and build their orthogonal
+             # patches. store the concept's value with the patch.
+             vertex_tree = KDTree(layerverts[l4.name])
+             for s in siibra_tqdm(
+                 sections, unit="sections", desc=f"Sampling patches in {hemisphere} hemisphere"
+             ):
+
+                 # compute layer IV contour in the image plane
+                 imgplane = experimental.Plane.from_image(s)
+                 try:
+                     contour_segments = imgplane.intersect_mesh(l4mesh)
+                 except AssertionError:
+                     logger.error(f"Could not intersect with layer 4 mesh: {s.name}")
+                     continue
+                 if len(contour_segments) == 0:
+                     continue
+
+                 # score the contour points with the query image volume
+                 all_points = pointcloud.from_points(sum(map(list, contour_segments), []))
+                 all_probs = query_vol.evaluate_points(all_points)
+                 points_prob_lookup = {
+                     pt.coordinate: prob
+                     for pt, prob in zip(all_points, all_probs)
+                     if prob >= self.lower_threshold
+                 }
+                 if len(points_prob_lookup) == 0:
+                     continue
+
+                 # For each contour point,
+                 # - find the closest BigBrain layer surface vertex,
+                 # - build the profile of corresponding vertices across all layers,
+                 # - project the profile to the image section,
+                 # - determine the oriented patch along the profile.
+                 _, indices = vertex_tree.query(np.array(list(points_prob_lookup.keys())))
+                 for prob, nnb in zip(points_prob_lookup.values(), indices):
+
+                     prof = pointcloud.Contour(
+                         [
+                             layerverts[_][nnb]
+                             for _ in self.layermap.regions
+                             if hemisphere in _
+                         ],
+                         space=self.layermap.space,
+                     )
+                     patch = imgplane.get_enclosing_patch(prof)
+                     if patch is None:
+                         continue
+
+                     anchor = _anchor.AnatomicalAnchor(
+                         location=patch, species="Homo sapiens"
+                     )
+                     anchor._assignments[concept] = _anchor.AnatomicalAssignment(
+                         query_structure=query_vol,
+                         assigned_structure=s.anchor.volume,
+                         qualification=_anchor.Qualification.CONTAINED
+                     )
+                     features.append(
+                         BigBrain1MicronPatch(
+                             patch=patch,
+                             profile=prof,
+                             section=s,
+                             vertex=nnb,
+                             relevance=prob,
+                             anchor=anchor
+                         )
+                     )
+
+         # return the patches sorted by relevance (i.e. probability)
+         return sorted(features, key=lambda p: p.relevance, reverse=True)
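
Usage note: a minimal sketch of querying the new feature type. This assumes, as with the other live queries in this module, that the query is dispatched through siibra.features.get and that extra keyword arguments are forwarded to the query constructor; the region and map names below are illustrative only:

import siibra

# guide the patch search with a statistical (probability) map in BigBrain space
region = siibra.get_region("julich 3.0", "hoc1 left")
pmap = region.get_regional_map(space="bigbrain", maptype="statistical")
patches = siibra.features.get(pmap, "BigBrain1MicronPatch", lower_threshold=0.5)
best = patches[0]  # results are sorted by relevance, i.e. the map value at the patch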
siibra/livequeries/ebrains.py

@@ -14,18 +14,21 @@
  # limitations under the License.
  """Query data features published as Ebrains datasets with AtlasConcepts"""

- from ..features.dataset import ebrains as _ebrains
- from . import query
+ from collections import defaultdict
+ import re
+ from packaging.version import Version
+ from tempfile import NamedTemporaryFile
+ from typing import TYPE_CHECKING

+ from . import query
+ from ..features.dataset import ebrains as _ebrains
  from ..commons import logger, siibra_tqdm
  from ..features import anchor as _anchor
  from ..retrieval import requests, datasets, cache
- from ..core import parcellation, region
+ from ..core.concept import get_registry

- from collections import defaultdict
- import re
- from packaging.version import Version
- from tempfile import NamedTemporaryFile
+ if TYPE_CHECKING:
+     from ..core.region import Region


  class EbrainsFeatureQuery(query.LiveQuery, args=[], FeatureType=_ebrains.EbrainsDataFeature):
@@ -58,17 +61,17 @@ class EbrainsFeatureQuery(query.LiveQuery, args=[], FeatureType=_ebrains.Ebrains
          if self.__class__.parcellation_ids is None:
              self.__class__.parcellation_ids = [
                  dset.id
-                 for parc in parcellation.Parcellation.registry()
+                 for parc in get_registry("Parcellation")
                  for dset in parc.datasets
                  if isinstance(dset, datasets.EbrainsV3DatasetVersion)
              ]

-     def query(self, region: region.Region):
+     def query(self, region: "Region"):
          versioned_datasets = defaultdict(dict)
          invalid_species_datasets = {}
          results = self.loader.data.get("results", [])

-         for r in siibra_tqdm(results, total=len(results)):
+         for r in siibra_tqdm(results, total=len(results), unit='dataset', desc=f'Matching datasets to "{region}"'):
              regionname = r.get("name", None)
              alias = r.get("alias", None)
              for ds_spec in r.get("datasets", []):
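
Note: the move from parcellation.Parcellation.registry() to get_registry("Parcellation") (and get_registry("Map") in bigbrain.py above) drops the eager import of the core modules, presumably to break import cycles. A small sketch of the new access pattern, grounded in the calls visible in this diff:

from siibra.core.concept import get_registry

parcellations = get_registry("Parcellation")  # was: parcellation.Parcellation.registry()
layermap = get_registry("Map").get("cortical layers bigbrain")
for parc in parcellations:
    print(parc.name)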
siibra/livequeries/query.py

@@ -14,13 +14,13 @@
  # limitations under the License.
  """Handles feature queries that rely on live or on-the-fly calculations."""

+ from abc import ABC, abstractmethod
+ from typing import List
+
  from ..commons import logger
  from ..features.feature import Feature
  from ..core.concept import AtlasConcept

- from abc import ABC, abstractmethod
- from typing import List
-


  class LiveQuery(ABC):
siibra/locations/__init__.py

@@ -13,26 +13,34 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  """Handles spatial concepts and spatial operation like warping between spaces."""
+ from typing import Iterable
+ from functools import reduce

  from .location import Location
  from .point import Point
- from .pointcloud import PointCloud, from_points
+ from .pointcloud import PointCloud, Contour, from_points
+ from .experimental import AxisAlignedPatch, Plane
  from .boundingbox import BoundingBox


- def reassign_union(loc0: 'Location', loc1: 'Location') -> 'Location':
+ def reassign_union(*args: Iterable["Location"]) -> "Location":
+     return reduce(pairwise_union, args)
+
+
+ def pairwise_union(loc0: "Location", loc1: "Location") -> "Location":
      """
      Add two locations of same or different type to find their union as a
      Location object.
+
      Note
      ----
      `loc1` will be warped to `loc0` if they are not in the same space.
+
      Parameters
      ----------
      loc0 : Location
-         _description_
      loc1 : Location
-         _description_
+
      Returns
      -------
      Location
@@ -58,12 +66,13 @@ def reassign_union(loc0: 'Location', loc1: 'Location') -> 'Location':
      try:
          return loc1.union(loc0)
      except Exception:
-         raise NotImplementedError(f"There are no union method for {(loc0.__class__.__name__, loc1.__class__.__name__)}")
+         raise NotImplementedError(
+             f"There are no union method for {(loc0.__class__.__name__, loc1.__class__.__name__)}"
+         )

      # convert Points to PointClouds
      loc0, loc1 = [
-         from_points([loc]) if isinstance(loc, Point) else loc
-         for loc in [loc0, loc1]
+         from_points([loc]) if isinstance(loc, Point) else loc for loc in [loc0, loc1]
      ]

      # adopt the space of the first location
@@ -82,7 +91,7 @@ def reassign_union(loc0: 'Location', loc1: 'Location') -> 'Location':
          point1=[min(p[i] for p in coordinates) for i in range(3)],
          point2=[max(p[i] for p in coordinates) for i in range(3)],
          space=loc0.space,
-         sigma_mm=[loc0.minpoint.sigma, loc0.maxpoint.sigma]
+         sigma_mm=[loc0.minpoint.sigma, loc0.maxpoint.sigma],
      )

      return reassign_union(loc1_w, loc0)
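
Note: reassign_union is now variadic and folds any number of locations left to right through pairwise_union. A minimal sketch (coordinates are made up; Point construction follows its use elsewhere in this diff):

from siibra.locations import Point, reassign_union

p0 = Point((0.0, 0.0, 0.0), space="bigbrain")
p1 = Point((1.0, 1.0, 1.0), space="bigbrain")
p2 = Point((2.0, 2.0, 2.0), space="bigbrain")

# equivalent to pairwise_union(pairwise_union(p0, p1), p2)
combined = reassign_union(p0, p1, p2)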
siibra/locations/boundingbox.py

@@ -14,15 +14,16 @@
  # limitations under the License.
  """A box defined by two farthest corner coordinates on a specific space."""

- from . import point, pointcloud, location
+ from itertools import product
+ import hashlib
+ from typing import TYPE_CHECKING, Union, Dict

+ import numpy as np
+
+ from . import location, point, pointcloud
  from ..commons import logger
  from ..exceptions import SpaceWarpingFailedError

- from itertools import product
- import hashlib
- import numpy as np
- from typing import TYPE_CHECKING, Union
  if TYPE_CHECKING:
      from ..core.structure import BrainStructure
      from nibabel import Nifti1Image
@@ -85,7 +86,7 @@ class BoundingBox(location.Location):
              self.maxpoint[d] = self.minpoint[d] + minsize

          if self.volume == 0:
-             logger.warning(f"Zero-volume bounding box from points {point1} and {point2} in {self.space} space.")
+             logger.debug(f"Zero-volume bounding box from points {point1} and {point2} in {self.space} space.")

      @property
      def id(self) -> str:
@@ -281,14 +282,15 @@ class BoundingBox(location.Location):
              sigma_mm=np.mean([self.minpoint.sigma, self.maxpoint.sigma])
          )

-     def warp(self, space):
+     def warp(self, space: Union[str, Dict, "Space"]):
          """Returns a new bounding box obtained by warping the
          min- and maxpoint of this one into the new target space.

          TODO process the sigma values of the points
          """
          from ..core.space import Space
-         spaceobj = Space.get_instance(space)
+
+         spaceobj = space if isinstance(space, Space) else Space.get_instance(space)
          if spaceobj == self.space:
              return self
          else:
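
Note: warp() now accepts a Space instance in addition to a string or dict spec. A small sketch (the BoundingBox keywords follow the constructor call visible above; registry access by key is assumed):

import siibra
from siibra.locations import BoundingBox

bbox = BoundingBox(point1=(-10, -10, -10), point2=(10, 10, 10), space="mni152")
bigbrain = siibra.spaces["bigbrain"]  # a Space instance

warped = bbox.warp(bigbrain)      # Space objects are accepted directly
warped_2 = bbox.warp("bigbrain")  # string specs keep working as before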