siibra 1.0a19__py3-none-any.whl → 1.0.1a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of siibra might be problematic.

Files changed (82)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +7 -7
  3. siibra/commons.py +8 -53
  4. siibra/configuration/__init__.py +1 -1
  5. siibra/configuration/configuration.py +1 -1
  6. siibra/configuration/factory.py +11 -21
  7. siibra/core/__init__.py +1 -1
  8. siibra/core/assignment.py +1 -1
  9. siibra/core/atlas.py +21 -15
  10. siibra/core/concept.py +3 -3
  11. siibra/core/parcellation.py +69 -54
  12. siibra/core/region.py +178 -158
  13. siibra/core/space.py +1 -1
  14. siibra/core/structure.py +2 -2
  15. siibra/exceptions.py +13 -1
  16. siibra/experimental/__init__.py +1 -1
  17. siibra/experimental/contour.py +8 -8
  18. siibra/experimental/cortical_profile_sampler.py +1 -1
  19. siibra/experimental/patch.py +3 -3
  20. siibra/experimental/plane3d.py +12 -12
  21. siibra/explorer/__init__.py +1 -1
  22. siibra/explorer/url.py +2 -2
  23. siibra/explorer/util.py +1 -1
  24. siibra/features/__init__.py +1 -1
  25. siibra/features/anchor.py +14 -15
  26. siibra/features/connectivity/__init__.py +1 -1
  27. siibra/features/connectivity/functional_connectivity.py +1 -1
  28. siibra/features/connectivity/regional_connectivity.py +4 -4
  29. siibra/features/connectivity/streamline_counts.py +1 -1
  30. siibra/features/connectivity/streamline_lengths.py +1 -1
  31. siibra/features/connectivity/tracing_connectivity.py +1 -1
  32. siibra/features/dataset/__init__.py +1 -1
  33. siibra/features/dataset/ebrains.py +1 -1
  34. siibra/features/feature.py +24 -26
  35. siibra/features/image/__init__.py +1 -1
  36. siibra/features/image/image.py +2 -2
  37. siibra/features/image/sections.py +1 -1
  38. siibra/features/image/volume_of_interest.py +1 -1
  39. siibra/features/tabular/__init__.py +1 -1
  40. siibra/features/tabular/bigbrain_intensity_profile.py +2 -2
  41. siibra/features/tabular/cell_density_profile.py +98 -64
  42. siibra/features/tabular/cortical_profile.py +3 -3
  43. siibra/features/tabular/gene_expression.py +1 -1
  44. siibra/features/tabular/layerwise_bigbrain_intensities.py +1 -1
  45. siibra/features/tabular/layerwise_cell_density.py +4 -23
  46. siibra/features/tabular/receptor_density_fingerprint.py +13 -10
  47. siibra/features/tabular/receptor_density_profile.py +1 -1
  48. siibra/features/tabular/regional_timeseries_activity.py +4 -4
  49. siibra/features/tabular/tabular.py +7 -5
  50. siibra/livequeries/__init__.py +1 -1
  51. siibra/livequeries/allen.py +42 -19
  52. siibra/livequeries/bigbrain.py +21 -12
  53. siibra/livequeries/ebrains.py +1 -1
  54. siibra/livequeries/query.py +2 -3
  55. siibra/locations/__init__.py +11 -11
  56. siibra/locations/boundingbox.py +30 -29
  57. siibra/locations/location.py +1 -1
  58. siibra/locations/point.py +7 -7
  59. siibra/locations/{pointset.py → pointcloud.py} +36 -33
  60. siibra/retrieval/__init__.py +1 -1
  61. siibra/retrieval/cache.py +1 -1
  62. siibra/retrieval/datasets.py +4 -4
  63. siibra/retrieval/exceptions/__init__.py +1 -1
  64. siibra/retrieval/repositories.py +13 -30
  65. siibra/retrieval/requests.py +25 -8
  66. siibra/vocabularies/__init__.py +1 -1
  67. siibra/volumes/__init__.py +2 -2
  68. siibra/volumes/parcellationmap.py +119 -91
  69. siibra/volumes/providers/__init__.py +1 -1
  70. siibra/volumes/providers/freesurfer.py +3 -3
  71. siibra/volumes/providers/gifti.py +1 -1
  72. siibra/volumes/providers/neuroglancer.py +67 -41
  73. siibra/volumes/providers/nifti.py +12 -26
  74. siibra/volumes/providers/provider.py +1 -1
  75. siibra/volumes/sparsemap.py +125 -246
  76. siibra/volumes/volume.py +150 -61
  77. {siibra-1.0a19.dist-info → siibra-1.0.1a1.dist-info}/METADATA +26 -4
  78. siibra-1.0.1a1.dist-info/RECORD +84 -0
  79. {siibra-1.0a19.dist-info → siibra-1.0.1a1.dist-info}/WHEEL +1 -1
  80. siibra-1.0a19.dist-info/RECORD +0 -84
  81. {siibra-1.0a19.dist-info → siibra-1.0.1a1.dist-info}/LICENSE +0 -0
  82. {siibra-1.0a19.dist-info → siibra-1.0.1a1.dist-info}/top_level.txt +0 -0
siibra/experimental/contour.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,22 +13,22 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

- from ..locations import point, pointset, boundingbox
+ from ..locations import point, pointcloud, boundingbox

  import numpy as np


- class Contour(pointset.PointSet):
+ class Contour(pointcloud.PointCloud):
  """
- A PointSet that represents a contour line.
+ A PointCloud that represents a contour line.
  The only difference is that the point order is relevant,
  and consecutive points are thought as being connected by an edge.

- In fact, PointSet assumes order as well, but no connections between points.
+ In fact, PointCloud assumes order as well, but no connections between points.
  """

  def __init__(self, coordinates, space=None, sigma_mm=0, labels: list = None):
- pointset.PointSet.__init__(self, coordinates, space, sigma_mm, labels)
+ pointcloud.PointCloud.__init__(self, coordinates, space, sigma_mm, labels)

  def crop(self, voi: boundingbox.BoundingBox):
  """
@@ -45,8 +45,8 @@ class Contour(pointset.PointSet):
  cropped = self.intersection(voi)

  if cropped is not None and not isinstance(cropped, point.Point):
- assert isinstance(cropped, pointset.PointSet)
- # Identifiy contour splits are by discontinuouities ("jumps")
+ assert isinstance(cropped, pointcloud.PointCloud)
+ # Identify contour splits are by discontinuouities ("jumps")
  # of their labels, which denote positions in the original contour
  jumps = np.diff([self.labels.index(lb) for lb in cropped.labels])
  splits = [0] + list(np.where(jumps > 1)[0] + 1) + [len(cropped)]
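
The change running through this file, and most files below, is the rename of the locations module pointset and its PointSet class to pointcloud / PointCloud. A minimal downstream migration sketch, assuming the constructor keeps the (coordinates, space, ...) signature visible in the Contour.__init__ call above; the "mni152" space keyword is an assumption carried over from earlier releases:

# Hypothetical migration sketch for the pointset -> pointcloud rename.
# before (siibra 1.0a19):
#   from siibra.locations import pointset
#   points = pointset.PointSet([[1, 2, 3], [4, 5, 6]], space="mni152")
# after (siibra 1.0.1a1):
from siibra.locations import pointcloud
points = pointcloud.PointCloud([[1, 2, 3], [4, 5, 6]], space="mni152")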
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
siibra/experimental/patch.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +14,7 @@
  # limitations under the License.

  from ..volumes import volume
- from ..locations import pointset, boundingbox
+ from ..locations import pointcloud, boundingbox
  from ..commons import translation_matrix, y_rotation_matrix

  import numpy as np
@@ -24,7 +24,7 @@ from nilearn import image

  class Patch:

- def __init__(self, corners: pointset.PointSet):
+ def __init__(self, corners: pointcloud.PointCloud):
  """Construct a patch in physical coordinates.
  As of now, only patches aligned in the y plane of the physical space
  are supported."""
siibra/experimental/plane3d.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,7 +15,7 @@

  from . import contour
  from . import patch
- from ..locations import point, pointset
+ from ..locations import point, pointcloud
  from ..volumes import volume

  import numpy as np
@@ -33,7 +33,7 @@ class Plane3D:
  The plane's reference space is defined by the first point.
  """
  self.space = point1.space
- # normal is the cross product of two arbitray in-plane vectors
+ # normal is the cross product of two arbitrary in-plane vectors
  n = np.cross(
  (point2.warp(self.space) - point1).coordinate,
  (point3.warp(self.space) - point1).coordinate,
@@ -93,9 +93,9 @@ class Plane3D:
  and an Mx3 array "faces" of face definitions.
  Each row in the face array corresponds to the three indices of vertices making up the
  triangle.
- The result is a list of contour segments, each represented as a PointSet
+ The result is a list of contour segments, each represented as a PointCloud
  holding the ordered list of contour points.
- The point labels in each "contour" PointSet hold the index of the face in the
+ The point labels in each "contour" PointCloud hold the index of the face in the
  mesh which made up each contour point.
  """

@@ -107,7 +107,7 @@ class Plane3D:
  )[0]
  faces = mesh["faces"][face_indices]

- # for each of N selected faces, indicate wether we cross the plane
+ # for each of N selected faces, indicate whether we cross the plane
  # as we go from vertex 2->0, 0->1, 1->2, respectively.
  # This gives us an Nx3 array, where forward crossings are identified by 1,
  # and backward crossings by -1.
@@ -160,7 +160,7 @@ class Plane3D:
  face_id = 0 # index of the mesh face to consider
  while len(face_indices) > 0:

- # continue the contour with the next foward edge intersection
+ # continue the contour with the next forward edge intersection
  p = fwd_intersections[face_id]
  points.append(p)
  # Remember the ids of the face and start-/end vertices for the point
@@ -185,17 +185,17 @@ class Plane3D:

  return result

- def project_points(self, points: pointset.PointSet):
+ def project_points(self, points: pointcloud.PointCloud):
  """projects the given points onto the plane."""
  assert self.space == points.space
  XYZ = points.coordinates
  N = XYZ.shape[0]
  dists = np.dot(self._n, XYZ.T) - self._d
- return pointset.PointSet(
+ return pointcloud.PointCloud(
  XYZ - np.tile(self._n, (N, 1)) * dists[:, np.newaxis], space=self.space
  )

- def get_enclosing_patch(self, points: pointset.PointSet, margin=[0.5, 0.5]):
+ def get_enclosing_patch(self, points: pointcloud.PointCloud, margin=[0.5, 0.5]):
  """
  Computes the enclosing patch in the given plane
  which contains the projections of the given points.
@@ -225,7 +225,7 @@ class Plane3D:

  m0, m1 = margin
  w = np.linalg.norm(p3 - p2)
- corners = pointset.PointSet(
+ corners = pointcloud.PointCloud(
  [
  p1 + (w / 2 + m1) * v2 + m0 * v1,
  p0 + (w / 2 + m1) * v2 - m0 * v1,
@@ -249,7 +249,7 @@ class Plane3D:
  assert isinstance(image, volume.Volume)
  im_lowres = image.fetch(resolution_mm=1)
  plane_dims = np.where(np.argsort(im_lowres.shape) < 2)[0]
- voxels = pointset.PointSet(
+ voxels = pointcloud.PointCloud(
  np.vstack(([0, 0, 0], np.identity(3)[plane_dims])), space=None
  )
  points = voxels.transform(im_lowres.affine, space=image.space)
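
The project_points hunk above encodes a standard point-to-plane projection: with unit normal n and offset d (plane n·x = d), each point is shifted along n by its signed distance to the plane. A standalone numpy illustration of that same formula:

import numpy as np

# Plane n . x = d with unit normal n, as in Plane3D.project_points above.
n = np.array([0.0, 1.0, 0.0])
d = 2.0
XYZ = np.array([[1.0, 5.0, 0.0],
                [3.0, -1.0, 2.0]])
dists = XYZ @ n - d                   # signed distances, cf. np.dot(self._n, XYZ.T) - self._d
projected = XYZ - np.outer(dists, n)  # equivalent to the np.tile(...) * dists[:, np.newaxis] term
print(projected)                      # every projected point ends up with y == 2.0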
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
siibra/explorer/url.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -211,7 +211,7 @@ def get_hash(full_string: str):
  for char in full_string:
  # overflowing is expected and in fact the whole reason why convert number to int32

- # in windows, int32((0 - min_int32) << 5), rather than overflow to wraper around, raises OverflowError
+ # in windows, int32((0 - min_int32) << 5), rather than overflow to wrapper around, raises OverflowError
  shifted_5 = int32(
  (return_val - min_int32) if return_val > max_int32 else return_val << 5
  )
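
The comment fixed above concerns emulating 32-bit signed overflow, which numpy's int32 handles inconsistently across platforms (raising OverflowError on Windows instead of wrapping). A hypothetical sketch, not siibra's get_hash, of forcing a Java-style string hash into int32 range with plain Python masking so it wraps on every platform:

# Hypothetical helper (not part of siibra): wrap a value into signed 32-bit range.
def wrap_int32(value: int) -> int:
    value &= 0xFFFFFFFF                         # keep the low 32 bits
    return value - 0x100000000 if value >= 0x80000000 else value

def string_hash(text: str) -> int:
    h = 0
    for ch in text:
        h = wrap_int32((h << 5) - h + ord(ch))  # h * 31 + ord(ch), wrapped to int32
    return h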
siibra/explorer/util.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
siibra/features/anchor.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,7 +19,7 @@ from ..commons import Species, logger
  from ..core.structure import BrainStructure
  from ..core.assignment import AnatomicalAssignment, Qualification
  from ..locations.location import Location
- from ..core.parcellation import Parcellation
+ from ..core.parcellation import Parcellation, find_regions
  from ..core.region import Region
  from ..core.space import Space
  from ..exceptions import SpaceWarpingFailedError
@@ -88,7 +88,7 @@ class AnatomicalAnchor:

  @property
  def space(self) -> Space:
- # may be overriden by derived classes, e.g. in features.VolumeOfInterest
+ # may be overridden by derived classes, e.g. in features.VolumeOfInterest
  return None if self.location is None else self.location.space

  @property
@@ -126,13 +126,13 @@ class AnatomicalAnchor:
  # decode the region specification into a dict of region objects and assignment qualifications
  regions = {
  region: Qualification.EXACT
- for region in Parcellation.find_regions(self._regionspec)
+ for region in find_regions(self._regionspec, filter_children=True, find_topmost=False)
  if region.species in self.species
  }
  # add more regions from possible aliases of the region spec
  for alt_species, aliases in self.region_aliases.items():
  for alias_regionspec, qualificationspec in aliases.items():
- for r in Parcellation.find_regions(alias_regionspec):
+ for r in find_regions(alias_regionspec, filter_children=True, find_topmost=False):
  if r.species != alt_species:
  continue
  if r not in regions:
@@ -156,18 +156,17 @@ class AnatomicalAnchor:
  else:
  return region + separator + location

- def assign(self, concept: BrainStructure, restrict_space: bool = False) -> AnatomicalAssignment:
+ def assign(self, concept: Union[BrainStructure, Space]) -> AnatomicalAssignment:
  """
  Match this anchor to a query concept. Assignments are cached at runtime,
  so repeated assignment with the same concept will be cheap.
  """
- if (
- restrict_space
- and self.location is not None
- and isinstance(concept, Location)
- and not self.location.space.matches(concept.space)
- ):
- return []
+ if isinstance(concept, Space):
+ if self.location is not None and self.location.space.matches(concept):
+ return [AnatomicalAssignment(concept, self.location, Qualification.CONTAINED)]
+ else:
+ return []
+
  if concept not in self._assignments:
  assignments: List[AnatomicalAssignment] = []
  if self.location is not None:
@@ -184,8 +183,8 @@
  else None
  return self._assignments[concept]

- def matches(self, concept: BrainStructure, restrict_space: bool = False) -> bool:
- return len(self.assign(concept, restrict_space)) > 0
+ def matches(self, concept: Union[BrainStructure, Space]) -> bool:
+ return len(self.assign(concept)) > 0

  def represented_parcellations(self) -> List[Parcellation]:
  """
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
siibra/features/connectivity/regional_connectivity.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,7 +21,7 @@ from .. import anchor as _anchor

  from ...commons import logger, QUIET, siibra_tqdm
  from ...core import region as _region
- from ...locations import pointset
+ from ...locations import pointcloud
  from ...retrieval.repositories import RepositoryConnector
  from ...retrieval.requests import HttpRequest

@@ -275,7 +275,7 @@ class RegionalConnectivity(Feature, Compoundable):

  regions = [r for r in matrix.index if matches(r, region)]
  if len(regions) == 0:
- raise ValueError(f"Invalid region specificiation: {region}")
+ raise ValueError(f"Invalid region specification: {region}")
  elif len(regions) > 1:
  raise ValueError(f"Region specification {region} matched more than one profile: {regions}")
  else:
@@ -440,7 +440,7 @@ class RegionalConnectivity(Feature, Compoundable):
  found = [r for r in region if r.name in all_centroids]
  assert len(found) > 0
  result.append(
- tuple(pointset.PointSet(
+ tuple(pointcloud.PointCloud(
  [all_centroids[r.name] for r in found], space=space
  ).centroid)
  )
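
The centroid computation above pools several region centroids into one PointCloud and takes its centroid property. A short sketch of the same idea, assuming PointCloud keeps the centroid property its PointSet predecessor exposed and accepts a space spec string:

from siibra.locations import pointcloud

# Hypothetical centroid coordinates of three matched regions (mm).
region_centroids = [[10.0, -20.0, 30.0], [12.0, -18.0, 28.0], [11.0, -19.0, 29.0]]
cloud = pointcloud.PointCloud(region_centroids, space="mni152")
print(tuple(cloud.centroid))   # mean position of the pooled centroids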
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
siibra/features/feature.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -53,7 +53,7 @@ _README_TMPL = """
  Downloaded from siibra toolsuite.
  siibra-python version: {version}

- All releated resources (e.g. doi, web resources) are categorized under publications.
+ All related resources (e.g. doi, web resources) are categorized under publications.

  Name
  ----
@@ -199,9 +199,9 @@ class Feature:
  @property
  def authors(self):
  return [
- contributer['name']
+ contributor['name']
  for ds in self.datasets
- for contributer in ds.contributors
+ for contributor in ds.contributors
  ]

  @property
@@ -251,13 +251,16 @@ class Feature:
  """ Removes all instantiated object instances"""
  cls._preconfigured_instances = None

- def matches(self, concept: structure.BrainStructure, restrict_space: bool = False) -> bool:
+ def matches(
+ self,
+ concept: Union[structure.BrainStructure, space.Space],
+ ) -> bool:
  """
- Match the features anatomical anchor agains the given query concept.
+ Match the features anatomical anchor against the given query concept.
  Record the most recently matched concept for inspection by the caller.
  """
  # TODO: storing the last matched concept. It is not ideal, might cause problems in multithreading
- if self.anchor and self.anchor.matches(concept, restrict_space):
+ if self.anchor and self.anchor.matches(concept):
  self.anchor._last_matched_concept = concept
  return True
  self.anchor._last_matched_concept = None
@@ -483,10 +486,10 @@
  f"objects linked to {str(concept)}{argstr}"
  )
  q = QueryType(**kwargs)
- try:
+ if isinstance(concept, space.Space):
+ features = q.query(concept.get_template())
+ else:
  features = q.query(concept)
- except StopIteration:
- continue
  live_instances.extend(
  Feature._wrap_livequery_feature(f, Feature._serialize_query_context(f, concept))
  for f in features
@@ -497,9 +500,8 @@
  @classmethod
  def _match(
  cls,
- concept: structure.BrainStructure,
+ concept: Union[structure.BrainStructure, space.Space],
  feature_type: Union[str, Type['Feature'], list],
- restrict_space: bool = False,
  **kwargs
  ) -> List['Feature']:
  """
@@ -507,7 +509,7 @@
  This will
  - call Feature.match(concept) for any registered preconfigured features
  - run any registered live queries
- The preconfigured and live query instances are merged and returend as a list.
+ The preconfigured and live query instances are merged and returned as a list.

  If multiple feature types are given, recurse for each of them.

@@ -517,11 +519,7 @@
  concept: AtlasConcept
  An anatomical concept, typically a brain region or parcellation.
  feature_type: subclass of Feature, str
- specififies the type of features ("modality")
- restrict_space: bool: default: False
- If true, will skip features anchored at spatial locations of
- different spaces than the concept. Requires concept to be a
- Location.
+ specifies the type of features ("modality")
  """
  if isinstance(feature_type, list):
  # a list of feature types is given, collect match results on those
@@ -531,7 +529,7 @@
  )
  return list(dict.fromkeys(
  sum((
- cls._match(concept, t, restrict_space, **kwargs) for t in feature_type
+ cls._match(concept, t, **kwargs) for t in feature_type
  ), [])
  ))

@@ -548,15 +546,15 @@
  f"'{feature_type}' decoded as feature type/s: "
  f"{[c.__name__ for c in ftype_candidates]}."
  )
- return cls._match(concept, ftype_candidates, restrict_space, **kwargs)
+ return cls._match(concept, ftype_candidates, **kwargs)

  assert issubclass(feature_type, Feature)

  # At this stage, no recursion is needed.
  # We expect a specific supported feature type is to be matched now.
- if not isinstance(concept, structure.BrainStructure):
+ if not isinstance(concept, (structure.BrainStructure, space.Space)):
  raise ValueError(
- f"{concept.__class__.__name__} cannot be used for feature queries as it is not a BrainStructure type."
+ f"{concept.__class__.__name__} cannot be used for feature queries as it is not a `BrainStructure` or a `Space` type."

  )
  # Collect any preconfigured instances of the requested feature type
@@ -574,7 +572,7 @@
  total=len(instances),
  disable=(not instances)
  )
- if f.matches(concept, restrict_space)
+ if f.matches(concept)
  ]

  # Then run any registered live queries for the requested feature type
@@ -606,7 +604,7 @@
  if inst.id == feature_id
  ]
  if len(candidates) == 0:
- raise NotFoundException(f"No feature instance wth {feature_id} found.")
+ raise NotFoundException(f"No feature instance with {feature_id} found.")
  if len(candidates) == 1:
  return candidates[0]
  else:
@@ -721,7 +719,7 @@ class Compoundable(ABC):
  """
  Compute the merge data and create a merged instance from a set of
  elements of this class. This will be used by CompoundFeature to
- create the aggegated data and plot it. For example, to compute an
+ create the aggregated data and plot it. For example, to compute an
  average connectivity matrix from a set of subfeatures, we create a
  RegionalConnectivty feature.
  """
@@ -730,7 +728,7 @@ class Compoundable(ABC):

  class CompoundFeature(Feature):
  """
- A compound aggregating mutliple features of the same type, forming its
+ A compound aggregating multiple features of the same type, forming its
  elements. The anatomical anchors and data of the features is merged.
  Features need to subclass "Compoundable" to allow aggregation
  into a compound feature.
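
Taken together, the feature.py hunks show that feature queries now accept a Space in place of the removed restrict_space flag, and that live queries receive a space's template volume via concept.get_template(). A hedged usage sketch; the modality string and registry access are assumptions based on earlier siibra releases:

import siibra

bigbrain = siibra.spaces["bigbrain"]
# Preconfigured features anchored in this space match directly; live queries are
# run against the space's template volume, as the q.query(concept.get_template())
# branch above shows.
sections = siibra.features.get(bigbrain, "CellbodyStainedSection")
print(len(sections))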
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
siibra/features/image/image.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -44,7 +44,7 @@ class ImageAnchor(_anchor.AnatomicalAnchor):
  if self._location_cached is None:
  self._location_cached = self.volume.get_boundingbox(
  clip=False
- ) # use unclipped to preseve exisiting behaviour
+ ) # use unclipped to preserve existing behaviour
  return self._location_cached

  @property
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
siibra/features/tabular/bigbrain_intensity_profile.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2018-2024
+ # Copyright 2018-2025
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -65,7 +65,7 @@ class BigBrainIntensityProfile(

  @classmethod
  def _merge_anchors(cls, anchors: List['AnatomicalAnchor']):
- from ...locations.pointset import from_points
+ from ...locations.pointcloud import from_points
  from ...features.anchor import AnatomicalAnchor

  location = from_points([anchor.location for anchor in anchors])
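
The _merge_anchors hunk relies on the module-level helper from_points, now imported from pointcloud, to fuse individual point locations into one PointCloud. A hedged sketch of that helper's use, assuming it accepts a list of Point objects sharing one space:

from siibra.locations import point, pointcloud

pts = [
    point.Point([1.0, 2.0, 3.0], space="mni152"),
    point.Point([4.0, 5.0, 6.0], space="mni152"),
]
merged = pointcloud.from_points(pts)   # one PointCloud holding both coordinates
print(len(merged), merged.space)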