siibra 1.0.1a0__py3-none-any.whl → 1.0.1a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siibra might be problematic. Click here for more details.

Files changed (82) hide show
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +11 -20
  3. siibra/commons.py +17 -14
  4. siibra/configuration/__init__.py +1 -1
  5. siibra/configuration/configuration.py +6 -6
  6. siibra/configuration/factory.py +10 -9
  7. siibra/core/__init__.py +2 -2
  8. siibra/core/assignment.py +2 -1
  9. siibra/core/atlas.py +4 -4
  10. siibra/core/concept.py +7 -5
  11. siibra/core/parcellation.py +10 -10
  12. siibra/core/region.py +82 -73
  13. siibra/core/space.py +5 -7
  14. siibra/core/structure.py +4 -4
  15. siibra/exceptions.py +6 -2
  16. siibra/explorer/__init__.py +1 -1
  17. siibra/explorer/url.py +2 -2
  18. siibra/explorer/util.py +1 -1
  19. siibra/features/__init__.py +1 -1
  20. siibra/features/anchor.py +4 -6
  21. siibra/features/connectivity/__init__.py +1 -1
  22. siibra/features/connectivity/functional_connectivity.py +1 -1
  23. siibra/features/connectivity/regional_connectivity.py +12 -15
  24. siibra/features/connectivity/streamline_counts.py +1 -1
  25. siibra/features/connectivity/streamline_lengths.py +1 -1
  26. siibra/features/connectivity/tracing_connectivity.py +1 -1
  27. siibra/features/dataset/__init__.py +1 -1
  28. siibra/features/dataset/ebrains.py +2 -2
  29. siibra/features/feature.py +31 -28
  30. siibra/features/image/__init__.py +5 -3
  31. siibra/features/image/image.py +4 -6
  32. siibra/features/image/sections.py +82 -3
  33. siibra/features/image/volume_of_interest.py +1 -9
  34. siibra/features/tabular/__init__.py +2 -2
  35. siibra/features/tabular/bigbrain_intensity_profile.py +3 -2
  36. siibra/features/tabular/cell_density_profile.py +10 -11
  37. siibra/features/tabular/cortical_profile.py +9 -9
  38. siibra/features/tabular/gene_expression.py +7 -6
  39. siibra/features/tabular/layerwise_bigbrain_intensities.py +5 -4
  40. siibra/features/tabular/layerwise_cell_density.py +5 -7
  41. siibra/features/tabular/receptor_density_fingerprint.py +47 -19
  42. siibra/features/tabular/receptor_density_profile.py +2 -3
  43. siibra/features/tabular/regional_timeseries_activity.py +9 -9
  44. siibra/features/tabular/tabular.py +10 -9
  45. siibra/livequeries/__init__.py +1 -1
  46. siibra/livequeries/allen.py +23 -25
  47. siibra/livequeries/bigbrain.py +252 -55
  48. siibra/livequeries/ebrains.py +14 -11
  49. siibra/livequeries/query.py +5 -5
  50. siibra/locations/__init__.py +19 -10
  51. siibra/locations/boundingbox.py +10 -13
  52. siibra/{experimental/plane3d.py → locations/experimental.py} +117 -17
  53. siibra/locations/location.py +11 -13
  54. siibra/locations/point.py +10 -19
  55. siibra/locations/pointcloud.py +59 -23
  56. siibra/retrieval/__init__.py +1 -1
  57. siibra/retrieval/cache.py +2 -1
  58. siibra/retrieval/datasets.py +23 -17
  59. siibra/retrieval/exceptions/__init__.py +1 -1
  60. siibra/retrieval/repositories.py +14 -15
  61. siibra/retrieval/requests.py +32 -30
  62. siibra/vocabularies/__init__.py +2 -3
  63. siibra/volumes/__init__.py +5 -4
  64. siibra/volumes/parcellationmap.py +55 -20
  65. siibra/volumes/providers/__init__.py +1 -1
  66. siibra/volumes/providers/freesurfer.py +7 -7
  67. siibra/volumes/providers/gifti.py +5 -5
  68. siibra/volumes/providers/neuroglancer.py +25 -28
  69. siibra/volumes/providers/nifti.py +7 -7
  70. siibra/volumes/providers/provider.py +4 -3
  71. siibra/volumes/sparsemap.py +8 -7
  72. siibra/volumes/volume.py +33 -40
  73. {siibra-1.0.1a0.dist-info → siibra-1.0.1a2.dist-info}/METADATA +21 -8
  74. siibra-1.0.1a2.dist-info/RECORD +80 -0
  75. {siibra-1.0.1a0.dist-info → siibra-1.0.1a2.dist-info}/WHEEL +1 -1
  76. siibra/experimental/__init__.py +0 -19
  77. siibra/experimental/contour.py +0 -61
  78. siibra/experimental/cortical_profile_sampler.py +0 -57
  79. siibra/experimental/patch.py +0 -98
  80. siibra-1.0.1a0.dist-info/RECORD +0 -84
  81. {siibra-1.0.1a0.dist-info → siibra-1.0.1a2.dist-info}/LICENSE +0 -0
  82. {siibra-1.0.1a0.dist-info → siibra-1.0.1a2.dist-info}/top_level.txt +0 -0
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,19 +13,19 @@
13
13
  # See the License for the specific language governing permissions and
14
14
  # limitations under the License.
15
15
 
16
+ from typing import Callable, List, Union
17
+
18
+ import numpy as np
19
+ import pandas as pd
20
+
16
21
  from . import tabular
22
+ from .. import anchor as _anchor
17
23
  from ..feature import Compoundable
18
-
19
24
  from ...core import region as _region
20
- from .. import anchor as _anchor
21
25
  from ...commons import QUIET, siibra_tqdm
22
26
  from ...locations import pointcloud
23
- from ...retrieval.repositories import RepositoryConnector
24
27
  from ...retrieval.requests import HttpRequest
25
-
26
- from typing import Callable, List, Union
27
- import pandas as pd
28
- import numpy as np
28
+ from ...retrieval.repositories import RepositoryConnector
29
29
 
30
30
 
31
31
  class RegionalTimeseriesActivity(tabular.Tabular, Compoundable):
@@ -239,7 +239,7 @@ class RegionalTimeseriesActivity(tabular.Tabular, Compoundable):
239
239
  backend="plotly", **kwargs
240
240
  ):
241
241
  """
242
- Create a carpet plot ofthe timeseries data per region.
242
+ Create a carpet plot of the timeseries data per region.
243
243
 
244
244
  Parameters
245
245
  ----------
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,15 +15,14 @@
15
15
  """Base type of features in tabular formats."""
16
16
 
17
17
  from zipfile import ZipFile
18
- from .. import feature
19
-
20
- from .. import anchor as _anchor
21
-
22
- from ... import commons
23
18
 
24
19
  import pandas as pd
25
20
  from textwrap import wrap
26
21
 
22
+ from .. import feature
23
+ from .. import anchor as _anchor
24
+ from ...commons import logger
25
+
27
26
 
28
27
  class Tabular(feature.Feature):
29
28
  """
@@ -91,9 +90,11 @@ class Tabular(feature.Feature):
91
90
  if backend == "matplotlib":
92
91
  try:
93
92
  import matplotlib.pyplot as plt
94
- except ImportError:
95
- commons.logger.error("matplotlib not available. Plotting of fingerprints disabled.")
96
- return None
93
+ except ImportError as e:
94
+ logger.error(
95
+ "matplotlib not available. Please install matplotlib or use another backend such as plotly."
96
+ )
97
+ raise e
97
98
  # default kwargs
98
99
  if kwargs.get("error_y") is None:
99
100
  kwargs["yerr"] = kwargs.get("yerr", 'std' if 'std' in self.data.columns else None)
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,21 +14,21 @@
14
14
  # limitations under the License.
15
15
  """Query Allen Human Brain Atlas microarray data in specified volume."""
16
16
 
17
- from .query import LiveQuery
17
+ from typing import List
18
+ from xml.etree import ElementTree
19
+ import json
18
20
 
19
- from ..core import space as _space, structure
21
+ import numpy as np
22
+
23
+ from . import query as _query
24
+ from ..core import structure
20
25
  from ..features import anchor as _anchor
21
26
  from ..features.tabular.gene_expression import GeneExpressions
22
27
  from ..commons import logger, Species
23
- from ..locations import point, pointcloud
28
+ from ..locations import pointcloud
24
29
  from ..retrieval import HttpRequest
25
30
  from ..vocabularies import GENE_NAMES
26
31
 
27
- from typing import List
28
- from xml.etree import ElementTree
29
- import numpy as np
30
- import json
31
-
32
32
 
33
33
  BASE_URL = "http://api.brain-map.org/api/v2/data"
34
34
 
@@ -51,7 +51,7 @@ class InvalidAllenAPIResponseException(Exception):
51
51
  pass
52
52
 
53
53
 
54
- class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions):
54
+ class AllenBrainAtlasQuery(_query.LiveQuery, args=['gene'], FeatureType=GeneExpressions):
55
55
  """
56
56
  Interface to Allen Human Brain Atlas microarray data.
57
57
 
@@ -66,7 +66,7 @@ class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions
66
66
  - Each sample was subject to multiple (in fact 4) different probes.
67
67
  - The probe data structures contain the list of gene expression of a
68
68
  particular gene measured in each sample. Therefore the length of the gene
69
- expression list in a probe coresponds to the number of samples taken in
69
+ expression list in a probe corresponds to the number of samples taken in
70
70
  the corresponding donor for the given gene.
71
71
  """
72
72
 
@@ -114,10 +114,10 @@ class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions
114
114
  Each sample is linked to a donor, brain structure, and
115
115
  ICBM coordinate.
116
116
  When querying with a brain structure, the ICBM coordinates
117
- will be tested agains the region mask in ICBM space
117
+ will be tested against the region mask in ICBM space
118
118
  to produce a table of outputs.
119
119
  """
120
- LiveQuery.__init__(self, **kwargs)
120
+ _query.LiveQuery.__init__(self, **kwargs)
121
121
  gene = kwargs.get('gene')
122
122
 
123
123
  def parse_gene(spec):
@@ -144,27 +144,25 @@ class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions
144
144
  'https://github.com/FZJ-INM1-BDA/siibra-python/issues/636.'
145
145
  )
146
146
 
147
- mnispace = _space.Space.registry().get('mni152')
148
-
149
147
  # Match the microarray probes to the query mask.
150
148
  # Record matched instances and their locations.
151
- measurements = []
152
- coordinates = []
153
- for measurement in self:
154
- pt = point.Point(measurement['mni_xyz'], space=mnispace, sigma_mm=LOCATION_PRECISION_MM)
155
- if pt in concept:
156
- measurements.append(measurement)
157
- coordinates.append(pt)
158
-
159
- if len(coordinates) == 0:
149
+ all_measurements = list(self)
150
+ all_mes_points = pointcloud.PointCloud(
151
+ [measurement['mni_xyz'] for measurement in all_measurements],
152
+ space='mni152',
153
+ sigma_mm=LOCATION_PRECISION_MM
154
+ )
155
+ intersecting_points = concept.intersection(all_mes_points)
156
+ if intersecting_points is None:
160
157
  logger.info(f"No probes found that lie within {concept}")
161
158
  return []
159
+ measurements = [all_measurements[index] for index in intersecting_points.labels]
162
160
 
163
161
  # Build the anatomical anchor and assignment to the query concept.
164
162
  # It will be attached to the returned feature, with the set of matched
165
163
  # MNI coordinates as anchor's location.
166
164
  anchor = _anchor.AnatomicalAnchor(
167
- location=pointcloud.from_points(coordinates),
165
+ location=intersecting_points,
168
166
  species=self.species
169
167
  )
170
168
  explanation = f"MNI coordinates of tissue samples were filtered using {concept}"
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,21 +12,25 @@
12
12
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
13
  # See the License for the specific language governing permissions and
14
14
  # limitations under the License.
15
- """Matches BigBrain intesity profiles extracted by Wagstyl et al. to volumes."""
15
+ """Matches BigBrain intensity profiles extracted by Wagstyl et al. to volumes."""
16
16
 
17
- from . import query
17
+ from typing import List
18
+ from os import path
18
19
 
19
- from ..features.tabular import bigbrain_intensity_profile, layerwise_bigbrain_intensities
20
+ import numpy as np
21
+ from scipy.spatial import KDTree
22
+
23
+ from . import query
20
24
  from ..features import anchor as _anchor
21
- from ..commons import logger
22
- from ..locations import point, pointcloud
25
+ from ..features.tabular import bigbrain_intensity_profile, layerwise_bigbrain_intensities
26
+ from ..features.image import CellbodyStainedSection, BigBrain1MicronPatch
27
+ from ..commons import logger, siibra_tqdm
28
+ from ..locations import point, pointcloud, location, experimental
23
29
  from ..core import structure
30
+ from ..core.concept import get_registry
24
31
  from ..retrieval import requests, cache
25
32
  from ..retrieval.datasets import GenericDataset
26
-
27
- import numpy as np
28
- from typing import List
29
- from os import path
33
+ from ..volumes import Volume, from_array
30
34
 
31
35
 
32
36
  class WagstylProfileLoader:
@@ -41,23 +45,23 @@ class WagstylProfileLoader:
41
45
  DATASET = GenericDataset(
42
46
  name="HIBALL workshop on cortical layers",
43
47
  contributors=[
44
- 'Konrad Wagstyl',
45
- 'Stéphanie Larocque',
46
- 'Guillem Cucurull',
47
- 'Claude Lepage',
48
- 'Joseph Paul Cohen',
49
- 'Sebastian Bludau',
50
- 'Nicola Palomero-Gallagher',
51
- 'Lindsay B. Lewis',
52
- 'Thomas Funck',
53
- 'Hannah Spitzer',
54
- 'Timo Dickscheid',
55
- 'Paul C. Fletcher',
56
- 'Adriana Romero',
57
- 'Karl Zilles',
58
- 'Katrin Amunts',
59
- 'Yoshua Bengio',
60
- 'Alan C. Evans'
48
+ "Konrad Wagstyl",
49
+ "Stéphanie Larocque",
50
+ "Guillem Cucurull",
51
+ "Claude Lepage",
52
+ "Joseph Paul Cohen",
53
+ "Sebastian Bludau",
54
+ "Nicola Palomero-Gallagher",
55
+ "Lindsay B. Lewis",
56
+ "Thomas Funck",
57
+ "Hannah Spitzer",
58
+ "Timo Dickscheid",
59
+ "Paul C. Fletcher",
60
+ "Adriana Romero",
61
+ "Karl Zilles",
62
+ "Katrin Amunts",
63
+ "Yoshua Bengio",
64
+ "Alan C. Evans",
61
65
  ],
62
66
  url="https://github.com/kwagstyl/cortical_layers_tutorial/",
63
67
  description="Cortical profiles of BigBrain staining intensities computed by Konrad Wagstyl, "
@@ -67,7 +71,7 @@ class WagstylProfileLoader:
67
71
  "http://dx.doi.org/10.1371/journal.pbio.3000678."
68
72
  "The data is taken from the tutorial at "
69
73
  "https://github.com/kwagstyl/cortical_layers_tutorial. Each vertex is "
70
- "assigned to the regional map when queried."
74
+ "assigned to the regional map when queried.",
71
75
  )
72
76
 
73
77
  def __init__(self):
@@ -76,15 +80,22 @@ class WagstylProfileLoader:
76
80
 
77
81
  @property
78
82
  def profile_labels(self):
79
- return np.arange(0., 1., 1. / self._profiles.shape[1])
83
+ return np.arange(0.0, 1.0, 1.0 / self._profiles.shape[1])
80
84
 
81
85
  @classmethod
82
86
  def _load(cls):
83
87
  # read thicknesses, in mm, and normalize by their last column which is the total thickness
84
- thickness = requests.HttpRequest(f"{cls.REPO}/{cls.THICKNESSES_FILE_LEFT}").data.T
85
- total_thickness = thickness[:, :-1].sum(1) # last column is the computed total thickness
88
+ thickness = requests.HttpRequest(
89
+ f"{cls.REPO}/{cls.THICKNESSES_FILE_LEFT}"
90
+ ).data.T
91
+ total_thickness = thickness[:, :-1].sum(
92
+ 1
93
+ ) # last column is the computed total thickness
86
94
  valid = np.where(total_thickness > 0)[0]
87
- cls._boundary_depths = np.c_[np.zeros_like(valid), (thickness[valid, :] / total_thickness[valid, None]).cumsum(1)]
95
+ cls._boundary_depths = np.c_[
96
+ np.zeros_like(valid),
97
+ (thickness[valid, :] / total_thickness[valid, None]).cumsum(1),
98
+ ]
88
99
  cls._boundary_depths[:, -1] = 1 # account for float calculation errors
89
100
 
90
101
  # find profiles with valid thickness
@@ -112,38 +123,51 @@ class WagstylProfileLoader:
112
123
  cache.Warmup.register_warmup_fn()(lambda: WagstylProfileLoader._load())
113
124
 
114
125
 
115
- class BigBrainProfileQuery(query.LiveQuery, args=[], FeatureType=bigbrain_intensity_profile.BigBrainIntensityProfile):
126
+ class BigBrainProfileQuery(
127
+ query.LiveQuery,
128
+ args=[],
129
+ FeatureType=bigbrain_intensity_profile.BigBrainIntensityProfile,
130
+ ):
116
131
 
117
132
  def __init__(self):
118
133
  query.LiveQuery.__init__(self)
119
134
 
120
- def query(self, concept: structure.BrainStructure, **kwargs) -> List[bigbrain_intensity_profile.BigBrainIntensityProfile]:
135
+ def query(
136
+ self, concept: structure.BrainStructure, **kwargs
137
+ ) -> List[bigbrain_intensity_profile.BigBrainIntensityProfile]:
121
138
  loader = WagstylProfileLoader()
122
- mesh_vertices = pointcloud.PointCloud(loader._vertices, space='bigbrain')
123
- matched = concept.intersection(mesh_vertices) # returns a reduced PointCloud with og indices as labels
139
+ mesh_vertices = pointcloud.PointCloud(loader._vertices, space="bigbrain")
140
+ matched = concept.intersection(
141
+ mesh_vertices
142
+ ) # returns a reduced PointCloud with og indices as labels
124
143
  if matched is None:
125
144
  return []
145
+ if isinstance(matched, point.Point):
146
+ matched = pointcloud.from_points([matched])
126
147
  assert isinstance(matched, pointcloud.PointCloud)
148
+ if isinstance(concept, location.Location):
149
+ mesh_as_list = mesh_vertices.as_list()
150
+ matched.labels = [mesh_as_list.index(v.coordinate) for v in matched]
127
151
  indices = matched.labels
128
152
  assert indices is not None
129
153
  features = []
130
- for i in matched.labels:
154
+ for i in indices:
131
155
  anchor = _anchor.AnatomicalAnchor(
132
- location=point.Point(loader._vertices[i], space='bigbrain'),
156
+ location=point.Point(loader._vertices[i], space="bigbrain"),
133
157
  region=str(concept),
134
- species='Homo sapiens'
158
+ species="Homo sapiens",
135
159
  )
136
160
  prof = bigbrain_intensity_profile.BigBrainIntensityProfile(
137
161
  anchor=anchor,
138
162
  depths=loader.profile_labels,
139
163
  values=loader._profiles[i],
140
- boundaries=loader._boundary_depths[i]
164
+ boundaries=loader._boundary_depths[i],
141
165
  )
142
166
  prof.anchor._assignments[concept] = _anchor.AnatomicalAssignment(
143
167
  query_structure=concept,
144
168
  assigned_structure=concept,
145
169
  qualification=_anchor.Qualification.CONTAINED,
146
- explanation=f"Surface vertex of BigBrain cortical profile was filtered using {concept}"
170
+ explanation=f"Surface vertex of BigBrain cortical profile was filtered using {concept}",
147
171
  )
148
172
  prof.datasets = [WagstylProfileLoader.DATASET]
149
173
  features.append(prof)
@@ -151,47 +175,220 @@ class BigBrainProfileQuery(query.LiveQuery, args=[], FeatureType=bigbrain_intens
151
175
  return features
152
176
 
153
177
 
154
- class LayerwiseBigBrainIntensityQuery(query.LiveQuery, args=[], FeatureType=layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities):
178
+ class LayerwiseBigBrainIntensityQuery(
179
+ query.LiveQuery,
180
+ args=[],
181
+ FeatureType=layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities,
182
+ ):
155
183
 
156
184
  def __init__(self):
157
185
  query.LiveQuery.__init__(self)
158
186
 
159
- def query(self, concept: structure.BrainStructure, **kwargs) -> List[layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities]:
187
+ def query(
188
+ self, concept: structure.BrainStructure, **kwargs
189
+ ) -> List[layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities]:
160
190
 
161
191
  loader = WagstylProfileLoader()
162
- mesh_vertices = pointcloud.PointCloud(loader._vertices, space='bigbrain')
163
- matched = concept.intersection(mesh_vertices) # returns a reduced PointCloud with og indices as labels
192
+ mesh_vertices = pointcloud.PointCloud(loader._vertices, space="bigbrain")
193
+ matched = concept.intersection(
194
+ mesh_vertices
195
+ ) # returns a reduced PointCloud with og indices as labels if the concept is a region
164
196
  if matched is None:
165
197
  return []
198
+ if isinstance(matched, point.Point):
199
+ matched = pointcloud.from_points([matched])
166
200
  assert isinstance(matched, pointcloud.PointCloud)
201
+ if isinstance(concept, location.Location):
202
+ mesh_as_list = mesh_vertices.as_list()
203
+ matched.labels = [mesh_as_list.index(v.coordinate) for v in matched]
167
204
  indices = matched.labels
168
- assert indices is not None
169
205
  matched_profiles = loader._profiles[indices, :]
170
206
  boundary_depths = loader._boundary_depths[indices, :]
171
207
  # compute array of layer labels for all coefficients in profiles_left
172
208
  N = matched_profiles.shape[1]
173
209
  prange = np.arange(N)
174
- layer_labels = 7 - np.array([
175
- [np.array([[(prange < T) * 1] for i, T in enumerate((b * N).astype('int'))]).squeeze().sum(0)]
176
- for b in boundary_depths
177
- ]).reshape((-1, 200))
210
+ layer_labels = 7 - np.array(
211
+ [
212
+ [
213
+ np.array(
214
+ [
215
+ [(prange < T) * 1]
216
+ for i, T in enumerate((b * N).astype("int"))
217
+ ]
218
+ )
219
+ .squeeze()
220
+ .sum(0)
221
+ ]
222
+ for b in boundary_depths
223
+ ]
224
+ ).reshape((-1, 200))
178
225
 
179
226
  anchor = _anchor.AnatomicalAnchor(
180
- location=pointcloud.PointCloud(loader._vertices[indices, :], space='bigbrain'),
227
+ location=pointcloud.PointCloud(
228
+ loader._vertices[indices, :], space="bigbrain"
229
+ ),
181
230
  region=str(concept),
182
- species='Homo sapiens'
231
+ species="Homo sapiens",
183
232
  )
184
233
  result = layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities(
185
234
  anchor=anchor,
186
- means=[matched_profiles[layer_labels == layer].mean() for layer in range(1, 7)],
187
- stds=[matched_profiles[layer_labels == layer].std() for layer in range(1, 7)],
235
+ means=[
236
+ matched_profiles[layer_labels == layer].mean() for layer in range(1, 7)
237
+ ],
238
+ stds=[
239
+ matched_profiles[layer_labels == layer].std() for layer in range(1, 7)
240
+ ],
188
241
  )
189
242
  result.anchor._assignments[concept] = _anchor.AnatomicalAssignment(
190
243
  query_structure=concept,
191
244
  assigned_structure=concept,
192
245
  qualification=_anchor.Qualification.CONTAINED,
193
- explanation=f"Surface vertices of BigBrain cortical profiles were filtered using {concept}"
246
+ explanation=f"Surface vertices of BigBrain cortical profiles were filtered using {concept}",
194
247
  )
195
248
  result.datasets = [WagstylProfileLoader.DATASET]
196
249
 
197
250
  return [result]
251
+
252
+
253
+ class BigBrain1MicronPatchQuery(
254
+ query.LiveQuery, args=[], FeatureType=BigBrain1MicronPatch
255
+ ):
256
+ """
257
+ Sample approximately orthogonal cortical image patches
258
+ from BigBrain 1 micron sections, guided by an image volume
259
+ in a supported reference space providing. The image
260
+ volume is used as a weighted mask to extract patches
261
+ along the cortical midsurface with nonzero weights in the
262
+ input image.
263
+ An optional lower_threshold can be used to narrow down
264
+ the search
265
+ The weight is stored with the resulting features.
266
+ """
267
+
268
+ def __init__(self, lower_threshold=0.):
269
+ self.layermap = get_registry("Map").get("cortical layers bigbrain")
270
+ self.lower_threshold = lower_threshold
271
+ query.LiveQuery.__init__(self)
272
+
273
+ def query(
274
+ self, concept: structure.BrainStructure, **kwargs
275
+ ) -> List[BigBrain1MicronPatch]:
276
+
277
+ # make sure input is an image volume
278
+ # TODO function should be extended to deal with other concepts as well
279
+ if not isinstance(concept, Volume):
280
+ logger.warning(
281
+ "Querying BigBrain1MicronPatch features requires to "
282
+ "query with an image volume."
283
+ )
284
+ return []
285
+
286
+ # threshold image volume, if requested
287
+ if self.lower_threshold > 0.0:
288
+ logger.info(
289
+ f"Applying lower threshold of {self.lower_threshold} "
290
+ "for BigBrain 1 micron patch query."
291
+ )
292
+ img = concept.fetch()
293
+ arr = np.asanyarray(img.dataobj)
294
+ arr[arr < self.lower_threshold] = 0
295
+ query_vol = from_array(arr, img.affine, space=concept.space, name="filtered volume")
296
+ else:
297
+ query_vol = concept
298
+ bb_bbox = query_vol.get_boundingbox().warp('bigbrain')
299
+
300
+ # find 1 micron BigBrain sections intersecting the thresholded volume
301
+ sections: List[CellbodyStainedSection] = [
302
+ s
303
+ for s in CellbodyStainedSection._get_instances()
304
+ if isinstance(s, CellbodyStainedSection)
305
+ and s.get_boundingbox(clip=False).intersects(query_vol)
306
+ ]
307
+ if not sections:
308
+ return []
309
+
310
+ # extract relevant patches
311
+ features = []
312
+ for hemisphere in ["left", "right"]:
313
+
314
+ # get layer 4 mesh in the hemisphere
315
+ l4 = self.layermap.parcellation.get_region("4 " + hemisphere)
316
+ l4mesh = self.layermap.fetch(l4, format="mesh")
317
+ layerverts = {
318
+ n: self.layermap.fetch(region=n, format="mesh")["verts"]
319
+ for n in self.layermap.regions if hemisphere in n
320
+ }
321
+ l4verts = pointcloud.PointCloud(layerverts[l4.name], "bigbrain")
322
+ if not l4verts.boundingbox.intersects(bb_bbox):
323
+ continue
324
+
325
+ # for each relevant BigBrain 1 micron section, intersect layer IV mesh
326
+ # to obtain midcortex-locations, and build their orthogonal patches.
327
+ # store the concept's value with the patch.
328
+ vertex_tree = KDTree(layerverts[l4.name])
329
+ for s in siibra_tqdm(
330
+ sections, unit="sections", desc=f"Sampling patches in {hemisphere} hemisphere"
331
+ ):
332
+
333
+ # compute layer IV contour in the image plane
334
+ imgplane = experimental.Plane.from_image(s)
335
+ try:
336
+ contour_segments = imgplane.intersect_mesh(l4mesh)
337
+ except AssertionError:
338
+ logger.error(f"Could not intersect with layer 4 mesh: {s.name}")
339
+ continue
340
+ if len(contour_segments) == 0:
341
+ continue
342
+
343
+ # score the contour points with the query image volume
344
+ all_points = pointcloud.from_points(sum(map(list, contour_segments), []))
345
+ all_probs = query_vol.evaluate_points(all_points)
346
+ points_prob_lookup = {
347
+ pt.coordinate: prob
348
+ for pt, prob in zip(all_points, all_probs)
349
+ if prob >= self.lower_threshold
350
+ }
351
+ if len(points_prob_lookup) == 0:
352
+ continue
353
+
354
+ # For each contour point,
355
+ # - find the closest BigBrain layer surface vertex,
356
+ # - build the profile of corresponding vertices across all layers
357
+ # - project the profile to the image section
358
+ # - determine the oriented patch along the profile
359
+ _, indices = vertex_tree.query(np.array(list(points_prob_lookup.keys())))
360
+ for prob, nnb in zip(points_prob_lookup.values(), indices):
361
+
362
+ prof = pointcloud.Contour(
363
+ [
364
+ layerverts[_][nnb]
365
+ for _ in self.layermap.regions
366
+ if hemisphere in _
367
+ ],
368
+ space=self.layermap.space,
369
+ )
370
+ patch = imgplane.get_enclosing_patch(prof)
371
+ if patch is None:
372
+ continue
373
+
374
+ anchor = _anchor.AnatomicalAnchor(
375
+ location=patch, species="Homo sapiens"
376
+ )
377
+ anchor._assignments[concept] = _anchor.AnatomicalAssignment(
378
+ query_structure=query_vol,
379
+ assigned_structure=s.anchor.volume,
380
+ qualification=_anchor.Qualification.CONTAINED
381
+ )
382
+ features.append(
383
+ BigBrain1MicronPatch(
384
+ patch=patch,
385
+ profile=prof,
386
+ section=s,
387
+ vertex=nnb,
388
+ relevance=prob,
389
+ anchor=anchor
390
+ )
391
+ )
392
+
393
+ # return the patches sorted by relevance (ie. probability)
394
+ return sorted(features, key=lambda p: p.relevance, reverse=True)
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,18 +14,21 @@
14
14
  # limitations under the License.
15
15
  """Query data features published as Ebrains datasets with AtlasConcepts"""
16
16
 
17
- from ..features.dataset import ebrains as _ebrains
18
- from . import query
17
+ from collections import defaultdict
18
+ import re
19
+ from packaging.version import Version
20
+ from tempfile import NamedTemporaryFile
21
+ from typing import TYPE_CHECKING
19
22
 
23
+ from . import query
24
+ from ..features.dataset import ebrains as _ebrains
20
25
  from ..commons import logger, siibra_tqdm
21
26
  from ..features import anchor as _anchor
22
27
  from ..retrieval import requests, datasets, cache
23
- from ..core import parcellation, region
28
+ from ..core.concept import get_registry
24
29
 
25
- from collections import defaultdict
26
- import re
27
- from packaging.version import Version
28
- from tempfile import NamedTemporaryFile
30
+ if TYPE_CHECKING:
31
+ from ..core.region import Region
29
32
 
30
33
 
31
34
  class EbrainsFeatureQuery(query.LiveQuery, args=[], FeatureType=_ebrains.EbrainsDataFeature):
@@ -58,17 +61,17 @@ class EbrainsFeatureQuery(query.LiveQuery, args=[], FeatureType=_ebrains.Ebrains
58
61
  if self.__class__.parcellation_ids is None:
59
62
  self.__class__.parcellation_ids = [
60
63
  dset.id
61
- for parc in parcellation.Parcellation.registry()
64
+ for parc in get_registry("Parcellation")
62
65
  for dset in parc.datasets
63
66
  if isinstance(dset, datasets.EbrainsV3DatasetVersion)
64
67
  ]
65
68
 
66
- def query(self, region: region.Region):
69
+ def query(self, region: "Region"):
67
70
  versioned_datasets = defaultdict(dict)
68
71
  invalid_species_datasets = {}
69
72
  results = self.loader.data.get("results", [])
70
73
 
71
- for r in siibra_tqdm(results, total=len(results)):
74
+ for r in siibra_tqdm(results, total=len(results), unit='dataset', desc=f'Matching datasets to "{region}"'):
72
75
  regionname = r.get("name", None)
73
76
  alias = r.get("alias", None)
74
77
  for ds_spec in r.get("datasets", []):