siibra 1.0a1__1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of siibra might be problematic. Click here for more details.
- siibra/VERSION +1 -0
- siibra/__init__.py +164 -0
- siibra/commons.py +823 -0
- siibra/configuration/__init__.py +17 -0
- siibra/configuration/configuration.py +189 -0
- siibra/configuration/factory.py +589 -0
- siibra/core/__init__.py +16 -0
- siibra/core/assignment.py +110 -0
- siibra/core/atlas.py +239 -0
- siibra/core/concept.py +308 -0
- siibra/core/parcellation.py +387 -0
- siibra/core/region.py +1223 -0
- siibra/core/space.py +131 -0
- siibra/core/structure.py +111 -0
- siibra/exceptions.py +63 -0
- siibra/experimental/__init__.py +19 -0
- siibra/experimental/contour.py +61 -0
- siibra/experimental/cortical_profile_sampler.py +57 -0
- siibra/experimental/patch.py +98 -0
- siibra/experimental/plane3d.py +256 -0
- siibra/explorer/__init__.py +17 -0
- siibra/explorer/url.py +222 -0
- siibra/explorer/util.py +87 -0
- siibra/features/__init__.py +117 -0
- siibra/features/anchor.py +224 -0
- siibra/features/connectivity/__init__.py +33 -0
- siibra/features/connectivity/functional_connectivity.py +57 -0
- siibra/features/connectivity/regional_connectivity.py +494 -0
- siibra/features/connectivity/streamline_counts.py +27 -0
- siibra/features/connectivity/streamline_lengths.py +27 -0
- siibra/features/connectivity/tracing_connectivity.py +30 -0
- siibra/features/dataset/__init__.py +17 -0
- siibra/features/dataset/ebrains.py +90 -0
- siibra/features/feature.py +970 -0
- siibra/features/image/__init__.py +27 -0
- siibra/features/image/image.py +115 -0
- siibra/features/image/sections.py +26 -0
- siibra/features/image/volume_of_interest.py +88 -0
- siibra/features/tabular/__init__.py +24 -0
- siibra/features/tabular/bigbrain_intensity_profile.py +77 -0
- siibra/features/tabular/cell_density_profile.py +298 -0
- siibra/features/tabular/cortical_profile.py +322 -0
- siibra/features/tabular/gene_expression.py +257 -0
- siibra/features/tabular/layerwise_bigbrain_intensities.py +62 -0
- siibra/features/tabular/layerwise_cell_density.py +95 -0
- siibra/features/tabular/receptor_density_fingerprint.py +192 -0
- siibra/features/tabular/receptor_density_profile.py +110 -0
- siibra/features/tabular/regional_timeseries_activity.py +294 -0
- siibra/features/tabular/tabular.py +139 -0
- siibra/livequeries/__init__.py +19 -0
- siibra/livequeries/allen.py +352 -0
- siibra/livequeries/bigbrain.py +197 -0
- siibra/livequeries/ebrains.py +145 -0
- siibra/livequeries/query.py +49 -0
- siibra/locations/__init__.py +91 -0
- siibra/locations/boundingbox.py +454 -0
- siibra/locations/location.py +115 -0
- siibra/locations/point.py +344 -0
- siibra/locations/pointcloud.py +349 -0
- siibra/retrieval/__init__.py +27 -0
- siibra/retrieval/cache.py +233 -0
- siibra/retrieval/datasets.py +389 -0
- siibra/retrieval/exceptions/__init__.py +27 -0
- siibra/retrieval/repositories.py +769 -0
- siibra/retrieval/requests.py +659 -0
- siibra/vocabularies/__init__.py +45 -0
- siibra/vocabularies/gene_names.json +29176 -0
- siibra/vocabularies/receptor_symbols.json +210 -0
- siibra/vocabularies/region_aliases.json +460 -0
- siibra/volumes/__init__.py +23 -0
- siibra/volumes/parcellationmap.py +1279 -0
- siibra/volumes/providers/__init__.py +20 -0
- siibra/volumes/providers/freesurfer.py +113 -0
- siibra/volumes/providers/gifti.py +165 -0
- siibra/volumes/providers/neuroglancer.py +736 -0
- siibra/volumes/providers/nifti.py +266 -0
- siibra/volumes/providers/provider.py +107 -0
- siibra/volumes/sparsemap.py +468 -0
- siibra/volumes/volume.py +892 -0
- siibra-1.0.0a1.dist-info/LICENSE +201 -0
- siibra-1.0.0a1.dist-info/METADATA +160 -0
- siibra-1.0.0a1.dist-info/RECORD +84 -0
- siibra-1.0.0a1.dist-info/WHEEL +5 -0
- siibra-1.0.0a1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,1279 @@
|
|
|
1
|
+
# Copyright 2018-2024
|
|
2
|
+
# Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
|
|
3
|
+
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
"""Provides spatial representations for parcellations and regions."""
|
|
16
|
+
|
|
17
|
+
from . import volume as _volume
|
|
18
|
+
from .providers import provider
|
|
19
|
+
from .. import logger, QUIET, exceptions
|
|
20
|
+
from ..commons import (
|
|
21
|
+
MapIndex,
|
|
22
|
+
MapType,
|
|
23
|
+
compare_arrays,
|
|
24
|
+
resample_img_to_img,
|
|
25
|
+
connected_components,
|
|
26
|
+
clear_name,
|
|
27
|
+
create_key,
|
|
28
|
+
create_gaussian_kernel,
|
|
29
|
+
siibra_tqdm,
|
|
30
|
+
Species,
|
|
31
|
+
CompareMapsResult,
|
|
32
|
+
generate_uuid
|
|
33
|
+
)
|
|
34
|
+
from ..core import concept, space, parcellation, region as _region
|
|
35
|
+
from ..locations import location, point, pointcloud
|
|
36
|
+
|
|
37
|
+
import numpy as np
|
|
38
|
+
from typing import Union, Dict, List, TYPE_CHECKING, Iterable, Tuple
|
|
39
|
+
from scipy.ndimage import distance_transform_edt
|
|
40
|
+
from collections import defaultdict
|
|
41
|
+
from nilearn import image
|
|
42
|
+
import pandas as pd
|
|
43
|
+
from dataclasses import dataclass, asdict
|
|
44
|
+
|
|
45
|
+
if TYPE_CHECKING:
|
|
46
|
+
from ..core.region import Region
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
@dataclass
class MapAssignment:
    """Record of assigning one input structure to a structure of a parcellation map."""
    input_structure: int  # index of the input structure that was assigned
    centroid: Union[Tuple[np.ndarray], point.Point]  # centroid of the structure; array tuple or Point
    volume: int  # index of the map volume the structure was assigned to
    fragment: str  # fragment name of that volume, if any
    map_value: np.ndarray  # map value(s) at the assigned location
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
@dataclass
class AssignImageResult(CompareMapsResult, MapAssignment):
    """Combines image comparison scores with the basic assignment record."""
    pass
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
class Map(concept.AtlasConcept, configuration_folder="maps"):
|
|
64
|
+
|
|
65
|
+
def __init__(
|
|
66
|
+
self,
|
|
67
|
+
identifier: str,
|
|
68
|
+
name: str,
|
|
69
|
+
space_spec: dict,
|
|
70
|
+
parcellation_spec: dict,
|
|
71
|
+
indices: Dict[str, List[Dict]],
|
|
72
|
+
volumes: list = [],
|
|
73
|
+
shortname: str = "",
|
|
74
|
+
description: str = "",
|
|
75
|
+
modality: str = None,
|
|
76
|
+
publications: list = [],
|
|
77
|
+
datasets: list = [],
|
|
78
|
+
prerelease: bool = False,
|
|
79
|
+
):
|
|
80
|
+
"""
|
|
81
|
+
Constructs a new parcellation object.
|
|
82
|
+
|
|
83
|
+
Parameters
|
|
84
|
+
----------
|
|
85
|
+
identifier: str
|
|
86
|
+
Unique identifier of the parcellation
|
|
87
|
+
name: str
|
|
88
|
+
Human-readable name of the parcellation
|
|
89
|
+
space_spec: dict
|
|
90
|
+
Specification of the space (use @id or name fields)
|
|
91
|
+
parcellation_spec: str
|
|
92
|
+
Specification of the parcellation (use @id or name fields)
|
|
93
|
+
indices: dict
|
|
94
|
+
Dictionary of indices for the brain regions.
|
|
95
|
+
Keys are exact region names.
|
|
96
|
+
Per region name, a list of dictionaries with fields "volume" and "label" is expected,
|
|
97
|
+
where "volume" points to the index of the Volume object where this region is mapped,
|
|
98
|
+
and optional "label" is the voxel label for that region.
|
|
99
|
+
For continuous / probability maps, the "label" can be null or omitted.
|
|
100
|
+
For single-volume labelled maps, the "volume" can be null or omitted.
|
|
101
|
+
volumes: list[Volume]
|
|
102
|
+
parcellation volumes
|
|
103
|
+
shortname: str, optional
|
|
104
|
+
Shortform of human-readable name
|
|
105
|
+
description: str, optional
|
|
106
|
+
Textual description of the parcellation
|
|
107
|
+
modality: str, default: None
|
|
108
|
+
Specification of the modality used for creating the parcellation
|
|
109
|
+
publications: list
|
|
110
|
+
List of associated publications, each a dictionary with "doi" and/or "citation" fields
|
|
111
|
+
datasets : list
|
|
112
|
+
datasets associated with this concept
|
|
113
|
+
"""
|
|
114
|
+
concept.AtlasConcept.__init__(
|
|
115
|
+
self,
|
|
116
|
+
identifier=identifier,
|
|
117
|
+
name=name,
|
|
118
|
+
species=None, # inherits species from space
|
|
119
|
+
shortname=shortname,
|
|
120
|
+
description=description,
|
|
121
|
+
publications=publications,
|
|
122
|
+
datasets=datasets,
|
|
123
|
+
modality=modality,
|
|
124
|
+
prerelease=prerelease,
|
|
125
|
+
)
|
|
126
|
+
self._space_spec = space_spec
|
|
127
|
+
self._parcellation_spec = parcellation_spec
|
|
128
|
+
|
|
129
|
+
# Since the volumes might include 4D arrays, where the actual
|
|
130
|
+
# volume index points to a z coordinate, we create subvolume
|
|
131
|
+
# indexers from the given volume provider if 'z' is specified.
|
|
132
|
+
self._indices: Dict[str, List[MapIndex]] = {}
|
|
133
|
+
self.volumes: List[_volume.Volume] = []
|
|
134
|
+
remap_volumes = {}
|
|
135
|
+
# TODO: This assumes knowledge of the preconfigruation specs wrt. z.
|
|
136
|
+
# z to subvolume conversion should probably go to the factory.
|
|
137
|
+
for regionname, indexlist in indices.items():
|
|
138
|
+
k = clear_name(regionname)
|
|
139
|
+
self._indices[k] = []
|
|
140
|
+
for index in indexlist:
|
|
141
|
+
vol = index.get('volume', 0)
|
|
142
|
+
assert vol in range(len(volumes))
|
|
143
|
+
z = index.get('z')
|
|
144
|
+
if (vol, z) not in remap_volumes:
|
|
145
|
+
if z is None:
|
|
146
|
+
self.volumes.append(volumes[vol])
|
|
147
|
+
else:
|
|
148
|
+
self.volumes.append(_volume.Subvolume(volumes[vol], z))
|
|
149
|
+
remap_volumes[vol, z] = len(self.volumes) - 1
|
|
150
|
+
self._indices[k].append(
|
|
151
|
+
MapIndex(volume=remap_volumes[vol, z], label=index.get('label'), fragment=index.get('fragment'))
|
|
152
|
+
)
|
|
153
|
+
|
|
154
|
+
# make sure the indices are unique - each map/label pair should appear at most once
|
|
155
|
+
all_indices = sum(self._indices.values(), [])
|
|
156
|
+
seen = set()
|
|
157
|
+
duplicates = {x for x in all_indices if x in seen or seen.add(x)}
|
|
158
|
+
if len(duplicates) > 0:
|
|
159
|
+
logger.warning(f"Non unique indices encountered in {self}: {duplicates}")
|
|
160
|
+
self._affine_cached = None
|
|
161
|
+
|
|
162
|
+
@property
|
|
163
|
+
def key(self):
|
|
164
|
+
_id = self.id
|
|
165
|
+
return create_key(_id[len("siibra-map-v0.0.1"):])
|
|
166
|
+
|
|
167
|
+
@property
|
|
168
|
+
def species(self) -> Species:
|
|
169
|
+
# lazy implementation
|
|
170
|
+
if self._species_cached is None:
|
|
171
|
+
self._species_cached = self.space.species
|
|
172
|
+
return self.space._species_cached
|
|
173
|
+
|
|
174
|
+
def get_index(self, region: Union[str, "Region"]):
|
|
175
|
+
"""
|
|
176
|
+
Returns the unique index corresponding to the specified region.
|
|
177
|
+
|
|
178
|
+
Tip
|
|
179
|
+
----
|
|
180
|
+
Use find_indices() method for a less strict search returning all matches.
|
|
181
|
+
|
|
182
|
+
Parameters
|
|
183
|
+
----------
|
|
184
|
+
region: str or Region
|
|
185
|
+
|
|
186
|
+
Returns
|
|
187
|
+
-------
|
|
188
|
+
MapIndex
|
|
189
|
+
|
|
190
|
+
Raises
|
|
191
|
+
------
|
|
192
|
+
NonUniqueIndexError
|
|
193
|
+
If not unique or not defined in this parcellation map.
|
|
194
|
+
"""
|
|
195
|
+
matches = self.find_indices(region)
|
|
196
|
+
if len(matches) > 1:
|
|
197
|
+
# if there is an exact match, we still use it. If not, we cannot proceed.
|
|
198
|
+
regionname = region.name if isinstance(region, _region.Region) \
|
|
199
|
+
else region
|
|
200
|
+
for index, matched_name in matches.items():
|
|
201
|
+
if matched_name == regionname:
|
|
202
|
+
return index
|
|
203
|
+
raise exceptions.NonUniqueIndexError(
|
|
204
|
+
f"The specification '{region}' matches multiple mapped "
|
|
205
|
+
f"structures in {str(self)}: {list(matches.values())}"
|
|
206
|
+
)
|
|
207
|
+
elif len(matches) == 0:
|
|
208
|
+
raise exceptions.NonUniqueIndexError(
|
|
209
|
+
f"The specification '{region}' does not match to any structure mapped in {self}"
|
|
210
|
+
)
|
|
211
|
+
else:
|
|
212
|
+
return next(iter(matches))
|
|
213
|
+
|
|
214
|
+
def find_indices(self, region: Union[str, "Region"]):
|
|
215
|
+
"""
|
|
216
|
+
Returns the volume/label indices in this map which match the given
|
|
217
|
+
region specification.
|
|
218
|
+
|
|
219
|
+
Parameters
|
|
220
|
+
----------
|
|
221
|
+
region: str or Region
|
|
222
|
+
|
|
223
|
+
Returns
|
|
224
|
+
-------
|
|
225
|
+
dict
|
|
226
|
+
- keys: MapIndex
|
|
227
|
+
- values: region name
|
|
228
|
+
"""
|
|
229
|
+
if region in self._indices:
|
|
230
|
+
return {
|
|
231
|
+
idx: region
|
|
232
|
+
for idx in self._indices[region]
|
|
233
|
+
}
|
|
234
|
+
regionname = region.name if isinstance(region, _region.Region) else region
|
|
235
|
+
matched_region_names = set(_.name for _ in (self.parcellation.find(regionname)))
|
|
236
|
+
matches = matched_region_names & self._indices.keys()
|
|
237
|
+
if len(matches) == 0:
|
|
238
|
+
logger.warning(f"Region {regionname} not defined in {self}")
|
|
239
|
+
return {
|
|
240
|
+
idx: regionname
|
|
241
|
+
for regionname in matches
|
|
242
|
+
for idx in self._indices[regionname]
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
def get_region(self, label: int = None, volume: int = 0, index: MapIndex = None):
|
|
246
|
+
"""
|
|
247
|
+
Returns the region mapped by the given index, if any.
|
|
248
|
+
|
|
249
|
+
Tip
|
|
250
|
+
----
|
|
251
|
+
Use get_index() or find_indices() methods to obtain the MapIndex.
|
|
252
|
+
|
|
253
|
+
Parameters
|
|
254
|
+
----------
|
|
255
|
+
label: int, default: None
|
|
256
|
+
volume: int, default: 0
|
|
257
|
+
index: MapIndex, default: None
|
|
258
|
+
|
|
259
|
+
Returns
|
|
260
|
+
-------
|
|
261
|
+
Region
|
|
262
|
+
A region object defined in the parcellation map.
|
|
263
|
+
"""
|
|
264
|
+
if isinstance(label, MapIndex) and index is None:
|
|
265
|
+
raise TypeError("Specify MapIndex with 'index' keyword.")
|
|
266
|
+
if index is None:
|
|
267
|
+
index = MapIndex(volume, label)
|
|
268
|
+
matches = [
|
|
269
|
+
regionname
|
|
270
|
+
for regionname, indexlist in self._indices.items()
|
|
271
|
+
if index in indexlist
|
|
272
|
+
]
|
|
273
|
+
if len(matches) == 0:
|
|
274
|
+
logger.warning(f"Index {index} not defined in {self}")
|
|
275
|
+
return None
|
|
276
|
+
elif len(matches) == 1:
|
|
277
|
+
return self.parcellation.get_region(matches[0])
|
|
278
|
+
else:
|
|
279
|
+
# this should not happen, already tested in constructor
|
|
280
|
+
raise RuntimeError(f"Index {index} is not unique in {self}")
|
|
281
|
+
|
|
282
|
+
@property
|
|
283
|
+
def space(self):
|
|
284
|
+
for key in ["@id", "name"]:
|
|
285
|
+
if key in self._space_spec:
|
|
286
|
+
return space.Space.get_instance(self._space_spec[key])
|
|
287
|
+
return space.Space(None, "Unspecified space", species=Species.UNSPECIFIED_SPECIES)
|
|
288
|
+
|
|
289
|
+
@property
|
|
290
|
+
def parcellation(self):
|
|
291
|
+
for key in ["@id", "name"]:
|
|
292
|
+
if key in self._parcellation_spec:
|
|
293
|
+
return parcellation.Parcellation.get_instance(self._parcellation_spec[key])
|
|
294
|
+
logger.warning(
|
|
295
|
+
f"Cannot determine parcellation of {self.__class__.__name__} "
|
|
296
|
+
f"{self.name} from {self._parcellation_spec}"
|
|
297
|
+
)
|
|
298
|
+
return None
|
|
299
|
+
|
|
300
|
+
@property
|
|
301
|
+
def labels(self):
|
|
302
|
+
"""
|
|
303
|
+
The set of all label indices defined in this map, including "None" if
|
|
304
|
+
not defined for one or more regions.
|
|
305
|
+
"""
|
|
306
|
+
return {d.label for v in self._indices.values() for d in v}
|
|
307
|
+
|
|
308
|
+
@property
|
|
309
|
+
def maptype(self) -> MapType:
|
|
310
|
+
if all(isinstance(_, int) for _ in self.labels):
|
|
311
|
+
return MapType.LABELLED
|
|
312
|
+
elif self.labels == {None}:
|
|
313
|
+
return MapType.STATISTICAL
|
|
314
|
+
else:
|
|
315
|
+
raise RuntimeError(
|
|
316
|
+
f"Inconsistent label indices encountered in {self}"
|
|
317
|
+
)
|
|
318
|
+
|
|
319
|
+
    def __len__(self):
        """Number of volumes mapped by this parcellation map."""
        return len(self.volumes)
|
|
321
|
+
|
|
322
|
+
@property
|
|
323
|
+
def regions(self):
|
|
324
|
+
return list(self._indices)
|
|
325
|
+
|
|
326
|
+
def get_volume(
|
|
327
|
+
self,
|
|
328
|
+
region: Union[str, "Region"] = None,
|
|
329
|
+
*,
|
|
330
|
+
index: MapIndex = None,
|
|
331
|
+
**kwargs,
|
|
332
|
+
) -> Union[_volume.Volume, _volume.FilteredVolume, _volume.Subvolume]:
|
|
333
|
+
try:
|
|
334
|
+
length = len([arg for arg in [region, index] if arg is not None])
|
|
335
|
+
assert length == 1
|
|
336
|
+
except AssertionError:
|
|
337
|
+
if length > 1:
|
|
338
|
+
raise exceptions.ExcessiveArgumentException(
|
|
339
|
+
"One and only one of region or index can be defined for `get_volume`."
|
|
340
|
+
)
|
|
341
|
+
mapindex = None
|
|
342
|
+
if region is not None:
|
|
343
|
+
try:
|
|
344
|
+
assert isinstance(region, (str, _region.Region))
|
|
345
|
+
except AssertionError:
|
|
346
|
+
raise TypeError(f"Please provide a region name or region instance, not a {type(region)}")
|
|
347
|
+
mapindex = self.get_index(region)
|
|
348
|
+
if index is not None:
|
|
349
|
+
assert isinstance(index, MapIndex)
|
|
350
|
+
mapindex = index
|
|
351
|
+
if mapindex is None:
|
|
352
|
+
if len(self) == 1:
|
|
353
|
+
mapindex = MapIndex(volume=0, label=None)
|
|
354
|
+
elif len(self) > 1:
|
|
355
|
+
assert self.maptype == MapType.LABELLED, f"Cannot merge multiple volumes of map type {self.maptype}. Please specify a region or index."
|
|
356
|
+
logger.info(
|
|
357
|
+
"Map provides multiple volumes and no specification is"
|
|
358
|
+
" provided. Resampling all volumes to the space."
|
|
359
|
+
)
|
|
360
|
+
labels = list(range(len(self.volumes)))
|
|
361
|
+
merged_volume = _volume.merge(self.volumes, labels, **kwargs)
|
|
362
|
+
return merged_volume
|
|
363
|
+
else:
|
|
364
|
+
raise exceptions.NoVolumeFound("Map provides no volumes.")
|
|
365
|
+
|
|
366
|
+
kwargs_fragment = kwargs.pop("fragment", None)
|
|
367
|
+
if kwargs_fragment is not None:
|
|
368
|
+
if (mapindex.fragment is not None) and (kwargs_fragment != mapindex.fragment):
|
|
369
|
+
raise exceptions.ConflictingArgumentException(
|
|
370
|
+
"Conflicting specifications for fetching volume fragment: "
|
|
371
|
+
f"{mapindex.fragment} / {kwargs_fragment}"
|
|
372
|
+
)
|
|
373
|
+
mapindex.fragment = kwargs_fragment
|
|
374
|
+
|
|
375
|
+
if mapindex.volume is None:
|
|
376
|
+
mapindex.volume = 0
|
|
377
|
+
if mapindex.volume >= len(self.volumes):
|
|
378
|
+
raise IndexError(
|
|
379
|
+
f"{self} provides {len(self)} mapped volumes, but #{mapindex.volume} was requested."
|
|
380
|
+
)
|
|
381
|
+
if mapindex.label is None and mapindex.fragment is None:
|
|
382
|
+
return self.volumes[mapindex.volume]
|
|
383
|
+
|
|
384
|
+
return _volume.FilteredVolume(
|
|
385
|
+
parent_volume=self.volumes[mapindex.volume],
|
|
386
|
+
label=mapindex.label,
|
|
387
|
+
fragment=mapindex.fragment,
|
|
388
|
+
)
|
|
389
|
+
|
|
390
|
+
def fetch(
|
|
391
|
+
self,
|
|
392
|
+
region: Union[str, "Region"] = None,
|
|
393
|
+
*,
|
|
394
|
+
index: MapIndex = None,
|
|
395
|
+
**fetch_kwargs
|
|
396
|
+
):
|
|
397
|
+
"""
|
|
398
|
+
Fetches one particular volume of this parcellation map.
|
|
399
|
+
|
|
400
|
+
If there's only one volume, this is the default, otherwise further
|
|
401
|
+
specification is requested:
|
|
402
|
+
- the volume index,
|
|
403
|
+
- the MapIndex (which results in a regional map being returned)
|
|
404
|
+
|
|
405
|
+
You might also consider fetch_iter() to iterate the volumes, or
|
|
406
|
+
compress() to produce a single-volume parcellation map.
|
|
407
|
+
|
|
408
|
+
Parameters
|
|
409
|
+
----------
|
|
410
|
+
region: str, Region
|
|
411
|
+
Specification of a region name, resulting in a regional map
|
|
412
|
+
(mask or statistical map) to be returned.
|
|
413
|
+
index: MapIndex
|
|
414
|
+
Explicit specification of the map index, typically resulting
|
|
415
|
+
in a regional map (mask or statistical map) to be returned.
|
|
416
|
+
Note that supplying 'region' will result in retrieving the map index of that region
|
|
417
|
+
automatically.
|
|
418
|
+
**fetch_kwargs
|
|
419
|
+
- resolution_mm: resolution in millimeters
|
|
420
|
+
- format: the format of the volume, like "mesh" or "nii"
|
|
421
|
+
- voi: a BoundingBox of interest
|
|
422
|
+
|
|
423
|
+
|
|
424
|
+
Note
|
|
425
|
+
----
|
|
426
|
+
Not all keyword arguments are supported for volume formats. Format
|
|
427
|
+
is restricted by available formats (check formats property).
|
|
428
|
+
|
|
429
|
+
Returns
|
|
430
|
+
-------
|
|
431
|
+
An image or mesh
|
|
432
|
+
"""
|
|
433
|
+
vol = self.get_volume(region=region, index=index, **fetch_kwargs)
|
|
434
|
+
return vol.fetch(**fetch_kwargs)
|
|
435
|
+
|
|
436
|
+
def fetch_iter(self, **kwargs):
|
|
437
|
+
"""
|
|
438
|
+
Returns an iterator to fetch all mapped volumes sequentially.
|
|
439
|
+
|
|
440
|
+
All arguments are passed on to function Map.fetch(). By default, it
|
|
441
|
+
will go through all fragments as well.
|
|
442
|
+
"""
|
|
443
|
+
fragments = {kwargs.pop('fragment', None)} or self.fragments or {None}
|
|
444
|
+
return (
|
|
445
|
+
self.fetch(
|
|
446
|
+
index=MapIndex(volume=i, label=None, fragment=frag), **kwargs
|
|
447
|
+
)
|
|
448
|
+
for frag in fragments
|
|
449
|
+
for i in range(len(self))
|
|
450
|
+
)
|
|
451
|
+
|
|
452
|
+
@property
|
|
453
|
+
def provides_image(self):
|
|
454
|
+
return any(v.provides_image for v in self.volumes)
|
|
455
|
+
|
|
456
|
+
@property
|
|
457
|
+
def fragments(self):
|
|
458
|
+
return {
|
|
459
|
+
index.fragment
|
|
460
|
+
for indices in self._indices.values()
|
|
461
|
+
for index in indices
|
|
462
|
+
if index.fragment is not None
|
|
463
|
+
}
|
|
464
|
+
|
|
465
|
+
@property
|
|
466
|
+
def provides_mesh(self):
|
|
467
|
+
return any(v.provides_mesh for v in self.volumes)
|
|
468
|
+
|
|
469
|
+
@property
|
|
470
|
+
def formats(self):
|
|
471
|
+
return {f for v in self.volumes for f in v.formats}
|
|
472
|
+
|
|
473
|
+
@property
|
|
474
|
+
def is_labelled(self):
|
|
475
|
+
return self.maptype == MapType.LABELLED
|
|
476
|
+
|
|
477
|
+
@property
|
|
478
|
+
def affine(self):
|
|
479
|
+
if self._affine_cached is None:
|
|
480
|
+
# we compute the affine from a volumetric volume provider
|
|
481
|
+
for fmt in _volume.Volume.SUPPORTED_FORMATS:
|
|
482
|
+
if fmt not in _volume.Volume.MESH_FORMATS:
|
|
483
|
+
if fmt not in self.formats:
|
|
484
|
+
continue
|
|
485
|
+
try:
|
|
486
|
+
self._affine_cached = self.fetch(index=MapIndex(volume=0), format=fmt).affine
|
|
487
|
+
break
|
|
488
|
+
except Exception:
|
|
489
|
+
logger.debug("Caught exceptions:\n", exc_info=1)
|
|
490
|
+
continue
|
|
491
|
+
else:
|
|
492
|
+
raise RuntimeError(f"No volumetric provider in {self} to derive the affine matrix.")
|
|
493
|
+
if not isinstance(self._affine_cached, np.ndarray):
|
|
494
|
+
logger.error("invalid affine:", self._affine_cached)
|
|
495
|
+
return self._affine_cached
|
|
496
|
+
|
|
497
|
+
    def __iter__(self):
        """Iterate over all mapped volumes; equivalent to fetch_iter() with defaults."""
        return self.fetch_iter()
|
|
499
|
+
|
|
500
|
+
def compress(self, **kwargs):
|
|
501
|
+
"""
|
|
502
|
+
Converts this map into a labelled 3D parcellation map, obtained by
|
|
503
|
+
taking the voxelwise maximum across the mapped volumes and fragments,
|
|
504
|
+
and re-labelling regions sequentially.
|
|
505
|
+
|
|
506
|
+
Paramaters
|
|
507
|
+
----------
|
|
508
|
+
**kwargs: Takes the fetch arguments of its space's template.
|
|
509
|
+
|
|
510
|
+
Returns
|
|
511
|
+
-------
|
|
512
|
+
parcellationmap.Map
|
|
513
|
+
"""
|
|
514
|
+
if len(self.volumes) == 1 and not self.fragments:
|
|
515
|
+
raise RuntimeError("The map cannot be merged since there are no multiple volumes or fragments.")
|
|
516
|
+
|
|
517
|
+
# initialize empty volume according to the template
|
|
518
|
+
template_img = self.space.get_template().fetch(**kwargs)
|
|
519
|
+
result_arr = np.zeros_like(np.asanyarray(template_img.dataobj))
|
|
520
|
+
result_affine = template_img.affine
|
|
521
|
+
voxelwise_max = np.zeros_like(result_arr)
|
|
522
|
+
interpolation = 'nearest' if self.is_labelled else 'linear'
|
|
523
|
+
next_labelindex = 1
|
|
524
|
+
region_indices = defaultdict(list)
|
|
525
|
+
|
|
526
|
+
for volidx in siibra_tqdm(
|
|
527
|
+
range(len(self.volumes)), total=len(self.volumes), unit='maps',
|
|
528
|
+
desc=f"Compressing {len(self.volumes)} {self.maptype.name.lower()} volumes into single-volume parcellation",
|
|
529
|
+
disable=(len(self.volumes) == 1)
|
|
530
|
+
):
|
|
531
|
+
for frag in siibra_tqdm(
|
|
532
|
+
self.fragments, total=len(self.fragments), unit='maps',
|
|
533
|
+
desc=f"Compressing {len(self.fragments)} {self.maptype.name.lower()} fragments into single-fragment parcellation",
|
|
534
|
+
disable=(len(self.fragments) == 1 or self.fragments is None)
|
|
535
|
+
):
|
|
536
|
+
mapindex = MapIndex(volume=volidx, fragment=frag)
|
|
537
|
+
img = self.fetch(index=mapindex)
|
|
538
|
+
if np.allclose(img.affine, result_affine):
|
|
539
|
+
img_data = np.asanyarray(img.dataobj)
|
|
540
|
+
else:
|
|
541
|
+
logger.debug(f"Compression requires to resample volume {volidx} ({interpolation})")
|
|
542
|
+
img_data = np.asanyarray(
|
|
543
|
+
resample_img_to_img(img, template_img).dataobj
|
|
544
|
+
)
|
|
545
|
+
|
|
546
|
+
if self.is_labelled:
|
|
547
|
+
labels = set(np.unique(img_data)) - {0}
|
|
548
|
+
else:
|
|
549
|
+
labels = {None}
|
|
550
|
+
|
|
551
|
+
for label in labels:
|
|
552
|
+
with QUIET:
|
|
553
|
+
mapindex.__setattr__("label", int(label))
|
|
554
|
+
region = self.get_region(index=mapindex)
|
|
555
|
+
if region is None:
|
|
556
|
+
logger.warning(f"Label index {label} is observed in map volume {self}, but no region is defined for it.")
|
|
557
|
+
continue
|
|
558
|
+
region_indices[region.name].append({"volume": 0, "label": next_labelindex})
|
|
559
|
+
if label is None:
|
|
560
|
+
update_voxels = (img_data > voxelwise_max)
|
|
561
|
+
else:
|
|
562
|
+
update_voxels = (img_data == label)
|
|
563
|
+
result_arr[update_voxels] = next_labelindex
|
|
564
|
+
voxelwise_max[update_voxels] = img_data[update_voxels]
|
|
565
|
+
next_labelindex += 1
|
|
566
|
+
|
|
567
|
+
return Map(
|
|
568
|
+
identifier=f"{create_key(self.name)}_compressed",
|
|
569
|
+
name=f"{self.name} compressed",
|
|
570
|
+
space_spec=self._space_spec,
|
|
571
|
+
parcellation_spec=self._parcellation_spec,
|
|
572
|
+
indices=region_indices,
|
|
573
|
+
volumes=[_volume.from_array(
|
|
574
|
+
result_arr, result_affine, self._space_spec, name=self.name + " compressed"
|
|
575
|
+
)]
|
|
576
|
+
)
|
|
577
|
+
|
|
578
|
+
def compute_centroids(self, split_components: bool = True, **fetch_kwargs) -> Dict[str, pointcloud.PointCloud]:
|
|
579
|
+
"""
|
|
580
|
+
Compute a dictionary of all regions in this map to their centroids.
|
|
581
|
+
By default, the regional masks will be split to connected components
|
|
582
|
+
and each point in the PointCloud corresponds to a region component.
|
|
583
|
+
|
|
584
|
+
Parameters
|
|
585
|
+
----------
|
|
586
|
+
split_components: bool, default: True
|
|
587
|
+
If True, finds the spatial properties for each connected component
|
|
588
|
+
found by skimage.measure.label.
|
|
589
|
+
|
|
590
|
+
Returns
|
|
591
|
+
-------
|
|
592
|
+
Dict[str, point.Point]
|
|
593
|
+
Region names as keys and computed centroids as items.
|
|
594
|
+
"""
|
|
595
|
+
assert self.provides_image, "Centroid computation for meshes is not supported yet."
|
|
596
|
+
centroids = dict()
|
|
597
|
+
for regionname, indexlist in siibra_tqdm(
|
|
598
|
+
self._indices.items(), unit="regions", desc="Computing centroids"
|
|
599
|
+
):
|
|
600
|
+
assert regionname not in centroids
|
|
601
|
+
# get the mask of the region in this map
|
|
602
|
+
with QUIET:
|
|
603
|
+
if len(indexlist) >= 1:
|
|
604
|
+
merged_volume = _volume.merge(
|
|
605
|
+
[
|
|
606
|
+
_volume.from_nifti(
|
|
607
|
+
self.fetch(index=index, **fetch_kwargs),
|
|
608
|
+
self.space,
|
|
609
|
+
f"{self.name} - {index}"
|
|
610
|
+
)
|
|
611
|
+
for index in indexlist
|
|
612
|
+
],
|
|
613
|
+
labels=[1] * len(indexlist)
|
|
614
|
+
)
|
|
615
|
+
mapimg = merged_volume.fetch()
|
|
616
|
+
elif len(indexlist) == 1:
|
|
617
|
+
index = indexlist[0]
|
|
618
|
+
mapimg = self.fetch(index=index, **fetch_kwargs) # returns a mask of the region
|
|
619
|
+
props = _volume.ComponentSpatialProperties.compute_from_image(
|
|
620
|
+
img=mapimg,
|
|
621
|
+
space=self.space,
|
|
622
|
+
split_components=split_components,
|
|
623
|
+
)
|
|
624
|
+
try:
|
|
625
|
+
centroids[regionname] = pointcloud.from_points([c.centroid for c in props])
|
|
626
|
+
except exceptions.EmptyPointCloudError:
|
|
627
|
+
centroids[regionname] = None
|
|
628
|
+
return centroids
|
|
629
|
+
|
|
630
|
+
def get_resampled_template(self, **fetch_kwargs) -> _volume.Volume:
|
|
631
|
+
"""
|
|
632
|
+
Resample the reference space template to fetched map image. Uses
|
|
633
|
+
nilearn.image.resample_to_img to resample the template.
|
|
634
|
+
|
|
635
|
+
Parameters
|
|
636
|
+
----------
|
|
637
|
+
**fetch_kwargs: takes the arguments of Map.fetch()
|
|
638
|
+
|
|
639
|
+
Returns
|
|
640
|
+
-------
|
|
641
|
+
Volume
|
|
642
|
+
"""
|
|
643
|
+
source_template = self.space.get_template().fetch()
|
|
644
|
+
map_image = self.fetch(**fetch_kwargs)
|
|
645
|
+
img = image.resample_to_img(source_template, map_image, interpolation='continuous')
|
|
646
|
+
return _volume.from_array(
|
|
647
|
+
data=img.dataobj,
|
|
648
|
+
affine=img.affine,
|
|
649
|
+
space=self.space,
|
|
650
|
+
name=f"{source_template} resampled to coordinate system of {self}"
|
|
651
|
+
)
|
|
652
|
+
|
|
653
|
+
def colorize(self, values: dict, **kwargs) -> _volume.Volume:
|
|
654
|
+
"""Colorize the map with the provided regional values.
|
|
655
|
+
|
|
656
|
+
Parameters
|
|
657
|
+
----------
|
|
658
|
+
values : dict
|
|
659
|
+
Dictionary mapping regions to values
|
|
660
|
+
|
|
661
|
+
Return
|
|
662
|
+
------
|
|
663
|
+
Nifti1Image
|
|
664
|
+
"""
|
|
665
|
+
|
|
666
|
+
result = None
|
|
667
|
+
for volidx, vol in enumerate(self.fetch_iter(**kwargs)):
|
|
668
|
+
if isinstance(vol, dict):
|
|
669
|
+
raise NotImplementedError("Map colorization not yet implemented for meshes.")
|
|
670
|
+
img = np.asanyarray(vol.dataobj)
|
|
671
|
+
maxarr = np.zeros_like(img)
|
|
672
|
+
for r, value in values.items():
|
|
673
|
+
index = self.get_index(r)
|
|
674
|
+
if index.volume != volidx:
|
|
675
|
+
continue
|
|
676
|
+
if result is None:
|
|
677
|
+
result = np.zeros_like(img)
|
|
678
|
+
affine = vol.affine
|
|
679
|
+
if index.label is None:
|
|
680
|
+
updates = img > maxarr
|
|
681
|
+
result[updates] = value
|
|
682
|
+
maxarr[updates] = img[updates]
|
|
683
|
+
else:
|
|
684
|
+
result[img == index.label] = value
|
|
685
|
+
|
|
686
|
+
return _volume.from_array(
|
|
687
|
+
data=result,
|
|
688
|
+
affine=affine,
|
|
689
|
+
space=self.space,
|
|
690
|
+
name=f"Custom colorization of {self}"
|
|
691
|
+
)
|
|
692
|
+
|
|
693
|
+
def get_colormap(self, region_specs: Iterable = None):
    """
    Generate a matplotlib colormap from known rgb values of label indices.

    Parameters
    ----------
    region_specs: iterable(regions), optional
        Optional parameter to only color the desired regions.

    Returns
    -------
    ListedColormap

    Raises
    ------
    ValueError
        If no region with a known rgb color matches the selection.
    """
    # matplotlib is imported lazily so it stays an optional dependency
    from matplotlib.colors import ListedColormap

    if region_specs is not None:
        include_region_names = {
            self.parcellation.get_region(region_spec).name
            for region_spec in region_specs
        }
    else:
        include_region_names = None

    colors = {}
    for regionname, indices in self._indices.items():
        for index in indices:
            if index.label is None:
                # only labelled indices can appear in a discrete colormap
                continue
            if (include_region_names is not None) and (regionname not in include_region_names):
                continue
            region = self.get_region(index=index)
            if region.rgb is not None:
                colors[index.label] = region.rgb

    if not colors:
        # max() below would otherwise fail with an opaque "empty sequence" error
        raise ValueError(
            "No region with a known rgb color matches the given specification; "
            "cannot build a colormap."
        )

    # Build an RGBA palette indexed by label; labels without a known color
    # become transparent black. RGB channels are scaled from 0-255 to 0-1
    # as expected by matplotlib; alpha is kept at 1 for colored labels.
    palette = np.array(
        [
            list(colors[i]) + [1] if i in colors else [0, 0, 0, 0]
            for i in range(max(colors.keys()) + 1)
        ]
    ) / [255, 255, 255, 1]
    return ListedColormap(palette)
|
|
736
|
+
|
|
737
|
+
def sample_locations(self, regionspec, numpoints: int):
    """Sample 3D locations inside a given region.

    The probability distribution is approximated from the region mask based
    on the squared distance transform.

    Parameters
    ----------
    regionspec: Region or str
        Region to be used
    numpoints: int
        Number of samples to draw

    Returns
    -------
    PointCloud
        Sample points in physical coordinates corresponding to this
        parcellationmap
    """
    index = self.get_index(regionspec)
    mask = self.fetch(index=index)
    arr = np.asanyarray(mask.dataobj)
    if arr.dtype.char in np.typecodes['AllInteger']:
        # a binary mask - use the squared distance transform as sampling weights
        # (reuse `arr` instead of re-materializing mask.dataobj a second time)
        W = distance_transform_edt(arr) ** 2
    else:
        # a statistical map - interpret values directly as weights
        W = arr
    # normalize to a flat probability vector and draw voxel indices from it
    p = (W / W.sum()).ravel()
    XYZ_ = np.array(
        np.unravel_index(np.random.choice(len(p), numpoints, p=p), W.shape)
    ).T
    # map voxel indices to physical coordinates via the mask's affine
    XYZ = np.dot(mask.affine, np.c_[XYZ_, np.ones(numpoints)].T)[:3, :].T
    return pointcloud.PointCloud(XYZ, space=self.space)
|
|
771
|
+
|
|
772
|
+
def to_sparse(self):
    """
    Creates a SparseMap object from this parcellation map object.

    Returns
    -------
    SparseMap
    """
    from .sparsemap import SparseMap

    # Serialize the region -> map-index association into plain dictionaries,
    # as expected by the SparseMap constructor.
    index_specs = {}
    for regionname, indexlist in self._indices.items():
        index_specs[regionname] = [
            {'volume': idx.volume, 'label': idx.label, 'fragment': idx.fragment}
            for idx in indexlist
        ]

    return SparseMap(
        identifier=self.id,
        name=self.name,
        space_spec={'@id': self.space.id},
        parcellation_spec={'@id': self.parcellation.id},
        indices=index_specs,
        volumes=self.volumes,
        shortname=self.shortname,
        description=self.description,
        modality=self.modality,
        publications=self.publications,
        datasets=self.datasets
    )
|
|
801
|
+
|
|
802
|
+
def _read_voxel(
    self,
    x: Union[int, np.ndarray, List],
    y: Union[int, np.ndarray, List],
    z: Union[int, np.ndarray, List]
):
    """Read map values at the given voxel coordinates across all volumes.

    Returns a list of (point index, volume index, fragment, value) tuples;
    coordinates falling outside a volume's bounds are silently skipped.
    """
    def _sample_volume(xyz, volimg):
        # Stack the coordinate arrays into an (N, 3) index array and keep
        # only the points that lie inside the volume's bounds.
        stacked = np.stack(xyz, axis=1)
        inside = np.all(
            [(coords >= 0) & (coords < extent)
             for extent, coords in zip(volimg.shape, stacked.T)],
            axis=0,
        )
        ix, iy, iz = stacked[inside].T
        kept_indices, *_ = np.where(inside)
        sampled = np.asanyarray(volimg.dataobj)[ix, iy, iz]
        return zip(kept_indices, sampled)

    # Integers are just single-element arrays; casting avoids a separate
    # code branch for scalar input.
    x, y, z = (np.array(coord) for coord in (x, y, z))

    fragments = self.fragments or {None}
    readouts = []
    for fragment in fragments:
        for volume, volimg in enumerate(self.fetch_iter(fragment=fragment)):
            # Transformations or user input might place points outside the
            # volume; _sample_volume filters these out.
            for pointindex, data_point in _sample_volume((x, y, z), volimg):
                readouts.append((pointindex, volume, fragment, data_point))
    return readouts
|
|
827
|
+
|
|
828
|
+
def _assign(
    self,
    item: location.Location,
    minsize_voxel=1,
    lower_threshold=0.0,
    **kwargs
) -> List[Union[MapAssignment, AssignImageResult]]:
    """
    For internal use only. Returns a dataclass, which provides better static type checking.
    """
    # Dispatch on the concrete location type. A single point is wrapped into
    # a one-element PointCloud so both cases share the same code path.
    if isinstance(item, point.Point):
        singleton = pointcloud.PointCloud([item], item.space, sigma_mm=item.sigma)
        return self._assign_points(singleton, lower_threshold)

    if isinstance(item, pointcloud.PointCloud):
        return self._assign_points(item, lower_threshold)

    if isinstance(item, _volume.Volume):
        return self._assign_volume(
            queryvolume=item,
            minsize_voxel=minsize_voxel,
            lower_threshold=lower_threshold,
            **kwargs
        )

    raise RuntimeError(
        f"Items of type {item.__class__.__name__} cannot be used for region assignment."
    )
|
|
857
|
+
|
|
858
|
+
def assign(
    self,
    item: location.Location,
    minsize_voxel=1,
    lower_threshold=0.0,
    **kwargs
) -> "pd.DataFrame":
    """Assign an input Location to brain regions.

    The input is assumed to be defined in the same coordinate space
    as this parcellation map.

    Parameters
    ----------
    item: Location
        A spatial object defined in the same physical reference space as
        this parcellation map, which could be a point, set of points, or
        image volume. If it is an image, it will be resampled to the same voxel
        space if its affine transformation differs from that of the
        parcellation map. Resampling will use linear interpolation for float
        image types, otherwise nearest neighbor.
    minsize_voxel: int, default: 1
        Minimum voxel size of image components to be taken into account.
    lower_threshold: float, default: 0
        Lower threshold on values in the statistical map. Values smaller
        than this threshold will be excluded from the assignment computation.

    Returns
    -------
    pandas.DataFrame
        A table of associated regions and their scores per component found
        in the input image, or per coordinate provided. The scores are:

        - Value: Maximum value of the voxels in the map covered by an
          input coordinate or input image signal component.
        - Pearson correlation coefficient between the brain region map
          and an input image signal component (NaN for exact coordinates)
        - Contains: Percentage of the brain region map contained in an
          input image signal component, measured from their binarized
          masks as the ratio between the volume of their intersection
          and the volume of the brain region (NaN for exact coordinates)
        - Contained: Percentage of an input image signal component
          contained in the brain region map, measured from their binary
          masks as the ratio between the volume of their intersection and
          the volume of the input image signal component (NaN for exact
          coordinates)
    """
    assignments = self._assign(item, minsize_voxel, lower_threshold, **kwargs)

    columns = [
        "input structure",
        "centroid",
        "volume",
        "fragment",
        "region",
        "correlation",
        "intersection over union",
        "map value",
        "map weighted mean",
        "map containedness",
        "input weighted mean",
        "input containedness"
    ]
    if not assignments:
        return pd.DataFrame(columns=columns)

    labelled = self.is_labelled  # avoid repeated property access in the loops

    def _map_index_key(a):
        # Key identifying the map index an assignment refers to. For labelled
        # maps the map value is the region label; otherwise it is irrelevant
        # for the region lookup.
        return (a.volume, a.fragment, a.map_value if labelled else None)

    # Resolve each distinct map index to its region only once; the number of
    # distinct indices is typically much smaller than len(assignments).
    region_lut = {}
    for a in assignments:
        key = _map_index_key(a)
        if key not in region_lut:
            vol, frag, lbl = key
            region_lut[key] = self.get_region(
                index=MapIndex(
                    volume=int(vol),
                    label=lbl if lbl is None else int(lbl),
                    fragment=frag
                )
            )

    rows = []
    for a in assignments:
        row = {
            "input structure": a.input_structure,
            "centroid": a.centroid,
            "volume": a.volume,
            "fragment": a.fragment,
            "region": region_lut[_map_index_key(a)],
        }
        # AssignImageResult is a subclass of MapAssignment, so it must be
        # tested first.
        if isinstance(a, AssignImageResult):
            row.update({
                "correlation": a.correlation,
                "intersection over union": a.intersection_over_union,
                "map value": a.map_value,
                "map weighted mean": a.weighted_mean_of_first,
                "map containedness": a.intersection_over_first,
                "input weighted mean": a.weighted_mean_of_second,
                "input containedness": a.intersection_over_second,
            })
        elif isinstance(a, MapAssignment):
            row.update({
                "correlation": None,
                "intersection over union": None,
                "map value": a.map_value,
                "map weighted mean": None,
                "map containedness": None,
                "input weighted mean": None,
                "input containedness": None,
            })
        else:
            raise RuntimeError("assignments must be of type Assignment or AssignImageResult!")
        rows.append(row)

    return (
        pd.DataFrame(rows)
        .convert_dtypes()  # convert will guess numeric column types
        .reindex(columns=columns)
        .dropna(axis='columns', how='all')
    )
|
|
998
|
+
|
|
999
|
+
def _assign_points(self, points: pointcloud.PointCloud, lower_threshold: float) -> List[MapAssignment]:
    """
    assign a PointCloud to this parcellation map.

    Parameters
    -----------
    points: PointCloud
        Points to assign. Warped into this map's space before readout
        when their space differs.
    lower_threshold: float, default: 0
        Lower threshold on values in the statistical map. Values smaller than
        this threshold will be excluded from the assignment computation.

    Returns
    -------
    List[MapAssignment]
        One entry per (point, matching map index) with the map value read
        out at (or around) the point.
    """
    assignments = []

    if points.space != self.space:
        logger.info(
            f"Coordinates will be converted from {points.space.name} "
            f"to {self.space.name} space for assignment."
        )
    # convert sigma to voxel coordinates
    # (mean column norm of the affine approximates the voxel size;
    # assumes near-isotropic voxels - TODO confirm)
    scaling = np.array(
        [np.linalg.norm(self.affine[:, i]) for i in range(3)]
    ).mean()
    phys2vox = np.linalg.inv(self.affine)

    # if all points have the same sigma, and lead to a standard deviation
    # below 3 voxels, we are much faster with a multi-coordinate readout.
    if points.has_constant_sigma:
        sigma_vox = points.sigma[0] / scaling
        if sigma_vox < 3:
            pts_warped = points.warp(self.space.id)
            # +0.5 before the int cast rounds to the nearest voxel
            # (for non-negative voxel coordinates)
            X, Y, Z = (np.dot(phys2vox, pts_warped.homogeneous.T) + 0.5).astype("int")[:3]
            for pointindex, vol, frag, value in self._read_voxel(X, Y, Z):
                if value > lower_threshold:
                    position = pts_warped[pointindex].coordinate
                    assignments.append(
                        MapAssignment(
                            input_structure=pointindex,
                            centroid=tuple(np.array(position).round(2)),
                            volume=vol,
                            fragment=frag,
                            map_value=value
                        )
                    )
            return assignments

    # if we get here, we need to handle each point independently.
    # This is much slower but more precise in dealing with the uncertainties
    # of the coordinates.
    for pointindex, pt in siibra_tqdm(
        enumerate(points.warp(self.space.id)),
        total=len(points), desc="Assigning points",
    ):
        sigma_vox = pt.sigma / scaling
        if sigma_vox < 3:
            # voxel-precise - just read out the value in the maps
            N = len(self)
            logger.debug(f"Assigning coordinate {tuple(pt)} to {N} maps")
            x, y, z = (np.dot(phys2vox, pt.homogeneous) + 0.5).astype("int")[:3]
            values = self._read_voxel(x, y, z)
            for _, vol, frag, value in values:
                if value > lower_threshold:
                    assignments.append(
                        MapAssignment(
                            input_structure=pointindex,
                            centroid=tuple(pt),
                            volume=vol,
                            fragment=frag,
                            map_value=value
                        )
                    )
        else:
            # uncertain point: model it as a Gaussian blob image and
            # delegate to volume assignment
            logger.debug(
                f"Assigning uncertain coordinate {tuple(pt)} to {len(self)} maps."
            )
            kernel = create_gaussian_kernel(sigma_vox, 3)
            r = int(kernel.shape[0] / 2)  # effective radius
            assert pt.homogeneous.shape[0] == 1
            xyz_vox = (np.dot(phys2vox, pt.homogeneous.T) + 0.5).astype("int")
            # translate the kernel so it is centered on the point's voxel
            shift = np.identity(4)
            shift[:3, -1] = xyz_vox[:3, 0] - r
            # build niftiimage with the Gaussian blob,
            # then recurse into this method with the image input
            gaussian_kernel = _volume.from_array(
                data=kernel,
                affine=np.dot(self.affine, shift),
                space=self.space,
                name=f"Gaussian kernel of {pt}"
            )
            for entry in self._assign(
                item=gaussian_kernel,
                lower_threshold=lower_threshold,
                split_components=False
            ):
                # re-attribute the volume-assignment result to this point
                entry.input_structure = pointindex
                entry.centroid = tuple(pt)
                assignments.append(entry)
    return assignments
|
|
1095
|
+
|
|
1096
|
+
def _assign_volume(
    self,
    queryvolume: "_volume.Volume",
    lower_threshold: float,
    split_components: bool = True,
    **kwargs
) -> List[AssignImageResult]:
    """
    Assign an image volume to this parcellation map.

    Parameters
    -----------
    queryvolume: Volume
        the volume to be compared with maps
    lower_threshold: float, default: 0
        Lower threshold on values in the statistical map. Values smaller than
        this threshold will be excluded from the assignment computation.
    split_components: bool, default: True
        Whether to split the query volume into disjoint components.
    """
    # TODO: split_components is not known to `assign`
    # TODO: `minsize_voxel` is not used here. Consider the implementation of `assign` again.
    if kwargs:
        logger.info(f"The keywords {[k for k in kwargs]} are not passed on during volume assignment.")

    assert queryvolume.space == self.space, ValueError("Assigned volume must be in the same space as the map.")

    def iter_components(arr):
        # Either enumerate disjoint connected components, or treat the
        # whole array as a single component with id 0.
        if split_components:
            return connected_components(arr)
        return [(0, arr)]

    queryimg = queryvolume.fetch()
    results = []
    all_indices = [
        index
        for indexlist in self._indices.values()
        for index in indexlist
    ]
    with QUIET and provider.SubvolumeProvider.UseCaching():
        progress = siibra_tqdm(
            all_indices,
            desc=f"Assigning {queryvolume} to {self}",
            disable=len(all_indices) < 5,
            unit="map",
            leave=False
        )
        for index in progress:
            region_map = self.fetch(index=index)
            region_arr = np.asanyarray(region_map.dataobj)
            # the shape and affine are checked by `nilearn.image.resample_to_img()`
            # and the original data is returned if resampling is not necessary.
            resampled_query = np.asanyarray(
                resample_img_to_img(queryimg, region_map).dataobj
            )
            for component_id, component_mask in iter_components(resampled_query):
                scores = compare_arrays(
                    component_mask,
                    region_map.affine,  # after resampling, both share the same affine
                    region_arr,
                    region_map.affine
                )
                if scores.intersection_over_union <= lower_threshold:
                    continue
                # centroid of the component in voxel coordinates
                centroid_vox = np.array(np.where(component_mask)).T.mean(0)
                results.append(
                    AssignImageResult(
                        input_structure=component_id,
                        centroid=tuple(centroid_vox.round(2)),
                        volume=index.volume,
                        fragment=index.fragment,
                        map_value=index.label,
                        **asdict(scores)
                    )
                )

    return results
|
|
1173
|
+
|
|
1174
|
+
|
|
1175
|
+
def from_volume(
    name: str,
    volume: Union[_volume.Volume, List[_volume.Volume]],
    regionnames: List[str],
    regionlabels: List[int],
    parcellation_spec: Union[str, "parcellation.Parcellation"] = None
) -> 'Map':
    """
    Add a custom labelled parcellation map to siibra from a labelled NIfTI file.

    Parameters
    ------------
    name: str
        Human-readable name of the parcellation.
    volume: Volume, or a list of Volumes.
        The labelled volume(s) making up the map; all must be defined
        in the same reference space.
    regionnames: list[str]
        List of human-readable names of the mapped regions.
    regionlabels: list[int]
        List of integer labels in the nifti file corresponding to the list of regions.
    parcellation_spec: str or Parcellation. Optional.
        If the related parcellation already defined or preconfigured in siibra.
        Defaults to `name`; a new parcellation is built when the
        specification cannot be decoded.

    Returns
    -------
    Map
        The newly built map, which is also added to siibra's map registry.
    """
    # providers and map indices
    providers = []
    volumes = volume if isinstance(volume, list) else [volume]
    map_space = volumes[0].space
    assert all(v.space == map_space for v in volumes), "Volumes have to be in the same space"
    # The region -> index mapping must accumulate across volumes; previously it
    # was re-initialized inside the loop, discarding entries of earlier volumes.
    indices = dict()
    for vol_idx, vol in enumerate(volumes):
        image = vol.fetch()
        arr = np.asanyarray(image.dataobj)
        # all nonzero labels occurring in this volume (the first unique value is background)
        labels_in_volume = np.unique(arr)[1:].astype('int')

        # populate region indices from given name/label lists
        for label, regionname in zip(regionlabels, regionnames):
            if label not in labels_in_volume:
                logger.warning(
                    f"Label {label} not mapped in the provided NIfTI volume -> "
                    f"region '{regionname}' will not be in the map."
                )
            elif label in [v[0]['label'] for v in indices.values() if v[0]['volume'] == vol_idx]:
                logger.warning(f"Label {label} already defined in the same volume; will not map it to '{regionname}'.")
            else:
                assert regionname not in indices, f"'{regionname}' must be unique in `regionnames`."
                indices[regionname] = [{'volume': vol_idx, 'label': label}]

        # check for any remaining labels in the NIfTI volume
        unnamed_labels = list(set(labels_in_volume) - set(regionlabels))
        if unnamed_labels:
            logger.warning(
                f"The following labels appear in the NIfTI volume {vol_idx}, but not in "
                f"the specified regions: {', '.join(str(lb) for lb in unnamed_labels)}. "
                "They will be removed from the nifti volume."
            )
            # NOTE(review): this zeroes the in-memory array fetched above, but the
            # providers collected below still serve the original data - confirm
            # whether the removal is expected to propagate to the stored volume.
            for label in unnamed_labels:
                arr[arr == label] = 0
        providers.extend(vol._providers.values())

    # parcellation: reuse a preconfigured one when the spec can be decoded,
    # otherwise build a fresh parcellation from the given region names.
    if parcellation_spec is None:
        parcellation_spec = name
    try:
        parcobj = parcellation.Parcellation.registry().get(parcellation_spec)
        logger.info(f"Using '{parcellation_spec}', siibra decoded the parcellation as '{parcobj}'")
    except Exception:
        logger.info(
            f"Using '{parcellation_spec}', siibra could not decode the "
            " parcellation. Building a new parcellation."
        )
        # build a new parcellation
        parcobj = parcellation.Parcellation(
            identifier=generate_uuid(','.join(regionnames)),
            name=name,
            species=vol.space.species,
            regions=list(map(_region.Region, regionnames)),
        )
    if parcobj.key not in list(parcellation.Parcellation.registry()):
        parcellation.Parcellation.registry().add(parcobj.key, parcobj)

    for region in siibra_tqdm(
        indices.keys(),
        desc="Checking if provided regions are defined in the parcellation."
    ):
        try:
            _ = parcobj.get_region(region)
        except Exception:
            logger.warning(f"'{region}' is missing in the parcellation.")

    # build the parcellation map object
    parcmap = Map(
        identifier=generate_uuid(name),
        name=f"{name} map in {map_space.name}",
        space_spec={"@id": map_space.id},
        parcellation_spec={'name': parcobj.name},
        indices=indices,
        volumes=volumes
    )

    # add it to siibra's registry
    Map.registry().add(parcmap.key, parcmap)

    # return the map - note that it has a pointer to the parcellation
    return parcmap
|