siibra 1.0a1__1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of siibra might be problematic. Click here for more details.
- siibra/VERSION +1 -0
- siibra/__init__.py +164 -0
- siibra/commons.py +823 -0
- siibra/configuration/__init__.py +17 -0
- siibra/configuration/configuration.py +189 -0
- siibra/configuration/factory.py +589 -0
- siibra/core/__init__.py +16 -0
- siibra/core/assignment.py +110 -0
- siibra/core/atlas.py +239 -0
- siibra/core/concept.py +308 -0
- siibra/core/parcellation.py +387 -0
- siibra/core/region.py +1223 -0
- siibra/core/space.py +131 -0
- siibra/core/structure.py +111 -0
- siibra/exceptions.py +63 -0
- siibra/experimental/__init__.py +19 -0
- siibra/experimental/contour.py +61 -0
- siibra/experimental/cortical_profile_sampler.py +57 -0
- siibra/experimental/patch.py +98 -0
- siibra/experimental/plane3d.py +256 -0
- siibra/explorer/__init__.py +17 -0
- siibra/explorer/url.py +222 -0
- siibra/explorer/util.py +87 -0
- siibra/features/__init__.py +117 -0
- siibra/features/anchor.py +224 -0
- siibra/features/connectivity/__init__.py +33 -0
- siibra/features/connectivity/functional_connectivity.py +57 -0
- siibra/features/connectivity/regional_connectivity.py +494 -0
- siibra/features/connectivity/streamline_counts.py +27 -0
- siibra/features/connectivity/streamline_lengths.py +27 -0
- siibra/features/connectivity/tracing_connectivity.py +30 -0
- siibra/features/dataset/__init__.py +17 -0
- siibra/features/dataset/ebrains.py +90 -0
- siibra/features/feature.py +970 -0
- siibra/features/image/__init__.py +27 -0
- siibra/features/image/image.py +115 -0
- siibra/features/image/sections.py +26 -0
- siibra/features/image/volume_of_interest.py +88 -0
- siibra/features/tabular/__init__.py +24 -0
- siibra/features/tabular/bigbrain_intensity_profile.py +77 -0
- siibra/features/tabular/cell_density_profile.py +298 -0
- siibra/features/tabular/cortical_profile.py +322 -0
- siibra/features/tabular/gene_expression.py +257 -0
- siibra/features/tabular/layerwise_bigbrain_intensities.py +62 -0
- siibra/features/tabular/layerwise_cell_density.py +95 -0
- siibra/features/tabular/receptor_density_fingerprint.py +192 -0
- siibra/features/tabular/receptor_density_profile.py +110 -0
- siibra/features/tabular/regional_timeseries_activity.py +294 -0
- siibra/features/tabular/tabular.py +139 -0
- siibra/livequeries/__init__.py +19 -0
- siibra/livequeries/allen.py +352 -0
- siibra/livequeries/bigbrain.py +197 -0
- siibra/livequeries/ebrains.py +145 -0
- siibra/livequeries/query.py +49 -0
- siibra/locations/__init__.py +91 -0
- siibra/locations/boundingbox.py +454 -0
- siibra/locations/location.py +115 -0
- siibra/locations/point.py +344 -0
- siibra/locations/pointcloud.py +349 -0
- siibra/retrieval/__init__.py +27 -0
- siibra/retrieval/cache.py +233 -0
- siibra/retrieval/datasets.py +389 -0
- siibra/retrieval/exceptions/__init__.py +27 -0
- siibra/retrieval/repositories.py +769 -0
- siibra/retrieval/requests.py +659 -0
- siibra/vocabularies/__init__.py +45 -0
- siibra/vocabularies/gene_names.json +29176 -0
- siibra/vocabularies/receptor_symbols.json +210 -0
- siibra/vocabularies/region_aliases.json +460 -0
- siibra/volumes/__init__.py +23 -0
- siibra/volumes/parcellationmap.py +1279 -0
- siibra/volumes/providers/__init__.py +20 -0
- siibra/volumes/providers/freesurfer.py +113 -0
- siibra/volumes/providers/gifti.py +165 -0
- siibra/volumes/providers/neuroglancer.py +736 -0
- siibra/volumes/providers/nifti.py +266 -0
- siibra/volumes/providers/provider.py +107 -0
- siibra/volumes/sparsemap.py +468 -0
- siibra/volumes/volume.py +892 -0
- siibra-1.0.0a1.dist-info/LICENSE +201 -0
- siibra-1.0.0a1.dist-info/METADATA +160 -0
- siibra-1.0.0a1.dist-info/RECORD +84 -0
- siibra-1.0.0a1.dist-info/WHEEL +5 -0
- siibra-1.0.0a1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,468 @@
|
|
|
1
|
+
# Copyright 2018-2024
|
|
2
|
+
# Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
|
|
3
|
+
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
"""Represents lists of probabilistic brain region maps."""
|
|
16
|
+
from . import parcellationmap, volume as _volume
|
|
17
|
+
|
|
18
|
+
from .providers import provider
|
|
19
|
+
from ..commons import MapIndex, logger, connected_components, siibra_tqdm
|
|
20
|
+
from ..locations import boundingbox
|
|
21
|
+
from ..retrieval.cache import CACHE
|
|
22
|
+
from ..retrieval.requests import HttpRequest, FileLoader
|
|
23
|
+
|
|
24
|
+
from os import path, makedirs
|
|
25
|
+
from typing import Dict, List
|
|
26
|
+
from nilearn import image
|
|
27
|
+
import numpy as np
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class SparseIndex:
    """
    Voxel-sparse representation of a list of 3D statistical volumes that
    share a single voxel grid (same shape and affine).

    Data layout:

    - ``voxels``: int32 volume where ``voxels[x, y, z] == i`` points into
      ``probs`` (``-1`` marks an unassigned voxel).
    - ``probs``: list of dicts, each mapping a volume index to the value
      observed at the corresponding voxel.
    - ``bboxes``: per-volume bounding boxes of nonzero voxels, as
      ``minpoint``/``maxpoint`` tuples in voxel coordinates.
    """

    # Precomputed sparse indices are stored in an EBRAINS data proxy
    _BUCKET = "https://data-proxy.ebrains.eu/api/v1/buckets/reference-atlas-data/"
    _DATAPROXY_BASEURL = _BUCKET + "sparse-indices/sparse-indices-siibra_python_v1.0/"

    _SUFFIXES = {
        "probs": ".sparseindex_v1.probs.txt.gz",
        "bboxes": ".sparseindex_v1.bboxes.txt.gz",
        "voxels": ".sparseindex_v1.voxels.nii.gz"
    }

    def __init__(self):
        self.probs = []
        self.bboxes = []

        # these are initialized when adding the first volume, see add_img()
        self.affine: np.ndarray = None
        self.shape = None
        self.voxels: np.ndarray = None

    def add_img(self, imgdata: np.ndarray, affine: np.ndarray):
        """
        Add one volumetric map to the sparse index.

        The first added volume defines the voxel grid; every subsequent
        volume must match its shape and affine exactly.

        Parameters
        ----------
        imgdata: np.ndarray
            Voxel data of the map. Only positive voxels are indexed.
            NOTE(review): a volume without any positive voxel raises on the
            min/max computation below — confirm callers never pass one.
        affine: np.ndarray
            Voxel-to-physical affine matrix of the map.

        Raises
        ------
        RuntimeError
            If shape or affine differ from the already indexed volumes.
        """
        if self.num_volumes == 0:
            self.affine = affine
            self.shape = imgdata.shape
            self.voxels = np.zeros(imgdata.shape, dtype=np.int32) - 1
        else:
            # Fix: compare the affines element-wise. The previous check used
            # (affine - self.affine).sum() != 0, where positive and negative
            # differences can cancel out and let mismatching affines pass.
            if (imgdata.shape != self.shape) or not np.array_equal(affine, self.affine):
                raise RuntimeError(
                    "Building sparse maps from volumes with different voxel spaces is not yet supported in siibra."
                )

        volume = self.num_volumes
        X, Y, Z = [v.astype("int32") for v in np.where(imgdata > 0)]
        for x, y, z, prob in zip(X, Y, Z, imgdata[X, Y, Z]):
            coord_id = self.voxels[x, y, z]
            if coord_id >= 0:
                # Coordinate already seen. Add observed value.
                assert volume not in self.probs[coord_id]
                assert len(self.probs) > coord_id
                self.probs[coord_id][volume] = prob
            else:
                # New coordinate. Append entry with observed value.
                coord_id = len(self.probs)
                self.voxels[x, y, z] = coord_id
                self.probs.append({volume: prob})

        self.bboxes.append(
            {
                "minpoint": (X.min(), Y.min(), Z.min()),
                "maxpoint": (X.max(), Y.max(), Z.max()),
            }
        )

    @property
    def num_volumes(self):
        # one bounding box is appended per added volume
        return len(self.bboxes)

    def max(self):
        # highest coordinate id stored in the voxel index
        return self.voxels.max()

    def coords(self, volume: int):
        # Nx3 array with x/y/z coordinates of the N nonzero values of the given mapindex
        assert volume in range(self.num_volumes)
        coord_ids = [i for i, l in enumerate(self.probs) if volume in l]
        x0, y0, z0 = self.bboxes[volume]["minpoint"]
        x1, y1, z1 = self.bboxes[volume]["maxpoint"]
        # search only inside the volume's bounding box, then shift back
        # to absolute voxel coordinates
        return (
            np.array(
                np.where(
                    np.isin(
                        self.voxels[x0: x1 + 1, y0: y1 + 1, z0: z1 + 1],
                        coord_ids,
                    )
                )
            ).T
            + (x0, y0, z0)
        ).T

    def mapped_voxels(self, volume: int):
        # returns the x, y, and z coordinates of nonzero voxels for the map
        # with the given index, together with their corresponding values v.
        assert volume in range(self.num_volumes)
        x, y, z = [v.squeeze() for v in np.split(self.coords(volume), 3)]
        v = [self.probs[i][volume] for i in self.voxels[x, y, z]]
        return x, y, z, v

    @classmethod
    def load(cls, filepath_or_url: str) -> "SparseIndex":
        """
        Loads a precomputed SparseIndex to the memory.

        Parameters
        ----------
        filepath_or_url: str
            Path/url to the SparseIndex files
            (eg. https://url_to_files/basefilename):
            - basefilename.sparseindex_v1.probs.txt.gz
            - basefilename.sparseindex_v1.bboxes.txt.gz
            - basefilename.sparseindex_v1.voxels.nii.gz

        Returns
        -------
        SparseIndex
        """
        from gzip import decompress

        def spindtxt_decoder(b):
            # decompress a gzipped text payload and split it into lines
            return decompress(b).decode('utf-8').strip().splitlines()

        probsfile = filepath_or_url + SparseIndex._SUFFIXES["probs"]
        bboxfile = filepath_or_url + SparseIndex._SUFFIXES["bboxes"]
        voxelfile = filepath_or_url + SparseIndex._SUFFIXES["voxels"]
        # read local files directly; otherwise fetch over HTTP
        if all(path.isfile(f) for f in [probsfile, bboxfile, voxelfile]):
            request = FileLoader
        else:
            request = HttpRequest

        result = cls()

        voxels = request(voxelfile).get()
        result.voxels = np.asanyarray(voxels.dataobj)
        result.affine = voxels.affine
        result.shape = voxels.shape

        # each line encodes one voxel as alternating "<volume> <value>" pairs
        lines_probs = request(probsfile, func=spindtxt_decoder).get()
        for line in siibra_tqdm(
            lines_probs,
            total=len(lines_probs),
            desc="Loading sparse index",
            unit="voxels"
        ):
            fields = line.strip().split(" ")
            mapindices = list(map(int, fields[0::2]))
            values = list(map(float, fields[1::2]))
            D = dict(zip(mapindices, values))
            result.probs.append(D)

        # each line encodes one bounding box as "x0 y0 z0 x1 y1 z1"
        lines_bboxes = request(bboxfile, func=spindtxt_decoder).get()
        for line in lines_bboxes:
            fields = line.strip().split(" ")
            result.bboxes.append({
                "minpoint": tuple(map(int, fields[:3])),
                "maxpoint": tuple(map(int, fields[3:])),
            })

        return result

    def save(self, base_filename: str, folder: str = ""):
        """
        Save the SparseIndex as three files under `folder`, named after
        `base_filename`.

        Parameters
        ----------
        base_filename: str
            The files that will be created as:
            - base_filename.sparseindex_v1.probs.txt.gz
            - base_filename.sparseindex_v1.bboxes.txt.gz
            - base_filename.sparseindex_v1.voxels.nii.gz

        folder: str, default=""
        """
        from nibabel import Nifti1Image
        import gzip

        fullpath = path.join(folder, base_filename)
        logger.info(f"Saving SparseIndex to '{base_filename}' with suffixes {SparseIndex._SUFFIXES}")

        if folder and not path.isdir(folder):
            makedirs(folder)

        Nifti1Image(self.voxels, self.affine).to_filename(
            fullpath + SparseIndex._SUFFIXES["voxels"]
        )
        with gzip.open(fullpath + SparseIndex._SUFFIXES["probs"], 'wt') as f:
            for D in self.probs:
                f.write(
                    "{}\n".format(
                        " ".join(f"{i} {p}" for i, p in D.items())
                    )
                )
        with gzip.open(fullpath + SparseIndex._SUFFIXES["bboxes"], "wt") as f:
            for bbox in self.bboxes:
                f.write(
                    "{} {}\n".format(
                        " ".join(map(str, bbox["minpoint"])),
                        " ".join(map(str, bbox["maxpoint"])),
                    )
                )
        logger.info(f"SparseIndex is saved to {fullpath}.")

    @classmethod
    def from_sparsemap(cls, sparsemap: "SparseMap") -> "SparseIndex":
        """Build a SparseIndex by fetching all volumetric maps of a SparseMap."""
        with provider.SubvolumeProvider.UseCaching():
            spind = cls()
            for img in siibra_tqdm(
                sparsemap.fetch_iter(), total=len(sparsemap), unit="maps",
                desc="Fetching volumetric maps and computing SparseIndex"
            ):
                spind.add_img(np.asanyarray(img.dataobj), img.affine)
            return spind
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
class SparseMap(parcellationmap.Map):
    """
    A sparse representation of list of statistical (e.g. probabilistic) brain
    region maps.

    It represents the 3D statistical maps of N brain regions by two data structures:

    1) 'spatial_index', a 3D volume where non-negative values represent unique indices into a list of region assignments
    2) 'probs', a list of region assignments where each entry is a dict

    More precisely, given ``i = sparse_index.voxels[x, y, z]`` we define that

    - if `i<0`, no brain region is assigned at this location
    - if `i>=0`, ``probs[i]`` defines the probabilities of brain regions.

    Each entry in probs is a dictionary that represents the region assignments for
    the unique voxel where ``spatial_index == i``. The assignment maps from a MapIndex
    to the actual (probability) value.
    """

    def __init__(
        self,
        identifier: str,
        name: str,
        space_spec: dict,
        parcellation_spec: dict,
        indices: Dict[str, MapIndex],
        volumes: list = None,
        shortname: str = "",
        description: str = "",
        modality: str = None,
        publications: list = None,
        datasets: list = None,
        prerelease: bool = False,
    ):
        # Fix: the list parameters previously used mutable default arguments
        # ([]), which are shared across all calls; use None sentinels and
        # materialize fresh lists here instead.
        parcellationmap.Map.__init__(
            self,
            identifier=identifier,
            name=name,
            space_spec=space_spec,
            parcellation_spec=parcellation_spec,
            indices=indices,
            shortname=shortname,
            description=description,
            modality=modality,
            publications=publications if publications is not None else [],
            datasets=datasets if datasets is not None else [],
            volumes=volumes if volumes is not None else [],
            prerelease=prerelease,
        )
        # lazily built by the sparse_index property
        self._sparse_index_cached = None

    @property
    def _cache_prefix(self):
        # filename prefix under which this map's SparseIndex is cached on disk
        return CACHE.build_filename(f"{self.parcellation.id}_{self.space.id}_{self.maptype}_{self.key.lower()}_index")

    @property
    def sparse_index(self):
        """
        The SparseIndex of this map, tried in order: local disk cache,
        precomputed download from the data proxy, and finally full
        computation from the volumetric maps (which is then cached).
        """
        if self._sparse_index_cached is None:
            try:  # try loading from cache on disk
                spind = SparseIndex.load(self._cache_prefix)
            except Exception:
                spind = None
            if spind is None:  # try from precomputed source
                try:
                    logger.info("Downloading and loading precomputed SparseIndex...")
                    spind = SparseIndex.load(SparseIndex._DATAPROXY_BASEURL + self.key.lower())
                except Exception:
                    logger.error("Failed to download precomputed SparseIndex.", exc_info=True)
            if spind is None:  # Download each map and compute the SparseIndex
                spind = SparseIndex.from_sparsemap(self)
                spind.save(self._cache_prefix, folder=CACHE.folder)
            self._sparse_index_cached = spind
        # sanity check: every coordinate id must have an assignment entry
        assert self._sparse_index_cached.max() == len(self._sparse_index_cached.probs) - 1
        return self._sparse_index_cached

    @property
    def affine(self):
        return self.sparse_index.affine

    @property
    def shape(self):
        return self.sparse_index.shape

    def _read_voxel(self, x, y, z):
        """
        Look up region assignments at the given voxel location(s).
        Returns a list of (pointindex, volume, fragment, value) tuples;
        fragment is always None for sparse maps, and pointindex is None
        for a scalar lookup.
        """
        spind = self.sparse_index
        vx = spind.voxels[x, y, z]
        # Fix: scalar numpy indexing yields a numpy integer, never a Python
        # int, so the previous isinstance(vx, int) check could not match and
        # scalar lookups fell through to iterating a 0-d array. Test
        # dimensionality instead, which covers both cases.
        if np.ndim(vx) == 0:
            # NOTE(review): an unassigned voxel (vx == -1) would index
            # probs[-1] here, as in previous versions — confirm callers only
            # query mapped voxels.
            return list(
                (None, volume, None, value)
                for volume, value in spind.probs[vx].items()
            )
        else:
            return list(
                (pointindex, volume, None, value)
                for pointindex, voxel in enumerate(vx)
                for volume, value in spind.probs[voxel].items()
            )

    def _assign_volume(
        self,
        queryvolume: "_volume.Volume",
        minsize_voxel: int,
        lower_threshold: float,
        split_components: bool = True
    ) -> List[parcellationmap.AssignImageResult]:
        """
        Assign an image volume to this sparse map.

        Parameters
        -----------
        queryvolume: Volume
            the volume to be compared with maps
        minsize_voxel: int, default: 1
            Minimum voxel size of image components to be taken into account.
        lower_threshold: float, default: 0
            Lower threshold on values in the statistical map. Values smaller than
            this threshold will be excluded from the assignment computation.
        split_components: bool, default: True
            Whether to split the query volume into disjoint components.
        """
        queryimg = queryvolume.fetch()
        imgdata = np.asanyarray(queryimg.dataobj)
        imgaffine = queryimg.affine
        assignments = []

        # resample query image into this image's voxel space, if required.
        # Fix: compare the affines element-wise; the previous
        # sum-of-differences test can report equality for distinct affines
        # when positive and negative differences cancel.
        if np.array_equal(imgaffine, self.affine):
            querydata = imgdata.squeeze()
        else:
            # nearest-neighbour for integer (label) data, linear otherwise
            if issubclass(imgdata.dtype.type, np.integer):
                interp = "nearest"
            else:
                interp = "linear"
            from nibabel import Nifti1Image
            queryimg = image.resample_img(
                Nifti1Image(imgdata, imgaffine),
                target_affine=self.affine,
                target_shape=self.shape,
                interpolation=interp,
            )
            querydata = np.asanyarray(queryimg.dataobj).squeeze()

        iter_func = connected_components if split_components \
            else lambda img: [(1, img)]

        for mode, modemask in iter_func(querydata):

            # determine bounding box of the mode
            XYZ2 = np.array(np.where(modemask)).T
            position = np.dot(self.affine, np.r_[XYZ2.mean(0), 1])[:3]
            if XYZ2.shape[0] <= minsize_voxel:
                continue
            X2, Y2, Z2 = [v.squeeze() for v in np.split(XYZ2, 3, axis=1)]

            bbox2 = boundingbox.BoundingBox(XYZ2.min(0), XYZ2.max(0) + 1, space=None)
            if bbox2.volume == 0:
                continue

            spind = self.sparse_index

            for volume in siibra_tqdm(
                range(len(self)),
                desc=f"Assigning structure #{mode} to {len(self)} sparse maps",
                total=len(self),
                unit=" map"
            ):
                bbox1 = boundingbox.BoundingBox(
                    self.sparse_index.bboxes[volume]["minpoint"],
                    self.sparse_index.bboxes[volume]["maxpoint"],
                    space=None,
                )
                if bbox1.intersection(bbox2) is None:
                    continue

                # compute union of voxel space bounding boxes
                bbox = bbox1.union(bbox2)
                bbshape = np.array(bbox.shape, dtype="int") + 1
                x0, y0, z0 = map(int, bbox.minpoint)

                # build flattened vector of map values
                v1 = np.zeros(np.prod(bbshape))
                XYZ1 = spind.coords(volume).T
                X1, Y1, Z1 = [v.squeeze() for v in np.split(XYZ1, 3, axis=1)]
                indices1 = np.ravel_multi_index(
                    (X1 - x0, Y1 - y0, Z1 - z0), bbshape
                )
                v1[indices1] = [spind.probs[i][volume] for i in spind.voxels[X1, Y1, Z1]]
                v1[v1 < lower_threshold] = 0

                # build flattened vector of input image mode
                v2 = np.zeros(np.prod(bbshape))
                indices2 = np.ravel_multi_index(
                    (X2 - x0, Y2 - y0, Z2 - z0), bbshape
                )
                v2[indices2] = querydata[X2, Y2, Z2]

                assert v1.shape == v2.shape

                intersection = np.sum(
                    (v1 > 0) & (v2 > 0)
                )  # np.minimum(v1, v2).sum()
                if intersection == 0:
                    continue
                iou = intersection / np.sum(
                    (v1 > 0) | (v2 > 0)
                )  # np.maximum(v1, v2).sum()

                # Pearson correlation of the two flattened patches
                v1d = v1 - v1.mean()
                v2d = v2 - v2.mean()
                rho = (
                    (v1d * v2d).sum()
                    / np.sqrt((v1d ** 2).sum())
                    / np.sqrt((v2d ** 2).sum())
                )

                maxval = v1.max()

                assignments.append(
                    parcellationmap.AssignImageResult(
                        input_structure=mode,
                        centroid=tuple(position.round(2)),
                        volume=volume,
                        fragment=None,
                        map_value=maxval,
                        intersection_over_union=iou,
                        intersection_over_first=intersection / (v1 > 0).sum(),
                        intersection_over_second=intersection / (v2 > 0).sum(),
                        correlation=rho,
                        weighted_mean_of_first=np.sum(v1 * v2) / np.sum(v2),
                        weighted_mean_of_second=np.sum(v1 * v2) / np.sum(v1)
                    )
                )

        return assignments
|