siibra 0.5a2__py3-none-any.whl → 1.0.0a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- siibra/VERSION +1 -1
- siibra/__init__.py +20 -12
- siibra/commons.py +145 -90
- siibra/configuration/__init__.py +1 -1
- siibra/configuration/configuration.py +22 -17
- siibra/configuration/factory.py +177 -128
- siibra/core/__init__.py +1 -8
- siibra/core/{relation_qualification.py → assignment.py} +17 -14
- siibra/core/atlas.py +66 -35
- siibra/core/concept.py +81 -39
- siibra/core/parcellation.py +83 -67
- siibra/core/region.py +569 -263
- siibra/core/space.py +7 -39
- siibra/core/structure.py +111 -0
- siibra/exceptions.py +63 -0
- siibra/experimental/__init__.py +19 -0
- siibra/experimental/contour.py +61 -0
- siibra/experimental/cortical_profile_sampler.py +57 -0
- siibra/experimental/patch.py +98 -0
- siibra/experimental/plane3d.py +256 -0
- siibra/explorer/__init__.py +16 -0
- siibra/explorer/url.py +112 -52
- siibra/explorer/util.py +31 -9
- siibra/features/__init__.py +73 -8
- siibra/features/anchor.py +75 -196
- siibra/features/connectivity/__init__.py +1 -1
- siibra/features/connectivity/functional_connectivity.py +2 -2
- siibra/features/connectivity/regional_connectivity.py +99 -10
- siibra/features/connectivity/streamline_counts.py +1 -1
- siibra/features/connectivity/streamline_lengths.py +1 -1
- siibra/features/connectivity/tracing_connectivity.py +1 -1
- siibra/features/dataset/__init__.py +1 -1
- siibra/features/dataset/ebrains.py +3 -3
- siibra/features/feature.py +219 -110
- siibra/features/image/__init__.py +1 -1
- siibra/features/image/image.py +21 -13
- siibra/features/image/sections.py +1 -1
- siibra/features/image/volume_of_interest.py +1 -1
- siibra/features/tabular/__init__.py +1 -1
- siibra/features/tabular/bigbrain_intensity_profile.py +24 -13
- siibra/features/tabular/cell_density_profile.py +111 -69
- siibra/features/tabular/cortical_profile.py +82 -16
- siibra/features/tabular/gene_expression.py +117 -6
- siibra/features/tabular/layerwise_bigbrain_intensities.py +7 -9
- siibra/features/tabular/layerwise_cell_density.py +9 -24
- siibra/features/tabular/receptor_density_fingerprint.py +11 -6
- siibra/features/tabular/receptor_density_profile.py +12 -15
- siibra/features/tabular/regional_timeseries_activity.py +74 -18
- siibra/features/tabular/tabular.py +17 -8
- siibra/livequeries/__init__.py +1 -7
- siibra/livequeries/allen.py +139 -77
- siibra/livequeries/bigbrain.py +104 -128
- siibra/livequeries/ebrains.py +7 -4
- siibra/livequeries/query.py +1 -2
- siibra/locations/__init__.py +32 -25
- siibra/locations/boundingbox.py +153 -127
- siibra/locations/location.py +45 -80
- siibra/locations/point.py +97 -83
- siibra/locations/pointcloud.py +349 -0
- siibra/retrieval/__init__.py +1 -1
- siibra/retrieval/cache.py +107 -13
- siibra/retrieval/datasets.py +9 -14
- siibra/retrieval/exceptions/__init__.py +2 -1
- siibra/retrieval/repositories.py +147 -53
- siibra/retrieval/requests.py +64 -29
- siibra/vocabularies/__init__.py +2 -2
- siibra/volumes/__init__.py +7 -9
- siibra/volumes/parcellationmap.py +396 -253
- siibra/volumes/providers/__init__.py +20 -0
- siibra/volumes/providers/freesurfer.py +113 -0
- siibra/volumes/{gifti.py → providers/gifti.py} +29 -18
- siibra/volumes/{neuroglancer.py → providers/neuroglancer.py} +204 -92
- siibra/volumes/{nifti.py → providers/nifti.py} +64 -44
- siibra/volumes/providers/provider.py +107 -0
- siibra/volumes/sparsemap.py +159 -260
- siibra/volumes/volume.py +720 -152
- {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/METADATA +25 -28
- siibra-1.0.0a1.dist-info/RECORD +84 -0
- {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/WHEEL +1 -1
- siibra/locations/pointset.py +0 -198
- siibra-0.5a2.dist-info/RECORD +0 -74
- {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/LICENSE +0 -0
- {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/top_level.txt +0 -0
siibra/volumes/sparsemap.py
CHANGED
@@ -1,4 +1,4 @@
-# Copyright 2018-…
+# Copyright 2018-2024
 # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
 
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,25 +15,30 @@
 """Represents lists of probabilistic brain region maps."""
 from . import parcellationmap, volume as _volume
 
-from …
+from .providers import provider
+from ..commons import MapIndex, logger, connected_components, siibra_tqdm
 from ..locations import boundingbox
-from ..retrieval import …
-from ..retrieval.…
+from ..retrieval.cache import CACHE
+from ..retrieval.requests import HttpRequest, FileLoader
 
-from os import path, …
-from …
-import gzip
-from typing import Dict, Union, TYPE_CHECKING, List
+from os import path, makedirs
+from typing import Dict, List
 from nilearn import image
-from nibabel import Nifti1Image, load
 import numpy as np
 
-if TYPE_CHECKING:
-    from ..core.region import Region
-
 
 class SparseIndex:
 
+    # Precomputed sparse indices are stored in an EBRAINS data proxy
+    _BUCKET = "https://data-proxy.ebrains.eu/api/v1/buckets/reference-atlas-data/"
+    _DATAPROXY_BASEURL = _BUCKET + "sparse-indices/sparse-indices-siibra_python_v1.0/"
+
+    _SUFFIXES = {
+        "probs": ".sparseindex_v1.probs.txt.gz",
+        "bboxes": ".sparseindex_v1.bboxes.txt.gz",
+        "voxels": ".sparseindex_v1.voxels.nii.gz"
+    }
+
     def __init__(self):
         self.probs = []
         self.bboxes = []
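For orientation, these new constants compose into one URL per file kind; a minimal sketch (the base name below is a made-up placeholder — at runtime it is derived from the map's `key`, as the `sparse_index` property further down shows):

```python
# Hypothetical base name; real ones come from SparseMap.key.lower().
base = "some_statistical_map_key"
bucket = "https://data-proxy.ebrains.eu/api/v1/buckets/reference-atlas-data/"
baseurl = bucket + "sparse-indices/sparse-indices-siibra_python_v1.0/"
urls = {
    kind: baseurl + base + suffix
    for kind, suffix in {
        "probs": ".sparseindex_v1.probs.txt.gz",
        "bboxes": ".sparseindex_v1.bboxes.txt.gz",
        "voxels": ".sparseindex_v1.voxels.nii.gz",
    }.items()
}
```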
@@ -43,20 +48,19 @@ class SparseIndex:
         self.shape = None
         self.voxels: np.ndarray = None
 
-    def add_img(self, …
+    def add_img(self, imgdata: np.ndarray, affine: np.ndarray):
 
         if self.num_volumes == 0:
-            self.affine = …
-            self.shape = …
-            self.voxels = np.zeros(…
+            self.affine = affine
+            self.shape = imgdata.shape
+            self.voxels = np.zeros(imgdata.shape, dtype=np.int32) - 1
         else:
-            if (…
+            if (imgdata.shape != self.shape) or ((affine - self.affine).sum() != 0):
                 raise RuntimeError(
                     "Building sparse maps from volumes with different voxel spaces is not yet supported in siibra."
                 )
 
         volume = self.num_volumes
-        imgdata = np.asanyarray(img.dataobj)
         X, Y, Z = [v.astype("int32") for v in np.where(imgdata > 0)]
         for x, y, z, prob in zip(X, Y, Z, imgdata[X, Y, Z]):
             coord_id = self.voxels[x, y, z]
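The structure `add_img` maintains is easiest to see in isolation: `voxels` is an int32 volume holding, per voxel, an index into `probs` (-1 for voxels no map covers), and `probs[i]` maps volume number to probability. A self-contained sketch of that bookkeeping (not siibra code):

```python
import numpy as np

# `voxels` stores, per voxel, an index into `probs`; -1 means "not covered".
# `probs[i]` maps volume number -> probability for every voxel where voxels == i.
voxels = np.full((2, 2, 2), -1, dtype=np.int32)
probs = []

def add(volume_id: int, imgdata: np.ndarray):
    for xyz in zip(*np.where(imgdata > 0)):
        i = voxels[xyz]
        if i < 0:                      # first map covering this voxel
            voxels[xyz] = len(probs)
            probs.append({volume_id: float(imgdata[xyz])})
        else:                          # voxel already indexed: extend its dict
            probs[i][volume_id] = float(imgdata[xyz])

a = np.zeros((2, 2, 2)); a[0, 0, 0] = 0.2; a[1, 1, 1] = 0.9
b = np.zeros((2, 2, 2)); b[0, 0, 0] = 0.5
add(0, a)
add(1, b)
assert probs[voxels[0, 0, 0]] == {0: 0.2, 1: 0.5}
```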
@@ -111,83 +115,121 @@ class SparseIndex:
         v = [self.probs[i][volume] for i in self.voxels[x, y, z]]
         return x, y, z, v
 
-    …
-        Serialize this index to the cache, using the given prefix for the cache
-        filenames.
-        """
-        probsfile = cache.CACHE.build_filename(f"{cache_prefix}", suffix="probs.txt.gz")
-        bboxfile = cache.CACHE.build_filename(f"{cache_prefix}", suffix="bboxes.txt.gz")
-        voxelfile = cache.CACHE.build_filename(f"{cache_prefix}", suffix="voxels.nii.gz")
-        Nifti1Image(self.voxels, self.affine).to_filename(voxelfile)
-        with gzip.open(probsfile, 'wt') as f:
-            for D in self.probs:
-                f.write("{}\n".format(" ".join(f"{i} {p}" for i, p in D.items())))
-        with gzip.open(bboxfile, "wt") as f:
-            for bbox in self.bboxes:
-                f.write(
-                    "{} {}\n".format(
-                        " ".join(map(str, bbox["minpoint"])),
-                        " ".join(map(str, bbox["maxpoint"])),
-                    )
-                )
-
-    @staticmethod
-    def _from_local_cache(cache_name: str):
+    @classmethod
+    def load(cls, filepath_or_url: str) -> "SparseIndex":
         """
-        …
-        suitable cache files with the specified prefix.
+        Loads a precomputed SparseIndex into memory.
 
         Parameters
         ----------
-        …
+        filepath_or_url: str
+            Path/url to the SparseIndex files
+            (eg. https://url_to_files/basefilename):
+            - basefilename.sparseindex_v1.probs.txt.gz
+            - basefilename.sparseindex_v1.bboxes.txt.gz
+            - basefilename.sparseindex_v1.voxels.nii.gz
 
         Returns
         -------
         SparseIndex
-            None if cached files are not found or suitable.
         """
+        from gzip import decompress
+        spindtxt_decoder = lambda b: decompress(b).decode('utf-8').strip().splitlines()
+
+        probsfile = filepath_or_url + SparseIndex._SUFFIXES["probs"]
+        bboxfile = filepath_or_url + SparseIndex._SUFFIXES["bboxes"]
+        voxelfile = filepath_or_url + SparseIndex._SUFFIXES["voxels"]
+        if all(path.isfile(f) for f in [probsfile, bboxfile, voxelfile]):
+            request = FileLoader
+        else:
+            request = HttpRequest
 
-        …
-        bboxfile = cache.CACHE.build_filename(f"{cache_name}", suffix="bboxes.txt.gz")
-        voxelfile = cache.CACHE.build_filename(f"{cache_name}", suffix="voxels.nii.gz")
-        if not all(path.isfile(f) for f in [probsfile, bboxfile, voxelfile]):
-            return None
-
-        result = SparseIndex()
+        result = cls()
 
-        voxels = …
+        voxels = request(voxelfile).get()
         result.voxels = np.asanyarray(voxels.dataobj)
         result.affine = voxels.affine
         result.shape = voxels.shape
 
-        …
-                    "maxpoint": tuple(map(int, fields[3:])),
-                }
-            )
+        lines_probs = request(probsfile, func=spindtxt_decoder).get()
+        for line in siibra_tqdm(
+            lines_probs,
+            total=len(lines_probs),
+            desc="Loading sparse index",
+            unit="voxels"
+        ):
+            fields = line.strip().split(" ")
+            mapindices = list(map(int, fields[0::2]))
+            values = list(map(float, fields[1::2]))
+            D = dict(zip(mapindices, values))
+            result.probs.append(D)
+
+        lines_bboxes = request(bboxfile, func=spindtxt_decoder).get()
+        for line in lines_bboxes:
+            fields = line.strip().split(" ")
+            result.bboxes.append({
+                "minpoint": tuple(map(int, fields[:3])),
+                "maxpoint": tuple(map(int, fields[3:])),
+            })
 
         return result
 
+    def save(self, base_filename: str, folder: str = ""):
+        """
+        Save the SparseIndex as three files under `folder`, named from
+        `base_filename`. If the SparseIndex is not cached yet, siibra will
+        create it first.
+
+        Parameters
+        ----------
+        base_filename: str
+            The files that will be created as:
+            - base_filename.sparseindex_v1.probs.txt.gz
+            - base_filename.sparseindex_v1.bboxes.txt.gz
+            - base_filename.sparseindex_v1.voxels.nii.gz
+        folder: str, default=""
+        """
+        from nibabel import Nifti1Image
+        import gzip
+
+        fullpath = path.join(folder, base_filename)
+        logger.info(f"Saving SparseIndex to '{base_filename}' with suffixes {SparseIndex._SUFFIXES}")
+
+        if folder and not path.isdir(folder):
+            makedirs(folder)
+
+        Nifti1Image(self.voxels, self.affine).to_filename(
+            fullpath + SparseIndex._SUFFIXES["voxels"]
+        )
+        with gzip.open(fullpath + SparseIndex._SUFFIXES["probs"], 'wt') as f:
+            for D in self.probs:
+                f.write(
+                    "{}\n".format(
+                        " ".join(f"{i} {p}" for i, p in D.items())
+                    )
+                )
+        with gzip.open(fullpath + SparseIndex._SUFFIXES["bboxes"], "wt") as f:
+            for bbox in self.bboxes:
+                f.write(
+                    "{} {}\n".format(
+                        " ".join(map(str, bbox["minpoint"])),
+                        " ".join(map(str, bbox["maxpoint"])),
+                    )
+                )
+        logger.info(f"SparseIndex is saved to {fullpath}.")
+
+    @classmethod
+    def from_sparsemap(cls, sparsemap: "SparseMap") -> "SparseIndex":
+        with provider.SubvolumeProvider.UseCaching():
+            spind = cls()
+            for img in siibra_tqdm(
+                sparsemap.fetch_iter(), total=len(sparsemap), unit="maps",
+                desc="Fetching volumetric maps and computing SparseIndex"
+            ):
+                spind.add_img(np.asanyarray(img.dataobj), img.affine)
+            return spind
+
 
 class SparseMap(parcellationmap.Map):
     """
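Taken together, `from_sparsemap`, `save` and `load` give a public round trip for precomputing indices. A hedged usage sketch (the map specification strings are illustrative, and `get_map` must return a statistical `SparseMap` for `from_sparsemap` to apply):

```python
import siibra
from siibra.volumes.sparsemap import SparseIndex

# Illustrative specs: any parcellation with a statistical (probabilistic) map.
mp = siibra.get_map("julich 2.9", "mni152", "statistical")
spind = SparseIndex.from_sparsemap(mp)           # fetch all volumes, build index
spind.save("julich29_mni152", folder="indices")  # writes the three files above
reloaded = SparseIndex.load("indices/julich29_mni152")  # or an http(s) base URL
```

In practice `SparseMap.sparse_index` (below) performs this resolution automatically; the explicit calls are mainly useful for precomputing indices to host somewhere.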
@@ -209,10 +251,6 @@ class SparseMap(parcellationmap.Map):
         to the actual (probability) value.
     """
 
-    # A gitlab instance with holds precomputed sparse indices
-    _GITLAB_SERVER = 'https://jugit.fz-juelich.de'
-    _GITLAB_PROJECT = 5779
-
     def __init__(
         self,
         identifier: str,
@@ -225,7 +263,8 @@ class SparseMap(parcellationmap.Map):
         description: str = "",
         modality: str = None,
         publications: list = [],
-        datasets: list = []
+        datasets: list = [],
+        prerelease: bool = False,
     ):
         parcellationmap.Map.__init__(
             self,
@@ -240,44 +279,30 @@ class SparseMap(parcellationmap.Map):
             publications=publications,
             datasets=datasets,
             volumes=volumes,
+            prerelease=prerelease,
         )
         self._sparse_index_cached = None
 
     @property
     def _cache_prefix(self):
-        return f"{self.parcellation.id}_{self.space.id}_{self.maptype}_{self.…
+        return CACHE.build_filename(f"{self.parcellation.id}_{self.space.id}_{self.maptype}_{self.key.lower()}_index")
 
     @property
     def sparse_index(self):
         if self._sparse_index_cached is None:
-            …
+            try:  # try loading from cache on disk
+                spind = SparseIndex.load(self._cache_prefix)
+            except Exception:
+                spind = None
+            if spind is None:  # try from precomputed source
                 try:
-                    …
-                    spind = self.load_zipped_sparseindex(zipfile)
+                    logger.info("Downloading and loading precomputed SparseIndex...")
+                    spind = SparseIndex.load(SparseIndex._DATAPROXY_BASEURL + self.key.lower())
                 except Exception:
-                    logger.…
-                    …
-                    spind = SparseIndex()
-                    for vol in siibra_tqdm(
-                        range(len(self)), total=len(self), unit="maps",
-                        desc=f"Fetching {len(self)} volumetric maps"
-                    ):
-                        img = super().fetch(
-                            index=MapIndex(volume=vol, label=None)
-                        )
-                        if img is None:
-                            region = self.get_region(volume=vol)
-                            logger.error(f"Cannot retrieve volume #{vol} for {region.name}, it will not be included in the sparse map.")
-                            continue
-                        spind.add_img(img)
-                    spind._to_local_cache(self._cache_prefix)
+                    logger.error("Failed to download precomputed SparseIndex.", exc_info=1)
+            if spind is None:  # Download each map and compute the SparseIndex
+                spind = SparseIndex.from_sparsemap(self)
+                spind.save(self._cache_prefix, folder=CACHE.folder)
             self._sparse_index_cached = spind
         assert self._sparse_index_cached.max() == len(self._sparse_index_cached.probs) - 1
         return self._sparse_index_cached
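The net control flow of the rewritten property, in sketch form (not siibra code; names as in the diff): local disk cache first, then the precomputed index on the data proxy, and only as a last resort a full recompute that is saved back to the cache.

```python
def resolve_sparse_index(m) -> "SparseIndex":
    # 1) local disk cache, keyed by parcellation, space, and map type
    try:
        return SparseIndex.load(m._cache_prefix)
    except Exception:
        pass
    # 2) precomputed index hosted on the EBRAINS data proxy
    try:
        return SparseIndex.load(SparseIndex._DATAPROXY_BASEURL + m.key.lower())
    except Exception:
        pass
    # 3) fetch every probability map, build the index, and cache it on disk
    spind = SparseIndex.from_sparsemap(m)
    spind.save(m._cache_prefix, folder=CACHE.folder)
    return spind
```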
@@ -290,142 +315,6 @@ class SparseMap(parcellationmap.Map):
     def shape(self):
         return self.sparse_index.shape
 
-    def save_sparseindex(self, destination: str, filename: str = None):
-        """
-        Save SparseIndex as a .zip in destination folder from local cache. If
-        SparseIndex is not cached, siibra will firt create it first.
-
-        Parameters
-        ----------
-        destination: str
-            The path where the zip file will be created.
-        filename: str, default=None
-            Name of the zip and prefix of the SparseIndex files. If None, siibra
-            uses `name` property.
-        """
-        if filename is None:
-            filename = f"{self.name.replace(' ', '_')}_index"
-        logger.info(f"Saving SparseIndex of '{self.name}' as '{filename}.zip'")
-        if not path.isdir(destination):
-            makedirs(destination)
-        if self._sparse_index_cached is None:
-            _ = self.sparse_index
-        suffices = [".probs.txt.gz", ".bboxes.txt.gz", ".voxels.nii.gz"]
-        try:
-            with ZipFile(f"{destination}/{filename}.zip", 'w') as zipf:
-                for suffix in suffices:
-                    zipf.write(
-                        filename=cache.CACHE.build_filename(self._cache_prefix, suffix),
-                        arcname=path.basename(f"{filename}{suffix}"),
-                        compress_type=ZIP_DEFLATED
-                    )
-        except Exception as e:
-            logger.error("Could not save SparseIndex:\n")
-            raise e
-        logger.info("SparseIndex is saved.")
-
-    def load_zipped_sparseindex(self, zipfname: str):
-        """
-        Load SparseIndex from previously computed source and creates a local
-        cache.
-
-        Parameters
-        ----------
-        zipfile: str
-            A url or a path to zip file containing the SparseIndex files for
-            this SparseMap precomputed by siibra.
-
-        Returns
-        -------
-        SparseIndex
-        """
-        zconn = ZipfileConnector(zipfname)
-        with ZipFile(zconn.zipfile, 'r') as zp:
-            suffices = [".probs.txt.gz", ".bboxes.txt.gz", ".voxels.nii.gz"]
-            for suffix in suffices:
-                file = [f for f in zconn.search_files(suffix=suffix)]
-                assert len(file) == 1, f"Could not find a unique '{suffix}' file in {zipfname}."
-                zp.extract(file[0], cache.CACHE.folder)
-                rename(
-                    path.join(cache.CACHE.folder, file[0]),
-                    cache.CACHE.build_filename(self._cache_prefix, suffix=suffix)
-                )
-        zconn.clear_cache()
-
-        return SparseIndex._from_local_cache(self._cache_prefix)
-
-    def fetch(
-        self,
-        region_or_index: Union[MapIndex, str, 'Region'] = None,
-        *,
-        index: MapIndex = None,
-        region: Union[str, 'Region'] = None,
-        **kwargs
-    ):
-        """
-        Recreate a particular volumetric map from the sparse
-        representation.
-
-        Parameters
-        ----------
-        region_or_index: str, Region, MapIndex
-            Lazy match the specification.
-        index : MapIndex
-            The index to be fetched.
-        region: str, Region
-            Region name specification. If given, will be used to decode the map
-            index of a particular region.
-
-        Returns
-        -------
-        An image or mesh
-        """
-        if kwargs.get('format') in ['mesh'] + _volume.Volume.MESH_FORMATS:
-            # a mesh is requested, this is not handled by the sparse map
-            return super().fetch(region_or_index, index=index, region=region, **kwargs)
-
-        try:
-            length = len([arg for arg in [region_or_index, region, index] if arg is not None])
-            assert length == 1
-        except AssertionError:
-            if length > 1:
-                raise parcellationmap.ExcessiveArgumentException(
-                    "One and only one of region_or_index, region, index can be defined for fetch"
-                )
-            # user can provide no arguments, which assumes one and only one volume present
-
-        if isinstance(region_or_index, MapIndex):
-            index = region_or_index
-
-        from ..core.region import Region
-        if isinstance(region_or_index, (str, Region)):
-            region = region_or_index
-
-        volidx = None
-        if index is not None:
-            assert isinstance(index, MapIndex)
-            volidx = index.volume
-        if region is not None:
-            index = self.get_index(region)
-            assert index is not None
-            volidx = index.volume
-
-        if volidx is None:
-            try:
-                assert len(self) == 1
-                volidx = 0
-            except AssertionError:
-                raise parcellationmap.InsufficientArgumentException(
-                    f"{self.__class__.__name__} provides {len(self)} volumes. "
-                    "Specify 'region' or 'index' for fetch() to identify one."
-                )
-
-        assert isinstance(volidx, int)
-        x, y, z, v = self.sparse_index.mapped_voxels(volidx)
-        result = np.zeros(self.shape, dtype=np.float32)
-        result[x, y, z] = v
-        return Nifti1Image(result, self.affine)
-
     def _read_voxel(self, x, y, z):
         spind = self.sparse_index
         vx = spind.voxels[x, y, z]
@@ -441,48 +330,58 @@ class SparseMap(parcellationmap.Map):
             for volume, value in spind.probs[voxel].items()
         )
 
-    def …
+    def _assign_volume(
+        self,
+        queryvolume: "_volume.Volume",
+        minsize_voxel: int,
+        lower_threshold: float,
+        split_components: bool = True
+    ) -> List[parcellationmap.AssignImageResult]:
         """
-        Assign an image volume to this …
+        Assign an image volume to this sparse map.
 
         Parameters
         -----------
-        …
-            the …
+        queryvolume: Volume
+            the volume to be compared with maps
         minsize_voxel: int, default: 1
             Minimum voxel size of image components to be taken into account.
         lower_threshold: float, default: 0
             Lower threshold on values in the statistical map. Values smaller than
             this threshold will be excluded from the assignment computation.
+        split_components: bool, default: True
+            Whether to split the query volume into disjoint components.
         """
+        queryimg = queryvolume.fetch()
+        imgdata = np.asanyarray(queryimg.dataobj)
+        imgaffine = queryimg.affine
         assignments = []
 
         # resample query image into this image's voxel space, if required
-        if (…
+        if (imgaffine - self.affine).sum() == 0:
+            querydata = imgdata.squeeze()
         else:
-            if issubclass(…
+            if issubclass(imgdata.dtype.type, np.integer):
                 interp = "nearest"
             else:
                 interp = "linear"
+            from nibabel import Nifti1Image
             queryimg = image.resample_img(
-                …
+                Nifti1Image(imgdata, imgaffine),
                 target_affine=self.affine,
                 target_shape=self.shape,
                 interpolation=interp,
             )
+            querydata = np.asanyarray(queryimg.dataobj).squeeze()
 
-        …
-        iter_func = iterate_connected_components if split_components \
+        iter_func = connected_components if split_components \
             else lambda img: [(1, img)]
 
-        for mode, …
+        for mode, modemask in iter_func(querydata):
 
             # determine bounding box of the mode
-            modemask = np.asanyarray(modeimg.dataobj)
             XYZ2 = np.array(np.where(modemask)).T
-            position = np.dot(…
+            position = np.dot(self.affine, np.r_[XYZ2.mean(0), 1])[:3]
             if XYZ2.shape[0] <= minsize_voxel:
                 continue
             X2, Y2, Z2 = [v.squeeze() for v in np.split(XYZ2, 3, axis=1)]