siibra 1.0a14__py3-none-any.whl → 1.0.1a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of siibra has been flagged as possibly problematic in its registry.

Files changed (80)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +15 -5
  3. siibra/commons.py +3 -48
  4. siibra/configuration/__init__.py +1 -1
  5. siibra/configuration/configuration.py +1 -1
  6. siibra/configuration/factory.py +164 -127
  7. siibra/core/__init__.py +1 -1
  8. siibra/core/assignment.py +1 -1
  9. siibra/core/atlas.py +24 -17
  10. siibra/core/concept.py +18 -9
  11. siibra/core/parcellation.py +76 -55
  12. siibra/core/region.py +163 -183
  13. siibra/core/space.py +3 -1
  14. siibra/core/structure.py +1 -2
  15. siibra/exceptions.py +17 -1
  16. siibra/experimental/contour.py +6 -6
  17. siibra/experimental/patch.py +2 -2
  18. siibra/experimental/plane3d.py +8 -8
  19. siibra/explorer/__init__.py +1 -1
  20. siibra/explorer/url.py +15 -0
  21. siibra/explorer/util.py +1 -1
  22. siibra/features/__init__.py +1 -1
  23. siibra/features/anchor.py +13 -14
  24. siibra/features/connectivity/__init__.py +1 -1
  25. siibra/features/connectivity/functional_connectivity.py +1 -1
  26. siibra/features/connectivity/regional_connectivity.py +7 -5
  27. siibra/features/connectivity/streamline_counts.py +1 -1
  28. siibra/features/connectivity/streamline_lengths.py +1 -1
  29. siibra/features/connectivity/tracing_connectivity.py +1 -1
  30. siibra/features/dataset/__init__.py +1 -1
  31. siibra/features/dataset/ebrains.py +1 -1
  32. siibra/features/feature.py +50 -28
  33. siibra/features/image/__init__.py +1 -1
  34. siibra/features/image/image.py +18 -13
  35. siibra/features/image/sections.py +1 -1
  36. siibra/features/image/volume_of_interest.py +1 -1
  37. siibra/features/tabular/__init__.py +1 -1
  38. siibra/features/tabular/bigbrain_intensity_profile.py +2 -2
  39. siibra/features/tabular/cell_density_profile.py +102 -66
  40. siibra/features/tabular/cortical_profile.py +5 -3
  41. siibra/features/tabular/gene_expression.py +1 -1
  42. siibra/features/tabular/layerwise_bigbrain_intensities.py +1 -1
  43. siibra/features/tabular/layerwise_cell_density.py +8 -25
  44. siibra/features/tabular/receptor_density_fingerprint.py +5 -3
  45. siibra/features/tabular/receptor_density_profile.py +5 -3
  46. siibra/features/tabular/regional_timeseries_activity.py +7 -5
  47. siibra/features/tabular/tabular.py +5 -3
  48. siibra/livequeries/__init__.py +1 -1
  49. siibra/livequeries/allen.py +46 -20
  50. siibra/livequeries/bigbrain.py +9 -9
  51. siibra/livequeries/ebrains.py +1 -1
  52. siibra/livequeries/query.py +1 -2
  53. siibra/locations/__init__.py +10 -10
  54. siibra/locations/boundingbox.py +77 -38
  55. siibra/locations/location.py +12 -4
  56. siibra/locations/point.py +14 -9
  57. siibra/locations/{pointset.py → pointcloud.py} +69 -27
  58. siibra/retrieval/__init__.py +1 -1
  59. siibra/retrieval/cache.py +1 -1
  60. siibra/retrieval/datasets.py +1 -1
  61. siibra/retrieval/exceptions/__init__.py +1 -1
  62. siibra/retrieval/repositories.py +10 -27
  63. siibra/retrieval/requests.py +20 -3
  64. siibra/vocabularies/__init__.py +1 -1
  65. siibra/volumes/__init__.py +2 -2
  66. siibra/volumes/parcellationmap.py +121 -94
  67. siibra/volumes/providers/__init__.py +1 -1
  68. siibra/volumes/providers/freesurfer.py +1 -1
  69. siibra/volumes/providers/gifti.py +1 -1
  70. siibra/volumes/providers/neuroglancer.py +68 -42
  71. siibra/volumes/providers/nifti.py +18 -28
  72. siibra/volumes/providers/provider.py +2 -2
  73. siibra/volumes/sparsemap.py +128 -247
  74. siibra/volumes/volume.py +252 -65
  75. {siibra-1.0a14.dist-info → siibra-1.0.1a0.dist-info}/METADATA +17 -4
  76. siibra-1.0.1a0.dist-info/RECORD +84 -0
  77. {siibra-1.0a14.dist-info → siibra-1.0.1a0.dist-info}/WHEEL +1 -1
  78. siibra-1.0a14.dist-info/RECORD +0 -84
  79. {siibra-1.0a14.dist-info → siibra-1.0.1a0.dist-info}/LICENSE +0 -0
  80. {siibra-1.0a14.dist-info → siibra-1.0.1a0.dist-info}/top_level.txt +0 -0
siibra/volumes/providers/nifti.py

@@ -1,4 +1,4 @@
- # Copyright 2018-2021
+ # Copyright 2018-2024
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,7 +17,7 @@ from . import provider as _provider

  from ...commons import logger, resample_img_to_img
  from ...retrieval import requests
- from ...locations import pointset, boundingbox as _boundingbox
+ from ...locations import pointcloud, boundingbox as _boundingbox

  from typing import Union, Dict, Tuple
  import nibabel as nib
@@ -65,28 +65,16 @@ class NiftiProvider(_provider.VolumeProvider, srctype="nii"):
      def fragments(self):
          return [k for k in self._img_loaders if k is not None]

-     def get_boundingbox(self, clip=True, background=0, **fetch_kwargs) -> "_boundingbox.BoundingBox":
+     def get_boundingbox(self, **fetch_kwargs) -> "_boundingbox.BoundingBox":
          """
          Return the bounding box in physical coordinates of the union of
          fragments in this nifti volume.

          Parameters
          ----------
-         clip : bool, default: True
-             Whether to clip the background of the volume.
-         background : float, default: 0.0
-             The background value to clip.
-         Note
-         ----
-             To use it, clip must be True.
          fetch_kwargs:
              Not used
          """
-         if fetch_kwargs:
-             logger.warning(
-                 "`volume.fetch()` keyword arguments supplied. Nifti volumes"
-                 " cannot pass them for bounding box calculation."
-             )
          bbox = None
          for loader in self._img_loaders.values():
              img = loader()
@@ -95,19 +83,17 @@ class NiftiProvider(_provider.VolumeProvider, srctype="nii"):
                  f"N-D NIfTI volume has shape {img.shape}, but "
                  f"bounding box considers only {img.shape[:3]}"
              )
-         if clip:
-             next_bbox = _boundingbox.from_array(
-                 np.asanyarray(img.dataobj), threshold=background, space=None
-             ).transform(img.affine)
-         else:
-             shape = img.shape[:3]
-             next_bbox = _boundingbox.BoundingBox(
-                 (0, 0, 0), shape, space=None
-             ).transform(img.affine)
+         shape = img.shape[:3]
+         next_bbox = _boundingbox.BoundingBox(
+             (0, 0, 0), shape, space=None
+         ).transform(img.affine)
          bbox = next_bbox if bbox is None else bbox.union(next_bbox)
      return bbox

      def _merge_fragments(self) -> nib.Nifti1Image:
+         """
+         Merge all fragments this volume contains into one Nifti1Image.
+         """
          bbox = self.get_boundingbox(clip=False, background=0.0)
          num_conflicts = 0
          result = None
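Note the API change above: NiftiProvider.get_boundingbox() no longer accepts clip or background, and the box now always spans the full voxel grid of each fragment (callers such as _merge_fragments still pass the old keywords, which are now swallowed by **fetch_kwargs). A minimal standalone sketch of the new computation, using plain nibabel/numpy rather than siibra's BoundingBox:

    import numpy as np
    import nibabel as nib

    # Transform the voxel-grid corners (0, 0, 0) and img.shape[:3] to physical
    # coordinates with the affine -- no data thresholding is involved anymore.
    img = nib.Nifti1Image(np.zeros((4, 5, 6), dtype=np.uint8),
                          affine=np.diag([2.0, 2.0, 2.0, 1.0]))
    corners = np.array([[0, 0, 0, 1], [*img.shape[:3], 1]], dtype=float)
    minpt, maxpt = (img.affine @ corners.T).T[:, :3]
    print(minpt, maxpt)  # physical corners of the full grid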
@@ -188,19 +174,23 @@ class NiftiProvider(_provider.VolumeProvider, srctype="nii"):
          result = loader()

          if voi is not None:
+             zoom_xyz = np.array(result.header.get_zooms())  # voxel dimensions in xyzt_units
              bb_vox = voi.transform(np.linalg.inv(result.affine))
-             (x0, y0, z0), (x1, y1, z1) = bb_vox.minpoint, bb_vox.maxpoint
+             x0, y0, z0 = np.floor(np.array(bb_vox.minpoint.coordinate) / zoom_xyz).astype(int)
+             x1, y1, z1 = np.ceil(np.array(bb_vox.maxpoint.coordinate) / zoom_xyz).astype(int)
              shift = np.identity(4)
              shift[:3, -1] = bb_vox.minpoint
              result = nib.Nifti1Image(
                  dataobj=result.dataobj[x0:x1, y0:y1, z0:z1],
                  affine=np.dot(result.affine, shift),
+                 dtype=result.header.get_data_dtype(),
              )

          if label is not None:
              result = nib.Nifti1Image(
                  (result.get_fdata() == label).astype('uint8'),
-                 result.affine
+                 result.affine,
+                 dtype='uint8'
              )

          return result
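The VOI crop above replaces raw tuple unpacking with explicit floor/ceil rounding, so fractional voxel coordinates no longer truncate away boundary voxels, and the source dtype is now preserved in the cropped image. The rounding step in isolation, following the diff's computation (all values below are made up):

    import numpy as np

    minpoint = np.array([10.3, 4.8, 7.1])    # hypothetical bb_vox.minpoint.coordinate
    maxpoint = np.array([52.2, 61.5, 33.9])  # hypothetical bb_vox.maxpoint.coordinate
    zoom_xyz = np.array([1.0, 1.0, 2.0])     # hypothetical header.get_zooms()

    x0, y0, z0 = np.floor(minpoint / zoom_xyz).astype(int)  # round min corner down
    x1, y1, z1 = np.ceil(maxpoint / zoom_xyz).astype(int)   # round max corner up
    print((x0, y0, z0), (x1, y1, z1))  # (10, 4, 3) (53, 62, 17)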
@@ -239,7 +229,7 @@ class NiftiProvider(_provider.VolumeProvider, srctype="nii"):

          Returns:
          --------
-         PointSet
+         PointCloud
          """

          from skimage.feature.peak import peak_local_max
@@ -253,7 +243,7 @@ class NiftiProvider(_provider.VolumeProvider, srctype="nii"):
              min_distance=dist,
          )
          return (
-             pointset.PointSet(
+             pointcloud.PointCloud(
                  [np.dot(img.affine, [x, y, z, 1])[:3] for x, y, z in voxels],
                  space=self.space,
              ),
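This reflects the package-wide rename visible in the file list (siibra/locations/pointset.py → pointcloud.py). Downstream code has to follow suit; a hedged before/after sketch, assuming the constructor is unchanged apart from the rename and that string space specs are accepted as elsewhere in siibra:

    # 1.0a14:
    # from siibra.locations import pointset
    # points = pointset.PointSet([[0.0, 0.0, 0.0]], space="mni152")

    # 1.0.1a0:
    from siibra.locations import pointcloud
    points = pointcloud.PointCloud([[0.0, 0.0, 0.0]], space="mni152")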
siibra/volumes/providers/provider.py

@@ -1,4 +1,4 @@
- # Copyright 2018-2021
+ # Copyright 2018-2024
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -28,7 +28,7 @@ VolumeData = Union[Nifti1Image, Dict]

  class VolumeProvider(ABC):

-     _SUBCLASSES = []
+     _SUBCLASSES: List[VolumeProvider] = []

      def __init_subclass__(cls, srctype: str) -> None:
          cls.srctype = srctype
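The new annotation documents that the list is a registry of provider classes, filled by __init_subclass__ as each srctype-specific subclass is defined. A standalone sketch of this registration pattern (the append call is assumed from the list's purpose; the diff only shows the class attribute and the hook's first line). Evaluating List[VolumeProvider] inside the class body requires postponed annotation evaluation (from __future__ import annotations) to avoid a NameError, so the sketch uses List[type] instead:

    from abc import ABC
    from typing import List

    class Provider(ABC):
        _SUBCLASSES: List[type] = []  # registry of concrete provider classes

        def __init_subclass__(cls, srctype: str) -> None:
            cls.srctype = srctype  # tag each subclass with its source type
            Provider._SUBCLASSES.append(cls)
            return super().__init_subclass__()

    class NiftiLike(Provider, srctype="nii"):
        pass

    print([(c.__name__, c.srctype) for c in Provider._SUBCLASSES])  # [('NiftiLike', 'nii')]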
siibra/volumes/sparsemap.py

@@ -1,4 +1,4 @@
- # Copyright 2018-2022
+ # Copyright 2018-2024
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,23 +18,27 @@ from . import parcellationmap, volume as _volume
  from .providers import provider
  from ..commons import MapIndex, logger, connected_components, siibra_tqdm
  from ..locations import boundingbox
- from ..retrieval import cache
- from ..retrieval.repositories import ZipfileConnector, GitlabConnector
- from ..exceptions import InsufficientArgumentException, ExcessiveArgumentException
-
- from os import path, rename, makedirs
- from zipfile import ZipFile, ZIP_DEFLATED
- import gzip
- from typing import Dict, Union, TYPE_CHECKING, List
+ from ..retrieval.cache import CACHE
+ from ..retrieval.requests import HttpRequest, FileLoader
+
+ from os import path, makedirs
+ from typing import Dict, List
  from nilearn import image
  import numpy as np

- if TYPE_CHECKING:
-     from ..core.region import Region
-

  class SparseIndex:

+     # Precomputed sparse indices are stored in an EBRAINS data proxy
+     _BUCKET = "https://data-proxy.ebrains.eu/api/v1/buckets/reference-atlas-data/"
+     _DATAPROXY_BASEURL = _BUCKET + "sparse-indices/sparse-indices-siibra_python_v1.0/"
+
+     _SUFFIXES = {
+         "probs": ".sparseindex_v1.probs.txt.gz",
+         "bboxes": ".sparseindex_v1.bboxes.txt.gz",
+         "voxels": ".sparseindex_v1.voxels.nii.gz"
+     }
+
      def __init__(self):
          self.probs = []
          self.bboxes = []
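Putting the new constants together: a precomputed index for a given map key resolves to three objects in the data-proxy bucket, each named base plus suffix. A small sketch of the URL composition (the key below is made up):

    _BUCKET = "https://data-proxy.ebrains.eu/api/v1/buckets/reference-atlas-data/"
    _DATAPROXY_BASEURL = _BUCKET + "sparse-indices/sparse-indices-siibra_python_v1.0/"
    _SUFFIXES = {
        "probs": ".sparseindex_v1.probs.txt.gz",
        "bboxes": ".sparseindex_v1.bboxes.txt.gz",
        "voxels": ".sparseindex_v1.voxels.nii.gz",
    }

    key = "some_map_key"  # hypothetical SparseMap.key.lower()
    urls = {kind: _DATAPROXY_BASEURL + key + suffix
            for kind, suffix in _SUFFIXES.items()}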
@@ -111,85 +115,121 @@ class SparseIndex:
          v = [self.probs[i][volume] for i in self.voxels[x, y, z]]
          return x, y, z, v

-     def _to_local_cache(self, cache_prefix: str):
-         """
-         Serialize this index to the cache, using the given prefix for the cache
-         filenames.
-         """
-         from nibabel import Nifti1Image
-         probsfile = cache.CACHE.build_filename(f"{cache_prefix}", suffix="probs.txt.gz")
-         bboxfile = cache.CACHE.build_filename(f"{cache_prefix}", suffix="bboxes.txt.gz")
-         voxelfile = cache.CACHE.build_filename(f"{cache_prefix}", suffix="voxels.nii.gz")
-         Nifti1Image(self.voxels, self.affine).to_filename(voxelfile)
-         with gzip.open(probsfile, 'wt') as f:
-             for D in self.probs:
-                 f.write("{}\n".format(" ".join(f"{i} {p}" for i, p in D.items())))
-         with gzip.open(bboxfile, "wt") as f:
-             for bbox in self.bboxes:
-                 f.write(
-                     "{} {}\n".format(
-                         " ".join(map(str, bbox["minpoint"])),
-                         " ".join(map(str, bbox["maxpoint"])),
-                     )
-                 )
-
-     @staticmethod
-     def _from_local_cache(cache_name: str):
+     @classmethod
+     def load(cls, filepath_or_url: str) -> "SparseIndex":
          """
-         Attempts to build a sparse index from the siibra cache, looking for
-         suitable cache files with the specified prefix.
+         Load a precomputed SparseIndex into memory.

          Parameters
          ----------
-         prefix: str
-             Prefix of the filenames.
+         filepath_or_url: str
+             Path/url to the SparseIndex files
+             (e.g. https://url_to_files/basefilename):
+             - basefilename.sparseindex_v1.probs.txt.gz
+             - basefilename.sparseindex_v1.bboxes.txt.gz
+             - basefilename.sparseindex_v1.voxels.nii.gz

          Returns
          -------
          SparseIndex
-             None if cached files are not found or suitable.
          """
-         from nibabel import load
-
-         probsfile = cache.CACHE.build_filename(f"{cache_name}", suffix="probs.txt.gz")
-         bboxfile = cache.CACHE.build_filename(f"{cache_name}", suffix="bboxes.txt.gz")
-         voxelfile = cache.CACHE.build_filename(f"{cache_name}", suffix="voxels.nii.gz")
-         if not all(path.isfile(f) for f in [probsfile, bboxfile, voxelfile]):
-             return None
+         from gzip import decompress
+         spindtxt_decoder = lambda b: decompress(b).decode('utf-8').strip().splitlines()
+
+         probsfile = filepath_or_url + SparseIndex._SUFFIXES["probs"]
+         bboxfile = filepath_or_url + SparseIndex._SUFFIXES["bboxes"]
+         voxelfile = filepath_or_url + SparseIndex._SUFFIXES["voxels"]
+         if all(path.isfile(f) for f in [probsfile, bboxfile, voxelfile]):
+             request = FileLoader
+         else:
+             request = HttpRequest

-         result = SparseIndex()
+         result = cls()

-         voxels = load(voxelfile)
+         voxels = request(voxelfile).get()
          result.voxels = np.asanyarray(voxels.dataobj)
          result.affine = voxels.affine
          result.shape = voxels.shape

-         with gzip.open(probsfile, "rt") as f:
-             lines = f.readlines()
-             for line in siibra_tqdm(
-                 lines,
-                 total=len(lines),
-                 desc="Loading sparse index",
-                 unit="voxels"
-             ):
-                 fields = line.strip().split(" ")
-                 mapindices = list(map(int, fields[0::2]))
-                 values = list(map(float, fields[1::2]))
-                 D = dict(zip(mapindices, values))
-                 result.probs.append(D)
-
-         with gzip.open(bboxfile, "rt") as f:
-             for line in f:
-                 fields = line.strip().split(" ")
-                 result.bboxes.append(
-                     {
-                         "minpoint": tuple(map(int, fields[:3])),
-                         "maxpoint": tuple(map(int, fields[3:])),
-                     }
-                 )
+         lines_probs = request(probsfile, func=spindtxt_decoder).get()
+         for line in siibra_tqdm(
+             lines_probs,
+             total=len(lines_probs),
+             desc="Loading sparse index",
+             unit="voxels"
+         ):
+             fields = line.strip().split(" ")
+             mapindices = list(map(int, fields[0::2]))
+             values = list(map(float, fields[1::2]))
+             D = dict(zip(mapindices, values))
+             result.probs.append(D)
+
+         lines_bboxes = request(bboxfile, func=spindtxt_decoder).get()
+         for line in lines_bboxes:
+             fields = line.strip().split(" ")
+             result.bboxes.append({
+                 "minpoint": tuple(map(int, fields[:3])),
+                 "maxpoint": tuple(map(int, fields[3:])),
+             })

          return result

+     def save(self, base_filename: str, folder: str = ""):
+         """
+         Save the SparseIndex as three files under the folder `folder`,
+         using the given base_filename. If the SparseIndex is not cached,
+         siibra will create it first.
+
+         Parameters
+         ----------
+         base_filename: str
+             The files that will be created as:
+             - base_filename.sparseindex_v1.probs.txt.gz
+             - base_filename.sparseindex_v1.bboxes.txt.gz
+             - base_filename.sparseindex_v1.voxels.nii.gz
+         folder: str, default=""
+         """
+         from nibabel import Nifti1Image
+         import gzip
+
+         fullpath = path.join(folder, base_filename)
+         logger.info(f"Saving SparseIndex to '{base_filename}' with suffixes {SparseIndex._SUFFIXES}")
+
+         if folder and not path.isdir(folder):
+             makedirs(folder)
+
+         Nifti1Image(self.voxels, self.affine).to_filename(
+             fullpath + SparseIndex._SUFFIXES["voxels"]
+         )
+         with gzip.open(fullpath + SparseIndex._SUFFIXES["probs"], 'wt') as f:
+             for D in self.probs:
+                 f.write(
+                     "{}\n".format(
+                         " ".join(f"{i} {p}" for i, p in D.items())
+                     )
+                 )
+         with gzip.open(fullpath + SparseIndex._SUFFIXES["bboxes"], "wt") as f:
+             for bbox in self.bboxes:
+                 f.write(
+                     "{} {}\n".format(
+                         " ".join(map(str, bbox["minpoint"])),
+                         " ".join(map(str, bbox["maxpoint"])),
+                     )
+                 )
+         logger.info(f"SparseIndex is saved to {fullpath}.")
+
+     @classmethod
+     def from_sparsemap(cls, sparsemap: "SparseMap") -> "SparseIndex":
+         with provider.SubvolumeProvider.UseCaching():
+             spind = cls()
+             for img in siibra_tqdm(
+                 sparsemap.fetch_iter(), total=len(sparsemap), unit="maps",
+                 desc="Fetching volumetric maps and computing SparseIndex"
+             ):
+                 spind.add_img(np.asanyarray(img.dataobj), img.affine)
+             return spind
+

  class SparseMap(parcellationmap.Map):
      """
@@ -211,10 +251,6 @@ class SparseMap(parcellationmap.Map):
      to the actual (probability) value.
      """

-     # A gitlab instance with holds precomputed sparse indices
-     _GITLAB_SERVER = 'https://jugit.fz-juelich.de'
-     _GITLAB_PROJECT = 5779
-
      def __init__(
          self,
          identifier: str,
@@ -227,7 +263,8 @@ class SparseMap(parcellationmap.Map):
          description: str = "",
          modality: str = None,
          publications: list = [],
-         datasets: list = []
+         datasets: list = [],
+         prerelease: bool = False,
      ):
          parcellationmap.Map.__init__(
              self,
@@ -242,44 +279,30 @@ class SparseMap(parcellationmap.Map):
              publications=publications,
              datasets=datasets,
              volumes=volumes,
+             prerelease=prerelease,
          )
          self._sparse_index_cached = None

      @property
      def _cache_prefix(self):
-         return f"{self.parcellation.id}_{self.space.id}_{self.maptype}_{self.name}_index"
+         return CACHE.build_filename(f"{self.parcellation.id}_{self.space.id}_{self.maptype}_{self.key.lower()}_index")

      @property
      def sparse_index(self):
          if self._sparse_index_cached is None:
-             spind = SparseIndex._from_local_cache(self._cache_prefix)
-             if spind is None:
-                 logger.info("Downloading precomputed SparseIndex...")
-                 gconn = GitlabConnector(self._GITLAB_SERVER, self._GITLAB_PROJECT, "main")
-                 zip_fname = f"{self.name.replace(' ', '_').replace('statistical', 'continuous')}_index.zip"
+             try:  # try loading from cache on disk
+                 spind = SparseIndex.load(self._cache_prefix)
+             except Exception:
+                 spind = None
+             if spind is None:  # try from precomputed source
                  try:
-                     assert zip_fname in gconn.search_files(), f"{zip_fname} is not in {gconn}."
-                     zipfile = gconn.get_loader(zip_fname).url
-                     spind = self.load_zipped_sparseindex(zipfile)
+                     logger.info("Downloading and loading precomputed SparseIndex...")
+                     spind = SparseIndex.load(SparseIndex._DATAPROXY_BASEURL + self.key.lower())
                  except Exception:
-                     logger.info("Failed to load precomputed SparseIndex from Gitlab.")
-                     logger.debug(f"Could not load SparseIndex from Gitlab at {gconn}", exc_info=1)
-             if spind is None:
-                 with provider.SubvolumeProvider.UseCaching():
-                     spind = SparseIndex()
-                     for vol in siibra_tqdm(
-                         range(len(self)), total=len(self), unit="maps",
-                         desc=f"Fetching {len(self)} volumetric maps"
-                     ):
-                         img = super().fetch(
-                             index=MapIndex(volume=vol, label=None)
-                         )
-                         if img is None:
-                             region = self.get_region(volume=vol)
-                             logger.error(f"Cannot retrieve volume #{vol} for {region.name}, it will not be included in the sparse map.")
-                             continue
-                         spind.add_img(np.asanyarray(img.dataobj), img.affine)
-                     spind._to_local_cache(self._cache_prefix)
+                     logger.error("Failed to download precomputed SparseIndex.", exc_info=1)
+             if spind is None:  # Download each map and compute the SparseIndex
+                 spind = SparseIndex.from_sparsemap(self)
+                 spind.save(self._cache_prefix, folder=CACHE.folder)
              self._sparse_index_cached = spind
          assert self._sparse_index_cached.max() == len(self._sparse_index_cached.probs) - 1
          return self._sparse_index_cached
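The property now resolves the index in three tiers instead of the Gitlab/zip path. A standalone sketch of that control flow (not the real API; SparseIndex and CACHE as imported in this module):

    def resolve_sparse_index(smap):
        # 1) local disk cache
        try:
            return SparseIndex.load(smap._cache_prefix)
        except Exception:
            pass
        # 2) precomputed index from the EBRAINS data proxy
        try:
            return SparseIndex.load(SparseIndex._DATAPROXY_BASEURL + smap.key.lower())
        except Exception:
            pass
        # 3) fall back to fetching every volume, computing the index, and caching it
        spind = SparseIndex.from_sparsemap(smap)
        spind.save(smap._cache_prefix, folder=CACHE.folder)
        return spind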
@@ -292,148 +315,6 @@ class SparseMap(parcellationmap.Map):
      def shape(self):
          return self.sparse_index.shape

-     def save_sparseindex(self, destination: str, filename: str = None):
-         """
-         Save SparseIndex as a .zip in destination folder from local cache. If
-         SparseIndex is not cached, siibra will firt create it first.
-
-         Parameters
-         ----------
-         destination: str
-             The path where the zip file will be created.
-         filename: str, default=None
-             Name of the zip and prefix of the SparseIndex files. If None, siibra
-             uses `name` property.
-         """
-         if filename is None:
-             filename = f"{self.name.replace(' ', '_')}_index"
-         logger.info(f"Saving SparseIndex of '{self.name}' as '{filename}.zip'")
-         if not path.isdir(destination):
-             makedirs(destination)
-         if self._sparse_index_cached is None:
-             _ = self.sparse_index
-         suffices = [".probs.txt.gz", ".bboxes.txt.gz", ".voxels.nii.gz"]
-         try:
-             with ZipFile(f"{destination}/{filename}.zip", 'w') as zipf:
-                 for suffix in suffices:
-                     zipf.write(
-                         filename=cache.CACHE.build_filename(self._cache_prefix, suffix),
-                         arcname=path.basename(f"{filename}{suffix}"),
-                         compress_type=ZIP_DEFLATED
-                     )
-         except Exception as e:
-             logger.error("Could not save SparseIndex:\n")
-             raise e
-         logger.info("SparseIndex is saved.")
-
-     def load_zipped_sparseindex(self, zipfname: str):
-         """
-         Load SparseIndex from previously computed source and creates a local
-         cache.
-
-         Parameters
-         ----------
-         zipfile: str
-             A url or a path to zip file containing the SparseIndex files for
-             this SparseMap precomputed by siibra.
-
-         Returns
-         -------
-         SparseIndex
-         """
-         zconn = ZipfileConnector(zipfname)
-         with ZipFile(zconn.zipfile, 'r') as zp:
-             suffices = [".probs.txt.gz", ".bboxes.txt.gz", ".voxels.nii.gz"]
-             for suffix in suffices:
-                 file = [f for f in zconn.search_files(suffix=suffix)]
-                 assert len(file) == 1, f"Could not find a unique '{suffix}' file in {zipfname}."
-                 zp.extract(file[0], cache.CACHE.folder)
-                 rename(
-                     path.join(cache.CACHE.folder, file[0]),
-                     cache.CACHE.build_filename(self._cache_prefix, suffix=suffix)
-                 )
-         zconn.clear_cache()
-
-         return SparseIndex._from_local_cache(self._cache_prefix)
-
-     def fetch(
-         self,
-         region_or_index: Union[MapIndex, str, 'Region'] = None,
-         *,
-         index: MapIndex = None,
-         region: Union[str, 'Region'] = None,
-         **kwargs
-     ):
-         """
-         Recreate a particular volumetric map from the sparse
-         representation.
-
-         Parameters
-         ----------
-         region_or_index: str, Region, MapIndex
-             Lazy match the specification.
-         index : MapIndex
-             The index to be fetched.
-         region: str, Region
-             Region name specification. If given, will be used to decode the map
-             index of a particular region.
-
-         Returns
-         -------
-         An image or mesh
-         """
-         if kwargs.get('format') in ['mesh'] + _volume.Volume.MESH_FORMATS:
-             # a mesh is requested, this is not handled by the sparse map
-             return super().fetch(region_or_index, index=index, region=region, **kwargs)
-
-         try:
-             length = len([arg for arg in [region_or_index, region, index] if arg is not None])
-             assert length == 1
-         except AssertionError:
-             if length > 1:
-                 raise ExcessiveArgumentException(
-                     "One and only one of region_or_index, region, index can be defined for fetch"
-                 )
-             # user can provide no arguments, which assumes one and only one volume present
-
-         if isinstance(region_or_index, MapIndex):
-             index = region_or_index
-
-         from ..core.region import Region
-         if isinstance(region_or_index, (str, Region)):
-             region = region_or_index
-
-         volidx = None
-         if index is not None:
-             assert isinstance(index, MapIndex)
-             volidx = index.volume
-         if region is not None:
-             index = self.get_index(region)
-             assert index is not None
-             volidx = index.volume
-
-         if volidx is None:
-             try:
-                 assert len(self) == 1
-                 volidx = 0
-             except AssertionError:
-                 raise InsufficientArgumentException(
-                     f"{self.__class__.__name__} provides {len(self)} volumes. "
-                     "Specify 'region' or 'index' for fetch() to identify one."
-                 )
-
-         assert isinstance(volidx, int)
-         x, y, z, v = self.sparse_index.mapped_voxels(volidx)
-         result = np.zeros(self.shape, dtype=np.float32)
-         result[x, y, z] = v
-         volume = _volume.from_array(
-             data=result,
-             affine=self.affine,
-             space=self.space,
-             name=f"Sparse map of {region} from {self.parcellation} in {self.space}"
-         )
-         return volume.fetch()
-
      def _read_voxel(self, x, y, z):
          spind = self.sparse_index
          vx = spind.voxels[x, y, z]
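With the SparseMap-specific fetch() override removed, statistical maps are now served through the inherited parcellationmap.Map.fetch(). A hedged usage sketch, assuming the public siibra.get_map helper (the parcellation, space, and region specs below are examples):

    import siibra

    pmaps = siibra.get_map(parcellation="julich 2.9", space="mni152",
                           maptype="statistical")  # a SparseMap
    img = pmaps.fetch(region="hoc1 left")  # handled by parcellationmap.Map.fetch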