siibra 1.0a19__py3-none-any.whl → 1.0.1a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (38)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +3 -3
  3. siibra/commons.py +0 -46
  4. siibra/configuration/factory.py +10 -20
  5. siibra/core/atlas.py +20 -14
  6. siibra/core/parcellation.py +67 -52
  7. siibra/core/region.py +133 -123
  8. siibra/exceptions.py +8 -0
  9. siibra/experimental/contour.py +6 -6
  10. siibra/experimental/patch.py +2 -2
  11. siibra/experimental/plane3d.py +8 -8
  12. siibra/features/anchor.py +12 -13
  13. siibra/features/connectivity/regional_connectivity.py +2 -2
  14. siibra/features/feature.py +14 -16
  15. siibra/features/tabular/bigbrain_intensity_profile.py +1 -1
  16. siibra/features/tabular/cell_density_profile.py +97 -63
  17. siibra/features/tabular/layerwise_cell_density.py +3 -22
  18. siibra/features/tabular/regional_timeseries_activity.py +2 -2
  19. siibra/livequeries/allen.py +39 -16
  20. siibra/livequeries/bigbrain.py +8 -8
  21. siibra/livequeries/query.py +0 -1
  22. siibra/locations/__init__.py +9 -9
  23. siibra/locations/boundingbox.py +29 -24
  24. siibra/locations/point.py +4 -4
  25. siibra/locations/{pointset.py → pointcloud.py} +30 -22
  26. siibra/retrieval/repositories.py +9 -26
  27. siibra/retrieval/requests.py +19 -2
  28. siibra/volumes/__init__.py +1 -1
  29. siibra/volumes/parcellationmap.py +88 -81
  30. siibra/volumes/providers/neuroglancer.py +62 -36
  31. siibra/volumes/providers/nifti.py +11 -25
  32. siibra/volumes/sparsemap.py +124 -245
  33. siibra/volumes/volume.py +141 -52
  34. {siibra-1.0a19.dist-info → siibra-1.0.1a0.dist-info}/METADATA +16 -3
  35. {siibra-1.0a19.dist-info → siibra-1.0.1a0.dist-info}/RECORD +38 -38
  36. {siibra-1.0a19.dist-info → siibra-1.0.1a0.dist-info}/WHEEL +1 -1
  37. {siibra-1.0a19.dist-info → siibra-1.0.1a0.dist-info}/LICENSE +0 -0
  38. {siibra-1.0a19.dist-info → siibra-1.0.1a0.dist-info}/top_level.txt +0 -0
@@ -18,23 +18,27 @@ from . import parcellationmap, volume as _volume
 from .providers import provider
 from ..commons import MapIndex, logger, connected_components, siibra_tqdm
 from ..locations import boundingbox
-from ..retrieval import cache
-from ..retrieval.repositories import ZipfileConnector, GitlabConnector
-from ..exceptions import InsufficientArgumentException, ExcessiveArgumentException
-
-from os import path, rename, makedirs
-from zipfile import ZipFile, ZIP_DEFLATED
-import gzip
-from typing import Dict, Union, TYPE_CHECKING, List
+from ..retrieval.cache import CACHE
+from ..retrieval.requests import HttpRequest, FileLoader
+
+from os import path, makedirs
+from typing import Dict, List
 from nilearn import image
 import numpy as np
 
-if TYPE_CHECKING:
-    from ..core.region import Region
-
 
 class SparseIndex:
 
+    # Precomputed sparse indices are stored in an EBRAINS data proxy
+    _BUCKET = "https://data-proxy.ebrains.eu/api/v1/buckets/reference-atlas-data/"
+    _DATAPROXY_BASEURL = _BUCKET + "sparse-indices/sparse-indices-siibra_python_v1.0/"
+
+    _SUFFIXES = {
+        "probs": ".sparseindex_v1.probs.txt.gz",
+        "bboxes": ".sparseindex_v1.bboxes.txt.gz",
+        "voxels": ".sparseindex_v1.voxels.nii.gz"
+    }
+
     def __init__(self):
         self.probs = []
         self.bboxes = []
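
The new class constants replace the Gitlab-hosted zip distribution: each precomputed index is now addressed by a base name plus three fixed suffixes. A rough illustrative sketch of how the resource URLs compose (the base name "some_map_key" is hypothetical, not taken from the diff):

# Sketch (not part of the diff): composing the three resource URLs for one
# precomputed index from the constants introduced above.
_BUCKET = "https://data-proxy.ebrains.eu/api/v1/buckets/reference-atlas-data/"
_DATAPROXY_BASEURL = _BUCKET + "sparse-indices/sparse-indices-siibra_python_v1.0/"
_SUFFIXES = {
    "probs": ".sparseindex_v1.probs.txt.gz",
    "bboxes": ".sparseindex_v1.bboxes.txt.gz",
    "voxels": ".sparseindex_v1.voxels.nii.gz",
}

# "some_map_key" is a hypothetical map key for demonstration only
urls = {kind: _DATAPROXY_BASEURL + "some_map_key" + suffix
        for kind, suffix in _SUFFIXES.items()}
# urls["voxels"] -> ".../some_map_key.sparseindex_v1.voxels.nii.gz"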
@@ -111,85 +115,121 @@ class SparseIndex:
         v = [self.probs[i][volume] for i in self.voxels[x, y, z]]
         return x, y, z, v
 
-    def _to_local_cache(self, cache_prefix: str):
-        """
-        Serialize this index to the cache, using the given prefix for the cache
-        filenames.
-        """
-        from nibabel import Nifti1Image
-        probsfile = cache.CACHE.build_filename(f"{cache_prefix}", suffix="probs.txt.gz")
-        bboxfile = cache.CACHE.build_filename(f"{cache_prefix}", suffix="bboxes.txt.gz")
-        voxelfile = cache.CACHE.build_filename(f"{cache_prefix}", suffix="voxels.nii.gz")
-        Nifti1Image(self.voxels, self.affine).to_filename(voxelfile)
-        with gzip.open(probsfile, 'wt') as f:
-            for D in self.probs:
-                f.write("{}\n".format(" ".join(f"{i} {p}" for i, p in D.items())))
-        with gzip.open(bboxfile, "wt") as f:
-            for bbox in self.bboxes:
-                f.write(
-                    "{} {}\n".format(
-                        " ".join(map(str, bbox["minpoint"])),
-                        " ".join(map(str, bbox["maxpoint"])),
-                    )
-                )
-
-    @staticmethod
-    def _from_local_cache(cache_name: str):
+    @classmethod
+    def load(cls, filepath_or_url: str) -> "SparseIndex":
         """
-        Attempts to build a sparse index from the siibra cache, looking for
-        suitable cache files with the specified prefix.
+        Load a precomputed SparseIndex into memory.
 
         Parameters
         ----------
-        prefix: str
-            Prefix of the filenames.
+        filepath_or_url: str
+            Path/url to the SparseIndex files
+            (e.g. https://url_to_files/basefilename):
+            - basefilename.sparseindex_v1.probs.txt.gz
+            - basefilename.sparseindex_v1.bboxes.txt.gz
+            - basefilename.sparseindex_v1.voxels.nii.gz
 
         Returns
         -------
         SparseIndex
-            None if cached files are not found or suitable.
         """
-        from nibabel import load
-
-        probsfile = cache.CACHE.build_filename(f"{cache_name}", suffix="probs.txt.gz")
-        bboxfile = cache.CACHE.build_filename(f"{cache_name}", suffix="bboxes.txt.gz")
-        voxelfile = cache.CACHE.build_filename(f"{cache_name}", suffix="voxels.nii.gz")
-        if not all(path.isfile(f) for f in [probsfile, bboxfile, voxelfile]):
-            return None
+        from gzip import decompress
+        spindtxt_decoder = lambda b: decompress(b).decode('utf-8').strip().splitlines()
+
+        probsfile = filepath_or_url + SparseIndex._SUFFIXES["probs"]
+        bboxfile = filepath_or_url + SparseIndex._SUFFIXES["bboxes"]
+        voxelfile = filepath_or_url + SparseIndex._SUFFIXES["voxels"]
+        if all(path.isfile(f) for f in [probsfile, bboxfile, voxelfile]):
+            request = FileLoader
+        else:
+            request = HttpRequest
 
-        result = SparseIndex()
+        result = cls()
 
-        voxels = load(voxelfile)
+        voxels = request(voxelfile).get()
         result.voxels = np.asanyarray(voxels.dataobj)
         result.affine = voxels.affine
         result.shape = voxels.shape
 
-        with gzip.open(probsfile, "rt") as f:
-            lines = f.readlines()
-            for line in siibra_tqdm(
-                lines,
-                total=len(lines),
-                desc="Loading sparse index",
-                unit="voxels"
-            ):
-                fields = line.strip().split(" ")
-                mapindices = list(map(int, fields[0::2]))
-                values = list(map(float, fields[1::2]))
-                D = dict(zip(mapindices, values))
-                result.probs.append(D)
-
-        with gzip.open(bboxfile, "rt") as f:
-            for line in f:
-                fields = line.strip().split(" ")
-                result.bboxes.append(
-                    {
-                        "minpoint": tuple(map(int, fields[:3])),
-                        "maxpoint": tuple(map(int, fields[3:])),
-                    }
-                )
+        lines_probs = request(probsfile, func=spindtxt_decoder).get()
+        for line in siibra_tqdm(
+            lines_probs,
+            total=len(lines_probs),
+            desc="Loading sparse index",
+            unit="voxels"
+        ):
+            fields = line.strip().split(" ")
+            mapindices = list(map(int, fields[0::2]))
+            values = list(map(float, fields[1::2]))
+            D = dict(zip(mapindices, values))
+            result.probs.append(D)
+
+        lines_bboxes = request(bboxfile, func=spindtxt_decoder).get()
+        for line in lines_bboxes:
+            fields = line.strip().split(" ")
+            result.bboxes.append({
+                "minpoint": tuple(map(int, fields[:3])),
+                "maxpoint": tuple(map(int, fields[3:])),
+            })
 
         return result
 
+    def save(self, base_filename: str, folder: str = ""):
+        """
+        Save the SparseIndex as three files under the folder `folder`,
+        using `base_filename`. If the SparseIndex is not cached, siibra
+        will first create it.
+
+        Parameters
+        ----------
+        base_filename: str
+            The files will be created as:
+            - base_filename.sparseindex_v1.probs.txt.gz
+            - base_filename.sparseindex_v1.bboxes.txt.gz
+            - base_filename.sparseindex_v1.voxels.nii.gz
+        folder: str, default=""
+        """
+        from nibabel import Nifti1Image
+        import gzip
+
+        fullpath = path.join(folder, base_filename)
+        logger.info(f"Saving SparseIndex to '{base_filename}' with suffixes {SparseIndex._SUFFIXES}")
+
+        if folder and not path.isdir(folder):
+            makedirs(folder)
+
+        Nifti1Image(self.voxels, self.affine).to_filename(
+            fullpath + SparseIndex._SUFFIXES["voxels"]
+        )
+        with gzip.open(fullpath + SparseIndex._SUFFIXES["probs"], 'wt') as f:
+            for D in self.probs:
+                f.write(
+                    "{}\n".format(
+                        " ".join(f"{i} {p}" for i, p in D.items())
+                    )
+                )
+        with gzip.open(fullpath + SparseIndex._SUFFIXES["bboxes"], "wt") as f:
+            for bbox in self.bboxes:
+                f.write(
+                    "{} {}\n".format(
+                        " ".join(map(str, bbox["minpoint"])),
+                        " ".join(map(str, bbox["maxpoint"])),
+                    )
+                )
+        logger.info(f"SparseIndex is saved to {fullpath}.")
+
+    @classmethod
+    def from_sparsemap(cls, sparsemap: "SparseMap") -> "SparseIndex":
+        with provider.SubvolumeProvider.UseCaching():
+            spind = cls()
+            for img in siibra_tqdm(
+                sparsemap.fetch_iter(), total=len(sparsemap), unit="maps",
+                desc="Fetching volumetric maps and computing SparseIndex"
+            ):
+                spind.add_img(np.asanyarray(img.dataobj), img.affine)
+        return spind
+
 
 class SparseMap(parcellationmap.Map):
     """
@@ -211,10 +251,6 @@ class SparseMap(parcellationmap.Map):
     to the actual (probability) value.
     """
 
-    # A gitlab instance which holds precomputed sparse indices
-    _GITLAB_SERVER = 'https://jugit.fz-juelich.de'
-    _GITLAB_PROJECT = 5779
-
     def __init__(
         self,
         identifier: str,
@@ -249,39 +285,24 @@ class SparseMap(parcellationmap.Map):
 
     @property
     def _cache_prefix(self):
-        return f"{self.parcellation.id}_{self.space.id}_{self.maptype}_{self.name}_index"
+        return CACHE.build_filename(f"{self.parcellation.id}_{self.space.id}_{self.maptype}_{self.key.lower()}_index")
 
     @property
     def sparse_index(self):
         if self._sparse_index_cached is None:
-            spind = SparseIndex._from_local_cache(self._cache_prefix)
-            if spind is None:
-                logger.info("Downloading precomputed SparseIndex...")
-                gconn = GitlabConnector(self._GITLAB_SERVER, self._GITLAB_PROJECT, "main")
-                zip_fname = f"{self.name.replace(' ', '_').replace('statistical', 'continuous')}_index.zip"
+            try:  # try loading from cache on disk
+                spind = SparseIndex.load(self._cache_prefix)
+            except Exception:
+                spind = None
+            if spind is None:  # try from precomputed source
                 try:
-                    assert zip_fname in gconn.search_files(), f"{zip_fname} is not in {gconn}."
-                    zipfile = gconn.get_loader(zip_fname).url
-                    spind = self.load_zipped_sparseindex(zipfile)
+                    logger.info("Downloading and loading precomputed SparseIndex...")
+                    spind = SparseIndex.load(SparseIndex._DATAPROXY_BASEURL + self.key.lower())
                 except Exception:
-                    logger.info("Failed to load precomputed SparseIndex from Gitlab.")
-                    logger.debug(f"Could not load SparseIndex from Gitlab at {gconn}", exc_info=1)
-            if spind is None:
-                with provider.SubvolumeProvider.UseCaching():
-                    spind = SparseIndex()
-                    for vol in siibra_tqdm(
-                        range(len(self)), total=len(self), unit="maps",
-                        desc=f"Fetching {len(self)} volumetric maps"
-                    ):
-                        img = super().fetch(
-                            index=MapIndex(volume=vol, label=None)
-                        )
-                        if img is None:
-                            region = self.get_region(volume=vol)
-                            logger.error(f"Cannot retrieve volume #{vol} for {region.name}, it will not be included in the sparse map.")
-                            continue
-                        spind.add_img(np.asanyarray(img.dataobj), img.affine)
-                    spind._to_local_cache(self._cache_prefix)
+                    logger.error("Failed to download precomputed SparseIndex.", exc_info=1)
+            if spind is None:  # Download each map and compute the SparseIndex
+                spind = SparseIndex.from_sparsemap(self)
+                spind.save(self._cache_prefix, folder=CACHE.folder)
             self._sparse_index_cached = spind
         assert self._sparse_index_cached.max() == len(self._sparse_index_cached.probs) - 1
         return self._sparse_index_cached
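
Taken together, the reworked `sparse_index` property resolves an index in three steps: local disk cache, then the EBRAINS data proxy, then recomputation from the volumes. A condensed sketch of that order (names follow the diff; `smap` is a hypothetical SparseMap instance, and `CACHE` is the siibra cache object imported above):

# Condensed sketch of the lookup order implemented above (not verbatim code).
try:
    spind = SparseIndex.load(smap._cache_prefix)        # 1. local disk cache
except Exception:
    spind = None
if spind is None:
    try:                                                # 2. EBRAINS data proxy
        spind = SparseIndex.load(
            SparseIndex._DATAPROXY_BASEURL + smap.key.lower()
        )
    except Exception:
        spind = None
if spind is None:                                       # 3. recompute and persist
    spind = SparseIndex.from_sparsemap(smap)
    spind.save(smap._cache_prefix, folder=CACHE.folder)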
@@ -294,148 +315,6 @@ class SparseMap(parcellationmap.Map):
     def shape(self):
         return self.sparse_index.shape
 
-    def save_sparseindex(self, destination: str, filename: str = None):
-        """
-        Save the SparseIndex as a .zip in the destination folder from local
-        cache. If the SparseIndex is not cached, siibra will first create it.
-
-        Parameters
-        ----------
-        destination: str
-            The path where the zip file will be created.
-        filename: str, default=None
-            Name of the zip and prefix of the SparseIndex files. If None, siibra
-            uses the `name` property.
-        """
-        if filename is None:
-            filename = f"{self.name.replace(' ', '_')}_index"
-        logger.info(f"Saving SparseIndex of '{self.name}' as '{filename}.zip'")
-        if not path.isdir(destination):
-            makedirs(destination)
-        if self._sparse_index_cached is None:
-            _ = self.sparse_index
-        suffices = [".probs.txt.gz", ".bboxes.txt.gz", ".voxels.nii.gz"]
-        try:
-            with ZipFile(f"{destination}/{filename}.zip", 'w') as zipf:
-                for suffix in suffices:
-                    zipf.write(
-                        filename=cache.CACHE.build_filename(self._cache_prefix, suffix),
-                        arcname=path.basename(f"{filename}{suffix}"),
-                        compress_type=ZIP_DEFLATED
-                    )
-        except Exception as e:
-            logger.error("Could not save SparseIndex:\n")
-            raise e
-        logger.info("SparseIndex is saved.")
-
-    def load_zipped_sparseindex(self, zipfname: str):
-        """
-        Load a SparseIndex from a previously computed source and create a
-        local cache.
-
-        Parameters
-        ----------
-        zipfname: str
-            A url or a path to the zip file containing the SparseIndex files
-            for this SparseMap, precomputed by siibra.
-
-        Returns
-        -------
-        SparseIndex
-        """
-        zconn = ZipfileConnector(zipfname)
-        with ZipFile(zconn.zipfile, 'r') as zp:
-            suffices = [".probs.txt.gz", ".bboxes.txt.gz", ".voxels.nii.gz"]
-            for suffix in suffices:
-                file = [f for f in zconn.search_files(suffix=suffix)]
-                assert len(file) == 1, f"Could not find a unique '{suffix}' file in {zipfname}."
-                zp.extract(file[0], cache.CACHE.folder)
-                rename(
-                    path.join(cache.CACHE.folder, file[0]),
-                    cache.CACHE.build_filename(self._cache_prefix, suffix=suffix)
-                )
-        zconn.clear_cache()
-
-        return SparseIndex._from_local_cache(self._cache_prefix)
-
-    def fetch(
-        self,
-        region_or_index: Union[MapIndex, str, 'Region'] = None,
-        *,
-        index: MapIndex = None,
-        region: Union[str, 'Region'] = None,
-        **kwargs
-    ):
-        """
-        Recreate a particular volumetric map from the sparse
-        representation.
-
-        Parameters
-        ----------
-        region_or_index: str, Region, MapIndex
-            Lazy match the specification.
-        index: MapIndex
-            The index to be fetched.
-        region: str, Region
-            Region name specification. If given, will be used to decode the map
-            index of a particular region.
-
-        Returns
-        -------
-        An image or mesh
-        """
-        if kwargs.get('format') in ['mesh'] + _volume.Volume.MESH_FORMATS:
-            # a mesh is requested, this is not handled by the sparse map
-            return super().fetch(region_or_index, index=index, region=region, **kwargs)
-
-        try:
-            length = len([arg for arg in [region_or_index, region, index] if arg is not None])
-            assert length == 1
-        except AssertionError:
-            if length > 1:
-                raise ExcessiveArgumentException(
-                    "One and only one of region_or_index, region, index can be defined for fetch"
-                )
-            # user can provide no arguments, which assumes one and only one volume present
-
-        if isinstance(region_or_index, MapIndex):
-            index = region_or_index
-
-        from ..core.region import Region
-        if isinstance(region_or_index, (str, Region)):
-            region = region_or_index
-
-        volidx = None
-        if index is not None:
-            assert isinstance(index, MapIndex)
-            volidx = index.volume
-        if region is not None:
-            index = self.get_index(region)
-            assert index is not None
-            volidx = index.volume
-
-        if volidx is None:
-            try:
-                assert len(self) == 1
-                volidx = 0
-            except AssertionError:
-                raise InsufficientArgumentException(
-                    f"{self.__class__.__name__} provides {len(self)} volumes. "
-                    "Specify 'region' or 'index' for fetch() to identify one."
-                )
-
-        assert isinstance(volidx, int)
-        x, y, z, v = self.sparse_index.mapped_voxels(volidx)
-        result = np.zeros(self.shape, dtype=np.float32)
-        result[x, y, z] = v
-        volume = _volume.from_array(
-            data=result,
-            affine=self.affine,
-            space=self.space,
-            name=f"Sparse map of {region} from {self.parcellation} in {self.space}"
-        )
-        return volume.fetch()
-
     def _read_voxel(self, x, y, z):
         spind = self.sparse_index
         vx = spind.voxels[x, y, z]
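
The `_read_voxel` method shown above (truncated here) reads from the same sparse structure used throughout: `voxels` holds per-voxel row indices into `probs`, and each `probs` entry maps volume indices to values, so zeros are never stored. A toy illustration with made-up numbers (the sentinel value for unmapped voxels is an assumption, not taken from the diff):

# Toy illustration (made-up numbers) of the sparse representation.
import numpy as np

probs = [{0: 0.8}, {0: 0.2, 1: 0.7}]    # two distinct value combinations
voxels = np.full((2, 2, 2), -1, dtype=int)  # -1 marks unmapped voxels (assumed)
voxels[1, 0, 1] = 1                     # this voxel uses probs[1]

vx = voxels[1, 0, 1]
value_in_volume_1 = probs[vx].get(1, 0.0)   # -> 0.7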