siibra 1.0a8__py3-none-any.whl → 1.0a11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siibra might be problematic. Click here for more details.

Files changed (37) hide show
  1. siibra/VERSION +1 -1
  2. siibra/commons.py +75 -30
  3. siibra/configuration/factory.py +16 -17
  4. siibra/core/atlas.py +40 -16
  5. siibra/core/region.py +242 -39
  6. siibra/features/__init__.py +19 -8
  7. siibra/features/connectivity/functional_connectivity.py +1 -1
  8. siibra/features/connectivity/regional_connectivity.py +45 -3
  9. siibra/features/feature.py +63 -13
  10. siibra/features/image/image.py +3 -1
  11. siibra/features/tabular/bigbrain_intensity_profile.py +1 -1
  12. siibra/features/tabular/cell_density_profile.py +5 -3
  13. siibra/features/tabular/cortical_profile.py +79 -15
  14. siibra/features/tabular/gene_expression.py +110 -1
  15. siibra/features/tabular/layerwise_bigbrain_intensities.py +1 -1
  16. siibra/features/tabular/layerwise_cell_density.py +3 -1
  17. siibra/features/tabular/receptor_density_fingerprint.py +3 -1
  18. siibra/features/tabular/receptor_density_profile.py +3 -5
  19. siibra/features/tabular/regional_timeseries_activity.py +59 -10
  20. siibra/features/tabular/tabular.py +4 -2
  21. siibra/livequeries/bigbrain.py +34 -0
  22. siibra/locations/location.py +1 -1
  23. siibra/retrieval/cache.py +15 -10
  24. siibra/retrieval/repositories.py +2 -2
  25. siibra/retrieval/requests.py +30 -1
  26. siibra/volumes/parcellationmap.py +18 -22
  27. siibra/volumes/providers/__init__.py +1 -0
  28. siibra/volumes/providers/freesurfer.py +113 -0
  29. siibra/volumes/providers/neuroglancer.py +55 -25
  30. siibra/volumes/providers/nifti.py +14 -16
  31. siibra/volumes/sparsemap.py +1 -1
  32. siibra/volumes/volume.py +13 -15
  33. {siibra-1.0a8.dist-info → siibra-1.0a11.dist-info}/METADATA +1 -1
  34. {siibra-1.0a8.dist-info → siibra-1.0a11.dist-info}/RECORD +37 -36
  35. {siibra-1.0a8.dist-info → siibra-1.0a11.dist-info}/LICENSE +0 -0
  36. {siibra-1.0a8.dist-info → siibra-1.0a11.dist-info}/WHEEL +0 -0
  37. {siibra-1.0a8.dist-info → siibra-1.0a11.dist-info}/top_level.txt +0 -0
siibra/VERSION CHANGED
@@ -1 +1 @@
1
- 1.0a08
1
+ 1.0a11
siibra/commons.py CHANGED
@@ -18,11 +18,12 @@ import os
18
18
  import re
19
19
  from enum import Enum
20
20
  from nibabel import Nifti1Image
21
+ from nilearn.image import resample_to_img
21
22
  import logging
22
23
  from tqdm import tqdm
23
24
  import numpy as np
24
25
  import pandas as pd
25
- from typing import Generic, Iterable, Iterator, List, TypeVar, Union, Dict
26
+ from typing import Generic, Iterable, Iterator, List, TypeVar, Union, Dict, Generator, Tuple
26
27
  from skimage.filters import gaussian
27
28
  from dataclasses import dataclass
28
29
  from hashlib import md5
@@ -102,9 +103,11 @@ class InstanceTable(Generic[T], Iterable):
102
103
  self._dataframe_cached = None
103
104
 
104
105
  def add(self, key: str, value: T) -> None:
105
- """Add a key/value pair to the registry.
106
+ """
107
+ Add a key/value pair to the registry.
106
108
 
107
- Args:
109
+ Parameters
110
+ ----------
108
111
  key (string): Unique name or key of the object
109
112
  value (object): The registered object
110
113
  """
@@ -153,10 +156,13 @@ class InstanceTable(Generic[T], Iterable):
153
156
  the first in sorted order is returned. If the specification does not match,
154
157
  a RuntimeError is raised.
155
158
 
156
- Args:
157
- spec [int or str]: Index or string specification of an object
159
+ Parameters
160
+ ----------
161
+ spec: int, str
162
+ Index or string specification of an object
158
163
 
159
- Returns:
164
+ Returns
165
+ -------
160
166
  Matched object
161
167
  """
162
168
  if spec is None:
@@ -508,34 +514,69 @@ def compare_arrays(arr1: np.ndarray, affine1: np.ndarray, arr2: np.ndarray, affi
508
514
  )
509
515
 
510
516
 
511
- def resample_array_to_array(
512
- source_data: np.ndarray,
513
- source_affine: np.ndarray,
514
- target_data: np.ndarray,
515
- target_affine: np.ndarray
516
- ) -> np.ndarray:
517
+ def resample_img_to_img(
518
+ source_img: Nifti1Image,
519
+ target_img: Nifti1Image,
520
+ interpolation: str = ""
521
+ ) -> Nifti1Image:
517
522
  """
518
- Returns the source data resampled to match the target data
519
- according to their affines.
523
+ Resamples the source image to match the target image according to target's
524
+ affine. (A wrapper of `nilearn.image.resample_to_img`.)
525
+
526
+ Parameters
527
+ ----------
528
+ source_img : Nifti1Image
529
+ target_img : Nifti1Image
530
+ interpolation : str, Default: "nearest" if the source image is a mask otherwise "linear".
531
+ Can be 'continuous', 'linear', or 'nearest'. Indicates the resample method.
532
+
533
+ Returns
534
+ -------
535
+ Nifti1Image
520
536
  """
521
- from nibabel import Nifti1Image
522
- from nilearn.image import resample_to_img
523
- interp = "nearest" if issubclass(source_data.dtype.type, np.integer) \
524
- else 'linear'
537
+ interpolation = "nearest" if np.array_equal(np.unique(source_img.dataobj), [0, 1]) else "linear"
525
538
  resampled_img = resample_to_img(
526
- Nifti1Image(source_data, source_affine),
527
- Nifti1Image(target_data, target_affine),
528
- interpolation=interp
539
+ source_img=source_img,
540
+ target_img=target_img,
541
+ interpolation=interpolation
529
542
  )
530
- return np.asanyarray(resampled_img.dataobj)
543
+ return resampled_img
531
544
 
532
545
 
533
- def connected_components(imgdata: np.ndarray):
546
+ def connected_components(
547
+ imgdata: np.ndarray,
548
+ background: int = 0,
549
+ connectivity: int = 2,
550
+ threshold: float = 0.0,
551
+ ) -> Generator[Tuple[int, np.ndarray], None, None]:
534
552
  """
535
- Provide an iterator over connected components in the array
553
+ Provide an iterator over connected components in the array. If the image
554
+ data is float (such as probability maps), it will convert to a mask and
555
+ then find the connected components.
556
+
557
+ Note
558
+ ----
559
+ Uses `skimage.measure.label()` to determine foreground components.
560
+
561
+ Parameters
562
+ ----------
563
+ imgdata : np.ndarray
564
+ background : int, Default: 0
565
+ connectivity : int, Default: 2
566
+ threshold: float, Default: 0.0
567
+ The threshold used to create mask from probability maps, i.e., anything
568
+ below set to 0 and rest to 1.
569
+
570
+ Yields
571
+ ------
572
+ Generator[Tuple[int, np.ndarray], None, None]
573
+ tuple of integer label of the component and component as an np.ndarray in
574
+ the shape of the original image.
536
575
  """
537
576
  from skimage import measure
538
- components = measure.label(imgdata, connectivity=2, background=0)
577
+
578
+ mask = (imgdata > threshold).astype('uint8')
579
+ components = measure.label(mask, connectivity=connectivity, background=background)
539
580
  component_labels = np.unique(components)
540
581
  return (
541
582
  (label, (components == label).astype('uint8'))
@@ -630,11 +671,15 @@ def MI(arr1, arr2, nbins=100, normalized=True):
630
671
  """
631
672
  Compute the mutual information between two 3D arrays, which need to have the same shape.
632
673
 
633
- Parameters:
634
- arr1 : First 3D array
635
- arr2 : Second 3D array
636
- nbins : number of bins to use for computing the joint histogram (applies to intensity range)
637
- normalized : Boolean, default:True
674
+ Parameters
675
+ ----------
676
+ arr1: np.ndarray
677
+ First 3D array
678
+ arr2: np.ndarray
679
+ Second 3D array
680
+ nbins: int
681
+ number of bins to use for computing the joint histogram (applies to intensity range)
682
+ normalized: Boolean. Default: True
638
683
  if True, the normalized MI of arrays X and Y will be returned,
639
684
  leading to a range of values between 0 and 1. Normalization is
640
685
  achieved by NMI = 2*MI(X,Y) / (H(X) + H(Y)), where H(x) is the entropy of X
@@ -27,12 +27,12 @@ from ..core import atlas, parcellation, space, region
27
27
  from ..locations import point, pointset
28
28
  from ..retrieval import datasets, repositories
29
29
  from ..volumes import volume, sparsemap, parcellationmap
30
- from ..volumes.providers import provider, gifti, neuroglancer, nifti
30
+ from ..volumes.providers.provider import VolumeProvider
31
31
 
32
32
  from os import path
33
33
  import json
34
34
  import numpy as np
35
- from typing import List, Type, Dict, Callable
35
+ from typing import List, Dict, Callable
36
36
  import pandas as pd
37
37
  from io import BytesIO
38
38
  from functools import wraps
@@ -102,7 +102,7 @@ class Factory:
102
102
  for i, vspec in enumerate(volume_specs):
103
103
  if space_id:
104
104
  if 'space' in vspec:
105
- logger.warning(f"Replacing space spec {vspec['space']} in volume spec with {space_id}")
105
+ assert vspec['space']["@id"] == space_id, "Space spec {vspec['space']} in volume field must be the same with space field in the configuration."
106
106
  vspec['space'] = {"@id": space_id}
107
107
  if names and vspec.get('name') is None: # only use provided name if the volume has no specific name
108
108
  vspec['name'] = names[i]
@@ -256,18 +256,9 @@ class Factory:
256
256
  @build_type("siibra/volume/v0.0.1")
257
257
  def build_volume(cls, spec):
258
258
  providers: List[volume.VolumeProvider] = []
259
- provider_types: List[Type[volume.VolumeProvider]] = [
260
- neuroglancer.NeuroglancerProvider,
261
- neuroglancer.NeuroglancerMesh,
262
- neuroglancer.NeuroglancerSurfaceMesh,
263
- nifti.NiftiProvider,
264
- nifti.ZipContainedNiftiProvider,
265
- gifti.GiftiMesh,
266
- gifti.GiftiSurfaceLabeling
267
- ]
268
259
 
269
260
  for srctype, provider_spec in spec.get("providers", {}).items():
270
- for ProviderType in provider_types:
261
+ for ProviderType in VolumeProvider._SUBCLASSES:
271
262
  if srctype == ProviderType.srctype:
272
263
  providers.append(ProviderType(provider_spec))
273
264
  break
@@ -276,7 +267,7 @@ class Factory:
276
267
  logger.warning(f"No provider defined for volume Source type {srctype}")
277
268
  cls._warnings_issued.append(srctype)
278
269
 
279
- assert all([isinstance(p, provider.VolumeProvider) for p in providers])
270
+ assert all([isinstance(p, VolumeProvider) for p in providers])
280
271
  result = volume.Volume(
281
272
  space_spec=spec.get("space", {}),
282
273
  providers=providers,
@@ -294,7 +285,7 @@ class Factory:
294
285
  assert "filename" in spec
295
286
  basename = path.splitext(path.basename(spec['filename']))[0]
296
287
  name = basename.replace('-', ' ').replace('_', ' ').replace('continuous', 'statistical')
297
- identifier = f"{spec['@type'].replace('/','-')}_{basename}"
288
+ identifier = f"{spec['@type'].replace('/', '-')}_{basename}"
298
289
  volumes = cls.extract_volumes(spec, space_id=spec["space"].get("@id"), name_prefix=basename)
299
290
 
300
291
  if spec.get("sparsemap", {}).get("is_sparsemap"):
@@ -365,6 +356,7 @@ class Factory:
365
356
  tsvfile=spec['file'],
366
357
  anchor=cls.extract_anchor(spec),
367
358
  datasets=cls.extract_datasets(spec),
359
+ id=spec.get("@id", None)
368
360
  )
369
361
 
370
362
  @classmethod
@@ -375,6 +367,7 @@ class Factory:
375
367
  layerfiles=spec['layerfiles'],
376
368
  anchor=cls.extract_anchor(spec),
377
369
  datasets=cls.extract_datasets(spec),
370
+ id=spec.get("@id", None)
378
371
  )
379
372
 
380
373
  @classmethod
@@ -385,6 +378,7 @@ class Factory:
385
378
  tsvfile=spec['file'],
386
379
  anchor=cls.extract_anchor(spec),
387
380
  datasets=cls.extract_datasets(spec),
381
+ id=spec.get("@id", None)
388
382
  )
389
383
 
390
384
  @classmethod
@@ -396,6 +390,7 @@ class Factory:
396
390
  url=spec['file'],
397
391
  anchor=cls.extract_anchor(spec),
398
392
  datasets=cls.extract_datasets(spec),
393
+ id=spec.get("@id", None)
399
394
  )
400
395
 
401
396
  @classmethod
@@ -408,6 +403,7 @@ class Factory:
408
403
  "space_spec": vol._space_spec,
409
404
  "providers": vol._providers.values(),
410
405
  "datasets": cls.extract_datasets(spec),
406
+ "id": spec.get("@id", None)
411
407
  }
412
408
  modality = spec.get('modality', "")
413
409
  if modality == "cell body staining":
@@ -425,6 +421,7 @@ class Factory:
425
421
  "space_spec": vol._space_spec,
426
422
  "providers": vol._providers.values(),
427
423
  "datasets": cls.extract_datasets(spec),
424
+ "id": spec.get("@id", None)
428
425
  }
429
426
  modality = spec.get('modality', "")
430
427
  if modality == "cell body staining":
@@ -495,7 +492,8 @@ class Factory:
495
492
  "filename": filename,
496
493
  "subject": fkey if files_indexed_by == "subject" else "average",
497
494
  "feature": fkey if files_indexed_by == "feature" else None,
498
- "connector": repo_connector or base_url + filename
495
+ "connector": repo_connector or base_url + filename,
496
+ "id": spec.get("@id", None)
499
497
  })
500
498
  conn_by_file.append(conn_cls(**kwargs))
501
499
  return conn_by_file
@@ -528,7 +526,8 @@ class Factory:
528
526
  for fkey, filename in files.items():
529
527
  kwargs.update({
530
528
  "filename": filename,
531
- "subject": fkey
529
+ "subject": fkey,
530
+ "id": spec.get("@id", None)
532
531
  })
533
532
  timeseries_by_file.append(timeseries_cls(**kwargs))
534
533
  return timeseries_by_file
siibra/core/atlas.py CHANGED
@@ -64,9 +64,20 @@ class Atlas(concept.AtlasConcept, configuration_folder="atlases"):
64
64
  matchfunc=_parcellation.Parcellation.match,
65
65
  )
66
66
 
67
- def get_parcellation(self, parcellation=None):
68
- """Returns a valid parcellation object defined by the atlas.
69
- If no specification is provided, the default is returned."""
67
+ def get_parcellation(self, parcellation=None) -> "_parcellation.Parcellation":
68
+ """
69
+ Returns a valid parcellation object defined by the atlas. If no
70
+ specification is provided, the default is returned.
71
+
72
+ Parameters
73
+ ----------
74
+ parcellation: str, Parcellation
75
+ specification of a parcellation or a parcellation object
76
+
77
+ Returns
78
+ -------
79
+ Parcellation
80
+ """
70
81
 
71
82
  if parcellation is None:
72
83
  parcellation_obj = self.parcellations[self._parcellation_ids[0]]
@@ -80,12 +91,19 @@ class Atlas(concept.AtlasConcept, configuration_folder="atlases"):
80
91
 
81
92
  return self.parcellations[parcellation]
82
93
 
83
- def get_space(self, space=None):
84
- """Returns a valid reference space object defined by the atlas.
85
- If no specification is provided, the default is returned.
94
+ def get_space(self, space=None) -> "_space.Space":
95
+ """
96
+ Returns a valid reference space object defined by the atlas. If no
97
+ specification is provided, the default is returned.
98
+
99
+ Parameters
100
+ ----------
101
+ space: str, Space
102
+ specification of a space or a space object
86
103
 
87
- Parameters:
88
- space: Space, or string specification of a space
104
+ Returns
105
+ -------
106
+ Space
89
107
  """
90
108
  if space is None:
91
109
  space_obj = self.spaces[self._space_ids[0]]
@@ -105,12 +123,13 @@ class Atlas(concept.AtlasConcept, configuration_folder="atlases"):
105
123
  parcellation: _parcellation.Parcellation = None,
106
124
  maptype: MapType = MapType.LABELLED,
107
125
  ):
108
- """Returns a parcellation map in the given space.
126
+ """
127
+ Returns a parcellation map in the given space.
109
128
 
110
129
  Parameters
111
130
  ----------
112
131
 
113
- space : Space
132
+ space: Space
114
133
  The requested reference space. If None, the default is used.
115
134
  parcellation: Parcellation
116
135
  The requested parcellation. If None, the default is used.
@@ -160,13 +179,18 @@ class Atlas(concept.AtlasConcept, configuration_folder="atlases"):
160
179
  def get_voi(self, space: _space.Space, point1: tuple, point2: tuple):
161
180
  """Get a volume of interest spanned by two points in the given reference space.
162
181
 
163
- Args:
164
- space (Space or str): The target reference space, or a string specification of the space
165
- point1 (Tuple): A 3D coordinate given in this reference space
166
- point2 (Tuple): Another 3D coordinate given in this reference space
182
+ Parameters
183
+ ----------
184
+ space: Space, str
185
+ The target reference space, or a string specification of the space
186
+ point1: Tuple
187
+ A 3D coordinate given in this reference space
188
+ point2: Tuple
189
+ Another 3D coordinate given in this reference space
167
190
 
168
- Returns:
169
- Bounding Box
191
+ Returns
192
+ -------
193
+ BoundingBox
170
194
  """
171
195
  return self.get_template(space).get_boundingbox(point1, point2)
172
196