siibra 1.0.1a1__py3-none-any.whl → 1.0.1a5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of siibra might be problematic.
Files changed (67)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +7 -16
  3. siibra/commons.py +19 -8
  4. siibra/configuration/configuration.py +5 -6
  5. siibra/configuration/factory.py +13 -8
  6. siibra/core/__init__.py +1 -1
  7. siibra/core/assignment.py +19 -7
  8. siibra/core/atlas.py +3 -3
  9. siibra/core/concept.py +4 -2
  10. siibra/core/parcellation.py +5 -5
  11. siibra/core/region.py +24 -25
  12. siibra/core/space.py +4 -6
  13. siibra/core/structure.py +2 -2
  14. siibra/explorer/url.py +2 -2
  15. siibra/features/anchor.py +3 -7
  16. siibra/features/connectivity/regional_connectivity.py +51 -40
  17. siibra/features/dataset/ebrains.py +1 -1
  18. siibra/features/feature.py +29 -20
  19. siibra/features/image/__init__.py +6 -3
  20. siibra/features/image/image.py +2 -4
  21. siibra/features/image/sections.py +81 -2
  22. siibra/features/image/volume_of_interest.py +8 -7
  23. siibra/features/tabular/__init__.py +1 -1
  24. siibra/features/tabular/bigbrain_intensity_profile.py +2 -1
  25. siibra/features/tabular/cell_density_profile.py +8 -9
  26. siibra/features/tabular/cortical_profile.py +6 -6
  27. siibra/features/tabular/gene_expression.py +34 -16
  28. siibra/features/tabular/layerwise_bigbrain_intensities.py +4 -3
  29. siibra/features/tabular/layerwise_cell_density.py +83 -24
  30. siibra/features/tabular/receptor_density_fingerprint.py +34 -9
  31. siibra/features/tabular/receptor_density_profile.py +1 -2
  32. siibra/features/tabular/regional_timeseries_activity.py +7 -7
  33. siibra/features/tabular/tabular.py +14 -7
  34. siibra/livequeries/allen.py +23 -22
  35. siibra/livequeries/bigbrain.py +239 -51
  36. siibra/livequeries/ebrains.py +13 -10
  37. siibra/livequeries/query.py +3 -3
  38. siibra/locations/__init__.py +17 -8
  39. siibra/locations/boundingbox.py +10 -8
  40. siibra/{experimental/plane3d.py → locations/experimental.py} +113 -13
  41. siibra/locations/location.py +17 -13
  42. siibra/locations/point.py +14 -19
  43. siibra/locations/pointcloud.py +57 -12
  44. siibra/retrieval/cache.py +1 -0
  45. siibra/retrieval/datasets.py +19 -13
  46. siibra/retrieval/repositories.py +10 -11
  47. siibra/retrieval/requests.py +26 -24
  48. siibra/vocabularies/__init__.py +1 -2
  49. siibra/volumes/__init__.py +4 -3
  50. siibra/volumes/parcellationmap.py +33 -17
  51. siibra/volumes/providers/freesurfer.py +4 -4
  52. siibra/volumes/providers/gifti.py +4 -4
  53. siibra/volumes/providers/neuroglancer.py +19 -22
  54. siibra/volumes/providers/nifti.py +6 -6
  55. siibra/volumes/providers/provider.py +3 -2
  56. siibra/volumes/sparsemap.py +19 -26
  57. siibra/volumes/volume.py +21 -28
  58. {siibra-1.0.1a1.dist-info → siibra-1.0.1a5.dist-info}/METADATA +37 -17
  59. siibra-1.0.1a5.dist-info/RECORD +80 -0
  60. {siibra-1.0.1a1.dist-info → siibra-1.0.1a5.dist-info}/WHEEL +1 -1
  61. siibra/experimental/__init__.py +0 -19
  62. siibra/experimental/contour.py +0 -61
  63. siibra/experimental/cortical_profile_sampler.py +0 -57
  64. siibra/experimental/patch.py +0 -98
  65. siibra-1.0.1a1.dist-info/RECORD +0 -84
  66. {siibra-1.0.1a1.dist-info → siibra-1.0.1a5.dist-info/licenses}/LICENSE +0 -0
  67. {siibra-1.0.1a1.dist-info → siibra-1.0.1a5.dist-info}/top_level.txt +0 -0
siibra/volumes/parcellationmap.py CHANGED
@@ -14,9 +14,18 @@
 # limitations under the License.
 """Provides spatial representations for parcellations and regions."""
 
+from collections import defaultdict
+from dataclasses import dataclass, asdict
+from typing import Union, Dict, List, TYPE_CHECKING, Iterable, Tuple
+
+import numpy as np
+import pandas as pd
+from scipy.ndimage import distance_transform_edt
+from nilearn import image
+
 from . import volume as _volume
 from .providers import provider
-from .. import logger, QUIET, exceptions
+from .. import exceptions
 from ..commons import (
     MapIndex,
     MapType,
@@ -29,19 +38,13 @@ from ..commons import (
     siibra_tqdm,
     Species,
     CompareMapsResult,
-    generate_uuid
+    generate_uuid,
+    logger,
+    QUIET,
 )
 from ..core import concept, space, parcellation, region as _region
 from ..locations import location, point, pointcloud
 
-import numpy as np
-from typing import Union, Dict, List, TYPE_CHECKING, Iterable, Tuple
-from scipy.ndimage import distance_transform_edt
-from collections import defaultdict
-from nilearn import image
-import pandas as pd
-from dataclasses import dataclass, asdict
-
 if TYPE_CHECKING:
     from ..core.region import Region
 
@@ -690,7 +693,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
             name=f"Custom colorization of {self}"
         )
 
-    def get_colormap(self, region_specs: Iterable = None, *, allow_random_colors: bool = False):
+    def get_colormap(self, region_specs: Iterable = None, *, fill_uncolored: bool = False):
         """
         Generate a matplotlib colormap from known rgb values of label indices.
 
@@ -698,7 +701,8 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
         ----------
         region_specs: iterable(regions), optional
             Optional parameter to only color the desired regions.
-        allow_random_colors: bool , optional
+        fill_uncolored: bool , optional
+            If a region has no preconfigured color, a color will be randomly (reproducible) created.
 
         Returns
        -------
@@ -711,10 +715,10 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
                "matplotlib not available. Please install matplotlib to create a matplotlib colormap."
            )
            raise e
-        if allow_random_colors:
+        if fill_uncolored:
            seed = len(self.regions)
            np.random.seed(seed)
-            logger.info(f"Random colors are allowed for regions without preconfgirued colors. Random seee: {seed}.")
+            logger.info(f"Random colors are allowed for regions without preconfgirued colors. Random seed: {seed}.")
 
        colors = {}
        if region_specs is not None:
@@ -724,6 +728,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
        else:
            include_region_names = None
 
+        no_predefined_color = []
        for regionname, indices in self._indices.items():
            for index in indices:
                if index.label is None:
@@ -735,22 +740,33 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
                region = self.get_region(index=index)
                if region.rgb is not None:
                    colors[index.label] = region.rgb
-                elif allow_random_colors:
+                elif fill_uncolored:
                    random_clr = [np.random.randint(0, 255) for r in range(3)]
                    while random_clr in list(colors.values()):
                        random_clr = [np.random.randint(0, 255) for r in range(3)]
                    colors[index.label] = random_clr
+                else:
+                    no_predefined_color.append(region.name)
 
        if len(colors) == 0:
            raise exceptions.NoPredifinedColormapException(
                f"There is no predefined/preconfigured colormap for '{self}'."
-                "Set `allow_random_colors=True` to a colormap with random values"
+                "Set `fill_uncolored=True` to get a reproducible colormap."
            )
 
+        if no_predefined_color:
+            logger.info(
+                f"No preconfigured color found for the following regions."
+                "Use `fill_uncolored=True` to display with a non-background color.\n"
+                f"{no_predefined_color}"
+            )
+
+        max_label_index = max(index[0].label for index in self._indices.values())
+
        palette = np.array(
            [
                list(colors[i]) + [1] if i in colors else [0, 0, 0, 0]
-                for i in range(max(colors.keys()) + 1)
+                for i in range(max_label_index + 1)
            ]
        ) / [255, 255, 255, 1]
        return ListedColormap(palette)
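The hunks above rename the `allow_random_colors` keyword of `Map.get_colormap()` to `fill_uncolored` and base the palette length on the largest label index. A minimal usage sketch, assuming the top-level `siibra.get_map` helper, `Map.fetch`, and nilearn plotting (none of which are part of this diff; the parcellation and space keywords are illustrative):

```python
import siibra
from nilearn import plotting

# Fetch a labelled parcellation map and colour it with the renamed keyword.
julich_map = siibra.get_map(parcellation="julich 2.9", space="mni152", maptype="labelled")
cmap = julich_map.get_colormap(fill_uncolored=True)  # 1.0.1a1 spelled this allow_random_colors=True
plotting.plot_roi(julich_map.fetch(), cmap=cmap)
```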
siibra/volumes/providers/freesurfer.py CHANGED
@@ -14,12 +14,12 @@
 # limitations under the License.
 """Handles reading and preparing gii files."""
 
-from . import provider as _provider
-
-from ...retrieval.requests import HttpRequest, ZipfileRequest
+from typing import Union, Dict, TYPE_CHECKING
 
 import numpy as np
-from typing import Union, Dict, TYPE_CHECKING
+
+from . import provider as _provider
+from ...retrieval.requests import HttpRequest, ZipfileRequest
 
 if TYPE_CHECKING:
     from ...locations import boundingbox as _boundingbox
siibra/volumes/providers/gifti.py CHANGED
@@ -14,15 +14,15 @@
 # limitations under the License.
 """Handles reading and preparing gii files."""
 
-from . import provider as _provider
+from typing import Union, Dict
+
+import numpy as np
 
+from . import provider as _provider
 from ...retrieval import requests
 from ...commons import logger, merge_meshes
 from ...locations import boundingbox as _boundingbox
 
-import numpy as np
-from typing import Union, Dict
-
 
 
 class GiftiMesh(_provider.VolumeProvider, srctype="gii-mesh"):
siibra/volumes/providers/neuroglancer.py CHANGED
@@ -13,8 +13,20 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from . import provider as _provider
+from io import BytesIO
+import os
+from typing import Union, Dict, Tuple
+import json
+
+import numpy as np
+import nibabel as nib
+from neuroglancer_scripts.precomputed_io import get_IO_for_existing_dataset, PrecomputedIO
+from neuroglancer_scripts.http_accessor import HttpAccessor
+from neuroglancer_scripts.mesh import read_precomputed_mesh, affine_transform_mesh
 
+from . import provider as _provider
+from ...retrieval import requests, cache
+from ...locations import boundingbox as _boundingbox
 from ...commons import (
     logger,
     MapType,
@@ -23,18 +35,6 @@ from ...commons import (
     QUIET,
     resample_img_to_img
 )
-from ...retrieval import requests, cache
-from ...locations import boundingbox as _boundingbox
-
-from neuroglancer_scripts.precomputed_io import get_IO_for_existing_dataset, PrecomputedIO
-from neuroglancer_scripts.http_accessor import HttpAccessor
-from neuroglancer_scripts.mesh import read_precomputed_mesh, affine_transform_mesh
-from io import BytesIO
-import nibabel as nib
-import os
-import numpy as np
-from typing import Union, Dict, Tuple
-import json
 
 
 def shift_ng_transfrom(
@@ -529,14 +529,7 @@ class NeuroglancerScale:
        if voi is None:
            bbox_ = _boundingbox.BoundingBox((0, 0, 0), self.size, space=None)
        else:
-            bbox_ = voi.transform(np.linalg.inv(self.affine))
-
-            for dim in range(3):
-                if bbox_.shape[dim] < 1:
-                    logger.warning(
-                        f"Bounding box in voxel space will be enlarged to by {self.res_mm[dim]} along axis {dim}."
-                    )
-                    bbox_.maxpoint[dim] = bbox_.maxpoint[dim] + self.res_mm[dim]
+            bbox_ = voi.transform(np.linalg.inv(self.affine), space=None)
 
        # extract minimum and maximum the chunk indices to be loaded
        gx0, gy0, gz0 = self._point_to_lower_chunk_idx(tuple(bbox_.minpoint))
@@ -559,8 +552,12 @@
        # exact bounding box requested, to cut off undesired borders
        data_min = np.array([gx0, gy0, gz0]) * self.chunk_sizes
        x0, y0, z0 = (np.array(bbox_.minpoint) - data_min).astype("int")
-        xd, yd, zd = np.ceil((np.array(bbox_.maxpoint))).astype(int) - np.floor((np.array(bbox_.minpoint))).astype(int)
+        xd, yd, zd = np.ceil(bbox_.maxpoint).astype(int) - np.floor(bbox_.minpoint).astype(int)
        offset = tuple(bbox_.minpoint)
+        if voi is not None:
+            logger.debug(
+                f"Input: {voi.minpoint.coordinate}, {voi.maxpoint.coordinate}.\nVoxel space: {bbox_.minpoint.coordinate}, {bbox_.maxpoint.coordinate}"
+            )
 
        # build the nifti image
        trans = np.identity(4)[[2, 1, 0, 3], :]  # zyx -> xyz
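The change above drops the per-axis bounding-box enlargement and simply transforms the volume of interest into voxel space with the inverse affine. A standalone sketch of that coordinate math in plain numpy (an illustrative helper, not siibra API):

```python
import numpy as np

def bbox_to_voxels(minpt, maxpt, affine):
    """Map a physical-space bounding box into voxel coordinates with the inverse
    affine - the same idea as voi.transform(np.linalg.inv(self.affine)) above."""
    inv = np.linalg.inv(affine)
    corners = np.array([
        [x, y, z, 1.0]
        for x in (minpt[0], maxpt[0])
        for y in (minpt[1], maxpt[1])
        for z in (minpt[2], maxpt[2])
    ])
    vox = corners @ inv.T  # apply the inverse affine to all 8 corners
    return vox[:, :3].min(axis=0), vox[:, :3].max(axis=0)

# e.g. a 20 mm cube on a 0.5 mm isotropic grid -> voxel indices 0..40
print(bbox_to_voxels((0, 0, 0), (20, 20, 20), np.diag([0.5, 0.5, 0.5, 1.0])))
```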
siibra/volumes/providers/nifti.py CHANGED
@@ -13,17 +13,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from . import provider as _provider
+import os
+from typing import Union, Dict, Tuple
+
+import numpy as np
+import nibabel as nib
 
+from . import provider as _provider
 from ...commons import logger, resample_img_to_img
 from ...retrieval import requests
 from ...locations import pointcloud, boundingbox as _boundingbox
 
-from typing import Union, Dict, Tuple
-import nibabel as nib
-import os
-import numpy as np
-
 
 
 class NiftiProvider(_provider.VolumeProvider, srctype="nii"):
siibra/volumes/providers/provider.py CHANGED
@@ -17,12 +17,13 @@ from __future__ import annotations
 
 from abc import ABC, abstractmethod
 from typing import TYPE_CHECKING, Union, Dict, List
-from nibabel import Nifti1Image
 import json
+
+from nibabel import Nifti1Image
+
 if TYPE_CHECKING:
     from ...locations.boundingbox import BoundingBox
 
-# TODO add mesh primitive. Check nibabel implementation? Use trimesh? Do we want to add yet another dependency?
 VolumeData = Union[Nifti1Image, Dict]
 
 
siibra/volumes/sparsemap.py CHANGED
@@ -13,19 +13,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """Represents lists of probabilistic brain region maps."""
-from . import parcellationmap, volume as _volume
 
+from os import path, makedirs
+from typing import Dict, List
+
+import numpy as np
+
+from . import parcellationmap, volume as _volume
 from .providers import provider
-from ..commons import MapIndex, logger, connected_components, siibra_tqdm
+from ..commons import MapIndex, logger, connected_components, siibra_tqdm, resample_img_to_img
 from ..locations import boundingbox
 from ..retrieval.cache import CACHE
 from ..retrieval.requests import HttpRequest, FileLoader
 
-from os import path, makedirs
-from typing import Dict, List
-from nilearn import image
-import numpy as np
-
 
 
 class SparseIndex:
@@ -352,30 +352,23 @@ class SparseMap(parcellationmap.Map):
        split_components: bool, default: True
            Whether to split the query volume into disjoint components.
        """
+        from nibabel import Nifti1Image
+
        queryimg = queryvolume.fetch()
-        imgdata = np.asanyarray(queryimg.dataobj)
-        imgaffine = queryimg.affine
+        assert isinstance(queryimg, Nifti1Image)
        assignments = []
 
-        # resample query image into this image's voxel space, if required
-        if (imgaffine - self.affine).sum() == 0:
-            querydata = imgdata.squeeze()
-        else:
-            if issubclass(imgdata.dtype.type, np.integer):
-                interp = "nearest"
-            else:
-                interp = "linear"
-            from nibabel import Nifti1Image
-            queryimg = image.resample_img(
-                Nifti1Image(imgdata, imgaffine),
-                target_affine=self.affine,
-                target_shape=self.shape,
-                interpolation=interp,
+        # resample query image into this image's voxel space, if required (nilearn checks)
+        queryimg = resample_img_to_img(
+            source_img=queryimg,
+            target_img=Nifti1Image(
+                np.zeros(self.shape), affine=self.affine, dtype=queryimg.dataobj.dtype
            )
-            querydata = np.asanyarray(queryimg.dataobj).squeeze()
+        )
+        self.space.get_template()
+        querydata = np.asanyarray(queryimg.dataobj).squeeze()
 
-        iter_func = connected_components if split_components \
-            else lambda img: [(1, img)]
+        iter_func = connected_components if split_components else lambda img: [(1, img)]
 
        for mode, modemask in iter_func(querydata):
 
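`SparseMap.assign` now delegates resampling of the query image to the shared `resample_img_to_img` helper instead of calling `nilearn.image.resample_img` by hand. A rough equivalent with plain nilearn, on made-up placeholder data (the query image and target grid below are purely illustrative; siibra wraps this logic in `commons.resample_img_to_img`):

```python
import numpy as np
import nibabel as nib
from nilearn import image

# Placeholder query image and target grid for illustration only.
query_img = nib.Nifti1Image(np.random.rand(50, 60, 50).astype(np.float32), np.eye(4))
target_shape, target_affine = (91, 109, 91), np.diag([2.0, 2.0, 2.0, 1.0])
target_img = nib.Nifti1Image(np.zeros(target_shape, dtype=np.float32), target_affine)

# Nearest-neighbour for integer (label) data, continuous interpolation otherwise.
interp = "nearest" if np.issubdtype(query_img.get_data_dtype(), np.integer) else "continuous"
resampled = image.resample_to_img(query_img, target_img, interpolation=interp)
querydata = np.asanyarray(resampled.dataobj).squeeze()
```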
siibra/volumes/volume.py CHANGED
@@ -14,27 +14,26 @@
 # limitations under the License.
 """A specific mesh or 3D array."""
 
-from .providers import provider as _provider
-
-from .. import logger
-from ..retrieval import requests
-from ..core import space as _space, structure
-from ..locations import point, pointcloud, boundingbox
-from ..commons import resample_img_to_img, siibra_tqdm, affine_scaling, connected_components
-from ..exceptions import NoMapAvailableError, SpaceWarpingFailedError, EmptyPointCloudError
-
-from dataclasses import dataclass
-from nibabel import Nifti1Image
-import numpy as np
 from typing import List, Dict, Union, Set, TYPE_CHECKING
+from dataclasses import dataclass
 from time import sleep
 import json
-from skimage import feature as skimage_feature, filters
 from functools import lru_cache
 
+import numpy as np
+from nibabel import Nifti1Image
+from skimage import feature as skimage_feature, filters
+
+from . import providers as _providers
+from ..commons import resample_img_to_img, siibra_tqdm, affine_scaling, connected_components, logger
+from ..exceptions import NoMapAvailableError, SpaceWarpingFailedError, EmptyPointCloudError
+from ..retrieval import requests
+from ..core import space as _space, structure
+from ..core.concept import get_registry
+from ..locations import point, pointcloud, boundingbox
+
 if TYPE_CHECKING:
-    from ..retrieval.datasets import EbrainsDataset
-    TypeDataset = EbrainsDataset
+    from ..retrieval.datasets import EbrainsDataset as TypeDataset
 
 
 @dataclass
@@ -129,7 +128,7 @@ class Volume(structure.BrainStructure):
    def __init__(
        self,
        space_spec: dict,
-        providers: List['_provider.VolumeProvider'],
+        providers: List[_providers.provider.VolumeProvider],
        name: str = "",
        variant: str = None,
        datasets: List['TypeDataset'] = [],
@@ -138,7 +137,7 @@ class Volume(structure.BrainStructure):
        self._name = name
        self._space_spec = space_spec
        self.variant = variant
-        self._providers: Dict[str, _provider.VolumeProvider] = {}
+        self._providers: Dict[str, _providers.provider.VolumeProvider] = {}
        self.datasets = datasets
        self._boundingbox = bbox
        for provider in providers:
@@ -714,7 +713,7 @@ class Subvolume(Volume):
            self,
            space_spec=parent_volume._space_spec,
            providers=[
-                _provider.SubvolumeProvider(p, z=z)
+                _providers.provider.SubvolumeProvider(p, z=z)
                for p in parent_volume._providers.values()
            ],
            name=parent_volume.name + f" - z: {z}"
@@ -722,25 +721,21 @@
 
 
 def from_file(filename: str, space: str, name: str) -> Volume:
-    """ Builds a nifti volume from a filename. """
-    from ..core.concept import get_registry
-    from .providers.nifti import NiftiProvider
+    """Builds a nifti volume from a filename."""
    spaceobj = get_registry("Space").get(space)
    return Volume(
        space_spec={"@id": spaceobj.id},
-        providers=[NiftiProvider(filename)],
+        providers=[_providers.NiftiProvider(filename)],
        name=filename if name is None else name,
    )
 
 
 def from_nifti(nifti: Nifti1Image, space: str, name: str) -> Volume:
    """Builds a nifti volume from a Nifti image."""
-    from ..core.concept import get_registry
-    from .providers.nifti import NiftiProvider
    spaceobj = get_registry("Space").get(space)
    return Volume(
        space_spec={"@id": spaceobj.id},
-        providers=[NiftiProvider((np.asanyarray(nifti.dataobj), nifti.affine))],
+        providers=[_providers.NiftiProvider((np.asanyarray(nifti.dataobj), nifti.affine))],
        name=name
    )
 
@@ -754,13 +749,11 @@ def from_array(
    """Builds a siibra volume from an array and an affine matrix."""
    if len(name) == 0:
        raise ValueError("Please provide a non-empty string for `name`")
-    from ..core.concept import get_registry
-    from .providers.nifti import NiftiProvider
    spacespec = next(iter(space.values())) if isinstance(space, dict) else space
    spaceobj = get_registry("Space").get(spacespec)
    return Volume(
        space_spec={"@id": spaceobj.id},
-        providers=[NiftiProvider((data, affine))],
+        providers=[_providers.NiftiProvider((data, affine))],
        name=name,
    )
 
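The provider imports are consolidated under `_providers`, while the module-level builders keep their public signatures. A hedged sketch of wrapping an in-memory NIfTI image as a siibra volume with `from_nifti` as shown above (the module import path and the space keyword "mni152" are assumptions not confirmed by this diff):

```python
import numpy as np
from nibabel import Nifti1Image
from siibra.volumes import volume as siibra_volume  # module changed in this diff

# Wrap a small dummy image as a siibra Volume anchored to MNI152 space.
img = Nifti1Image(np.zeros((10, 10, 10), dtype=np.uint8), affine=np.eye(4))
vol = siibra_volume.from_nifti(img, space="mni152", name="my custom volume")
print(vol)
```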
{siibra-1.0.1a1.dist-info → siibra-1.0.1a5.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.2
+Metadata-Version: 2.4
 Name: siibra
-Version: 1.0.1a1
+Version: 1.0.1a5
 Summary: siibra - Software interfaces for interacting with brain atlases
 Home-page: https://github.com/FZJ-INM1-BDA/siibra-python
 Author: Big Data Analytics Group, Forschungszentrum Juelich, Institute of Neuroscience and Medicine (INM-1)
@@ -33,6 +33,7 @@ Dynamic: classifier
 Dynamic: description
 Dynamic: description-content-type
 Dynamic: home-page
+Dynamic: license-file
 Dynamic: requires-dist
 Dynamic: requires-python
 Dynamic: summary
@@ -43,22 +44,28 @@ Dynamic: summary
 siibra - Software interface for interacting with brain atlases
 ==============================================================
 
-Copyright 2018-2024, Forschungszentrum Jülich GmbH
+Copyright 2018-2025, Forschungszentrum Jülich GmbH
 
 *Authors: Big Data Analytics Group, Institute of Neuroscience and
 Medicine (INM-1), Forschungszentrum Jülich GmbH*
 
 .. intro-start
 
-``siibra`` is a Python client to a brain atlas framework that integrates brain parcellations and reference spaces at different spatial scales, and connects them with a broad range of multimodal regional data features.
+``siibra-python`` is a Python client to a brain atlas framework that integrates brain parcellations and reference spaces at different spatial scales, and connects them with a broad range of multimodal regional data features.
 It aims to facilitate programmatic and reproducible incorporation of brain parcellations and brain region features from different sources into neuroscience workflows.
 
-**Note:** ``siibra-python`` *is still in development. While care is taken that it works reliably, its API is not yet stable and you may still encounter bugs when using it.*
-
 ``siibra`` provides structured access to parcellation schemes in different brain reference spaces, including volumetric reference templates at macroscopic and microscopic resolutions as well as surface representations.
 It supports both discretely labelled and statistical (probabilistic) parcellation maps, which can be used to assign brain regions to spatial locations and image signals, to retrieve region-specific neuroscience datasets from multiple online repositories, and to sample information from high-resolution image data.
 The datasets anchored to brain regions address features of molecular, cellular and architecture as well as connectivity, and are complemented with live queries to external repositories as well as dynamic extraction from "big" image volumes such as the 20 micrometer BigBrain model.
 
+``siibra`` hides much of the complexity that would be required to collect and interact with the individual parcellations, templates and data repositories.
+By encapsulating many aspects of interacting with different maps and reference templates spaces, it also minimizes common errors like misinterpretation of coordinates from different reference spaces, confusing label indices of brain regions, or using inconsistent versions of parcellation maps.
+It aims to provide a safe way of using maps defined across multiple spatial scales for reproducible analysis.
+
+.. intro-end
+
+.. about-start
+
 ``siibra`` was developed in the frame of the `Human Brain Project <https://humanbrainproject.eu>`__ for accessing the `EBRAINS
 human brain atlas <https://ebrains.eu/service/human-brain-atlas>`__.
 It stores most of its contents as sustainable and open datasets in the `EBRAINS Knowledge Graph <https://kg.ebrains.eu>`__, and is designed to support the `OpenMINDS metadata standards <https://github.com/HumanBrainProject/openMINDS_SANDS>`__.
@@ -67,11 +74,7 @@ In fact, the viewer is a good resource for exploring ``siibra``\ ’s core funct
 Feature queries in ``siibra`` are parameterized by data modality and anatomical location, while the latter could be a brain region, brain parcellation, or location in reference space.
 Beyond the explorative focus of ``siibra-explorer``, the Python library supports a range of data analysis functions suitable for typical neuroscience workflows.
 
-``siibra`` hides much of the complexity that would be required to collect and interact with the individual parcellations, templates and data repositories.
-By encapsulating many aspects of interacting with different maps and reference templates spaces, it also minimizes common errors like misinterpretation of coordinates from different reference spaces, confusing label indices of brain regions, or using inconsistent versions of parcellation maps.
-It aims to provide a safe way of using maps defined across multiple spatial scales for reproducible analysis.
-
-.. intro-end
+.. about-end
 
 .. getting-started-start
 
@@ -80,10 +83,26 @@ Installation
 
 ``siibra`` is available on pypi.
 To install the latest released version, simply run ``pip install siibra``.
+The installation typically takes about 2 minutes on a standard computer where Python is already installed.
 In order to work with the latest version from github, use ``pip install git+https://github.com/FZJ-INM1-BDA/siibra-python.git@main``.
 
-There is also an image based on jupyter:scipy-notebook, which already includes ``siibra``.
-
+``siibra-python`` should be installable on recent versions of Windows, Linux and Mac OS in a recent Python 3 environment.
+We run continuous integration tests for versions 3.7 - 3.12 on recent Ubuntu images.
+
+The library requires a couple of open source packages, namely:
+```
+anytree >= 2.12.1
+nibabel >= 5.3.2
+appdirs >= 1.4.4
+scikit-image >= 0.25.0
+requests >= 2.32.3
+neuroglancer-scripts >= 1.2.0
+nilearn >= 0.11.0
+filelock >= 3.16.1
+ebrains-drive >= 0.6.0
+```
+
+You can also install a docker image with all dependencies included:
 .. code-block:: sh
 
    docker run -dit \
@@ -93,19 +112,20 @@ There is also an image based on jupyter:scipy-notebook, which already includes `
    docker-registry.ebrains.eu/siibra/siibra-python:latest
 
 
+
 Documentation & Help
 ====================
 
 ``siibra-python``\ ’s documentation is hosted on https://siibra-python.readthedocs.io.
 The documentation includes a catalogue of documented code examples that walk you through the different concepts and functionalities.
+These examples use real data and include both the code and the produced expected outputs.
+They can be accessed at https://siibra-python.readthedocs.io/en/latest/examples.html, and are
+automatically tested and updated whenever a new version of ``siibra-python`` is published.
 As a new user, it is recommended to go through these examples - they are easy and will quickly provide you with the right code snippets that get you started.
-Furthermore, a set of jupyter notebooks demonstrating more extensive example use cases are maintained in the `siibra-tutorials <https://github.com/FZJ-INM1-BDA/siibra-tutorials>`__ repository.
-We are working on a full API documentation of the library. You find the current status on readthedocs, but be aware that it is not yet complete and as up-to-date as the code examples.
+The documentation on readthedocs further includes introductory explanations and an API reference.
 
 If you run into issues, please open a ticket on `EBRAINS support <https://ebrains.eu/support/>`__ or file bugs and
 feature requests on `github <https://github.com/FZJ-INM1-BDA/siibra-python/issues>`__.
-Please keep in mind that ``siibra-python`` is still in development.
-While care is taken to make everything work reliably, the API of the library is not yet stable, and the software is not yet fully tested.
 
 .. getting-started-end
 
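A quick post-install sanity check to complement the getting-started text above, assuming only the documented ``pip install siibra`` step; the ``__version__`` attribute and the ``atlases`` registry are top-level attributes of the package and are not shown in this diff:

```python
# Run after `pip install siibra`; the first call may download configuration data.
import siibra

print(siibra.__version__)                        # e.g. "1.0.1a5" for this release
print([atlas.name for atlas in siibra.atlases])  # list the preconfigured atlases
```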
siibra-1.0.1a5.dist-info/RECORD ADDED
@@ -0,0 +1,80 @@
+siibra/VERSION,sha256=ipm-gM9pHLEb5l5Cx8ZcZ8wKJBYKiaWH87grIEoJ_OM,14
+siibra/__init__.py,sha256=1uWhsE93KG4N9wiWoMdEokUXxfoRcyznXDktjAGhpEI,4496
+siibra/commons.py,sha256=KmbRdX6pfKTgIgbYufxhkE9YiLQ35GObbw6VyFdRm_w,28016
+siibra/exceptions.py,sha256=6MlXOadwXcCsceOE4lmy4fLJyAaBCCVvJF6BZlMYjU8,1371
+siibra/configuration/__init__.py,sha256=ArqQ_B8C_O61KA4Fk3ho8ksckbjLu-COOlPGiXyf8LE,752
+siibra/configuration/configuration.py,sha256=A651Tg632_a_c_yjxs-FJW_douQyzSJBoCDad4expVk,7151
+siibra/configuration/factory.py,sha256=srhxxOAW_qyfe7oIFfuiX6gviou5CRfle5DVKe8FtB0,22798
+siibra/core/__init__.py,sha256=zW887SH2txImUfvU80k83NbxxnlHlbdzBjdryY-3-48,766
+siibra/core/assignment.py,sha256=aSzunUMKVVE0wl_2qEK_NryQIoV8_qMV5t3MBmkdEo8,4310
+siibra/core/atlas.py,sha256=Iwqgcf8sG8_iL4mlw_4Nzi7HWVR-wxYLESqnIS0CFTw,8549
+siibra/core/concept.py,sha256=rLqgJ72Mt1Mc8mlh-bGYOyq65rV7Vc8vCZGs-amJp1w,10891
+siibra/core/parcellation.py,sha256=JzrnoB8G0XupffP8mnwE3oHYmHjo2Mzn4-pXnZ2R6Ow,14480
+siibra/core/region.py,sha256=036Fjck1H6wGSubJomjEHUN1I-XMPSPqx3_78MEYbG8,44108
+siibra/core/space.py,sha256=X7FyKgdhao3ezSWQZ0MAjDxlQh305S-4a4D630RaM-c,4588
+siibra/core/structure.py,sha256=M2li4PPiJf27dOc3b2ycCfHf7Ad1AWxBYc9OpSFazJM,4498
+siibra/explorer/__init__.py,sha256=XBAeYm4W3HlbWsKtt8gOwqE_FinIEY7RdA6Rg4Y275A,781
+siibra/explorer/url.py,sha256=S6Tpg1p2gmu5byHVqsXfkugUStLd1Fu94EqsDzgEU9E,7047
+siibra/explorer/util.py,sha256=ul82TQZAULdupr4tJBACdkjlHm2mt8LJ9UpwNWGHYhE,2083
+siibra/features/__init__.py,sha256=FER6DMnkPhXSV1XMZWibZdyBwVhIgWYSUGYMEYEKb9c,3970
+siibra/features/anchor.py,sha256=cMuaGB8ZCGMpdeb3plzHzNC4ImsxsriQ66janWcjhjk,9101
+siibra/features/feature.py,sha256=uVTp1vvZRnl9DIks49-StrZ88UEfEe-ixshuK5_bvwg,35576
+siibra/features/connectivity/__init__.py,sha256=FkPf0vyrLo3ERxrDbsRHUd7FUgJyajD87NiiXIiXhmY,1161
+siibra/features/connectivity/functional_connectivity.py,sha256=9lQoOXv8lZUnyMduAbWABwDIkQC0QTI8V23yx0NjOBg,2122
+siibra/features/connectivity/regional_connectivity.py,sha256=6oDx-tsvQIh-MG823-6yLpVHvr-rOoS--CKR22GSwxA,18706
+siibra/features/connectivity/streamline_counts.py,sha256=JaAYf6-1S8NYhkE4lhshCSY__EQ5BFcL2i_XXdFfgrM,1064
+siibra/features/connectivity/streamline_lengths.py,sha256=QeuoW_ZDVa9dxCguaemj4Cq9CCPB8ur8_alhATto2-w,1067
+siibra/features/connectivity/tracing_connectivity.py,sha256=rkYgD8mOZzDp0STo5djhDqOaEdz-9j5EuLffXE0F01A,1083
+siibra/features/dataset/__init__.py,sha256=qRV_P0335b4LnSMiONRpSC4elGckp0FXmtJz_QQuVLA,748
+siibra/features/dataset/ebrains.py,sha256=zA_GSIJzeJgClY5KrsfAJgrdjcM5N2Z_mz442UR_1vc,2544
+siibra/features/image/__init__.py,sha256=i_NccD2WJmVAtpLVWeccn8UEnViDMC6Y2CSGARL0Qt8,1049
+siibra/features/image/image.py,sha256=heXBen5Sq3dVEcHSb4W4rs7n9nOXy3Nqp-eO0Vzjz4A,3583
+siibra/features/image/sections.py,sha256=rZPoeZbu_cK8td5J4LVxZxsojjwTodrHe42P2de28_Y,3708
+siibra/features/image/volume_of_interest.py,sha256=fBkTUujUHdcfzF1kPvMTV4x41VJXPUJ5Lbpr_g4GxYM,2649
+siibra/features/tabular/__init__.py,sha256=DdE1GXPEsHSNJ3pyNYZNWu5JxDfuI08DbrDF1iBrXIA,1203
+siibra/features/tabular/bigbrain_intensity_profile.py,sha256=C7rP2QTJPyTz_EnRlgdFWPVaDcwJszTlm1ylHBydJOE,2709
+siibra/features/tabular/cell_density_profile.py,sha256=ScpjD9W-w-lLxdG6q42Zfyqe8LW4JvaorIdnixuPzo4,10818
+siibra/features/tabular/cortical_profile.py,sha256=x0TqZh8FzcVx13EwPtbt8kBYfF1dREWWuf9BhpWzj7o,12542
+siibra/features/tabular/gene_expression.py,sha256=8gJuUrL13oIySOPgiy5j46a5Udae_cJ1_QfrUHA72CI,10309
+siibra/features/tabular/layerwise_bigbrain_intensities.py,sha256=iFCNG641yCSuIXFTY30xgRQqLERDHMqJrAy3SdKkAJ4,2118
+siibra/features/tabular/layerwise_cell_density.py,sha256=MYFiUfDfx6XQj8dezV5-3eKWBg0VOn0T3a1utELE2PM,6513
+siibra/features/tabular/receptor_density_fingerprint.py,sha256=CvFJv940whxzavhEQWnTjueDEq0ZoiuYVpqUpR8t-Ec,8065
+siibra/features/tabular/receptor_density_profile.py,sha256=VW2Ry8ifQLfmiBDKqIdvaN7K1YzMENGU92Lnm4wA8P0,3724
+siibra/features/tabular/regional_timeseries_activity.py,sha256=wuno4oI1I-dyxRr8-tLFj42iaD6dYZiPigCqVMhrG50,10027
+siibra/features/tabular/tabular.py,sha256=37lSZ10cEAzGorA8jxdJvMcAIZvklIR3Vo75YPz7ONA,5783
+siibra/livequeries/__init__.py,sha256=hToUNmyFq1UW5CxScHyBB-mSZ7kUK_w1Cvrk1OkZQf0,875
+siibra/livequeries/allen.py,sha256=l5OqVqq3SZ3x7-pol1PwX6AnvdyufjVUYXtGr3wWBj0,14946
+siibra/livequeries/bigbrain.py,sha256=hT-T6gKRoZQFPWvitm1hv-DoMkkGjz-NfqSx7khfLR0,15762
+siibra/livequeries/ebrains.py,sha256=GBwpm9f_rVjZgF-SyGZ5PGMmL9e9OZzXhHs78uCNhYs,5952
+siibra/livequeries/query.py,sha256=P_uUVFxv4KyOPQXh2WnDQxuaSSBK_dXPKEr8I0-4xSQ,1849
+siibra/locations/__init__.py,sha256=yTJVMdv-pICm_gUYLQ09lDKSkZBfgUihM6LNv07e_yk,3546
+siibra/locations/boundingbox.py,sha256=ACRittMABMZwLPzGnv5Z5E6cIHJfCFWv851s5Bg87WQ,16442
+siibra/locations/experimental.py,sha256=kd9yroLKpnY-N6hoMVgtHK4pVdgeGqhpad4GyxYy6LU,14484
+siibra/locations/location.py,sha256=tb__0td1c7_3BKJFqqEqwspO5m8BZ8ZlyYwvtVBq8SY,4706
+siibra/locations/point.py,sha256=cL2hqbejec24ZoXpbBQQXtHZ-H7KcyxnbITzSBjkXTs,12675
+siibra/locations/pointcloud.py,sha256=AxtR43xG4i1hUXNmal4F_g5WKQo_A1ZtW4l4jsf3p6o,14108
+siibra/retrieval/__init__.py,sha256=E-UA8rDQZFGkHmAcmit6siONo7G2mH_Y0xgLlR1RfvY,1062
+siibra/retrieval/cache.py,sha256=uMWEi93VePSOSXaiU5PZAbUlWBYyO7gbpRxod4jO2Rc,7833
+siibra/retrieval/datasets.py,sha256=JgnSc41TM0oGNAVn8zQjL84HML-feOBVy5bLxxHrEt8,11110
+siibra/retrieval/repositories.py,sha256=mMmSAnLZJjDb-bi86FPKNQXDbIPKpA_kh7IjKlTPkxM,29935
+siibra/retrieval/requests.py,sha256=VhAnD8mTK26YHVv0haWjfYcdnJuHg6uHhZD-TSs8Cfo,23094
+siibra/retrieval/exceptions/__init__.py,sha256=sOuzPHh2xq1p55T0zAcrSW2B1wfwwYEXBOWIuCjGePE,875
+siibra/vocabularies/__init__.py,sha256=aWXsCVmtz0ZtX5J-f_XeyeGSAj9XbuQQjKJz_rvPVtY,1287
+siibra/vocabularies/gene_names.json,sha256=i-gnh753GyZtQfX_dWibNYr_d5ccDPHooOwsdeKUYqE,1647972
+siibra/vocabularies/receptor_symbols.json,sha256=F6DZIArPCBmJV_lWGV-zDpBBH_GOJOZm67LBE4qzMa4,5722
+siibra/vocabularies/region_aliases.json,sha256=T2w1wRlxPNTsPppXn0bzC70tNsb8mOjLsoHuxDSYm2w,8563
+siibra/volumes/__init__.py,sha256=9eiVjgGTqq9BiFJaLVLABtTrhIcE2k3Cn51yC4EKplg,936
+siibra/volumes/parcellationmap.py,sha256=z5wWxK_kcFpS_ROpc9ny4aCbM7P9qc3DRDxrOKGp2Hk,51409
+siibra/volumes/sparsemap.py,sha256=Yv07LIlt5yigw-ubWFq89hHnsivT7dymYCsCTe7Wu9I,17167
+siibra/volumes/volume.py,sha256=wLmtqoXBDoPvE3WBO_Hc8uny3cdE9mB0KgdCq0LOTa0,32408
+siibra/volumes/providers/__init__.py,sha256=AHZCjutCqO4mnHxyykVjqxlz85jDqFWcSjsa4ciwc1A,934
+siibra/volumes/providers/freesurfer.py,sha256=l3zkLlE28EAEmg75tv9yp1YYiaHVkf4Zi8rKn9TUWVs,4893
+siibra/volumes/providers/gifti.py,sha256=JGuixlSJTVjbDU_M5oMDCV8BAwIzuczhnI-qZ7LMQIc,6231
+siibra/volumes/providers/neuroglancer.py,sha256=b3TiJ6yrx_akLFKgHRKZyHculzzRIqbZ7U3TMQHy6-k,28618
+siibra/volumes/providers/nifti.py,sha256=aAzkmeDZaXRZ-dkAeEb2cSywNn9WzIz0z7yFtN6iNpU,10135
+siibra/volumes/providers/provider.py,sha256=Vn02au_LKynO5SIfqLyjqzxCf7JD9Wm4i7yEFcTX0WU,3585
+siibra-1.0.1a5.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+siibra-1.0.1a5.dist-info/METADATA,sha256=CO_Qej2SZ4h9saebuTGGPQLhPZOI4PQE4xSEHKVqKl4,9301
+siibra-1.0.1a5.dist-info/WHEEL,sha256=0CuiUZ_p9E4cD6NyLD6UG80LBXYyiSYZOKDm5lp32xk,91
+siibra-1.0.1a5.dist-info/top_level.txt,sha256=NF0OSGLL0li2qyC7MaU0iBB5Y9S09_euPpvisD0-8Hg,7
+siibra-1.0.1a5.dist-info/RECORD,,
{siibra-1.0.1a1.dist-info → siibra-1.0.1a5.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (75.8.0)
+Generator: setuptools (80.3.1)
 Root-Is-Purelib: true
 Tag: py3-none-any
 