siibra 1.0.1a1__py3-none-any.whl → 1.0.1a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siibra has been flagged as potentially problematic; see the package registry page for more details.

Files changed (66)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +7 -16
  3. siibra/commons.py +9 -7
  4. siibra/configuration/configuration.py +5 -5
  5. siibra/configuration/factory.py +9 -8
  6. siibra/core/__init__.py +1 -1
  7. siibra/core/assignment.py +1 -0
  8. siibra/core/atlas.py +3 -3
  9. siibra/core/concept.py +4 -2
  10. siibra/core/parcellation.py +5 -5
  11. siibra/core/region.py +24 -25
  12. siibra/core/space.py +4 -6
  13. siibra/core/structure.py +2 -2
  14. siibra/features/anchor.py +2 -4
  15. siibra/features/connectivity/regional_connectivity.py +10 -13
  16. siibra/features/dataset/ebrains.py +1 -1
  17. siibra/features/feature.py +21 -18
  18. siibra/features/image/__init__.py +4 -2
  19. siibra/features/image/image.py +2 -4
  20. siibra/features/image/sections.py +81 -2
  21. siibra/features/image/volume_of_interest.py +0 -8
  22. siibra/features/tabular/__init__.py +1 -1
  23. siibra/features/tabular/bigbrain_intensity_profile.py +2 -1
  24. siibra/features/tabular/cell_density_profile.py +8 -9
  25. siibra/features/tabular/cortical_profile.py +6 -6
  26. siibra/features/tabular/gene_expression.py +6 -5
  27. siibra/features/tabular/layerwise_bigbrain_intensities.py +4 -3
  28. siibra/features/tabular/layerwise_cell_density.py +4 -6
  29. siibra/features/tabular/receptor_density_fingerprint.py +34 -9
  30. siibra/features/tabular/receptor_density_profile.py +1 -2
  31. siibra/features/tabular/regional_timeseries_activity.py +7 -7
  32. siibra/features/tabular/tabular.py +4 -5
  33. siibra/livequeries/allen.py +20 -22
  34. siibra/livequeries/bigbrain.py +239 -51
  35. siibra/livequeries/ebrains.py +13 -10
  36. siibra/livequeries/query.py +3 -3
  37. siibra/locations/__init__.py +17 -8
  38. siibra/locations/boundingbox.py +7 -6
  39. siibra/{experimental/plane3d.py → locations/experimental.py} +113 -13
  40. siibra/locations/location.py +10 -12
  41. siibra/locations/point.py +7 -16
  42. siibra/locations/pointcloud.py +51 -10
  43. siibra/retrieval/cache.py +1 -0
  44. siibra/retrieval/datasets.py +19 -13
  45. siibra/retrieval/repositories.py +10 -11
  46. siibra/retrieval/requests.py +26 -24
  47. siibra/vocabularies/__init__.py +1 -2
  48. siibra/volumes/__init__.py +4 -3
  49. siibra/volumes/parcellationmap.py +30 -16
  50. siibra/volumes/providers/freesurfer.py +4 -4
  51. siibra/volumes/providers/gifti.py +4 -4
  52. siibra/volumes/providers/neuroglancer.py +19 -22
  53. siibra/volumes/providers/nifti.py +6 -6
  54. siibra/volumes/providers/provider.py +3 -2
  55. siibra/volumes/sparsemap.py +7 -6
  56. siibra/volumes/volume.py +21 -28
  57. {siibra-1.0.1a1.dist-info → siibra-1.0.1a2.dist-info}/METADATA +10 -6
  58. siibra-1.0.1a2.dist-info/RECORD +80 -0
  59. {siibra-1.0.1a1.dist-info → siibra-1.0.1a2.dist-info}/WHEEL +1 -1
  60. siibra/experimental/__init__.py +0 -19
  61. siibra/experimental/contour.py +0 -61
  62. siibra/experimental/cortical_profile_sampler.py +0 -57
  63. siibra/experimental/patch.py +0 -98
  64. siibra-1.0.1a1.dist-info/RECORD +0 -84
  65. {siibra-1.0.1a1.dist-info → siibra-1.0.1a2.dist-info}/LICENSE +0 -0
  66. {siibra-1.0.1a1.dist-info → siibra-1.0.1a2.dist-info}/top_level.txt +0 -0
@@ -14,9 +14,18 @@
14
14
  # limitations under the License.
15
15
  """Provides spatial representations for parcellations and regions."""
16
16
 
17
+ from collections import defaultdict
18
+ from dataclasses import dataclass, asdict
19
+ from typing import Union, Dict, List, TYPE_CHECKING, Iterable, Tuple
20
+
21
+ import numpy as np
22
+ import pandas as pd
23
+ from scipy.ndimage import distance_transform_edt
24
+ from nilearn import image
25
+
17
26
  from . import volume as _volume
18
27
  from .providers import provider
19
- from .. import logger, QUIET, exceptions
28
+ from .. import exceptions
20
29
  from ..commons import (
21
30
  MapIndex,
22
31
  MapType,
@@ -29,19 +38,13 @@ from ..commons import (
29
38
  siibra_tqdm,
30
39
  Species,
31
40
  CompareMapsResult,
32
- generate_uuid
41
+ generate_uuid,
42
+ logger,
43
+ QUIET,
33
44
  )
34
45
  from ..core import concept, space, parcellation, region as _region
35
46
  from ..locations import location, point, pointcloud
36
47
 
37
- import numpy as np
38
- from typing import Union, Dict, List, TYPE_CHECKING, Iterable, Tuple
39
- from scipy.ndimage import distance_transform_edt
40
- from collections import defaultdict
41
- from nilearn import image
42
- import pandas as pd
43
- from dataclasses import dataclass, asdict
44
-
45
48
  if TYPE_CHECKING:
46
49
  from ..core.region import Region
47
50
 
@@ -690,7 +693,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
690
693
  name=f"Custom colorization of {self}"
691
694
  )
692
695
 
693
- def get_colormap(self, region_specs: Iterable = None, *, allow_random_colors: bool = False):
696
+ def get_colormap(self, region_specs: Iterable = None, *, fill_uncolored: bool = False):
694
697
  """
695
698
  Generate a matplotlib colormap from known rgb values of label indices.
696
699
 
@@ -698,7 +701,8 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
698
701
  ----------
699
702
  region_specs: iterable(regions), optional
700
703
  Optional parameter to only color the desired regions.
701
- allow_random_colors: bool , optional
704
+ fill_uncolored: bool , optional
705
+ If a region has no preconfigured color, a color will be randomly (reproducible) created.
702
706
 
703
707
  Returns
704
708
  -------
@@ -711,10 +715,10 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
711
715
  "matplotlib not available. Please install matplotlib to create a matplotlib colormap."
712
716
  )
713
717
  raise e
714
- if allow_random_colors:
718
+ if fill_uncolored:
715
719
  seed = len(self.regions)
716
720
  np.random.seed(seed)
717
- logger.info(f"Random colors are allowed for regions without preconfgirued colors. Random seee: {seed}.")
721
+ logger.info(f"Random colors are allowed for regions without preconfgirued colors. Random seed: {seed}.")
718
722
 
719
723
  colors = {}
720
724
  if region_specs is not None:
@@ -724,6 +728,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
724
728
  else:
725
729
  include_region_names = None
726
730
 
731
+ no_predefined_color = []
727
732
  for regionname, indices in self._indices.items():
728
733
  for index in indices:
729
734
  if index.label is None:
@@ -735,16 +740,25 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
735
740
  region = self.get_region(index=index)
736
741
  if region.rgb is not None:
737
742
  colors[index.label] = region.rgb
738
- elif allow_random_colors:
743
+ elif fill_uncolored:
739
744
  random_clr = [np.random.randint(0, 255) for r in range(3)]
740
745
  while random_clr in list(colors.values()):
741
746
  random_clr = [np.random.randint(0, 255) for r in range(3)]
742
747
  colors[index.label] = random_clr
748
+ else:
749
+ no_predefined_color.append(region.name)
743
750
 
744
751
  if len(colors) == 0:
745
752
  raise exceptions.NoPredifinedColormapException(
746
753
  f"There is no predefined/preconfigured colormap for '{self}'."
747
- "Set `allow_random_colors=True` to a colormap with random values"
754
+ "Set `fill_uncolored=True` to get a reproducible colormap."
755
+ )
756
+
757
+ if no_predefined_color:
758
+ logger.info(
759
+ f"No preconfigured color found for the following regions."
760
+ "Use `fill_uncolored=True` to display with a non-background color.\n"
761
+ f"{no_predefined_color}"
748
762
  )
749
763
 
750
764
  palette = np.array(
@@ -14,12 +14,12 @@
14
14
  # limitations under the License.
15
15
  """Handles reading and preparing gii files."""
16
16
 
17
- from . import provider as _provider
18
-
19
- from ...retrieval.requests import HttpRequest, ZipfileRequest
17
+ from typing import Union, Dict, TYPE_CHECKING
20
18
 
21
19
  import numpy as np
22
- from typing import Union, Dict, TYPE_CHECKING
20
+
21
+ from . import provider as _provider
22
+ from ...retrieval.requests import HttpRequest, ZipfileRequest
23
23
 
24
24
  if TYPE_CHECKING:
25
25
  from ...locations import boundingbox as _boundingbox
@@ -14,15 +14,15 @@
14
14
  # limitations under the License.
15
15
  """Handles reading and preparing gii files."""
16
16
 
17
- from . import provider as _provider
17
+ from typing import Union, Dict
18
+
19
+ import numpy as np
18
20
 
21
+ from . import provider as _provider
19
22
  from ...retrieval import requests
20
23
  from ...commons import logger, merge_meshes
21
24
  from ...locations import boundingbox as _boundingbox
22
25
 
23
- import numpy as np
24
- from typing import Union, Dict
25
-
26
26
 
27
27
  class GiftiMesh(_provider.VolumeProvider, srctype="gii-mesh"):
28
28
  """
@@ -13,8 +13,20 @@
13
13
  # See the License for the specific language governing permissions and
14
14
  # limitations under the License.
15
15
 
16
- from . import provider as _provider
16
+ from io import BytesIO
17
+ import os
18
+ from typing import Union, Dict, Tuple
19
+ import json
20
+
21
+ import numpy as np
22
+ import nibabel as nib
23
+ from neuroglancer_scripts.precomputed_io import get_IO_for_existing_dataset, PrecomputedIO
24
+ from neuroglancer_scripts.http_accessor import HttpAccessor
25
+ from neuroglancer_scripts.mesh import read_precomputed_mesh, affine_transform_mesh
17
26
 
27
+ from . import provider as _provider
28
+ from ...retrieval import requests, cache
29
+ from ...locations import boundingbox as _boundingbox
18
30
  from ...commons import (
19
31
  logger,
20
32
  MapType,
@@ -23,18 +35,6 @@ from ...commons import (
23
35
  QUIET,
24
36
  resample_img_to_img
25
37
  )
26
- from ...retrieval import requests, cache
27
- from ...locations import boundingbox as _boundingbox
28
-
29
- from neuroglancer_scripts.precomputed_io import get_IO_for_existing_dataset, PrecomputedIO
30
- from neuroglancer_scripts.http_accessor import HttpAccessor
31
- from neuroglancer_scripts.mesh import read_precomputed_mesh, affine_transform_mesh
32
- from io import BytesIO
33
- import nibabel as nib
34
- import os
35
- import numpy as np
36
- from typing import Union, Dict, Tuple
37
- import json
38
38
 
39
39
 
40
40
  def shift_ng_transfrom(
@@ -529,14 +529,7 @@ class NeuroglancerScale:
529
529
  if voi is None:
530
530
  bbox_ = _boundingbox.BoundingBox((0, 0, 0), self.size, space=None)
531
531
  else:
532
- bbox_ = voi.transform(np.linalg.inv(self.affine))
533
-
534
- for dim in range(3):
535
- if bbox_.shape[dim] < 1:
536
- logger.warning(
537
- f"Bounding box in voxel space will be enlarged to by {self.res_mm[dim]} along axis {dim}."
538
- )
539
- bbox_.maxpoint[dim] = bbox_.maxpoint[dim] + self.res_mm[dim]
532
+ bbox_ = voi.transform(np.linalg.inv(self.affine), space=None)
540
533
 
541
534
  # extract minimum and maximum the chunk indices to be loaded
542
535
  gx0, gy0, gz0 = self._point_to_lower_chunk_idx(tuple(bbox_.minpoint))
@@ -559,8 +552,12 @@ class NeuroglancerScale:
559
552
  # exact bounding box requested, to cut off undesired borders
560
553
  data_min = np.array([gx0, gy0, gz0]) * self.chunk_sizes
561
554
  x0, y0, z0 = (np.array(bbox_.minpoint) - data_min).astype("int")
562
- xd, yd, zd = np.ceil((np.array(bbox_.maxpoint))).astype(int) - np.floor((np.array(bbox_.minpoint))).astype(int)
555
+ xd, yd, zd = np.ceil(bbox_.maxpoint).astype(int) - np.floor(bbox_.minpoint).astype(int)
563
556
  offset = tuple(bbox_.minpoint)
557
+ if voi is not None:
558
+ logger.debug(
559
+ f"Input: {voi.minpoint.coordinate}, {voi.maxpoint.coordinate}.\nVoxel space: {bbox_.minpoint.coordinate}, {bbox_.maxpoint.coordinate}"
560
+ )
564
561
 
565
562
  # build the nifti image
566
563
  trans = np.identity(4)[[2, 1, 0, 3], :] # zyx -> xyz
@@ -13,17 +13,17 @@
13
13
  # See the License for the specific language governing permissions and
14
14
  # limitations under the License.
15
15
 
16
- from . import provider as _provider
16
+ import os
17
+ from typing import Union, Dict, Tuple
18
+
19
+ import numpy as np
20
+ import nibabel as nib
17
21
 
22
+ from . import provider as _provider
18
23
  from ...commons import logger, resample_img_to_img
19
24
  from ...retrieval import requests
20
25
  from ...locations import pointcloud, boundingbox as _boundingbox
21
26
 
22
- from typing import Union, Dict, Tuple
23
- import nibabel as nib
24
- import os
25
- import numpy as np
26
-
27
27
 
28
28
  class NiftiProvider(_provider.VolumeProvider, srctype="nii"):
29
29
 
@@ -17,12 +17,13 @@ from __future__ import annotations
17
17
 
18
18
  from abc import ABC, abstractmethod
19
19
  from typing import TYPE_CHECKING, Union, Dict, List
20
- from nibabel import Nifti1Image
21
20
  import json
21
+
22
+ from nibabel import Nifti1Image
23
+
22
24
  if TYPE_CHECKING:
23
25
  from ...locations.boundingbox import BoundingBox
24
26
 
25
- # TODO add mesh primitive. Check nibabel implementation? Use trimesh? Do we want to add yet another dependency?
26
27
  VolumeData = Union[Nifti1Image, Dict]
27
28
 
28
29
 
@@ -13,19 +13,20 @@
13
13
  # See the License for the specific language governing permissions and
14
14
  # limitations under the License.
15
15
  """Represents lists of probabilistic brain region maps."""
16
- from . import parcellationmap, volume as _volume
17
16
 
17
+ from os import path, makedirs
18
+ from typing import Dict, List
19
+
20
+ import numpy as np
21
+ from nilearn import image
22
+
23
+ from . import parcellationmap, volume as _volume
18
24
  from .providers import provider
19
25
  from ..commons import MapIndex, logger, connected_components, siibra_tqdm
20
26
  from ..locations import boundingbox
21
27
  from ..retrieval.cache import CACHE
22
28
  from ..retrieval.requests import HttpRequest, FileLoader
23
29
 
24
- from os import path, makedirs
25
- from typing import Dict, List
26
- from nilearn import image
27
- import numpy as np
28
-
29
30
 
30
31
  class SparseIndex:
31
32
 
siibra/volumes/volume.py CHANGED
@@ -14,27 +14,26 @@
14
14
  # limitations under the License.
15
15
  """A specific mesh or 3D array."""
16
16
 
17
- from .providers import provider as _provider
18
-
19
- from .. import logger
20
- from ..retrieval import requests
21
- from ..core import space as _space, structure
22
- from ..locations import point, pointcloud, boundingbox
23
- from ..commons import resample_img_to_img, siibra_tqdm, affine_scaling, connected_components
24
- from ..exceptions import NoMapAvailableError, SpaceWarpingFailedError, EmptyPointCloudError
25
-
26
- from dataclasses import dataclass
27
- from nibabel import Nifti1Image
28
- import numpy as np
29
17
  from typing import List, Dict, Union, Set, TYPE_CHECKING
18
+ from dataclasses import dataclass
30
19
  from time import sleep
31
20
  import json
32
- from skimage import feature as skimage_feature, filters
33
21
  from functools import lru_cache
34
22
 
23
+ import numpy as np
24
+ from nibabel import Nifti1Image
25
+ from skimage import feature as skimage_feature, filters
26
+
27
+ from . import providers as _providers
28
+ from ..commons import resample_img_to_img, siibra_tqdm, affine_scaling, connected_components, logger
29
+ from ..exceptions import NoMapAvailableError, SpaceWarpingFailedError, EmptyPointCloudError
30
+ from ..retrieval import requests
31
+ from ..core import space as _space, structure
32
+ from ..core.concept import get_registry
33
+ from ..locations import point, pointcloud, boundingbox
34
+
35
35
  if TYPE_CHECKING:
36
- from ..retrieval.datasets import EbrainsDataset
37
- TypeDataset = EbrainsDataset
36
+ from ..retrieval.datasets import EbrainsDataset as TypeDataset
38
37
 
39
38
 
40
39
  @dataclass
@@ -129,7 +128,7 @@ class Volume(structure.BrainStructure):
129
128
  def __init__(
130
129
  self,
131
130
  space_spec: dict,
132
- providers: List['_provider.VolumeProvider'],
131
+ providers: List[_providers.provider.VolumeProvider],
133
132
  name: str = "",
134
133
  variant: str = None,
135
134
  datasets: List['TypeDataset'] = [],
@@ -138,7 +137,7 @@ class Volume(structure.BrainStructure):
138
137
  self._name = name
139
138
  self._space_spec = space_spec
140
139
  self.variant = variant
141
- self._providers: Dict[str, _provider.VolumeProvider] = {}
140
+ self._providers: Dict[str, _providers.provider.VolumeProvider] = {}
142
141
  self.datasets = datasets
143
142
  self._boundingbox = bbox
144
143
  for provider in providers:
@@ -714,7 +713,7 @@ class Subvolume(Volume):
714
713
  self,
715
714
  space_spec=parent_volume._space_spec,
716
715
  providers=[
717
- _provider.SubvolumeProvider(p, z=z)
716
+ _providers.provider.SubvolumeProvider(p, z=z)
718
717
  for p in parent_volume._providers.values()
719
718
  ],
720
719
  name=parent_volume.name + f" - z: {z}"
@@ -722,25 +721,21 @@ class Subvolume(Volume):
722
721
 
723
722
 
724
723
  def from_file(filename: str, space: str, name: str) -> Volume:
725
- """ Builds a nifti volume from a filename. """
726
- from ..core.concept import get_registry
727
- from .providers.nifti import NiftiProvider
724
+ """Builds a nifti volume from a filename."""
728
725
  spaceobj = get_registry("Space").get(space)
729
726
  return Volume(
730
727
  space_spec={"@id": spaceobj.id},
731
- providers=[NiftiProvider(filename)],
728
+ providers=[_providers.NiftiProvider(filename)],
732
729
  name=filename if name is None else name,
733
730
  )
734
731
 
735
732
 
736
733
  def from_nifti(nifti: Nifti1Image, space: str, name: str) -> Volume:
737
734
  """Builds a nifti volume from a Nifti image."""
738
- from ..core.concept import get_registry
739
- from .providers.nifti import NiftiProvider
740
735
  spaceobj = get_registry("Space").get(space)
741
736
  return Volume(
742
737
  space_spec={"@id": spaceobj.id},
743
- providers=[NiftiProvider((np.asanyarray(nifti.dataobj), nifti.affine))],
738
+ providers=[_providers.NiftiProvider((np.asanyarray(nifti.dataobj), nifti.affine))],
744
739
  name=name
745
740
  )
746
741
 
@@ -754,13 +749,11 @@ def from_array(
754
749
  """Builds a siibra volume from an array and an affine matrix."""
755
750
  if len(name) == 0:
756
751
  raise ValueError("Please provide a non-empty string for `name`")
757
- from ..core.concept import get_registry
758
- from .providers.nifti import NiftiProvider
759
752
  spacespec = next(iter(space.values())) if isinstance(space, dict) else space
760
753
  spaceobj = get_registry("Space").get(spacespec)
761
754
  return Volume(
762
755
  space_spec={"@id": spaceobj.id},
763
- providers=[NiftiProvider((data, affine))],
756
+ providers=[_providers.NiftiProvider((data, affine))],
764
757
  name=name,
765
758
  )
766
759
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: siibra
3
- Version: 1.0.1a1
3
+ Version: 1.0.1a2
4
4
  Summary: siibra - Software interfaces for interacting with brain atlases
5
5
  Home-page: https://github.com/FZJ-INM1-BDA/siibra-python
6
6
  Author: Big Data Analytics Group, Forschungszentrum Juelich, Institute of Neuroscience and Medicine (INM-1)
@@ -59,6 +59,14 @@ It aims to facilitate programmatic and reproducible incorporation of brain parce
59
59
  It supports both discretely labelled and statistical (probabilistic) parcellation maps, which can be used to assign brain regions to spatial locations and image signals, to retrieve region-specific neuroscience datasets from multiple online repositories, and to sample information from high-resolution image data.
60
60
  The datasets anchored to brain regions address features of molecular, cellular and architecture as well as connectivity, and are complemented with live queries to external repositories as well as dynamic extraction from "big" image volumes such as the 20 micrometer BigBrain model.
61
61
 
62
+ ``siibra`` hides much of the complexity that would be required to collect and interact with the individual parcellations, templates and data repositories.
63
+ By encapsulating many aspects of interacting with different maps and reference templates spaces, it also minimizes common errors like misinterpretation of coordinates from different reference spaces, confusing label indices of brain regions, or using inconsistent versions of parcellation maps.
64
+ It aims to provide a safe way of using maps defined across multiple spatial scales for reproducible analysis.
65
+
66
+ .. intro-end
67
+
68
+ .. about-start
69
+
62
70
  ``siibra`` was developed in the frame of the `Human Brain Project <https://humanbrainproject.eu>`__ for accessing the `EBRAINS
63
71
  human brain atlas <https://ebrains.eu/service/human-brain-atlas>`__.
64
72
  It stores most of its contents as sustainable and open datasets in the `EBRAINS Knowledge Graph <https://kg.ebrains.eu>`__, and is designed to support the `OpenMINDS metadata standards <https://github.com/HumanBrainProject/openMINDS_SANDS>`__.
@@ -67,11 +75,7 @@ In fact, the viewer is a good resource for exploring ``siibra``\ ’s core funct
67
75
  Feature queries in ``siibra`` are parameterized by data modality and anatomical location, while the latter could be a brain region, brain parcellation, or location in reference space.
68
76
  Beyond the explorative focus of ``siibra-explorer``, the Python library supports a range of data analysis functions suitable for typical neuroscience workflows.
69
77
 
70
- ``siibra`` hides much of the complexity that would be required to collect and interact with the individual parcellations, templates and data repositories.
71
- By encapsulating many aspects of interacting with different maps and reference templates spaces, it also minimizes common errors like misinterpretation of coordinates from different reference spaces, confusing label indices of brain regions, or using inconsistent versions of parcellation maps.
72
- It aims to provide a safe way of using maps defined across multiple spatial scales for reproducible analysis.
73
-
74
- .. intro-end
78
+ .. about-end
75
79
 
76
80
  .. getting-started-start
77
81
 
@@ -0,0 +1,80 @@
1
+ siibra/VERSION,sha256=tnNU724QbIsJd2y5y9A2yYp48XeoqEJ_sfDYRgMGdrg,14
2
+ siibra/__init__.py,sha256=1uWhsE93KG4N9wiWoMdEokUXxfoRcyznXDktjAGhpEI,4496
3
+ siibra/commons.py,sha256=i4qS4CPteESu2NlchZuLjJrc23PYc0_WMiPr7WMi9ro,27646
4
+ siibra/exceptions.py,sha256=6MlXOadwXcCsceOE4lmy4fLJyAaBCCVvJF6BZlMYjU8,1371
5
+ siibra/configuration/__init__.py,sha256=ArqQ_B8C_O61KA4Fk3ho8ksckbjLu-COOlPGiXyf8LE,752
6
+ siibra/configuration/configuration.py,sha256=FhJ2MF925BeI3KHk8r68-1PnMzIqTfsZEM8ZpvbtqzQ,7263
7
+ siibra/configuration/factory.py,sha256=WPOP0hugX47cTWMGsXo99B4RER14pGllJDUvcC3kMko,22628
8
+ siibra/core/__init__.py,sha256=zW887SH2txImUfvU80k83NbxxnlHlbdzBjdryY-3-48,766
9
+ siibra/core/assignment.py,sha256=7TL3cV2uU8XHatpOkrt5uMju04HfDtcgRR7HM3B1chU,3820
10
+ siibra/core/atlas.py,sha256=Iwqgcf8sG8_iL4mlw_4Nzi7HWVR-wxYLESqnIS0CFTw,8549
11
+ siibra/core/concept.py,sha256=rLqgJ72Mt1Mc8mlh-bGYOyq65rV7Vc8vCZGs-amJp1w,10891
12
+ siibra/core/parcellation.py,sha256=JzrnoB8G0XupffP8mnwE3oHYmHjo2Mzn4-pXnZ2R6Ow,14480
13
+ siibra/core/region.py,sha256=036Fjck1H6wGSubJomjEHUN1I-XMPSPqx3_78MEYbG8,44108
14
+ siibra/core/space.py,sha256=X7FyKgdhao3ezSWQZ0MAjDxlQh305S-4a4D630RaM-c,4588
15
+ siibra/core/structure.py,sha256=M2li4PPiJf27dOc3b2ycCfHf7Ad1AWxBYc9OpSFazJM,4498
16
+ siibra/explorer/__init__.py,sha256=XBAeYm4W3HlbWsKtt8gOwqE_FinIEY7RdA6Rg4Y275A,781
17
+ siibra/explorer/url.py,sha256=ja5i-VkEMYwqhlQ-K5tEfnlYTcgMpPFYJCK7IV0d3Us,7069
18
+ siibra/explorer/util.py,sha256=ul82TQZAULdupr4tJBACdkjlHm2mt8LJ9UpwNWGHYhE,2083
19
+ siibra/features/__init__.py,sha256=FER6DMnkPhXSV1XMZWibZdyBwVhIgWYSUGYMEYEKb9c,3970
20
+ siibra/features/anchor.py,sha256=Umu_Ljkr656h7jvgp43Wi6thMFEycxz_Lf8Bj_QOTZA,9129
21
+ siibra/features/feature.py,sha256=mpQ0M7sMYJEqOkx91rvaDlV_W5iRi_-z4S3eJLAZR8M,35283
22
+ siibra/features/connectivity/__init__.py,sha256=FkPf0vyrLo3ERxrDbsRHUd7FUgJyajD87NiiXIiXhmY,1161
23
+ siibra/features/connectivity/functional_connectivity.py,sha256=9lQoOXv8lZUnyMduAbWABwDIkQC0QTI8V23yx0NjOBg,2122
24
+ siibra/features/connectivity/regional_connectivity.py,sha256=V401G_EOTIDVz3IJJR4d2xOxLiWmIb9qQAMFD46qPlM,18283
25
+ siibra/features/connectivity/streamline_counts.py,sha256=JaAYf6-1S8NYhkE4lhshCSY__EQ5BFcL2i_XXdFfgrM,1064
26
+ siibra/features/connectivity/streamline_lengths.py,sha256=QeuoW_ZDVa9dxCguaemj4Cq9CCPB8ur8_alhATto2-w,1067
27
+ siibra/features/connectivity/tracing_connectivity.py,sha256=rkYgD8mOZzDp0STo5djhDqOaEdz-9j5EuLffXE0F01A,1083
28
+ siibra/features/dataset/__init__.py,sha256=qRV_P0335b4LnSMiONRpSC4elGckp0FXmtJz_QQuVLA,748
29
+ siibra/features/dataset/ebrains.py,sha256=zA_GSIJzeJgClY5KrsfAJgrdjcM5N2Z_mz442UR_1vc,2544
30
+ siibra/features/image/__init__.py,sha256=_Vf6AgIEiYTmtYiKdM7v6YRQco3XWnrEL8vLDhU1sqo,1015
31
+ siibra/features/image/image.py,sha256=heXBen5Sq3dVEcHSb4W4rs7n9nOXy3Nqp-eO0Vzjz4A,3583
32
+ siibra/features/image/sections.py,sha256=rZPoeZbu_cK8td5J4LVxZxsojjwTodrHe42P2de28_Y,3708
33
+ siibra/features/image/volume_of_interest.py,sha256=6rMMWekSAji7p0KFJMSVX7QuhbODfDBEIR-KNHGvnuM,2392
34
+ siibra/features/tabular/__init__.py,sha256=DdE1GXPEsHSNJ3pyNYZNWu5JxDfuI08DbrDF1iBrXIA,1203
35
+ siibra/features/tabular/bigbrain_intensity_profile.py,sha256=C7rP2QTJPyTz_EnRlgdFWPVaDcwJszTlm1ylHBydJOE,2709
36
+ siibra/features/tabular/cell_density_profile.py,sha256=ScpjD9W-w-lLxdG6q42Zfyqe8LW4JvaorIdnixuPzo4,10818
37
+ siibra/features/tabular/cortical_profile.py,sha256=x0TqZh8FzcVx13EwPtbt8kBYfF1dREWWuf9BhpWzj7o,12542
38
+ siibra/features/tabular/gene_expression.py,sha256=8n3aJgJfU_hyIZFnOmKg76GNCath5TMkH0tElWvZhTg,9828
39
+ siibra/features/tabular/layerwise_bigbrain_intensities.py,sha256=iFCNG641yCSuIXFTY30xgRQqLERDHMqJrAy3SdKkAJ4,2118
40
+ siibra/features/tabular/layerwise_cell_density.py,sha256=j5EznhcWjGb_hwbsQtIiQNWkbeUtEOS13vPeP-Zw2Xw,3740
41
+ siibra/features/tabular/receptor_density_fingerprint.py,sha256=CvFJv940whxzavhEQWnTjueDEq0ZoiuYVpqUpR8t-Ec,8065
42
+ siibra/features/tabular/receptor_density_profile.py,sha256=VW2Ry8ifQLfmiBDKqIdvaN7K1YzMENGU92Lnm4wA8P0,3724
43
+ siibra/features/tabular/regional_timeseries_activity.py,sha256=wuno4oI1I-dyxRr8-tLFj42iaD6dYZiPigCqVMhrG50,10027
44
+ siibra/features/tabular/tabular.py,sha256=wFfCeRwh45Bu7eUT9hBygqy4K010tf7n96t2ddA0tIk,5392
45
+ siibra/livequeries/__init__.py,sha256=hToUNmyFq1UW5CxScHyBB-mSZ7kUK_w1Cvrk1OkZQf0,875
46
+ siibra/livequeries/allen.py,sha256=QtKf5sYsSuF8NeqPvyw_DakEJrVYh47mfJVF1rMkJWg,14827
47
+ siibra/livequeries/bigbrain.py,sha256=hT-T6gKRoZQFPWvitm1hv-DoMkkGjz-NfqSx7khfLR0,15762
48
+ siibra/livequeries/ebrains.py,sha256=GBwpm9f_rVjZgF-SyGZ5PGMmL9e9OZzXhHs78uCNhYs,5952
49
+ siibra/livequeries/query.py,sha256=P_uUVFxv4KyOPQXh2WnDQxuaSSBK_dXPKEr8I0-4xSQ,1849
50
+ siibra/locations/__init__.py,sha256=yTJVMdv-pICm_gUYLQ09lDKSkZBfgUihM6LNv07e_yk,3546
51
+ siibra/locations/boundingbox.py,sha256=uCFQBiwsq39UUTz0xwTNIIl4RMXQRH3MDXO0t9pbY9Q,16369
52
+ siibra/locations/experimental.py,sha256=kd9yroLKpnY-N6hoMVgtHK4pVdgeGqhpad4GyxYy6LU,14484
53
+ siibra/locations/location.py,sha256=sdIZiUHGURpOo9JVMUH2w1dEmZAtMBbBXYED-ho_K5s,4383
54
+ siibra/locations/point.py,sha256=g8rjHCuHENTgkiJB7UGH0nWu4anbhf2GSlUfjhek67o,12533
55
+ siibra/locations/pointcloud.py,sha256=_9-4k9yLMiCu8phoxFO6bUsIuJFbRqw7BmBMQ0_FHmk,13979
56
+ siibra/retrieval/__init__.py,sha256=E-UA8rDQZFGkHmAcmit6siONo7G2mH_Y0xgLlR1RfvY,1062
57
+ siibra/retrieval/cache.py,sha256=uMWEi93VePSOSXaiU5PZAbUlWBYyO7gbpRxod4jO2Rc,7833
58
+ siibra/retrieval/datasets.py,sha256=JgnSc41TM0oGNAVn8zQjL84HML-feOBVy5bLxxHrEt8,11110
59
+ siibra/retrieval/repositories.py,sha256=mMmSAnLZJjDb-bi86FPKNQXDbIPKpA_kh7IjKlTPkxM,29935
60
+ siibra/retrieval/requests.py,sha256=VhAnD8mTK26YHVv0haWjfYcdnJuHg6uHhZD-TSs8Cfo,23094
61
+ siibra/retrieval/exceptions/__init__.py,sha256=sOuzPHh2xq1p55T0zAcrSW2B1wfwwYEXBOWIuCjGePE,875
62
+ siibra/vocabularies/__init__.py,sha256=aWXsCVmtz0ZtX5J-f_XeyeGSAj9XbuQQjKJz_rvPVtY,1287
63
+ siibra/vocabularies/gene_names.json,sha256=i-gnh753GyZtQfX_dWibNYr_d5ccDPHooOwsdeKUYqE,1647972
64
+ siibra/vocabularies/receptor_symbols.json,sha256=F6DZIArPCBmJV_lWGV-zDpBBH_GOJOZm67LBE4qzMa4,5722
65
+ siibra/vocabularies/region_aliases.json,sha256=T2w1wRlxPNTsPppXn0bzC70tNsb8mOjLsoHuxDSYm2w,8563
66
+ siibra/volumes/__init__.py,sha256=9eiVjgGTqq9BiFJaLVLABtTrhIcE2k3Cn51yC4EKplg,936
67
+ siibra/volumes/parcellationmap.py,sha256=-Yb9SINRyug-nT_pZkLGvZB3iNrpxFyjp1-PgdMTv2k,51329
68
+ siibra/volumes/sparsemap.py,sha256=PI-3dxORjCFyg_B03ByQpwdAT69GHMRrcLBgweHc0vM,17438
69
+ siibra/volumes/volume.py,sha256=wLmtqoXBDoPvE3WBO_Hc8uny3cdE9mB0KgdCq0LOTa0,32408
70
+ siibra/volumes/providers/__init__.py,sha256=AHZCjutCqO4mnHxyykVjqxlz85jDqFWcSjsa4ciwc1A,934
71
+ siibra/volumes/providers/freesurfer.py,sha256=l3zkLlE28EAEmg75tv9yp1YYiaHVkf4Zi8rKn9TUWVs,4893
72
+ siibra/volumes/providers/gifti.py,sha256=JGuixlSJTVjbDU_M5oMDCV8BAwIzuczhnI-qZ7LMQIc,6231
73
+ siibra/volumes/providers/neuroglancer.py,sha256=b3TiJ6yrx_akLFKgHRKZyHculzzRIqbZ7U3TMQHy6-k,28618
74
+ siibra/volumes/providers/nifti.py,sha256=aAzkmeDZaXRZ-dkAeEb2cSywNn9WzIz0z7yFtN6iNpU,10135
75
+ siibra/volumes/providers/provider.py,sha256=Vn02au_LKynO5SIfqLyjqzxCf7JD9Wm4i7yEFcTX0WU,3585
76
+ siibra-1.0.1a2.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
77
+ siibra-1.0.1a2.dist-info/METADATA,sha256=lndYng0x27sYef1UlLiD6evMTEYXFsCamHTuTWgfzcs,9111
78
+ siibra-1.0.1a2.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
79
+ siibra-1.0.1a2.dist-info/top_level.txt,sha256=NF0OSGLL0li2qyC7MaU0iBB5Y9S09_euPpvisD0-8Hg,7
80
+ siibra-1.0.1a2.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.8.0)
2
+ Generator: setuptools (76.0.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
@@ -1,19 +0,0 @@
1
- # Copyright 2018-2025
2
- # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
-
4
- # Licensed under the Apache License, Version 2.0 (the "License");
5
- # you may not use this file except in compliance with the License.
6
- # You may obtain a copy of the License at
7
-
8
- # http://www.apache.org/licenses/LICENSE-2.0
9
-
10
- # Unless required by applicable law or agreed to in writing, software
11
- # distributed under the License is distributed on an "AS IS" BASIS,
12
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
- # See the License for the specific language governing permissions and
14
- # limitations under the License.
15
-
16
- from .plane3d import Plane3D
17
- from .contour import Contour
18
- from .cortical_profile_sampler import CorticalProfileSampler
19
- from .patch import Patch
@@ -1,61 +0,0 @@
1
- # Copyright 2018-2025
2
- # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
-
4
- # Licensed under the Apache License, Version 2.0 (the "License");
5
- # you may not use this file except in compliance with the License.
6
- # You may obtain a copy of the License at
7
-
8
- # http://www.apache.org/licenses/LICENSE-2.0
9
-
10
- # Unless required by applicable law or agreed to in writing, software
11
- # distributed under the License is distributed on an "AS IS" BASIS,
12
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
- # See the License for the specific language governing permissions and
14
- # limitations under the License.
15
-
16
- from ..locations import point, pointcloud, boundingbox
17
-
18
- import numpy as np
19
-
20
-
21
- class Contour(pointcloud.PointCloud):
22
- """
23
- A PointCloud that represents a contour line.
24
- The only difference is that the point order is relevant,
25
- and consecutive points are thought as being connected by an edge.
26
-
27
- In fact, PointCloud assumes order as well, but no connections between points.
28
- """
29
-
30
- def __init__(self, coordinates, space=None, sigma_mm=0, labels: list = None):
31
- pointcloud.PointCloud.__init__(self, coordinates, space, sigma_mm, labels)
32
-
33
- def crop(self, voi: boundingbox.BoundingBox):
34
- """
35
- Crop the contour with a volume of interest.
36
- Since the contour might be split from the cropping,
37
- returns a set of contour segments.
38
- """
39
- segments = []
40
-
41
- # set the contour point labels to a linear numbering
42
- # so we can use them after the intersection to detect splits.
43
- old_labels = self.labels
44
- self.labels = list(range(len(self)))
45
- cropped = self.intersection(voi)
46
-
47
- if cropped is not None and not isinstance(cropped, point.Point):
48
- assert isinstance(cropped, pointcloud.PointCloud)
49
- # Identify contour splits are by discontinuouities ("jumps")
50
- # of their labels, which denote positions in the original contour
51
- jumps = np.diff([self.labels.index(lb) for lb in cropped.labels])
52
- splits = [0] + list(np.where(jumps > 1)[0] + 1) + [len(cropped)]
53
- for i, j in zip(splits[:-1], splits[1:]):
54
- segments.append(
55
- self.__class__(cropped.coordinates[i:j, :], space=cropped.space)
56
- )
57
-
58
- # reset labels of the input contour points.
59
- self.labels = old_labels
60
-
61
- return segments
@@ -1,57 +0,0 @@
1
- # Copyright 2018-2025
2
- # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
-
4
- # Licensed under the Apache License, Version 2.0 (the "License");
5
- # you may not use this file except in compliance with the License.
6
- # You may obtain a copy of the License at
7
-
8
- # http://www.apache.org/licenses/LICENSE-2.0
9
-
10
- # Unless required by applicable law or agreed to in writing, software
11
- # distributed under the License is distributed on an "AS IS" BASIS,
12
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
- # See the License for the specific language governing permissions and
14
- # limitations under the License.
15
-
16
- from . import contour
17
- from ..locations import point
18
- from ..core import parcellation
19
-
20
- import numpy as np
21
-
22
-
23
- class CorticalProfileSampler:
24
- """Samples cortical profiles from the cortical layer maps."""
25
-
26
- def __init__(self):
27
- self.layermap = parcellation.Parcellation.get_instance(
28
- "cortical layers"
29
- ).get_map(space="bigbrain", maptype="labelled")
30
-
31
- def query(self, query_point: point.Point):
32
- q = query_point.warp(self.layermap.space)
33
- smallest_dist = np.inf
34
- best_match = None
35
- for layername in self.layermap.regions:
36
- vertices = self.layermap.fetch(region=layername, format="mesh")["verts"]
37
- dists = np.sqrt(((vertices - q.coordinate) ** 2).sum(1))
38
- best = np.argmin(dists)
39
- if dists[best] < smallest_dist:
40
- best_match = (layername, best)
41
- smallest_dist = dists[best]
42
-
43
- best_vertex = best_match[1]
44
- hemisphere = "left" if "left" in best_match[0] else "right"
45
- print(f"Best match is vertex #{best_match[1]} in {best_match[0]}.")
46
-
47
- profile = [
48
- (_, self.layermap.fetch(region=_, format="mesh")["verts"][best_vertex])
49
- for _ in self.layermap.regions
50
- if hemisphere in _
51
- ]
52
-
53
- return contour.Contour(
54
- [p[1] for p in profile],
55
- space=self.layermap.space,
56
- labels=[p[0] for p in profile],
57
- )