siibra 1.0.1a1__py3-none-any.whl → 1.0.1a5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of siibra has been flagged as potentially problematic.

Files changed (67)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +7 -16
  3. siibra/commons.py +19 -8
  4. siibra/configuration/configuration.py +5 -6
  5. siibra/configuration/factory.py +13 -8
  6. siibra/core/__init__.py +1 -1
  7. siibra/core/assignment.py +19 -7
  8. siibra/core/atlas.py +3 -3
  9. siibra/core/concept.py +4 -2
  10. siibra/core/parcellation.py +5 -5
  11. siibra/core/region.py +24 -25
  12. siibra/core/space.py +4 -6
  13. siibra/core/structure.py +2 -2
  14. siibra/explorer/url.py +2 -2
  15. siibra/features/anchor.py +3 -7
  16. siibra/features/connectivity/regional_connectivity.py +51 -40
  17. siibra/features/dataset/ebrains.py +1 -1
  18. siibra/features/feature.py +29 -20
  19. siibra/features/image/__init__.py +6 -3
  20. siibra/features/image/image.py +2 -4
  21. siibra/features/image/sections.py +81 -2
  22. siibra/features/image/volume_of_interest.py +8 -7
  23. siibra/features/tabular/__init__.py +1 -1
  24. siibra/features/tabular/bigbrain_intensity_profile.py +2 -1
  25. siibra/features/tabular/cell_density_profile.py +8 -9
  26. siibra/features/tabular/cortical_profile.py +6 -6
  27. siibra/features/tabular/gene_expression.py +34 -16
  28. siibra/features/tabular/layerwise_bigbrain_intensities.py +4 -3
  29. siibra/features/tabular/layerwise_cell_density.py +83 -24
  30. siibra/features/tabular/receptor_density_fingerprint.py +34 -9
  31. siibra/features/tabular/receptor_density_profile.py +1 -2
  32. siibra/features/tabular/regional_timeseries_activity.py +7 -7
  33. siibra/features/tabular/tabular.py +14 -7
  34. siibra/livequeries/allen.py +23 -22
  35. siibra/livequeries/bigbrain.py +239 -51
  36. siibra/livequeries/ebrains.py +13 -10
  37. siibra/livequeries/query.py +3 -3
  38. siibra/locations/__init__.py +17 -8
  39. siibra/locations/boundingbox.py +10 -8
  40. siibra/{experimental/plane3d.py → locations/experimental.py} +113 -13
  41. siibra/locations/location.py +17 -13
  42. siibra/locations/point.py +14 -19
  43. siibra/locations/pointcloud.py +57 -12
  44. siibra/retrieval/cache.py +1 -0
  45. siibra/retrieval/datasets.py +19 -13
  46. siibra/retrieval/repositories.py +10 -11
  47. siibra/retrieval/requests.py +26 -24
  48. siibra/vocabularies/__init__.py +1 -2
  49. siibra/volumes/__init__.py +4 -3
  50. siibra/volumes/parcellationmap.py +33 -17
  51. siibra/volumes/providers/freesurfer.py +4 -4
  52. siibra/volumes/providers/gifti.py +4 -4
  53. siibra/volumes/providers/neuroglancer.py +19 -22
  54. siibra/volumes/providers/nifti.py +6 -6
  55. siibra/volumes/providers/provider.py +3 -2
  56. siibra/volumes/sparsemap.py +19 -26
  57. siibra/volumes/volume.py +21 -28
  58. {siibra-1.0.1a1.dist-info → siibra-1.0.1a5.dist-info}/METADATA +37 -17
  59. siibra-1.0.1a5.dist-info/RECORD +80 -0
  60. {siibra-1.0.1a1.dist-info → siibra-1.0.1a5.dist-info}/WHEEL +1 -1
  61. siibra/experimental/__init__.py +0 -19
  62. siibra/experimental/contour.py +0 -61
  63. siibra/experimental/cortical_profile_sampler.py +0 -57
  64. siibra/experimental/patch.py +0 -98
  65. siibra-1.0.1a1.dist-info/RECORD +0 -84
  66. {siibra-1.0.1a1.dist-info → siibra-1.0.1a5.dist-info/licenses}/LICENSE +0 -0
  67. {siibra-1.0.1a1.dist-info → siibra-1.0.1a5.dist-info}/top_level.txt +0 -0
siibra/features/tabular/layerwise_cell_density.py

@@ -13,17 +13,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from . import cortical_profile
-from .. import anchor as _anchor
-from . import tabular
-from ..tabular.cell_density_profile import cell_reader, layer_reader
+import numpy as np
+import pandas as pd
+from textwrap import wrap
 
+from . import tabular, cell_reader, layer_reader
+from .. import anchor as _anchor
 from ... import commons
 from ...retrieval import requests
 
-import pandas as pd
-import numpy as np
-
 
 class LayerwiseCellDensity(
     tabular.Tabular,
@@ -39,6 +37,7 @@ class LayerwiseCellDensity(
         "detected cells in that layer with the area covered by the layer. Therefore, each profile contains 6 measurement points. "
         "The cortical depth is estimated from the measured layer thicknesses."
     )
+    BIGBRAIN_VOLUMETRIC_SHRINKAGE_FACTOR = 1.931
 
     def __init__(
         self,
@@ -59,13 +58,13 @@ class LayerwiseCellDensity(
             id=id,
             prerelease=prerelease,
         )
-        self.unit = "# detected cells/0.1mm3"
+        self.unit = "# detected cells / $0.1mm^3$"
         self._filepairs = list(zip(segmentfiles, layerfiles))
         self._densities = None
 
     def _load_densities(self):
-        density_dict = {}
-        for i, (cellfile, layerfile) in enumerate(self._filepairs):
+        data = []
+        for cellfile, layerfile in self._filepairs:
             try:
                 cells = requests.HttpRequest(cellfile, func=cell_reader).data
                 layers = requests.HttpRequest(layerfile, func=layer_reader).data
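The hunk that follows rewrites the density computation around the new BIGBRAIN_VOLUMETRIC_SHRINKAGE_FACTOR. According to the inline comments in that hunk, the patch area is already shrinkage corrected in-plane, so only the third (out-of-plane) dimension needs a correction, which is why the cube root of the volumetric factor appears. A minimal sketch of that relationship, assuming nothing beyond the values shown in the diff:

    import numpy as np

    volumetric_shrinkage = 1.931                       # BIGBRAIN_VOLUMETRIC_SHRINKAGE_FACTOR
    linear_shrinkage = np.cbrt(volumetric_shrinkage)   # ~1.245 per axis
    print(round(linear_shrinkage, 3))                  # applied once, for the section-thickness axis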
@@ -74,22 +73,82 @@ class LayerwiseCellDensity(
                 commons.logger.error(f"Skipping to bootstrap a {self.__class__.__name__} feature, cannot access file resource.")
                 continue
             counts = cells.layer.value_counts()
-            areas = layers["Area(micron**2)"]
-            indices = np.intersect1d(areas.index, counts.index)
-            density_dict[i] = counts[indices] / areas * 100 ** 2 * 5
-        return pd.DataFrame(density_dict)
+            # compute the volumetric shrinkage corrections in the same ways as it was used
+            # for the pdf reports in the underlying dataset
+            shrinkage_volumetric = self.BIGBRAIN_VOLUMETRIC_SHRINKAGE_FACTOR
+            layer_volumes = (
+                layers["Area(micron**2)"]  # this is the number of pixels, shrinkage corrected from the dataset
+                * 20  # go to cube micrometer in one patch with 20 micron thickness
+                * np.cbrt(shrinkage_volumetric)  # compensate linear shrinkage for 3rd dimension
+                / 100 ** 3  # go to 0.1 cube millimeter
+            )
+            fields = cellfile.split("/")
+            for layer in layer_volumes.index:
+                data.append({
+                    'layer': layer,
+                    'layername': layers["Name"].loc[layer],
+                    'counts': counts.loc[layer],
+                    'area_mu2': layers["Area(micron**2)"].loc[layer],
+                    'volume': layer_volumes.loc[layer],
+                    'density': counts.loc[layer] / layer_volumes.loc[layer],
+                    'regionspec': fields[-5],
+                    'section': int(fields[-3]),
+                    'patch': int(fields[-2]),
+                })
+        return pd.DataFrame(data)
 
     @property
     def data(self):
         if self._data_cached is None:
-            densities = self._load_densities()
-            self._data_cached = pd.DataFrame(
-                np.array([
-                    list(densities.mean(axis=1)),
-                    list(densities.std(axis=1))
-                ]).T,
-                columns=['mean', 'std'],
-                index=[cortical_profile.CorticalProfile.LAYERS[_] for _ in densities.index]
-            )
-            self._data_cached.index.name = 'layer'
+            self._data_cached = self._load_densities()
+            # self._data_cached.index.name = 'layer'
         return self._data_cached
+
+    def plot(self, *args, backend="matplotlib", **kwargs):
+        wrapwidth = kwargs.pop("textwrap") if "textwrap" in kwargs else 40
+        kwargs["title"] = kwargs.pop(
+            "title",
+            "\n".join(wrap(
+                f"{self.modality} in {self.anchor._regionspec or self.anchor.location}",
+                wrapwidth
+            ))
+        )
+        kwargs["kind"] = kwargs.get("kind", "box")
+        kwargs["ylabel"] = kwargs.get(
+            "ylabel",
+            f"\n{self.unit}" if hasattr(self, 'unit') else ""
+        )
+        if backend == "matplotlib":
+            if kwargs["kind"] == "box":
+                from matplotlib.pyplot import tight_layout
+
+                np.random.seed(int(self.data["density"].mean()))
+
+                title = kwargs.pop("title")
+                default_kwargs = {
+                    "grid": True,
+                    'by': "layername",
+                    'column': ['density'],
+                    'showfliers': False,
+                    'xlabel': 'layer',
+                    'color': 'dimgray',
+                }
+                ax, *_ = self.data.plot(*args, backend=backend, **{**default_kwargs, **kwargs})
+                for i, (layer, d) in enumerate(self.data.groupby('layername')):
+                    ax.scatter(
+                        np.random.normal(i + 1, 0.05, len(d.density)),
+                        d.density,
+                        c='b', s=3
+                    )
+                ax.set_title(title)
+                tight_layout()
+                return ax
+            return self.data.plot(*args, backend=backend, **kwargs)
+        elif backend == "plotly":
+            kwargs["title"] = kwargs["title"].replace('\n', "<br>")
+            yaxis_title = kwargs.pop("ylabel")
+            fig = self.data.plot(y='density', x='layer', points="all", backend=backend, **kwargs)
+            fig.update_layout(yaxis_title=yaxis_title)
+            return fig
+        else:
+            return self.data.plot(*args, backend=backend, **kwargs)
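For orientation, here is a small worked example of the per-layer density conversion introduced above. The column names, the 20 micron section thickness, and the shrinkage correction are taken from the hunk; the numeric values are hypothetical:

    import numpy as np
    import pandas as pd

    SHRINKAGE = 1.931  # BIGBRAIN_VOLUMETRIC_SHRINKAGE_FACTOR
    layers = pd.DataFrame(   # hypothetical per-layer segmentation statistics
        {"Name": ["I", "II"], "Area(micron**2)": [2.0e6, 1.5e6]},
        index=[1, 2],
    )
    counts = pd.Series([38000, 52000], index=[1, 2])  # hypothetical detected cell counts

    # pixels * 20 micron thickness * cbrt(shrinkage) for the out-of-plane axis,
    # rescaled so that densities are reported per (0.1 mm)^3
    layer_volumes = layers["Area(micron**2)"] * 20 * np.cbrt(SHRINKAGE) / 100 ** 3
    density = counts / layer_volumes
    print(density)

The loader now returns long-format data (one row per region, section, patch and layer), which is what the new plot() method groups by 'layername' for its box plot; with backend="plotly" the same 'density' column is drawn as a per-layer box plot with all points shown.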
siibra/features/tabular/receptor_density_fingerprint.py

@@ -13,18 +13,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from .. import anchor as _anchor
-from . import tabular
+from textwrap import wrap
+from typing import List
+
+import numpy as np
+import pandas as pd
 
+from . import tabular
+from .. import anchor as _anchor
 from ...commons import logger
 from ...vocabularies import RECEPTOR_SYMBOLS
 from ...retrieval import requests
 
-import pandas as pd
-import numpy as np
-from textwrap import wrap
-from typing import List
-
 
 class ReceptorDensityFingerprint(
     tabular.Tabular,
@@ -190,6 +190,31 @@ class ReceptorDensityFingerprint(
         else:
             raise NotImplementedError
 
-    def plot(self, *args, **kwargs):
+    def plot(
+        self,
+        *args,
+        receptors: List[str] = None,
+        backend: str = "matplotlib",
+        **kwargs
+    ):
+        """
+        Create a bar plot of receptor density fingerprint.
+
+        Parameters
+        ----------
+        receptors : List[str], optional
+            Plot a subset of receptors.
+        backend: str
+            "matplotlib", "plotly", or others supported by pandas DataFrame
+            plotting backend.
+        **kwargs
+            takes Matplotlib.pyplot keyword arguments
+        """
         kwargs['xlabel'] = ""
-        return super().plot(*args, **kwargs)
+        kwargs["backend"] = backend
+        og_data = self.data
+        if receptors is not None:
+            self._data_cached = og_data.loc[receptors]
+        fig = super().plot(*args, **kwargs)
+        self._data_cached = og_data
+        return fig
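A short usage sketch for the new receptors argument. The feature lookup shown here is an assumption based on the public siibra API (siibra.get_region and siibra.features.get), and the region, modality, and receptor labels are purely illustrative; only the plot() call itself follows the signature added above, and the receptor labels must match the index of fp.data:

    import siibra

    # assumed entry points and specs, shown for illustration only
    region = siibra.get_region("julich 2.9", "hoc1 left")
    fp = siibra.features.get(region, "ReceptorDensityFingerprint")[0]

    # plot only a subset of receptors; plot() swaps self._data_cached temporarily
    # and restores the full fingerprint before returning, so fp.data is unchanged
    ax = fp.plot(receptors=["AMPA", "NMDA", "GABAA"], backend="matplotlib")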
siibra/features/tabular/receptor_density_profile.py

@@ -13,9 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from .. import anchor as _anchor
 from . import cortical_profile
-
+from .. import anchor as _anchor
 from ... import vocabularies
 from ...commons import create_key
 from ...retrieval import requests
siibra/features/tabular/regional_timeseries_activity.py

@@ -13,19 +13,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from typing import Callable, List, Union
+
+import numpy as np
+import pandas as pd
+
 from . import tabular
+from .. import anchor as _anchor
 from ..feature import Compoundable
-
 from ...core import region as _region
-from .. import anchor as _anchor
 from ...commons import QUIET, siibra_tqdm
 from ...locations import pointcloud
-from ...retrieval.repositories import RepositoryConnector
 from ...retrieval.requests import HttpRequest
-
-from typing import Callable, List, Union
-import pandas as pd
-import numpy as np
+from ...retrieval.repositories import RepositoryConnector
 
 
 class RegionalTimeseriesActivity(tabular.Tabular, Compoundable):
siibra/features/tabular/tabular.py

@@ -15,15 +15,14 @@
 """Base type of features in tabular formats."""
 
 from zipfile import ZipFile
-from .. import feature
-
-from .. import anchor as _anchor
-
-from ...commons import logger
 
 import pandas as pd
 from textwrap import wrap
 
+from .. import feature
+from .. import anchor as _anchor
+from ...commons import logger
+
 
 class Tabular(feature.Feature):
     """
@@ -100,14 +99,20 @@ class Tabular(feature.Feature):
             if kwargs.get("error_y") is None:
                 kwargs["yerr"] = kwargs.get("yerr", 'std' if 'std' in self.data.columns else None)
             yerr_label = f" \u00b1 {kwargs.get('yerr')}" if kwargs.get('yerr') else ''
-            kwargs["width"] = kwargs.get("width", 0.95)
+            if kwargs.get('kind') == 'bar':
+                kwargs["width"] = kwargs.get("width", 0.8)
+                kwargs["edgecolor"] = kwargs.get('edgecolor', 'black')
+                kwargs["linewidth"] = kwargs.get('linewidth', 1.0)
+                kwargs["capsize"] = kwargs.get('capsize', 4)
             kwargs["ylabel"] = kwargs.get(
                 "ylabel",
                 f"{kwargs['y']}{yerr_label}" + f"\n{self.unit}" if hasattr(self, 'unit') else ""
             )
             kwargs["grid"] = kwargs.get("grid", True)
             kwargs["legend"] = kwargs.get("legend", False)
-            xticklabel_rotation = kwargs.get("xticklabel_rotation", 60)
+            kwargs["color"] = kwargs.get('color', 'darkgrey')
+
+            xticklabel_rotation = kwargs.get("rot", 60)
             ax = self.data.plot(*args, backend=backend, **kwargs)
             ax.set_title(ax.get_title(), fontsize="medium")
             ax.set_xticklabels(
@@ -115,6 +120,8 @@ class Tabular(feature.Feature):
                 rotation=xticklabel_rotation,
                 ha='center' if xticklabel_rotation % 90 == 0 else 'right'
             )
+            ax.spines['top'].set_visible(False)
+            ax.spines['right'].set_visible(False)
             plt.tight_layout()
             return ax
         elif backend == "plotly":
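To make the new matplotlib defaults in Tabular.plot() concrete, here is a self-contained pandas sketch using the same keyword values. The DataFrame is made up; Tabular.plot() essentially injects these keywords as defaults before delegating to DataFrame.plot():

    import pandas as pd
    import matplotlib.pyplot as plt

    df = pd.DataFrame(
        {"mean": [1.2, 2.4, 1.8], "std": [0.2, 0.3, 0.25]},
        index=["area A", "area B", "area C"],
    )
    ax = df.plot(
        kind="bar", y="mean", yerr="std",
        width=0.8, edgecolor="black", linewidth=1.0, capsize=4,  # new bar-specific defaults
        color="darkgrey", grid=True, legend=False, rot=60,       # "rot" now drives the tick rotation
    )
    ax.spines["top"].set_visible(False)    # spine cosmetics added in the hunk above
    ax.spines["right"].set_visible(False)
    plt.tight_layout()
    plt.show()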
siibra/livequeries/allen.py

@@ -14,21 +14,22 @@
 # limitations under the License.
 """Query Allen Human Brain Atlas microarray data in specified volume."""
 
-from .query import LiveQuery
+from typing import List
+from xml.etree import ElementTree
+import json
 
-from ..core import space as _space, structure
+import numpy as np
+
+from . import query as _query
+from ..core import structure
+from ..core.region import Region
 from ..features import anchor as _anchor
 from ..features.tabular.gene_expression import GeneExpressions
 from ..commons import logger, Species
-from ..locations import point, pointcloud
+from ..locations import pointcloud
 from ..retrieval import HttpRequest
 from ..vocabularies import GENE_NAMES
 
-from typing import List
-from xml.etree import ElementTree
-import numpy as np
-import json
-
 
 BASE_URL = "http://api.brain-map.org/api/v2/data"
 
@@ -51,7 +52,7 @@ class InvalidAllenAPIResponseException(Exception):
     pass
 
 
-class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions):
+class AllenBrainAtlasQuery(_query.LiveQuery, args=['gene'], FeatureType=GeneExpressions):
     """
     Interface to Allen Human Brain Atlas microarray data.
 
@@ -117,7 +118,7 @@ class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions
        will be tested against the region mask in ICBM space
        to produce a table of outputs.
        """
-        LiveQuery.__init__(self, **kwargs)
+        _query.LiveQuery.__init__(self, **kwargs)
         gene = kwargs.get('gene')
 
         def parse_gene(spec):
@@ -144,27 +145,25 @@ class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions
                 'https://github.com/FZJ-INM1-BDA/siibra-python/issues/636.'
             )
 
-        mnispace = _space.Space.registry().get('mni152')
-
         # Match the microarray probes to the query mask.
         # Record matched instances and their locations.
-        measurements = []
-        coordinates = []
-        for measurement in self:
-            pt = point.Point(measurement['mni_xyz'], space=mnispace, sigma_mm=LOCATION_PRECISION_MM)
-            if pt in concept:
-                measurements.append(measurement)
-                coordinates.append(pt)
-
-        if len(coordinates) == 0:
+        all_measurements = list(self)
+        all_mes_points = pointcloud.PointCloud(
+            [measurement['mni_xyz'] for measurement in all_measurements],
+            space='mni152',
+            sigma_mm=LOCATION_PRECISION_MM
+        )
+        intersecting_points = concept.intersection(all_mes_points)
+        if intersecting_points is None:
             logger.info(f"No probes found that lie within {concept}")
             return []
+        measurements = [all_measurements[index] for index in intersecting_points.labels]
 
         # Build the anatomical anchor and assignment to the query concept.
         # It will be attached to the returned feature, with the set of matched
         # MNI coordinates as anchor's location.
         anchor = _anchor.AnatomicalAnchor(
-            location=pointcloud.from_points(coordinates),
+            location=intersecting_points,
             species=self.species
         )
         explanation = f"MNI coordinates of tissue samples were filtered using {concept}"
@@ -175,6 +174,8 @@ class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions
             explanation=explanation
         )]
         anchor._last_matched_concept = concept
+        if isinstance(concept, Region):
+            anchor._regionspec = concept.name
 
         return [GeneExpressions(
             anchor=anchor,
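The query refactor above replaces a per-point membership loop with a single PointCloud intersection. The pattern in isolation, using only the constructor and attributes that appear in the hunk; the region lookup (siibra.get_region), its specs, the coordinates, and the sigma value are assumptions for illustration:

    import siibra
    from siibra.locations import pointcloud

    # hypothetical probe coordinates in MNI152 space (mm); in the query these come
    # from measurement['mni_xyz'] of the Allen microarray samples
    coords = [(-8.0, -84.0, 4.0), (12.0, -90.0, 2.0), (30.0, 20.0, 40.0)]
    points = pointcloud.PointCloud(coords, space='mni152', sigma_mm=3.0)  # sigma is a placeholder

    region = siibra.get_region("julich 2.9", "hoc1 left")  # assumed public API, illustrative specs
    matched = region.intersection(points)
    if matched is not None:
        # labels index back into the original list, mirroring how the query
        # keeps only the measurements whose coordinates fall inside the region
        kept = [coords[i] for i in matched.labels]

This also explains the new anchor._regionspec assignment in the last hunk: when the query concept is a Region, its name is recorded on the anchor so downstream plots can label the feature by region.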