siibra 1.0a1__1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siibra might be problematic. Click here for more details.

Files changed (84) hide show
  1. siibra/VERSION +1 -0
  2. siibra/__init__.py +164 -0
  3. siibra/commons.py +823 -0
  4. siibra/configuration/__init__.py +17 -0
  5. siibra/configuration/configuration.py +189 -0
  6. siibra/configuration/factory.py +589 -0
  7. siibra/core/__init__.py +16 -0
  8. siibra/core/assignment.py +110 -0
  9. siibra/core/atlas.py +239 -0
  10. siibra/core/concept.py +308 -0
  11. siibra/core/parcellation.py +387 -0
  12. siibra/core/region.py +1223 -0
  13. siibra/core/space.py +131 -0
  14. siibra/core/structure.py +111 -0
  15. siibra/exceptions.py +63 -0
  16. siibra/experimental/__init__.py +19 -0
  17. siibra/experimental/contour.py +61 -0
  18. siibra/experimental/cortical_profile_sampler.py +57 -0
  19. siibra/experimental/patch.py +98 -0
  20. siibra/experimental/plane3d.py +256 -0
  21. siibra/explorer/__init__.py +17 -0
  22. siibra/explorer/url.py +222 -0
  23. siibra/explorer/util.py +87 -0
  24. siibra/features/__init__.py +117 -0
  25. siibra/features/anchor.py +224 -0
  26. siibra/features/connectivity/__init__.py +33 -0
  27. siibra/features/connectivity/functional_connectivity.py +57 -0
  28. siibra/features/connectivity/regional_connectivity.py +494 -0
  29. siibra/features/connectivity/streamline_counts.py +27 -0
  30. siibra/features/connectivity/streamline_lengths.py +27 -0
  31. siibra/features/connectivity/tracing_connectivity.py +30 -0
  32. siibra/features/dataset/__init__.py +17 -0
  33. siibra/features/dataset/ebrains.py +90 -0
  34. siibra/features/feature.py +970 -0
  35. siibra/features/image/__init__.py +27 -0
  36. siibra/features/image/image.py +115 -0
  37. siibra/features/image/sections.py +26 -0
  38. siibra/features/image/volume_of_interest.py +88 -0
  39. siibra/features/tabular/__init__.py +24 -0
  40. siibra/features/tabular/bigbrain_intensity_profile.py +77 -0
  41. siibra/features/tabular/cell_density_profile.py +298 -0
  42. siibra/features/tabular/cortical_profile.py +322 -0
  43. siibra/features/tabular/gene_expression.py +257 -0
  44. siibra/features/tabular/layerwise_bigbrain_intensities.py +62 -0
  45. siibra/features/tabular/layerwise_cell_density.py +95 -0
  46. siibra/features/tabular/receptor_density_fingerprint.py +192 -0
  47. siibra/features/tabular/receptor_density_profile.py +110 -0
  48. siibra/features/tabular/regional_timeseries_activity.py +294 -0
  49. siibra/features/tabular/tabular.py +139 -0
  50. siibra/livequeries/__init__.py +19 -0
  51. siibra/livequeries/allen.py +352 -0
  52. siibra/livequeries/bigbrain.py +197 -0
  53. siibra/livequeries/ebrains.py +145 -0
  54. siibra/livequeries/query.py +49 -0
  55. siibra/locations/__init__.py +91 -0
  56. siibra/locations/boundingbox.py +454 -0
  57. siibra/locations/location.py +115 -0
  58. siibra/locations/point.py +344 -0
  59. siibra/locations/pointcloud.py +349 -0
  60. siibra/retrieval/__init__.py +27 -0
  61. siibra/retrieval/cache.py +233 -0
  62. siibra/retrieval/datasets.py +389 -0
  63. siibra/retrieval/exceptions/__init__.py +27 -0
  64. siibra/retrieval/repositories.py +769 -0
  65. siibra/retrieval/requests.py +659 -0
  66. siibra/vocabularies/__init__.py +45 -0
  67. siibra/vocabularies/gene_names.json +29176 -0
  68. siibra/vocabularies/receptor_symbols.json +210 -0
  69. siibra/vocabularies/region_aliases.json +460 -0
  70. siibra/volumes/__init__.py +23 -0
  71. siibra/volumes/parcellationmap.py +1279 -0
  72. siibra/volumes/providers/__init__.py +20 -0
  73. siibra/volumes/providers/freesurfer.py +113 -0
  74. siibra/volumes/providers/gifti.py +165 -0
  75. siibra/volumes/providers/neuroglancer.py +736 -0
  76. siibra/volumes/providers/nifti.py +266 -0
  77. siibra/volumes/providers/provider.py +107 -0
  78. siibra/volumes/sparsemap.py +468 -0
  79. siibra/volumes/volume.py +892 -0
  80. siibra-1.0.0a1.dist-info/LICENSE +201 -0
  81. siibra-1.0.0a1.dist-info/METADATA +160 -0
  82. siibra-1.0.0a1.dist-info/RECORD +84 -0
  83. siibra-1.0.0a1.dist-info/WHEEL +5 -0
  84. siibra-1.0.0a1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,95 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from . import cortical_profile
17
+ from .. import anchor as _anchor
18
+ from . import tabular
19
+ from ..tabular.cell_density_profile import cell_reader, layer_reader
20
+
21
+ from ... import commons
22
+ from ...retrieval import requests
23
+
24
+ import pandas as pd
25
+ import numpy as np
26
+
27
+
28
class LayerwiseCellDensity(
    tabular.Tabular,
    configuration_folder="features/tabular/layerstatistics/celldensity",
    category='cellular'
):
    """Layer-wise cell density statistics derived from segmented cortical image patches.

    Pairs cell segmentation files with layer segmentation files and computes,
    per cortical layer, the mean and standard deviation of detected-cell
    densities across all file pairs.
    """

    DESCRIPTION = (
        "Layerwise estimated densities of detected cell bodies (in detected cells per 0.1 cube millimeter) "
        "obtained by applying a Deep Learning based instance segmentation algorithm (Contour Proposal Network; Upschulte "
        "et al., Neuroimage 2022) to a 1 micron resolution cortical image patch prepared with modified Silver staining. "
        "Densities have been computed per cortical layer after manual layer segmentation, by dividing the number of "
        "detected cells in that layer with the area covered by the layer. Therefore, each profile contains 6 measurement points. "
        "The cortical depth is estimated from the measured layer thicknesses."
    )

    def __init__(
        self,
        segmentfiles: list,
        layerfiles: list,
        anchor: _anchor.AnatomicalAnchor,
        datasets: list = None,
        id: str = None,
        prerelease: bool = False,
    ):
        """
        Parameters
        ----------
        segmentfiles: list
            URLs of cell segmentation files (decoded with `cell_reader`).
        layerfiles: list
            URLs of matching layer segmentation files (decoded with
            `layer_reader`); paired with `segmentfiles` by position.
        anchor: AnatomicalAnchor
            Anatomical location this feature is anchored to.
        datasets: list, optional
            Datasets associated with this feature.
        id: str, optional
            Unique identifier of the feature.
        prerelease: bool, default: False
            Whether this feature is a prerelease.
        """
        tabular.Tabular.__init__(
            self,
            description=self.DESCRIPTION,
            modality="Cell body density",
            anchor=anchor,
            datasets=datasets or [],  # avoid shared mutable default argument
            data=None,  # lazy loading below
            id=id,
            prerelease=prerelease,
        )
        self.unit = "# detected cells/0.1mm3"
        self._filepairs = list(zip(segmentfiles, layerfiles))
        self._densities = None

    def _load_densities(self):
        """Fetch all file pairs and compute per-layer cell densities.

        Returns
        -------
        pd.DataFrame
            One column per successfully loaded file pair, indexed by layer.
            Inaccessible file pairs are skipped with an error log entry.
        """
        density_dict = {}
        for i, (cellfile, layerfile) in enumerate(self._filepairs):
            try:
                cells = requests.HttpRequest(cellfile, func=cell_reader).data
                layers = requests.HttpRequest(layerfile, func=layer_reader).data
            except requests.SiibraHttpRequestError as e:
                # log instead of printing to stdout; include the original
                # error so the failing resource can be diagnosed
                commons.logger.error(
                    f"Skipping to bootstrap a {self.__class__.__name__} feature, "
                    f"cannot access file resource: {e}"
                )
                continue
            counts = cells.layer.value_counts()
            areas = layers["Area(micron**2)"]
            # restrict to layers present in both the counts and the area table
            indices = np.intersect1d(areas.index, counts.index)
            # scale from cells per micron^2 of layer area to the feature unit
            density_dict[i] = counts[indices] / areas * 100 ** 2 * 5
        return pd.DataFrame(density_dict)

    @property
    def data(self):
        """Layer-wise mean and standard deviation of cell densities (lazy-loaded)."""
        if self._data_cached is None:
            densities = self._load_densities()
            self._data_cached = pd.DataFrame(
                np.array([
                    list(densities.mean(axis=1)),
                    list(densities.std(axis=1))
                ]).T,
                columns=['mean', 'std'],
                index=[cortical_profile.CorticalProfile.LAYERS[_] for _ in densities.index]
            )
            self._data_cached.index.name = 'layer'
        return self._data_cached
@@ -0,0 +1,192 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from .. import anchor as _anchor
17
+ from . import tabular
18
+
19
+ from ... import commons, vocabularies
20
+ from ...retrieval import requests
21
+
22
+ import pandas as pd
23
+ import numpy as np
24
+ from textwrap import wrap
25
+ from typing import List
26
+
27
+
28
class ReceptorDensityFingerprint(
    tabular.Tabular,
    configuration_folder="features/tabular/fingerprints/receptor",
    category='molecular'
):
    """Fingerprint of receptor densities for a brain region.

    Provides mean and standard deviation of densities of multiple receptors,
    loaded lazily from a .tsv file.
    """

    DESCRIPTION = (
        "Fingerprint of densities (in fmol/mg protein) of receptors for classical neurotransmitters "
        "obtained by means of quantitative in vitro autoradiography. The fingerprint provides average "
        "density measurements for different receptors measured in tissue samples from different subjects "
        "together with the corresponding standard deviations. "
    )

    def __init__(
        self,
        tsvfile: str,
        anchor: _anchor.AnatomicalAnchor,
        datasets: list = None,
        id: str = None,
        prerelease: bool = False,
    ):
        """ Generate a receptor fingerprint from a URL to a .tsv file
        formatted according to the structure used by Palomero-Gallagher et al.

        Parameters
        ----------
        tsvfile: str
            URL of the tsv file with the fingerprint data.
        anchor: AnatomicalAnchor
            Anatomical location this feature is anchored to.
        datasets: list, optional
            Datasets associated with this feature.
        id: str, optional
            Unique identifier of the feature.
        prerelease: bool, default: False
            Whether this feature is a prerelease.
        """
        tabular.Tabular.__init__(
            self,
            description=self.DESCRIPTION,
            modality="Neurotransmitter receptor density",
            anchor=anchor,
            data=None,  # lazy loading below
            datasets=datasets or [],  # avoid shared mutable default argument
            id=id,
            prerelease=prerelease,
        )
        self._loader = requests.HttpRequest(tsvfile)

    @property
    def unit(self) -> str:
        """Measurement unit, taken from the first row of the last table column."""
        # explicit positional access; label-based Series[0] is deprecated
        return self._loader.data.iloc[:, -1].iloc[0]

    @property
    def receptors(self) -> List[str]:
        """Receptor symbols covered by this fingerprint."""
        return list(self.data.index)

    @property
    def neurotransmitters(self) -> List[str]:
        """Neurotransmitter descriptions matching the receptor symbols."""
        # TODO quite a lot of receptor features have undecipherable symbols, mainly double quoted receptor
        # Likely ill-formed tsv's
        return [
            "{} ({})".format(
                vocabularies.RECEPTOR_SYMBOLS[t]['neurotransmitter']['label'],
                vocabularies.RECEPTOR_SYMBOLS[t]['neurotransmitter']['name'],
            ) if t in vocabularies.RECEPTOR_SYMBOLS else
            f"{t} (undeciphered)"
            for t in self.receptors
        ]

    @property
    def data(self):
        """Mean and standard deviation per receptor (lazy-loaded)."""
        if self._data_cached is None:
            label_col, mean_col, std_col = list(self._loader.data.columns)[:3]
            self._data_cached = pd.DataFrame(
                np.array([
                    self._loader.data[mean_col],
                    self._loader.data[std_col]
                ]).T,
                index=self._loader.data[label_col],
                columns=['mean', 'std']
            )
            self._data_cached.index.name = 'receptor'
        return self._data_cached.copy()

    @classmethod
    def parse_tsv_data(cls, data: dict):
        """Parse a fingerprint dictionary decoded from a tsv file.

        Parameters
        ----------
        data: dict
            Mapping of receptor label to a row dictionary containing
            'density (mean)', 'density (sd)', and the unit as its 4th value.

        Returns
        -------
        dict
            With keys 'unit', 'labels', 'means', 'stds'; non-numeric
            density entries are replaced with 0.

        Raises
        ------
        KeyError
            If the expected density columns are missing in the data.
        """
        def to_float(value):
            # str.isnumeric() rejects decimals like "3.14"; parse robustly
            # and fall back to 0 for non-numeric placeholders
            try:
                return float(value)
            except (TypeError, ValueError):
                return 0.0

        units = {list(v.values())[3] for v in data.values()}
        labels = list(data.keys())
        assert len(units) == 1
        try:
            mean = [data[_]["density (mean)"] for _ in labels]
            std = [data[_]["density (sd)"] for _ in labels]
        except KeyError:
            commons.logger.error("Could not parse fingerprint from this dictionary")
            # previously fell through and raised NameError on undefined 'mean'
            raise
        return {
            'unit': next(iter(units)),
            'labels': labels,
            'means': [to_float(m) for m in mean],
            'stds': [to_float(s) for s in std],
        }

    def polar_plot(self, *args, backend='matplotlib', **kwargs):
        """
        Create a polar plot of the fingerprint.

        Parameters
        ----------
        backend: str
            "matplotlib" or "plotly"
        kwargs (matplotlib backend only):
            y: data column to plot (default: first column)
            yerr: column with error values (default: 'std' if present)
            ax: existing polar axes to draw into
            Remaining kwargs are forwarded to the plotting calls.
        """
        if backend == "matplotlib":
            try:
                import matplotlib.pyplot as plt
            except ImportError:
                commons.logger.error("matplotlib not available. Plotting of fingerprints disabled.")
                return None
            from collections import deque

            # default args
            wrapwidth = 40
            y = kwargs.pop("y") if "y" in kwargs else self.data.columns[0]
            yerr = kwargs.pop("yerr") if "yerr" in kwargs else None
            if yerr is None and 'std' in self.data.columns:
                yerr = 'std'
            ax = kwargs.pop("ax") if "ax" in kwargs else plt.subplot(111, projection="polar")

            datafield = y or self.data.columns[0]
            # angles run clockwise; rotate so the first receptor sits off the axis start
            angles = deque(np.linspace(0, 2 * np.pi, self.data.shape[0] + 1)[:-1][::-1])
            angles.rotate(5)
            angles = list(angles)
            # repeat the first element to obtain a closed polygon
            indices = list(range(self.data.shape[0])) + [0]
            y = list(self.data[datafield].iloc[indices])
            plt.plot(angles + [angles[0]], y, "k-", lw=3, **kwargs)
            if yerr:
                bounds0 = y - self.data[yerr].iloc[indices]
                plt.plot(angles + [angles[0]], bounds0, "k", lw=0.5, **kwargs)
                bounds1 = y + self.data[yerr].iloc[indices]
                plt.plot(angles + [angles[0]], bounds1, "k", lw=0.5, **kwargs)
            ax.set_xticks(angles)
            ax.set_xticklabels([_ for _ in self.data.index])
            ax.set_ylabel(self.unit)
            ax.set_title(
                "\n".join(wrap(f"{self.modality} anchored at {self.anchor._regionspec}", wrapwidth))
            )
            ax.tick_params(pad=9, labelsize=10)
            ax.tick_params(axis="y", labelsize=8)
            plt.tight_layout()
            return ax
        elif backend == "plotly":
            from plotly.express import line_polar
            # stack mean and mean +/- std as separate traces of one long frame
            df = pd.DataFrame(
                {
                    "values": pd.concat(
                        [
                            self.data["mean"],
                            self.data["mean"] - self.data["std"],
                            self.data["mean"] + self.data["std"]
                        ]
                    ),
                    "cat": (
                        len(self.data) * ["mean"]
                        + len(self.data) * ["mean - std"]
                        + len(self.data) * ["mean + std"]
                    )
                }
            )
            return line_polar(
                df, r="values", theta=df.index, color="cat", line_close=True, **kwargs
            )
        else:
            raise NotImplementedError

    def plot(self, *args, **kwargs):
        """Plot the fingerprint with the default tabular plotting, without x label."""
        kwargs['xlabel'] = ""
        return super().plot(*args, **kwargs)
@@ -0,0 +1,110 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from .. import anchor as _anchor
17
+ from . import cortical_profile
18
+
19
+ from ... import vocabularies
20
+ from ...commons import create_key
21
+ from ...retrieval import requests
22
+
23
+
24
class ReceptorDensityProfile(
    cortical_profile.CorticalProfile,
    configuration_folder="features/tabular/corticalprofiles/receptor",
    category='molecular'
):
    """Cortical depth profile of the density of a single receptor,
    loaded lazily from a .tsv file."""

    DESCRIPTION = (
        "Cortical profile of densities (in fmol/mg protein) of receptors for classical neurotransmitters "
        "obtained by means of quantitative in vitro autoradiography. The profiles provide, for a "
        "single tissue sample, an exemplary density distribution for a single receptor from the pial surface "
        "to the border between layer VI and the white matter."
    )

    _filter_attrs = cortical_profile.CorticalProfile._filter_attrs + ["receptor"]

    def __init__(
        self,
        receptor: str,
        tsvfile: str,
        anchor: _anchor.AnatomicalAnchor,
        datasets: list = None,
        id: str = None,
        prerelease: bool = False,
    ):
        """Generate a receptor density profile from a URL to a .tsv file
        formatted according to the structure used by Palomero-Gallagher et al.

        Parameters
        ----------
        receptor: str
            Receptor symbol; used to look up full names in the receptor
            vocabulary.
        tsvfile: str
            URL of the tsv file with the profile data.
        anchor: AnatomicalAnchor
            Anatomical location this feature is anchored to.
        datasets: list, optional
            Datasets associated with this feature.
        id: str, optional
            Unique identifier of the feature.
        prerelease: bool, default: False
            Whether this feature is a prerelease.
        """
        cortical_profile.CorticalProfile.__init__(
            self,
            description=self.DESCRIPTION,
            modality="Receptor density",
            anchor=anchor,
            datasets=datasets or [],  # avoid shared mutable default argument
            id=id,
            prerelease=prerelease
        )
        self.receptor = receptor
        self._data_cached = None
        self._loader = requests.HttpRequest(tsvfile)
        self._unit_cached = None

    @property
    def key(self):
        """Unique key combining class name, id, species, region and receptor."""
        return "{}_{}_{}_{}_{}".format(
            create_key(self.__class__.__name__),
            self.id,
            create_key(self.species_name),
            create_key(self.regionspec),
            create_key(self.receptor)
        )

    @property
    def receptor_fullname(self):
        """Full receptor name from the receptor vocabulary."""
        return vocabularies.RECEPTOR_SYMBOLS[self.receptor]['receptor']['name']

    @property
    def neurotransmitter(self):
        """Neurotransmitter label and name from the receptor vocabulary."""
        return "{} ({})".format(
            vocabularies.RECEPTOR_SYMBOLS[self.receptor]['neurotransmitter']['label'],
            vocabularies.RECEPTOR_SYMBOLS[self.receptor]['neurotransmitter']['name'],
        )

    @property
    def unit(self):
        """Measurement unit, taken from the first row of the last table column."""
        # triggers lazy loading of the HttpRequest;
        # explicit positional access since label-based Series[0] is deprecated
        return self._loader.data.iloc[:, -1].iloc[0]

    @property
    def _values(self):
        # triggers lazy loading of the HttpRequest
        return self._loader.data.iloc[:, -2].values

    @property
    def _depths(self):
        # depths are stored in percent of cortical depth; normalize to [0, 1]
        return self._loader.data.iloc[:, 0].values / 100.

    @classmethod
    def parse_tsv_data(cls, data):
        """Parse a profile dictionary decoded from a tsv file.

        Parameters
        ----------
        data: dict
            Mapping of cortical depth percentage (numeric string keys) to a
            row dictionary whose 3rd value is the density and 4th the unit.

        Returns
        -------
        dict
            With keys 'depth' (normalized to [0, 1]), 'density', and 'unit'.
        """
        # NOTE: first parameter renamed from the misleading 'self' to 'cls',
        # matching the @classmethod decorator
        units = {list(v.values())[3] for v in data.values()}
        assert len(units) == 1
        return {
            "depth": [float(k) / 100.0 for k in data.keys() if k.isnumeric()],
            "density": [
                float(list(v.values())[2]) for k, v in data.items() if k.isnumeric()
            ],
            "unit": next(iter(units)),
        }
@@ -0,0 +1,294 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from . import tabular
17
+ from ..feature import Compoundable
18
+
19
+ from ...core import region as _region
20
+ from .. import anchor as _anchor
21
+ from ...commons import QUIET, siibra_tqdm
22
+ from ...locations import pointcloud
23
+ from ...retrieval.repositories import RepositoryConnector
24
+ from ...retrieval.requests import HttpRequest
25
+
26
+ from typing import Callable, List, Union
27
+ import pandas as pd
28
+ import numpy as np
29
+
30
+
31
+ class RegionalTimeseriesActivity(tabular.Tabular, Compoundable):
32
+ """
33
+ Datasets that provide regional activity over time.
34
+ """
35
+
36
+ _filter_attrs = ["modality", "cohort", "subject"]
37
+ _compound_attrs = ["modality", "cohort"]
38
+
39
+ def __init__(
40
+ self,
41
+ cohort: str,
42
+ modality: str,
43
+ regions: list,
44
+ connector: RepositoryConnector,
45
+ decode_func: Callable,
46
+ filename: str,
47
+ anchor: _anchor.AnatomicalAnchor,
48
+ timestep: str,
49
+ description: str = "",
50
+ datasets: list = [],
51
+ subject: str = "average",
52
+ id: str = None,
53
+ prerelease: bool = False,
54
+ ):
55
+ """
56
+ """
57
+ tabular.Tabular.__init__(
58
+ self,
59
+ modality=modality,
60
+ description=description,
61
+ anchor=anchor,
62
+ datasets=datasets,
63
+ data=None, # lazy loading below
64
+ id=id,
65
+ prerelease=prerelease,
66
+ )
67
+ self.cohort = cohort.upper()
68
+ if isinstance(connector, str) and connector:
69
+ self._connector = HttpRequest(connector, decode_func)
70
+ else:
71
+ self._connector = connector
72
+ self._filename = filename
73
+ self._decode_func = decode_func
74
+ self.regions = regions
75
+ self._table = None
76
+ self._subject = subject
77
+ val, unit = timestep.split(" ")
78
+ self.timestep = (float(val), unit)
79
+
80
+ @property
81
+ def subject(self):
82
+ """Returns the subject identifiers for which the table represents."""
83
+ return self._subject
84
+
85
+ @property
86
+ def name(self):
87
+ return f"{self.subject} - " + super().name + f" cohort: {self.cohort}"
88
+
89
+ @property
90
+ def data(self) -> pd.DataFrame:
91
+ """
92
+ Returns a table as a pandas dataframe where the index is a timeseries.
93
+ """
94
+ if self._table is None:
95
+ self._load_table()
96
+ return self._table.copy()
97
+
98
+ @classmethod
99
+ def _merge_elements(
100
+ cls,
101
+ elements: List["RegionalTimeseriesActivity"],
102
+ description: str,
103
+ modality: str,
104
+ anchor: _anchor.AnatomicalAnchor,
105
+ ):
106
+ assert len({f.cohort for f in elements}) == 1
107
+ assert len({f.timestep for f in elements}) == 1
108
+ merged = cls(
109
+ cohort=elements[0].cohort,
110
+ regions=elements[0].regions,
111
+ connector=elements[0]._connector,
112
+ decode_func=elements[0]._decode_func,
113
+ filename="",
114
+ timestep=" ".join(str(val) for val in elements[0].timestep),
115
+ subject="average",
116
+ description=description,
117
+ modality=modality,
118
+ anchor=anchor,
119
+ **{"paradigm": "average"} if getattr(elements[0], "paradigm") else {}
120
+ )
121
+ if isinstance(elements[0]._connector, HttpRequest):
122
+ getter = lambda elm: elm._connector.get()
123
+ else:
124
+ getter = lambda elm: elm._connector.get(elm._filename, decode_func=elm._decode_func)
125
+ all_arrays = [
126
+ getter(elm)
127
+ for elm in siibra_tqdm(
128
+ elements,
129
+ total=len(elements),
130
+ desc=f"Averaging {len(elements)} activity tables"
131
+ )
132
+ ]
133
+ merged._table = elements[0]._arraylike_to_dataframe(
134
+ np.stack(all_arrays).mean(0)
135
+ )
136
+ return merged
137
+
138
+ def _load_table(self):
139
+ """
140
+ Extract the timeseries table.
141
+ """
142
+ if isinstance(self._connector, HttpRequest):
143
+ array = self._connector.data
144
+ else:
145
+ array = self._connector.get(self._filename, decode_func=self._decode_func)
146
+ self._table = self._arraylike_to_dataframe(array)
147
+
148
+ def _arraylike_to_dataframe(self, array: Union[np.ndarray, pd.DataFrame]) -> pd.DataFrame:
149
+ if not isinstance(array, np.ndarray):
150
+ array = array.to_numpy()
151
+ ncols = array.shape[1]
152
+ table = pd.DataFrame(
153
+ array,
154
+ index=pd.TimedeltaIndex(
155
+ np.arange(0, array.shape[0]) * self.timestep[0],
156
+ unit=self.timestep[1],
157
+ name="time"
158
+ )
159
+ )
160
+ parcellations = self.anchor.represented_parcellations()
161
+ assert len(parcellations) == 1
162
+ parc = next(iter(parcellations))
163
+ with QUIET:
164
+ columnmap = {
165
+ i: parc.get_region(regionname, allow_tuple=True)
166
+ for i, regionname in enumerate(self.regions)
167
+ }
168
+ if len(columnmap) == ncols:
169
+ remapper = {
170
+ label - min(columnmap.keys()): region
171
+ for label, region in columnmap.items()
172
+ }
173
+ table = table.rename(columns=remapper)
174
+
175
+ return table
176
+
177
+ def __str__(self):
178
+ return self.name
179
+
180
+ def compute_centroids(self, space):
181
+ """
182
+ Computes the list of centroid coordinates corresponding to
183
+ dataframe columns, in the given reference space.
184
+
185
+ Parameters
186
+ ----------
187
+ space: Space, str
188
+
189
+ Returns
190
+ -------
191
+ list[tuple(float, float, float)]
192
+ """
193
+ result = []
194
+ parcellations = self.anchor.represented_parcellations()
195
+ assert len(parcellations) == 1
196
+ parcmap = next(iter(parcellations)).get_map(space)
197
+ all_centroids = parcmap.compute_centroids()
198
+ for regionname in self.regions:
199
+ region = parcmap.parcellation.get_region(regionname, allow_tuple=True)
200
+ if isinstance(region, tuple): # deal with sets of matched regions
201
+ found = [c for r in region for c in r if c.name in all_centroids]
202
+ else:
203
+ found = [r for r in region if r.name in all_centroids]
204
+ assert len(found) > 0
205
+ result.append(
206
+ tuple(pointcloud.PointCloud(
207
+ [all_centroids[r.name] for r in found], space=space
208
+ ).centroid)
209
+ )
210
+ return result
211
+
212
+ def plot(
213
+ self, regions: List[Union[str, "_region.Region"]] = None, *args,
214
+ backend="matplotlib", **kwargs
215
+ ):
216
+ """
217
+ Create a bar plot of averaged timeseries data per region.
218
+
219
+ Parameters
220
+ ----------
221
+ regions: List[str or Region]
222
+ subject: str, default: None
223
+ If None, returns the subject averaged table.
224
+ args and kwargs:
225
+ takes arguments and keyword arguments for the desired plotting
226
+ backend.
227
+ """
228
+ if isinstance(regions, (str, _region.Region)):
229
+ regions = [regions]
230
+ if regions is None:
231
+ regions = self.regions
232
+ indices = [self.regions.index(r) for r in regions]
233
+ table = self.data.iloc[:, indices]
234
+ table.columns = [str(r) for r in table.columns]
235
+ return table.mean().plot(kind="bar", *args, backend=backend, **kwargs)
236
+
237
+ def plot_carpet(
238
+ self, regions: List[Union[str, "_region.Region"]] = None, *args,
239
+ backend="plotly", **kwargs
240
+ ):
241
+ """
242
+ Create a carpet plot ofthe timeseries data per region.
243
+
244
+ Parameters
245
+ ----------
246
+ regions: List[str or Region]
247
+ subject: str, default: None
248
+ If None, returns the subject averaged table.
249
+ args and kwargs:
250
+ takes arguments and keyword arguments for `plotly.express.imshow`
251
+ """
252
+ if backend != "plotly":
253
+ raise NotImplementedError("Currently, carpet plot is only implemented with `plotly`.")
254
+ if isinstance(regions, (str, _region.Region)):
255
+ regions = [regions]
256
+ if regions is None:
257
+ regions = self.regions
258
+ indices = [self.regions.index(r) for r in regions]
259
+ table = self.data.iloc[:, indices].reset_index(drop=True)
260
+ table.columns = [str(r) for r in table.columns]
261
+ kwargs["title"] = kwargs.get("title", f"{self.modality}" + f" for subject={self.subject}")
262
+ kwargs["labels"] = kwargs.get("labels", {
263
+ "xlabel": self.data.index.to_numpy(dtype='timedelta64[ms]')}
264
+ )
265
+ from plotly.express import imshow
266
+ return imshow(
267
+ *args,
268
+ table.T,
269
+ **kwargs
270
+ )
271
+
272
+
273
+ class RegionalBOLD(
274
+ RegionalTimeseriesActivity,
275
+ configuration_folder="features/tabular/activity_timeseries/bold",
276
+ category="functional"
277
+ ):
278
+ """
279
+ Blood-oxygen-level-dependent (BOLD) signals per region.
280
+ """
281
+
282
+ _filter_attrs = RegionalTimeseriesActivity._filter_attrs + ["paradigm"]
283
+ _compound_attrs = RegionalTimeseriesActivity._compound_attrs + ["paradigm"]
284
+
285
+ def __init__(self, paradigm: str, **kwargs):
286
+ RegionalTimeseriesActivity.__init__(self, **kwargs)
287
+ self.paradigm = paradigm
288
+
289
+ # paradign is used to distinguish functional connectivity features from each other.
290
+ assert self.paradigm, "RegionalBOLD must have paradigm defined!"
291
+
292
+ @property
293
+ def name(self):
294
+ return super().name + f", paradigm: {self.paradigm}"