siibra 1.0a1__1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siibra might be problematic. Click here for more details.

Files changed (84)
  1. siibra/VERSION +1 -0
  2. siibra/__init__.py +164 -0
  3. siibra/commons.py +823 -0
  4. siibra/configuration/__init__.py +17 -0
  5. siibra/configuration/configuration.py +189 -0
  6. siibra/configuration/factory.py +589 -0
  7. siibra/core/__init__.py +16 -0
  8. siibra/core/assignment.py +110 -0
  9. siibra/core/atlas.py +239 -0
  10. siibra/core/concept.py +308 -0
  11. siibra/core/parcellation.py +387 -0
  12. siibra/core/region.py +1223 -0
  13. siibra/core/space.py +131 -0
  14. siibra/core/structure.py +111 -0
  15. siibra/exceptions.py +63 -0
  16. siibra/experimental/__init__.py +19 -0
  17. siibra/experimental/contour.py +61 -0
  18. siibra/experimental/cortical_profile_sampler.py +57 -0
  19. siibra/experimental/patch.py +98 -0
  20. siibra/experimental/plane3d.py +256 -0
  21. siibra/explorer/__init__.py +17 -0
  22. siibra/explorer/url.py +222 -0
  23. siibra/explorer/util.py +87 -0
  24. siibra/features/__init__.py +117 -0
  25. siibra/features/anchor.py +224 -0
  26. siibra/features/connectivity/__init__.py +33 -0
  27. siibra/features/connectivity/functional_connectivity.py +57 -0
  28. siibra/features/connectivity/regional_connectivity.py +494 -0
  29. siibra/features/connectivity/streamline_counts.py +27 -0
  30. siibra/features/connectivity/streamline_lengths.py +27 -0
  31. siibra/features/connectivity/tracing_connectivity.py +30 -0
  32. siibra/features/dataset/__init__.py +17 -0
  33. siibra/features/dataset/ebrains.py +90 -0
  34. siibra/features/feature.py +970 -0
  35. siibra/features/image/__init__.py +27 -0
  36. siibra/features/image/image.py +115 -0
  37. siibra/features/image/sections.py +26 -0
  38. siibra/features/image/volume_of_interest.py +88 -0
  39. siibra/features/tabular/__init__.py +24 -0
  40. siibra/features/tabular/bigbrain_intensity_profile.py +77 -0
  41. siibra/features/tabular/cell_density_profile.py +298 -0
  42. siibra/features/tabular/cortical_profile.py +322 -0
  43. siibra/features/tabular/gene_expression.py +257 -0
  44. siibra/features/tabular/layerwise_bigbrain_intensities.py +62 -0
  45. siibra/features/tabular/layerwise_cell_density.py +95 -0
  46. siibra/features/tabular/receptor_density_fingerprint.py +192 -0
  47. siibra/features/tabular/receptor_density_profile.py +110 -0
  48. siibra/features/tabular/regional_timeseries_activity.py +294 -0
  49. siibra/features/tabular/tabular.py +139 -0
  50. siibra/livequeries/__init__.py +19 -0
  51. siibra/livequeries/allen.py +352 -0
  52. siibra/livequeries/bigbrain.py +197 -0
  53. siibra/livequeries/ebrains.py +145 -0
  54. siibra/livequeries/query.py +49 -0
  55. siibra/locations/__init__.py +91 -0
  56. siibra/locations/boundingbox.py +454 -0
  57. siibra/locations/location.py +115 -0
  58. siibra/locations/point.py +344 -0
  59. siibra/locations/pointcloud.py +349 -0
  60. siibra/retrieval/__init__.py +27 -0
  61. siibra/retrieval/cache.py +233 -0
  62. siibra/retrieval/datasets.py +389 -0
  63. siibra/retrieval/exceptions/__init__.py +27 -0
  64. siibra/retrieval/repositories.py +769 -0
  65. siibra/retrieval/requests.py +659 -0
  66. siibra/vocabularies/__init__.py +45 -0
  67. siibra/vocabularies/gene_names.json +29176 -0
  68. siibra/vocabularies/receptor_symbols.json +210 -0
  69. siibra/vocabularies/region_aliases.json +460 -0
  70. siibra/volumes/__init__.py +23 -0
  71. siibra/volumes/parcellationmap.py +1279 -0
  72. siibra/volumes/providers/__init__.py +20 -0
  73. siibra/volumes/providers/freesurfer.py +113 -0
  74. siibra/volumes/providers/gifti.py +165 -0
  75. siibra/volumes/providers/neuroglancer.py +736 -0
  76. siibra/volumes/providers/nifti.py +266 -0
  77. siibra/volumes/providers/provider.py +107 -0
  78. siibra/volumes/sparsemap.py +468 -0
  79. siibra/volumes/volume.py +892 -0
  80. siibra-1.0.0a1.dist-info/LICENSE +201 -0
  81. siibra-1.0.0a1.dist-info/METADATA +160 -0
  82. siibra-1.0.0a1.dist-info/RECORD +84 -0
  83. siibra-1.0.0a1.dist-info/WHEEL +5 -0
  84. siibra-1.0.0a1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,494 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from zipfile import ZipFile
17
+ from ..feature import Feature, Compoundable
18
+ from ..tabular.tabular import Tabular
19
+
20
+ from .. import anchor as _anchor
21
+
22
+ from ...commons import logger, QUIET, siibra_tqdm
23
+ from ...core import region as _region
24
+ from ...locations import pointcloud
25
+ from ...retrieval.repositories import RepositoryConnector
26
+ from ...retrieval.requests import HttpRequest
27
+
28
+
29
+ import pandas as pd
30
+ import numpy as np
31
+ from typing import Callable, Union, List, Tuple, Iterator
32
+
33
+ try:
34
+ from typing import Literal
35
+ except ImportError: # support python 3.7
36
+ from typing_extensions import Literal
37
+
38
+
39
class RegionalConnectivity(Feature, Compoundable):
    """
    Parcellation-averaged connectivity, providing one or more matrices of a
    given modality for a given parcellation.
    """

    # Attributes used to filter features and to group them into compounds.
    _filter_attrs = ["modality", "cohort", "subject"]
    _compound_attrs = ["modality", "cohort"]

    def __init__(
        self,
        cohort: str,
        modality: str,
        regions: list,
        connector: RepositoryConnector,
        decode_func: Callable,
        filename: str,
        anchor: _anchor.AnatomicalAnchor,
        description: str = "",
        datasets: list = None,
        subject: str = "average",
        feature: str = None,
        id: str = None,
        prerelease: bool = False,
    ):
        """
        Construct a parcellation-averaged connectivity matrix.

        Parameters
        ----------
        cohort: str
            Name of the cohort used for computing the connectivity.
        modality: str
            Connectivity modality, typically set by derived classes.
        regions: list[str]
            Names of the regions from the parcellation.
        connector: RepositoryConnector or str
            Repository connector for loading the actual data array(s). A
            non-empty string is interpreted as a URL and wrapped in an
            HttpRequest.
        decode_func: function
            Function to convert the bytestream of a loaded file into an array.
        filename: str
            Relative filename of the data array in the repository connector.
        anchor: AnatomicalAnchor
            anatomical localization of the matrix, expected to encode the
            parcellation in the region attribute.
        description: str, optional
            textual description of this connectivity matrix.
        datasets : list[Dataset], optional
            list of datasets corresponding to this feature.
        subject: str, default: "average"
            Subject identifier this matrix belongs to.
        feature: str, optional
            Name of the feature type this matrix represents, if applicable.
        """
        Feature.__init__(
            self,
            modality=modality,
            description=description,
            anchor=anchor,
            # NOTE: use a fresh list instead of a shared mutable default argument.
            datasets=datasets if datasets is not None else [],
            id=id,
            prerelease=prerelease
        )
        self.cohort = cohort.upper()
        if isinstance(connector, str) and connector:
            # a plain URL was given; wrap it into a lazy HTTP request
            self._connector = HttpRequest(connector, decode_func)
        else:
            self._connector = connector
        self._filename = filename
        self._decode_func = decode_func
        self.regions = regions
        self._matrix = None  # lazily loaded, see the `data` property
        self._subject = subject
        self._feature = feature

    @property
    def subject(self):
        """Returns the subject identifiers for which the matrix represents."""
        return self._subject

    @property
    def feature(self):
        """If applicable, returns the type of feature for which the matrix represents."""
        return self._feature

    @property
    def name(self):
        return f"{self.feature or self.subject} - " + super().name + f" cohort: {self.cohort}"

    @property
    def data(self) -> pd.DataFrame:
        """
        Returns a matrix as a pandas dataframe.

        Returns
        -------
        pd.DataFrame
            A square matrix with region names as the column and row names.
        """
        if self._matrix is None:
            self._load_matrix()
        # return a copy so callers cannot mutate the cached matrix
        return self._matrix.copy()

    @classmethod
    def _merge_elements(
        cls,
        elements: List["RegionalConnectivity"],
        description: str,
        modality: str,
        anchor: _anchor.AnatomicalAnchor,
    ):
        """
        Build an averaged connectivity feature from elements of the same
        cohort; used when compounding features.
        """
        assert len({f.cohort for f in elements}) == 1
        merged = cls(
            cohort=elements[0].cohort,
            regions=elements[0].regions,
            connector=elements[0]._connector,
            decode_func=elements[0]._decode_func,
            filename="",
            subject="average",
            feature="average",
            description=description,
            modality=modality,
            anchor=anchor,
            **{"paradigm": "averaged (by siibra)"} if getattr(elements[0], "paradigm", None) else {}
        )
        # HttpRequest connectors carry their own target; repository
        # connectors need the per-element filename and decoder.
        if isinstance(elements[0]._connector, HttpRequest):
            def getter(elm):
                return elm._connector.get()
        else:
            def getter(elm):
                return elm._connector.get(elm._filename, decode_func=elm._decode_func)
        all_arrays = [
            getter(elm)
            for elm in siibra_tqdm(
                elements,
                total=len(elements),
                desc=f"Averaging {len(elements)} connectivity matrices"
            )
        ]
        merged._matrix = elements[0]._arraylike_to_dataframe(
            np.stack(all_arrays).mean(0)
        )
        return merged

    def _plot_matrix(
        self, regions: List[str] = None,
        logscale: bool = False, *args, backend="nilearn", **kwargs
    ):
        """
        Plots the heatmap of the connectivity matrix using nilearn.plotting.

        Parameters
        ----------
        regions: list[str]
            Display the matrix only for selected regions. By default, shows all the regions.
            It can only be a subset of regions of the feature.
        logscale: bool
            Display the data in log10 scale
        backend: str
            "nilearn" or "plotly"
        **kwargs:
            Can take all the arguments `nilearn.plotting.plot_matrix` can take. See the doc at
            https://nilearn.github.io/stable/modules/generated/nilearn.plotting.plot_matrix.html
        """
        if regions is None:
            regions = self.regions
        indices = [self.regions.index(r) for r in regions]
        matrix = self.data.iloc[indices, indices].to_numpy()  # nilearn.plotting.plot_matrix works better with a numpy array

        if logscale:
            matrix = np.log10(matrix)

        # default title unless the caller supplied one
        kwargs["title"] = kwargs.get(
            "title",
            "".join([
                f"{self.feature if self.feature else ''} - {self.subject} - ",
                f"{self.modality} in ",
                f"{', '.join({_.name for _ in self.anchor.regions})}"
            ])
        )

        # reordering is only supported by nilearn, hence it forces that backend
        if kwargs.get("reorder") or (backend == "nilearn"):
            kwargs["figure"] = kwargs.get("figure", (15, 15))
            from nilearn import plotting
            plotting.plot_matrix(
                matrix,
                labels=regions,
                **kwargs
            )
        elif backend == "plotly":
            kwargs["title"] = kwargs["title"].replace("\n", "<br>")
            from plotly.express import imshow
            return imshow(matrix, *args, x=regions, y=regions, **kwargs)
        else:
            raise NotImplementedError(
                f"Plotting connectivity matrices with {backend} is not supported."
            )

    def _to_zip(self, fh: ZipFile):
        """Export the matrix as CSV into the zip, under sub/ or feature/."""
        super()._to_zip(fh)
        if self.feature is None:
            fh.writestr(f"sub/{self._filename}/matrix.csv", self.data.to_csv())
        else:
            fh.writestr(f"feature/{self._filename}/matrix.csv", self.data.to_csv())

    def get_profile(
        self,
        region: Union[str, _region.Region],
        min_connectivity: float = 0,
        max_rows: int = None,
        direction: Literal['column', 'row'] = 'column'
    ):
        """
        Extract a regional profile from the matrix, to obtain a tabular data
        feature with the connectivity as the single column. Rows are sorted
        by descending connection strength.

        Parameters
        ----------
        region: str, Region
        min_connectivity: float, default: 0
            Regions with connectivity less than this value are discarded.
        max_rows: int, default: None
            Max number of regions with highest connectivity.
        direction: str, default: 'column'
            Choose the direction of profile extraction particularly for
            non-symmetric matrices. ('column' or 'row')
        """
        matrix = self.data
        if direction.lower() not in ['column', 'row']:
            raise ValueError("Direction can only be 'column' or 'row'")
        if direction.lower() == 'row':
            matrix = matrix.transpose()

        def matches(r1, r2):
            # index entries are either single regions or tuples of matched regions
            if isinstance(r1, tuple):
                return any(r.matches(r2) for r in r1)
            assert isinstance(r1, _region.Region)
            return r1.matches(r2)

        regions = [r for r in matrix.index if matches(r, region)]
        if len(regions) == 0:
            raise ValueError(f"Invalid region specification: {region}")
        if len(regions) > 1:
            raise ValueError(f"Region specification {region} matched more than one profile: {regions}")

        name = self.modality
        series = matrix[regions[0]]
        # Filter and sort first, and only then truncate, so that `max_rows`
        # really selects the regions with the *highest* connectivity.
        # (The previous implementation truncated positionally before sorting
        # and unconditionally dropped the last row of the series.)
        profile_data = (
            series
            .to_frame(name=name)
            .query(f'`{name}` > @min_connectivity')
            .sort_values(by=name, ascending=False)
            .rename_axis('Target regions')
        )
        if max_rows is not None:
            profile_data = profile_data.head(max_rows)
        return Tabular(
            description=self.description,
            modality=f"{self.modality} {self.cohort}",
            anchor=_anchor.AnatomicalAnchor(
                species=list(self.anchor.species)[0],
                region=regions[0]
            ),
            data=profile_data,
            datasets=self.datasets
        )

    def plot(
        self,
        regions: Union[str, _region.Region, List[Union[str, _region.Region]]] = None,
        min_connectivity: float = 0,
        max_rows: int = None,
        direction: Literal['column', 'row'] = 'column',
        logscale: bool = False,
        *args,
        backend="matplotlib",
        **kwargs
    ):
        """
        Parameters
        ----------
        regions: Union[str, _region.Region], None
            If None, returns the full connectivity matrix.
            If a region is provided, returns the profile for that region.
            If list of regions is provided, returns the matrix for the selected
            regions.
        min_connectivity: float, default 0
            Only for region profile.
        max_rows: int, default None
            Only for region profile.
        direction: 'column' or 'row', default: 'column'
            Only for matrix.
        logscale: bool, default: False
        backend: str, default: "matplotlib" for profiles and "nilearn" for matrices
        """
        if regions is None or isinstance(regions, list):
            # no single region: plot the (sub)matrix instead of a profile
            plot_matrix_backend = "nilearn" if backend == "matplotlib" else backend
            return self._plot_matrix(
                regions=regions, logscale=logscale, *args,
                backend=plot_matrix_backend,
                **kwargs
            )

        profile = self.get_profile(regions, min_connectivity, max_rows, direction)
        kwargs["kind"] = kwargs.get("kind", "barh")
        if backend == "matplotlib":
            kwargs["logx"] = kwargs.get("logx", logscale)
            return profile.data.plot(*args, backend=backend, **kwargs)
        elif backend == "plotly":
            kwargs.update({
                "color": kwargs.get("color", profile.data.columns[0]),
                "x": kwargs.get("x", profile.data.columns[0]),
                "y": kwargs.get("y", [r.name for r in profile.data.index]),
                "log_x": logscale,
                "labels": {"y": " ", "x": ""},
                "color_continuous_scale": "jet",
                "width": 600, "height": 15 * len(profile.data)
            })
            fig = profile.data.plot(*args, backend=backend, **kwargs)
            fig.update_layout({
                "font": dict(size=9),
                "yaxis": {"autorange": "reversed"},
                "coloraxis": {"colorbar": {
                    "orientation": "h", "title": "", "xpad": 0, "ypad": 10
                }},
                "margin": dict(l=0, r=0, b=0, t=0, pad=0)
            })
            return fig
        else:
            return profile.data.plot(*args, backend=backend, **kwargs)

    def get_profile_colorscale(
        self,
        region: Union[str, _region.Region],
        min_connectivity: float = 0,
        max_rows: int = None,
        direction: Literal['column', 'row'] = 'column',
        colorgradient: str = "jet"
    ) -> Iterator[Tuple[_region.Region, Tuple[int, int, int]]]:
        """
        Extract the colorscale corresponding to the regional profile from the
        matrix sorted by the values. See `get_profile` for further details.

        Note:
        -----
        Requires `plotly`.

        Parameters
        ----------
        region: str, Region
        min_connectivity: float, default: 0
            Regions with connectivity less than this value are discarded.
        max_rows: int, default: None
            Max number of regions with highest connectivity.
        direction: str, default: 'column'
            Choose the direction of profile extraction particularly for
            non-symmetric matrices. ('column' or 'row')
        colorgradient: str, default: 'jet'
            The gradient used to extract colorscale.
        Returns
        -------
        Iterator[Tuple[_region.Region, Tuple[int, int, int]]]
            Color values are in RGB 255.
        """
        import ast  # local import: only needed here to parse plotly color strings
        from plotly.express.colors import sample_colorscale
        profile = self.get_profile(region, min_connectivity, max_rows, direction)
        normalized = profile.data / profile.data.max()
        colorscale = sample_colorscale(
            colorgradient,
            normalized.values.reshape(len(profile.data))
        )
        # Parse 'rgb(r, g, b)' strings safely. The previous implementation
        # used `eval` and `str.removeprefix`; the latter requires Python >= 3.9
        # while this module still supports 3.7 (see typing_extensions fallback).
        rgb_tuples = [
            tuple(ast.literal_eval(c[c.index("("):])) for c in colorscale
        ]
        return zip(profile.data.index.values, rgb_tuples)

    def __len__(self):
        # NOTE(review): this returns the length of the filename *string*,
        # probably a leftover from when multiple files were stored per
        # feature — confirm against callers before changing.
        return len(self._filename)

    def __str__(self):
        return self.name

    def compute_centroids(self, space):
        """
        Computes the list of centroid coordinates corresponding to
        matrix rows, in the given reference space.

        Parameters
        ----------
        space: Space, str
        Returns
        -------
        list[tuple(float, float, float)]
        """
        result = []
        parcellations = self.anchor.represented_parcellations()
        assert len(parcellations) == 1
        parcmap = next(iter(parcellations)).get_map(space)
        all_centroids = parcmap.compute_centroids()
        for regionname in self.regions:
            region = parcmap.parcellation.get_region(regionname, allow_tuple=True)
            if isinstance(region, tuple):  # deal with sets of matched regions
                found = [c for r in region for c in r if c.name in all_centroids]
            else:
                found = [r for r in region if r.name in all_centroids]
            assert len(found) > 0
            # centroid of all matched child centroids
            result.append(
                tuple(pointcloud.PointCloud(
                    [all_centroids[r.name] for r in found], space=space
                ).centroid)
            )
        return result

    def _arraylike_to_dataframe(self, array: Union[np.ndarray, pd.DataFrame]) -> pd.DataFrame:
        """
        Convert a numpy array with the connectivity matrix to
        a DataFrame with regions as column and row headers.
        """
        if not isinstance(array, np.ndarray):
            array = array.to_numpy()
        assert array.shape[0] == array.shape[1], f"Connectivity matrices must be square but found {array.shape}"
        if not (array == array.T).all():
            logger.warning("The connectivity matrix is not symmetric.")
        df = pd.DataFrame(array)
        parcellations = self.anchor.represented_parcellations()
        assert len(parcellations) == 1
        parc = next(iter(parcellations))
        with QUIET:
            indexmap = {
                i: parc.get_region(regionname, allow_tuple=True)
                for i, regionname in enumerate(self.regions)
            }
        nrows = array.shape[0]
        try:
            assert len(indexmap) == nrows
            # shift labels so the smallest index maps to row/column 0
            remapper = {
                label - min(indexmap.keys()): region
                for label, region in indexmap.items()
            }
            df = df.rename(index=remapper).rename(columns=remapper)
        except Exception as e:
            # chain the original cause so decoding failures stay debuggable
            raise RuntimeError("Could not decode connectivity matrix regions.") from e
        return df

    def _load_matrix(self):
        """
        Extract connectivity matrix.
        """
        if isinstance(self._connector, HttpRequest):
            array = self._connector.data
        else:
            array = self._connector.get(self._filename, decode_func=self._decode_func)
        nrows = array.shape[0]
        if array.shape[1] != nrows:
            raise RuntimeError(
                f"Non-quadratic connectivity matrix {nrows}x{array.shape[1]} "
                f"from {self._filename} in {str(self._connector)}"
            )
        self._matrix = self._arraylike_to_dataframe(array)
@@ -0,0 +1,27 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from . import regional_connectivity
17
+
18
+
19
class StreamlineCounts(
    regional_connectivity.RegionalConnectivity,
    configuration_folder="features/connectivity/regional/streamlinecounts",
    category="connectivity"
):
    """Structural connectivity matrix of streamline counts grouped by a parcellation."""

    def __init__(self, **kwargs):
        # All construction logic lives in RegionalConnectivity; this subclass
        # only contributes its configuration folder and category.
        super().__init__(**kwargs)
@@ -0,0 +1,27 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from . import regional_connectivity
17
+
18
+
19
class StreamlineLengths(
    regional_connectivity.RegionalConnectivity,
    configuration_folder="features/connectivity/regional/streamlinelengths",
    category="connectivity"
):
    """Structural connectivity matrix of streamline lengths grouped by a parcellation."""

    def __init__(self, **kwargs):
        # Construction is delegated entirely to RegionalConnectivity; the
        # subclass only fixes the configuration folder and category.
        super().__init__(**kwargs)
@@ -0,0 +1,30 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from . import regional_connectivity
17
+
18
+
19
class TracingConnectivity(
    regional_connectivity.RegionalConnectivity,
    configuration_folder="features/connectivity/regional/tracing",
    category="connectivity"
):
    """
    Connectivity matrix obtained in a semi-quantitative manner and grouped by a
    parcellation.
    """

    def __init__(self, **kwargs):
        # No additional state: forward everything to the base class.
        super().__init__(**kwargs)
@@ -0,0 +1,17 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Non-preconfigured data features published as datasets."""
16
+
17
+ from .ebrains import EbrainsDataFeature
@@ -0,0 +1,90 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Non-preconfigured data features hosted at EBRAINS."""
16
+
17
+ from zipfile import ZipFile
18
+ from .. import anchor as _anchor
19
+ from .. import feature
20
+
21
+ from ...retrieval import datasets
22
+
23
+
24
+ DOI_TMPL = """
25
+ doi
26
+ ---
27
+ {doi}
28
+ """
29
+
30
+
31
class EbrainsDataFeature(feature.Feature, category="other"):
    """
    A feature wrapping a single EBRAINS dataset version, localized through an
    anatomical anchor. Modality and description are not set at construction
    time; they are resolved lazily from the wrapped dataset.
    """

    def __init__(
        self,
        dataset_version_id: str,
        anchor: _anchor.AnatomicalAnchor
    ):
        feature.Feature.__init__(
            self,
            modality=None,  # resolved lazily from the dataset
            description=None,  # resolved lazily, see the `description` property
            anchor=anchor,
            datasets=[datasets.EbrainsV3DatasetVersion(id=dataset_version_id)],
        )
        self.version = None
        # links to neighbouring versions, unset here
        self._next = None
        self._prev = None

    @property
    def id(self):
        """Identifier of the wrapped dataset version."""
        return self._dataset.id

    @property
    def _dataset(self) -> datasets.EbrainsV3DatasetVersion:
        """The single EBRAINS dataset version backing this feature."""
        assert len(self.datasets) == 1
        return self.datasets[0]

    @property
    def description(self) -> str:
        """Textual description, taken from the dataset."""
        return self._dataset.description

    @property
    def name(self):
        ds = self._dataset
        # A name starting with a blank presumably means the version carries no
        # usable title of its own; fall back to the parent dataset's name
        # (TODO confirm against the EBRAINS metadata conventions).
        title = ds.is_version_of[0].name if ds.name.startswith(" ") else ds.name
        return f"Ebrains Dataset: {title}"

    @property
    def version_identifier(self):
        """Version identifier of the wrapped dataset version."""
        return self._dataset.version_identifier

    @property
    def version_history(self):
        """Changelog of the wrapped dataset version."""
        return self._dataset.version_changelog

    @property
    def url(self):
        """URL of the dataset's EBRAINS page."""
        return self._dataset.ebrains_page

    def __hash__(self):
        return hash(self._dataset)

    def __eq__(self, o: object) -> bool:
        # two features are equal iff they wrap the same dataset version
        return isinstance(o, EbrainsDataFeature) and self._dataset == o._dataset

    def _to_zip(self, fh: ZipFile):
        """Export this feature, adding a doi.md pointing at the EBRAINS page."""
        super()._to_zip(fh)
        fh.writestr("doi.md", DOI_TMPL.format(doi=self.url))