junifer 0.0.5.dev110__py3-none-any.whl → 0.0.5.dev145__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- junifer/__init__.py +17 -0
- junifer/_version.py +2 -2
- junifer/api/__init__.py +3 -0
- junifer/api/decorators.py +9 -0
- junifer/api/functions.py +7 -4
- junifer/api/parser.py +3 -0
- junifer/api/queue_context/__init__.py +3 -0
- junifer/api/res/afni/run_afni_docker.sh +1 -1
- junifer/api/res/ants/run_ants_docker.sh +1 -1
- junifer/api/res/fsl/run_fsl_docker.sh +1 -1
- junifer/api/tests/test_api_utils.py +2 -0
- junifer/configs/juseless/__init__.py +3 -0
- junifer/configs/juseless/datagrabbers/__init__.py +9 -0
- junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +3 -0
- junifer/configs/juseless/datagrabbers/camcan_vbm.py +3 -0
- junifer/configs/juseless/datagrabbers/ixi_vbm.py +3 -0
- junifer/configs/juseless/datagrabbers/ucla.py +3 -0
- junifer/configs/juseless/datagrabbers/ukb_vbm.py +3 -0
- junifer/data/__init__.py +20 -0
- junifer/data/coordinates.py +8 -0
- junifer/data/masks.py +10 -0
- junifer/data/parcellations.py +9 -0
- junifer/data/template_spaces.py +3 -0
- junifer/data/tests/test_masks.py +1 -1
- junifer/data/utils.py +3 -0
- junifer/datagrabber/__init__.py +15 -0
- junifer/datagrabber/aomic/__init__.py +3 -0
- junifer/datagrabber/aomic/id1000.py +3 -0
- junifer/datagrabber/aomic/piop1.py +3 -0
- junifer/datagrabber/aomic/piop2.py +3 -0
- junifer/datagrabber/base.py +3 -0
- junifer/datagrabber/datalad_base.py +3 -0
- junifer/datagrabber/hcp1200/__init__.py +3 -0
- junifer/datagrabber/hcp1200/datalad_hcp1200.py +3 -0
- junifer/datagrabber/hcp1200/hcp1200.py +3 -0
- junifer/datagrabber/multiple.py +3 -0
- junifer/datagrabber/pattern.py +5 -1
- junifer/datagrabber/pattern_datalad.py +3 -0
- junifer/datagrabber/utils.py +3 -0
- junifer/datareader/__init__.py +3 -0
- junifer/datareader/default.py +3 -0
- junifer/external/BrainPrint/brainprint/__init__.py +4 -0
- junifer/external/BrainPrint/brainprint/_version.py +3 -0
- junifer/external/BrainPrint/brainprint/asymmetry.py +91 -0
- junifer/external/BrainPrint/brainprint/brainprint.py +441 -0
- junifer/external/BrainPrint/brainprint/surfaces.py +258 -0
- junifer/external/BrainPrint/brainprint/utils/__init__.py +1 -0
- junifer/external/BrainPrint/brainprint/utils/_config.py +112 -0
- junifer/external/BrainPrint/brainprint/utils/utils.py +188 -0
- junifer/markers/__init__.py +22 -0
- junifer/markers/base.py +21 -7
- junifer/markers/brainprint.py +665 -0
- junifer/markers/collection.py +3 -0
- junifer/markers/complexity/__init__.py +10 -0
- junifer/markers/complexity/complexity_base.py +3 -0
- junifer/markers/complexity/hurst_exponent.py +3 -0
- junifer/markers/complexity/multiscale_entropy_auc.py +3 -0
- junifer/markers/complexity/perm_entropy.py +3 -0
- junifer/markers/complexity/range_entropy.py +3 -0
- junifer/markers/complexity/range_entropy_auc.py +3 -0
- junifer/markers/complexity/sample_entropy.py +3 -0
- junifer/markers/complexity/tests/test_hurst_exponent.py +7 -0
- junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +7 -0
- junifer/markers/complexity/tests/test_perm_entropy.py +7 -0
- junifer/markers/complexity/tests/test_range_entropy.py +7 -0
- junifer/markers/complexity/tests/test_range_entropy_auc.py +7 -0
- junifer/markers/complexity/tests/test_sample_entropy.py +7 -0
- junifer/markers/complexity/tests/test_weighted_perm_entropy.py +7 -0
- junifer/markers/complexity/weighted_perm_entropy.py +3 -0
- junifer/markers/ets_rss.py +3 -0
- junifer/markers/falff/__init__.py +3 -0
- junifer/markers/falff/_afni_falff.py +3 -0
- junifer/markers/falff/_junifer_falff.py +3 -0
- junifer/markers/falff/falff_base.py +3 -0
- junifer/markers/falff/falff_parcels.py +3 -0
- junifer/markers/falff/falff_spheres.py +3 -0
- junifer/markers/functional_connectivity/__init__.py +9 -0
- junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +3 -0
- junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +3 -0
- junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +3 -0
- junifer/markers/functional_connectivity/functional_connectivity_base.py +3 -0
- junifer/markers/functional_connectivity/functional_connectivity_parcels.py +3 -0
- junifer/markers/functional_connectivity/functional_connectivity_spheres.py +3 -0
- junifer/markers/parcel_aggregation.py +3 -0
- junifer/markers/reho/__init__.py +3 -0
- junifer/markers/reho/_afni_reho.py +4 -1
- junifer/markers/reho/_junifer_reho.py +4 -1
- junifer/markers/reho/reho_base.py +2 -0
- junifer/markers/reho/reho_parcels.py +5 -2
- junifer/markers/reho/reho_spheres.py +5 -2
- junifer/markers/sphere_aggregation.py +3 -0
- junifer/markers/temporal_snr/__init__.py +3 -0
- junifer/markers/temporal_snr/temporal_snr_base.py +3 -0
- junifer/markers/temporal_snr/temporal_snr_parcels.py +3 -0
- junifer/markers/temporal_snr/temporal_snr_spheres.py +3 -0
- junifer/markers/tests/test_brainprint.py +47 -0
- junifer/onthefly/__init__.py +3 -0
- junifer/onthefly/read_transform.py +3 -0
- junifer/pipeline/__init__.py +8 -0
- junifer/pipeline/pipeline_step_mixin.py +3 -0
- junifer/pipeline/registry.py +3 -0
- junifer/pipeline/singleton.py +3 -0
- junifer/pipeline/update_meta_mixin.py +3 -0
- junifer/pipeline/utils.py +3 -0
- junifer/pipeline/workdir_manager.py +3 -0
- junifer/preprocess/__init__.py +8 -0
- junifer/preprocess/base.py +4 -1
- junifer/preprocess/confounds/__init__.py +3 -0
- junifer/preprocess/confounds/fmriprep_confound_remover.py +3 -0
- junifer/preprocess/smoothing/__init__.py +3 -0
- junifer/preprocess/warping/__init__.py +3 -0
- junifer/preprocess/warping/_ants_warper.py +3 -0
- junifer/preprocess/warping/_fsl_warper.py +3 -0
- junifer/stats.py +3 -0
- junifer/storage/__init__.py +8 -0
- junifer/storage/base.py +3 -0
- junifer/storage/hdf5.py +3 -0
- junifer/storage/pandas_base.py +3 -0
- junifer/storage/sqlite.py +3 -0
- junifer/storage/utils.py +9 -0
- junifer/testing/__init__.py +3 -0
- junifer/testing/datagrabbers.py +7 -0
- junifer/testing/utils.py +3 -0
- junifer/utils/__init__.py +10 -0
- junifer/utils/fs.py +3 -0
- junifer/utils/helpers.py +3 -0
- junifer/utils/logging.py +10 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/METADATA +2 -1
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/RECORD +134 -124
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/AUTHORS.rst +0 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/LICENSE.md +0 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/WHEEL +0 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/entry_points.txt +0 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,665 @@
|
|
1
|
+
"""Provide class for BrainPrint."""
|
2
|
+
|
3
|
+
# Authors: Synchon Mandal <s.mandal@fz-juelich.de>
|
4
|
+
# License: AGPL
|
5
|
+
|
6
|
+
try:
|
7
|
+
from importlib.metadata import packages_distributions
|
8
|
+
except ImportError: # pragma: no cover
|
9
|
+
from importlib_metadata import packages_distributions
|
10
|
+
|
11
|
+
import uuid
|
12
|
+
from copy import deepcopy
|
13
|
+
from importlib.util import find_spec
|
14
|
+
from itertools import chain
|
15
|
+
from pathlib import Path
|
16
|
+
from typing import (
|
17
|
+
TYPE_CHECKING,
|
18
|
+
Any,
|
19
|
+
ClassVar,
|
20
|
+
Dict,
|
21
|
+
List,
|
22
|
+
Optional,
|
23
|
+
Set,
|
24
|
+
Union,
|
25
|
+
)
|
26
|
+
|
27
|
+
import numpy as np
|
28
|
+
import numpy.typing as npt
|
29
|
+
|
30
|
+
from ..api.decorators import register_marker
|
31
|
+
from ..external.BrainPrint.brainprint.brainprint import (
|
32
|
+
compute_asymmetry,
|
33
|
+
compute_brainprint,
|
34
|
+
)
|
35
|
+
from ..external.BrainPrint.brainprint.surfaces import surf_to_vtk
|
36
|
+
from ..pipeline import WorkDirManager
|
37
|
+
from ..pipeline.utils import check_ext_dependencies
|
38
|
+
from ..utils import logger, raise_error, run_ext_cmd
|
39
|
+
from .base import BaseMarker
|
40
|
+
|
41
|
+
|
42
|
+
if TYPE_CHECKING:
|
43
|
+
from junifer.storage import BaseFeatureStorage
|
44
|
+
|
45
|
+
|
46
|
+
__all__ = ["BrainPrint"]  # names exported via ``from ... import *``
|
47
|
+
|
48
|
+
|
49
|
+
@register_marker
class BrainPrint(BaseMarker):
    """Class for BrainPrint.

    Parameters
    ----------
    num : positive int, optional
        Number of eigenvalues to compute (default 50).
    skip_cortex : bool, optional
        Whether to skip cortical surface or not (default False).
    keep_eigenvectors : bool, optional
        Whether to also return eigenvectors or not (default False).
    norm : str, optional
        Eigenvalues normalization method (default "none").
    reweight : bool, optional
        Whether to reweight eigenvalues or not (default False).
    asymmetry : bool, optional
        Whether to calculate asymmetry between lateral structures
        (default False).
    asymmetry_distance : {"euc"}, optional
        Distance measurement to use if ``asymmetry=True``:

        * ``"euc"`` : Euclidean

        (default "euc").
    use_cholmod : bool, optional
        If True, attempts to use the Cholesky decomposition for improved
        execution speed. Requires the ``scikit-sparse`` library. If it cannot
        be found, an error will be thrown. If False, will use slower LU
        decomposition (default False).
    name : str, optional
        The name of the marker. If None, will use the class name (default
        None).

    """

    _EXT_DEPENDENCIES: ClassVar[List[Dict[str, Union[str, List[str]]]]] = [
        {
            "name": "freesurfer",
            "commands": [
                "mri_binarize",
                "mri_pretess",
                "mri_mc",
                "mris_convert",
            ],
        },
    ]

    _DEPENDENCIES: ClassVar[Set[str]] = {"lapy", "numpy"}

    def __init__(
        self,
        num: int = 50,
        skip_cortex: bool = False,
        keep_eigenvectors: bool = False,
        norm: str = "none",
        reweight: bool = False,
        asymmetry: bool = False,
        asymmetry_distance: str = "euc",
        use_cholmod: bool = False,
        name: Optional[str] = None,
    ) -> None:
        self.num = num
        self.skip_cortex = skip_cortex
        self.keep_eigenvectors = keep_eigenvectors
        self.norm = norm
        self.reweight = reweight
        self.asymmetry = asymmetry
        self.asymmetry_distance = asymmetry_distance
        self.use_cholmod = use_cholmod
        super().__init__(name=name, on="FreeSurfer")

    def get_valid_inputs(self) -> List[str]:
        """Get valid data types for input.

        Returns
        -------
        list of str
            The list of data types that can be used as input for this marker.

        """
        return ["FreeSurfer"]

    # TODO: kept for making this class concrete; should be removed later
    def get_output_type(self, input_type: str) -> str:
        """Get output type.

        Parameters
        ----------
        input_type : str
            The data type input to the marker.

        Returns
        -------
        str
            The storage type output by the marker.

        """
        return "vector"

    # TODO: overridden to allow multiple outputs from single data type; should
    # be removed later
    def validate(self, input: List[str]) -> List[str]:
        """Validate the pipeline step.

        Parameters
        ----------
        input : list of str
            The input to the pipeline step.

        Returns
        -------
        list of str
            The output of the pipeline step.

        """

        def _check_dependencies(obj) -> None:
            """Check obj._DEPENDENCIES.

            Parameters
            ----------
            obj : object
                Object to check _DEPENDENCIES of.

            Raises
            ------
            ImportError
                If the pipeline step object is missing dependencies required
                for its working.

            """
            # Check if _DEPENDENCIES attribute is found;
            # (markers and preprocessors will have them but not datareaders
            # as of now)
            dependencies_not_found = []
            if hasattr(obj, "_DEPENDENCIES"):
                # Check if dependencies are importable
                for dependency in obj._DEPENDENCIES:
                    # First perform an easy check
                    if find_spec(dependency) is None:
                        # Then check mapped names (import name can differ
                        # from the distribution name)
                        if dependency not in list(
                            chain.from_iterable(
                                packages_distributions().values()
                            )
                        ):
                            dependencies_not_found.append(dependency)
            # Raise error if any dependency is not found
            if dependencies_not_found:
                raise_error(
                    msg=(
                        f"{dependencies_not_found} are not installed but are "
                        f"required for using {obj.__class__.__name__}."
                    ),
                    klass=ImportError,
                )

        def _check_ext_dependencies(obj) -> None:
            """Check obj._EXT_DEPENDENCIES.

            Parameters
            ----------
            obj : object
                Object to check _EXT_DEPENDENCIES of.

            """
            # Check if _EXT_DEPENDENCIES attribute is found;
            # (some markers and preprocessors might have them)
            if hasattr(obj, "_EXT_DEPENDENCIES"):
                for dependency in obj._EXT_DEPENDENCIES:
                    check_ext_dependencies(**dependency)

        # Check dependencies
        _check_dependencies(self)
        # Check external dependencies
        # _check_ext_dependencies(self)
        # Validate input
        _ = self.validate_input(input=input)
        # Validate output type; this marker emits both table- and
        # vector-shaped features (see ``store``)
        outputs = ["scalar_table", "vector"]
        return outputs

    def _create_aseg_surface(
        self,
        aseg_path: Path,
        norm_path: Path,
        indices: List,
    ) -> Path:
        """Generate a surface from the aseg and label files.

        Parameters
        ----------
        aseg_path : pathlib.Path
            The FreeSurfer aseg path.
        norm_path : pathlib.Path
            The FreeSurfer norm path.
        indices : list
            List of label indices to include in the surface generation.

        Returns
        -------
        pathlib.Path
            Path to the generated surface in VTK format.

        """
        # Unique prefix so parallel invocations do not clash in the tempdir
        tempfile_prefix = f"aseg.{uuid.uuid4()}"

        # Set mri_binarize command
        mri_binarize_output_path = self._tempdir / f"{tempfile_prefix}.mgz"
        mri_binarize_cmd = [
            "mri_binarize",
            f"--i {aseg_path.resolve()}",
            # --match takes space-separated label indices; joining without a
            # separator would fuse e.g. ["11", "12"] into the bogus label 1112
            f"--match {' '.join(indices)}",
            f"--o {mri_binarize_output_path.resolve()}",
        ]
        # Call mri_binarize command
        run_ext_cmd(name="mri_binarize", cmd=mri_binarize_cmd)

        label_value = "1"
        # Fix label (pretess)
        # Set mri_pretess command
        mri_pretess_cmd = [
            "mri_pretess",
            f"{mri_binarize_output_path.resolve()}",
            f"{label_value}",
            f"{norm_path.resolve()}",
            f"{mri_binarize_output_path.resolve()}",
        ]
        # Call mri_pretess command
        run_ext_cmd(name="mri_pretess", cmd=mri_pretess_cmd)

        # Run marching cube to extract surface
        # Set mri_mc command
        mri_mc_output_path = self._tempdir / f"{tempfile_prefix}.surf"
        mri_mc_cmd = [
            "mri_mc",
            f"{mri_binarize_output_path.resolve()}",
            f"{label_value}",
            f"{mri_mc_output_path.resolve()}",
        ]
        # Run mri_mc command
        run_ext_cmd(name="mri_mc", cmd=mri_mc_cmd)

        # Convert to vtk
        # Set mris_convert command
        surface_path = (
            self._element_tempdir / f"aseg.final.{'_'.join(indices)}.vtk"
        )
        mris_convert_cmd = [
            "mris_convert",
            f"{mri_mc_output_path.resolve()}",
            f"{surface_path.resolve()}",
        ]
        # Run mris_convert command
        run_ext_cmd(name="mris_convert", cmd=mris_convert_cmd)

        return surface_path

    def _create_aseg_surfaces(
        self,
        aseg_path: Path,
        norm_path: Path,
    ) -> Dict[str, Path]:
        """Create surfaces from FreeSurfer aseg labels.

        Parameters
        ----------
        aseg_path : pathlib.Path
            The FreeSurfer aseg path.
        norm_path : pathlib.Path
            The FreeSurfer norm path.

        Returns
        -------
        dict
            Dictionary of label names mapped to corresponding surface paths.

        """
        # Define aseg labels

        # combined and individual aseg labels:
        # - Left  Striatum: left  Caudate + Putamen + Accumbens
        # - Right Striatum: right Caudate + Putamen + Accumbens
        # - CorpusCallosum: 5 subregions combined
        # - Cerebellum: brainstem + (left+right) cerebellum WM and GM
        # - Ventricles: (left+right) lat.vent + inf.lat.vent + choroidplexus +
        #   3rdVent + CSF
        # - Lateral-Ventricle: lat.vent + inf.lat.vent + choroidplexus
        # - 3rd-Ventricle: 3rd-Ventricle + CSF

        aseg_labels = {
            "CorpusCallosum": ["251", "252", "253", "254", "255"],
            "Cerebellum": ["7", "8", "16", "46", "47"],
            "Ventricles": ["4", "5", "14", "24", "31", "43", "44", "63"],
            "3rd-Ventricle": ["14", "24"],
            "4th-Ventricle": ["15"],
            "Brain-Stem": ["16"],
            "Left-Striatum": ["11", "12", "26"],
            "Left-Lateral-Ventricle": ["4", "5", "31"],
            "Left-Cerebellum-White-Matter": ["7"],
            "Left-Cerebellum-Cortex": ["8"],
            "Left-Thalamus-Proper": ["10"],
            "Left-Caudate": ["11"],
            "Left-Putamen": ["12"],
            "Left-Pallidum": ["13"],
            "Left-Hippocampus": ["17"],
            "Left-Amygdala": ["18"],
            "Left-Accumbens-area": ["26"],
            "Left-VentralDC": ["28"],
            "Right-Striatum": ["50", "51", "58"],
            "Right-Lateral-Ventricle": ["43", "44", "63"],
            "Right-Cerebellum-White-Matter": ["46"],
            "Right-Cerebellum-Cortex": ["47"],
            "Right-Thalamus-Proper": ["49"],
            "Right-Caudate": ["50"],
            "Right-Putamen": ["51"],
            "Right-Pallidum": ["52"],
            "Right-Hippocampus": ["53"],
            "Right-Amygdala": ["54"],
            "Right-Accumbens-area": ["58"],
            "Right-VentralDC": ["60"],
        }
        return {
            label: self._create_aseg_surface(
                aseg_path=aseg_path,
                norm_path=norm_path,
                indices=indices,
            )
            for label, indices in aseg_labels.items()
        }

    def _create_cortical_surfaces(
        self,
        lh_white_path: Path,
        rh_white_path: Path,
        lh_pial_path: Path,
        rh_pial_path: Path,
    ) -> Dict[str, Path]:
        """Create cortical surfaces from FreeSurfer labels.

        Parameters
        ----------
        lh_white_path : pathlib.Path
            The FreeSurfer lh.white path.
        rh_white_path : pathlib.Path
            The FreeSurfer rh.white path.
        lh_pial_path : pathlib.Path
            The FreeSurfer lh.pial path.
        rh_pial_path : pathlib.Path
            The FreeSurfer rh.pial path.

        Returns
        -------
        dict
            Cortical surface label names with their paths as dictionary.

        """
        return {
            "lh-white-2d": surf_to_vtk(
                lh_white_path.resolve(),
                (self._element_tempdir / "lh.white.vtk").resolve(),
            ),
            "rh-white-2d": surf_to_vtk(
                rh_white_path.resolve(),
                (self._element_tempdir / "rh.white.vtk").resolve(),
            ),
            "lh-pial-2d": surf_to_vtk(
                lh_pial_path.resolve(),
                (self._element_tempdir / "lh.pial.vtk").resolve(),
            ),
            "rh-pial-2d": surf_to_vtk(
                rh_pial_path.resolve(),
                (self._element_tempdir / "rh.pial.vtk").resolve(),
            ),
        }

    def compute(
        self,
        input: Dict[str, Any],
        extra_input: Optional[Dict] = None,
    ) -> Dict:
        """Compute.

        Parameters
        ----------
        input : dict
            The FreeSurfer data as dictionary.
        extra_input : dict, optional
            The other fields in the pipeline data object (default None).

        Returns
        -------
        dict
            The computed result as dictionary. The dictionary has the following
            keys:

            * ``eigenvalues`` : dict of surface labels (str) and eigenvalues
                                (``np.ndarray``)
            * ``eigenvectors`` : dict of surface labels (str) and eigenvectors
                                 (``np.ndarray``) if ``keep_eigenvectors=True``
                                 else None
            * ``distances`` : dict of ``{left_label}_{right_label}`` (str) and
                              distance (float) if ``asymmetry=True`` else None

        References
        ----------
        .. [1] Wachinger, C., Golland, P., Kremen, W. et al. (2015)
               BrainPrint: A discriminative characterization of brain
               morphology.
               NeuroImage, Volume 109, Pages 232-248.
               https://doi.org/10.1016/j.neuroimage.2015.01.032.
        .. [2] Reuter, M., Wolter, F.E., Peinecke, N. (2006)
               Laplace-Beltrami spectra as 'Shape-DNA' of surfaces and solids.
               Computer-Aided Design, Volume 38, Issue 4, Pages 342-366.
               https://doi.org/10.1016/j.cad.2005.10.011.

        """
        logger.debug("Computing BrainPrint")

        # Create component-scoped tempdir
        self._tempdir = WorkDirManager().get_tempdir(prefix="brainprint")
        # Create element-scoped tempdir so that the files are
        # available later as nibabel stores file path reference for
        # loading on computation
        self._element_tempdir = WorkDirManager().get_element_tempdir(
            prefix="brainprint"
        )
        # Generate surfaces
        surfaces = self._create_aseg_surfaces(
            aseg_path=input["aseg"]["path"],
            norm_path=input["norm"]["path"],
        )
        if not self.skip_cortex:
            cortical_surfaces = self._create_cortical_surfaces(
                lh_white_path=input["lh_white"]["path"],
                rh_white_path=input["rh_white"]["path"],
                lh_pial_path=input["lh_pial"]["path"],
                rh_pial_path=input["rh_pial"]["path"],
            )
            surfaces.update(cortical_surfaces)
        # Compute brainprint
        eigenvalues, _ = compute_brainprint(
            surfaces=surfaces,
            keep_eigenvectors=self.keep_eigenvectors,
            num=self.num,
            norm=self.norm,
            reweight=self.reweight,
            use_cholmod=self.use_cholmod,
        )
        # Calculate distances (if required)
        distances = None
        if self.asymmetry:
            distances = compute_asymmetry(
                eigenvalues=eigenvalues,
                distance=self.asymmetry_distance,
                skip_cortex=self.skip_cortex,
            )

        # Delete tempdir
        WorkDirManager().delete_tempdir(self._tempdir)

        # Each eigenvalues entry carries [area, volume, ev0, ev1, ...];
        # split those into three separate features below
        output = {
            "eigenvalues": {
                "data": self._fix_nan(
                    [val[2:] for val in eigenvalues.values()]
                ).T,
                "col_names": list(eigenvalues.keys()),
                "row_names": [f"ev{i}" for i in range(self.num)],
                "row_header_col_name": "eigenvalue",
            },
            "areas": {
                "data": self._fix_nan(
                    [val[0] for val in eigenvalues.values()]
                ),
                "col_names": list(eigenvalues.keys()),
            },
            "volumes": {
                "data": self._fix_nan(
                    [val[1] for val in eigenvalues.values()]
                ),
                "col_names": list(eigenvalues.keys()),
            },
        }
        if self.asymmetry:
            output["distances"] = {
                "data": self._fix_nan(list(distances.values())),
                "col_names": list(distances.keys()),
            }
        return output

    def _fix_nan(
        self,
        input_data: List[Union[float, str, npt.ArrayLike]],
    ) -> np.ndarray:
        """Convert BrainPrint output with string NaN to ``numpy.nan``.

        Parameters
        ----------
        input_data : list of str, float or numpy.ndarray-like
            The data to convert.

        Returns
        -------
        np.ndarray
            The converted data as ``numpy.ndarray``.

        """
        arr = np.asarray(input_data)
        arr[arr == "NaN"] = np.nan
        return arr.astype(np.float64)

    # TODO: overridden to allow storing multiple outputs from single input;
    # should be removed later
    def store(
        self,
        type_: str,
        feature: str,
        out: Dict[str, Any],
        storage: "BaseFeatureStorage",
    ) -> None:
        """Store.

        Parameters
        ----------
        type_ : str
            The data type to store.
        feature : {"eigenvalues", "distances", "areas", "volumes"}
            The feature name to store.
        out : dict
            The computed result as a dictionary to store.
        storage : storage-like
            The storage class, for example, SQLiteFeatureStorage.

        Raises
        ------
        ValueError
            If ``feature`` is invalid.

        """
        if feature == "eigenvalues":
            output_type = "scalar_table"
        elif feature in ["distances", "areas", "volumes"]:
            output_type = "vector"
        else:
            raise_error(f"Unknown feature: {feature}")

        logger.debug(f"Storing {output_type} in {storage}")
        storage.store(kind=output_type, **out)

    # TODO: overridden to allow storing multiple outputs from single input;
    # should be removed later
    def _fit_transform(
        self,
        input: Dict[str, Dict],
        storage: Optional["BaseFeatureStorage"] = None,
    ) -> Dict:
        """Fit and transform.

        Parameters
        ----------
        input : dict
            The Junifer Data object.
        storage : storage-like, optional
            The storage class, for example, SQLiteFeatureStorage.

        Returns
        -------
        dict
            The processed output as a dictionary. If `storage` is provided,
            empty dictionary is returned.

        """
        out = {}
        for type_ in self._on:
            if type_ in input.keys():
                logger.info(f"Computing {type_}")
                t_input = input[type_]
                extra_input = input.copy()
                extra_input.pop(type_)
                t_meta = t_input["meta"].copy()
                t_meta["type"] = type_

                # Returns multiple features
                t_out = self.compute(input=t_input, extra_input=extra_input)

                if storage is None:
                    out[type_] = {}

                for feature_name, feature_data in t_out.items():
                    # Make deep copy of the feature data for manipulation
                    feature_data_copy = deepcopy(feature_data)
                    # Make deep copy of metadata and add to feature data
                    feature_data_copy["meta"] = deepcopy(t_meta)
                    # Update metadata for the feature,
                    # feature data is not manipulated, only meta
                    self.update_meta(feature_data_copy, "marker")
                    # Update marker feature's metadata name
                    feature_data_copy["meta"]["marker"][
                        "name"
                    ] += f"_{feature_name}"

                    if storage is not None:
                        logger.info(f"Storing in {storage}")
                        self.store(
                            type_=type_,
                            feature=feature_name,
                            out=feature_data_copy,
                            storage=storage,
                        )
                    else:
                        logger.info(
                            "No storage specified, returning dictionary"
                        )
                        out[type_][feature_name] = feature_data_copy

        return out
junifer/markers/complexity/__init__.py
CHANGED
@@ -25,3 +25,13 @@ else:
|
|
25
25
|
from .weighted_perm_entropy import WeightedPermEntropy
|
26
26
|
from .sample_entropy import SampleEntropy
|
27
27
|
from .multiscale_entropy_auc import MultiscaleEntropyAUC
|
28
|
+
|
29
|
+
# Names exported via ``from ... import *`` for the complexity markers.
__all__ = [
    "HurstExponent",
    "RangeEntropy",
    "RangeEntropyAUC",
    "PermEntropy",
    "WeightedPermEntropy",
    "SampleEntropy",
    "MultiscaleEntropyAUC",
]
|